diff --git a/Anymate/.gitignore b/Anymate/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..d4162f599d7bbc11d4337c8fdeecc8789dcb909e --- /dev/null +++ b/Anymate/.gitignore @@ -0,0 +1,26 @@ +__pycache__ +*.pt +*.tar +*.tar +*.txt +*.glb* +*.obj +*.ckpt +*.blend +*.blend1 +test_* + +blender-* +*.json* +*.glb +*.gltf +*.fbx +*.FBX +*.dae +*.obj +*.mtl +*.binvox +*.csv +*.tga +*.png +*.jpg \ No newline at end of file diff --git a/Anymate/__init__.py b/Anymate/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Anymate/args.py b/Anymate/args.py new file mode 100644 index 0000000000000000000000000000000000000000..715dedbb118882b926d0480978a56fa7cd710ca4 --- /dev/null +++ b/Anymate/args.py @@ -0,0 +1,22 @@ +class AnymateArgs: + def __init__(self): + # self.encoder = "miche" + # self.decoder = "transformer_latent" + # self.dataset = "train" + # self.run_name = "miche-transformer_latent-train-8gpu-finetune" + self.checkpoint_joint = "Anymate/checkpoints/joint/bert-transformer_latent-train-8gpu-finetune.pth.tar" + self.checkpoint_conn = "Anymate/checkpoints/conn/bert-attendjoints_con_combine-train-8gpu-finetune.pth.tar" + self.checkpoint_skin = "Anymate/checkpoints/skin/bert-attendjoints_combine-train-8gpu-finetune.pth.tar" + + self.device = "cuda" + self.num_joints = 96 + + +class UIArgs: + def __init__(self): + self.checkpoint_joint = "Anymate/checkpoints/joint/bert-transformer_latent-train-8gpu-finetune.pth.tar" + self.checkpoint_conn = "Anymate/checkpoints/conn/bert-attendjoints_con_combine-train-8gpu-finetune.pth.tar" + self.checkpoint_skin = "Anymate/checkpoints/skin/bert-attendjoints_combine-train-8gpu-finetune.pth.tar" + +ui_args = UIArgs() +anymate_args = AnymateArgs() \ No newline at end of file diff --git a/Anymate/blender_script.py b/Anymate/blender_script.py new file mode 100644 index 0000000000000000000000000000000000000000..a55226add7c137073d90555380790d1a34133441 --- /dev/null +++ b/Anymate/blender_script.py @@ -0,0 +1,747 @@ +import bpy +import mathutils +from mathutils import Vector, Matrix + +import os +import sys +import random +import numpy as np +import json +import argparse + + +IMPORT_FUNCTIONS = { + "obj": bpy.ops.wm.obj_import, + "glb": bpy.ops.import_scene.gltf, + "gltf": bpy.ops.import_scene.gltf, + "usd": bpy.ops.import_scene.usd, + "fbx": bpy.ops.import_scene.fbx, + "stl": bpy.ops.import_mesh.stl, + "usda": bpy.ops.import_scene.usda, + "dae": bpy.ops.wm.collada_import, + "ply": bpy.ops.import_mesh.ply, + "abc": bpy.ops.wm.alembic_import, + "blend": bpy.ops.wm.append, +} + +def load_object(object_path: str) -> None: + """Loads a model with a supported file extension into the scene. + + Args: + object_path (str): Path to the model file. + + Raises: + ValueError: If the file extension is not supported. 
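+    Note: the importer is selected from IMPORT_FUNCTIONS by file extension; .blend files are appended (link=False) and glb/gltf files are imported with merge_vertices=True.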
+ + Returns: + None + """ + file_extension = object_path.split(".")[-1].lower() + if file_extension is None: + raise ValueError(f"Unsupported file type: {object_path}") + + # load from existing import functions + import_function = IMPORT_FUNCTIONS[file_extension] + + if file_extension == "blend": + import_function(directory=object_path, link=False) + elif file_extension in {"glb", "gltf"}: + import_function(filepath=object_path, merge_vertices=True) + else: + import_function(filepath=object_path) + +####################### save json ################################ +def save_json(output_path, mesh_obj, armature_obj, extra=None, arm_name=False): + # makedirs output_path + os.makedirs(output_path, exist_ok=True) + + # start retrieve the information of mesh, skining and rigging + + #1. retrieve the information of rigging, save the world matrix of the amature object + total_armature_info = {} + for obj in armature_obj: + # depsgraph = bpy.context.evaluated_depsgraph_get() + # obj = obj.evaluated_get(depsgraph) + armature_info = {} + armature_info["world_matrix"] = [list(row) for row in obj.matrix_world.copy()] + translation = obj.matrix_world.translation + for bone in obj.pose.bones: + bone_info = {} + bone_info["head_local"] = list(bone.head.copy()) + bone_info["head_world"] = list((obj.matrix_world.to_3x3() @ bone.head+translation).copy()) + # bone_info["matrix_local"] = [list(row) for row in bone.matrix_local.copy()] + bone_info["tail_local"] = list(bone.tail.copy()) + bone_info["tail_world"] = list((obj.matrix_world.to_3x3() @ bone.tail+translation).copy()) + + if bone.parent: + bone_info["parent"] = bone.parent.name.replace(" ", "_") + if arm_name: + bone_info["parent"] = obj.name + "--" + bone_info["parent"] + else: + bone_info["parent"] = None + bone_info["children"] = [] + if bone.children: + for child in bone.children: + if arm_name: + bone_info["children"].append(obj.name + "--" + child.name.replace(" ", "_")) + else: + bone_info["children"].append(child.name.replace(" ", "_")) + bone_name = bone.name.replace(" ", "_") + if arm_name: + bone_name = obj.name + "--" + bone_name + armature_info[bone_name] = bone_info + obj_name = obj.name.replace(" ", "_") + total_armature_info[obj.name] = armature_info + + + #2. 
retrieve the informatioon of skining + total_skinning_info = {} + for obj in mesh_obj: + vertex_groups = obj.vertex_groups + # if not vertex_groups: + # continue + # for group in vertex_groups: + skinning_info = {} + skinning_info["world_matrix"] = [list(row) for row in obj.matrix_world.copy()] + weight_info = [] + for vertex in obj.data.vertices: + vertex_info = {} + for group in vertex.groups: + name = vertex_groups[group.group].name + name = name.replace(" ", "_") + if arm_name: + arm_modifier = [modifier for modifier in obj.modifiers if modifier.type == 'ARMATURE'] + assert(len(arm_modifier) == 1) + name = arm_modifier[0].object.name + "--" + name + weight = group.weight + vertex_info[name] = weight + weight_info.append(vertex_info) + skinning_info["weight"] = weight_info + obj_name = obj.name.replace(" ", "_") + total_skinning_info[obj_name]=skinning_info + + + rigging_file_path = os.path.join(output_path, "rigging.json") + if extra: + rigging_file_path = rigging_file_path.replace("rigging.json", f'rigging_{extra}.json') + with open(rigging_file_path, "w") as f: + json.dump(total_armature_info, f, indent = 2) + + skining_file_path = os.path.join(output_path, "skining.json") + if extra: + skining_file_path = skining_file_path.replace("skining.json", f'skining_{extra}.json') + with open(skining_file_path, "w") as f: + json.dump(total_skinning_info, f , indent = 2) + + + return rigging_file_path + + +def apply_skinning_weights(json_file): + + with open(json_file, "r") as f: + skinning_data = json.load(f) + + armature_obj = bpy.data.objects.get("Armature") + if not armature_obj: + print("Error: Armature object 'Armature' not found.") + return + + # 将所有网格对象放置在骨骼对象的子集中 + count = 0 + for obj in bpy.context.scene.objects: + if obj.type == 'MESH': + obj.parent = armature_obj + count += 1 + + print("total mesh count:", count) + + for obj in bpy.context.scene.objects: + vertex_index = 0 + if obj.type == 'MESH': + mesh_name = obj.name + if mesh_name in skinning_data: + skinning_info = skinning_data[mesh_name] + if "weight" in skinning_info: + print("Applying skinning data for mesh:", mesh_name) + vertex_index = 0 + for vertex_weight in skinning_info["weight"]: + for bone_name, weight_value in vertex_weight.items(): + vertex_group = obj.vertex_groups.get(bone_name) + if vertex_group is None: + vertex_group = obj.vertex_groups.new(name=bone_name) + print("Vertex group created:", bone_name) + vertex_group.add([vertex_index], weight_value, 'REPLACE') + vertex_index += 1 + else: + print("No skinning data found for mesh:", mesh_name) + for obj in bpy.context.scene.objects: + if obj.type == 'MESH': + modifier = obj.modifiers.new(name="Armature", type='ARMATURE') + modifier.object = armature_obj + modifier.use_vertex_groups = True + print("Armature modifier added to mesh:", obj.name) + +def reload_rigging(rigging_file_path): + with open(rigging_file_path, "r") as f: + total_armature_info = json.load(f) + + bpy.ops.object.armature_add() + armature_obj = bpy.context.object + armature_obj.name = "Armature" + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.armature.select_all(action='SELECT') + bpy.ops.armature.delete() + bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.mode_set(mode='EDIT') + + world_matrix = mathutils.Matrix([[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1]]) + armature_obj.matrix_world = world_matrix + + for armature_name, armature_info in total_armature_info.items(): + for bone_name, bone_info in armature_info.items(): + if bone_name == "world_matrix": + continue + 
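+            # rebuild each edit bone directly from the world-space head/tail positions stored in rigging.json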
bone = armature_obj.data.edit_bones.new(bone_name) + bone.head = bone_info["head_world"] + bone.tail = bone_info["tail_world"] + + for bone_name, bone_info in armature_info.items(): + if bone_name == "world_matrix": + continue + bone = armature_obj.data.edit_bones[bone_name] + parent_name = bone_info["parent"] + if parent_name: + parent_bone = armature_obj.data.edit_bones[parent_name] + bone.parent = parent_bone + edit_len = len(armature_obj.data.edit_bones.keys()) + bpy.ops.object.mode_set(mode='OBJECT') + bone_len = len(armature_obj.data.bones.keys()) + assert(edit_len == bone_len, "bone number not match!" + str(edit_len) + " " + str(bone_len)) + bpy.ops.object.select_all(action='DESELECT') + armature_obj.select_set(True) + bpy.context.view_layer.objects.active = armature_obj + print("Rigging information has been reloaded!") + +############################# reload json ################################ +def reload_json(folder_path, version=0, export = None): + bpy.ops.wm.read_homefile(use_empty=True) + if version == 0: + obj_path = os.path.join(folder_path, "object.obj") + skinning_file_path = os.path.join(folder_path, "skining.json") + rigging_file_path = os.path.join(folder_path, "rigging.json") + elif version == 1: + obj_path = os.path.join(folder_path, "join.obj") + skinning_file_path = os.path.join(folder_path, "skining_norig.json") + rigging_file_path = os.path.join(folder_path, "rigging_norig.json") + elif version == 2: + obj_path = os.path.join(folder_path, "join.obj") + skinning_file_path = os.path.join(folder_path, "skining_norig2.json") + rigging_file_path = os.path.join(folder_path, "rigging_norig2.json") + # import_obj(obj_path) + load_object(obj_path) + reload_rigging(rigging_file_path) + apply_skinning_weights(skinning_file_path) + if export: + bpy.ops.wm.save_as_mainfile(filepath=export) + print("Done!") + + +def reset_scene() -> None: + """Resets the scene to a clean state. 
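+    Cameras and lights are kept; every other object, plus all material, texture and image datablocks, is removed.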
+ + Returns: + None + """ + # delete everything that isn't part of a camera or a light + for obj in bpy.data.objects: + if obj.type not in {"CAMERA", "LIGHT"}: + bpy.data.objects.remove(obj, do_unlink=True) + + # delete all the materials + for material in bpy.data.materials: + bpy.data.materials.remove(material, do_unlink=True) + + # delete all the textures + for texture in bpy.data.textures: + bpy.data.textures.remove(texture, do_unlink=True) + + # delete all the images + for image in bpy.data.images: + bpy.data.images.remove(image, do_unlink=True) + + +def save_mesh(path, mtl=False, obj_path=None): + if mtl: + # save the blend file + bpy.ops.wm.save_as_mainfile(filepath=obj_path + '/object.blend') + # reopen the blend file + bpy.ops.wm.open_mainfile(filepath=obj_path + '/object.blend') + # unpack all the materials and textures to obj_path + bpy.ops.file.unpack_all(method='WRITE_LOCAL') + # save to .obj without material + bpy.ops.wm.obj_export(filepath=path, export_materials=mtl, export_uv=mtl, export_triangulated_mesh=True) + + +def get_root_obj(obj): + if not obj.parent: + return obj + return get_root_obj(obj.parent) + +def normalize(objs): + # bpy.ops.object.select_all(action='DESELECT') + # # select objs and join them + # for obj in objs: + # obj.select_set(True) + # bpy.context.view_layer.objects.active = objs[0] + # name_join = objs[0].name + # bpy.ops.object.join() + # obj_join = bpy.context.active_object + # print(obj_join.matrix_world) + # print(name_join) + # assert(name_join == obj_join.name) + + objs_eval = [] + depsgraph = bpy.context.evaluated_depsgraph_get() + for obj in objs: + objs_eval.append(obj.evaluated_get(depsgraph)) + + vertices = [] + for obj in objs_eval: + for v in obj.data.vertices: + vertices.append(obj.matrix_world @ Vector((v.co.x, v.co.y, v.co.z, 1))) + + vertices = np.array(vertices) + min_x, min_y, min_z, _ = np.min(vertices, axis=0) + max_x, max_y, max_z, _ = np.max(vertices, axis=0) + + # print(min_x, min_y, min_z) + # print(max_x, max_y, max_z) + + scale_x = 1 / (max_x - min_x) + scale_y = 1 / (max_y - min_y) + scale_z = 1 / (max_z - min_z) + scale_min = min(scale_x, scale_y, scale_z) + + assert scale_min < 1e6 + + translate_x = - (max_x + min_x) / 2 * scale_min + translate_y = - (max_y + min_y) / 2 * scale_min + translate_z = - min_z * scale_min + + # form transformation matrix + trans = Matrix.Translation((translate_x, translate_y, translate_z)) + + scale = Matrix.Scale(scale_min, 4, (1, 0, 0)) @ Matrix.Scale(scale_min, 4, (0, 1, 0)) @ Matrix.Scale(scale_min, 4, (0, 0, 1)) + + # print(trans, scale) + + + root = get_root_obj(objs[0]) + # print(root.name) + # print(root.scale) + # print(root.location) + # print(root.matrix_world) + # root.location = mathutils.Vector(root.location) + mathutils.Vector((translate_x, translate_y, translate_z)) + # root.scale = mathutils.Vector(root.scale) * mathutils.Vector((scale_x, scale_y, scale_z)) + + # add the extra transformation to the root object's world matrix + root.matrix_world = trans @ scale @ root.matrix_world + # print(root.name) + # print(root.scale) + # print(root.location) + # print(root.matrix_world) + + # refresh + bpy.context.view_layer.update() + + ######### check if its successful + # objs_eval = [] + # depsgraph = bpy.context.evaluated_depsgraph_get() + # for obj in objs: + # objs_eval.append(obj.evaluated_get(depsgraph)) + + # vertices = [] + # for obj in objs_eval: + # for v in obj.data.vertices: + # vertices.append(obj.matrix_world @ Vector((v.co.x, v.co.y, v.co.z, 1))) + + # vertices = 
np.array(vertices) + # min_x, min_y, min_z, _ = np.min(vertices, axis=0) + # max_x, max_y, max_z, _ = np.max(vertices, axis=0) + + # print(min_x, min_y, min_z) + # print(max_x, max_y, max_z) + +def remesh(objs, target=5000): + num_v = {} + for obj in objs: + num_v[obj] = len(obj.data.vertices) + + # sort the num_v dict and make it a dict again + num_v_sort = sorted(num_v.items(), key=lambda x: x[1], reverse=True) + + # print(num_v_sort) + total_v = sum([num_v[obj] for obj in num_v]) + + iters = 0 + while total_v > target and iters<20: + reduce = [] + for obj, v in num_v_sort: + reduce.append(obj) + if sum([num_v[oo] for oo in reduce]) > 0.5 * total_v: + break + for obj in reduce: + # check if have shape key + if obj.data.shape_keys is not None: + # remove obj from num_v + num_v.pop(obj) + continue + + ratio = 0.5 + # apply decimate modifier + bpy.context.view_layer.objects.active = obj + bpy.ops.object.modifier_add(type='DECIMATE') + bpy.context.object.modifiers["Decimate"].ratio = ratio + bpy.ops.object.modifier_apply(modifier="Decimate") + # update num_v + num_v[obj] = len(obj.data.vertices) + total_v = sum([num_v[obj] for obj in num_v]) + num_v_sort = sorted(num_v.items(), key=lambda x: x[1], reverse=True) + # print(num_v_sort) + iters+=1 + + +def get_parents(obj): + if not obj.parent: + return [obj.name] + parents = get_parents(obj.parent) + parents.append(obj.name) + return parents + +def check(objs, arm): + # assert('Sketchfab_model' in bpy.data.objects) + + # root_arm = get_root_obj(arm) + # for obj in objs: + # if root_arm != get_root_obj(obj): + # print('not same root') + # return -1 + # return 1 + + # action_num = 0 + # actions = bpy.data.actions + # for act in actions: + # action_num += 1 + # fcurves = act.fcurves + # data_paths = [] + # not_pose = False + # for fcurve in fcurves: + # data_paths.append(fcurve.data_path) + # if not fcurve.data_path.startswith('pose.bones'): + # # print(fcurve.data_path) + # not_pose = True + # # return -1 + # if not_pose: + # print('zyhsb') + # print(data_paths) + # return -1 + # return action_num + + for obj in objs: + vertex_groups = obj.vertex_groups + # if not vertex_groups: + # continue + # for group in vertex_groups: + for vertex in obj.data.vertices: + vertex_info = {} + for group in vertex.groups: + name = vertex_groups[group.group].name + name = name.replace(" ", "_") + if True: + arm_modifier = [modifier for modifier in obj.modifiers if modifier.type == 'ARMATURE'] + if len(arm_modifier) != 1: + print('zyhsb', len(arm_modifier)) + return -2 + # name = arm_modifier[0].object.name + "--" + name + return 1 + + # for obj in objs: + # if obj.data.shape_keys is not None: + # return 1 + # # only 942!!! 
+ # return 0 + + +def delete(objs): + # check if the mesh object has skinning weight + for obj in objs: + vertex_groups = obj.vertex_groups + if not vertex_groups: + # delete the object + bpy.data.objects.remove(obj) + # print('delete!!!') + meshes = [] + for obj in bpy.context.scene.objects: + if obj.type == "MESH": + meshes.append(obj) + + return meshes + + +def merge_mesh(folder_path, export = None, save_join = True): + # output_path = os.path.join(folder_path, "rigging_norig.json") + # if os.path.exists(output_path): + # print("Already processed folder:", folder_path) + # return + bpy.ops.wm.read_homefile(use_empty=True) + try: + reload_json(folder_path) + except: + print("Error in reloading json file") + # remove the folder + os.system(f"rm -r {folder_path}") + return None, None + + bpy.ops.object.select_all(action='DESELECT') + if export: + bpy.ops.wm.save_as_mainfile(filepath='reload_' + export) + + meshes = [] + for obj in bpy.context.scene.objects: + if obj.type == "MESH": + bpy.context.view_layer.objects.active = obj + obj.select_set(True) + meshes.append(obj) + print("meshes length", len(meshes)) + + bpy.ops.object.join() + if export: + bpy.ops.wm.save_as_mainfile(filepath='join_' + export) + + meshes = [] + for obj in bpy.context.scene.objects: + if obj.type == "MESH": + meshes.append(obj) + if len(meshes) != 1: + bpy.ops.wm.save_as_mainfile(filepath='join_f.blend') + assert len(meshes) == 1 + # remesh(meshes[0]) + + + if save_join: + obj_path = os.path.join(folder_path, "object.obj") + bpy.ops.wm.obj_export(filepath=obj_path, export_materials=False, export_uv=False, export_triangulated_mesh=True) + # mesh = trimesh.load(glb_file_path) + # mesh.export(obj_path, file_type='obj') + + + # save to json file + total_armature_count = 0 + armature_obj = [] + mesh_obj = [] + for obj in bpy.context.scene.objects: + if obj.type == "ARMATURE": + total_armature_count += 1 + armature_obj.append(obj) + if obj.type == "MESH": + mesh_obj.append(obj) + if total_armature_count == 0: + print("No rigging information for the file:", folder_path+"\n") + return None, None + + + ######### delete bones that are not in the vertex group + vertex_group_name = [group.name for group in mesh_obj[0].vertex_groups] + bpy.context.view_layer.objects.active = armature_obj[0] + bpy.ops.object.mode_set(mode='EDIT') + edit_bones = armature_obj[0].data.edit_bones + bone_delete = set([bone.name for bone in edit_bones]) - set(vertex_group_name) + print(f"Deleting {len(bone_delete)} bones") + for bone in bone_delete: + # if the bone is root, then do not delete it + if edit_bones[bone].parent == None: + # return len([1 for child in edit_bones[bone].children if child.name in bone_delete]) + num_children = len(edit_bones[bone].children) + if num_children <= 1: + edit_bones.remove(edit_bones[bone]) + continue + if num_children > 1: + center = mathutils.Vector((0, 0, 0)) + for child in edit_bones[bone].children: + center += child.head + center /= num_children + min_dist = 1e9 + for child in edit_bones[bone].children: + dist = (child.head - center).length + if dist < min_dist: + min_dist = dist + min_child = child + for child in edit_bones[bone].children: + if child != min_child: + child.parent = min_child + edit_bones.remove(edit_bones[bone]) + continue + continue + # assign bone's children to bone's parent + bone_obj = edit_bones[bone] + for child in bone_obj.children: + child.parent = bone_obj.parent + + edit_bones.remove(edit_bones[bone]) + bpy.ops.object.mode_set(mode='OBJECT') + + if export: + 
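+        # optional debug export of the scene after unused bones have been pruned from the armature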
bpy.ops.wm.save_as_mainfile(filepath='delete_' + export) + + mesh_obj = [] + armature_obj = [] + for obj in bpy.context.scene.objects: + if obj.type == "MESH": + mesh_obj.append(obj) + if obj.type == "ARMATURE": + armature_obj.append(obj) + assert len(mesh_obj) == 1 + assert len(armature_obj) == 1 + + return mesh_obj, armature_obj + + +def process(file_path, obj_path=None, stamp=None, tex=False): + # check if obj_path exists + # if os.path.exists(obj_path + '/object.obj'): + # print('object.obj exists') + # return True + reset_scene() + load_object(file_path) + # bpy.ops.import_scene.gltf(filepath=glb_file_path) + + # delete hierarchy collections['glTF_not_exported'] + if 'glTF_not_exported' in bpy.data.collections: + print('DELETE glTF_not_exported') + bpy.data.collections.remove(bpy.data.collections['glTF_not_exported']) + + if stamp is not None: + # Set the current frame to the stamp value + bpy.context.scene.frame_set(stamp) + print(f'Set the current frame to {stamp}') + + # Ensure all objects are updated to this frame + bpy.context.view_layer.update() + + mesh_obj = [] + armature_obj = [] + for obj in bpy.context.scene.objects: + if obj.type == "ARMATURE": + # if len(armature_obj) > 0: + # print(file_path, 'has more than 1 armature') + # return -2 + armature_obj.append(obj) + # obj.show_in_front = True + armature_obj[-1].data.pose_position = 'POSE' + if obj.type == "MESH": + mesh_obj.append(obj) + # if obj.data.shape_keys is not None: + # return False + + # mesh_obj = delete(mesh_obj) + # if len(mesh_obj) == 0: + # # print('zyhsb -1', file_path, obj_path) + # return -1 + # return check(mesh_obj, armature_obj) + + + # total_vertices = np.array([len(obj.data.vertices) for obj in mesh_obj]).sum() + # if total_vertices < 1000: return + # if total_vertices > 10000: remesh(mesh_obj) + + + # bpy.ops.object.select_all(action='DESELECT') + # armature_obj.select_set(True) + # execute(bpy.context) + + + # normalize(mesh_obj) + + + mesh_obj = delete(mesh_obj) + if len(mesh_obj) == 0: + # print('zyhsb -1', file_path, obj_path) + return -1 + + + save_json(obj_path, mesh_obj, armature_obj, arm_name=True) + + + if not tex: + save_mesh(obj_path + '/object.obj') + else: + save_mesh(obj_path + '/object.obj', mtl=True, obj_path=obj_path) + + + mesh_obj, armature_obj = merge_mesh(obj_path) + if mesh_obj is None or armature_obj is None: + # print('zyhsb -2', file_path, obj_path) + return -2 + + + try: + normalize(mesh_obj) + except: + os.system(f"rm -r {obj_path}") + # print('zyhsb -3', file_path, obj_path) + return -3 + + + save_json(obj_path, mesh_obj, armature_obj) + + if not tex: + save_mesh(obj_path + '/object.obj') + else: + save_mesh(obj_path + '/object.obj', mtl=True, obj_path=obj_path) + + + return 1 + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser() + parser.add_argument( + "--object_path", + type=str, + required=True, + help="Path to the object file", + ) + parser.add_argument( + "--output_dir", + type=str, + required=True, + help="Path to the directory where the rendered images and metadata will be saved.", + ) + parser.add_argument( + "--stamp", + type=int, + required=False, + help="Stamp to be used for the rendering.", + ) + parser.add_argument( + "--tex", + type=bool, + required=False, + help="Save the texture.", + ) + argv = sys.argv[sys.argv.index("--") + 1 :] + args = parser.parse_args(argv) + + os.makedirs(args.output_dir, exist_ok=True) + stamp = args.stamp if args.stamp else None + print(f'Stamp: {stamp}') + result = process(args.object_path, 
obj_path=args.output_dir, stamp=stamp, tex=args.tex) + # import numpy as np + # os.makedirs(args.output_dir, exist_ok=True) # the directory may be removed + # np.save(args.output_dir + '/result.npy', np.array(result)) \ No newline at end of file diff --git a/Anymate/checkpoints/.gitkeep b/Anymate/checkpoints/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Anymate/configs/.gitkeep b/Anymate/configs/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Anymate/configs/conn.yaml b/Anymate/configs/conn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e1abff94d426050be88f3053e00a592240359562 --- /dev/null +++ b/Anymate/configs/conn.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: ce + mode: conn + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: attendjoints_con_combine + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/conn_token.yaml b/Anymate/configs/conn_token.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e1abff94d426050be88f3053e00a592240359562 --- /dev/null +++ b/Anymate/configs/conn_token.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: ce + mode: conn + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: attendjoints_con_combine + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/diffusion.yaml b/Anymate/configs/diffusion.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b8dae8c9f1a0237f7f215fffa34caf1357957b72 --- /dev/null +++ b/Anymate/configs/diffusion.yaml @@ -0,0 +1,49 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 4000 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: diffusion + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 50 + num_train_step: 100 + num_training_points: 128 + seed: 42 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + encoder: transformer + decoder: 
Cross_Attention_Diffusion + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + input_channels: 3 + output_channels: 3 + num_z: 16 + num_x: 128 + z_dim: 768 + x_dim: 512 + num_blocks: 4 + num_compute_layers: 4 + num_heads: 8 + mlp_ratio: 4.0 + qkv_bias: true + drop: 0.0 + attn_drop: 0.0 + drop_path: 0.0 + num_latents: 16 + use_projection: true \ No newline at end of file diff --git a/Anymate/configs/diffusion_concat.yaml b/Anymate/configs/diffusion_concat.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8513edcdaf655e85ab8d822f85eeb285ee7986b8 --- /dev/null +++ b/Anymate/configs/diffusion_concat.yaml @@ -0,0 +1,46 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 4000 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: diffusion + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 1000 + num_train_step: 100 + num_training_points: 128 + seed: 42 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + encoder: bert + decoder: Pointe_Diffusion + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + input_channels: 3 + output_channels: 3 + n_ctx: 128 + width: 768 + layers: 12 + heads: 8 + init_scale: 0.25 + time_token_cond: true + cond_drop_prob: 0.1 + use_projection: true + + + diff --git a/Anymate/configs/diffusion_cross.yaml b/Anymate/configs/diffusion_cross.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3cf718c365c496d1726b1279d3dfed27cc9c2deb --- /dev/null +++ b/Anymate/configs/diffusion_cross.yaml @@ -0,0 +1,51 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 4000 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: diffusion + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 32 + trainset: Anymate_train + test_freq: 1000 + num_train_step: 100 + num_training_points: 128 + seed: 42 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + encoder: miche + decoder: Cross_Attention_Diffusion + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + input_channels: 3 + output_channels: 3 + num_z: 16 + num_x: 128 + z_dim: 768 + x_dim: 512 + num_blocks: 4 + num_compute_layers: 4 + num_heads: 8 + mlp_ratio: 4.0 + qkv_bias: true + drop: 0.0 + attn_drop: 0.0 + drop_path: 0.0 + num_latents: 16 + use_projection: true + + diff --git a/Anymate/configs/joints.yaml b/Anymate/configs/joints.yaml new file mode 100644 index 0000000000000000000000000000000000000000..250725603c166362c8d946f4de949901f55511fb --- /dev/null +++ b/Anymate/configs/joints.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: joints + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 10 + +optimizer: + 
weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: transformer_latent + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/joints_implicit.yaml b/Anymate/configs/joints_implicit.yaml new file mode 100644 index 0000000000000000000000000000000000000000..b7869584de281e527172ae90a7414707aa6beee6 --- /dev/null +++ b/Anymate/configs/joints_implicit.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: joints + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 8 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: implicit_transformer + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/joints_triplane.yaml b/Anymate/configs/joints_triplane.yaml new file mode 100644 index 0000000000000000000000000000000000000000..cefc29b69a2d7d0cc2f61f8f469d212938c4486d --- /dev/null +++ b/Anymate/configs/joints_triplane.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: chamfer + mode: joints + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: triplane + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/skin.yaml b/Anymate/configs/skin.yaml new file mode 100644 index 0000000000000000000000000000000000000000..438408308f3ebb98ecc9800dbb3c48d0c1ec3399 --- /dev/null +++ b/Anymate/configs/skin.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: cos_clamp + mode: skin + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 16 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: attendjoints_combine + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: 
./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/configs/skin_multi.yaml b/Anymate/configs/skin_multi.yaml new file mode 100644 index 0000000000000000000000000000000000000000..27c1822dd1ec04201370ff7dd4684b9736371f5a --- /dev/null +++ b/Anymate/configs/skin_multi.yaml @@ -0,0 +1,40 @@ +args: + aggr: max + checkpoint: Anymate/checkpoints + device: cuda + epochs: 200 + finetune: true + gamma: 0.2 + input_normal: false + logdir: Anymate/logs + loss: cos_clamp + mode: skin + resume: '' + root: Anymate/data + schedule: [] + start_epoch: 0 + test_batch: 1 + testset: Anymate_test + train_batch: 4 + trainset: Anymate_train + test_freq: 10 + +optimizer: + weight_decay: 1.0e-05 + lr: 0.0001 + +model: + decoder: attendjoints_multi + encoder: bert + config_path: ./ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml + ckpt_path: ./ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt + load_encoder: '' + num_joints: 96 + out_channels: 3 + width: 768 + heads: 12 + init_scale: 0.25 + flash: False + use_checkpoint: False + qkv_bias: False + separate: False \ No newline at end of file diff --git a/Anymate/dataset.py b/Anymate/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..893071f9df342855a3661aa7e34f55fd7fac3195 --- /dev/null +++ b/Anymate/dataset.py @@ -0,0 +1,62 @@ +import torch +from torch.utils.data import Dataset +import os +import numpy as np +from Anymate.utils.dataset_utils import create_mask, index_to_sparse, index_to_sparse_con + +def my_collate(batch): + # print(len(batch)) + data = {} + for key in batch[0]: + if key=='vox' or key=='name' or key=='joints_num' or key=='skins_index' or key=='skins_weight' or key=='parent_index' or key=='conns' or key=='joints' or key=='bones' or key=='mesh_skins_index' or key=='mesh_skins_weight' or key=='mesh_pc' or key=='mesh_face': + data[key] = [sample[key] for sample in batch] + elif key=='pc': + data['points_cloud'] = torch.stack([sample['pc'] for sample in batch]) + elif key=='skins': + continue + elif key=='bones_num': + data[key] = torch.tensor([sample['bones_num'] for sample in batch]) + else: + data[key] = torch.stack([sample[key] for sample in batch]) + + if 'skins_index' in batch[0]: + max_joints = max(data['joints_num']) + max_bones = max(data['bones_num']) + # max_joints = 64 + skin_list = [index_to_sparse(data['skins_index'][i].unsqueeze(0), data['skins_weight'][i].unsqueeze(0), [1, 8192, max_bones])[0] for i in range(len(data['skins_index']))] + data['skins'] = torch.stack(skin_list,dim=0) + data['joints_mask'] = torch.stack([create_mask(sample['joints_num'],max_len=max_joints) for sample in batch]) + data['bones_mask'] = torch.stack([create_mask(sample['bones_num'],max_len=max_bones) for sample in batch]) + + if 'conns' in batch[0]: + max_joints = max(data['joints_num']) + conn_matrix = torch.zeros(len(data['conns']), 96, max_joints) + for i in range(len(data['conns'])): + for j in range(data['joints_num'][i]): + conn_matrix[i, j, data['conns'][i][j].long()] = 1 + data['conns'] = conn_matrix + if 'joints' in batch[0]: + padded_joints_matrix = torch.ones(len(data['name']), 96, 3) * (-3) + for i in range(len(data['name'])): + padded_joints_matrix[i, :data['joints_num'][i], :] = data['joints'][i] + data['joints'] = 
padded_joints_matrix + if 'bones' in batch[0]: + padded_bones_matrix = torch.ones(len(data['name']), 64, 6) * (-3) + for i in range(len(data['name'])): + padded_bones_matrix[i, :data['bones_num'][i], :] = data['bones'][i] + data['bones'] = padded_bones_matrix + return data + +class AnymateDataset(Dataset): + def __init__(self, name='Anymate_test', root='Anymate/data'): + + if os.path.exists(os.path.join(root, name) + '.pt'): + self.data_list = torch.load(os.path.join(root, name) + '.pt') + else: + raise ValueError('Dataset not found at path: {}'.format(os.path.join(root, name) + '.pt')) + + def __len__(self): + return len(self.data_list) + + def __getitem__(self, idx): + return self.data_list[idx] \ No newline at end of file diff --git a/Anymate/get_checkpoints.sh b/Anymate/get_checkpoints.sh new file mode 100644 index 0000000000000000000000000000000000000000..280d8f2f852fd11511b255d1980bc28dd67c4b7b --- /dev/null +++ b/Anymate/get_checkpoints.sh @@ -0,0 +1,22 @@ +cd Anymate/checkpoints +mkdir joint +cd joint + +echo "Downloading joint checkpoints..." +wget "https://huggingface.co/yfdeng/Anymate/resolve/main/checkpoints/joint/bert-transformer_latent-train-8gpu-finetune.pth.tar?download=true" -O bert-transformer_latent-train-8gpu-finetune.pth.tar + +cd .. +mkdir conn +cd conn + +echo "Downloading conn checkpoints..." +wget "https://huggingface.co/yfdeng/Anymate/resolve/main/checkpoints/conn/bert-attendjoints_con_combine-train-8gpu-finetune.pth.tar?download=true" -O bert-attendjoints_con_combine-train-8gpu-finetune.pth.tar + +cd .. +mkdir skin +cd skin + +echo "Downloading skin checkpoints..." +wget "https://huggingface.co/yfdeng/Anymate/resolve/main/checkpoints/skin/bert-attendjoints_combine-train-8gpu-finetune.pth.tar?download=true" -O bert-attendjoints_combine-train-8gpu-finetune.pth.tar + +echo "Finished downloading checkpoints!" diff --git a/Anymate/get_datasets.sh b/Anymate/get_datasets.sh new file mode 100644 index 0000000000000000000000000000000000000000..464fe4da7e8abf0fabf5c3ac22573015933b6289 --- /dev/null +++ b/Anymate/get_datasets.sh @@ -0,0 +1,12 @@ +cd Anymate/data +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_test.pt?download=true" -O Anymate_test.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_0.pt?download=true" -O Anymate_train_0.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_1.pt?download=true" -O Anymate_train_1.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_2.pt?download=true" -O Anymate_train_2.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_3.pt?download=true" -O Anymate_train_3.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_4.pt?download=true" -O Anymate_train_4.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_5.pt?download=true" -O Anymate_train_5.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_6.pt?download=true" -O Anymate_train_6.pt +wget "https://huggingface.co/datasets/yfdeng/Anymate/resolve/main/Anymate_train_7.pt?download=true" -O Anymate_train_7.pt + +echo "Finished downloading datasets!" 
\ No newline at end of file diff --git a/Anymate/model.py b/Anymate/model.py new file mode 100644 index 0000000000000000000000000000000000000000..762c88d73c8feaee371d6afd57feb41aaf91dbe2 --- /dev/null +++ b/Anymate/model.py @@ -0,0 +1,360 @@ +import torch +import torch.nn as nn +from ThirdParty.michelangelo.utils.misc import get_config_from_file, instantiate_from_config +# from ThirdParty.PointLLM.pointllm.model.pointllm import PointLLMLlamaForCausalLM +from ThirdParty.michelangelo.models.modules.distributions import DiagonalGaussianDistribution +from ThirdParty.michelangelo.models.modules.embedder import components_from_spherical_harmonics +from Anymate.utils.diffusion_encoder import TransformerEncoder +from Anymate.models.joint import TransformerDecoder, ImplicitTransformerDecoder, TriPlaneDecoder +from Anymate.models.conn import AttendjointsDecoder_con_combine, AttendjointsDecoder_con_token +from Anymate.models.skin import AttendjointsDecoder_combine, AttendjointsDecoder_multi +from Anymate.models.diffusion import Pointe_Diffusion, Cross_Attention_Diffusion + +class Encoder(nn.Module): + def __init__(self, + only_embed = True, + config_path = './ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml', + ckpt_path = './ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt', + num_latents = 257, + device = 'cuda'): + + super().__init__() + + model_config = get_config_from_file(config_path) + if hasattr(model_config, "model"): + model_config = model_config.model + + if ckpt_path is not None: + model = instantiate_from_config(model_config, ckpt_path=ckpt_path) + else: + model = instantiate_from_config(model_config) + model.model.shape_model.encoder.num_latents = num_latents + model.model.shape_model.encoder.query = nn.Parameter(torch.randn((num_latents, 768), device=device, dtype=torch.float32) * 0.02) + + self.shape_projection = model.model.shape_projection + self.encoder = model.model.shape_model.encoder + self.normal_embedder = components_from_spherical_harmonics + old_linear_proj = self.encoder.input_proj + self.encoder.input_proj = nn.Linear(old_linear_proj.in_features + 25, old_linear_proj.out_features) + self.encoder.input_proj.weight.data[:, :old_linear_proj.in_features] = old_linear_proj.weight.data[:, :old_linear_proj.in_features].clone() + self.encoder.input_proj.bias.data = old_linear_proj.bias.data.clone() + if not only_embed: + self.embed_dim = model.model.shape_model.embed_dim + self.pre_kl = model.model.shape_model.pre_kl + self.post_kl = model.model.shape_model.post_kl + self.transformer = model.model.shape_model.transformer + + + def encode_latents(self, + pc: torch.FloatTensor, + feats = None): + + feats_embed = self.normal_embedder(feats) + feats = torch.cat([feats, feats_embed], dim=-1) + + x, _ = self.encoder(pc, feats) + + shape_embed = x[:, 0] + latents = x[:, 1:] + + return shape_embed, latents + + + def encode_shape_embed(self, surface, return_latents: bool = False): + """ + + Args: + surface (torch.FloatTensor): [bs, n, 3 + c] + return_latents (bool): + + Returns: + x (torch.FloatTensor): [bs, projection_dim] + shape_latents (torch.FloatTensor): [bs, m, d] + """ + + pc = surface[..., 0:3] + feats = surface[..., 3:] + + shape_embed, shape_latents = self.encode_latents(pc, feats) + x = shape_embed @ self.shape_projection + + if return_latents: + return x, shape_latents + else: + return x + + + def encode_kl_embed(self, latents: torch.FloatTensor, sample_posterior: bool = True): + posterior = None + if self.embed_dim > 0: + 
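+            # project latents to Gaussian moments and build the diagonal posterior that the KL embedding is sampled from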
moments = self.pre_kl(latents) + posterior = DiagonalGaussianDistribution(moments, feat_dim=-1) + + if sample_posterior: + kl_embed = posterior.sample() + else: + kl_embed = posterior.mode() + else: + kl_embed = latents + + return kl_embed, posterior + + + def decode(self, latents: torch.FloatTensor): + latents = self.post_kl(latents) + return self.transformer(latents) + + +class EncoderDecoder(nn.Module): + def __init__(self, + decoder = 'mlp', + encoder = 'miche', + config_path = './ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml', + ckpt_path = './ThirdParty/michelangelo/checkpoints/aligned_shape_latents/shapevae-256.ckpt', + load_encoder = '', + num_joints = 96, + out_channels = 3, + width = 768, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False, + separate = False, + **kwargs): + + super().__init__() + self.decoder_name = decoder + self.encoder_name = encoder + self.dtype = dtype + self.load_encoder = load_encoder + + if decoder == 'transformer_latent': + self.only_embed = False + self.return_latents = True + self.decoder = TransformerDecoder( + num_latents = num_joints, + out_channels = out_channels, + width = width, + device = device, + dtype = dtype, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + qkv_bias = qkv_bias + ) + elif decoder == 'implicit_transformer': + self.only_embed = False + self.return_latents = True + self.decoder = ImplicitTransformerDecoder( + device = device, + dtype = dtype, + num_latents = 257, + out_channels = 1, + width = width, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + qkv_bias = qkv_bias + ) + elif decoder == 'triplane': #consider add these parameters to config + self.only_embed = True + self.return_latents = False + self.decoder = TriPlaneDecoder( + z_dim = 768, + c_dim = 0, + w_dim = 768, + mapping_kwargs = {'num_layers': 2}, + synthesis_kwargs = {'num_fp16_res': 0, 'conv_clamp': None, 'fused_modconv_default': 'inference_only'} + ) + + elif decoder == 'Pointe_Diffusion': + self.only_embed = False + self.return_latents = True + self.decoder = Pointe_Diffusion(**kwargs) + + elif decoder == 'Cross_Attention_Diffusion': + self.only_embed = False + self.return_latents = True + self.decoder = Cross_Attention_Diffusion(**kwargs) + + elif decoder == 'attendjoints_combine': + self.only_embed = False + self.return_latents = True + self.decoder = AttendjointsDecoder_combine( + width = width, + device = device, + dtype = dtype, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + separate = separate, + qkv_bias = qkv_bias + ) + elif decoder == 'attendjoints_multi': + self.only_embed = False + self.return_latents = True + self.decoder = AttendjointsDecoder_multi( + width = width, + device = device, + dtype = dtype, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + qkv_bias = qkv_bias, + separate=separate + ) + elif decoder == 'attendjoints_con_combine': + self.only_embed = False + self.return_latents = True + self.decoder = AttendjointsDecoder_con_combine( + width = width, + device = device, + dtype = dtype, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + qkv_bias = qkv_bias + ) + elif decoder == 'attendjoints_con_token': + self.only_embed = False + self.return_latents = True + self.decoder = 
AttendjointsDecoder_con_token( + width = width, + device = device, + dtype = dtype, + heads = heads, + init_scale = init_scale, + flash = flash, + use_checkpoint = use_checkpoint, + qkv_bias = qkv_bias, + separate = separate + ) + + if encoder == 'miche': + if not self.load_encoder: + self.encoder = Encoder(only_embed=self.only_embed, config_path=config_path, ckpt_path=ckpt_path, device=device) + else: + self.encoder = Encoder(only_embed=self.only_embed, config_path=config_path, ckpt_path=None, device=device) + try: + print("=> loading encoder checkpoint '{}'".format(self.load_encoder)) + checkpoint = torch.load(self.load_encoder, map_location='cpu') + state_dict = {k[8:]: v for k, v in checkpoint['state_dict'].items() if k.startswith('encoder')} + self.encoder.load_state_dict(state_dict) + print("=> loaded encoder checkpoint '{}'".format(self.load_encoder)) + except: + print("=> no encoder checkpoint found at '{}'".format(self.load_encoder)) + if self.load_encoder: + self.point_proj = nn.Sequential( + nn.Linear(768, 768, dtype=dtype), + nn.GELU(), + nn.Linear(768, 768, dtype=dtype), + ) + + if encoder == 'bert': + # model_name = 'RunsenXu/PointLLM_7B_v1.2' + # model = PointLLMLlamaForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=False, use_cache=True, torch_dtype=dtype) + # self.encoder = model.model.point_backbone.to(device) + # model = None + from ThirdParty.PointLLM.pointllm.model import PointTransformer + from ThirdParty.PointLLM.pointllm.utils import cfg_from_yaml_file + import os + # address of config file, in the same dir of this file + point_bert_config_name = "PointTransformer_8192point_2layer" # * default for v1.2, v1.1 uses PointTransformer_base_8192point.yaml + point_bert_config_addr = os.path.join("./ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml") + print(f"Loading PointBERT config from {point_bert_config_addr}.") + point_bert_config = cfg_from_yaml_file(point_bert_config_addr) + point_bert_config.model.point_dims = 6 + use_max_pool = getattr(point_bert_config.model, "use_max_pool", False) # * default is false + + self.encoder = PointTransformer(point_bert_config.model, use_max_pool=use_max_pool).to(device) + if self.return_latents: + self.point_proj = nn.Sequential( + nn.Linear(384, 512, dtype=dtype), + nn.GELU(), + nn.Linear(512, 512, dtype=dtype), + nn.GELU(), + nn.Linear(512, 768, dtype=dtype) + ) + else: + self.point_proj = nn.ModuleList([ + nn.Sequential( + nn.Linear(384, 512, dtype=dtype), + nn.GELU(), + nn.Linear(512, 512, dtype=dtype), + nn.GELU(), + nn.Linear(512, 768, dtype=dtype) + ), + nn.Linear(513, 1, dtype=dtype) + ]) + if encoder == 'transformer': + self.points_cloud_embed = nn.Linear( + 768, 768, device=device, dtype=dtype + ) + self.encoder = TransformerEncoder(device=device,dtype=dtype, num_latents=kwargs['num_latents']) + + + + def encode(self, data, device='cuda'): + assert self.encoder_name in ['miche', 'bert', 'transformer'], f'Encoder {self.encoder_name} not supported' + if self.encoder_name == 'miche': + surface = data['points_cloud'].to(self.dtype).to(device) + + # encoding + shape_embed, shape_latents = self.encoder.encode_shape_embed(surface, return_latents=True) # ShapeAsLatentPerceiver.encode_latents(): encoder + + if self.only_embed: + if self.return_latents: + if self.load_encoder: + return self.point_proj(torch.cat([shape_embed.unsqueeze(1), shape_latents], dim=1)) + return torch.cat([shape_embed.unsqueeze(1), shape_latents], dim=1) # torch.Size([bs, 257, 768] + return shape_embed # shape_embed: 
torch.Size([bs, 768]) + + shape_zq, posterior = self.encoder.encode_kl_embed(shape_latents) # ShapeAsLatentPerceiver.encode_kl_embed(): pre_kl + DiagonalGaussianDistribution() + # shape_zq, posterior = self.encoder.encode_kl_embed(shape_latents, sample_posterior=False) # not sample + # pretrained weight has 0 +- 0.7 mean and 0.5 +- 0.5 std + # trained weight has 0 +- 1.8 mean and 0.1 +- 0.1 std + # generally okay + + latents = self.encoder.decode(shape_zq) # ShapeAsLatentPerceiver.decode(): post_kl + transformer + + if not self.return_latents: + latents = torch.cat([shape_latents, latents], dim=1) # torch.Size([bs, 512, 768]) + + if self.load_encoder: + return self.point_proj(torch.cat([shape_embed.unsqueeze(1), latents], dim=1)) + return torch.cat([shape_embed.unsqueeze(1), latents], dim=1) # torch.Size([bs, 257 / 513, 768]) + + if self.encoder_name == 'bert': + points = data['points_cloud'].to(self.dtype).to(device) + points = points[:, :, :3] / 2 + points = torch.cat([points, torch.zeros_like(points)], dim=-1) + points = self.encoder(points) + + if self.return_latents: + points = self.point_proj(points) + else: + points = self.point_proj[0](points) + points = self.point_proj[1](points.permute(0, 2, 1)).squeeze(-1) + return points + + if self.encoder_name == 'transformer': + points = data['points_cloud'].to(self.dtype).to(device) + cond = self.encoder.encode_pc(points) + cond = self.points_cloud_embed(cond) + return cond + + def forward(self, data, device='cuda', downsample=False, **kwargs): + latents = self.encode(data, device) + # print('latents shape', latents.shape) + + logits = self.decoder(latents, data, device=device, downsample=downsample,**kwargs) + + return logits \ No newline at end of file diff --git a/Anymate/models/__init__.py b/Anymate/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Anymate/models/conn.py b/Anymate/models/conn.py new file mode 100644 index 0000000000000000000000000000000000000000..433b85f258e6409ecb88144017f290220cb4d6f0 --- /dev/null +++ b/Anymate/models/conn.py @@ -0,0 +1,195 @@ +import torch +import torch.nn as nn +from ThirdParty.michelangelo.models.modules.transformer_blocks import ResidualCrossAttentionBlock, ResidualAttentionBlock, Transformer +from ThirdParty.michelangelo.models.modules.embedder import FourierEmbedder, components_from_spherical_harmonics + +class AttendjointsDecoder_con_combine(nn.Module): + def __init__(self, + width = 768, + layers = 2, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False, + num_freqs: int = 8, + include_pi: bool = True, + separate = False, + use_mask = True): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.separate = separate + self.use_mask = use_mask + # self.num_latents = num_latents + + # self.query = nn.Parameter(torch.randn((num_latents, width), device=device, dtype=dtype) * 0.02) + + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + self.co_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, dtype=dtype) + + # self.proj_attn = nn.Linear(width, width, device=device, dtype=dtype) + + self.cross_attn = nn.ModuleList([ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) for _ in range(layers)]) + + self.self_attn = 
nn.ModuleList([ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=-1, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) for _ in range(layers * 2)]) + + # self.joint_embed_proj = nn.ModuleList([nn.Linear(width, width, device=device, dtype=dtype) for _ in range(layers)]) + + + self.q_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.k_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.ln_2 = nn.LayerNorm(width, device=device, dtype=dtype) + + # self.last_cross_attn = ResidualCrossAttentionBlock( + # device=device, + # dtype=dtype, + # width=width, + # heads=heads, + # init_scale=init_scale, + # qkv_bias=qkv_bias, + # flash=flash, + # ) + # self.mlp = MLP(device=device, dtype=dtype, width=width, init_scale=init_scale) + # self.output_proj = nn.Linear(width, 1, device=device, dtype=dtype) + + def forward(self, latents, data=None, device='cuda', downsample=None, dtype=torch.float32): + + joints = data['joints'].to(device) + max_joints = max(data['joints_num']) + joints = joints[:, :max_joints, :3] + + joints_embeds = self.fourier_embedder(joints) + joints_embeds = self.co_proj(joints_embeds) + + joints_num = joints_embeds.shape[-2] + + x = [joints_embeds, joints_embeds.clone()] + + for i in range(2): + for j, layer in enumerate(self.cross_attn): + + x[i] = layer(x[i], latents) + + if self.use_mask: + x[i] = self.self_attn[2*i+j](x[i], mask=data['joints_mask'].to(device)) + else: + x[i] = self.self_attn[2*i+j](x[i]) + + # Dot Product between points and joints + logits = torch.einsum('bnc,bmc->bnm', self.k_proj(self.ln_1(x[0])), self.q_proj(self.ln_2(x[1]))) # (b, n, m) + + if self.use_mask: + mask = data['joints_mask'].to(device) + logits = logits.masked_fill(mask.unsqueeze(1) == 0, -1e8) + + return logits + +class AttendjointsDecoder_con_token(nn.Module): + def __init__(self, + width = 768, + layers = 4, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False, + num_freqs: int = 8, + include_pi: bool = True, + head_token_length =128, + separate = False, + use_mask = True): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.use_mask = use_mask + self.layer_norm = nn.LayerNorm(width) + self.head_token = nn.Parameter(torch.randn((1, 1, head_token_length), device=device, dtype=dtype) * 0.02) + self.tail_token = nn.Parameter(torch.randn((1, 1, head_token_length), device=device, dtype=dtype) * 0.02) + self.head_mlp = nn.ModuleList([ + nn.Linear(width + head_token_length, 512, device=device, dtype=dtype), + nn.Linear(512, 512, device=device, dtype=dtype), + nn.Linear(512, width, device=device, dtype=dtype), + nn.LayerNorm(width) + + ]) + self.tail_mlp = nn.ModuleList([ + nn.Linear(width + head_token_length, 512, device=device, dtype=dtype), + nn.Linear(512, 512, device=device, dtype=dtype), + nn.Linear(512, width, device=device, dtype=dtype), + nn.LayerNorm(width) + ]) + + self.self_attn = Transformer( + device=device, + dtype=dtype, + n_ctx=-1, + width=width, + layers=layers, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=False, + ) + self.separate = separate + self.normal_embedder = components_from_spherical_harmonics + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + self.joints_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, 
dtype=dtype) + self.output_proj_joints = nn.Linear(width, width, device=device, dtype=dtype) + + def forward(self, latents, data=None,device='cuda', downsample=None, dtype='float32'): + joints = data['joints'].to(device) + max_joints = max(data['joints_num']) + joints = joints[:, :max_joints, :3] + joints_embeds_fourier = self.fourier_embedder(joints) + joints_embeds = self.joints_proj(joints_embeds_fourier) + # Concatenate embeddings + x = torch.cat([joints_embeds, latents], dim=-2) # (b, max_joint+token_num, c) + # Pass through self-attention + if self.use_mask: + mask = data['mask'].to(device) + append_size = x.shape[1]-mask.shape[1] # the zero needs to append after mask + batch_size = mask.shape[0] + + mask_extend = torch.ones((batch_size,append_size)).to(device) + mask = torch.cat([mask,mask_extend],dim=-1).to(device) + + x = self.self_attn(x,mask) + else: + x = self.self_attn(x) + joints, _= x.split([joints_embeds.shape[1], latents.shape[1]], dim=1) + joints = self.output_proj_joints(self.layer_norm(joints)) + joints_head = torch.concat([joints, self.head_token.repeat(joints.shape[0],joints.shape[1],1)], dim=-1) + joints_tail = torch.concat([joints, self.tail_token.repeat(joints.shape[0],joints.shape[1],1)], dim=-1) + for layer in self.head_mlp: + joints_head = layer(joints_head) + for layer in self.tail_mlp: + joints_tail = layer(joints_tail) + logits = torch.einsum('bik,bjk->bij', joints_head, joints_tail) + + return logits \ No newline at end of file diff --git a/Anymate/models/diffusion.py b/Anymate/models/diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..84fb6c021d1f0e103e09ba5dc608ce193b8431aa --- /dev/null +++ b/Anymate/models/diffusion.py @@ -0,0 +1,483 @@ +F""" +Adapted from: https://github.com/openai/openai/blob/55363aa496049423c37124b440e9e30366db3ed6/orc/orc/diffusion/vit.py +""" + +import math +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union, Callable + +import torch +import torch.nn as nn + +from einops import repeat +from Anymate.utils.diffusion_utils import * +from ThirdParty.michelangelo.models.modules.transformer_blocks import Transformer, ResidualCrossAttentionBlock + +from diffusers import DDPMScheduler, DDIMScheduler +from sklearn.cluster import DBSCAN + +def init_linear(l, stddev): + nn.init.normal_(l.weight, std=stddev) + if l.bias is not None: + nn.init.constant_(l.bias, 0.0) + +class projection_transformer(nn.Module): + def __init__(self, num_latents=16, width = 16, heads=8, dtype = torch.float32): + super().__init__() + self.num_latents = num_latents + self.query = nn.Parameter(torch.randn((num_latents, width), dtype=dtype) * 0.02) + + self.cross_attn = ResidualCrossAttentionBlock( + device= 'cuda', + dtype=dtype, + width=width, + heads=heads, + init_scale=0.25, + qkv_bias=True, + flash=False, + ) + self.output_proj = nn.Linear(width, width,dtype=dtype) + + def forward(self, latents): + bs = latents.shape[0] + query = repeat(self.query, "m c -> b m c", b=bs) + embed = self.cross_attn(query, latents) + logits = self.output_proj(embed) + + return logits + +def timestep_embedding(timesteps, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. 
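+ 
+ Example: with dim=768, `timestep_embedding(torch.tensor([0, 10, 100, 999]), 768)`
+ returns a [4 x 768] tensor whose first 384 columns are cosines and last 384
+ columns are sines of the frequency-scaled timesteps.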
+ """ + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].to(timesteps.dtype) * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + +class MultiheadAttention(nn.Module): + def __init__( + self, + *, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + init_scale: float, + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.heads = heads + self.c_qkv = nn.Linear(width, width * 3, dtype=dtype) + self.c_proj = nn.Linear(width, width, dtype=dtype) + self.attention = QKVMultiheadAttention(dtype=dtype, heads=heads, n_ctx=n_ctx) + init_linear(self.c_qkv, init_scale) + init_linear(self.c_proj, init_scale) + + def forward(self, x): + x = self.c_qkv(x) + x = self.attention(x) + x = self.c_proj(x) + return x + +class MLP(nn.Module): + def __init__(self, *, dtype: torch.dtype, width: int, init_scale: float): + super().__init__() + self.width = width + self.c_fc = nn.Linear(width, width * 4, dtype=dtype) + self.c_proj = nn.Linear(width * 4, width, dtype=dtype) + self.gelu = nn.GELU() + init_linear(self.c_fc, init_scale) + init_linear(self.c_proj, init_scale) + + def forward(self, x): + return self.c_proj(self.gelu(self.c_fc(x))) + +class QKVMultiheadAttention(nn.Module): + def __init__(self, *, dtype: torch.dtype, heads: int, n_ctx: int): + super().__init__() + self.dtype = dtype + self.heads = heads + self.n_ctx = n_ctx + + def forward(self, qkv): + bs, n_ctx, width = qkv.shape + attn_ch = width // self.heads // 3 + scale = 1 / math.sqrt(math.sqrt(attn_ch)) + qkv = qkv.view(bs, n_ctx, self.heads, -1) + q, k, v = torch.split(qkv, attn_ch, dim=-1) + weight = torch.einsum( + "bthc,bshc->bhts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + wdtype = weight.dtype + weight = torch.softmax(weight.float(), dim=-1).type(wdtype) + return torch.einsum("bhts,bshc->bthc", weight, v).reshape(bs, n_ctx, -1) + +class ResidualAttentionBlock(nn.Module): + def __init__( + self, + *, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + init_scale: float = 1.0, + ): + super().__init__() + + self.attn = MultiheadAttention( + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + ) + self.ln_1 = nn.LayerNorm(width, dtype=dtype) + self.mlp = MLP(dtype=dtype, width=width, init_scale=init_scale) + self.ln_2 = nn.LayerNorm(width, dtype=dtype) + + def forward(self, x: torch.Tensor): + x = x + self.attn(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + +class Transformer(nn.Module): + def __init__( + self, + *, + dtype: torch.dtype, + n_ctx: int, + width: int, + layers: int, + heads: int, + init_scale: float = 0.25, + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.layers = layers + init_scale = init_scale * math.sqrt(1.0 / width) + self.resblocks = nn.ModuleList( + [ + ResidualAttentionBlock( + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + ) + for _ in range(layers) + ] + ) + + def forward(self, x: torch.Tensor): + for block in self.resblocks: + x = block(x) + return x + +class PointDiffusionTransformer(nn.Module): + def __init__( + self, + *, + dtype: torch.dtype, + input_channels: int = 3, + output_channels: int = 3, + n_ctx: int = 1024, + width: int = 768, + layers: int = 12, + 
heads: int = 8, + init_scale: float = 0.25, + time_token_cond: bool = True, + ): + super().__init__() + self.input_channels = input_channels + self.output_channels = output_channels + self.n_ctx = n_ctx + self.time_token_cond = time_token_cond + self.time_embed = MLP( + dtype=dtype, width=width, init_scale=init_scale * math.sqrt(1.0 / width) + ) + self.ln_pre = nn.LayerNorm(width, dtype=dtype) + self.backbone = Transformer( + dtype=dtype, + n_ctx=n_ctx + int(time_token_cond), + width=width, + layers=layers, + heads=heads, + init_scale=init_scale, + ) + self.ln_post = nn.LayerNorm(width,dtype=dtype) + self.input_proj = nn.Linear(input_channels, width, dtype=dtype) + self.output_proj = nn.Linear(width, output_channels,dtype=dtype) + with torch.no_grad(): + self.output_proj.weight.zero_() + self.output_proj.bias.zero_() + + def forward(self, x: torch.Tensor, t: torch.Tensor): + """ + :param x: an [N x C x T] tensor. + :param t: an [N] tensor. + :return: an [N x C' x T] tensor. + """ + assert x.shape[-1] == self.n_ctx + t_embed = self.time_embed(timestep_embedding(t, self.backbone.width)) + return self._forward_with_cond(x, [(t_embed, self.time_token_cond)]) + + def _forward_with_cond( + self, x: torch.Tensor, cond_as_token: List[Tuple[torch.Tensor, bool]] + ) -> torch.Tensor: + h = self.input_proj(x.permute(0, 2, 1)) # NCL -> NLC + for emb, as_token in cond_as_token: + if not as_token: + h = h + emb[:, None] + extra_tokens = [ + (emb[:, None] if len(emb.shape) == 2 else emb) + for emb, as_token in cond_as_token + if as_token + ] + if len(extra_tokens): + h = torch.cat(extra_tokens + [h], dim=1) + + h = self.ln_pre(h) + h = self.backbone(h) + h = self.ln_post(h) + if len(extra_tokens): + h = h[:, sum(h.shape[1] for h in extra_tokens) :] + h = self.output_proj(h) + return h.permute(0, 2, 1) + +class Pointe_Diffusion(PointDiffusionTransformer): + ''' + input: data: data dict + x: [N x C x T] tensor + t: [N] tensor + init: + n_ctx: int = 1024: context length + ''' + def __init__( + self, + *, + device = 'cuda', + dtype = torch.float32, + encoder = 'miche', + n_ctx: int = 1024, + token_cond: bool = True, + cond_drop_prob: float = 0.1, + fix_emb: bool = False, + + **kwargs, + ): + super().__init__(dtype=dtype, n_ctx=n_ctx + int(token_cond), **kwargs) + self.n_ctx = n_ctx + self.token_cond = token_cond + # self.proj_transformer = projection_transformer(**kwargs) + self.encoder_name = encoder + self.cond_drop_prob = cond_drop_prob + self.fix_emb = fix_emb + self.dtype = dtype + self.inference = False + def cached_model_kwargs(self, batch_size: int, model_kwargs: Dict[str, Any]) -> Dict[str, Any]: + with torch.no_grad(): + return dict(embeddings=self.clip(batch_size, **model_kwargs)) + + def inference_mode(self,eps=0.03): + self.inference = True + + def forward_func( + self, + latent: torch.Tensor, + data, + device='cuda', + downsample = False, + **kwargs, + ): + t = kwargs['timesteps'].to(latent.device) + x = kwargs['noisy_joints'].to(latent.device) + assert x.shape[-1] == self.n_ctx, f"x shape: {x.shape}, n_ctx: {self.n_ctx}" + t_embed = self.time_embed(timestep_embedding(t, self.backbone.width)) + + if self.training: + mask = torch.rand(size=[len(x)]) >= self.cond_drop_prob + latent = latent * mask[:,None,None].to(latent.device) + + latent = [(latent, self.token_cond), (t_embed, self.time_token_cond)] + return self._forward_with_cond(x, latent) + + def forward(self, latent, data, device='cuda', downsample = False, **kwargs): + if self.inference == False: + return self.forward_func(latent, data, 
device, downsample, **kwargs) + else: + generator=torch.Generator(device='cpu') + scheduler = DDIMScheduler(100) + scheduler.set_timesteps(100) + points_shape = [1, self.n_ctx, 3] + + points_noise = randn_tensor(points_shape, generator=generator) + points = points_noise.permute(0, 2, 1).to(latent.device) + for t in scheduler.timesteps: + with torch.no_grad(): + time_steps = torch.ones(1, 1, dtype=torch.long) * t + model_output = self.forward_func(latent, data, noisy_joints=points, timesteps = time_steps) + + points = scheduler.step(model_output, t, points, generator=generator).prev_sample + points = points.permute(0, 2, 1).cpu() + assert points.shape[0] == 1, "Inference mode only supports batch size 1" + joints = points[0].detach().cpu().numpy() + clustering = DBSCAN(eps=0.05, min_samples=1).fit(joints) + cluster_centers = [] + for cluster in set(clustering.labels_): + cluster_centers.append(joints[clustering.labels_ == cluster].mean(axis=0)) + return cluster_centers + +class Cross_Attention_Diffusion(nn.Module): + def __init__(self, + input_channels=3, output_channels=3, + num_z=16, num_x=1024, z_dim=768, x_dim=512, + num_blocks=6, num_compute_layers=4, num_heads=8, + mlp_ratio=4., qkv_bias=True, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm,num_latents=16, + device=torch.device('cuda' if torch.cuda.is_available() else 'cpu'), + use_projection = True,): + super().__init__() + self.use_projection = use_projection + self.device = device + self.num_z = num_z + self.num_x = num_x + self.z_dim = z_dim + if use_projection: + self.proj_transformer = projection_transformer(num_latents=num_latents, width=z_dim, heads=num_heads) + self.prev_latent = nn.Parameter(torch.zeros(1, self.num_z + num_latents + 1, z_dim)) + self.inference = False + + self.input_proj = nn.Linear(input_channels, x_dim) + self.ln_pre = nn.LayerNorm(x_dim) + self.z_init = nn.Parameter(torch.zeros(1, num_z, z_dim)) + + mlp_hidden_dim = int(z_dim * mlp_ratio) + self.time_embed = Mlp(in_features=z_dim, hidden_features=mlp_hidden_dim) + + self.latent_mlp = Mlp(in_features=z_dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + self.ln_latent = nn.LayerNorm(z_dim) + self.blocks = nn.ModuleList([ + RCW_Block(z_dim, x_dim, num_compute_layers=num_compute_layers, + num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, + drop=drop, attn_drop=attn_drop, drop_path=drop_path, + act_layer=act_layer, norm_layer=norm_layer) + for _ in range(num_blocks) + ]) + + # output blocks + self.ln_post = nn.LayerNorm(x_dim) + self.output_proj = nn.Linear(x_dim, output_channels) + + self.initialize_weights() + + def initialize_weights(self): + nn.init.normal_(self.z_init, std=.02) + + # initialize nn.Linear and nn.LayerNorm + self.apply(self._init_weights) + + nn.init.constant_(self.ln_latent.weight, 0) + nn.init.constant_(self.ln_latent.bias, 0) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + torch.nn.init.xavier_uniform_(m.weight) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def inference_mode(self,eps=0.03): + self.inference = True + + def forward_func(self, latent, data, device='cuda', downsample = False, **kwargs): + """ + Forward pass of the model. 
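+ (`x` and `t` are read from `kwargs['noisy_joints']` and `kwargs['timesteps']`;
+ `noisy_joints` arrives channels-first as [B, C_in, num_x] and is permuted to
+ [B, num_x, C_in] before projection.)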
+ + Parameters: + x: [B, num_x, C_in] + t: [B] + cond: [B, num_cond, C_latent] + prev_latent: [B, num_z + num_cond + 1, C_latent] + + Returns: + x_denoised: [B, num_x, C_out] + z: [B, num_z + num_cond + 1, C_latent] + """ + t = kwargs['timesteps'].to(latent.device) + x = kwargs['noisy_joints'].to(latent.device) + x = x.permute(0, 2, 1) + B, num_x, _ = x.shape + if self.use_projection: + latent = self.proj_transformer(latent) + assert num_x == self.num_x, f"x shape: {x.shape}, num_x: {self.num_x}" + # if prev_latent is not None: + # _, num_z, _ = prev_latent.shape + # assert num_z == self.num_z + num_cond + 1 + # else: + # prev_latent = torch.zeros(B, self.num_z + num_cond + 1, self.z_dim).to(x.device) + + # timestep embedding, [B, 1, z_dim] + t_embed = self.time_embed(timestep_embedding(t, self.z_dim)) + if t_embed.dim() == 2: + t_embed = t_embed.unsqueeze(1) + + # project x -> [B, num_x, C_x] + x = self.input_proj(x) + x = self.ln_pre(x) + + # latent self-conditioning + z = self.z_init.repeat(B, 1, 1) # [B, num_z, z_dim + z = torch.cat([z, latent, t_embed], dim=1) # [B, num_z + num_cond + 1, z_dim] + prev_latent = self.prev_latent + self.latent_mlp(self.prev_latent.detach()) + z = z + (self.ln_latent(prev_latent)) + + # compute + for blk in self.blocks: + z, x = blk(z, x) + + # output proj + x = self.ln_post(x) + x_denoised = self.output_proj(x) + return x_denoised.permute(0, 2, 1) + + def forward(self, latent, data, device='cuda', downsample = False, **kwargs): + if self.inference == False: + return self.forward_func(latent, data, device, downsample, **kwargs) + else: + generator=torch.Generator(device='cpu') + scheduler = DDIMScheduler(100) + scheduler.set_timesteps(100) + points_shape = [1, self.num_x, 3] + + points_noise = randn_tensor(points_shape, generator=generator) + points = points_noise.permute(0, 2, 1).to(latent.device) + for t in scheduler.timesteps: + with torch.no_grad(): + time_steps = torch.ones(1, 1, dtype=torch.long) * t + time_steps = time_steps.to(latent.device) + model_output = self.forward_func(latent, data, noisy_joints=points, timesteps = time_steps) + + points = scheduler.step(model_output, t, points, generator=generator).prev_sample + points = points.permute(0, 2, 1).cpu() + assert points.shape[0] == 1, "Inference mode only supports batch size 1" + joints = points[0].detach().cpu().numpy() + clustering = DBSCAN(eps=0.05, min_samples=1).fit(joints) + cluster_centers = [] + for cluster in set(clustering.labels_): + cluster_centers.append(joints[clustering.labels_ == cluster].mean(axis=0)) + return cluster_centers + \ No newline at end of file diff --git a/Anymate/models/joint.py b/Anymate/models/joint.py new file mode 100644 index 0000000000000000000000000000000000000000..e78adf8a2c844c0c3c1d3584589abd9acf78451c --- /dev/null +++ b/Anymate/models/joint.py @@ -0,0 +1,282 @@ +import torch +import torch.nn as nn +from ThirdParty.michelangelo.models.modules.embedder import FourierEmbedder +from ThirdParty.michelangelo.models.modules.transformer_blocks import ResidualCrossAttentionBlock +from ThirdParty.eg3d.training.networks_stylegan2 import Generator as StyleGAN2Backbone +from ThirdParty.eg3d.training.networks_stylegan2 import FullyConnectedLayer +from Anymate.utils.vol_utils import get_co, sample_from_planes, generate_planes +from einops import repeat +from sklearn.cluster import DBSCAN +from Anymate.utils.vol_utils import extract_keypoints + +class TransformerDecoder(nn.Module): + def __init__(self, + num_latents = 96, + num_kv_latents = 257, + out_channels = 3, + 
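+ # each of the `num_latents` learned queries is decoded to an `out_channels`-dim joint position (see forward below)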
width = 768, + layers = 7, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.num_latents = num_latents + self.inference = False + self.eps = 0.03 + + self.query = nn.Parameter(torch.randn((num_latents, width), device=device, dtype=dtype) * 0.02) + + self.cross_attn_decoder = ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + n_data=num_kv_latents, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash + ) + + self.ln_post = nn.LayerNorm(width, device=device, dtype=dtype) + self.output_proj = nn.Linear(width, out_channels, device=device, dtype=dtype) + + def inference_mode(self, eps=0.03, min_samples=1): + self.inference = True + self.eps = eps + self.min_samples = min_samples + + def forward(self, latents, data=None, device='cuda', downsample=False, dtype=torch.float32): + + bs = latents.shape[0] + query = repeat(self.query, "m c -> b m c", b=bs) + logits = self.cross_attn_decoder(query, latents) + logits = self.ln_post(logits) + logits = self.output_proj(logits) + if self.inference: + assert logits.shape[0] == 1, "Inference mode only supports batch size 1" + joints = logits[0].detach().cpu().numpy() + clustering = DBSCAN(eps=self.eps, min_samples=self.min_samples).fit(joints) + cluster_centers = [] + for cluster in set(clustering.labels_): + cluster_centers.append(joints[clustering.labels_ == cluster].mean(axis=0)) + return cluster_centers + return logits + + +class ImplicitTransformerDecoder(nn.Module): + + def __init__(self, *, + device = 'cuda', + dtype = torch.float32, + num_latents = 257, + out_channels = 1, + width = 768, + heads = 12, + num_freqs: int = 8, + include_pi: bool = True, + init_scale: float = 0.25, + qkv_bias: bool = False, + flash: bool = False, + use_checkpoint: bool = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + self.inference = False + + self.query_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, dtype=dtype) + + self.cross_attn_decoder = ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + n_data=num_latents, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash + ) + + self.ln_post = nn.LayerNorm(width, device=device, dtype=dtype) + self.output_proj = nn.Linear(width, out_channels, device=device, dtype=dtype) + + # self.queries = get_vol().to(device) + + def inference_mode(self): + self.inference = True + + def forward(self, latents: torch.FloatTensor, data=None, device='cuda', downsample=False): + bs = latents.shape[0] + # queries = repeat(self.queries, "m c -> b m c", b=bs) + out = [] + for b in range(bs): + queries = get_co(data['vox'][b]).to(device).unsqueeze(0) + if downsample and data['vox'][b].shape[0] > 50000: + # random sample + idx = torch.randperm(data['vox'][b].shape[0])[:50000] + queries = queries[:, idx] + queries = self.query_proj(self.fourier_embedder(queries)) + x = self.cross_attn_decoder(queries, latents[b:b+1]) + x = self.ln_post(x) + x = self.output_proj(x) + if downsample and data['vox'][b].shape[0] > 50000: + out.append((x.squeeze(0), idx)) + else: + out.append(x.squeeze(0)) + if self.inference: + assert len(out) == 1, "Inference mode only supports batch size 1" + return extract_keypoints(out[0], data['vox'][0]) + + return out + + +class 
TriPlaneDecoder(torch.nn.Module): + def __init__(self, + z_dim = 768, # Input latent (Z) dimensionality. + c_dim = 0, # Conditioning label (C) dimensionality. + w_dim = 768, # Intermediate latent (W) dimensionality. + # img_resolution, # Output resolution. + # img_channels, # Number of output color channels. + # sr_num_fp16_res = 0, + mapping_kwargs = {'num_layers': 2}, # Arguments for MappingNetwork. + # rendering_kwargs = {}, + # sr_kwargs = {}, + synthesis_kwargs = {'num_fp16_res': 0, 'conv_clamp': None, 'fused_modconv_default': 'inference_only'}, # Arguments for SynthesisNetwork. + ): + super().__init__() + self.z_dim=z_dim + self.c_dim=c_dim + self.w_dim=w_dim + # self.img_resolution=img_resolution + # self.img_channels=img_channels + # self.renderer = ImportanceRenderer() + # self.ray_sampler = RaySampler() + self.backbone = StyleGAN2Backbone(z_dim, c_dim, w_dim, img_resolution=256, img_channels=32*3, mapping_kwargs=mapping_kwargs, **synthesis_kwargs) + # self.superresolution = dnnlib.util.construct_class_by_name(class_name=rendering_kwargs['superresolution_module'], channels=32, img_resolution=img_resolution, sr_num_fp16_res=sr_num_fp16_res, sr_antialias=rendering_kwargs['sr_antialias'], **sr_kwargs) + self.decoder = OSGDecoder(32, {'decoder_output_dim': 0}) + self.inference = False + # self.neural_rendering_resolution = 64 + # self.rendering_kwargs = rendering_kwargs + + self._last_planes = None + self.plane_axes = generate_planes() + + def mapping(self, z, c=None, truncation_psi=1, truncation_cutoff=None, update_emas=False): + # if self.rendering_kwargs['c_gen_conditioning_zero']: + # c = torch.zeros_like(c) + # return self.backbone.mapping(z, c * self.rendering_kwargs.get('c_scale', 0), truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + return self.backbone.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + + def synthesis(self, ws, c=None, neural_rendering_resolution=None, update_emas=False, cache_backbone=False, use_cached_backbone=False, **synthesis_kwargs): + # cam2world_matrix = c[:, :16].view(-1, 4, 4) + # intrinsics = c[:, 16:25].view(-1, 3, 3) + + # if neural_rendering_resolution is None: + # neural_rendering_resolution = self.neural_rendering_resolution + # else: + # self.neural_rendering_resolution = neural_rendering_resolution + + # Create a batch of rays for volume rendering + # ray_origins, ray_directions = self.ray_sampler(cam2world_matrix, intrinsics, neural_rendering_resolution) + + # Create triplanes by running StyleGAN backbone + # N, M, _ = ray_origins.shape + if use_cached_backbone and self._last_planes is not None: + planes = self._last_planes + else: + planes = self.backbone.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + if cache_backbone: + self._last_planes = planes + + # Reshape output into three 32-channel planes + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return planes + + # Perform volume rendering + feature_samples, depth_samples, weights_samples = self.renderer(planes, self.decoder, ray_origins, ray_directions, self.rendering_kwargs) # channels last + + # Reshape into 'raw' neural-rendered image + H = W = self.neural_rendering_resolution + feature_image = feature_samples.permute(0, 2, 1).reshape(N, feature_samples.shape[-1], H, W).contiguous() + depth_image = depth_samples.permute(0, 2, 1).reshape(N, 1, H, W) + + # Run superresolution to get final image + rgb_image = feature_image[:, :3] + sr_image = 
self.superresolution(rgb_image, feature_image, ws, noise_mode=self.rendering_kwargs['superresolution_noise_mode'], **{k:synthesis_kwargs[k] for k in synthesis_kwargs.keys() if k != 'noise_mode'}) + + return {'image': sr_image, 'image_raw': rgb_image, 'image_depth': depth_image} + + def sample(self, coordinates, directions, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + # Compute RGB features, density for arbitrary 3D coordinates. Mostly used for extracting shapes. + ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + planes = self.backbone.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return self.renderer.run_model(planes, self.decoder, coordinates, directions, self.rendering_kwargs) + + def sample_mixed(self, coordinates, directions, ws, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + # Same as sample, but expects latent vectors 'ws' instead of Gaussian noise 'z' + planes = self.backbone.synthesis(ws, update_emas = update_emas, **synthesis_kwargs) + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return self.renderer.run_model(planes, self.decoder, coordinates, directions, self.rendering_kwargs) + + def inference_mode(self): + self.inference = True + + def forward(self, z, data=None, device='cuda', downsample=False, c=None, truncation_psi=1, truncation_cutoff=None, neural_rendering_resolution=None, update_emas=False, cache_backbone=False, use_cached_backbone=False, **synthesis_kwargs): + # Render a batch of generated images. + assert z.shape[-1] == self.z_dim + ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + planes = self.synthesis(ws, c, update_emas=update_emas, neural_rendering_resolution=neural_rendering_resolution, cache_backbone=cache_backbone, use_cached_backbone=use_cached_backbone, **synthesis_kwargs) + bs = planes.shape[0] + logits = [] + for b in range(bs): + queries = get_co(data['vox'][b]).to(device).unsqueeze(0) + if downsample and data['vox'][b].shape[0] > 50000: + # random sample + idx = torch.randperm(data['vox'][b].shape[0])[:50000] + queries = queries[:, idx] + out = sample_from_planes(self.plane_axes.to(device), planes[b:b+1], queries) + out = self.decoder(out) + if downsample and data['vox'][b].shape[0] > 50000: + logits.append((out.squeeze(0), idx)) + else: + logits.append(out.squeeze(0)) + if self.inference: + assert len(logits) == 1, "Inference mode only supports batch size 1" + return extract_keypoints(logits[0], data['vox'][0]) + return logits + + +class OSGDecoder(torch.nn.Module): + def __init__(self, n_features, options): + super().__init__() + self.hidden_dim = 64 + + self.net = torch.nn.Sequential( + FullyConnectedLayer(n_features, self.hidden_dim), + torch.nn.Softplus(), + FullyConnectedLayer(self.hidden_dim, 1 + options['decoder_output_dim']) + ) + + def forward(self, sampled_features, ray_directions=None): + # Aggregate features + sampled_features = sampled_features.mean(1) + x = sampled_features + + N, M, C = x.shape + x = x.view(N*M, C) + + x = self.net(x) + x = x.view(N, M, -1) + return x + rgb = torch.sigmoid(x[..., 1:])*(1 + 2*0.001) - 0.001 # Uses sigmoid clamping from MipNeRF + sigma = x[..., 0:1] + return {'rgb': rgb, 'sigma': sigma} \ No newline at end of file diff --git a/Anymate/models/skin.py 
b/Anymate/models/skin.py new file mode 100644 index 0000000000000000000000000000000000000000..ec81a907d6b4e15d321cc45a1b93e86da6bd2e8f --- /dev/null +++ b/Anymate/models/skin.py @@ -0,0 +1,309 @@ +import torch +import torch.nn as nn +from ThirdParty.michelangelo.models.modules.transformer_blocks import ResidualCrossAttentionBlock, Transformer +from ThirdParty.michelangelo.models.modules.embedder import components_from_spherical_harmonics, FourierEmbedder +from einops import repeat, rearrange + +class AttendjointsDecoder_combine(nn.Module): + def __init__(self, + width = 768, + layers = 2, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False, + num_freqs: int = 8, + include_pi: bool = True, + separate = False, + use_mask = True, + use_bone = True, + inference= False): + + super().__init__() + self.inference = inference + self.use_checkpoint = use_checkpoint + self.separate = separate + self.use_mask = use_mask + # self.num_latents = num_latents + + # self.query = nn.Parameter(torch.randn((num_latents, width), device=device, dtype=dtype) * 0.02) + + self.normal_embedder = components_from_spherical_harmonics + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + self.bone_proj = None if not use_bone else nn.Linear(self.fourier_embedder.out_dim * 2, width, device=device, dtype=dtype) + self.use_bone = use_bone + + if not self.separate: + self.co_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, dtype=dtype) + self.normal_proj = nn.Linear(25, width, device=device, dtype=dtype) + else: + self.pc_proj = nn.Linear(self.fourier_embedder.out_dim + 25, width, device=device, dtype=dtype) + + + # self.proj_attn = nn.Linear(width, width, device=device, dtype=dtype) + + self.cross_attn = nn.ModuleList([ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) for _ in range(layers)]) + + self.cross_attn_joint = nn.ModuleList([ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) for _ in range(layers)]) + + # self.joint_embed_proj = nn.ModuleList([nn.Linear(width, width, device=device, dtype=dtype) for _ in range(layers)]) + + + self.q_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.k_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.ln_2 = nn.LayerNorm(width, device=device, dtype=dtype) + + # self.last_cross_attn = ResidualCrossAttentionBlock( + # device=device, + # dtype=dtype, + # width=width, + # heads=heads, + # init_scale=init_scale, + # qkv_bias=qkv_bias, + # flash=flash, + # ) + # self.mlp = MLP(device=device, dtype=dtype, width=width, init_scale=init_scale) + # self.output_proj = nn.Linear(width, 1, device=device, dtype=dtype) + + def forward(self, latents, data=None, device='cuda', downsample=None, dtype=torch.float32): + joints = data['bones'].to(device) if self.use_bone else data['joints'].to(device) + max_joints = max(data['bones_num']) if self.use_bone else max(data['joints_num']) + mask = data['bones_mask'].to(device) if self.use_bone else data['joints_mask'] + + pc = data['vertices'][..., 0:3].to(device) if self.inference else data['points_cloud'][..., 0:3].to(device) + feats = data['vertices'][..., 3:].to(device) if self.inference else 
data['points_cloud'][..., 3:].to(device) + + if downsample and not self.inference: + # random sample + idx = torch.randperm(pc.shape[1])[:downsample].to(device) + pc = pc[:, idx] + feats = feats[:, idx] + + # Embed the input data + co_embeds = self.fourier_embedder(pc) + if not self.separate: + co_embeds = self.co_proj(co_embeds) + + if self.use_bone: + # joints_fourier = torch.cat((self.fourier_embedder(joints[:,:max_joints*2:2, :3]), self.fourier_embedder(joints[:,1:max_joints*2:2, :3])), dim=-1) + joints_fourier = torch.cat((self.fourier_embedder(joints[:,:max_joints,:3]), self.fourier_embedder(joints[:,:max_joints, 3:])), dim=-1) + else: + joints_fourier = self.fourier_embedder(joints[:,:max_joints, :3]) + + if not self.separate: + joints_embeds = self.co_proj(joints_fourier) if not self.use_bone else self.bone_proj(joints_fourier) + + normal_embeds = self.normal_proj(self.normal_embedder(feats)) if not self.separate else self.normal_embedder(feats) + + if not self.separate: + pc_embeds = co_embeds + normal_embeds + else: + joints_embeds = self.co_proj(joints_fourier.to(dtype)) if not self.use_bone else self.bone_proj(joints_fourier.to(dtype)) + pc_embeds = self.pc_proj(torch.cat([co_embeds.to(dtype), normal_embeds.to(dtype)], dim=-1)) + + pc_num = pc_embeds.shape[-2] + joints_num = joints_embeds.shape[-2] + x = torch.cat([pc_embeds, joints_embeds], dim=-2) + for i, layer in enumerate(self.cross_attn): + + x = layer(x, latents) + if self.use_mask: + x = self.cross_attn_joint[i](x, x[:, pc_num:], mask=mask.to(device)) + else: + x = self.cross_attn_joint[i](x, x[:, pc_num:]) + pc_embeds, joints_embeds = x.split([pc_num, joints_num], dim=1) + + logits = torch.einsum('bnc,bmc->bnm', self.k_proj(self.ln_1(pc_embeds)), self.q_proj(self.ln_2(joints_embeds))) # (b, n, m) + + if self.use_mask: + logits = logits.masked_fill(mask.unsqueeze(1) == 0, -1e8) + + if downsample and not self.inference: + return logits, idx + + return logits + +class AttendjointsDecoder_multi(nn.Module): + def __init__(self, + # num_latents = 64, + # num_kv_latents = 257, + # out_channels = 3, + width = 768, + layers = 4, + device = 'cuda', + dtype = torch.float32, + heads = 12, + init_scale: float = 0.25, + flash = False, + use_checkpoint = False, + qkv_bias = False, + num_freqs: int = 8, + concat_num: int = 512, + include_pi: bool = True, + separate = False, + use_mask = True, + inference_with_repeat=False, + use_bone = True, + inference = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.use_mask = use_mask + self.inference_with_repeat = inference_with_repeat + self.inference = inference + + self.self_attn = Transformer( + device=device, + dtype=dtype, + n_ctx=-1, + width=width, + layers=layers, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=False, + + ) + self.concat_number = concat_num + self.separate = separate + self.normal_embedder = components_from_spherical_harmonics + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + self.bone_proj = None if not use_bone else nn.Linear(self.fourier_embedder.out_dim * 2, width, device=device, dtype=dtype) + self.use_bone = use_bone + if not self.separate: + self.co_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, dtype=dtype) + self.normal_proj = nn.Linear(25, width, device=device, dtype=dtype) + else: + self.pc_proj = nn.Linear(self.fourier_embedder.out_dim + 25, width, device=device, dtype=dtype) + + # self.proj_attn = nn.Linear(width, width, 
device=device, dtype=dtype) + + # self.ln_post = nn.LayerNorm(width, device=device, dtype=dtype) + self.output_proj_joints = nn.Linear(width, width, device=device, dtype=dtype) + self.output_proj_points = nn.Linear(width, width, device=device, dtype=dtype) + self.layer_norm = nn.LayerNorm(width) + + # def inference(self, latents, data=None,device='cuda', dtype='float32', use_mask=False): + def inference_mode(self): + self.inference = True + + def forward(self, latents, data=None,device='cuda', downsample=None, dtype='float32'): + joints = data['bones'].to(device) if self.use_bone else data['joints'].to(device) + max_joints = max(data['bones_num']) if self.use_bone else max(data['joints_num']) + + pc = data['points_cloud'][..., 0:3].to(device) + feats = data['points_cloud'][..., 3:].to(device) + + if downsample: + # random sample + idx = torch.randperm(pc.shape[1])[:downsample].to(device) + pc = pc[:, idx] + feats = feats[:, idx] + + bs = pc.shape[1]//self.concat_number + + # Embed the input data + if self.use_bone: + # joints_fourier = torch.cat((self.fourier_embedder(joints[:,:max_joints*2:2, :3]), self.fourier_embedder(joints[:,1:max_joints*2:2, :3])), dim=-1) + joints_fourier = torch.cat((self.fourier_embedder(joints[:,:max_joints,:3]), self.fourier_embedder(joints[:,:max_joints, 3:])), dim=-1) + else: + joints_fourier = self.fourier_embedder(joints[:,:max_joints, :3]) + + if self.separate: + joints_embeds = self.co_proj(joints_fourier.to(dtype)) if not self.use_bone else self.bone_proj(joints_fourier.to(dtype)) + points_embeds = self.fourier_embedder(pc) + normal_embeds = self.normal_embedder(feats) + points = self.pc_proj(torch.cat([points_embeds, normal_embeds], dim=-1)) + else: + joints_embeds = self.co_proj(joints_fourier) if not self.use_bone else self.bone_proj(joints_fourier) + co_embeds = self.fourier_embedder(pc) + co_embeds = self.co_proj(co_embeds) + # Embed the normals + normal_embeds = self.normal_embedder(feats) + normal_embeds = self.normal_proj(normal_embeds) # (b, n, c) + points = (co_embeds + normal_embeds) + + repeated_latents = repeat(latents, "b m c -> b n m c", n=bs) + repeated_joints = repeat(joints_embeds, "b m c -> b n m c", n=bs) + points = points.reshape( latents.shape[0], bs, self.concat_number, -1) + + # Concatenate embeddings + x = torch.cat([repeated_joints, points, repeated_latents], dim=-2) # (b, bs, concat_number+latent_num+joints_num, c) + + # Pass through self-attention + if self.use_mask: + mask = data['bones_mask'].to(device) + append_size = x.shape[2]-mask.shape[1] # the zero needs to append after mask + batch_size = mask.shape[0] + mask_extend = torch.ones((batch_size,append_size)).to(device) + mask = torch.cat([mask,mask_extend],dim=-1).repeat(bs,1).to(device) + x = rearrange(x, "b n m c -> (b n) m c") + x = self.self_attn(x,mask) + else: + x = rearrange(x, "b n m c -> (b n) m c") + x = self.self_attn(x) + joints, points, _ = x.split([joints_embeds.shape[1],self.concat_number, latents.shape[1]], dim=1) + joints = self.output_proj_joints(self.layer_norm(joints)) + points = self.output_proj_points(self.layer_norm(points)) + + logits = torch.einsum('bik,bjk->bij', points, joints) + logits = rearrange(logits, '(b n) m c -> b (n m) c', b=pc.shape[0],n=bs) # (b, n, c) + + if self.use_mask: + mask = data['bones_mask'].to(device) + logits = logits.masked_fill(mask.unsqueeze(1) == 0, -1e8) + + if self.inference: + vertices = data['vertice'] + points_cloud = data['points_cloud'][0,..., 0:3].to(device) + vertices_exp = vertices[0,...,:3] # (batch_size, 
num_vertices, 1, 3) + logits = compute_nearest_points(vertices_exp, points_cloud, logits[0], device) + + if downsample: + return logits, idx + + return logits + +def compute_nearest_points(vertices, points, logits, device, batch_size=1024): + # vertices: [N, 3] + # points: [M, 3] + # logits: [M, K] (K is the number of skinning weights) + + num_vertices = vertices.shape[0] + # Initialize the output tensor for skinning weights + skin_predict = torch.zeros((num_vertices, logits.shape[1]), device=device) + + # Split vertices into batches + for i in range(0, num_vertices, batch_size): + + batch_vertices = vertices[i:i+batch_size] # [batch_size, 3] + vertices_exp = batch_vertices.unsqueeze(1) # [batch_size, 1, 3] + points_exp = points.unsqueeze(0) # [1, num_points, 3] + distances = torch.sum((vertices_exp - points_exp) ** 2, dim=-1) # [batch_size, num_points] + nearest_idx = torch.argmin(distances, dim=-1) # [batch_size] + skin_predict_batch = logits[nearest_idx] # [batch_size, K] + skin_predict[i:i+batch_size] = skin_predict_batch + + return skin_predict \ No newline at end of file diff --git a/Anymate/tmp/.gitkeep b/Anymate/tmp/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Anymate/utils/dataset_utils.py b/Anymate/utils/dataset_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..19563ccf9b1ee3a3991d60811c073f43b952d3bc --- /dev/null +++ b/Anymate/utils/dataset_utils.py @@ -0,0 +1,129 @@ +import numpy as np +import torch +import trimesh +from ThirdParty.Rignet_utils import binvox_rw + + +def sparse_to_index(sparse_matrix): + index = [] + weight = [] + for j in range(len(sparse_matrix)): + if sparse_matrix[j] > 0: + index.append(j) + weight.append(sparse_matrix[j]) + + return index, weight + +def index_to_sparse(index, weight, shape): + sparse_matrix = np.zeros([shape[0], shape[1], shape[2]+1]) + + row_indices, col_indices = np.meshgrid(np.arange(sparse_matrix.shape[0]), np.arange(sparse_matrix.shape[1]), indexing='ij') + + row_indices = np.expand_dims(row_indices, axis=-1) + col_indices = np.expand_dims(col_indices, axis=-1) + + sparse_matrix[row_indices, col_indices, index] = weight + + + return torch.from_numpy(sparse_matrix[:, :, :-1]) + +def index_to_sparse_con(index, shape): + + sparse_matrix = np.zeros([shape[0], shape[1], shape[2]+1],dtype=np.int8) + row_indices, col_indices = np.meshgrid(np.arange(sparse_matrix.shape[0]), np.arange(sparse_matrix.shape[1]), indexing='ij') + + row_indices = np.expand_dims(row_indices, axis=-1) + col_indices = np.expand_dims(col_indices, axis=-1) + + sparse_matrix[row_indices, col_indices, index] = 1 + + + return torch.from_numpy(sparse_matrix[:, :, :-1]) + +def create_mask(n, max_len=64): + mask = torch.zeros(max_len, dtype=torch.bool) + mask[:n] = 1 + return mask + +def reduce(vox): + new_data = np.zeros((vox.dims[0] // 2, vox.dims[1] // 2, vox.dims[2] // 2)).astype(bool) + new_data = np.logical_or(new_data, vox.data[::2, ::2, ::2]) + new_data = np.logical_or(new_data, vox.data[1::2, ::2, ::2]) + new_data = np.logical_or(new_data, vox.data[::2, 1::2, ::2]) + new_data = np.logical_or(new_data, vox.data[::2, ::2, 1::2]) + new_data = np.logical_or(new_data, vox.data[1::2, 1::2, ::2]) + new_data = np.logical_or(new_data, vox.data[1::2, ::2, 1::2]) + new_data = np.logical_or(new_data, vox.data[::2, 1::2, 1::2]) + new_data = np.logical_or(new_data, vox.data[1::2, 1::2, 1::2]) + # dilate the new voxel + new_data[:-1, :, :] = np.logical_or(new_data[:-1, 
:, :], new_data[1:, :, :]) + new_data[:, :-1, :] = np.logical_or(new_data[:, :-1, :], new_data[:, 1:, :]) + new_data[:, :, :-1] = np.logical_or(new_data[:, :, :-1], new_data[:, :, 1:]) + return binvox_rw.Voxels(new_data, new_data.shape, vox.translate, vox.scale, vox.axis_order) + +def align(vox, y_max): + new_data = np.zeros(vox.dims).astype(bool) + ind = np.argwhere(vox.data) + ind = ind + (np.array(vox.translate) - np.array([-0.5, -0.5 * (1 - y_max), -0.5])) * vox.dims[0] + # round to the nearest integer + # ind = np.round(ind).astype(int) + ind = np.ceil(ind).astype(int) + # clip to the valid range + ind = np.clip(ind, 0, vox.dims[0] - 1) + # new_data[ind[:, 0], ind[:, 1], ind[:, 2]] = True + return ind + +def get_skin_direction(joint_idx, data, parent_index, joints_matrix): + # Get points influenced by this joint (weight > 0) + weights = index_to_sparse(data['skins_index'].unsqueeze(0), data['skins_weight'].unsqueeze(0), [1, 8192, data['bones_num']])[0][:,joint_idx] + mask = weights > 0 + + if not torch.any(mask): + # If no points are influenced, return the opposite direction of its parent + parent_idx = parent_index[joint_idx].item() + if parent_idx == joint_idx: + return torch.tensor([0, 0, 0.001]) + parent_pos = joints_matrix[parent_idx, :3] + joint_pos = joints_matrix[joint_idx, :3] + direction = joint_pos - parent_pos + norm = torch.norm(direction) + if norm < 1e-8: # Add check for zero norm + return torch.tensor([0, 0, 0.001]) + normalized_direction = direction / norm + return normalized_direction * 0.01 + + # Get joint position + joint_pos = joints_matrix[joint_idx, :3] + + # Get weighted average direction from joint to influenced points + points = data['pc'][mask][:,:3] + point_weights = weights[mask] + + # Calculate directions from joint to each point + directions = points - joint_pos + + # Calculate weighted average direction + avg_direction = torch.sum(directions * point_weights.unsqueeze(1), dim=0) / torch.sum(point_weights) + if torch.norm(avg_direction) < 1e-5: + return torch.tensor([0, 0, 0.001]) + return avg_direction * 1.25 + +def obj2mesh(obj_path): + # open the obj as txt + vertices = [] + faces = [] + with open(obj_path, 'r') as f: + obj = f.readlines() + for line in obj: + if line.startswith('v '): + vertices.append(list(map(float, line.split()[1:]))) + elif line.startswith('f '): + faces.append(list(map(int, [i.split('/')[0] for i in line.split()[1:]]))) + vertices = np.array(vertices) + faces = np.array(faces) - 1 + # print(vertices.shape, faces.shape) + + # create trimesh mesh with given vertices and faces + mesh = trimesh.Trimesh(vertices, faces, process=False) + # print(mesh.vertices.shape, mesh.faces.shape) + return mesh \ No newline at end of file diff --git a/Anymate/utils/diffusion_encoder.py b/Anymate/utils/diffusion_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..ee0060d314cc64398ebe0aa28b9b671d4ef1e396 --- /dev/null +++ b/Anymate/utils/diffusion_encoder.py @@ -0,0 +1,258 @@ +import torch +import torch.nn as nn +from typing import Optional +from einops import repeat +import math +from ThirdParty.michelangelo.models.modules.transformer_blocks import ResidualCrossAttentionBlock,Transformer, checkpoint +from torch.nn import Sequential, Dropout, Linear, ReLU, Parameter, BatchNorm1d +from typing import List, Optional, Tuple, Union + +class ShapeAsLatentModule(nn.Module): + latent_shape: Tuple[int, int] + + def __init__(self, *args, **kwargs): + super().__init__() + + def encode(self, *args, **kwargs): + raise 
NotImplementedError + + def decode(self, *args, **kwargs): + raise NotImplementedError + + def query_geometry(self, *args, **kwargs): + raise NotImplementedError + +class FourierEmbedder(nn.Module): + + def __init__(self, + num_freqs: int = 6, + logspace: bool = True, + input_dim: int = 3, + include_input: bool = True, + include_pi: bool = True) -> None: + + """The initialization""" + + super().__init__() + + if logspace: + frequencies = 2.0 ** torch.arange( + num_freqs, + dtype=torch.float32 + ) + else: + frequencies = torch.linspace( + 1.0, + 2.0 ** (num_freqs - 1), + num_freqs, + dtype=torch.float32 + ) + + if include_pi: + frequencies *= torch.pi + + self.register_buffer("frequencies", frequencies, persistent=False) + self.include_input = include_input + self.num_freqs = num_freqs + + self.out_dim = self.get_dims(input_dim) + + def get_dims(self, input_dim): + temp = 1 if self.include_input or self.num_freqs == 0 else 0 + out_dim = input_dim * (self.num_freqs * 2 + temp) + + return out_dim + + def forward(self, x: torch.Tensor) -> torch.Tensor: + + if self.num_freqs > 0: + self.frequencies = self.frequencies.to(x.device) + embed = (x[..., None].contiguous() * self.frequencies).view(*x.shape[:-1], -1) + + if self.include_input: + return torch.cat((x, embed.sin(), embed.cos()), dim=-1) + else: + return torch.cat((embed.sin(), embed.cos()), dim=-1) + else: + return x + +def MLP(channels, batch_norm=True): + if batch_norm: + return Sequential(*[Sequential(Linear(channels[i - 1], channels[i]), ReLU(), BatchNorm1d(channels[i], momentum=0.1)) + for i in range(1, len(channels))]) + else: + return Sequential(*[Sequential(Linear(channels[i - 1], channels[i]), ReLU()) for i in range(1, len(channels))]) + +class CrossAttentionEncoder(nn.Module): + + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + num_latents: int, + fourier_embedder: FourierEmbedder, + point_feats: int, + width: int, + heads: int, + layers: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_ln_post: bool = False, + use_checkpoint: bool = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.num_latents = num_latents + self.query = nn.Parameter(torch.randn((num_latents, width), device=device, dtype=dtype) * 0.02) + + self.fourier_embedder = fourier_embedder + self.input_proj = nn.Linear(self.fourier_embedder.out_dim + point_feats, width, device=device, dtype=dtype) + self.cross_attn = ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) + + self.self_attn = Transformer( + device=device, + dtype=dtype, + n_ctx=num_latents, + width=width, + layers=layers, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=False + ) + + if use_ln_post: + self.ln_post = nn.LayerNorm(width, dtype=dtype, device=device) + else: + self.ln_post = None + + def _forward(self, pc, feats): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + + Returns: + + """ + + bs = pc.shape[0] + + data = self.fourier_embedder(pc) + if feats is not None: + data = torch.cat([data, feats], dim=-1) + data = self.input_proj(data) + + query = repeat(self.query, "m c -> b m c", b=bs) + latents = self.cross_attn(query, data) + latents = self.self_attn(latents) + + if self.ln_post is not None: + latents = self.ln_post(latents) + + return latents, pc + + def forward(self, pc: 
torch.FloatTensor, feats: Optional[torch.FloatTensor] = None): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + + Returns: + dict + """ + + return checkpoint(self._forward, (pc, feats), self.parameters(), self.use_checkpoint) + + + +class TransformerEncoder(ShapeAsLatentModule): + def __init__(self, *, + device: Optional[torch.device]='cuda', + dtype: Optional[torch.dtype], + num_latents: int = 16, + point_feats: int = 3, + embed_dim: int = 64, + num_freqs: int = 8, + include_pi: bool = True, + width: int = 768, + heads: int = 12, + num_encoder_layers: int = 8, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_ln_post: bool = False, + use_checkpoint: bool = False, + out_channels: int = 4): + + super().__init__() + + self.use_checkpoint = use_checkpoint + + self.num_latents = num_latents + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + + init_scale = init_scale * math.sqrt(1.0 / width) + self.encoder = CrossAttentionEncoder( + device=device, + dtype=dtype, + fourier_embedder=self.fourier_embedder, + num_latents=num_latents, + point_feats=point_feats, + width=width, + heads=heads, + layers=num_encoder_layers, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_ln_post=use_ln_post, + use_checkpoint=use_checkpoint + ) + self.width = width + self.out_channels = out_channels + self.device = device + + self.embed_dim = embed_dim + + def encode(self,data): + input_points = data['points_cloud'].to(self.device) + bs = input_points.shape[0] + pc, feats = input_points[...,:3], input_points[..., 3:] + latents, _ = self.encoder(pc, feats) + # print_time('after encoder') + latents = latents.reshape(bs,-1, self.width) + return latents + def encode_pc(self,points_cloud): + bs = points_cloud.shape[0] + input_points = points_cloud.to(self.device) + pc, feats = input_points[...,:3], input_points[..., 3:] + latents, _ = self.encoder(pc, feats) + + latents = latents.reshape(bs,-1, self.width) + return latents + def forward(self, data): + + # input_points = torch.from_numpy(np.array(data.points_cloud)).cuda() + input_points = data['points_cloud'].to(self.device) + pc, feats = input_points[...,:3], input_points[..., 3:] + latents, _ = self.encoder(pc, feats) + + latents = latents.reshape(-1, self.width) + latents =latents.reshape(-1, self.num_latents, self.out_channels) + latents[..., :3] = torch.tanh(latents[..., :3]) + latents[..., 3:] = torch.sigmoid(latents[..., 3:]) + + + return latents \ No newline at end of file diff --git a/Anymate/utils/diffusion_utils.py b/Anymate/utils/diffusion_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c171038ede38173542450c79feb5b9c375f81f4e --- /dev/null +++ b/Anymate/utils/diffusion_utils.py @@ -0,0 +1,314 @@ + +import numpy as np +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +from torchvision.utils import make_grid +import torch +from typing import List, Optional, Tuple, Union +import torch.nn as nn +import math +from timm.models.vision_transformer import Mlp, DropPath + +def my_collate_diff(batch,return_joints_num=128,random=False): + data = {} + for key in batch[0]: + if key=='vox' or key=='name' or key=='joints_num' or key=='skins_index' or key=='skins_weight' or key=='parent_index' or key=='conns' or key=='joints' or key=='bones' or key=='mesh_skins_index' or key=='mesh_skins_weight' or key=='mesh_pc' or key=='mesh_face': + data[key] = [sample[key] for sample in batch] + elif 
key=='pc': + data['points_cloud'] = torch.stack([sample['pc'] for sample in batch]) + elif key=='skins': + continue + elif key=='bones_num': + data[key] = torch.tensor([sample['bones_num'] for sample in batch]) + else: + data[key] = torch.stack([sample[key] for sample in batch]) + + if 'joints' in batch[0]: + padded_joints_matrix = torch.ones(len(data['name']), return_joints_num, 3) * (-3) + joints_matrix = torch.ones(len(data['name']), 96, 3) * (-3) + for i in range(len(data['name'])): + joints_matrix[i, :data['joints_num'][i], :] = data['joints'][i] + if not random: + for i in range(len(data['name'])): + padded_joints_matrix[i] = data['joints'][i].repeat(return_joints_num//data['joints_num'][i]+1,1)[:return_joints_num,:] + else: + for i in range(len(data['name'])): + padded_joints_matrix[i] = data['joints'][i][torch.randint(0, data['joints_num'][i], (return_joints_num,))] + data['joints_repeat'] = padded_joints_matrix + data['joints'] = joints_matrix + + return data + +def randn_tensor( + shape: Union[Tuple, List], + generator: Optional[Union[List["torch.Generator"], "torch.Generator"]] = None, + device: Optional["torch.device"] = None, + dtype: Optional["torch.dtype"] = None, + layout: Optional["torch.layout"] = None, +): + """A helper function to create random tensors on the desired `device` with the desired `dtype`. When + passing a list of generators, you can seed each batch size individually. If CPU generators are passed, the tensor + is always created on the CPU. + """ + # device on which tensor is created defaults to device + rand_device = device + batch_size = shape[0] + + layout = layout or torch.strided + device = device or torch.device("cpu") + + if generator is not None: + gen_device_type = generator.device.type if not isinstance(generator, list) else generator[0].device.type + if gen_device_type != device.type and gen_device_type == "cpu": + rand_device = "cpu" + + elif gen_device_type != device.type and gen_device_type == "cuda": + raise ValueError(f"Cannot generate a {device} tensor from a generator of type {gen_device_type}.") + + # make sure generator list of length 1 is treated like a non-list + if isinstance(generator, list) and len(generator) == 1: + generator = generator[0] + + if isinstance(generator, list): + shape = (1,) + shape[1:] + latents = [ + torch.randn(shape, generator=generator[i], device=rand_device, dtype=dtype, layout=layout) + for i in range(batch_size) + ] + latents = torch.cat(latents, dim=0).to(device) + else: + latents = torch.randn(shape, generator=generator, device=rand_device, dtype=dtype, layout=layout).to(device) + + return latents + +def timestep_embedding(timesteps, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. 
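+ 
+ (This helper is identical to `timestep_embedding` in Anymate/models/diffusion.py.)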
+ """ + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].to(timesteps.dtype) * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + +class CrossAttention(nn.Module): + def __init__( + self, + dim, + kv_dim=None, + num_heads=16, + qkv_bias=False, + attn_drop=0., + proj_drop=0., + ): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + kv_dim = dim if not kv_dim else kv_dim + self.wq = nn.Linear(dim, dim, bias=qkv_bias) + self.wk = nn.Linear(kv_dim, dim, bias=qkv_bias) + self.wv = nn.Linear(kv_dim, dim, bias=qkv_bias) + self.attn_drop_rate = attn_drop + self.attn_drop = nn.Dropout(self.attn_drop_rate) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x_q, x_kv): + B, N_q, C = x_q.shape + B, N_kv, _ = x_kv.shape + # [B, N_q, C] -> [B, N_q, H, C/H] -> [B, H, N_q, C/H] + q = self.wq(x_q).reshape(B, N_q, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) + # [B, N_kv, C] -> [B, N_kv, H, C/H] -> [B, H, N_kv, C/H] + k = self.wk(x_kv).reshape(B, N_kv, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) + # [B, N_kv, C] -> [B, N_kv, H, C/H] -> [B, H, N_kv, C/H] + v = self.wv(x_kv).reshape(B, N_kv, self.num_heads, C // self.num_heads).permute(0, 2, 1, 3) + + # [B, H, N_q, C/H] @ [B, H, C/H, N_kv] -> [B, H, N_q, N_kv] + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + # [B, H, N_q, N_kv] @ [B, H, N_kv, C/H] -> [B, H, N_q, C/H] + x = attn @ v + + # [B, H, N_q, C/H] -> [B, N_q, C] + x = x.transpose(1, 2).reshape(B, N_q, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class Compute_Block(nn.Module): + + def __init__(self, z_dim, num_heads=16, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm_z1 = norm_layer(z_dim) + self.attn = CrossAttention( + z_dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm_z2 = norm_layer(z_dim) + mlp_hidden_dim = int(z_dim * mlp_ratio) + self.mlp = Mlp(in_features=z_dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + def forward(self, z): + zn = self.norm_z1(z) + z = z + self.drop_path(self.attn(zn, zn)) + z = z + self.drop_path(self.mlp(self.norm_z2(z))) + return z + +class Read_Block(nn.Module): + + def __init__(self, z_dim, x_dim, num_heads=16, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm_x = norm_layer(x_dim) + self.norm_z1 = norm_layer(z_dim) + self.attn = CrossAttention( + z_dim, x_dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm_z2 = norm_layer(z_dim) + mlp_hidden_dim = int(z_dim * mlp_ratio) + self.mlp = Mlp(in_features=z_dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + def forward(self, z, x): + z = z + self.drop_path(self.attn(self.norm_z1(z), self.norm_x(x))) + z = z + self.drop_path(self.mlp(self.norm_z2(z))) + return z + +class Write_Block(nn.Module): + + def __init__(self, z_dim, x_dim, num_heads=16, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm_z = norm_layer(z_dim) + self.norm_x1 = norm_layer(x_dim) + self.attn = CrossAttention( + x_dim, z_dim, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm_x2 = norm_layer(x_dim) + mlp_hidden_dim = int(x_dim * mlp_ratio) + self.mlp = Mlp(in_features=x_dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + def forward(self, z, x): + x = x + self.drop_path(self.attn(self.norm_x1(x), self.norm_z(z))) + x = x + self.drop_path(self.mlp(self.norm_x2(x))) + return x + +class RCW_Block(nn.Module): + + def __init__(self, z_dim, x_dim, num_compute_layers=4, num_heads=16, + mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.read = Read_Block(z_dim, x_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop=drop, + attn_drop=attn_drop, drop_path=drop_path, act_layer=act_layer, norm_layer=norm_layer) + self.write = Write_Block(z_dim, x_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop=drop, + attn_drop=attn_drop, drop_path=drop_path, act_layer=act_layer, norm_layer=norm_layer) + self.compute = nn.ModuleList([ + Compute_Block(z_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop=drop, + attn_drop=attn_drop, drop_path=drop_path, act_layer=act_layer, norm_layer=norm_layer) + for _ in range(num_compute_layers) + ]) + + def forward(self, z, x): + z = self.read(z, x) + for layer in self.compute: + z = layer(z) + x = self.write(z, x) + return z, x + +def pairwise_distances(x, y): + #Input: x is a Nxd matrix + # y is an optional Mxd matirx + #Output: dist is a NxM matrix where dist[i,j] is the square norm between x[i,:] and y[j,:] + # if y is not given then use 'y=x'. + #i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 + x_norm = (x ** 2).sum(1).view(-1, 1) + y_t = torch.transpose(y, 0, 1) + y_norm = (y ** 2).sum(1).view(1, -1) + dist = x_norm + y_norm - 2.0 * torch.mm(x, y_t) + return torch.clamp(dist, 0.0, np.inf) + +def meanshift_cluster(pts_in, bandwidth, weights=None, max_iter=20): + """ + Meanshift clustering + :param pts_in: input points + :param bandwidth: bandwidth + :param weights: weights per pts indicting its importance in the clustering + :return: points after clustering + """ + diff = 1e10 + num_iter = 1 + while diff > 1e-3 and num_iter < max_iter: + Y = np.sum(((pts_in[np.newaxis, ...] 
- pts_in[:, np.newaxis, :]) ** 2), axis=2) + K = np.maximum(bandwidth**2 - Y, np.zeros(Y.shape)) + if weights is not None: + K = K * weights + row_sums = K.sum(axis=0, keepdims=True) + P = K / (row_sums + 1e-10) + P = P.transpose() + pts_in_prim = 0.3 * (np.matmul(P, pts_in) - pts_in) + pts_in + diff = np.sqrt(np.sum((pts_in_prim - pts_in)**2)) + pts_in = pts_in_prim + num_iter += 1 + return pts_in + +def nms_meanshift(pts_in, density, bandwidth): + """ + NMS to extract modes after meanshift. Code refers to sci-kit-learn. + :param pts_in: input points + :param density: density at each point + :param bandwidth: bandwidth used in meanshift. Used here as neighbor region for NMS + :return: extracted clusters. + """ + Y = np.sum(((pts_in[np.newaxis, ...] - pts_in[:, np.newaxis, :]) ** 2), axis=2) + sorted_ids = np.argsort(density)[::-1] + unique = np.ones(len(sorted_ids), dtype=bool) + dist = np.sqrt(Y) + for i in sorted_ids: + if unique[i]: + neighbor_idxs = np.argwhere(dist[:, i] <= bandwidth) + unique[neighbor_idxs.squeeze()] = 0 + unique[i] = 1 # leave the current point as unique + pts_in = pts_in[unique] + return pts_in + +def get_predictions(y_pred_np, attn_pred_np=None,bandwidth=0.05, threshold=0.001): + """ + get the final predictions + :param pts: input points + :param weights: weight per point during clustering + :return: clustered points + """ + # if attn_pred_np is None: + # attn_pred_np = np.ones(y_pred_np.shape[0]) + y_pred_np = meanshift_cluster(y_pred_np, bandwidth, attn_pred_np, max_iter=40) + + + Y_dist = np.sum(((y_pred_np[np.newaxis, ...] - y_pred_np[:, np.newaxis, :]) ** 2), axis=2) + density = np.maximum(bandwidth ** 2 - Y_dist, np.zeros(Y_dist.shape)) + density = np.sum(density, axis=0) + density_sum = np.sum(density) + y_pred_np = y_pred_np[density / density_sum > threshold] + + density = density[density / density_sum > threshold] + pred_joints = nms_meanshift(y_pred_np, density, bandwidth) + return pred_joints + + +if __name__ == '__main__': + points_cloud = np.ones((100, 3)) + predict_out = get_predictions(points_cloud, bandwidth=0.05, threshold=0.001) + print(predict_out.shape) + \ No newline at end of file diff --git a/Anymate/utils/eval_utils.py b/Anymate/utils/eval_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b58622c660cdc2482d924197d65019c94816cd51 --- /dev/null +++ b/Anymate/utils/eval_utils.py @@ -0,0 +1,225 @@ +from tqdm import tqdm +import torch +import torch.nn.functional as F +import numpy as np +import point_cloud_utils as pcu +from Anymate.utils.loss_utils import chamfer_distance_with_average, cross_entropy_with_probs_batch, cos_loss, cos_loss_clamp +from ThirdParty.Rignet_utils.utils import get_skel +from ThirdParty.Rignet_utils.Rignet_loss import edit_dist, chamfer_dist, joint2bone_chamfer_dist, bone2bone_chamfer_dist +from scipy.optimize import linear_sum_assignment + +def evaluate_joint(joints, joints_gt, threshold=1e-1): + """ + joints: list of predicted joints: tensor of shape (n,joints_num,3) + joints_gt: list of ground truth joints : tensor of shape (n,joints_num,3) + """ + chamfer_loss_all = 0 + emd_loss_all = 0 + precision = 0 + recall = 0 + count = 0 + + for i in tqdm(range(len(joints))): + joint_predict = joints[i].cpu() + joint_gt = joints_gt[i].cpu() + distance_matrix = torch.cdist(joint_gt, joint_predict) # (n_gt, n_predict) + n_gt,n_predict = distance_matrix.shape + min_distance_pred = torch.min(distance_matrix, dim=0) + min_distance_gt = torch.min(distance_matrix, dim=1) + precision += 
torch.sum(min_distance_pred.values < threshold).item()/n_predict + recall += torch.sum(min_distance_gt.values < threshold).item()/n_gt + + chamfer_loss_all += chamfer_distance_with_average(joint_predict.unsqueeze(0), joint_gt.unsqueeze(0)) + joint_predict = joint_predict.numpy().astype(np.float64) + joint_gt = joint_gt.numpy().astype(np.float64) + emd,_ = pcu.earth_movers_distance(joint_predict, joint_gt) + emd_loss_all += emd + + count += 1 + + print('------------------------------------') + print('Evaluation results for joint:') + print('chamfer_loss:', chamfer_loss_all/count) + print('emd_loss:', emd_loss_all/count) + print('precision:', precision/count) + print('recall:', recall/count) + print('count:', count) + print('------------------------------------') + return chamfer_loss_all/count, emd_loss_all/count, precision/count, recall/count + +def evaluate_connectivity(conns, conns_gt, joints_gt, vox_list): + + """ + conns: list of predicted connections probability: tensor of shape (n,joints_num,joints_num) + conns_gt: list of ground truth connections: tensor of shape (n,joints_num,joints_num) + """ + + precision_all = 0 + recall_all = 0 + cross_entropy_all = 0 + bone2bone_dist_con = 0 + count = 0 + for i in tqdm(range(len(conns))): + + conn_predict = conns[i].cpu().numpy() + conn_gt = conns_gt[i].cpu().numpy() + joints = joints_gt[i].cpu().numpy() + vox = vox_list[i] + + cross_entropy_all += cross_entropy_with_probs_batch(torch.from_numpy(conn_predict).unsqueeze(0), torch.from_numpy(conn_gt).unsqueeze(0), reduction='mean') + # consider to add tree edit distance (need joint and vox information) + pred_skel, parent_matrix = get_skel(joints, conn_predict, vox=vox) + gt_skel, parent_matrix = get_skel(joints, conn_gt, vox=vox) + bone2bone_dist_con += bone2bone_chamfer_dist(pred_skel, gt_skel) + + conn_predict = np.argmax(conn_predict, axis=1) + conn_gt = np.argmax(conn_gt, axis=1) + connection_matrix_pre = torch.zeros((len(conn_predict),len(conn_predict))) + connection_matrix_gt = torch.zeros((len(conn_predict),len(conn_predict))) + + for i in range(len(conn_predict)): + connection_matrix_pre[i][conn_predict[i]] = 1 + connection_matrix_pre[conn_predict[i]][i] = 1 + connection_matrix_gt[i][conn_gt[i]] = 1 + connection_matrix_gt[conn_gt[i]][i] = 1 + + TP = 0 + FP = 0 + FN = 0 + FP = 0 + + for i in range(len(conn_predict)): + if connection_matrix_gt[i][conn_predict[i]] == 1: + TP += 1 + if connection_matrix_gt[i][conn_predict[i]] == 0: + FP += 1 + if connection_matrix_pre[i][conn_gt[i]] == 0: + FN += 1 + + precision = TP/(TP+FP) + recall = TP/(TP+FN) + + precision_all += precision + recall_all += recall + count+=1 + print('------------------------------------') + print('Evaluation results for connectivity:') + print('precision:',precision_all/count) + print('recall:',recall_all/count) + print('cross_entropy:',cross_entropy_all/count) + print('bone2bone_dist_con:',bone2bone_dist_con/count) + print('count:',count) + print('------------------------------------') + return precision_all/count, recall_all/count + +def evaluate_skinning(skins, skins_gt, threshold=5e-2): + """ + skins: list of predicted skinning weights: tensor of shape (n,vertices_num, bones_num) + skins_gt: list of ground truth skinning weights: tensor of shape (n,vertices_num, bones_num) + """ + cs_loss = 0 + ce_loss = 0 + cs_loss_clamp = 0 + count = 0 + L1_loss = 0 + precision = 0 + recall = 0 + mean_l1_dist = 0 + + for i in tqdm(range(len(skins))): + skin_predict = skins[i].cpu().unsqueeze(0) + skin_gt = 
skins_gt[i].cpu().unsqueeze(0) + + precision_one = 0 + recall_one = 0 + + ce_loss += cross_entropy_with_probs_batch(skin_predict, skin_gt) + cs_loss += cos_loss(skin_predict, skin_gt) + cs_loss_clamp += cos_loss_clamp(skin_predict, skin_gt) + L1_loss += F.l1_loss(skin_predict, skin_gt) + skin_predict = skin_predict[0].cpu().detach().numpy() + skin_gt = skin_gt[0].cpu().detach().numpy() + mean_l1_dist += np.sum(np.abs(skin_predict - skin_gt )) / len(skin_predict) + + for i in range(len(skin_predict)): + influencial_bone_predict = skin_predict[i] >=threshold + influencial_bone_gt = skin_gt[i] >=threshold + influencial_bone_correct = influencial_bone_predict*influencial_bone_gt + + if np.sum(influencial_bone_predict)==0 or np.sum(influencial_bone_gt)==0: + continue + precision_one += np.sum(influencial_bone_correct)/np.sum(influencial_bone_predict) + recall_one += np.sum(influencial_bone_correct)/np.sum(influencial_bone_gt) + + precision += precision_one/len(skin_predict) + recall += recall_one/len(skin_predict) + count +=1 + + print('------------------------------------') + print('Evaluation results for skinning:') + print('cos loss: ', cs_loss/count) + print('ce loss: ', ce_loss/count) + print('cs_loss_clamp: ', cs_loss_clamp/count) + print('L1 loss: ', L1_loss/count) + print('mean_l1_dist: ', mean_l1_dist/count) + print('precision: ', precision/count) + print('recall: ', recall/count) + print('count: ', count) + print('------------------------------------') + +def evaluate_skeleton(joints,joints_gt,conns,conns_gt,vox_list,fs_threshold=0.2): + + """ + joints: list of predicted joints: tensor of shape (n,joints_num,3) + joints_gt: list of ground truth joints : tensor of shape (n,joints_num,3) + conns: list of predicted connections probability: tensor of shape (n,joints_num,joints_num) + conns_gt: list of ground truth connections: tensor of shape (n,joints_num,joints_num) + vox_list: list of voxel: (n,88,88,88) + """ + + data_count = 0 + chamfer_score = 0 + j2b_chamfer_joint = 0 + bone2bone_dist_joint = 0 + edit_distance_joint = 0 + joint_IoU_total = 0 + joint_precision_total = 0 + joint_recall_total = 0 + + for i in tqdm(range(len(joints))): + joint_predict = joints[i].cpu().numpy() + joint_gt = joints_gt[i].cpu().numpy() + conn_predict = conns[i].cpu().numpy() + conn_gt = conns_gt[i].cpu().numpy() + vox = vox_list[i] + + # add shape diameter after we have vertex and faces + # shape_diameter = get_shape_diameter(mesh, points, parent_index[:,0]) + + dist_matrix = np.sqrt(np.sum((joint_predict[np.newaxis, ...] 
- joint_gt[:, np.newaxis, :]) ** 2, axis=2)) + row_ind, col_ind = linear_sum_assignment(dist_matrix) + # fs_threshold = shape_diameter[row_ind] + joint_IoU = 2 * np.sum(dist_matrix[row_ind, col_ind] < fs_threshold) / (len(joint_predict) + len(joint_gt)) + joint_IoU_total += joint_IoU + joint_precision = np.sum(dist_matrix[row_ind, col_ind] < fs_threshold) / len(joint_predict) + joint_precision_total += joint_precision + joint_recall = np.sum(dist_matrix[row_ind, col_ind] < fs_threshold) / len(joint_gt) + joint_recall_total += joint_recall + + pred_skel_joint,parent_matrix = get_skel(joint_predict,conn_predict,vox=vox) + gt_skel, parent_matrix = get_skel(joint_gt,conn_gt,vox=vox) + chamfer_score += chamfer_dist(joint_predict, joint_gt) + j2b_chamfer_joint += joint2bone_chamfer_dist(pred_skel_joint, gt_skel) + bone2bone_dist_joint += bone2bone_chamfer_dist(pred_skel_joint, gt_skel) + edit_distance_joint += edit_dist(pred_skel_joint, gt_skel) + data_count+=1 + + print('------------------------------------') + print('Evaluation results for skeleton:') + print('chamfer_score:', chamfer_score/data_count) + print('j2b_chamfer_joint:', j2b_chamfer_joint/data_count) + print('bone2bone_dist_joint:', bone2bone_dist_joint/data_count) + print('joint_IoU:', joint_IoU_total/data_count) + print('joint_precision:', joint_precision_total/data_count) + print('joint_recall:', joint_recall_total/data_count) + print('------------------------------------') \ No newline at end of file diff --git a/Anymate/utils/loss_utils.py b/Anymate/utils/loss_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6acff96ada6c34426405f6b3e19ce080947d30db --- /dev/null +++ b/Anymate/utils/loss_utils.py @@ -0,0 +1,56 @@ +import torch +import torch.nn.functional as F +import torch.nn as nn +def chamfer_distance_with_average(p1, p2): + + ''' + Calculate Chamfer Distance between two point sets + :param p1: size[1, N, D] + :param p2: size[1, M, D] + :param debug: whether need to output debug info + :return: sum of Chamfer Distance of two point sets + ''' + + assert p1.size(0) == 1 and p2.size(0) == 1 + assert p1.size(2) == p2.size(2) + p1 = p1.repeat(p2.size(1), 1, 1) + p1 = p1.transpose(0, 1) + p2 = p2.repeat(p1.size(0), 1, 1) + dist = torch.add(p1, torch.neg(p2)) + dist_norm = torch.norm(dist, 2, dim=2) + dist1 = torch.min(dist_norm, dim=1)[0] + dist2 = torch.min(dist_norm, dim=0)[0] + loss = 0.5 * ((torch.mean(dist1)) + (torch.mean(dist2))) + return loss + +def cross_entropy_with_probs_batch(input, target, weight=None, reduction="mean"): # tested, same as nn.CrossEntropyLoss at dim=1, CE can be negative + # input_logsoftmax = F.log_softmax(input, dim=2) + input_logsoftmax = torch.log(input+1e-6) + cum_losses = -target * input_logsoftmax + if weight is not None: + cum_losses = cum_losses * weight.unsqueeze(1) # Broadcasting the weight + + if reduction == "none": + return cum_losses + elif reduction == "mean": + return cum_losses.sum(dim=2).mean(dim=1).mean(dim=0) + elif reduction == "sum": + return cum_losses.sum(dim=2).sum(dim=1).mean(dim=0) + else: + raise ValueError("Keyword 'reduction' must be one of ['none', 'mean', 'sum']") + +def cos_loss(input, target): + # input = F.softmax(input, dim=-1) + cos = nn.CosineSimilarity(dim=-1, eps=1e-6) + similarity = cos(input, target) + loss = 1 - similarity.mean() + return loss + +def cos_loss_clamp(input, target): + # input = F.softmax(input, dim=-1)*(1 + 2*0.001) - 0.001 + input = input*(1 + 2*0.001) - 0.001 + input = torch.clamp(input, 0, 1) + cos = 
nn.CosineSimilarity(dim=-1, eps=1e-6) + similarity = cos(input, target) + loss = 1 - similarity.mean() + return loss \ No newline at end of file diff --git a/Anymate/utils/render_utils.py b/Anymate/utils/render_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..db98616131cb43dcc10c3762fbb9ee85533e0b2e --- /dev/null +++ b/Anymate/utils/render_utils.py @@ -0,0 +1,1169 @@ +import bpy +import numpy as np +from mathutils import Vector, Matrix +from tqdm import tqdm +import glob +import os +import torch +from PIL import Image +import matplotlib.pyplot as plt +cmap = plt.get_cmap('viridis') +import torch +import torchvision.io as io +import cv2 +import trimesh + +def get_data(ids, root, animate=False, shift_rig=True, id2=None, rignet=False): + dataset= torch.load('/data2/aod/testJointDataSet_9.pt') + joints = [] + conns = [] + skins = [] + + for id in ids: + if id2 is None: + for data in dataset: + if id in data['name']: + print(data['name']) + break + else: + for data in dataset: + if id2 in data['name']: + print(data['name']) + break + + joint = torch.tensor(torch.load(root + '/joints/' + id + '.pt')).cpu() + if shift_rig and id2 is None: + y_max = data['points_cloud'][:,1].max() + joint = joint/2 + torch.tensor([0,y_max/2,0]) + temp = joint[:, 1].clone() + joint[:, 1] = -joint[:, 2] + joint[:, 2] = temp + + conn = torch.tensor(torch.load(root + '/connectivity/' + id + '.pt')).long() + if not animate: + skin = torch.load(root + '/skinning/' + id + '.pt') + if rignet: + skins.append(skin[0]) + elif id2 is None: + skins.append(skin[0].softmax(dim=-1).cpu().numpy()) + else: + skins.append(skin) + + joints.append(joint) + conns.append(conn) + + return joints, conns, skins + +def index_to_sparse(index, weight, shape): + sparse_matrix = np.zeros([shape[0], shape[1], shape[2]+1]) + + row_indices, col_indices = np.meshgrid(np.arange(sparse_matrix.shape[0]), np.arange(sparse_matrix.shape[1]), indexing='ij') + + row_indices = np.expand_dims(row_indices, axis=-1) + col_indices = np.expand_dims(col_indices, axis=-1) + + sparse_matrix[row_indices, col_indices, index] = weight + + + return torch.from_numpy(sparse_matrix[:, :, :-1]) + +def get_gt(ids, root): + dataset= torch.load('/data2/aod/testJointDataSet_9.pt') + joints = [] + conns = [] + skins = [] + + for id in ids: + for data in dataset: + if id in data['name']: + print(data['name']) + break + + joint = data['joints_matrix'][:data['joints_num'], :3] + y_max = data['points_cloud'][:,1].max() + joint = joint/2 + torch.tensor([0,y_max/2,0]) + temp = joint[:, 1].clone() + joint[:, 1] = -joint[:, 2] + joint[:, 2] = temp + + conn = data['parent_index'][:data['joints_num']].long().unsqueeze(1) + + skin = index_to_sparse(data['skin_index'].unsqueeze(0), data['skin_weight'].unsqueeze(0), [1, 8192, data['joints_num']]) + + joints.append(joint) + conns.append(conn) + skins.append(skin[0]) + + return joints, conns, skins + +def empty(): + bpy.ops.wm.read_homefile(use_empty=True) + # Delete all mesh objects from the scene + # for obj in bpy.context.scene.objects: + # bpy.data.objects.remove(obj, do_unlink=True) + +def add_mesh(filepath, co=None, tex=False, color=(0.5, 0.5, 0.5, 1)): + bpy.ops.wm.obj_import(filepath=filepath) + obj = bpy.context.object + + if not tex: + # give the mesh a material + bpy.context.view_layer.objects.active = obj + bpy.ops.object.shade_smooth() + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.normals_make_consistent(inside=False) + 
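+ # after recomputing normals, drop back to object mode and replace any imported
+ # materials with a single flat-colored Principled BSDF (roughness 0.8, color taken
+ # from the `color` argument), since this branch runs only when `tex` is False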
bpy.ops.object.mode_set(mode='OBJECT') + mat = bpy.data.materials.new(name='mat') + obj.data.materials.clear() + obj.data.materials.append(mat) + mat.use_nodes = True + mat.node_tree.nodes.clear() + bsdf = mat.node_tree.nodes.new('ShaderNodeBsdfPrincipled') + output = mat.node_tree.nodes.new('ShaderNodeOutputMaterial') + mat.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + mat.node_tree.nodes['Principled BSDF'].inputs['Roughness'].default_value = 0.8 + # mat.node_tree.nodes['Principled BSDF'].inputs['Specular'].default_value = 0.5 + # mat.node_tree.nodes['Principled BSDF'].inputs['Metallic'].default_value = 0.5 + mat.node_tree.nodes['Principled BSDF'].inputs['Base Color'].default_value = color + if co is not None: + obj.parent = co + +def create_sphere(location, size=0.01, color=(1.0, 0.0, 0.0, 1.0), reduced=False): + if reduced: + bpy.ops.mesh.primitive_uv_sphere_add(radius=size, location=location, segments=8, ring_count=4) + else: + bpy.ops.mesh.primitive_uv_sphere_add(radius=size, location=location) + sphere = bpy.context.active_object + + material_name = f"ColorMaterial_{color}" + material = bpy.data.materials.get(material_name) + + if not material: + material = bpy.data.materials.new(name=material_name) + material.use_nodes = True + material.node_tree.nodes.clear() + bsdf = material.node_tree.nodes.new('ShaderNodeBsdfPrincipled') + output = material.node_tree.nodes.new('ShaderNodeOutputMaterial') + material.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + material.node_tree.nodes['Principled BSDF'].inputs['Base Color'].default_value = color + + sphere.data.materials.append(material) + + return sphere + +def add_co(location=(0,0,0), rotation=(0,0,0), scale=(1,1,1)): + co = bpy.data.objects.new("CoordinateSystem", None) + bpy.context.collection.objects.link(co) + bpy.context.view_layer.objects.active = co + co.empty_display_size = 0.1 + co.empty_display_type = 'ARROWS' + co.location = location + co.rotation_euler = rotation + co.scale = scale + + return co + +def add_joint(joints_matrix, co=None): + + for i, joint in enumerate(joints_matrix): + sphere = create_sphere((joint[0], joint[1], joint[2]), size=0.01) + if co is not None: + sphere.parent = co + +def create_blue_cone(base_point, apex_point, radius=0.1): + # Calculate the radius and length of the cone + direction = apex_point - base_point + length = direction.length + + # Create cone mesh + bpy.ops.mesh.primitive_cone_add(vertices=32, radius1=radius, depth=length, location=(base_point + direction * 0.5)) + cone = bpy.context.active_object + + # Create or get the blue material + blue_material = bpy.data.materials.get("BlueMaterial") + if not blue_material: + blue_material = bpy.data.materials.new(name="BlueMaterial") + blue_material.use_nodes = True + blue_material.node_tree.nodes.clear() + bsdf = blue_material.node_tree.nodes.new('ShaderNodeBsdfPrincipled') + output = blue_material.node_tree.nodes.new('ShaderNodeOutputMaterial') + blue_material.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + blue_material.node_tree.nodes['Principled BSDF'].inputs['Base Color'].default_value = (0.0, 0.0, 1.0, 1.0) + + cone.data.materials.append(blue_material) + + # Set the cone's orientation + cone.rotation_euler = direction.to_track_quat('Z', 'Y').to_euler() + + return cone + +def add_conn(con_index, joints_matrix, co=None): + for i, parent in enumerate(con_index): + parent = parent.item() + if parent != i: + parent_co = Vector((joints_matrix[parent][0], joints_matrix[parent][1], 
joints_matrix[parent][2])) + position = Vector((joints_matrix[i][0], joints_matrix[i][1], joints_matrix[i][2])) + cone = create_blue_cone(parent_co, position, radius=0.008) + if co is not None: + cone.parent = co + +def merge_images(img1, img2, output_path, alpha=1): + image_mesh = Image.open(img1) + image_rig = Image.open(img2) + + if alpha == 1: + image_mesh.paste(image_rig, (0, 0), image_rig) + image_mesh.save(output_path) + return + + data = image_rig.getdata() + data2 = image_mesh.getdata() + new_data = [] + for item, item2 in zip(data, data2): + if item[3] == 0: + new_data.append(item2) + else: + new_data.append((int(item[0]*alpha + item2[0]*(1-alpha)), int(item[1]*alpha + item2[1]*(1-alpha)), int(item[2]*alpha + item2[2]*(1-alpha)), 255)) + image_mesh.putdata(new_data) + + # image_mesh.paste(image_rig, (0, 0), image_rig) + + image_mesh.save(output_path) + +def merge_videos(video1, video2, output_path): + + # overlap two videos together, video1 is the background, video2 is the foreground + # os.system(f'ffmpeg -i {video1} -i {video2} -filter_complex "[0:v][1:v] overlay=0:0:enable=\'between(t,0,60)\'" -pix_fmt yuv420p -c:a copy {output_path}') + + frames_path_1 = glob.glob(video1 + '*.png') + total_frames = len(frames_path_1) + combined_frames = [] + for i in range(total_frames): + frame1 = Image.open(f'{video1}{i:04d}.png') + frame2 = Image.open(f'{video2}{i:04d}.png') + frame1.paste(frame2, (0, 0), frame2) + combined_frames.append(frame1) + + # paste the combined frames on a pure white background + combined_frames_white = [] + for frame in combined_frames: + white = Image.new('RGB', frame.size, (255, 255, 255)) + white.paste(frame, (0, 0), frame) + combined_frames_white.append(white) + + combined_frames=combined_frames_white + + combined_videos = torch.stack([torch.tensor(np.array(frame)) for frame in combined_frames])[..., :3] + + # write the video with high quality + # io.write_video(output_path, combined_videos, 24) + io.write_video(output_path, combined_videos, 24, video_codec='libx264', options={'crf': '18'}) + + # comvert the frames to mp4 video + + # video = cv2.VideoWriter(output_path, cv2.VideoWriter_fourcc(*'H264'), 30, (frame1.size[0], frame1.size[1])) + # for frame in combined_frames: + # video.write(cv2.cvtColor(np.array(frame), cv2.COLOR_RGB2BGR)) + # video.release() + + # video_1, audio_1, fps_1 = io.read_video(video1, pts_unit="sec") + # video_2, audio_2, fps_2 = io.read_video(video2, pts_unit="sec") + # non_zero = video_2.sum(dim=-1) != 0 + # non_zero = torch.stack([non_zero, non_zero, non_zero], dim=-1) + # video_1[non_zero] = video_2[non_zero] + # io.write_video(output_path, video_1, int(fps_1['video_fps'])) + +def add_skin(filepath, skin, bone_index, co=None, pc=None): + bpy.ops.wm.obj_import(filepath=filepath) + obj = bpy.context.object + + bpy.context.view_layer.objects.active = obj + bpy.ops.object.shade_smooth() + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.mesh.normals_make_consistent(inside=False) + bpy.ops.object.mode_set(mode='OBJECT') + + if co is not None: + obj.parent = co + + if pc is not None: + skin = np.array(skin) + pc = pc[:, :3].numpy() + y_max = pc[:, 1].max() + pc = pc + np.array([0, y_max, 0]) + pc = pc / 2 + new_skin = np.zeros((len(obj.data.vertices), skin.shape[1])) + for i, v in enumerate(obj.data.vertices): + v_co = np.array(v.co) + + dist = np.linalg.norm(pc - v_co, axis=1) + # min_idx = np.argmin(dist) + # sort, and then get top 3 index + min_idx_list = np.argsort(dist)[:3] + + for min_idx 
in min_idx_list: + # get inverse distance weight + interpolate_weight = np.square(1 / dist[min_idx]) / np.square(1 / dist[min_idx_list]).sum() + new_skin[i] = new_skin[i] + interpolate_weight * skin[min_idx] + + skin = new_skin + + color_list = skin + + color_list = color_list[:,bone_index] + + vertex_colors = obj.data.vertex_colors.new() + + for poly in obj.data.polygons: + for loop_index in poly.loop_indices: + + vertex_index = obj.data.loops[loop_index].vertex_index + # Get the weight for the vertex + weight = color_list[vertex_index] + + color = cmap(weight) + + # Assign the weight to the vertex color (RGBA) + vertex_colors.data[loop_index].color = color # Use the weight for RGB + + # let bsdf use vertex color and then output to surface + mat = bpy.data.materials.new(name='mat') + # delete all material of obj + obj.data.materials.clear() + obj.data.materials.append(mat) + mat.use_nodes = True + mat.node_tree.nodes.clear() + vertex_color = mat.node_tree.nodes.new('ShaderNodeVertexColor') + bsdf = mat.node_tree.nodes.new('ShaderNodeBsdfPrincipled') + output = mat.node_tree.nodes.new('ShaderNodeOutputMaterial') + mat.node_tree.links.new(vertex_color.outputs['Color'], bsdf.inputs['Base Color']) + mat.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + mat.node_tree.nodes['Principled BSDF'].inputs['Roughness'].default_value = 0.5 + + + +def add_pc(points): + base_sphere = create_sphere((points[0][0], points[0][1], points[0][2]), size=0.003, color=cmap(0), reduced=True) + # copy the base sphere to create the rest of the spheres + for i in tqdm(range(1, points.shape[0])): + new_sphere = base_sphere.copy() + new_sphere.location = (points[i][0], points[i][1], points[i][2]) + bpy.context.collection.objects.link(new_sphere) + +def add_floor(back=False): + # create a plane as floor + bpy.ops.mesh.primitive_plane_add(size=50, enter_editmode=False, align='WORLD', location=(0, 20, 0)) + floor = bpy.context.object + floor.name = 'floor' + # set white material for floor + mat = bpy.data.materials.new(name='floor_mat') + floor.data.materials.append(mat) + mat.use_nodes = True + mat.node_tree.nodes.clear() + bsdf = mat.node_tree.nodes.new('ShaderNodeBsdfDiffuse') + output = mat.node_tree.nodes.new('ShaderNodeOutputMaterial') + mat.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + mat.node_tree.nodes['Diffuse BSDF'].inputs['Color'].default_value = (1, 1, 1, 1) + + if back: + # create a plane as background + bpy.ops.mesh.primitive_plane_add(size=30, enter_editmode=False, align='WORLD', location=(0, 15, 0), rotation=(-0.5*np.pi, 0, 0)) + background = bpy.context.object + background.name = 'background' + # set white material for background + mat = bpy.data.materials.new(name='background_mat') + background.data.materials.append(mat) + mat.use_nodes = True + mat.node_tree.nodes.clear() + bsdf = mat.node_tree.nodes.new('ShaderNodeBsdfDiffuse') + output = mat.node_tree.nodes.new('ShaderNodeOutputMaterial') + mat.node_tree.links.new(bsdf.outputs['BSDF'], output.inputs['Surface']) + mat.node_tree.nodes['Diffuse BSDF'].inputs['Color'].default_value = (1, 1, 1, 1) + +def setup_render(): + # color management + bpy.context.scene.view_settings.view_transform = 'Standard' + + # set the render engine to Cycles + bpy.context.scene.render.engine = 'CYCLES' + # enable cuda + bpy.context.preferences.addons['cycles'].preferences.get_devices() + bpy.context.preferences.addons['cycles'].preferences.compute_device_type = 'CUDA' + bpy.context.scene.cycles.device = 'GPU' + + # set render 
background to transparent + bpy.context.scene.render.film_transparent = True + +def render(output_path, shadow=True, shading=True, quick=False): + + if shadow: + add_floor() + + if shading: + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (0.1*np.pi, 0, 0) + # set angle + light.data.angle = 0.08*np.pi + + else: + # global illumination by create world light + world = bpy.data.worlds.new('World') + bpy.context.scene.world = world + world.use_nodes = True + world_light = world.node_tree.nodes['Background'] + world_light.inputs['Strength'].default_value = 1 + world_light.inputs['Color'].default_value = (1, 1, 1, 1) + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((2, -1.5, 2)) + look_at = Vector((0, 0, 0.36)) + # compute the rotation + camera.rotation_mode = 'QUATERNION' + camera.rotation_quaternion = (camera.location - look_at).to_track_quat('Z', 'Y') + # set size + camera.data.sensor_width = 26 + # set the camera to be active + bpy.context.scene.camera = camera + + + + # make the rendered image square + bpy.context.scene.render.resolution_x = 2048 + bpy.context.scene.render.resolution_y = 2048 + + setup_render() + + if quick: + # reduce the number of samples + bpy.context.scene.cycles.samples = 128 + bpy.context.scene.cycles.preview_samples = 128 + bpy.context.scene.cycles.max_bounces = 1 + bpy.context.scene.cycles.min_bounces = 1 + bpy.context.scene.cycles.diffuse_bounces = 1 + bpy.context.scene.cycles.glossy_bounces = 1 + else: + bpy.context.scene.cycles.samples = 1024 + bpy.context.scene.cycles.preview_samples = 1024 + bpy.context.scene.cycles.max_bounces = 4 + bpy.context.scene.cycles.min_bounces = 4 + bpy.context.scene.cycles.diffuse_bounces = 4 + bpy.context.scene.cycles.glossy_bounces = 4 + + # output path + # output_path = '/home/ydengbd/objaverse/test.png' + bpy.context.scene.render.filepath = output_path + bpy.ops.render.render(write_still=True) + +def render_spin(output_path, co, shadow=True, shading=True, quick=False): + # create a new coordinate system at the origin + new_co = add_co(location=(0, 0, 0), rotation=(0, 0, 0), scale=(1, 1, 1)) + # set the object to be the child of the new coordinate system + co.parent = new_co + + # add spin animation to the new coordinate system + new_co.rotation_mode = 'XYZ' + new_co.rotation_euler = (0, 0, 0) + new_co.keyframe_insert(data_path='rotation_euler', index=2, frame=0) + new_co.rotation_euler = (0, 0, 2*np.pi) + new_co.keyframe_insert(data_path='rotation_euler', index=2, frame=60) + + if shadow: + add_floor() + + if shading: + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (0.1*np.pi, 0, 0) + # set angle + light.data.angle = 0.08*np.pi + + else: + # global illumination by create world light + world = bpy.data.worlds.new('World') + bpy.context.scene.world = world + world.use_nodes = True + world_light = world.node_tree.nodes['Background'] + world_light.inputs['Strength'].default_value = 1 + world_light.inputs['Color'].default_value = (1, 1, 1, 1) + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = 
bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((2, -1.5, 2)) + look_at = Vector((0, 0, 0.36)) + # compute the rotation + camera.rotation_mode = 'QUATERNION' + camera.rotation_quaternion = (camera.location - look_at).to_track_quat('Z', 'Y') + # set size + camera.data.sensor_width = 26 + # set the camera to be active + bpy.context.scene.camera = camera + + + # render the animation + bpy.context.scene.frame_start = 0 + bpy.context.scene.frame_end = 60 + + # make the rendered image square + bpy.context.scene.render.resolution_x = 1024 + bpy.context.scene.render.resolution_y = 1024 + + setup_render() + + if quick: + # reduce the number of samples + bpy.context.scene.cycles.samples = 128 + bpy.context.scene.cycles.preview_samples = 128 + bpy.context.scene.cycles.max_bounces = 1 + bpy.context.scene.cycles.min_bounces = 1 + bpy.context.scene.cycles.diffuse_bounces = 1 + bpy.context.scene.cycles.glossy_bounces = 1 + else: + bpy.context.scene.cycles.samples = 512 + bpy.context.scene.cycles.preview_samples = 512 + bpy.context.scene.cycles.max_bounces = 4 + bpy.context.scene.cycles.min_bounces = 4 + bpy.context.scene.cycles.diffuse_bounces = 4 + bpy.context.scene.cycles.glossy_bounces = 4 + + # output path + bpy.context.scene.render.filepath = output_path + if output_path.endswith('.mp4'): + # render a mp4 video + bpy.context.scene.render.image_settings.file_format = 'FFMPEG' + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + + bpy.ops.render.render(animation=True, write_still=True) + +def setup_anim(armature, arti): + # enter pose mode + print('Arti shape', arti.shape) + bpy.ops.object.mode_set(mode='POSE') + print('total bones', len(armature.pose.bones)) + for i, pose_bone in enumerate(armature.pose.bones): + pose_bone.rotation_mode = 'XYZ' + pose_bone.keyframe_insert(data_path="rotation_euler", frame=0) + + pose_bone.rotation_euler = arti[i] + pose_bone.keyframe_insert(data_path="rotation_euler", frame=30) + + pose_bone.rotation_euler = Vector((0, 0, 0)) + pose_bone.keyframe_insert(data_path="rotation_euler", frame=60) + bpy.ops.object.mode_set(mode='OBJECT') + +def render_anim(output_path, armature, arti, quick=False): + # enter pose mode + setup_anim(armature, arti) + + # save blend file + # bpy.ops.wm.save_as_mainfile(filepath='/data2/ydengbd/objaverse/test.blend') + + add_floor() + + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (50/180*np.pi, 0, -20/180*np.pi) + # set angle + light.data.angle = 12/180*np.pi + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((0, -3, 1.3)) + camera.rotation_euler = Vector((1.309, 0, 0)) + # set size + camera.data.sensor_width = 36 + # set the camera to be active + bpy.context.scene.camera = camera + + # render the animation + bpy.context.scene.frame_start = 0 + bpy.context.scene.frame_end = 60 + + # make the rendered image square + bpy.context.scene.render.resolution_x = 1920 + bpy.context.scene.render.resolution_y = 1080 + + setup_render() + + if quick: + # reduce the number of samples + bpy.context.scene.cycles.samples = 128 + bpy.context.scene.cycles.preview_samples 
= 128 + bpy.context.scene.cycles.max_bounces = 1 + bpy.context.scene.cycles.min_bounces = 1 + bpy.context.scene.cycles.diffuse_bounces = 1 + bpy.context.scene.cycles.glossy_bounces = 1 + else: + bpy.context.scene.cycles.samples = 1024 + bpy.context.scene.cycles.preview_samples = 1024 + bpy.context.scene.cycles.max_bounces = 4 + bpy.context.scene.cycles.min_bounces = 4 + bpy.context.scene.cycles.diffuse_bounces = 4 + bpy.context.scene.cycles.glossy_bounces = 4 + + # output path + bpy.context.scene.render.filepath = output_path + if output_path.endswith('.mp4'): + # render a mp4 video + bpy.context.scene.render.image_settings.file_format = 'FFMPEG' + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + + bpy.ops.render.render(animation=True, write_still=True) + + +def render_animspin(output_path, co, armature, arti, shadow=True, shading=True, quick=False): + # enter pose mode + print('Arti shape', arti.shape) + bpy.ops.object.mode_set(mode='POSE') + print('total bones', len(armature.pose.bones)) + for i, pose_bone in enumerate(armature.pose.bones): + pose_bone.rotation_mode = 'XYZ' + pose_bone.keyframe_insert(data_path="rotation_euler", frame=0) + + pose_bone.rotation_euler = arti[i] + pose_bone.keyframe_insert(data_path="rotation_euler", frame=30) + + pose_bone.rotation_euler = Vector((0, 0, 0)) + pose_bone.keyframe_insert(data_path="rotation_euler", frame=60) + + pose_bone.rotation_euler = arti[i] + pose_bone.keyframe_insert(data_path="rotation_euler", frame=90) + pose_bone.keyframe_insert(data_path="rotation_euler", frame=150) + + pose_bone.rotation_euler = Vector((0, 0, 0)) + pose_bone.keyframe_insert(data_path="rotation_euler", frame=180) + bpy.ops.object.mode_set(mode='OBJECT') + + # create a new coordinate system at the origin + new_co = add_co(location=(0, 0, 0), rotation=(0, 0, 0), scale=(1, 1, 1)) + # set the object to be the child of the new coordinate system + co.parent = new_co + + # add spin animation to the new coordinate system + new_co.rotation_mode = 'XYZ' + new_co.rotation_euler = (0, 0, 0) + new_co.keyframe_insert(data_path='rotation_euler', index=2, frame=90) + new_co.rotation_euler = (0, 0, 2*np.pi) + new_co.keyframe_insert(data_path='rotation_euler', index=2, frame=150) + + if shadow: + add_floor() + + if shading: + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (0.1*np.pi, 0, 0) + # set angle + light.data.angle = 0.08*np.pi + + else: + # global illumination by create world light + world = bpy.data.worlds.new('World') + bpy.context.scene.world = world + world.use_nodes = True + world_light = world.node_tree.nodes['Background'] + world_light.inputs['Strength'].default_value = 1 + world_light.inputs['Color'].default_value = (1, 1, 1, 1) + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((2, -1.5, 2)) + look_at = Vector((0, 0, 0.36)) + # compute the rotation + camera.rotation_mode = 'QUATERNION' + camera.rotation_quaternion = (camera.location - look_at).to_track_quat('Z', 'Y') + # set size + camera.data.sensor_width = 26 + # set the camera to be active + bpy.context.scene.camera = camera + + + # render the animation + bpy.context.scene.frame_start = 0 + bpy.context.scene.frame_end = 180 
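+ # timeline note: the pose peaks at frame 30 and relaxes by frame 60, is re-applied by
+ # frame 90 and held through 150 while the parent empty spins one full turn,
+ # then everything returns to rest at frame 180 (hence frame_end = 180 above)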
+ + # make the rendered image square + bpy.context.scene.render.resolution_x = 1024 + bpy.context.scene.render.resolution_y = 1024 + + setup_render() + + if quick: + # reduce the number of samples + bpy.context.scene.cycles.samples = 128 + bpy.context.scene.cycles.preview_samples = 128 + bpy.context.scene.cycles.max_bounces = 1 + bpy.context.scene.cycles.min_bounces = 1 + bpy.context.scene.cycles.diffuse_bounces = 1 + bpy.context.scene.cycles.glossy_bounces = 1 + else: + bpy.context.scene.cycles.samples = 512 + bpy.context.scene.cycles.preview_samples = 512 + bpy.context.scene.cycles.max_bounces = 4 + bpy.context.scene.cycles.min_bounces = 4 + bpy.context.scene.cycles.diffuse_bounces = 4 + bpy.context.scene.cycles.glossy_bounces = 4 + + # output path + bpy.context.scene.render.filepath = output_path + if output_path.endswith('.mp4'): + # render a mp4 video + bpy.context.scene.render.image_settings.file_format = 'FFMPEG' + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + + bpy.ops.render.render(animation=True, write_still=True) + +def render_scene(output_path, shadow=True): + + if shadow: + add_floor() + + + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (50/180*np.pi, 0, -20/180*np.pi) + # set angle + light.data.angle = 12/180*np.pi + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((0, -10, 5)) + camera.rotation_euler = Vector((1.22, 0, 0)) + # set size + camera.data.sensor_width = 26 + # set the camera to be active + bpy.context.scene.camera = camera + + + + # make the rendered image square + bpy.context.scene.render.resolution_x = 1920 + bpy.context.scene.render.resolution_y = 1080 + + setup_render() + + + + # output path + # output_path = '/home/ydengbd/objaverse/test.png' + bpy.context.scene.render.filepath = output_path + bpy.ops.render.render(write_still=True) + + +def render_teaser(output_path, shadow=True, quick=False): + + if shadow: + add_floor(back=True) + + # create a sun light + bpy.ops.object.light_add(type='SUN', radius=1, align='WORLD', location=(-1, -1, 3)) + light = bpy.context.object + light.data.energy = 5 + # angle pointing to the origin + light.rotation_euler = (50/180*np.pi, 0, -20/180*np.pi) + # set angle + light.data.angle = 12/180*np.pi + + # create a camera + cam = bpy.data.cameras.new("Camera") + cam_ob = bpy.data.objects.new("Camera", cam) + camera = bpy.data.objects['Camera'] + bpy.context.scene.collection.objects.link(camera) + camera.location = Vector((0, -3, 1.3)) + camera.rotation_euler = Vector((80/180*np.pi, 0, 0)) + # set size + camera.data.sensor_width = 48 + # set the camera to be active + bpy.context.scene.camera = camera + + # render the animation + bpy.context.scene.frame_start = 0 + bpy.context.scene.frame_end = 60 + + # make the rendered image square + bpy.context.scene.render.resolution_x = 2400 + bpy.context.scene.render.resolution_y = 1080 + + setup_render() + + if quick: + # reduce the number of samples + bpy.context.scene.cycles.samples = 128 + bpy.context.scene.cycles.preview_samples = 128 + bpy.context.scene.cycles.max_bounces = 1 + bpy.context.scene.cycles.min_bounces = 1 + bpy.context.scene.cycles.diffuse_bounces = 1 + 
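+ # quick mode trades render quality for speed: 128 samples and single-bounce light paths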
bpy.context.scene.cycles.glossy_bounces = 1 + else: + bpy.context.scene.cycles.samples = 1024 + bpy.context.scene.cycles.preview_samples = 1024 + bpy.context.scene.cycles.max_bounces = 4 + bpy.context.scene.cycles.min_bounces = 4 + bpy.context.scene.cycles.diffuse_bounces = 4 + bpy.context.scene.cycles.glossy_bounces = 4 + + # output path + bpy.context.scene.render.filepath = output_path + if output_path.endswith('.mp4'): + # render a mp4 video + bpy.context.scene.render.image_settings.file_format = 'FFMPEG' + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + + bpy.ops.render.render(animation=True, write_still=True) + +def setup_armature(path, tex=False, save=True): + joints_matrix = torch.load(os.path.join(path, 'joints.pt')) + connectivity = torch.load(os.path.join(path, 'conns.pt')) + skinning_weights = torch.load(os.path.join(path, 'skins.pt')) + obj_file_path = os.path.join(path, 'object.obj') + + # bpy.ops.wm.obj_import(filepath=obj_file_path) + add_mesh(obj_file_path, tex=tex) + mesh_object = bpy.context.selected_objects[0] + + # pack textures + bpy.ops.file.pack_all() + + temp = torch.tensor(joints_matrix)[:, 1].clone() + joints_matrix[:, 1] = -joints_matrix[:, 2] + joints_matrix[:, 2] = temp + + bpy.ops.object.armature_add() + armature_obj = bpy.context.object + + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.armature.select_all(action='SELECT') + bpy.ops.armature.delete() + + world_matrix = Matrix([[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1]]) + armature_obj.matrix_world = world_matrix + + bone_dict = {} + + i_name = 0 + + for i in range(len(joints_matrix)): + + if connectivity[i] == i: + continue + bone_name = str(i_name) + bone = armature_obj.data.edit_bones.new(bone_name) + bone.head = joints_matrix[connectivity[i]].cpu().numpy() + bone.tail = joints_matrix[i].cpu().numpy() + bone_dict[bone_name] = bone + i_name += 1 + + for bone_name, bone in bone_dict.items(): + # Find parent bone by checking if current bone's head matches any other bone's tail + for other_bone_name, other_bone in bone_dict.items(): + if other_bone != bone and bone.head == other_bone.tail: + bone.parent = other_bone + break + + assert i_name == skinning_weights.shape[1] + + for i, skinning_weight in enumerate(skinning_weights): + # print("skinning_weight", skinning_weight) + vertex_index = i + for j,weight in enumerate(skinning_weight): + bone_name = str(j) + bone_weight = float(weight) + + vertex_group_name = f"{bone_name}" + vertex_group = mesh_object.vertex_groups.get(vertex_group_name) + if vertex_group is None: + vertex_group = mesh_object.vertex_groups.new(name=vertex_group_name) + vertex_group.add([vertex_index], bone_weight, 'ADD') + + # for obj in bpy.context.scene.objects: + # if obj.type == 'MESH': + modifier = mesh_object.modifiers.new(name="Armature", type='ARMATURE') + modifier.object = armature_obj + modifier.use_vertex_groups = True + print("Armature modifier added to mesh:", mesh_object.name) + + bpy.ops.object.mode_set(mode='OBJECT') + if save: + bpy.ops.wm.save_as_mainfile(filepath= os.path.join(path, 'blender_output.blend')) + + return armature_obj + +def reload_tensor_skinning(data, bone_name_list): + + # with open(json_file, "r") as f: + # skinning_data = json.load(f) + + armature_obj = bpy.data.objects.get("Armature") + if not armature_obj: + print("Error: Armature object 'Armature' not found.") + return + + # 将所有网格对象放置在骨骼对象的子集中 + count = 0 + for obj in bpy.context.scene.objects: + if obj.type == 'MESH': + obj.parent 
= armature_obj + count += 1 + + print("total mesh count:", count) + + for obj in bpy.context.scene.objects: + vertex_index = 0 + if obj.type == 'MESH': + # mesh_name = obj.name + # if mesh_name in skinning_data: + # skinning_info = skinning_data[mesh_name] + # if "weight" in skinning_info: + # print("Applying skinning data for mesh:", mesh_name) + # vertex_index = 0 + # for vertex_weight in skinning_info["weight"]: + # for bone_name, weight_value in vertex_weight.items(): + # vertex_group = obj.vertex_groups.get(bone_name) + # if vertex_group is None: + # vertex_group = obj.vertex_groups.new(name=bone_name) + # print("Vertex group created:", bone_name) + # vertex_group.add([vertex_index], weight_value, 'REPLACE') + # vertex_index += 1 + # else: + # print("No skinning data found for mesh:", mesh_name) + + for i, v in enumerate(obj.data.vertices): + v_co = np.array(v.co) + pc = data['pc'][:, :3].numpy() + y_max = pc[:, 1].max() + pc = pc + np.array([0, y_max, 0]) + pc = pc / 2 + dist = np.linalg.norm(pc - v_co, axis=1) + # min_idx = np.argmin(dist) + # sort, and then get top 3 index + min_idx_list = np.argsort(dist)[:3] + + for min_idx in min_idx_list: + # get inverse distance weight + interpolate_weight = np.square(1 / dist[min_idx]) / np.square(1 / dist[min_idx_list]).sum() + + for idx, j in enumerate(data['skins_index'][min_idx]): + if j == -1: + break + bone_name = bone_name_list[j] + vertex_group = obj.vertex_groups.get(str(int(bone_name))) + if vertex_group is None: + vertex_group = obj.vertex_groups.new(name=str(int(bone_name))) + print("Vertex group created:", bone_name) + + vertex_group.add([i], interpolate_weight * data['skins_weight'][min_idx][idx], 'ADD') + + + for obj in bpy.context.scene.objects: + if obj.type == 'MESH': + modifier = obj.modifiers.new(name="Armature", type='ARMATURE') + modifier.object = armature_obj + modifier.use_vertex_groups = True + print("Armature modifier added to mesh:", obj.name) + +def reload_tensor(data, root='data', save=True): + joints_matrix = data['joints'].clone() + connectivity = data['conns'] + obj_file_path = os.path.join(root, data['name'], 'object.obj') + + # bpy.ops.wm.obj_import(filepath=obj_file_path) + add_mesh(obj_file_path) + mesh_object = bpy.context.selected_objects[0] + + # pack textures + bpy.ops.file.pack_all() + + y_max = data['pc'][:, 1].max() + joints_matrix = joints_matrix + torch.tensor([0, y_max, 0]) + joints_matrix = joints_matrix / 2 + + temp = joints_matrix[:, 1].clone() + joints_matrix[:, 1] = -joints_matrix[:, 2] + joints_matrix[:, 2] = temp + + bpy.ops.object.armature_add() + armature_obj = bpy.context.object + + + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.armature.select_all(action='SELECT') + bpy.ops.armature.delete() + + world_matrix = Matrix([[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1]]) + armature_obj.matrix_world = world_matrix + + bone_dict = {} + bone_name_list = np.zeros(data['bones_num']) + i_name = 0 + + for i in range(len(joints_matrix)): + + if connectivity[i] == i: + continue + bone_name = str(i_name) + bone = armature_obj.data.edit_bones.new(bone_name) + bone.head = joints_matrix[connectivity[i]].cpu().numpy() + bone.tail = joints_matrix[i].cpu().numpy() + bone_dict[bone_name] = bone + for j, skinbone in enumerate(data['bones']): + if torch.equal(skinbone[:3], data['joints'][connectivity[i]]) and torch.equal(skinbone[3:], data['joints'][i]): + bone_name_list[j] = i_name + i_name += 1 + + for bone_name, bone in bone_dict.items(): + # Find parent bone by checking if current bone's 
head matches any other bone's tail + for other_bone_name, other_bone in bone_dict.items(): + if other_bone != bone and bone.head == other_bone.tail: + bone.parent = other_bone + break + + print(bone_name_list) + + reload_tensor_skinning(data, bone_name_list) + + print("Armature modifier added to mesh:", mesh_object.name) + + bpy.ops.object.mode_set(mode='OBJECT') + if save: + bpy.ops.wm.save_as_mainfile(filepath= os.path.join('/data2/ydengbd/Anymate/Anymate/data', data['name'], 'blender_output.blend')) + + return armature_obj + +def load_blender(blender_path): + + bpy.ops.wm.read_homefile(use_empty=True) + # bpy.ops.wm.append(directory=object_path, link=False) + # load_object(object_path) + bpy.ops.wm.open_mainfile(filepath=blender_path) + armature_obj = [] + mesh_obj = [] + for obj in bpy.context.scene.objects: + if obj.type == "ARMATURE": + armature_obj.append(obj) + if obj.type == "MESH": + mesh_obj.append(obj) + + print('mesh obj:', len(mesh_obj)) + + + + # start retrieve the information of mesh, skining and rigging + + #1. retrieve the information of rigging, save the world matrix of the amature object + total_armature_info = {} + joints_matrix = [] + bone_dict = {} + parent_name= [] + bone_count = 0 + for obj in armature_obj: + # depsgraph = bpy.context.evaluated_depsgraph_get() + # obj = obj.evaluated_get(depsgraph) + armature_info = {} + armature_info["world_matrix"] = [list(row) for row in obj.matrix_world.copy()] + translation = obj.matrix_world.translation + for bone in obj.pose.bones: + + joints_matrix.append(np.array(list((obj.matrix_world.to_3x3() @ bone.head+translation).copy()))) + + if bone.parent: + parent_name.append(bone.parent.name) + else: + parent_name.append('root') + bone_dict[bone.name] = bone_count + bone_count += 1 + connectivity = torch.zeros(bone_count, dtype=torch.int32) + + for i, bone_name in enumerate(parent_name): + if bone_name == 'root': + connectivity[i] = i + else: + connectivity[i] = bone_dict[bone_name] + joints_matrix = torch.from_numpy(np.array(joints_matrix)) + + skinning_weight = torch.zeros(len(mesh_obj[0].data.vertices), joints_matrix.shape[0]) + + vertex_index = 0 + for obj in mesh_obj: + vertex_groups = obj.vertex_groups + + + for vertex in obj.data.vertices: + vertex_info = {} + for group in vertex.groups: + name = vertex_groups[group.group].name + + weight = group.weight + skinning_weight[vertex.index][bone_dict[name]] = weight + + obj_save_path = blender_path.replace('.blend', '.obj') + bpy.ops.wm.obj_export(filepath=obj_save_path, export_materials=False) + return joints_matrix,connectivity, skinning_weight + + +def save_scene(scene_path): + # export the scene as a glb file + if scene_path.endswith('.glb'): + bpy.ops.export_scene.gltf(filepath=scene_path) + bpy.ops.wm.save_as_mainfile(filepath=scene_path.replace('.glb', '.blend')) + elif scene_path.endswith('.blend'): + bpy.ops.wm.save_as_mainfile(filepath=scene_path) + elif scene_path.endswith('.obj'): + bpy.ops.wm.obj_export(filepath=scene_path, export_materials=False) + else: + raise ValueError(f"Unsupported file extension: {scene_path}") + +if __name__ == '__main__': + # load the mesh + empty() + add_mesh('/home/ydengbd/objaverse/obj/0001.obj') + # load the joints + joints_matrix = np.load('/home/ydengbd/objaverse/joints/0001.npy') + add_joint(joints_matrix) + # load the connections + con_index = np.load('/home/ydengbd/objaverse/connections/0001.npy') + add_conn(con_index) + # load the skin \ No newline at end of file diff --git a/Anymate/utils/train_utils.py 
b/Anymate/utils/train_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..23a96990bccd52ead08e6e329440ed850edfee1d --- /dev/null +++ b/Anymate/utils/train_utils.py @@ -0,0 +1,406 @@ +import os +import numpy as np +from tqdm import tqdm + +import torch +import torch.backends.cudnn as cudnn +from torch.utils.tensorboard import SummaryWriter +from torch.utils.data import DataLoader + +from Anymate.dataset import AnymateDataset, my_collate +from Anymate.model import EncoderDecoder +from Anymate.utils.loss_utils import cross_entropy_with_probs_batch, cos_loss, cos_loss_clamp, chamfer_distance_with_average +from Anymate.utils.vol_utils import get_co, get_gt, extract_keypoints +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.utils.data.distributed import DistributedSampler + +import point_cloud_utils as pcu +from sklearn.cluster import DBSCAN +from diffusers import DDPMScheduler, DDIMScheduler +import torch.nn.functional as F +from Anymate.utils.diffusion_utils import my_collate_diff, randn_tensor + + +def ddp_setup(rank: int, world_size: int, port: int): + """ + Args: + rank: Unique identifier of each process + world_size: Total number of processes + """ + os.environ["MASTER_ADDR"] = "localhost" + os.environ["MASTER_PORT"] = str(port) + torch.cuda.set_device(rank) + init_process_group(backend="nccl", rank=rank, world_size=world_size) + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self): + self.reset() + + def reset(self): + self.val = 0.0 + self.avg = 0.0 + self.sum = 0.0 + self.count = 0.0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + def accumulate(self, val, n=1): + self.val = val + self.sum += val + self.count += n + self.avg = self.sum / self.count + +def save_checkpoint(state, is_best, checkpoint='checkpoint', filename='model_best.pth.tar', snapshot=None): + filepath = os.path.join(checkpoint, filename) + if is_best: + torch.save(state, filepath) + + if snapshot and state['epoch'] % snapshot == 0: + torch.save(state, os.path.join(checkpoint, 'checkpoint_{}.pth.tar'.format(state['epoch']))) + +def train_model(rank, world_size, config, args, shared_dict, port=12355): + ddp_setup(rank, world_size, port) + lowest_loss = 1e20 + model_config = config['model'] + model = EncoderDecoder(device=f'cuda:{rank}', dtype=torch.float32, **model_config) + model.to(f'cuda:{rank}') + + if rank == 0: + print('only_embed', model.only_embed) + print('return_latents', model.return_latents) + print(model) + if not args.finetune: + model.encoder.requires_grad_(False) + model = DDP(model, device_ids=[rank]) + optimizer_config = config['optimizer'] + if args.finetune: + optimizer = torch.optim.Adam(model.module.parameters(), **optimizer_config) + else: + if args.encoder == 'miche': + optimizer = torch.optim.Adam(model.module.decoder.parameters(), **optimizer_config) + elif args.encoder == 'bert': + optimizer = torch.optim.Adam(list(model.module.decoder.parameters()) + list(model.module.point_proj.parameters()), **optimizer_config) + # optionally resume from a checkpoint + if args.resume: + try: + print("=> loading checkpoint '{}'".format(args.resume)) + checkpoint = torch.load(args.resume) + args.start_epoch = checkpoint['epoch'] + lowest_loss = checkpoint['lowest_loss'] + model.module.load_state_dict(checkpoint['state_dict'], strict=True) + + print("=> 
loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch'])) + except: + print("=> no checkpoint found at '{}'".format(args.resume)) + + cudnn.benchmark = True + print(' Total params: %.2fM' % (sum(p.numel() for p in optimizer.param_groups[0]['params']) / 1000000.0)) + my_collate_func = my_collate_diff if args.mode == 'diffusion' else my_collate + if world_size > 1: + if not args.split: + train_dataset = shared_dict['train_dataset'] + train_sampler = DistributedSampler(train_dataset, num_replicas=world_size, rank=rank) + train_loader = DataLoader(train_dataset, batch_size=args.train_batch, sampler=train_sampler, collate_fn= my_collate_func) + else: + train_dataset = AnymateDataset(name=args.trainset + f'_{rank}', root=args.root) #should changed to dpp version + train_loader = DataLoader(train_dataset, batch_size=args.train_batch, shuffle=True, collate_fn= my_collate_func) + else: + train_dataset = AnymateDataset(name=args.trainset, root=args.root) + train_loader = DataLoader(train_dataset, batch_size=args.train_batch, shuffle=True, collate_fn= my_collate_func) + + if rank == 0: + test_loader = DataLoader(AnymateDataset(name=args.testset, root=args.root), batch_size=args.test_batch, shuffle=False, collate_fn= my_collate_func ) + + if not args.schedule: + args.schedule = [args.epochs//2] + scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, args.schedule, gamma=args.gamma) + # step the scheduler to the start epoch + for _ in range(args.start_epoch): + scheduler.step() + if rank == 0: + logger = SummaryWriter(log_dir=args.logdir) + print('start ') + print('test_frequency', args.test_freq) + print('start from epoch', args.start_epoch) + # start training + for epoch in range(args.start_epoch, args.epochs): + test_dict = None + is_best = False + lr = scheduler.get_last_lr() + if rank == 0: + print('\nEpoch: %d | LR: %.8f' % (epoch + 1, lr[0])) + train_loss, grad_norm = train(train_loader, model, optimizer, args) + if rank == 0 and (epoch == 0 or (epoch+1)%args.test_freq== 0): + print('Testing epoch', epoch+1) + test_dict = test(test_loader, model, args, world_size=world_size) + + + scheduler.step() + if rank == 0: + print('Epoch{:d}. train_loss: {:.6f}.'.format(epoch + 1, train_loss)) + print('Epoch{:d}. grad_norm: {:.6f}.'.format(epoch + 1, grad_norm)) + info = {'train_loss': train_loss, 'grad_norm': grad_norm, 'lr': lr[0]} + # print('Epoch{:d}. val_loss: {:.6f}.'.format(epoch + 1, val_loss)) + if test_dict is not None: + for key, value in test_dict.items(): + print('Epoch{:d}. 
{:s}: {:.6f}.'.format(epoch + 1, key, value)) + + test_loss = test_dict['test loss'] if not args.mode == 'diffusion' else test_dict['chamfer'] + is_best = test_loss < lowest_loss + lowest_loss = min(test_loss, lowest_loss) + for key, value in test_dict.items(): + info[key] = value + + for tag, value in info.items(): + logger.add_scalar(tag, value, epoch+1) + save_dict = {'epoch': epoch + 1, 'state_dict': model.module.state_dict(), 'lowest_loss': lowest_loss, 'optimizer': optimizer.state_dict(), 'model_config': model_config} + save_checkpoint(save_dict, is_best=is_best, checkpoint=args.checkpoint, snapshot=args.epochs//20) + +def get_criterion(args): + if args.loss == 'cos': + criterion = cos_loss + elif args.loss == 'ce': + criterion = cross_entropy_with_probs_batch + elif args.loss == 'cos_clamp': + criterion = cos_loss_clamp + else: + criterion = chamfer_distance_with_average + return criterion + +def get_train_loss(model, data, args): + criterion = get_criterion(args) + loss = 0.0 + if args.mode == 'skin': + y_pred, idx = model(data, downsample=1024) + y_pred = torch.softmax(y_pred, dim=-1) + y = data['skins'].to(args.device) + y = y[:, idx] + loss = criterion(y_pred, y) + + elif args.mode == 'conn': + y_pred = model(data, args.device) + y_pred = torch.softmax(y_pred, dim=-1) + y = data['conns'].to(args.device) + y = y[:, :y_pred.shape[1], :y_pred.shape[1]].float() + loss = criterion(y_pred, y) + + elif args.mode == 'joints': # joints mode + if args.decoder == 'transformer_latent': + y_pred = model(data, args.device) + joints_gt = data['joints'].to(args.device) + loss = 0.0 + for i in range(joints_gt.shape[0]): + joints_gt_i = joints_gt[i,:data['joints_num'][i], :3] + loss += criterion(y_pred[i:i+1], joints_gt_i.unsqueeze(0)) + loss /= joints_gt.shape[0] + + elif args.decoder == 'triplane' or args.decoder == 'implicit_transformer': + criterion = torch.nn.BCEWithLogitsLoss() + y_pred = model(data, args.device, downsample=True) + joints_gt = data['joints'].to(args.device) + for i in range(joints_gt.shape[0]): + joints_gt_i = joints_gt[i,:data['joints_num'][i], :3] + vol = get_co(data['vox'][i]) + if data['vox'][i].shape[0] > 50000: + vol = vol[y_pred[i][1]] + gt = get_gt(vol.to(args.device), joints_gt_i) + loss += criterion(y_pred[i][0].squeeze(-1).unsqueeze(0), gt.unsqueeze(0)) + else: + gt = get_gt(vol.to(args.device), joints_gt_i) + loss += criterion(y_pred[i].squeeze(-1).unsqueeze(0), gt.unsqueeze(0)) + loss /= joints_gt.shape[0] + + elif args.mode == 'diffusion': + noise_scheduler = DDIMScheduler(num_train_timesteps=args.num_train_step) + + samples = data['joints_repeat'].to(model.device).float() + #use 256 input joints + samples = samples[...,:args.num_training_points,:] + + samples = samples.to(model.device) + noise = torch.randn(samples.shape, device=samples.device) + assert samples.device == noise.device + bs = samples.shape[0] + + # Sample a random timestep for each image + timesteps = torch.randint( + 0, noise_scheduler.config.num_train_timesteps, (bs,), device=samples.device, + dtype=torch.int64 + ) + + noisy_joints = noise_scheduler.add_noise(samples, noise, timesteps) + noisy_joints = noisy_joints.to(model.device) + noisy_joints = noisy_joints.permute(0, 2, 1) + + noise_pred = model(data, noisy_joints=noisy_joints, timesteps = timesteps) + noise_pred = noise_pred.permute(0, 2, 1) + loss = F.mse_loss(noise_pred, noise) + + return loss + +def train(train_loader, model, optimizer, args): + if not args.finetune: + model.train() + model.module.encoder.eval() + else: + 
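+ # finetune mode: the encoder is not frozen here, so keep the whole module (encoder included) in train mode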
model.train() + loss_meter = AverageMeter() + grad_norm_meter = AverageMeter() + + for data in tqdm(train_loader): + loss = get_train_loss(model, data, args) + optimizer.zero_grad() + loss.backward() + grad_norm = 0 + + for p in optimizer.param_groups[0]['params']: + grad_norm += p.grad.data.norm(2).item() + grad_norm_meter.update(grad_norm) + optimizer.step() + loss_meter.update(loss.item()) + + return loss_meter.avg, grad_norm_meter.avg + +def test(test_loader, model, args, world_size=1): + model.eval() + assert args.mode in ['skin', 'joints', 'conn', 'diffusion'], 'mode should be choose from [skin, joints, conn, diffusion], got {}'.format(args.mode) + + if args.mode == 'skin' or args.mode == 'conn': + loss_meter = AverageMeter() + cos_sim_meter = AverageMeter() + cos_clamp_meter = AverageMeter() + for i, data in enumerate(tqdm(test_loader)): + if world_size > 1 and i > 1000: + break + with torch.no_grad(): + y_pred = model(data, args.device) + y_pred = torch.softmax(y_pred, dim=-1) + + if args.mode == 'skin': + y = data['skins'].to(args.device) + elif args.mode == 'conn': + y = data['conns'].to(args.device) + y = y[:, :y_pred.shape[1], :y_pred.shape[1]].float() + + loss = 0.0 + loss = cross_entropy_with_probs_batch(y_pred, y) + loss_meter.update(loss.item()) + cos_sim = cos_loss(y_pred, y) + cos_sim_meter.update(cos_sim.mean().item()) # 1 - loss.item() + cos_clamp = cos_loss_clamp(y_pred, y) + cos_clamp_meter.update(cos_clamp.mean().item()) + + loss_dict = {'test loss': loss_meter.avg, 'cos_sim': cos_sim_meter.avg, 'cos_clamp': cos_clamp_meter.avg} + # get the loss of the joints prediction + elif args.mode == 'joints': + if args.decoder == 'transformer_latent': + loss_meter = AverageMeter() + emd_meter = AverageMeter() + for i, data in tqdm(enumerate(test_loader)): + if world_size > 1 and i > 1000: + break + with torch.no_grad(): + y_pred = model(data, args.device) + joints_gt = data['joints'].to(args.device) + + loss = 0.0 + emd = 0.0 + for i in range(joints_gt.shape[0]): + joints_gt_i = joints_gt[i,:data['joints_num'][i], :3] + y_pred_i = y_pred[i] + + y_pred_i = y_pred[i].detach().cpu().numpy() + clustering = DBSCAN(eps=0.03, min_samples=1).fit(y_pred_i) # Consider add eps and min_samples as arguments + cluster_centers = [] + for cluster in set(clustering.labels_): + cluster_centers.append(y_pred_i[clustering.labels_ == cluster].mean(axis=0)) + y_pred_i = torch.from_numpy(np.array(cluster_centers)).to(args.device) + + if y_pred_i.shape[0] < 2: + print(data['name'][i] + ' has less than 2 points') + continue + loss += chamfer_distance_with_average(y_pred_i.unsqueeze(0), joints_gt_i.unsqueeze(0)) + emd_i, pi = pcu.earth_movers_distance(y_pred_i.cpu().numpy().astype(np.float64), joints_gt_i.cpu().numpy().astype(np.float64)) + emd += emd_i + if loss == 0 or emd == 0: + continue + loss /= joints_gt.shape[0] + loss_meter.update(loss.item()) + emd_meter.update(emd) + loss_dict = {'test loss': loss_meter.avg, 'emd': emd_meter.avg} + + elif args.decoder == 'triplane' or 'implicit_transformer': + loss_meter = AverageMeter() + emd_meter = AverageMeter() + chamfer_meter = AverageMeter() + criterion = torch.nn.BCEWithLogitsLoss() + for data in tqdm(test_loader): + with torch.no_grad(): + y_pred = model(data, args.device) + joints_gt = data['joints'].to(args.device) + loss = 0.0 + emd = 0.0 + chamfer = 0.0 + for i in range(joints_gt.shape[0]): + joints_gt_i = joints_gt[i,:data['joints_num'][i], :3] + vol = get_co(data['vox'][i]) + gt = get_gt(vol.to(args.device), joints_gt_i) + loss += 
criterion(y_pred[i].squeeze(-1).unsqueeze(0), gt.unsqueeze(0)) + key_points = extract_keypoints(y_pred[i].cpu(), data['vox'][i]) + if len(key_points) < 2: + continue + key_points = key_points / 32 - 1 + chamfer += chamfer_distance_with_average(torch.from_numpy(key_points).unsqueeze(0).to(joints_gt_i.device), joints_gt_i.unsqueeze(0)) + emd_i, _ = pcu.earth_movers_distance(key_points.astype(np.float64), joints_gt_i.cpu().numpy().astype(np.float64)) + emd += emd_i + if loss == 0 or emd == 0 or chamfer == 0: + continue + loss /= joints_gt.shape[0] + loss_meter.update(loss.item()) + emd_meter.update(emd) + chamfer_meter.update(chamfer.item()) + loss_dict = {'test loss': loss_meter.avg, 'emd': emd_meter.avg, 'chamfer': chamfer_meter.avg} + + elif args.mode == 'diffusion': + loss_meter = AverageMeter() + emd_meter = AverageMeter() + chamfer_meter = AverageMeter() + generator=torch.Generator(device='cpu').manual_seed(args.seed+1) + scheduler = DDIMScheduler(num_train_timesteps=args.num_train_step) + scheduler.set_timesteps(args.num_train_step) + points_shape = [args.test_batch, args.num_training_points, 3] + for data in tqdm(test_loader): + joints_gt = data['joints'].to(dtype=torch.float64) + points_noise = randn_tensor(points_shape, generator=generator) + points = points_noise.permute(0, 2, 1).to(model.device) + for t in scheduler.timesteps: + with torch.no_grad(): + time_steps = torch.ones(args.test_batch, 1, dtype=torch.long) * t + time_steps = time_steps.to(model.device) + model_output = model(data, noisy_joints=points, timesteps = time_steps) + + points = scheduler.step(model_output, t, points, generator=generator).prev_sample + points = points.permute(0, 2, 1).cpu() + + chamfer_sum = 0.0 + emd_sum = 0.0 + + for i in range(args.test_batch): + joints_gt_i = joints_gt[i,:data['joints_num'][i], :3] + points_i = points[i] + points_i = points_i.reshape( -1, 3) + emd, p = pcu.earth_movers_distance(points_i.cpu().numpy(),joints_gt_i[:,:3].cpu().numpy()) + emd_sum += emd + chamfer_sum += chamfer_distance_with_average(points_i.unsqueeze(0),joints_gt_i[:,:3].unsqueeze(0)) + + emd_meter.update(emd_sum) + chamfer_meter.update(chamfer_sum.item()) + loss_dict = {'chamfer': chamfer_meter.avg, 'emd': emd_meter.avg} + + return loss_dict diff --git a/Anymate/utils/ui_utils.py b/Anymate/utils/ui_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..341ae52f6f828b950dddda587980b234e14db716 --- /dev/null +++ b/Anymate/utils/ui_utils.py @@ -0,0 +1,284 @@ +import trimesh +import numpy as np +import torch +import os +import matplotlib.pyplot as plt +import gradio as gr +import time +bone_colors = plt.get_cmap('tab10') + +from Anymate.utils.utils import load_checkpoint, get_joint, get_connectivity, get_skinning +from Anymate.utils.dataset_utils import obj2mesh +from Anymate.args import anymate_args +# from Anymate.utils.render_utils import empty, add_co, add_mesh, add_joint, add_conn, add_skin, setup_armature + +def visualize_results(mesh_file=None, joints=None, conns=None, skins=None): + # Create a scene with both original and processed meshes + scene = trimesh.Scene() + vis_file = mesh_file.replace('object.obj', 'vis.glb') + + if mesh_file is not None: + # Load the original mesh (in blue) with transparency + # original_mesh = trimesh.load(mesh_file) + original_mesh = obj2mesh(mesh_file) + if skins is not None: + # pdb.set_trace() + # Get per-vertex colors based on skinning weights + vertex_colors = np.zeros((len(original_mesh.vertices), 4)) + + # Convert skinning weights to numpy if needed 
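+ # skins is expected to be a (num_vertices, num_bones) weight matrix; convert it so the numpy blending below works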
+ if isinstance(skins, torch.Tensor): + skins = skins.cpu().numpy() + + # For each bone, blend colors based on skinning weights + for bone_idx in range(skins.shape[1]): + bone_color = np.array(bone_colors(bone_idx % 10)) # Get base color for this bone + weights = skins[:, bone_idx] + vertex_colors += np.outer(weights, bone_color) # Blend weighted colors + + # Normalize and clip colors + vertex_colors = np.clip(vertex_colors, 0, 1) + + # Convert to vertex colors and set alpha + vertex_colors = (vertex_colors * 255).astype(np.uint8) + vertex_colors[:, 3] = 255 # Set alpha to 100 for transparency + # print(vertex_colors.shape) + # print(vertex_colors.max(axis=0), vertex_colors.min(axis=0), vertex_colors.mean(axis=0)) + + # Apply colors directly to vertices + original_mesh.visual.vertex_colors = vertex_colors + + # face_colors = np.zeros((len(original_mesh.faces), 4)) + + # processed_mesh = trimesh.load(mesh_file) + processed_mesh = obj2mesh(mesh_file) + # Assign vertex colors from original_mesh to processed_mesh + # Since they might have different number of vertices, we need to find closest vertices + + # Get vertices from both meshes + orig_vertices = original_mesh.vertices + proc_vertices = processed_mesh.vertices + + # For each vertex in processed_mesh, find the closest vertex in original_mesh + closest_indices = [] + for proc_vertex in proc_vertices: + # Calculate distances to all original vertices + distances = np.linalg.norm(orig_vertices - proc_vertex, axis=1) + # Find index of closest vertex + closest_idx = np.argmin(distances) + closest_indices.append(closest_idx) + + proc_vertex_colors = original_mesh.visual.vertex_colors[closest_indices] + processed_mesh.visual.vertex_colors = proc_vertex_colors + original_mesh = processed_mesh + + else: + original_mesh.visual.face_colors = [255, 255, 255, 100] # Blue with alpha=100 for transparency + scene.add_geometry(original_mesh) + + if joints is not None: + # create a sphere for each joint + for position in joints: + sphere = trimesh.primitives.Sphere(radius=0.02) + sphere.visual.face_colors = [255, 0, 0, 255] # Red with transparency + sphere.apply_translation(position.cpu().numpy()) + scene.add_geometry(sphere) + + if conns is not None: + # create a line for each connectivity + for i, conn in enumerate(conns): + if i == conn: + continue + # Create cylinder between joints + points = [joints[i].cpu().numpy(), joints[conn].cpu().numpy()] + direction = points[1] - points[0] + height = np.linalg.norm(direction) + cylinder = trimesh.primitives.Cylinder(radius=0.01, height=height) + + # Calculate rotation matrix to align cylinder with direction + direction = direction / height # Normalize direction vector + up_vector = np.array([0, 0, 1]) + rotation_matrix = trimesh.geometry.align_vectors(up_vector, direction) + + # Apply rotation and translation to cylinder + cylinder.apply_transform(rotation_matrix) + cylinder.apply_translation(points[0] + direction * height/2) + + cylinder.visual.face_colors = [0, 0, 255, 255] # Blue + scene.add_geometry(cylinder) + + # Export the scene + scene.export(vis_file) + return vis_file + + +def process_mesh_to_pc(obj_path, sample_num = 8192, save_path = None): + # mesh_list : list of trimesh + try : + mesh = trimesh.load_mesh(obj_path) + + points, face_idx = mesh.sample(sample_num, return_index=True) + normals = mesh.face_normals[face_idx] + + pc_normal = np.concatenate([points, normals], axis=-1, dtype=np.float16) + + + if save_path is not None: + np.save(save_path, pc_normal) + + return pc_normal + except Exception 
as e: + print(f"Error: {obj_path} {e}") + return None + + +def normalize_mesh(mesh): + # Check if input is a scene with multiple meshes + if isinstance(mesh, trimesh.Scene): + # Combine all meshes in the scene into a single mesh + meshes = [] + for geometry in mesh.geometry.values(): + if isinstance(geometry, trimesh.Trimesh): + # Transform mesh to scene coordinates + transform = mesh.graph[mesh.graph.nodes_geometry[0]][0] + geometry.apply_transform(transform) + meshes.append(geometry) + + # Combine all meshes + mesh = trimesh.util.concatenate(meshes) + + # Get vertices and compute bounding box + vertices = mesh.vertices + bbox_min = vertices.min(axis=0) + bbox_max = vertices.max(axis=0) + + # Find center and scale + center = (bbox_min + bbox_max) * 0.5 + scale = 2.0 / (bbox_max - bbox_min).max() + + # Center and scale vertices + vertices = (vertices - center) * scale + + # Create new mesh with normalized vertices + normalized_mesh = trimesh.Trimesh(vertices=vertices, + faces=mesh.faces, + face_normals=mesh.face_normals, + vertex_normals=mesh.vertex_normals, + process=False) + + # # Copy texture from original mesh if it exists + # if hasattr(mesh, 'visual') and hasattr(mesh.visual, 'material'): + # print("copy material") + # normalized_mesh.visual.material = mesh.visual.material + # if hasattr(mesh, 'visual') and hasattr(mesh.visual, 'texture'): + # print("copy texture") + # normalized_mesh.visual.texture = mesh.visual.texture + # if hasattr(mesh, 'visual') and hasattr(mesh.visual, 'uv'): + # print("copy uv") + # normalized_mesh.visual.uv = mesh.visual.uv + + return normalized_mesh + + +def vis_joint(normalized_mesh_file, joints): + if normalized_mesh_file is None or joints is None: + return None, None + vis_file = visualize_results(mesh_file=normalized_mesh_file, joints=joints) + return vis_file, vis_file + +def vis_connectivity(normalized_mesh_file, joints, conns): + if normalized_mesh_file is None or joints is None or conns is None: + return None, None + vis_file = visualize_results(mesh_file=normalized_mesh_file, joints=joints, conns=conns) + return vis_file, vis_file + +def vis_skinning(normalized_mesh_file, joints, conns, skins): + if normalized_mesh_file is None or joints is None or conns is None or skins is None: + return None, None + vis_file = visualize_results(mesh_file=normalized_mesh_file, joints=joints, conns=conns, skins=skins) + return vis_file, vis_file + +def prepare_blender_file(normalized_mesh_file): + if normalized_mesh_file is None: + return None + + if not os.path.exists(normalized_mesh_file) or not os.path.exists(normalized_mesh_file.replace('object.obj', 'joints.pt')) or not os.path.exists(normalized_mesh_file.replace('object.obj', 'conns.pt')) or not os.path.exists(normalized_mesh_file.replace('object.obj', 'skins.pt')): + return None + + folder = normalized_mesh_file.replace('object.obj', '') + abs_folder = os.path.abspath(folder) + os.system(f"python Render.py --path {abs_folder}") + + blender_file = os.path.join(folder, 'blender_output.blend') + while not os.path.exists(blender_file): + time.sleep(1) + + return blender_file + + +def process_input(mesh_file): + """ + Function to handle input changes and initialize visualization + + Args: + mesh_file: Path to input mesh file + joint_checkpoint: Path to joint prediction checkpoint + conn_checkpoint: Path to connectivity prediction checkpoint + skin_checkpoint: Path to skinning prediction checkpoint + + Returns: + vis_file: Path to visualization file + """ + + # For now just visualize the input mesh + if mesh_file 
is None: + return None, None, None, None, None, None, None, None + + # make folder for tmp files + os.makedirs(f"Anymate/tmp/{mesh_file.split('/')[-1].replace('.obj', '')}", exist_ok=True) + + normalized_mesh = normalize_mesh(obj2mesh(mesh_file)) + normalized_mesh_file = f"Anymate/tmp/{mesh_file.split('/')[-1].replace('.obj', '')}/object.obj" + normalized_mesh.export(normalized_mesh_file) + + # normalized_mesh.export(mesh_file) + + vis_file = visualize_results(mesh_file=normalized_mesh_file) + pc = process_mesh_to_pc(normalized_mesh_file) + pc = torch.from_numpy(pc).to(anymate_args.device).to(torch.float32) + + # print(pc.shape, pc.max(dim=0), pc.min(dim=0)) + + return normalized_mesh_file, vis_file, vis_file, None, pc, None, None, None + + +def get_model(checkpoint): + model = load_checkpoint(checkpoint, anymate_args.device, anymate_args.num_joints) + return model, True + +def get_result_joint(mesh_file, model, pc, eps=0.03, min_samples=1): + return get_joint(pc, model, device=anymate_args.device, save=mesh_file.replace('object.obj', 'joints.pt'), eps=eps, min_samples=min_samples) + +def get_result_connectivity(mesh_file, model, pc, joints): + return get_connectivity(pc, joints, model, device=anymate_args.device, save=mesh_file.replace('object.obj', 'conns.pt')) + +def get_result_skinning(mesh_file, model, pc, joints, conns): + # mesh = trimesh.load(mesh_file) + mesh = obj2mesh(mesh_file) + vertices = torch.from_numpy(mesh.vertices).to(anymate_args.device).to(torch.float32) + vertex_normals = torch.from_numpy(mesh.vertex_normals).to(anymate_args.device).to(torch.float32) + vertices = torch.cat([vertices, vertex_normals], dim=-1) + return get_skinning(pc, joints, conns, model, vertices=vertices, device=anymate_args.device, save=mesh_file.replace('object.obj', 'skins.pt')) + +def get_all_models(checkpoint_joint, checkpoint_conn, checkpoint_skin): + model_joint = load_checkpoint(checkpoint_joint, anymate_args.device, anymate_args.num_joints) + model_connectivity = load_checkpoint(checkpoint_conn, anymate_args.device, anymate_args.num_joints) + model_skin = load_checkpoint(checkpoint_skin, anymate_args.device, anymate_args.num_joints) + return model_joint, model_connectivity, model_skin, True, True, True + +def get_all_results(mesh_file, model_joint, model_connectivity, model_skin, pc, eps=0.03, min_samples=1): + joints = get_result_joint(mesh_file, model_joint, pc, eps=eps, min_samples=min_samples) + conns = get_result_connectivity(mesh_file, model_connectivity, pc, joints) + skins = get_result_skinning(mesh_file, model_skin, pc, joints, conns) + return joints, conns, skins + diff --git a/Anymate/utils/ui_utils_bpy.py b/Anymate/utils/ui_utils_bpy.py new file mode 100644 index 0000000000000000000000000000000000000000..943b7fc372571f348f3044ebba4b39772a34cf92 --- /dev/null +++ b/Anymate/utils/ui_utils_bpy.py @@ -0,0 +1,134 @@ +import trimesh +import numpy as np +import torch + +from Anymate.utils.utils import load_checkpoint, get_joints, get_connectivity +from Anymate.args import anymate_args +from Anymate.utils.render_utils import empty, add_co, add_mesh, add_joints, add_conn, add_skin, setup_armature, save_scene + +def visualize_results(mesh_file=None, joints=None, connectivity=None, skinning=None): + + import bpy + # Create a scene with both original and processed meshes + vis_file = "Anymate/tmp/vis_scene.glb" + print('fffffffff') + + # empty() + bpy.ops.wm.read_homefile(use_empty=True) + + if mesh_file is not None: + # add_mesh(mesh_file) + bpy.ops.wm.obj_import(filepath=mesh_file) + + if 
joints is not None: + add_joints(joints) + + if connectivity is not None: + add_conn(connectivity, joints) + + if skinning is not None: + add_skin(mesh_file, skinning) + + # setup_armature() + # save_scene(vis_file) + bpy.ops.wm.save_as_mainfile(filepath=vis_file) + return vis_file + + +def process_mesh_to_pc(obj_path, sample_num = 8192, save_path = None): + # mesh_list : list of trimesh + try : + mesh = trimesh.load_mesh(obj_path) + + points, face_idx = mesh.sample(sample_num, return_index=True) + normals = mesh.face_normals[face_idx] + + pc_normal = np.concatenate([points, normals], axis=-1, dtype=np.float16) + + + if save_path is not None: + np.save(save_path, pc_normal) + + return pc_normal + except Exception as e: + print(f"Error: {obj_path} {e}") + return None + + +def normalize_mesh(mesh): + # Get vertices and compute bounding box + vertices = mesh.vertices + bbox_min = vertices.min(axis=0) + bbox_max = vertices.max(axis=0) + + # Find center and scale + center = (bbox_min + bbox_max) * 0.5 + scale = 2.0 / (bbox_max - bbox_min).max() + + # Center and scale vertices + vertices = (vertices - center) * scale + + # Create new mesh with normalized vertices + normalized_mesh = trimesh.Trimesh(vertices=vertices, + faces=mesh.faces, + face_normals=mesh.face_normals, + vertex_normals=mesh.vertex_normals) + + return normalized_mesh + + +def vis_joint(normalized_mesh_file, joints): + vis_file = visualize_results(mesh_file=normalized_mesh_file, joints=joints) + return vis_file + +def vis_connectivity(normalized_mesh_file, joints, connectivity): + vis_file = visualize_results(mesh_file=normalized_mesh_file, joints=joints, connectivity=connectivity) + return vis_file + +def vis_skinning(skinning): + vis_file = visualize_results(skinning=skinning) + return vis_file + + +def process_input(mesh_file): + """ + Function to handle input changes and initialize visualization + + Args: + mesh_file: Path to input mesh file + joint_checkpoint: Path to joint prediction checkpoint + conn_checkpoint: Path to connectivity prediction checkpoint + skin_checkpoint: Path to skinning prediction checkpoint + + Returns: + vis_file: Path to visualization file + """ + + # For now just visualize the input mesh + + normalized_mesh = normalize_mesh(trimesh.load(mesh_file)) + normalized_mesh_file = "Anymate/tmp/normalized_mesh.obj" + normalized_mesh.export(normalized_mesh_file) + vis_file = visualize_results(mesh_file=normalized_mesh_file) + pc = process_mesh_to_pc(normalized_mesh_file) + pc = torch.from_numpy(pc).to(anymate_args.device).to(torch.float32) + + print(pc.shape, pc.max(dim=0), pc.min(dim=0)) + + return normalized_mesh_file, vis_file, pc, None, None, None + + +def get_model(checkpoint): + model = load_checkpoint(checkpoint, anymate_args.device, anymate_args.num_joints) + return model, True + +def get_result_joint(model, pc): + return get_joints(pc, model, anymate_args.device) + +def get_result_connectivity(model, pc, joints): + return get_connectivity(pc, joints, model, anymate_args.device) + +def get_result_skinning(model, pc): + with torch.no_grad(): + skinning = model(pc) + return skinning \ No newline at end of file diff --git a/Anymate/utils/utils.py b/Anymate/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..483f899ab34fcbb8380292cfe7cf3cec9fa26aff --- /dev/null +++ b/Anymate/utils/utils.py @@ -0,0 +1,77 @@ +import torch +from Anymate.model import EncoderDecoder +from sklearn.cluster import DBSCAN + +def load_checkpoint(path, device, num_joints): + print(f"Loading model 
from {path}") + model_state = torch.load(path) + model_weights = model_state['state_dict'] + + try: + model_config = model_state['model_config'] + model = EncoderDecoder(device=device, dtype=torch.float32, **model_config) + model.to(device) + model.load_state_dict(model_weights, strict=True) + except: + encoder = path.split('/')[-1].split('.')[0].split('-')[0] + decoder = path.split('/')[-1].split('.')[0].split('-')[1] + model = EncoderDecoder(encoder=encoder, decoder=decoder, device=device, dtype=torch.float32, num_joints=num_joints) + model.to(device) + model.load_state_dict(model_weights, strict=True) + + print(f"Loaded model from {path}") + + return model + +def get_joint(pc, model, device='cuda', save=None, vox=None, eps=0.03, min_samples=1): + model.eval() + data = {'points_cloud': pc.unsqueeze(0)} + if vox is not None: + data['vox'] = vox.unsqueeze(0) + with torch.no_grad(): + model.decoder.inference_mode(eps=eps, min_samples=min_samples) + joints = model(data, device=device) + joints = torch.tensor(joints, dtype=torch.float32).to(device) + + if save is not None: + torch.save(joints, save) + + return joints + +def get_connectivity(pc, joints, model, device='cuda',return_prob=False, save=None): + model.eval() + data = {'points_cloud': pc.unsqueeze(0), 'joints': joints.unsqueeze(0), 'joints_num': torch.tensor([joints.shape[0]]), + 'joints_mask': torch.ones(joints.shape[0], device=device).unsqueeze(0)} + with torch.no_grad(): + conns = model(data, device=device).softmax(dim=-1) + conns = conns.squeeze(0) if return_prob else torch.argmax(conns, dim=-1).squeeze(0) + + if save is not None: + torch.save(conns, save) + + return conns + +def get_skinning(pc, joints, conns, model, vertices=None, bones=None, device='cuda', save=None): + model.eval() + + if bones is None: + bones = [] + for i in range(joints.shape[0]): + if conns[i] != i: + bones.append(torch.cat((joints[conns[i]], joints[i]), dim=-1)) + bones = torch.stack(bones, dim=0) + + data = {'points_cloud': pc.unsqueeze(0), 'bones': bones.unsqueeze(0), 'bones_num': torch.tensor([bones.shape[0]]), + 'bones_mask': torch.ones(bones.shape[0], device=device).unsqueeze(0)} + + if vertices is not None: + data['vertices'] = vertices.unsqueeze(0) + model.decoder.inference = True + + with torch.no_grad(): + skins = model(data, device=device).softmax(dim=-1).squeeze(0) + + if save is not None: + torch.save(skins, save) + + return skins diff --git a/Anymate/utils/vol_utils.py b/Anymate/utils/vol_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..f41ffc979dd381456df71b8c23ed763ea70e9ed1 --- /dev/null +++ b/Anymate/utils/vol_utils.py @@ -0,0 +1,135 @@ +import numpy as np +import torch +from ThirdParty.michelangelo.graphics.primitives import generate_dense_grid_points +from sklearn.cluster import DBSCAN + +def get_vol(bounds=(-0.5, 0.0, -0.5, 0.5, 1.0, 0.5), octree_depth=6): + + bbox_min = np.array(bounds[0:3]) + bbox_max = np.array(bounds[3:6]) + bbox_size = bbox_max - bbox_min + + xyz_samples, grid_size, length = generate_dense_grid_points( + bbox_min=bbox_min, + bbox_max=bbox_max, + octree_depth=octree_depth, + indexing="ij" + ) + xyz_samples = torch.FloatTensor(xyz_samples) # ((2^d)+1)^3 + + return xyz_samples + +def get_co(vox, bounds=(-1.0, -1.0, -1.0, 1.0, 1.0, 1.0), dtype = torch.float32): + + bbox_min = torch.tensor(bounds[0:3]).to(vox.device) + bbox_max = torch.tensor(bounds[3:6]).to(vox.device) + bbox_size = bbox_max - bbox_min + + # ind = torch.argwhere(vox) + # ind = ind.to(dtype) / (vox.shape[0]) * bbox_size + 
bbox_min + ind = vox + ind = ind.to(dtype) / 64 * bbox_size + bbox_min + + return ind.to(dtype) + +def get_gt(vol, joints, octree_depth=6): + sigma = 2 / 2**octree_depth + + dist = torch.cdist(vol, joints) + # print(dist.min(), dist.max()) + + dist = dist.min(dim=1).values + + gt = torch.exp(-dist**2 / 2 / sigma**2) + + return gt + +def project_onto_planes(planes, coordinates): + """ + Does a projection of a 3D point onto a batch of 2D planes, + returning 2D plane coordinates. + + Takes plane axes of shape n_planes, 3, 3 + # Takes coordinates of shape N, M, 3 + # returns projections of shape N*n_planes, M, 2 + """ + N, M, C = coordinates.shape + n_planes, _, _ = planes.shape + coordinates = coordinates.unsqueeze(1).expand(-1, n_planes, -1, -1).reshape(N*n_planes, M, 3) + inv_planes = torch.linalg.inv(planes).unsqueeze(0).expand(N, -1, -1, -1).reshape(N*n_planes, 3, 3) + projections = torch.bmm(coordinates, inv_planes) + return projections[..., :2] + +def sample_from_planes(plane_axes, plane_features, coordinates, mode='bilinear', padding_mode='zeros', box_warp=None): + assert padding_mode == 'zeros' + N, n_planes, C, H, W = plane_features.shape + _, M, _ = coordinates.shape + plane_features = plane_features.view(N*n_planes, C, H, W) + + # coordinates = (2/box_warp) * coordinates # TODO: add specific box bounds + + projected_coordinates = project_onto_planes(plane_axes, coordinates).unsqueeze(1) + output_features = torch.nn.functional.grid_sample(plane_features, projected_coordinates.float(), mode=mode, padding_mode=padding_mode, align_corners=False).permute(0, 3, 2, 1).reshape(N, n_planes, M, C) + return output_features + +def generate_planes(): + """ + Defines planes by the three vectors that form the "axes" of the + plane. Should work with arbitrary number of planes and planes of + arbitrary orientation. + """ + return torch.tensor([[[1, 0, 0], + [0, 1, 0], + [0, 0, 1]], + [[1, 0, 0], + [0, 0, 1], + [0, 1, 0]], + [[0, 0, 1], + [1, 0, 0], + [0, 1, 0]]], dtype=torch.float32) + +def extract_keypoints(y_pred, vox): + + y_pred = y_pred.detach().cpu().numpy() + vox = vox.detach().cpu().numpy() + volume = np.zeros([64, 64, 64]) + volume[...] 
= -100 + volume[vox[:, 0], vox[:, 1], vox[:, 2]] = y_pred.squeeze(-1) + + clusters = [] + cluster_model = DBSCAN(eps=1.8, min_samples=1) + + level = min((0.85 * y_pred.max() + 0.15 * y_pred.min()).item(), 0) + potential_points = np.argwhere(volume >= level) + clustering = cluster_model.fit(potential_points) + for cluster in set(clustering.labels_): + if cluster == -1: + print('got noise', len(potential_points[clustering.labels_ == cluster])) + continue + clusters.append(potential_points[clustering.labels_ == cluster]) + + while True: + if np.all(np.array([(len(cluster) < 10) for cluster in clusters])): + break + new_clusters = [] + for points in clusters: + if len(points) < 10: + new_clusters.append(points) + continue + + value = volume[points[:, 0], points[:, 1], points[:, 2]] + + potential_points = points[value >= (0.1 * value.max() + 0.9 * value.min())] + clustering = cluster_model.fit(potential_points) + for cluster in set(clustering.labels_): + if cluster == -1: + print('got noise', len(potential_points[clustering.labels_ == cluster])) + continue + new_clusters.append(potential_points[clustering.labels_ == cluster]) + + clusters = new_clusters + + key_points = np.array([cluster.mean(axis=0) for cluster in clusters]) + key_points = key_points / 32 - 1 + + return key_points \ No newline at end of file diff --git a/Render.py b/Render.py new file mode 100644 index 0000000000000000000000000000000000000000..46d14b83fa9b374fe59a151603f290e7ddee3669 --- /dev/null +++ b/Render.py @@ -0,0 +1,17 @@ +import argparse +import bpy +import mathutils +from Anymate.utils.render_utils import empty, setup_armature + + +def parse_args(): + parser = argparse.ArgumentParser(description='Anymate rendering script') + parser.add_argument('--path', type=str, required=True, help='Path to the model file') + return parser.parse_args() + +args = parse_args() + +print(f"Starting converting {args.path} to blender format...") + +empty() +setup_armature(args.path) \ No newline at end of file diff --git a/ThirdParty/PointLLM/.gitignore b/ThirdParty/PointLLM/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c79e9c0f36deb1fc479ee756f91fa833a09dd820 --- /dev/null +++ b/ThirdParty/PointLLM/.gitignore @@ -0,0 +1,12 @@ +__pycache__ +*.egg-info +.vscode +checkpoints +outputs +wandb +anno_data +objaverse_data +modelnet40_data +*.zip +*.ipynb +serving_workdirs \ No newline at end of file diff --git a/ThirdParty/PointLLM/README.md b/ThirdParty/PointLLM/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3315b991048449382e43939e3a7e14e313f0996e --- /dev/null +++ b/ThirdParty/PointLLM/README.md @@ -0,0 +1,353 @@ +
+# PointLLM: Empowering Large Language Models to Understand Point Clouds
+
+Runsen Xu, Xiaolong Wang, Tai Wang, Yilun Chen, Jiangmiao Pang*, Dahua Lin
+
+The Chinese University of Hong Kong · Shanghai AI Laboratory · Zhejiang University
+
+## 🏠 About
+
+[Dialogue teaser figure]
+We introduce PointLLM, a multi-modal large language model capable of understanding colored point clouds of objects. It perceives object types, geometric structures, and appearance without concerns for ambiguous depth, occlusion, or viewpoint dependency. We collect a novel dataset comprising 660K simple and 70K complex point-text instruction pairs to enable a two-stage training strategy. To rigorously evaluate our model's perceptual abilities and its generalization capabilities, we establish two benchmarks: Generative 3D Object Classification and 3D Object Captioning, assessed through three different evaluation methods. + +## 🔥 News +- [2024-09-06] We have uploaded the camera-ready version of PointLLM for ECCV 2024, which includes clearer writing and additional experimental results. Please check the paper [here](https://arxiv.org/abs/2308.16911). +- [2024-07-01] PointLLM has been accepted by ECCV 2024 with all "strong-accept" recommendation. 🎉 We are looking for self-motivated students to conduct research regarding PointLLM. Please send an email to runsxu@gmail.com with your CV if you are interested! +- [2023-12-29] We release the codes of our online Gradio demo. +- [2023-12-26] We release the codes for model evaluation, including ChatGPT/GPT-4 evaluation and traditional metric evaluation. +- [2023-12-08] We release the codes for training and PointLLM-v1.2. The online demo has also been upgraded to the v1.2 version. Please enjoy! 🎉 +- [2023-12-01] We have released an updated version of our paper (v2), which includes additional baseline comparisons, enhanced human-evaluation metrics, improved model performance (PointLLM-v1.2), and other refinements. Please check the updated version [here](https://arxiv.org/abs/2308.16911). +- [2023-10-18] We release our instruction-following data, including both the simple-description and complex instructions. Download [here](https://huggingface.co/datasets/RunsenXu/PointLLM). +- [2023-09-26] We release the inferencing codes with checkpoints as well as the Objaverse colored point cloud files we use. You can chat with PointLLM with your own machines. +- [2023-08-31] We release the [paper](http://arxiv.org/abs/2308.16911) of PointLLM and an online gradio [demo](http://101.230.144.196). Try it! 🎉 + + +## 📋 Contents +- [🤖 Online Demo](#-online-demo) +- [💬 Dialogue Examples](#-dialogue-examples) +- [🔍 Overview](#-overview) +- [📦 Training and Evaluation](#-training-and-evaluation) +- [📝 TODO List](#-todo-list) +- [🔗 Citation](#-citation) +- [📄 License](#-license) +- [📚 Related Work](#-related-work) +- [👏 Acknowledgements](#-acknowledgements) + +## 🤖 Online Demo +PointLLM is online! Try it at [http://101.230.144.196](http://101.230.144.196) or at [OpenXLab/PointLLM](https://openxlab.org.cn/apps/detail/openxlab-app/PointLLM). + +You can chat with PointLLM about the models of the [Objaverse](https://objaverse.allenai.org) dataset or about your own point clouds! + +Please do not hesitate to tell us if you have any feedback! 😃 + +## 💬 Dialogue Examples +| Dialogue 1 | Dialogue 2| Dialogue 3 | Dialogue 4 +| :-: | :-: | :-: | :-: | +| | | | | + + +## 🔍 Overview + +### Model +

+[Model architecture figure]

+The point encoder extracts features from the input point cloud and projects them to the latent space of the LLM backbone. The LLM backbone processes sequences of point tokens and text tokens, and generates the predicted tokens as the output. + +### Experiment Results +#### Quantitative Comparisons with baselines. +Please refer to our paper for more results. +

+[Quantitative comparison figure]

+!!!Note: Traditional metrics such as BLEU-1, ROUGE-L, and METEOR tend to favor shorter responses and may not effectively capture semantic accuracy. For a detailed discussion on this, please refer to our paper. We suggest that the community not rely solely on these metrics for evaluation. + +#### Qualitative Comparisons with baselines. +Please refer to our paper for more results. +

+[Qualitative comparison figure]

+ +## 📦 Training and Evaluation +### Installation +We test our codes under the following environment: +- Ubuntu 20.04 +- NVIDIA Driver: 515.65.01 +- CUDA 11.7 +- Python 3.10.13 +- PyTorch 2.0.1 +- Transformers 4.28.0.dev(transformers.git@cae78c46) + +To start: +1. Clone this repository. +```bash +git clone git@github.com:OpenRobotLab/PointLLM.git +cd PointLLM +``` +2. Install packages +```bash +conda create -n pointllm python=3.10 -y +conda activate pointllm +pip install --upgrade pip # enable PEP 660 support +pip install -e . + +# * for training +pip install ninja +pip install flash-attn +``` + +### Data Preparation +#### Objaverse Training Data +1. Download the two compressed files of 660K Objaverse colored point clouds [here](https://huggingface.co/datasets/RunsenXu/PointLLM/tree/main). They require about 77GB of storage space. +2. Run the following command to merge the two files into one and uncompress it. This will produce a folder named `8192_npy` containing 660K point cloud files named `{Objaverse_ID}_8192.npy`. Each file is a numpy array with dimensions (8192, 6), where the first three dimensions are `xyz` and the last three dimensions are `rgb` in [0, 1] range. +```bash +cat Objaverse_660K_8192_npy_split_a* > Objaverse_660K_8192_npy.tar.gz +tar -xvf Objaverse_660K_8192_npy.tar.gz +``` +3. In `PointLLM` folder, create a folder `data` and create a soft link to the uncompressed file in the directory. +```bash +cd PointLLM +mkdir data +ln -s /path/to/8192_npy data/objaverse_data +``` + +#### Instruction-Following Data +1. In `PointLLM/data` folder, create a directory named `anno_data`. +2. Our instruction-following data, including both the simple-description and complex instructions, can be downloaded [here](https://huggingface.co/datasets/RunsenXu/PointLLM). If you have difficulty downloading the data (e.g. network issue), please email the authors. +- The simple-description data has 660K samples and the complex instructions have 70K samples. +- Both training data are based on the Objaverse dataset. +- The complex instructions are generated with GPT-4. +3. Put the data files in the `anno_data` directory. The directory should look like this: +```bash +PointLLM/data/anno_data +├── PointLLM_brief_description_660K_filtered.json +├── PointLLM_brief_description_660K.json +└── PointLLM_complex_instruction_70K.json +``` +4. Note, the `PointLLM_brief_description_660K_filtered.json` is filtered from `PointLLM_brief_description_660K.json` by removing the 3000 objects we reserved as the validation set. If you want to reproduce the results in our paper, you should use the `PointLLM_brief_description_660K_filtered.json` for training. The `PointLLM_complex_instruction_70K.json` contains objects from the training set. +5. If you want to generate the complex instructions by yourself, please refer to our paper for other details. The system prompt is at `pointllm/data/data_generation/system_prompt_gpt4_0613.txt`. + +#### Evaluation Data +1. Download the referencing GT `PointLLM_brief_description_val_200_GT.json` we use for the benchmarks on Objaverse dataset [here](https://huggingface.co/datasets/RunsenXu/PointLLM/blob/main/PointLLM_brief_description_val_200_GT.json), and put it in `PointLLM/data/anno_data`. 
We also provide the 3000 object ids we filter during training [here](https://huggingface.co/datasets/RunsenXu/PointLLM/blob/main/val_object_ids_3000.txt) and their corresponding referencing GT [here](https://huggingface.co/datasets/RunsenXu/PointLLM/blob/main/PointLLM_brief_description_val_3000_GT.json), which can be used to evaluate on all the 3000 objects. +2. Create a directory named `modelnet40_data` in `PointLLM/data`. Download the test split of ModelNet40 point clouds `modelnet40_test_8192pts_fps.dat` [here](https://huggingface.co/datasets/RunsenXu/PointLLM/blob/main/modelnet40_test_8192pts_fps.dat) and put it in `PointLLM/data/modelnet40_data`. + +### Training +#### Download the Initial LLM and Point Encoder Weights +1. In `PointLLM` folder, create a directory named `checkpoints`. +2. Download the pre-trained LLM and point encoder: [ +PointLLM_7B_v1.1_init](https://huggingface.co/RunsenXu/PointLLM_7B_v1.1_init/tree/main) or [PointLLM_13B_v1.1_init](https://huggingface.co/RunsenXu/PointLLM_13B_v1.1_init/tree/main). Put them in the `checkpoints` directory. +3. Note that the above "v1.1" means we use the Vicuna-v1.1 checkpoints, and you do **not** need to download the original LLaMA weights again. + +#### Start Training +1. For stage-1 training, simply run: +```bash +cd PointLLM +scripts/PointLLM_train_stage1.sh +``` +2. After stage-1 training, start stage-2 training: +```bash +scripts/PointLLM_train_stage2.sh +``` + +#### PointLLM-v1.1 and PointLLM-v1.2 +Usually, you do not have to care about the following contents. They are only for reproducing the results in our v1 paper (PointLLM-v1.1). If you want to compare with our models or use our models for downstream tasks, please use PointLLM-v1.2 (refer to our v2 paper), which has better performance. +
+ The following steps are for reproducing PointLLM-v1.1 (click to expand) + +1. PointLLM v1.1 and v1.2 use slightly different pre-trained point encoders and projectors. If you want to reproduce PointLLM v1.1, edit the `config.json` file in the directory of initial LLM and point encoder weights, for example, `vim checkpoints/PointLLM_7B_v1.1_init/config.json`. + +2. Change the key `"point_backbone_config_name"` to specify another point encoder config: + ```bash + # change from + "point_backbone_config_name": "PointTransformer_8192point_2layer" # v1.2 + # to + "point_backbone_config_name": "PointTransformer_base_8192point", # v1.1 + ``` + +3. Edit the checkpoint path of the point encoder in `scripts/train_stage1.sh`: + ```bash + # change from + point_backbone_ckpt=$model_name_or_path/point_bert_v1.2.pt # v1.2 + # to + point_backbone_ckpt=$model_name_or_path/point_bert_v1.1.pt # v1.1 + ``` +
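+
+If you prefer a scripted edit over changing the file by hand, a small Python sketch along these lines should do the same thing (it assumes the 7B init weights from the example above; the helper itself is illustrative and not part of this repository):
+```python
+import json
+
+# Point this at the init weights you downloaded (use the 13B directory if applicable).
+cfg_path = "checkpoints/PointLLM_7B_v1.1_init/config.json"
+
+with open(cfg_path) as f:
+    cfg = json.load(f)
+
+# v1.2 ships with "PointTransformer_8192point_2layer"; switch to the v1.1 encoder config.
+cfg["point_backbone_config_name"] = "PointTransformer_base_8192point"
+
+with open(cfg_path, "w") as f:
+    json.dump(cfg, f, indent=2)
+```
+Remember that `point_backbone_ckpt` in `scripts/train_stage1.sh` still needs to point at `point_bert_v1.1.pt`, as shown above.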
+ +### Chatting +1. The trained model checkpoints are available [here](https://huggingface.co/RunsenXu) (including different versions of PointLLM). +2. Run the following command to launch a chatbot using the `torch.float32` data type for chatting about 3D models of Objaverse. The model checkpoints will be downloaded automatically. You can also manually download the model checkpoints and specify their paths. Here is an example: +```bash +cd PointLLM +PYTHONPATH=$PWD python pointllm/eval/PointLLM_chat.py --model_name RunsenXu/PointLLM_7B_v1.2 --data_name data/objaverse_data --torch_dtype float32 +``` +3. You can also easily modify the codes for using point clouds other than those from Objaverse, as long as the point clouds input to the model have dimensions (N, 6), where the first three dimensions are `xyz` and the last three dimensions are `rgb` (in [0, 1] range). You may sample the point clouds to have 8192 points, as our model is trained on such point clouds. +4. The following table shows GPU requirements for different models and data types. We recommend using `torch.bfloat16` if applicable, which is used in the experiments in our paper. + + | Model | Data Type | GPU Memory | + |:--------:|:---------:|:----------:| + | PointLLM-7B | torch.float16 | 14GB | + | PointLLM-7B | torch.float32 | 28GB | + | PointLLM-13B | torch.float16 | 26GB | + | PointLLM-13B | torch.float32 | 52GB | + +### Gradio Demo +1. We provide the codes for our online Gradio demo. You can run the following commands to launch the demo locally for chatting and visualization. +```bash +cd PointLLM +PYTHONPATH=$PWD python pointllm/eval/chat_gradio.py --model_name RunsenXu/PointLLM_7B_v1.2 --data_name data/objaverse_data +``` +2. Kind remind: if you want to release the demo in public, please refer to https://www.gradio.app/guides/sharing-your-app#security-and-file-access. + +### Evaluation +#### Inferencing +1. Run the following commands to infer the results. +2. Different commands for inferencing on different benchmarks (PointLLM_7B_v1.2 as an example): +```bash +cd PointLLM +export PYTHONPATH=$PWD + +# Open Vocabulary Classification on Objaverse +python pointllm/eval/eval_objaverse.py --model_name RunsenXu/PointLLM_7B_v1.2 --task_type classification --prompt_index 0 # or --prompt_index 1 + +# Object captioning on Objaverse +python pointllm/eval/eval_objaverse.py --model_name RunsenXu/PointLLM_7B_v1.2 --task_type captioning --prompt_index 2 + +# Close-set Zero-shot Classification on ModelNet40 +python pointllm/eval/eval_modelnet_cls.py --model_name RunsenXu/PointLLM_7B_v1.2 --prompt_index 0 # or --prompt_index 1 +``` +3. Please check the default command-line arguments of these two scripts. You can specify different prompts, data paths, and other parameters. +4. After inferencing, the results will be saved in `{model_name}/evaluation` as a dict with the following format: +```bash +{ + "prompt": "", + "results": [ + { + "object_id": "", + "ground_truth": "", + "model_output": "", + "label_name": "" # only for classification on modelnet40 + } + ] +} +``` + +#### ChatGPT/GPT-4 Evaluation +1. Get your OpenAI API key at [https://platform.openai.com/api-keys](https://platform.openai.com/api-keys). +2. Run the following commands to evaluate the model outputs in parallel with ChatGPT/GPT-4 (which cost approximately $1.5 to $2.2 USD). 
+```bash +cd PointLLM +export PYTHONPATH=$PWD +export OPENAI_API_KEY=sk-**** + +# Open Vocabulary Classification on Objaverse +python pointllm/eval/evaluator.py --results_path /path/to/model_output --model_type gpt-4-0613 --eval_type open-free-form-classification --parallel --num_workers 15 + +# Object captioning on Objaverse +python pointllm/eval/evaluator.py --results_path /path/to/model_output --model_type gpt-4-0613 --eval_type object-captioning --parallel --num_workers 15 + +# Close-set Zero-shot Classification on ModelNet40 +python pointllm/eval/evaluator.py --results_path /path/to/model_output --model_type gpt-3.5-turbo-0613 --eval_type modelnet-close-set-classification --parallel --num_workers 15 +``` +3. The evaluation script supports interruption and resumption. You can interrupt the evaluation process at any time by using `Ctrl+C`. This will save the temporary results. If an error occurs during the evaluation, the script will also save the current state. You can resume the evaluation from where it left off by running the same command again. +4. The evaluation results will be saved in `{model_name}/evaluation` as another dict. +Some of the metrics are explained as follows: +```bash +"average_score": The GPT-evaluated captioning score we report in our paper. +"accuracy": The classification accuracy we report in our paper, including random choices made by ChatGPT when model outputs are vague or ambiguous and ChatGPT outputs "INVALID". +"clean_accuracy": The classification accuracy after removing those "INVALID" outputs. +"total_predictions": The number of predictions. +"correct_predictions": The number of correct predictions. +"invalid_responses": The number of "INVALID" outputs by ChatGPT. + +# Some other statistics for calling OpenAI API +"prompt_tokens": The total number of tokens of the prompts for ChatGPT/GPT-4. +"completion_tokens": The total number of tokens of the completion results from ChatGPT/GPT-4. +"GPT_cost": The API cost of the whole evaluation process, in US Dollars 💵. +``` +5. Open-Step Evaluation. You can also start evaluation immediately after inferencing by passing the `--start_eval` flag and specifying the `--gpt_type`. For example: +```bash +python pointllm/eval/eval_objaverse.py --model_name RunsenXu/PointLLM_7B_v1.2 --task_type classification --prompt_index 0 --start_eval --gpt_type gpt-4-0613 +``` + +#### Traditional Metric Evaluation +1. For the object captioning task, run the following command to evaluate model outputs with traditional metrics including BLEU, ROUGE, METEOR, Sentence-BERT, and SimCSE. +```bash +python pointllm/eval/traditional_evaluator.py --results_path /path/to/model_captioning_output +``` +2. Note that we recommend not using BLEU, ROUGE, and METEOR for evaluation as they favor short captions and fall short of capturing semantic accuracy and diversity. + +## 📝 TODO List +- [x] Add inferencing codes with checkpoints. +- [x] Release instruction-following data. +- [x] Add training codes. +- [x] Add evaluation codes. +- [x] Add gradio demo codes. + +Community contributions are welcome!👇 If you need any support, please feel free to open an issue or contact us. +- [ ] Support Phi-2 LLM to make PointLLM more accessible to the community. +- [ ] Support Chinese LLMs like InternLM. 
+ +## 🔗 Citation + +If you find our work and this codebase helpful, please consider starring this repo 🌟 and cite: + +```bibtex +@article{xu2023pointllm, + title={PointLLM: Empowering Large Language Models to Understand Point Clouds}, + author={Xu, Runsen and Wang, Xiaolong and Wang, Tai and Chen, Yilun and Pang, Jiangmiao and Lin, Dahua}, + journal={arXiv preprint arXiv:2308.16911}, + year={2023} +} +``` + +## 📄 License +Creative Commons License +
+This work is under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. + +## 📚 Related Work +Together, Let's make LLM for 3D great! +- [Point-Bind & Point-LLM](https://arxiv.org/abs/2309.00615): aligns point clouds with Image-Bind, and leverages ImageBind-LLM to reason multi-modality input without 3D-instruction data training. +- [3D-LLM](https://arxiv.org/abs/2307.12981): employs 2D foundation models to encode multi-view images of 3D point clouds. + + +## 👏 Acknowledgements +- [LLaVA](https://github.com/haotian-liu/LLaVA): Our codebase is built upon LLaVA. +- [Vicuna](https://github.com/lm-sys/FastChat): We use the Vicuna-7B and Vicuna-13B checkpoints. +- [Objaverse](https://objaverse.allenai.org): We use models of the Objaverse dataset for training and evaluation. +- [Cap3D](https://github.com/crockwell/Cap3D/): We use the Cap3D captioning data for our data generation. +- [ULIP-2](https://github.com/salesforce/ULIP): We use ULIP-2 for pre-training our point cloud encoder. diff --git a/ThirdParty/PointLLM/__init__.py b/ThirdParty/PointLLM/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ThirdParty/PointLLM/pointllm/__init__.py b/ThirdParty/PointLLM/pointllm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e43701abfd68f05cd3bf1a85117b96c4ecc58299 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/__init__.py @@ -0,0 +1 @@ +# from .model import PointLLMLlamaForCausalLM diff --git a/ThirdParty/PointLLM/pointllm/conversation.py b/ThirdParty/PointLLM/pointllm/conversation.py new file mode 100644 index 0000000000000000000000000000000000000000..5350627507c3ef2f6f36f4a99ca3671f2995d1c8 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/conversation.py @@ -0,0 +1,375 @@ +import dataclasses +from enum import auto, Enum +from typing import List, Tuple + + +class SeparatorStyle(Enum): + """Different separator style.""" + SINGLE = auto() + TWO = auto() + MPT = auto() + + +@dataclasses.dataclass +class Conversation: + """A class that keeps all conversation history.""" + system: str + roles: List[str] + messages: List[List[str]] + offset: int + sep_style: SeparatorStyle = SeparatorStyle.SINGLE + sep: str = "###" + sep2: str = None + version: str = "Unknown" + + skip_next: bool = False + + def reset(self): + self.messages = self.messages[:self.offset] + + def get_prompt(self): + if self.sep_style == SeparatorStyle.SINGLE: + ret = self.system + self.sep + for role, message in self.messages: + if message: + if type(message) is tuple: + message, _, _ = message + ret += role + ": " + message + self.sep + else: + ret += role + ":" + return ret + elif self.sep_style == SeparatorStyle.TWO: + seps = [self.sep, self.sep2] + ret = self.system + seps[0] + for i, (role, message) in enumerate(self.messages): + if message: + if type(message) is tuple: + message, _, _ = message + ret += role + ": " + message + seps[i % 2] + else: + ret += role + ":" + return ret + if self.sep_style == SeparatorStyle.MPT: + ret = self.system + self.sep + for role, message in self.messages: + if message: + if type(message) is tuple: + message, _, _ = message + ret += role + message + self.sep + else: + ret += role + return ret + else: + raise ValueError(f"Invalid style: {self.sep_style}") + + def append_message(self, role, message): + self.messages.append([role, message]) + + def pop_last_none_message(self): + # * pop the last message if it's None, this is used for multi-round dialogue + if 
self.messages[-1][1] is None: + self.messages.pop() + + def get_images(self, return_pil=False): + images = [] + for i, (role, msg) in enumerate(self.messages[self.offset:]): + if i % 2 == 0: + if type(msg) is tuple: + import base64 + from io import BytesIO + from PIL import Image + msg, image, image_process_mode = msg + if image_process_mode == "Pad": + def expand2square(pil_img, background_color=(122, 116, 104)): + width, height = pil_img.size + if width == height: + return pil_img + elif width > height: + result = Image.new(pil_img.mode, (width, width), background_color) + result.paste(pil_img, (0, (width - height) // 2)) + return result + else: + result = Image.new(pil_img.mode, (height, height), background_color) + result.paste(pil_img, ((height - width) // 2, 0)) + return result + image = expand2square(image) + elif image_process_mode == "Crop": + pass + elif image_process_mode == "Resize": + image = image.resize((224, 224)) + else: + raise ValueError(f"Invalid image_process_mode: {image_process_mode}") + max_hw, min_hw = max(image.size), min(image.size) + aspect_ratio = max_hw / min_hw + max_len, min_len = 800, 400 + shortest_edge = int(min(max_len / aspect_ratio, min_len, min_hw)) + longest_edge = int(shortest_edge * aspect_ratio) + W, H = image.size + if H > W: + H, W = longest_edge, shortest_edge + else: + H, W = shortest_edge, longest_edge + image = image.resize((W, H)) + if return_pil: + images.append(image) + else: + buffered = BytesIO() + image.save(buffered, format="JPEG") + img_b64_str = base64.b64encode(buffered.getvalue()).decode() + images.append(img_b64_str) + return images + + def to_gradio_chatbot(self): + ret = [] + for i, (role, msg) in enumerate(self.messages[self.offset:]): + if i % 2 == 0: + if type(msg) is tuple: + import base64 + from io import BytesIO + msg, image, image_process_mode = msg + max_hw, min_hw = max(image.size), min(image.size) + aspect_ratio = max_hw / min_hw + max_len, min_len = 800, 400 + shortest_edge = int(min(max_len / aspect_ratio, min_len, min_hw)) + longest_edge = int(shortest_edge * aspect_ratio) + W, H = image.size + if H > W: + H, W = longest_edge, shortest_edge + else: + H, W = shortest_edge, longest_edge + image = image.resize((W, H)) + # image = image.resize((224, 224)) + buffered = BytesIO() + image.save(buffered, format="JPEG") + img_b64_str = base64.b64encode(buffered.getvalue()).decode() + img_str = f'user upload image' + msg = msg.replace('', img_str) + ret.append([msg, None]) + else: + ret[-1][-1] = msg + return ret + + def copy(self): + return Conversation( + system=self.system, + roles=self.roles, + messages=[[x, y] for x, y in self.messages], + offset=self.offset, + sep_style=self.sep_style, + sep=self.sep, + sep2=self.sep2) + + def dict(self): + if len(self.get_images()) > 0: + return { + "system": self.system, + "roles": self.roles, + "messages": [[x, y[0] if type(y) is tuple else y] for x, y in self.messages], + "offset": self.offset, + "sep": self.sep, + "sep2": self.sep2, + } + return { + "system": self.system, + "roles": self.roles, + "messages": self.messages, + "offset": self.offset, + "sep": self.sep, + "sep2": self.sep2, + } + + +conv_v1 = Conversation( + system="A chat between a curious human and an artificial intelligence assistant. " + "The assistant gives helpful, detailed, and polite answers to the human's questions.", + roles=("Human", "Assistant"), + messages=( + ("Human", "Give three tips for staying healthy."), + ("Assistant", + "Sure, here are three tips for staying healthy:\n" + "1. 
Exercise regularly: Regular physical activity can help improve your overall health and wellbeing. " + "It can also help reduce your risk of chronic conditions such as obesity, diabetes, heart disease, " + "and certain cancers. Aim for at least 150 minutes of moderate-intensity aerobic exercise or " + "75 minutes of vigorous-intensity aerobic exercise per week, along with muscle-strengthening " + "activities at least two days per week.\n" + "2. Eat a balanced diet: Eating a balanced diet that is rich in fruits, " + "vegetables, whole grains, lean proteins, and healthy fats can help support " + "your overall health. Try to limit your intake of processed and high-sugar foods, " + "and aim to drink plenty of water throughout the day.\n" + "3. Get enough sleep: Getting enough quality sleep is essential for your physical " + "and mental health. Adults should aim for seven to nine hours of sleep per night. " + "Establish a regular sleep schedule and try to create a relaxing bedtime routine to " + "help improve the quality of your sleep.") + ), + offset=2, + sep_style=SeparatorStyle.SINGLE, + sep="###", +) + +conv_v1_2 = Conversation( + system="A chat between a curious human and an artificial intelligence assistant. " + "The assistant gives helpful, detailed, and polite answers to the human's questions.", + roles=("Human", "Assistant"), + messages=( + ("Human", "What are the key differences between renewable and non-renewable energy sources?"), + ("Assistant", + "Renewable energy sources are those that can be replenished naturally in a relatively " + "short amount of time, such as solar, wind, hydro, geothermal, and biomass. " + "Non-renewable energy sources, on the other hand, are finite and will eventually be " + "depleted, such as coal, oil, and natural gas. Here are some key differences between " + "renewable and non-renewable energy sources:\n" + "1. Availability: Renewable energy sources are virtually inexhaustible, while non-renewable " + "energy sources are finite and will eventually run out.\n" + "2. Environmental impact: Renewable energy sources have a much lower environmental impact " + "than non-renewable sources, which can lead to air and water pollution, greenhouse gas emissions, " + "and other negative effects.\n" + "3. Cost: Renewable energy sources can be more expensive to initially set up, but they typically " + "have lower operational costs than non-renewable sources.\n" + "4. Reliability: Renewable energy sources are often more reliable and can be used in more remote " + "locations than non-renewable sources.\n" + "5. Flexibility: Renewable energy sources are often more flexible and can be adapted to different " + "situations and needs, while non-renewable sources are more rigid and inflexible.\n" + "6. Sustainability: Renewable energy sources are more sustainable over the long term, while " + "non-renewable sources are not, and their depletion can lead to economic and social instability.\n") + ), + offset=2, + sep_style=SeparatorStyle.SINGLE, + sep="###", +) + +conv_vicuna_v1_1 = Conversation( + system="A chat between a curious user and an artificial intelligence assistant. " + "The assistant gives helpful, detailed, and polite answers to the user's questions.", + roles=("USER", "ASSISTANT"), + version="v1", + messages=(), + offset=0, + sep_style=SeparatorStyle.TWO, + sep=" ", + sep2="", +) + +conv_mpt = Conversation( + system="""<|im_start|>system +- You are a helpful language and vision assistant. 
+- You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language. +- You should follow the instructions carefully and explain your answers in detail.""", + roles=("<|im_start|>user\n", "<|im_start|>assistant\n"), + version="mpt", + messages=(), + offset=0, + sep_style=SeparatorStyle.MPT, + sep="<|im_end|>", +) + +conv_mpt_text = Conversation( + system="""<|im_start|>system +- You are a helpful assistant chatbot trained by MosaicML. +- You answer questions. +- You are excited to be able to help the user, but will refuse to do anything that could be considered harmful to the user. +- You are more than just an information source, you are also able to write poetry, short stories, and make jokes.""", + roles=("<|im_start|>user\n", "<|im_start|>assistant\n"), + version="mpt", + messages=(), + offset=0, + sep_style=SeparatorStyle.MPT, + sep="<|im_end|>", +) + +conv_bair_v1 = Conversation( + system="BEGINNING OF CONVERSATION:", + roles=("USER", "GPT"), + messages=(), + offset=0, + sep_style=SeparatorStyle.TWO, + sep=" ", + sep2="", +) + +simple_conv = Conversation( + system="A chat between a curious human and an artificial intelligence assistant. " + "The assistant gives helpful, detailed, and polite answers to the human's questions.", + roles=("Human", "Assistant"), + messages=( + ("Human", "Hi!"), + ("Assistant", "Hi there! How can I help you today?") + ), + offset=2, + sep_style=SeparatorStyle.SINGLE, + sep="###", +) + +simple_conv_multimodal = Conversation( + system="You are LLaVA, a large language and vision assistant trained by UW Madison WAIV Lab." + "You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language." + "Follow the instructions carefully and explain your answers in detail.", + roles=("Human", "Assistant"), + messages=( + ("Human", "Hi!"), + ("Assistant", "Hi there! How can I help you today?\n") + ), + offset=2, + sep_style=SeparatorStyle.SINGLE, + sep="###", +) + +simple_conv_mpt_multimodal = Conversation( + system="""<|im_start|>system +- You are LLaVA, a large language and vision assistant trained by UW Madison WAIV Lab. +- You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language. +- You should follow the instructions carefully and explain your answers in detail.""", + roles=("<|im_start|>user\n", "<|im_start|>assistant\n"), + version="mpt", + messages=(), + offset=0, + sep_style=SeparatorStyle.MPT, + sep="<|im_end|>", +) + +simple_conv_legacy = Conversation( + system="You are LLaVA, a large language model trained by UW Madison WAIV Lab." + "You are designed to assist human with a variety of tasks using natural language." + "Follow the instructions carefully.", + roles=("Human", "Assistant"), + messages=( + ("Human", "Hi!\n\n### Response:"), + ("Assistant", "Hi there! How can I help you today?\n") + ), + offset=2, + sep_style=SeparatorStyle.SINGLE, + sep="###", +) + +conv_llava_v1 = Conversation( + system="You are LLaVA, a large language and vision assistant trained by UW Madison WAIV Lab." + "You are able to understand the visual content that the user provides, and assist the user with a variety of tasks using natural language." 
+ "Follow the instructions carefully and explain your answers in detail.", + roles=("USER", "ASSISTANT"), + version="v1", + messages=(), + offset=0, + sep_style=SeparatorStyle.TWO, + sep=" ", + sep2="", +) + +default_conversation = conv_v1_2 +conv_templates = { + "default": conv_v1_2, + "simple": simple_conv, + "simple_legacy": simple_conv_legacy, + "multimodal": simple_conv_multimodal, + "mpt_multimodal": simple_conv_mpt_multimodal, + "llava_v1": conv_llava_v1, + + # fastchat + "v1": conv_v1_2, + "bair_v1": conv_bair_v1, + "vicuna_v1_1": conv_vicuna_v1_1, + "mpt": conv_mpt, + "mpt_text": conv_mpt_text, +} + + +if __name__ == "__main__": + print(default_conversation.get_prompt()) diff --git a/ThirdParty/PointLLM/pointllm/data/__init__.py b/ThirdParty/PointLLM/pointllm/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2681ef21d7b4c758651eda7320bec4b5cbfc5b20 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/data/__init__.py @@ -0,0 +1,3 @@ +from .utils import load_objaverse_point_cloud, pc_norm, farthest_point_sample +from .object_point_dataset import ObjectPointCloudDataset, make_object_point_data_module +from .modelnet import ModelNet \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/data/modelnet.py b/ThirdParty/PointLLM/pointllm/data/modelnet.py new file mode 100644 index 0000000000000000000000000000000000000000..ae42d24ec0a41c53bde71054176180e0e5c4bbce --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/data/modelnet.py @@ -0,0 +1,147 @@ +import os +import torch +import numpy as np +import pickle +from torch.utils.data import Dataset +from pointllm.utils import * +from pointllm.data.utils import * + +class ModelNet(Dataset): + def __init__(self, config_path, split, subset_nums=-1, use_color=False): + """ + Args: + data_args: + split: train or test + """ + super(ModelNet, self).__init__() + + if config_path is None: + # * use the default config file in the same dir + config_path = os.path.join(os.path.dirname(__file__), "modelnet_config", "ModelNet40.yaml") + + config = cfg_from_yaml_file(config_path) + # * check data path + self.root = config["DATA_PATH"] + + if not os.path.exists(self.root): + print(f"Data path {self.root} does not exist. Please check your data path.") + exit() + + self.npoints = config.npoints + self.num_category = config.NUM_CATEGORY # * should be 40 + self.random_sample = config.random_sampling + self.use_height = config.use_height + self.use_normals = config.USE_NORMALS + self.subset_nums = subset_nums + self.normalize_pc = True + self.use_color = use_color + + if self.use_height or self.use_normals: + print(f"Warning: Usually we don't use height or normals for shapenet but use_height: {self.use_height} and \ + use_normals: {self.use_normals}.") + + self.split = split + assert (self.split == 'train' or self.split == 'test') + + self.catfile = os.path.join(os.path.dirname(__file__), "modelnet_config", 'modelnet40_shape_names_modified.txt') + + # "tv_stand" -> "tv stand" + self.categories = [line.rstrip() for line in open(self.catfile)] # * list of category names + + self.save_path = os.path.join(self.root, + 'modelnet%d_%s_%dpts_fps.dat' % (self.num_category, self.split, self.npoints)) + + print('Load processed data from %s...' 
% self.save_path) + with open(self.save_path, 'rb') as f: + self.list_of_points, self.list_of_labels = pickle.load(f) # * ndarray of N, C: (8192, 6) (xyz and normals) + + if self.subset_nums > 0: + # * set random seed + import random + random.seed(0) + # * random choose subset_nums + idxs = random.sample(range(len(self.list_of_labels)), self.subset_nums) + self.list_of_labels = [self.list_of_labels[idx] for idx in idxs] + self.list_of_points = [self.list_of_points[idx] for idx in idxs] + + # * print len + print(f"Load {len(self.list_of_points)} data from {self.save_path}.") + + def __len__(self): + return len(self.list_of_labels) + + def _get_item(self, index): + point_set, label = self.list_of_points[index], self.list_of_labels[index] + + if self.npoints < point_set.shape[0]: + if self.random_sample: + # * random sample + point_set = point_set[np.random.choice(point_set.shape[0], self.npoints, replace=False)] + else: + point_set = farthest_point_sample(point_set, self.npoints) + + point_set[:, 0:3] = pc_normalize(point_set[:, 0:3]) + if not self.use_normals: + point_set = point_set[:, 0:3] + + if self.use_height: + self.gravity_dim = 1 + height_array = point_set[:, self.gravity_dim:self.gravity_dim + 1] - point_set[:, + self.gravity_dim:self.gravity_dim + 1].min() + point_set = np.concatenate((point_set, height_array), axis=1) + + point_set = np.concatenate((point_set, np.zeros_like(point_set)), axis=-1) if self.use_color else point_set + + return point_set, label.item() # * ndarray, int + + def pc_norm(self, pc): + """ pc: NxC, return NxC """ + xyz = pc[:, :3] + other_feature = pc[:, 3:] + + centroid = np.mean(xyz, axis=0) + xyz = xyz - centroid + m = np.max(np.sqrt(np.sum(xyz ** 2, axis=1))) + xyz = xyz / m + + pc = np.concatenate((xyz, other_feature), axis=1) + return pc + + def __getitem__(self, index): + points, label = self._get_item(index) + pt_idxs = np.arange(0, points.shape[0]) # 2048 + if self.split == 'train': + np.random.shuffle(pt_idxs) + current_points = points[pt_idxs].copy() + + if self.normalize_pc: + # * modelnet point cloud is already normalized + current_points = self.pc_norm(current_points) + + current_points = torch.from_numpy(current_points).float() # * N, C tensors + label_name = self.categories[int(label)] + + data_dict = { + "indice": index, # * int + "point_clouds": current_points, # * tensor of N, C + "labels": label, # * int + "label_names": label_name # * str + } + + return data_dict + +if __name__ == '__main__': + import argparse + + parser = argparse.ArgumentParser(description='ModelNet Dataset') + + parser.add_argument("--config_path", type=str, default=None, help="config file path.") + parser.add_argument("--split", type=str, default="test", help="train or test.") + parser.add_argument("--subset_nums", type=int, default=200) + + args = parser.parse_args() + + dataset = ModelNet(config_path=args.config_path, split=args.split, subset_nums=args.subset_nums) + + # * get the first item + print(dataset[0]) \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/data/modelnet_config/ModelNet40.yaml b/ThirdParty/PointLLM/pointllm/data/modelnet_config/ModelNet40.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1519c08a16dd78c8bb17cef58e138048534c37f7 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/data/modelnet_config/ModelNet40.yaml @@ -0,0 +1,8 @@ +NAME: ModelNet +DATA_PATH: data/modelnet40_data +NUM_CATEGORY: 40 +USE_NORMALS: FALSE +npoints: 8192 +random_sampling: TRUE +use_height: FALSE +use_normals: FALSE \ No newline 
at end of file diff --git a/ThirdParty/PointLLM/pointllm/data/object_point_dataset.py b/ThirdParty/PointLLM/pointllm/data/object_point_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..4ab0f30ece7ff860df70abce0151d918b82d1e6a --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/data/object_point_dataset.py @@ -0,0 +1,250 @@ +import os +import json +import torch +import numpy as np + +import copy +import transformers +from torch.utils.data import Dataset + +from .utils import * + + +def make_object_point_data_module(tokenizer: transformers.PreTrainedTokenizer, data_args) -> Dict: + """Make dataset and collator for Joint3Ddataset with text and point cloud data.""" + """Initialize datasets.""" + + data_collator = DataCollatorForPointTextDataset(tokenizer=tokenizer) + if data_args.split_train_val: + print("Loading training datasets.") + train_dataset = ObjectPointCloudDataset( + split='train', + data_path=data_args.data_path, + anno_path=data_args.anno_path, + pointnum=data_args.pointnum, + conversation_types=data_args.conversation_types, + tokenizer=tokenizer, + use_color=data_args.use_color, + data_args=data_args + ) + print("Done!") + if data_args.data_debug_num > 0: + print('Debug mode, using training set as val set.') + val_dataset = train_dataset + else: + # * make a val dataset + print("Loading validation datasets.") + val_dataset = ObjectPointCloudDataset( + split='val', # * load train split + data_path=data_args.data_path, + anno_path=data_args.anno_path, + pointnum=data_args.pointnum, + conversation_types=data_args.conversation_types, + tokenizer=tokenizer, + use_color=data_args.use_color, + data_args=data_args + ) + return dict(train_dataset=train_dataset, eval_dataset=val_dataset, data_collator=data_collator) + else: + # * use all data as training data + train_dataset = ObjectPointCloudDataset( + split='train', + data_path=data_args.data_path, + anno_path=data_args.anno_path, + pointnum=data_args.pointnum, + conversation_types=data_args.conversation_types, + use_color=data_args.use_color, + tokenizer=tokenizer, + data_args=data_args + ) + return dict(train_dataset=train_dataset, eval_dataset=None, data_collator=data_collator) + +class ObjectPointCloudDataset(Dataset): + """Dataset utilities for objaverse.""" + def __init__(self, + data_path=None, + anno_path=None, + tokenizer=None, + pointnum=8192, + split='train', + conversation_types=None, # * default is simple_des, used for stage1 pre-train + use_color=True, + data_args=None): + + """ + split: only considered when data_args.split_train_val is True. + conversation_types: tuple, used to filter the data, default is ('simple_description'), other types is: + "detailed_description", "single_round", "multi_round". 
+ tokenizer: load point clouds only if None + """ + super(ObjectPointCloudDataset, self).__init__() + + """Initialize dataset with object point clouds and text""" + self.data_path = data_path + self.anno_path = anno_path + self.tokenizer = tokenizer + self.split = split + if conversation_types is None: + self.conversation_types = ("simple_description",) + else: + self.conversation_types = conversation_types + + self.data_args = data_args + self.normalize_pc = True + self.use_color = use_color + + self.pointnum = pointnum + self.point_backbone_config = data_args.point_backbone_config if data_args is not None else None + self.point_indicator = '' + + # Load the data list from JSON + print(f"Loading anno file from {anno_path}.") + with open(anno_path, "r") as json_file: + self.list_data_dict = json.load(json_file) + + # * print the conversations_type + print(f"Using conversation_type: {self.conversation_types}") + # * print before filtering + print(f"Before filtering, the dataset size is: {len(self.list_data_dict)}.") + + # * iterate the list and filter + # * these two ids have corrupted colored point files, so filter them when use_color is True + filter_ids = ['6760e543e1d645d5aaacd3803bcae524', 'b91c0711149d460a8004f9c06d3b7f38'] if self.use_color else [] + + # Iterate the list, filter those "conversation_type" not in self.conversation_types + self.list_data_dict = [ + data for data in self.list_data_dict + if data.get('conversation_type', 'simple_description') in self.conversation_types + and data.get('object_id') not in filter_ids + ] + + # * print after filtering + print(f"After filtering, the dataset size is: {len(self.list_data_dict)}.") + # * print the size of different conversation_type + for conversation_type in self.conversation_types: + print(f"Number of {conversation_type}: {len([data for data in self.list_data_dict if data.get('conversation_type', 'simple_description') == conversation_type])}") + + if self.data_args is not None and self.data_args.data_debug_num > 0: + self.list_data_dict = self.list_data_dict[:self.data_args.data_debug_num] + # * print all the scan_id in debug mode, not using for loop + print('Debug mode, using: ' + ' '.join([data['object_id'] for data in self.list_data_dict])) + elif self.data_args is not None and self.data_args.split_train_val: + # * split train and val with 9:1 ratios + if self.split == 'train': + self.list_data_dict = self.list_data_dict[:int(self.data_args.split_ratio * len(self.list_data_dict))] + print(f"Train set size: {len(self.list_data_dict)}") + else: + self.list_data_dict = self.list_data_dict[int(self.data_args.split_ratio * len(self.list_data_dict)):] + print(f"Val set size: {len(self.list_data_dict)}") + + def _load_point_cloud(self, object_id, type='objaverse'): + if type == 'objaverse': + return self._load_objaverse_point_cloud(object_id) + + def _load_objaverse_point_cloud(self, object_id): + filename = f"{object_id}_{self.pointnum}.npy" + point_cloud = np.load(os.path.join(self.data_path, filename)) + + if not self.use_color: + point_cloud = point_cloud[:, :3] + + return point_cloud + + def pc_norm(self, pc): + """ pc: NxC, return NxC """ + xyz = pc[:, :3] + other_feature = pc[:, 3:] + + centroid = np.mean(xyz, axis=0) + xyz = xyz - centroid + m = np.max(np.sqrt(np.sum(xyz ** 2, axis=1))) + xyz = xyz / m + + pc = np.concatenate((xyz, other_feature), axis=1) + return pc + + def __getitem__(self, index): + sources = self.list_data_dict[index] + if isinstance(index, int): + sources = [sources] + assert len(sources) == 1, 
"sources should be a list" + if self.point_indicator in sources[0]['conversations'][0]['value']: + + object_id = self.list_data_dict[index]['object_id'] + + # Point cloud representation + point_cloud = self._load_point_cloud(object_id) # * N, C + if self.normalize_pc: + point_cloud = self.pc_norm(point_cloud) # * need to norm since point encoder is norm + + if self.tokenizer is None: + data_dict = dict( + point_clouds=torch.from_numpy(point_cloud.astype(np.float32)), + object_ids=object_id + ) + return data_dict + + sources = preprocess_multimodal_point_cloud( + copy.deepcopy([e["conversations"] for e in sources]), self.point_backbone_config, point_indicator=self.point_indicator) + else: + sources = copy.deepcopy([e["conversations"] for e in sources]) + + data_dict = preprocess_v1( + sources, + self.tokenizer) + + if isinstance(index, int): + data_dict = dict(input_ids=data_dict["input_ids"][0], + labels=data_dict["labels"][0]) + + # point exist in the data + if self.point_indicator in self.list_data_dict[index]['conversations'][0]['value']: + data_dict['point_clouds'] = torch.from_numpy(point_cloud.astype(np.float32)) + + return data_dict + + def __len__(self): + """Return number of utterances.""" + return len(self.list_data_dict) + +if __name__ == '__main__': + import argparse + parser = argparse.ArgumentParser() + + parser.add_argument("--data_path", default="data/objaverse_data", type=str, + help="Path to the data directory.") + parser.add_argument("--anno_path", default=None, type=str, required=True, + help="Path to the annotation file.") + parser.add_argument("--split", default='train', type=str, + help="Whether to use the train or validation dataset.") + parser.add_argument("--pointnum", default=8192, type=int, + help="Number of points in the point cloud.") + parser.add_argument("--data_debug_num", default=0, type=int, + help="Number of data to debug with.") + parser.add_argument("--split_train_val", default=False, type=bool, + help="Whether to split the dataset into training and validation.") + parser.add_argument("--split_ratio", default=0.9, type=float, + help="The ratio of training to validation data.") + parser.add_argument("--tokenizer_path", default=None, type=str, required=True, + help="Path to the tokenizer config file.") + + args = parser.parse_args() + + # Initialize tokenizer + tokenizer = transformers.AutoTokenizer.from_pretrained(args.tokenizer_path) + + args.point_backbone_config = None + + # Initialize dataset + dataset = ObjectPointCloudDataset( + data_path=args.data_path, + anno_path=args.anno_path, + pointnum=args.pointnum, + split=args.split, + tokenizer=tokenizer, + data_args=args + ) + + # Example usage + print(f'Dataset length: {len(dataset)}') + diff --git a/ThirdParty/PointLLM/pointllm/data/utils.py b/ThirdParty/PointLLM/pointllm/data/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c41aaca765e4e670207ee798807ec64c65730a48 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/data/utils.py @@ -0,0 +1,236 @@ +from collections import OrderedDict, defaultdict + +import transformers +from pointllm import conversation as conversation_lib +from dataclasses import dataclass +from typing import Optional, Dict, Sequence +import torch + +import numpy as np +import os + +IGNORE_INDEX = -100 + +# * Sample Usage: +# * from utils import LRUCache +# * cache = LRUCache(capacity, max_access_count) +# if self.cache is None: +# info_data = self.multiview_scannet[info_index] +# else: +# info_data = self.cache.get(info_index) +# if info_data is None or 
self.cache.get_access_count(info_index) >= self.cache.max_access_count: +# # If not in cache, or accessed max_access_count times, load it and put it in cache +# info_data = self.multiview_scannet[info_index] +# self.cache.put(info_index, info_data) +# self.cache.reset_access_count(info_index) + +class LRUCache: + def __init__(self, capacity, max_access_count): + self.cache = OrderedDict() + self.access_count = defaultdict(int) + self.capacity = capacity + self.max_access_count = max_access_count + + def get(self, key): + if key not in self.cache: + return None + value = self.cache.pop(key) + self.cache[key] = value # Put key as the newest one + self.access_count[key] += 1 + return value + + def put(self, key, value): + if key in self.cache: # Update the value and put it as newest + self.cache.pop(key) + elif len(self.cache) == self.capacity: # If cache is full + oldest_key = next(iter(self.cache)) + self.cache.popitem(last=False) # Remove oldest item + del self.access_count[oldest_key] # Remove the corresponding access count + self.cache[key] = value + self.access_count[key] = 1 + + def get_access_count(self, key): + return self.access_count.get(key, 0) + + def reset_access_count(self, key): + self.access_count[key] = 0 + + +def preprocess_v1( + sources, + tokenizer: transformers.PreTrainedTokenizer, +) -> Dict: + conv = conversation_lib.default_conversation.copy() + roles = {"human": conv.roles[0], "gpt": conv.roles[1]} + + # Apply prompt templates + conversations = [] + for i, source in enumerate(sources): + if roles[source[0]["from"]] != conv.roles[0]: + # Skip the first one if it is not from human + source = source[1:] + + conv.messages = [] + for j, sentence in enumerate(source): + role = roles[sentence["from"]] + assert role == conv.roles[j % 2], f"{i}" + conv.append_message(role, sentence["value"]) + conversations.append(conv.get_prompt()) + + # Tokenize conversations + input_ids = tokenizer( + conversations, + return_tensors="pt", + padding="longest", + max_length=tokenizer.model_max_length, + truncation=True, + ).input_ids + targets = input_ids.clone() + + assert conv.sep_style == conversation_lib.SeparatorStyle.TWO + + # Mask targets + sep = conv.sep + conv.roles[1] + ": " + for conversation, target in zip(conversations, targets): + total_len = int(target.ne(tokenizer.pad_token_id).sum()) + + rounds = conversation.split(conv.sep2) + cur_len = 1 + target[:cur_len] = IGNORE_INDEX + for i, rou in enumerate(rounds): + if rou == "": + break + + parts = rou.split(sep) + if len(parts) != 2: # * can handle padded tokens + break + parts[0] += sep + round_len = len(tokenizer(rou).input_ids) + instruction_len = len(tokenizer(parts[0]).input_ids) - 2 + + target[cur_len : cur_len + instruction_len] = IGNORE_INDEX + + cur_len += round_len + target[cur_len:] = IGNORE_INDEX # * this is necessary for padded tokens + + if cur_len < tokenizer.model_max_length: + if cur_len != total_len: # * unk tokens in the dialogue will cause this. + target[:] = IGNORE_INDEX + print( + f"WARNING: tokenization mismatch: {cur_len} vs. {total_len}." 
+ f" (ignored)" + ) + + return dict( + input_ids=input_ids, + labels=targets, + ) + +def preprocess_multimodal_point_cloud( + sources: Sequence[str], + point_backbone_config: dict, + point_indicator: str = "", +) -> Dict: + point_token_len = point_backbone_config['point_token_len'] + default_point_patch_token = point_backbone_config['default_point_patch_token'] + + for source in sources: + for sentence in source: + replace_token = default_point_patch_token * point_token_len + if point_backbone_config['mm_use_point_start_end']: + replace_token = point_backbone_config['default_point_start_token']+ replace_token + point_backbone_config['default_point_end_token'] + sentence["value"] = sentence["value"].replace(point_indicator, replace_token) + + return sources + +def pc_norm(pc): + """ pc: NxC, return NxC """ + xyz = pc[:, :3] + other_feature = pc[:, 3:] + + centroid = np.mean(xyz, axis=0) + xyz = xyz - centroid + m = np.max(np.sqrt(np.sum(xyz ** 2, axis=1))) + xyz = xyz / m + + pc = np.concatenate((xyz, other_feature), axis=1) + return pc + +def load_objaverse_point_cloud(data_path, object_id, pointnum=8192, use_color=False): + filename = f"{object_id}_{pointnum}.npy" + point_cloud = np.load(os.path.join(data_path, filename)) + + # * normalize + point_cloud = pc_norm(point_cloud) + + if not use_color: + point_cloud = point_cloud[:, :3] + + return point_cloud + +@dataclass +class DataCollatorForPointTextDataset(object): + """Collate examples for mixed dataset with text and point cloud data.""" + + tokenizer: transformers.PreTrainedTokenizer + + def __call__(self, instances: Sequence[Dict]) -> Dict[str, torch.Tensor]: + input_ids, labels = tuple([instance[key] for instance in instances] + for key in ("input_ids", "labels")) + input_ids = torch.nn.utils.rnn.pad_sequence( + input_ids, + batch_first=True, + padding_value=self.tokenizer.pad_token_id) + labels = torch.nn.utils.rnn.pad_sequence(labels, + batch_first=True, + padding_value=IGNORE_INDEX) + batch = dict( + input_ids=input_ids, + labels=labels, + attention_mask=input_ids.ne(self.tokenizer.pad_token_id), + ) + + if 'point_clouds' in instances[0]: + point_clouds = [instance['point_clouds'] for instance in instances] + if all(x is not None and x.shape == point_clouds[0].shape for x in point_clouds): # * point_clouds have different shapes + batch['point_clouds'] = torch.stack(point_clouds) + else: + batch['point_clouds'] = point_clouds # * return as lists + + return batch + +def farthest_point_sample(point, npoint): + """ + Input: + xyz: pointcloud data, [N, D] + npoint: number of samples + Return: + centroids: sampled pointcloud index, [npoint, D] + """ + N, D = point.shape + xyz = point[:,:3] + centroids = np.zeros((npoint,)) + distance = np.ones((N,)) * 1e10 + farthest = np.random.randint(0, N) + for i in range(npoint): + centroids[i] = farthest + centroid = xyz[farthest, :] + dist = np.sum((xyz - centroid) ** 2, -1) + mask = dist < distance + distance[mask] = dist[mask] + farthest = np.argmax(distance, -1) + point = point[centroids.astype(np.int32)] + return point + +def pc_normalize(pc): + """ + pc: Nx3 array + This functions normalizes a point cloud to fit within a unit sphere. + It first calculates the centroid of the point cloud and then subtracts + it from all points before scaling all points to fit within a unit sphere. 
+ """ + centroid = np.mean(pc, axis=0) + pc = pc - centroid + m = np.max(np.sqrt(np.sum(pc**2, axis=1))) + pc = pc / m + return pc \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/eval/PointLLM_chat.py b/ThirdParty/PointLLM/pointllm/eval/PointLLM_chat.py new file mode 100644 index 0000000000000000000000000000000000000000..920a271c1fd3c784d69055e681ca7091951d78a8 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/PointLLM_chat.py @@ -0,0 +1,157 @@ +import argparse +from transformers import AutoTokenizer +import torch +import os +from pointllm.conversation import conv_templates, SeparatorStyle +from pointllm.utils import disable_torch_init +from pointllm.model import * +from pointllm.model.utils import KeywordsStoppingCriteria + +from pointllm.data import load_objaverse_point_cloud + +import os + +def load_point_cloud(args): + object_id = args.object_id + print(f"[INFO] Loading point clouds using object_id: {object_id}") + point_cloud = load_objaverse_point_cloud(args.data_path, object_id, pointnum=8192, use_color=True) + + return object_id, torch.from_numpy(point_cloud).unsqueeze_(0).to(torch.float32) + +def init_model(args): + # Model + disable_torch_init() + + model_path = args.model_path + print(f'[INFO] Model name: {model_path}') + + tokenizer = AutoTokenizer.from_pretrained(model_path) + model = PointLLMLlamaForCausalLM.from_pretrained(model_path, low_cpu_mem_usage=False, use_cache=True, torch_dtype=args.torch_dtype).cuda() + model.initialize_tokenizer_point_backbone_config_wo_embedding(tokenizer) + + model.eval() + + mm_use_point_start_end = getattr(model.config, "mm_use_point_start_end", False) + # Add special tokens ind to model.point_config + point_backbone_config = model.get_model().point_backbone_config + + if mm_use_point_start_end: + if "v1" in model_path.lower(): + conv_mode = "vicuna_v1_1" + else: + raise NotImplementedError + + conv = conv_templates[conv_mode].copy() + + stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 + keywords = [stop_str] + + return model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv + +def start_conversation(args, model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv): + point_token_len = point_backbone_config['point_token_len'] + default_point_patch_token = point_backbone_config['default_point_patch_token'] + default_point_start_token = point_backbone_config['default_point_start_token'] + default_point_end_token = point_backbone_config['default_point_end_token'] + # The while loop will keep running until the user decides to quit + print("[INFO] Starting conversation... 
Enter 'q' to exit the program and enter 'exit' to exit the current conversation.") + while True: + print("-" * 80) + # Prompt for object_id + object_id = input("[INFO] Please enter the object_id or 'q' to quit: ") + + # Check if the user wants to quit + if object_id.lower() == 'q': + print("[INFO] Quitting...") + break + else: + # print info + print(f"[INFO] Chatting with object_id: {object_id}.") + + # Update args with new object_id + args.object_id = object_id.strip() + + # Load the point cloud data + try: + id, point_clouds = load_point_cloud(args) + except Exception as e: + print(f"[ERROR] {e}") + continue + point_clouds = point_clouds.cuda().to(args.torch_dtype) + + # Reset the conversation template + conv.reset() + + print("-" * 80) + + # Start a loop for multiple rounds of dialogue + for i in range(100): + # This if-else block ensures the initial question from the user is included in the conversation + qs = input(conv.roles[0] + ': ') + if qs == 'exit': + break + + if i == 0: + if mm_use_point_start_end: + qs = default_point_start_token + default_point_patch_token * point_token_len + default_point_end_token + '\n' + qs + else: + qs = default_point_patch_token * point_token_len + '\n' + qs + + # Append the new message to the conversation history + conv.append_message(conv.roles[0], qs) + conv.append_message(conv.roles[1], None) + prompt = conv.get_prompt() + inputs = tokenizer([prompt]) + + input_ids = torch.as_tensor(inputs.input_ids).cuda() + + stopping_criteria = KeywordsStoppingCriteria(keywords, tokenizer, input_ids) + stop_str = keywords[0] + + with torch.inference_mode(): + output_ids = model.generate( + input_ids, + point_clouds=point_clouds, + do_sample=True, + temperature=1.0, + top_k=50, + max_length=2048, + top_p=0.95, + stopping_criteria=[stopping_criteria]) + + input_token_len = input_ids.shape[1] + n_diff_input_output = (input_ids != output_ids[:, :input_token_len]).sum().item() + if n_diff_input_output > 0: + print(f'[Warning] {n_diff_input_output} output_ids are not the same as the input_ids') + outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True)[0] + outputs = outputs.strip() + if outputs.endswith(stop_str): + outputs = outputs[:-len(stop_str)] + outputs = outputs.strip() + + # Append the model's response to the conversation history + conv.pop_last_none_message() + conv.append_message(conv.roles[1], outputs) + print(f'{conv.roles[1]}: {outputs}\n') + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--model_name", type=str, \ + default="RunsenXu/PointLLM_7B_v1.2") + + parser.add_argument("--data_path", type=str, default="data/objaverse_data") + parser.add_argument("--torch_dtype", type=str, default="float32", choices=["float32", "float16", "bfloat16"]) + + args = parser.parse_args() + + dtype_mapping = { + "float32": torch.float32, + "float16": torch.float16, + "bfloat16": torch.bfloat16, + } + + args.torch_dtype = dtype_mapping[args.torch_dtype] + + model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv = init_model(args) + + start_conversation(args, model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv) \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/eval/chat_gradio.py b/ThirdParty/PointLLM/pointllm/eval/chat_gradio.py new file mode 100644 index 0000000000000000000000000000000000000000..10ab00c4d9a38ff9030b40f0cb27b88b171d58de --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/chat_gradio.py @@ -0,0 +1,394 @@ 
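For reference, a minimal sketch of how the point-cloud loading helper used by the interactive chat script above (and by the Gradio demo that follows) can be called directly. This is illustrative only and not part of the vendored files: the data directory and the object id below are placeholders, while `load_objaverse_point_cloud` and its return layout (xyz, or xyz + rgb when `use_color=True`, already normalized by `pc_norm`) come from `pointllm/data/utils.py` above.

```python
# Illustrative sketch only (not part of the diff). Assumes Objaverse .npy files
# named "<object_id>_8192.npy" sit under a local "data/objaverse_data" folder,
# matching the script defaults above; replace the placeholder id with a real uid.
import torch
from pointllm.data import load_objaverse_point_cloud

pc = load_objaverse_point_cloud("data/objaverse_data", "<objaverse_object_id>",
                                pointnum=8192, use_color=True)  # (N, 6) numpy array

# Batch it the same way PointLLM_chat.py does before passing it to model.generate().
point_clouds = torch.from_numpy(pc).unsqueeze(0).to(torch.float32)  # (1, N, 6)
print(point_clouds.shape)
```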
+import argparse +from transformers import AutoTokenizer +import torch +import os +from pointllm.conversation import conv_templates, SeparatorStyle +from pointllm.utils import disable_torch_init +from pointllm.model import * +from pointllm.model.utils import KeywordsStoppingCriteria +import numpy as np + +from pointllm.data import pc_norm, farthest_point_sample + +import os + +# Additional import for gradio +import gradio as gr +import open3d as o3d +import plotly.graph_objects as go +import objaverse +import time + +import logging + + +def change_input_method(input_method): + if input_method == 'File': + result = [gr.update(visible=True), + gr.update(visible=False)] + elif input_method == 'Object ID': + result = [gr.update(visible=False), + gr.update(visible=True)] + return result + +def init_model(args): + # Model + disable_torch_init() + model_name = os.path.expanduser(args.model_name) + + # * print the model_name (get the basename) + print(f'[INFO] Model name: {os.path.basename(model_name)}') + logging.warning(f'Model name: {os.path.basename(model_name)}') + + tokenizer = AutoTokenizer.from_pretrained(model_name) + model = PointLLMLlamaForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=False, use_cache=True).cuda() + model.initialize_tokenizer_point_backbone_config_wo_embedding(tokenizer) + + model.eval() + + mm_use_point_start_end = getattr(model.config, "mm_use_point_start_end", False) + # Add special tokens ind to model.point_config + point_backbone_config = model.get_model().point_backbone_config + + conv = conv_templates["vicuna_v1_1"].copy() + + stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 + keywords = [stop_str] + + return model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv + +def start_conversation(args, model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv): + point_token_len = point_backbone_config['point_token_len'] + default_point_patch_token = point_backbone_config['default_point_patch_token'] + default_point_start_token = point_backbone_config['default_point_start_token'] + default_point_end_token = point_backbone_config['default_point_end_token'] + + # The while loop will keep running until the user decides to quit + print("[INFO] Starting conversation...") + logging.warning("Starting conversation...") + while True: + print("-" * 80) + logging.warning("-" * 80) + + # Reset the conversation template + conv.reset() + + def confirm_point_cloud(input_choice, object_id_input, point_cloud_input, chatbot, answer_time, conv): + objects = None + data = None + object_id_input = object_id_input.strip() + + print("%" * 80) + logging.warning("%" * 80) + + if input_choice == 'File': + file = point_cloud_input.name + print(f"Uploading file: {file}.") + logging.warning(f"Uploading file: {file}.") + elif input_choice == 'Object ID': + file = os.path.join(args.data_path, "{}_8192.npy".format(object_id_input)) + print(f"Object_id: {object_id_input}") + logging.warning(f"Object_id: {object_id_input}") + + object_uids = [object_id_input] + objects = objaverse.load_objects(uids=object_uids) + print("%" * 80) + logging.warning("%" * 80) + + manual_no_color = "no_color" in file + + try: + if '.ply' in file: + pcd = o3d.io.read_point_cloud(file) + points = np.asarray(pcd.points) # xyz + colors = np.asarray(pcd.colors) # rgb, if available + # * if no colors actually, empty array + if colors.size == 0: + colors = None + elif '.npy' in file: + data = np.load(file) + if data.shape[1] >= 3: + points = data[:, 
:3] + else: + raise ValueError("Input array has the wrong shape. Expected: [N, 3]. Got: {}.".format(data.shape)) + colors = None if data.shape[1] < 6 else data[:, 3:6] + else: + raise ValueError("Not supported data format.") + # error + except Exception as e: + print(f"[ERROR] {e}") + logging.warning(f"[ERROR] {e}") + + chatbot_system_message = "Sorry. The Objaverse id is not supported or the uploaded file has something wrong!" + print(f"[ChatBot System Message]: {chatbot_system_message}") + logging.warning(f"[ChatBot System Message]: {chatbot_system_message}") + + outputs = f"[System] {chatbot_system_message}" # "You upload a new Points Cloud" + chatbot = chatbot + [[None, outputs]] + + return None, None, chatbot, answer_time, None + + if manual_no_color: + colors = None + + if colors is not None: + # * if colors in range(0-1) + if np.max(colors) <= 1: + color_data = np.multiply(colors, 255).astype(int) # Convert float values (0-1) to integers (0-255) + # * if colors in range(0-255) + elif np.max(colors) <= 255: + color_data = colors.astype(int) + else: + color_data = np.zeros_like(points).astype(int) # Default to black color if RGB information is not available + colors = color_data.astype(np.float32) / 255 # model input is (0-1) + + # Convert the RGB color data to a list of RGB strings in the format 'rgb(r, g, b)' + color_strings = ['rgb({},{},{})'.format(r, g, b) for r, g, b in color_data] + + fig = go.Figure( + data=[ + go.Scatter3d( + x=points[:, 0], y=points[:, 1], z=points[:, 2], + mode='markers', + marker=dict( + size=1.2, + color=color_strings, # Use the list of RGB strings for the marker colors + ) + ) + ], + layout=dict( + scene=dict( + xaxis=dict(visible=False), + yaxis=dict(visible=False), + zaxis=dict(visible=False) + ), + paper_bgcolor='rgb(255,255,255)' # Set the background color to dark gray 50, 50, 50 + ), + ) + + points = np.concatenate((points, colors), axis=1) + if 8192 < points.shape[0]: + points = farthest_point_sample(points, 8192) + point_clouds = pc_norm(points) + point_clouds = torch.from_numpy(point_clouds).unsqueeze_(0).to(torch.float32).cuda() + + answer_time = 0 + conv.reset() + + outputs = "[System] New Point Cloud" + chatbot = chatbot + [[None, outputs]] + + return fig, list(objects.values())[0] if objects is not None else None, chatbot, answer_time, point_clouds + + def answer_generate(history, answer_time, point_clouds, conv): + if point_clouds is None: + outputs = "[System] Please input point cloud! 
" + history[-1][1] = outputs + yield history + else: + print(f"Answer Time: {answer_time}") + logging.warning(f"Answer Time: {answer_time}") + input_text = history[-1][0] + qs = input_text + + if answer_time == 0: + if mm_use_point_start_end: + qs = default_point_start_token + default_point_patch_token * point_token_len + default_point_end_token + '\n' + qs + else: + qs = default_point_patch_token * point_token_len + '\n' + qs + + # Append the new message to the conversation history + conv.append_message(conv.roles[0], qs) + conv.append_message(conv.roles[1], None) + prompt = conv.get_prompt() + print("#" * 80) + print(f'{prompt.replace("" * point_token_len, f" * {point_token_len}")}') # for concise printing + print("#" * 80) + + logging.warning("#" * 80) + logging.warning(f'{prompt.replace("" * point_token_len, f" * {point_token_len}")}') # for concise printing + logging.warning("#" * 80) + inputs = tokenizer([prompt]) + + input_ids = torch.as_tensor(inputs.input_ids).cuda() + + stopping_criteria = KeywordsStoppingCriteria(keywords, tokenizer, input_ids) + stop_str = keywords[0] + + try: + if input_ids.shape[1] >= 2047: + raise ValueError("Current context length exceeds the maximum context length (2048) of the model.") + with torch.inference_mode(): + output_ids = model.generate( + input_ids, + point_clouds=point_clouds, + do_sample=True, + temperature=1.0, + top_k=50, + max_length=2048, + top_p=0.95, + stopping_criteria=[stopping_criteria]) + + input_token_len = input_ids.shape[1] + n_diff_input_output = (input_ids != output_ids[:, :input_token_len]).sum().item() + if n_diff_input_output > 0: + print(f'[Warning] {n_diff_input_output} output_ids are not the same as the input_ids') + logging.warning(f'{n_diff_input_output} output_ids are not the same as the input_ids') + outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True)[0] + outputs = outputs.strip() + if outputs.endswith(stop_str): + outputs = outputs[:-len(stop_str)] + outputs = outputs.strip() + + # Append the model's response to the conversation history + conv.pop_last_none_message() + conv.append_message(conv.roles[1], outputs) + print(f'{conv.roles[1]}: {outputs}\n') + logging.warning(f'{conv.roles[1]}: {outputs}\n') + answer_time += 1 + history[-1][1] = "" + for character in outputs: + history[-1][1] += character + yield history + # error + except Exception as e: + print(f"[ERROR] {e}") + logging.warning(f"[ERROR] {e}") + + if input_ids.shape[1] >= 2047: + chatbot_system_message = "Current context length exceeds the maximum context length (2048) of the model. Please press 'Clear' to restart." + else: + chatbot_system_message = "Sorry. There is something wrong when generating. Please check the your uploaded point cloud or the Objaverse id, and \ + confirm the point cloud again." + print(f"[ChatBot System Message]: {chatbot_system_message}") + logging.warning(f"[ChatBot System Message]: {chatbot_system_message}") + + outputs = f"[System] {chatbot_system_message}" # "You upload a new Points Cloud" + history[-1][1] = outputs + yield history + + with gr.Blocks() as demo: + answer_time = gr.State(value=0) + point_clouds = gr.State(value=None) + conv_state = gr.State(value=conv.copy()) + gr.Markdown( + """ + # PointLLM: Empowering Large Language Models to Understand Point Clouds. 🚀 + If you think this demo interesting, please consider starring 🌟 our github repo. 
:) + [[Project Page](https://runsenxu.com/projects/PointLLM)] [[Paper](https://arxiv.org/abs/2308.16911)] [[Code](https://github.com/OpenRobotLab/PointLLM)] + """ + ) + with gr.Row(): + with gr.Column(): + input_choice = gr.Radio(['File', 'Object ID'], value='Object ID', interactive=True, label='Input Method', info="How do you want to load point clouds?") + object_id_input = gr.Textbox(visible = True,lines=1, label='Object ID Input') + point_cloud_input = gr.File(visible = False, label="Upload Point Cloud File (PLY, NPY)") + output = gr.Plot() + btn = gr.Button(value="Confirm Point Cloud") + model3D = gr.Model3D() + with gr.Column(): + chatbot = gr.Chatbot([], elem_id="chatbot", height=560) # ,color_map=("green", "pink") + + def user(user_message, history): + return "", history + [[user_message, None]] + + def clear_conv(history, conv): + conv.reset() + return None, 0 + + with gr.Row(): + text_input = gr.Textbox( + show_label=False, + placeholder="Enter text and press enter", + container=False, + ) + run_button = gr.Button("Send") + + clear = gr.Button("Clear") + text_input.submit(user, [text_input, chatbot], [text_input, chatbot], queue=False).then(answer_generate, [chatbot, answer_time, point_clouds, conv_state], chatbot).then(lambda x : x+1,answer_time, answer_time) + clear.click(clear_conv, inputs=[chatbot, conv_state], outputs=[chatbot, answer_time], queue=False) + + btn.click(confirm_point_cloud, inputs=[input_choice, object_id_input, point_cloud_input, chatbot, answer_time, conv_state], outputs=[output, model3D, chatbot, answer_time, point_clouds]) + + input_choice.change(change_input_method, input_choice, [point_cloud_input, object_id_input]) + run_button.click(user, [text_input, chatbot], [text_input, chatbot], queue=False).then(answer_generate, [chatbot, answer_time, point_clouds, conv_state], chatbot).then(lambda x : x+1, answer_time, answer_time) + + gr.Markdown( + """ + ### Usage: + 1. Upload your point cloud file (ply, npy only) or input the supported [Objaverse object id (uid)](https://drive.google.com/file/d/1gLwA7aHfy1KCrGeXlhICG9rT2387tWY8/view?usp=sharing) (currently 660K objects only, you may try the example object ids below). + 2. If your point cloud file does not contian colors, manually set the file name contains 'no_color' (e.g., 'xxx_no_color.npy'), and the black color will be assigned. + 3. If uploading your own point cloud file with color in npy format, the first three dimensions should be xyz, and the next three dimensions should be rgb. The rgb values should range from **0 to 1**. + 4. Click **Confirm Point Cloud**. + 5. As we use FPS sampling to downsample the point cloud to 8192 points, it may take a long time to confirm the point cloud if the point cloud has too many points. You may use random sampling to downsample the point cloud before uploading. + 6. Once '[System] New Point Cloud' appears in the dialogue box, a new conversation with PointLLM is initialized. + 7. The 'Clear' button will clear the conversation history. 
+ """) + with gr.Accordion("Example Objaverse object ids in the validation set!", open=False): + example_object_ids = [ ["b4bbf2116b1a41a5a3b9d3622b07074c", "0b8da82a3d7a436f9b585436c4b72f56", "650c53d68d374c18886aab91bcf8bb54"], + ["983fa8b23a084f5dacd157e6c9ceba97", "8fe23dd4bf8542b49c3a574b33e377c3", "83cb2a9e9afb47cd9f45461613796645"], + ["3d679a3888c548afb8cf889915af7fd2", "7bcf8626eaca40e592ffd0aed08aa30b", "69865c89fc7344be8ed5c1a54dbddc20"], + ["252f3b3f5cd64698826fc1ab42614677", "e85ebb729b02402bbe3b917e1196f8d3", "97367c4740f64935b7a5e34ae1398035"], + ["fc8dd5a2fc9f4dd19ad6a64a8a6e89e9", "8257772b0e2f408ba269264855dfea00", "d6a3520486bb474f9b5e72eda8408974"], + ["3d10918e6a9a4ad395a7280c022ad2b9", "00002bcb84af4a4781174e62619f14e2", "76ba80230d454de996878c2763fe7e5c"]] + gr.DataFrame( + type="array", + headers=["Example Object IDs"] * 3, + row_count=6, + col_count=3, + value=example_object_ids + ) + gr.Markdown( + """ + #### Terms of use + By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. The service may collect user dialogue data for future research. + """ + ) + demo.queue() + demo.launch(server_name="0.0.0.0", server_port=args.port, share=False) # server_port=7832, share=True + +if __name__ == "__main__": + # ! To release this demo in public, make sure to start in a place where no important data is stored. + # ! Please check 1. the lanuch dir 2. the tmp dir (GRADIO_TEMP_DIR) + # ! refer to https://www.gradio.app/guides/sharing-your-app#security-and-file-access + parser = argparse.ArgumentParser() + parser.add_argument("--model-name", type=str, \ + default="RunsenXu/PointLLM_7B_v1.2") + + + parser.add_argument("--data_path", type=str, default="data/objaverse_data", required=False) + parser.add_argument("--pointnum", type=int, default=8192) + + parser.add_argument("--log_file", type=str, default="serving_workdirs/serving_log.txt") + parser.add_argument("--tmp_dir", type=str, default="serving_workdirs/tmp") + + # For gradio + parser.add_argument("--port", type=int, default=7810) + + args = parser.parse_args() + + # * make serving dirs + os.makedirs(os.path.dirname(args.log_file), exist_ok=True) + os.makedirs(args.tmp_dir, exist_ok=True) + + # * add the current time for log name + args.log_file = args.log_file.replace(".txt", f"_{time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime())}.txt") + + logging.basicConfig( + filename=args.log_file, + level=logging.WARNING, # * default gradio is info, so use warning + format='%(asctime)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + logging.warning("-----New Run-----") + logging.warning(f"args: {args}") + + print("-----New Run-----") + print(f"[INFO] Args: {args}") + + # * set env variable GRADIO_TEMP_DIR to args.tmp_dir + os.environ["GRADIO_TEMP_DIR"] = args.tmp_dir + + model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv = init_model(args) + start_conversation(args, model, tokenizer, point_backbone_config, keywords, mm_use_point_start_end, conv) diff --git a/ThirdParty/PointLLM/pointllm/eval/eval_modelnet_cls.py b/ThirdParty/PointLLM/pointllm/eval/eval_modelnet_cls.py new file mode 100644 index 0000000000000000000000000000000000000000..65752c03a8be53a2269dbc948a96229e0176e6f0 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/eval_modelnet_cls.py @@ 
-0,0 +1,195 @@ +import argparse +import torch +from torch.utils.data import DataLoader +import os +from pointllm.conversation import conv_templates, SeparatorStyle +from pointllm.utils import disable_torch_init +from pointllm.model.utils import KeywordsStoppingCriteria +from pointllm.model import PointLLMLlamaForCausalLM +from pointllm.data import ModelNet +from tqdm import tqdm +from pointllm.eval.evaluator import start_evaluation +from transformers import AutoTokenizer + +import os +import json + +PROMPT_LISTS = [ + "What is this?", + "This is an object of " +] + +def init_model(args): + # Model + disable_torch_init() + model_name = os.path.expanduser(args.model_name) + + # * print the model_name (get the basename) + print(f'[INFO] Model name: {os.path.basename(model_name)}') + + tokenizer = AutoTokenizer.from_pretrained(model_name) + model = PointLLMLlamaForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=False, use_cache=True, torch_dtype=torch.bfloat16).cuda() + model.initialize_tokenizer_point_backbone_config_wo_embedding(tokenizer) + + conv_mode = "vicuna_v1_1" + + conv = conv_templates[conv_mode].copy() + + return model, tokenizer, conv + +def load_dataset(config_path, split, subset_nums, use_color): + print(f"Loading {split} split of ModelNet datasets.") + dataset = ModelNet(config_path=config_path, split=split, subset_nums=subset_nums, use_color=use_color) + print("Done!") + return dataset + +def get_dataloader(dataset, batch_size, shuffle=False, num_workers=4): + assert shuffle is False, "Since we using the index of ModelNet as Object ID when evaluation \ + so shuffle shoudl be False and should always set random seed." + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + return dataloader + +def generate_outputs(model, tokenizer, input_ids, point_clouds, stopping_criteria, do_sample=True, temperature=1.0, top_k=50, max_length=2048, top_p=0.95): + model.eval() + with torch.inference_mode(): + output_ids = model.generate( + input_ids, + point_clouds=point_clouds, + do_sample=do_sample, + temperature=temperature, + top_k=top_k, + max_length=max_length, + top_p=top_p, + stopping_criteria=[stopping_criteria]) # * B, L' + + input_token_len = input_ids.shape[1] + n_diff_input_output = (input_ids != output_ids[:, :input_token_len]).sum().item() + if n_diff_input_output > 0: + print(f'[Warning] {n_diff_input_output} output_ids are not the same as the input_ids') + outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True) + outputs = [output.strip() for output in outputs] + + return outputs + +def start_generation(model, tokenizer, conv, dataloader, prompt_index, output_dir, output_file): + stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 + qs = PROMPT_LISTS[prompt_index] + + results = {"prompt": qs} + + point_backbone_config = model.get_model().point_backbone_config + point_token_len = point_backbone_config['point_token_len'] + default_point_patch_token = point_backbone_config['default_point_patch_token'] + default_point_start_token = point_backbone_config['default_point_start_token'] + default_point_end_token = point_backbone_config['default_point_end_token'] + mm_use_point_start_end = point_backbone_config['mm_use_point_start_end'] + + if mm_use_point_start_end: + qs = default_point_start_token + default_point_patch_token * point_token_len + default_point_end_token + '\n' + qs + else: + qs = default_point_patch_token * point_token_len + '\n' + qs + + 
conv.append_message(conv.roles[0], qs) + conv.append_message(conv.roles[1], None) + + prompt = conv.get_prompt() + inputs = tokenizer([prompt]) + + input_ids_ = torch.as_tensor(inputs.input_ids).cuda() # * tensor of 1, L + + stopping_criteria = KeywordsStoppingCriteria([stop_str], tokenizer, input_ids_) + + responses = [] + + for batch in tqdm(dataloader): + point_clouds = batch["point_clouds"].cuda().to(model.dtype) # * tensor of B, N, C(3) + labels = batch["labels"] + label_names = batch["label_names"] + indice = batch["indice"] + + batchsize = point_clouds.shape[0] + + input_ids = input_ids_.repeat(batchsize, 1) # * tensor of B, L + + outputs = generate_outputs(model, tokenizer, input_ids, point_clouds, stopping_criteria) # List of str, length is B + + # saving results + for index, output, label, label_name in zip(indice, outputs, labels, label_names): + responses.append({ + "object_id": index.item(), + "ground_truth": label.item(), + "model_output": output, + "label_name": label_name + }) + + results["results"] = responses + + os.makedirs(output_dir, exist_ok=True) + # save the results to a JSON file + with open(os.path.join(output_dir, output_file), 'w') as fp: + json.dump(results, fp, indent=2) + + # * print info + print(f"Saved results to {os.path.join(output_dir, output_file)}") + + return results + +def main(args): + # * ouptut + args.output_dir = os.path.join(args.model_name, "evaluation") + + # * output file + args.output_file = f"ModelNet_classification_prompt{args.prompt_index}.json" + args.output_file_path = os.path.join(args.output_dir, args.output_file) + + # * First inferencing, then evaluate + if not os.path.exists(args.output_file_path): + # * need to generate results first + dataset = load_dataset(config_path=None, split=args.split, subset_nums=args.subset_nums, use_color=args.use_color) # * defalut config + dataloader = get_dataloader(dataset, args.batch_size, args.shuffle, args.num_workers) + + model, tokenizer, conv = init_model(args) + + # * ouptut + print(f'[INFO] Start generating results for {args.output_file}.') + results = start_generation(model, tokenizer, conv, dataloader, args.prompt_index, args.output_dir, args.output_file) + + # * release model and tokenizer, and release cuda memory + del model + del tokenizer + torch.cuda.empty_cache() + else: + # * directly load the results + print(f'[INFO] {args.output_file_path} already exists, directly loading...') + with open(args.output_file_path, 'r') as fp: + results = json.load(fp) + + # * evaluation file + evaluated_output_file = args.output_file.replace(".json", f"_evaluated_{args.gpt_type}.json") + # * start evaluation + if args.start_eval: + start_evaluation(results, output_dir=args.output_dir, output_file=evaluated_output_file, eval_type="modelnet-close-set-classification", model_type=args.gpt_type, parallel=True, num_workers=20) + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--model_name", type=str, \ + default="RunsenXu/PointLLM_7B_v1.2") + + # * dataset type + parser.add_argument("--split", type=str, default="test", help="train or test.") + parser.add_argument("--use_color", action="store_true", default=True) + + # * data loader, batch_size, shuffle, num_workers + parser.add_argument("--batch_size", type=int, default=30) + parser.add_argument("--shuffle", type=bool, default=False) + parser.add_argument("--num_workers", type=int, default=20) + parser.add_argument("--subset_nums", type=int, default=-1) # * only use "subset_nums" of samples, mainly for debug + + # * 
evaluation setting + parser.add_argument("--prompt_index", type=int, default=0) + parser.add_argument("--start_eval", action="store_true", default=False) + parser.add_argument("--gpt_type", type=str, default="gpt-3.5-turbo-0613", choices=["gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-4-0613", "gpt-4-1106-preview"], help="Type of the model used to evaluate.") + + args = parser.parse_args() + + main(args) diff --git a/ThirdParty/PointLLM/pointllm/eval/eval_objaverse.py b/ThirdParty/PointLLM/pointllm/eval/eval_objaverse.py new file mode 100644 index 0000000000000000000000000000000000000000..c92b2656220986bbc97181726d04735698636eac --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/eval_objaverse.py @@ -0,0 +1,220 @@ +import argparse +import torch +from torch.utils.data import DataLoader +import os +from pointllm.conversation import conv_templates, SeparatorStyle +from pointllm.utils import disable_torch_init +from pointllm.model import * +from pointllm.model.utils import KeywordsStoppingCriteria +from pointllm.data import ObjectPointCloudDataset +from tqdm import tqdm +from transformers import AutoTokenizer +from pointllm.eval.evaluator import start_evaluation + +import os +import json + +PROMPT_LISTS = [ + "What is this?", + "This is an object of ", + "Caption this 3D model in detail." +] + +def init_model(args): + # Model + disable_torch_init() + model_name = os.path.expanduser(args.model_name) + + # * print the model_name (get the basename) + print(f'[INFO] Model name: {os.path.basename(model_name)}') + + tokenizer = AutoTokenizer.from_pretrained(model_name) + model = PointLLMLlamaForCausalLM.from_pretrained(model_name, low_cpu_mem_usage=False, use_cache=True, torch_dtype=torch.bfloat16).cuda() + model.initialize_tokenizer_point_backbone_config_wo_embedding(tokenizer) + + conv_mode = "vicuna_v1_1" + + conv = conv_templates[conv_mode].copy() + + return model, tokenizer, conv + +def load_dataset(data_path, anno_path, pointnum, conversation_types, use_color): + print("Loading validation datasets.") + dataset = ObjectPointCloudDataset( + data_path=data_path, + anno_path=anno_path, + pointnum=pointnum, + conversation_types=conversation_types, + use_color=use_color, + tokenizer=None # * load point cloud only + ) + print("Done!") + return dataset + +def get_dataloader(dataset, batch_size, shuffle=False, num_workers=4): + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) + return dataloader + +def generate_outputs(model, tokenizer, input_ids, point_clouds, stopping_criteria, do_sample=True, temperature=1.0, top_k=50, max_length=2048, top_p=0.95): + model.eval() + with torch.inference_mode(): + output_ids = model.generate( + input_ids, + point_clouds=point_clouds, + do_sample=do_sample, + temperature=temperature, + top_k=top_k, + max_length=max_length, + top_p=top_p, + stopping_criteria=[stopping_criteria]) # * B, L' + + input_token_len = input_ids.shape[1] + n_diff_input_output = (input_ids != output_ids[:, :input_token_len]).sum().item() + if n_diff_input_output > 0: + print(f'[Warning] {n_diff_input_output} output_ids are not the same as the input_ids') + outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True) + outputs = [output.strip() for output in outputs] + + return outputs + +def start_generation(model, tokenizer, conv, dataloader, annos, prompt_index, output_dir, output_file): + stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 + qs = PROMPT_LISTS[prompt_index] + + results = 
{"prompt": qs} + + point_backbone_config = model.get_model().point_backbone_config + point_token_len = point_backbone_config['point_token_len'] + default_point_patch_token = point_backbone_config['default_point_patch_token'] + default_point_start_token = point_backbone_config['default_point_start_token'] + default_point_end_token = point_backbone_config['default_point_end_token'] + mm_use_point_start_end = point_backbone_config['mm_use_point_start_end'] + + if mm_use_point_start_end: + qs = default_point_start_token + default_point_patch_token * point_token_len + default_point_end_token + '\n' + qs + else: + qs = default_point_patch_token * point_token_len + '\n' + qs + + conv.append_message(conv.roles[0], qs) + conv.append_message(conv.roles[1], None) + + prompt = conv.get_prompt() + inputs = tokenizer([prompt]) + + input_ids_ = torch.as_tensor(inputs.input_ids).cuda() # * tensor of 1, L + + stopping_criteria = KeywordsStoppingCriteria([stop_str], tokenizer, input_ids_) + + responses = [] + + for batch in tqdm(dataloader): + point_clouds = batch["point_clouds"].cuda().to(model.dtype) # * tensor of B, N, C(3) + object_ids = batch["object_ids"] # * list of string + + batchsize = len(object_ids) + + input_ids = input_ids_.repeat(batchsize, 1) # * tensor of B, L + + outputs = generate_outputs(model, tokenizer, input_ids, point_clouds, stopping_criteria) # List of str, length is B + + # saving results + for obj_id, output in zip(object_ids, outputs): + responses.append({ + "object_id": obj_id, + "ground_truth": annos[obj_id], + "model_output": output + }) + + results["results"] = responses + + os.makedirs(output_dir, exist_ok=True) + # save the results to a JSON file + with open(os.path.join(output_dir, output_file), 'w') as fp: + json.dump(results, fp, indent=2) + + # * print info + print(f"Saved results to {os.path.join(output_dir, output_file)}") + + return results + +def main(args): + # * ouptut + args.output_dir = os.path.join(args.model_name, "evaluation") + + # * output file + anno_file = os.path.splitext(os.path.basename(args.anno_path))[0] + args.output_file = f"{anno_file}_Objaverse_{args.task_type}_prompt{args.prompt_index}.json" + args.output_file_path = os.path.join(args.output_dir, args.output_file) + + # * First inferencing, then evaluate + if not os.path.exists(args.output_file_path): + # * need inferencing + # * load annotation files + with open(args.anno_path, 'r') as fp: + annos = json.load(fp) + + dataset = load_dataset(args.data_path, args.anno_path, args.pointnum, ("simple_description",), args.use_color) + dataloader = get_dataloader(dataset, args.batch_size, args.shuffle, args.num_workers) + + model, tokenizer, conv = init_model(args) + + # * convert annos file from [{"object_id": }] to {"object_id": } + annos = {anno["object_id"]: anno["conversations"][1]['value'] for anno in annos} + + print(f'[INFO] Start generating results for {args.output_file}.') + results = start_generation(model, tokenizer, conv, dataloader, annos, args.prompt_index, args.output_dir, args.output_file) + + # * release model and tokenizer, and release cuda memory + del model + del tokenizer + torch.cuda.empty_cache() + else: + # * directly load the results + print(f'[INFO] {args.output_file_path} already exists, directly loading...') + with open(args.output_file_path, 'r') as fp: + results = json.load(fp) + + if args.start_eval: + evaluated_output_file = args.output_file.replace(".json", f"_evaluated_{args.gpt_type}.json") + eval_type_mapping = { + "captioning": "object-captioning", + 
"classification": "open-free-form-classification" + } + start_evaluation(results, output_dir=args.output_dir, output_file=evaluated_output_file, eval_type=eval_type_mapping[args.task_type], model_type=args.gpt_type, parallel=True, num_workers=20) + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--model_name", type=str, \ + default="RunsenXu/PointLLM_7B_v1.2") + + # * dataset type + parser.add_argument("--data_path", type=str, default="data/objaverse_data", required=False) + parser.add_argument("--anno_path", type=str, default="data/anno_data/PointLLM_brief_description_val_200_GT.json", required=False) + parser.add_argument("--pointnum", type=int, default=8192) + parser.add_argument("--use_color", action="store_true", default=True) + + # * data loader, batch_size, shuffle, num_workers + parser.add_argument("--batch_size", type=int, default=6) + parser.add_argument("--shuffle", type=bool, default=False) + parser.add_argument("--num_workers", type=int, default=10) + + # * evaluation setting + parser.add_argument("--prompt_index", type=int, default=0) + parser.add_argument("--start_eval", action="store_true", default=False) + parser.add_argument("--gpt_type", type=str, default="gpt-4-0613", choices=["gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-4-0613", "gpt-4-1106-preview"], help="Type of the model used to evaluate.") + parser.add_argument("--task_type", type=str, default="captioning", choices=["captioning", "classification"], help="Type of the task to evaluate.") + + args = parser.parse_args() + + # * check prompt index + # * * classification: 0, 1 and captioning: 2. Raise Warning otherwise. + if args.task_type == "classification": + if args.prompt_index != 0 and args.prompt_index != 1: + print("[Warning] For classification task, prompt_index should be 0 or 1.") + elif args.task_type == "captioning": + if args.prompt_index != 2: + print("[Warning] For captioning task, prompt_index should be 2.") + else: + raise NotImplementedError + + main(args) \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/eval/evaluator.py b/ThirdParty/PointLLM/pointllm/eval/evaluator.py new file mode 100644 index 0000000000000000000000000000000000000000..5a723589ba605e0eb823e2fe77b0777bfb17f5e8 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/evaluator.py @@ -0,0 +1,843 @@ +import argparse +import json +import os +from utils import OpenAIGPT +from tqdm import tqdm +from multiprocessing import Pool +import random +random.seed(0) +import re + +gpt4_open_free_from_cls_prompt = """Analyze two sentences and determine if they're referring to the same general object or concept, focusing on the type of object, not attributes such as color, size, or shape. Respond with 'T' if they refer to the same thing and 'F' if not. Also, provide a brief rationale (no more than 20 words) for your judgment. +Example: +Input: 1. Spiral staircase that goes from a ground floor. 2. This is a 3D model of wooden stairs in light brown +Output: T#Both refer to a staircase. + +Now, analyze the following: +Input: 1. {ground_truth} 2. {model_output} +Output: """ # * about 230 input tokens + +chatgpt_close_set_cls_prompt = """Given the following free-form description of a 3D object, please determine the most probable class index from the following 40 available categories, even if the description doesn't clearly refer to any one of them. Make your best-educated guess based on the information provided. If the description already contains a valid index, then the index should be selected. 
If it contains more than one valid index, then randomly select one index (specify your reason). If there is no valid index and it cannot be inferred from the information, return '-1#NA#Cannot infer'. +Categories: +{candidate_lists} +Reply with the format of 'index#class#short reason (no more than 10 words)'. + +Examples: +Input: This is a 3D object model of a cartoon white truck. +Output: 7#car#Closest match to 'car' in categories. + +Input: A green leaf in a flower pot. +Output: 26#plant#The primary subject 'leaf' directly indicates a plant. + +Input: It's difficult to determine the exact type of this object due to insufficient details. But it seems to be like a piece of furniture. +Output: 33#table#Randomly select one kind of furniture from the list. + +Input: I cannot determine the specific type of the object without additional information or context. +Output: -1#NA#Cannot infer. + +Now analyze the following: +Input: """ + +gpt4_object_captioning_prompt = """Evaluate a model-generated caption against a human-generated caption (ground truth) for a 3D model. Identify the aspects mentioned in the human caption and calculate the percentage of these aspects correctly mentioned or partially matched in the model caption. Score from 0 to 100, where each aspect contributes equally to the score. Consider similar concepts for partial score. + +Provide your score (0-100) and a short justification (less than 15 words) in the format of 'score#reason' + +Example: +Human: A white brown skeleton +Model: This is a 3D model of a small, cartoon-like robot. It has a spherical body and is covered in a layer of white dust. +Output: 50#mention white; skeleton and robot have similar appearence. + +Now score the following: +Human: {ground_truth} +Model: {model_output} +Output: """ + +chatgpt_object_captioning_prompt = gpt4_object_captioning_prompt +chatgpt_open_free_from_cls_prompt = gpt4_open_free_from_cls_prompt +gpt4_close_set_cls_prompt = chatgpt_close_set_cls_prompt + +GPT_PRICES = { + # * check https://openai.com/pricing for updated price + "gpt-3.5-turbo-0613": { + "price_1k_prompt_tokens": 0.0015, + "price_1k_completion_tokens": 0.002 + }, + "gpt-3.5-turbo-1106": { + "price_1k_prompt_tokens": 0.0010, + "price_1k_completion_tokens": 0.002 + }, + "gpt-4-0613":{ + "price_1k_prompt_tokens": 0.03, + "price_1k_completion_tokens": 0.06 + }, + "gpt-4-1106-preview":{ + "price_1k_prompt_tokens": 0.01, + "price_1k_completion_tokens": 0.03 + } +} + +class OpenAIOpenFreeFormClsEvaluator(): + def __init__(self, inputs, output_dir, output_file, model_type="gpt-4-0613"): + """ + Args: + inputs: A dictionary containing the results of the evaluation. It contains two keys: "results" and "prompt". 
+ "prompt": str + "results": [ + { + "object_id": str, + "model_output": str, + "ground_truth": str + } + ] + """ + print("-" * 80) + print("Initializing OpenAIEvaluator...") + self.results = inputs['results']# * contains two keys: "results" and "prompt" + self.inference_prompt = inputs['prompt'] # * used to prompt PointLLM + self.correct_predictions = 0 + self.total_predictions = 0 + self.invalid_responses = 0 + self.response_data = [] # to save all the response data by openaigpt + self.model_type = model_type + self.check_model_type() + + self.prompt_tokens = 0 + self.completion_tokens = 0 + + self.default_chat_parameters = { + "model": model_type, + "temperature": 1, + "top_p": 1, + "max_tokens": 2048 + } + + # * price + self.price_1k_prompt_tokens = GPT_PRICES[model_type]["price_1k_prompt_tokens"] + self.price_1k_completion_tokens = GPT_PRICES[model_type]["price_1k_completion_tokens"] + + print(f"OpenAIGPT config: ") + print(self.default_chat_parameters) + + self.openaigpt = OpenAIGPT(**self.default_chat_parameters) + self.gpt_prompt = chatgpt_open_free_from_cls_prompt if "gpt-3.5" in model_type else gpt4_open_free_from_cls_prompt + self.output_dir = output_dir + self.output_file = output_file + self.temp_output_file = self.output_file.replace(".json", "_processed_temp.json") + + def check_model_type(self): + # * warning if not using gpt-4, recommend using gpt-4 for this task + if "gpt-4" not in self.model_type: + print(f"[WARNING] You are using {self.model_type} for evaluation. We recommend using gpt-4 for this task.") + + def resume_processing(self): + processed_results_path = os.path.join(self.output_dir, self.temp_output_file) + if os.path.exists(processed_results_path): + print("-" * 80) + # * print resuming + print(f"Resuming processing...") + print(f"Loading processed results from {processed_results_path}...") + with open(processed_results_path, "r") as f: + saved_results = json.load(f) + self.correct_predictions = saved_results["correct_predictions"] + self.total_predictions = saved_results["total_predictions"] + self.invalid_responses = saved_results["invalid_responses"] + self.response_data = saved_results["results"] + self.prompt_tokens = saved_results["prompt_tokens"] + self.completion_tokens = saved_results["completion_tokens"] + + print(f"Processed results: {len(self.response_data)}") + # * print the length of all the data + print(f"Total results: {len(self.results)}") + + # * remove processed data + processed_ids = [d['object_id'] for d in self.response_data] + self.results = [r for r in self.results if r['object_id'] not in processed_ids] + + print(f"Remaining results: {len(self.results)}") + + def remove_temp_file(self): + processed_results_path = os.path.join(self.output_dir, self.temp_output_file) + if os.path.exists(processed_results_path): + os.remove(processed_results_path) + print("-" * 80) + print(f"Removed Temporary file {processed_results_path}") + + def parse_gpt_response_evaluate(self, gpt_response): + gpt_response = gpt_response.strip() + + cls_result = gpt_response[0].upper() + reason = gpt_response[2:] if len(gpt_response) > 2 else "" + + if cls_result not in ['T', 'F']: + self.invalid_responses += 1 + return 0, "INVALID", gpt_response + + accuracy = 1 if cls_result == 'T' else 0 + + return accuracy, cls_result, reason + + def evaluate_result(self, result): + object_id = result['object_id'] + ground_truth = result['ground_truth'] + model_output = result['model_output'] + messages = [{"role": "user", "content": 
self.gpt_prompt.format(ground_truth=ground_truth, model_output=model_output)}] + + gpt_response = self.openaigpt.safe_chat_complete(messages, content_only=False) + + prompt_tokens = gpt_response['usage']['prompt_tokens'] + completion_tokens = gpt_response['usage']['completion_tokens'] + + gpt_response = gpt_response['choices'][0]["message"]['content'] + + + accuracy, cls_result, reason = self.parse_gpt_response_evaluate(gpt_response) # return 0, "INVALID", gpt_response if not valid + + return object_id, model_output, ground_truth, accuracy, cls_result, reason, prompt_tokens, completion_tokens + + def evaluate(self): + + self.resume_processing() + + print('-' * 80) + print("Starting single-thread evaluation...") + results = self.results + + try: + for result in tqdm(results): + object_id, model_output, ground_truth, accuracy, cls_result, reason, prompt_tokens, completion_tokens = self.evaluate_result(result) + self.correct_predictions += accuracy + self.total_predictions += 1 + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'model_output': model_output, + 'gpt_cls_result': cls_result, + 'gpt_reason': reason + }) + + print("Evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. Saving processed results to temporary file...") + self.save_results(is_temp=True) + exit() + + def parallel_evaluate(self, num_workers=20): + + self.resume_processing() + + print('-' * 80) + print("Starting parallel evaluation...") + results = self.results + + try: + with Pool(num_workers) as pool: + with tqdm(total=len(results)) as pbar: # create a progress bar + for object_id, model_output, ground_truth, accuracy, cls_result, reason, prompt_tokens, completion_tokens in pool.imap_unordered(self.evaluate_result, results): + self.correct_predictions += accuracy + self.total_predictions += 1 + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + if cls_result == 'INVALID': + self.invalid_responses += 1 + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'model_output': model_output, + 'gpt_cls_result': cls_result, + 'gpt_reason': reason + }) + + pbar.update() # update the progress bar + + print("Parallel evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. 
Saving processed results to temporary file...") + self.save_results(is_temp=True) + exit() + + def save_results(self, is_temp=False): + if is_temp: + output_path = os.path.join(self.output_dir, self.temp_output_file) + else: + output_path = os.path.join(self.output_dir, self.output_file) + if self.total_predictions - self.invalid_responses == 0: + accuracy = 0 # * no results and get error + else: + accuracy = self.correct_predictions / (self.total_predictions - self.invalid_responses) * 100 + with open(output_path, 'w') as f: + results_to_save = { + 'inference_prompt': self.inference_prompt, + 'prompt': self.gpt_prompt, + 'accuracy': f"{accuracy:.2f}%", + 'total_predictions': self.total_predictions, + 'correct_predictions': self.correct_predictions, + 'invalid_responses': self.invalid_responses, + 'prompt_tokens': self.prompt_tokens, + 'completion_tokens': self.completion_tokens, + 'GPT_cost': self.get_costs(), + 'results': self.response_data, + } + json.dump(results_to_save, f, indent=2) + + print(f"Results saved to {output_path}") + # * print the length of saved results + print(f"Saved {len(self.response_data)} results in total.") + + def print_results(self): + print('-' * 80) + if self.total_predictions - self.invalid_responses == 0: + accuracy = 0 # * no results and get error + else: + accuracy = self.correct_predictions / (self.total_predictions - self.invalid_responses) * 100 + print("Results:") + print(f"Accuracy: {accuracy:.2f}%") + print(f"Total Predictions: {self.total_predictions}") + print(f"Correct Predictions: {self.correct_predictions}") + print(f"Invalid Responses: {self.invalid_responses}") + self.print_costs() + + def print_costs(self): + print(f"Prompt Tokens Price: {self.prompt_tokens * self.price_1k_prompt_tokens / 1000:.2f} USD") + print(f"Completion Tokens Price: {self.completion_tokens * self.price_1k_completion_tokens / 1000:.2f} USD") + + def get_costs(self): + return self.prompt_tokens * self.price_1k_prompt_tokens / 1000 + self.completion_tokens * self.price_1k_completion_tokens / 1000 + + +class OpenAICloseSetClsEvaluator(OpenAIOpenFreeFormClsEvaluator): + def __init__(self, inputs, output_dir, output_file, model_type="gpt-3.5-turbo-0613"): + super().__init__(inputs, output_dir, output_file, model_type) + self.gpt_prompt = chatgpt_close_set_cls_prompt if "gpt-3.5" in model_type else gpt4_close_set_cls_prompt + + self.invalid_correct_predictions = 0 # * random choice and correct coincidently + + # * import category names + try: + # * load a txt files of category names + catfile = os.path.join(os.path.dirname(__file__), '../data/modelnet_config/modelnet40_shape_names_modified.txt') # * i.e. pointllm/data/modelnet_config/modelnet40_shape_names_modified.txt + self.candidate_lists_names = [line.strip() for line in open(catfile)] # * list of category names + except: + print(f"Current categories file is {catfile}. 
Need to move the category file to pointllm/eval/configs/.") + + # * make the prompt + candidate_lists = [f'{i}: {cat}' for i, cat in enumerate(self.candidate_lists_names)] + self.num_categories = len(candidate_lists) + self.candidate_lists = '\n'.join(candidate_lists) + self.gpt_prompt = self.gpt_prompt.format(num_categories=self.num_categories, candidate_lists=self.candidate_lists) + "{model_output}\nOutput: " + + def check_model_type(self): + # * no need to check for this task + return + + def resume_processing(self): + processed_results_path = os.path.join(self.output_dir, self.temp_output_file) + if os.path.exists(processed_results_path): + print("-" * 80) + # * print resuming + print(f"Resuming processing...") + print(f"Loading processed results from {processed_results_path}...") + with open(processed_results_path, "r") as f: + saved_results = json.load(f) + self.correct_predictions = saved_results["correct_predictions"] + self.total_predictions = saved_results["total_predictions"] + self.invalid_responses = saved_results["invalid_responses"] + self.invalid_correct_predictions = saved_results["invalid_correct_predictions"] + self.response_data = saved_results["results"] + self.prompt_tokens = saved_results["prompt_tokens"] + self.completion_tokens = saved_results["completion_tokens"] + + print(f"Processed results: {len(self.response_data)}") + # * print the length of all the data + print(f"Total results: {len(self.results)}") + + # * remove processed data + processed_ids = [d['object_id'] for d in self.response_data] + self.results = [r for r in self.results if r['object_id'] not in processed_ids] + + print(f"Remaining results: {len(self.results)}") + + def parse_gpt_response_evaluate(self, gpt_response, ground_truth): + """ + Argument: + gpt_response: str, index#label#short_reason + groud_truth: int + """ + + # * use regular expression to extract + pattern = r'(\d+#[^#]*#.*$)' + match = re.search(pattern, gpt_response) + + gpt_response = match.group(1) if match else gpt_response + + gpt_response = gpt_response.strip() + gpt_response_list = gpt_response.split('#') + + cls_result = gpt_response_list[0] + cls_label = gpt_response_list[1] if len(gpt_response_list) > 1 else "" + reason = gpt_response_list[2] if len(gpt_response_list) > 2 else "" + + try: + # * convert to int + cls_result = int(cls_result) + if cls_result not in range(self.num_categories) or cls_label == "NA": + # * not valid range + cls_result = -1 + except ValueError: + print(f"Error: unale to parse {gpt_response}.") + cls_result = -1 + + if cls_result == -1: + # * random choose one index from 0 to self.num_categories + cls_result = random.choice(range(self.num_categories)) + cls_label = "INVALID" + reason = gpt_response + + self.invalid_responses += 1 + + accuracy = 1 if cls_result == ground_truth else 0 + + return accuracy, cls_result, cls_label, reason + + def evaluate_result(self, result): + object_id = result.get('object_id', -1) + ground_truth = result['ground_truth'] + ground_truth_label = result['label_name'] + model_output = result['model_output'] + + messages = [{"role": "user", "content": self.gpt_prompt.format(model_output=model_output)}] + + gpt_response = self.openaigpt.safe_chat_complete(messages, content_only=False) + + prompt_tokens = gpt_response['usage']['prompt_tokens'] + completion_tokens = gpt_response['usage']['completion_tokens'] + + gpt_response = gpt_response['choices'][0]["message"]['content'] + + accuracy, cls_result, cls_label, reason = self.parse_gpt_response_evaluate(gpt_response, 
ground_truth) # return 0, "INVALID", gpt_response if not valid + + return object_id, model_output, ground_truth, accuracy, cls_result, cls_label, reason, ground_truth_label, prompt_tokens, completion_tokens + + def evaluate(self): + + self.resume_processing() + + print('-' * 80) + print("Starting single-thread evaluation...") + results = self.results + + try: + for result in tqdm(results): + object_id, model_output, ground_truth, accuracy, cls_result, cls_label, reason, ground_truth_label, prompt_tokens, completion_tokens = self.evaluate_result(result) + self.correct_predictions += accuracy + self.total_predictions += 1 + + if cls_label == "INVALID": + self.invalid_correct_predictions += accuracy + self.invalid_responses += 1 + + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'gpt_cls_result': cls_result, + 'ground_truth_label': ground_truth_label, + 'gpt_cls_label': cls_label, + 'model_output': model_output, + 'gpt_reason': reason, + 'prompt_tokens': prompt_tokens, + 'completion_tokens': completion_tokens + }) + + print("Evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. Saving processed results to temporary file...") + print(f"Current sample is {result}.") + self.save_results(is_temp=True) + exit() + + def parallel_evaluate(self, num_workers=20): + + self.resume_processing() + + print('-' * 80) + print("Starting parallel evaluation...") + results = self.results + + try: + with Pool(num_workers) as pool: + with tqdm(total=len(results)) as pbar: # create a progress bar + for object_id, model_output, ground_truth, accuracy, cls_result, cls_label, reason, ground_truth_label, prompt_tokens, completion_tokens in pool.imap_unordered(self.evaluate_result, results): + self.correct_predictions += accuracy + self.total_predictions += 1 + + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + if cls_label == "INVALID": + self.invalid_correct_predictions += accuracy + self.invalid_responses += 1 + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'gpt_cls_result': cls_result, + 'ground_truth_label': ground_truth_label, + 'gpt_cls_label': cls_label, + 'model_output': model_output, + 'gpt_reason': reason, + 'prompt_tokens': prompt_tokens, + 'completion_tokens': completion_tokens + }) + + pbar.update() # update the progress bar + + print("Parallel evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. 
Saving processed results to temporary file...") + self.save_results(is_temp=True) + exit() + + def save_results(self, is_temp=False): + if is_temp: + output_path = os.path.join(self.output_dir, self.temp_output_file) + else: + output_path = os.path.join(self.output_dir, self.output_file) + if self.total_predictions - self.invalid_responses == 0: + accuracy = 0 # * no results and get error + clean_accuracy = 0 + else: + accuracy = self.correct_predictions / self.total_predictions * 100 + clean_accuracy = (self.correct_predictions - self.invalid_correct_predictions) / (self.total_predictions - self.invalid_responses) * 100 + with open(output_path, 'w') as f: + results_to_save = { + 'inference_prompt': self.inference_prompt, + 'prompt': self.gpt_prompt, + 'accuracy': f"{accuracy:.2f}%", + 'clean_accuracy': f"{clean_accuracy:.2f}%", + 'total_predictions': self.total_predictions, + 'correct_predictions': self.correct_predictions, + 'invalid_correct_predictions': self.invalid_correct_predictions, + 'invalid_responses': self.invalid_responses, + 'prompt_tokens': self.prompt_tokens, + 'completion_tokens': self.completion_tokens, + 'GPT_cost': self.get_costs(), + 'results': self.response_data, + } + json.dump(results_to_save, f, indent=2) + + print(f"Results saved to {output_path}") + # * print the length of saved results + print(f"Saved {len(self.response_data)} results in total.") + + def print_results(self): + print('-' * 80) + if self.total_predictions - self.invalid_responses == 0: + accuracy = 0 # * no results and get error + else: + accuracy = self.correct_predictions / self.total_predictions * 100 + clean_accuracy = (self.correct_predictions - self.invalid_correct_predictions) / (self.total_predictions - self.invalid_responses) * 100 + accuracy = self.correct_predictions / self.total_predictions * 100 + print("Results:") + print(f"Accuracy: {accuracy:.2f}%") + print(f"Clean Accuracy: {clean_accuracy:.2f}%",) + print(f"Total Predictions: {self.total_predictions}") + print(f"Correct Predictions: {self.correct_predictions}") + print(f"Invalid Correct Predictions: {self.invalid_correct_predictions}") + print(f"Invalid Responses: {self.invalid_responses}") + print(f"Prompt Tokens: {self.prompt_tokens}") + print(f"Completion Tokens: {self.completion_tokens}") + + self.print_costs() + +class OpenAIObjectCaptioningEvaluator(OpenAIOpenFreeFormClsEvaluator): + def __init__(self, inputs, output_dir, output_file, model_type="gpt-4-0613"): + super().__init__(inputs, output_dir, output_file, model_type) + self.gpt_prompt = chatgpt_object_captioning_prompt if "gpt-3.5" in model_type else gpt4_object_captioning_prompt + + self.total_scores = 0 + + def resume_processing(self): + processed_results_path = os.path.join(self.output_dir, self.temp_output_file) + if os.path.exists(processed_results_path): + print("-" * 80) + # * print resuming + print(f"Resuming processing...") + print(f"Loading processed results from {processed_results_path}...") + with open(processed_results_path, "r") as f: + saved_results = json.load(f) + self.total_scores = float(saved_results["total_score"]) + + self.total_predictions = saved_results["total_predictions"] + self.invalid_responses = saved_results["invalid_responses"] + self.response_data = saved_results["results"] + self.prompt_tokens = saved_results["prompt_tokens"] + self.completion_tokens = saved_results["completion_tokens"] + + print(f"Processed results: {len(self.response_data)}") + # * print the length of all the data + print(f"Total results: {len(self.results)}") + + # 
* remove processed data + processed_ids = [d['object_id'] for d in self.response_data] + self.results = [r for r in self.results if r['object_id'] not in processed_ids] + + print(f"Remaining results: {len(self.results)}") + + def parse_gpt_response_evaluate(self, gpt_response, ground_truth): + """ + Argument: + gpt_response: str, index#label#short_reason + groud_truth: int + """ + + # * use regular expression to extract + pattern = r'(\d*#.*)' + match = re.search(pattern, gpt_response) + + gpt_response = match.group(1) if match else gpt_response + + gpt_response = gpt_response.strip() + gpt_response_list = gpt_response.split('#') + + gpt_score = gpt_response_list[0] + reason = gpt_response_list[1] if len(gpt_response_list) > 1 else "" + + try: + # * convert to int + gpt_score = int(gpt_score) + if gpt_score not in range(101): # * in 0-100 + # * not valid range + gpt_score = -1 + except ValueError: + print(f"Error: unale to parse {gpt_response}.") + gpt_score = -1 + + if gpt_score == -1: + reason = gpt_response + + return gpt_score, reason + + def evaluate_result(self, result): + object_id = result.get('object_id', -1) + ground_truth = result['ground_truth'] + model_output = result['model_output'] + + messages = [{"role": "user", "content": self.gpt_prompt.format(ground_truth=ground_truth, model_output=model_output)}] + + gpt_response = self.openaigpt.safe_chat_complete(messages, content_only=False) + + prompt_tokens = gpt_response['usage']['prompt_tokens'] + completion_tokens = gpt_response['usage']['completion_tokens'] + + gpt_response = gpt_response['choices'][0]["message"]['content'] + + gpt_score, reason = self.parse_gpt_response_evaluate(gpt_response, ground_truth) # return 0, "INVALID", gpt_response if not valid + + return object_id, model_output, ground_truth, gpt_score, reason, prompt_tokens, completion_tokens + + def evaluate(self): + + self.resume_processing() + + print('-' * 80) + print("Starting single-thread evaluation...") + results = self.results + + try: + for result in tqdm(results): + object_id, model_output, ground_truth, gpt_score, reason, prompt_tokens, completion_tokens = self.evaluate_result(result) + + self.total_scores += gpt_score if gpt_score != -1 else 0 + self.total_predictions += 1 + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + if gpt_score == -1: + self.invalid_responses += 1 + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'model_output': model_output, + "gpt_score": gpt_score, + 'gpt_reason': reason + }) + + print("Evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. 
Saving processed results to temporary file...") + self.save_results(is_temp=True) + exit() + + def parallel_evaluate(self, num_workers=20): + + self.resume_processing() + + print('-' * 80) + print("Starting parallel evaluation...") + results = self.results + + try: + with Pool(num_workers) as pool: + with tqdm(total=len(results)) as pbar: # create a progress bar + for object_id, model_output, ground_truth, gpt_score, reason, prompt_tokens, completion_tokens in pool.imap_unordered(self.evaluate_result, results): + self.total_scores += gpt_score if gpt_score != -1 else 0 + self.total_predictions += 1 + self.prompt_tokens += prompt_tokens + self.completion_tokens += completion_tokens + + if gpt_score == -1: + self.invalid_responses += 1 + + # save the object_id, model_output, ground_truth, gpt_cls_result and gpt_reason for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'model_output': model_output, + "gpt_score": gpt_score, + 'gpt_reason': reason + }) + + pbar.update() # update the progress bar + + print("Parallel evaluation finished.") + + self.save_results() + self.print_results() + self.remove_temp_file() + + except (Exception, KeyboardInterrupt) as e: + print(f"Error {e} occurred during parallel evaluation. Saving processed results to temporary file...") + self.save_results(is_temp=True) + exit() + + def save_results(self, is_temp=False): + if is_temp: + output_path = os.path.join(self.output_dir, self.temp_output_file) + else: + output_path = os.path.join(self.output_dir, self.output_file) + if self.total_predictions - self.invalid_responses == 0: + average_score = 0 # * no results and get error + else: + average_score = self.total_scores / (self.total_predictions - self.invalid_responses) + with open(output_path, 'w') as f: + results_to_save = { + 'inference_prompt': self.inference_prompt, + 'gpt_prompt': self.gpt_prompt, + 'average_score': f"{average_score:.2f}", + 'total_score': f"{self.total_scores:.2f}", + 'total_predictions': self.total_predictions, + 'invalid_responses': self.invalid_responses, + 'prompt_tokens': self.prompt_tokens, + 'completion_tokens': self.completion_tokens, + 'GPT_cost': self.get_costs(), + 'results': self.response_data, + } + json.dump(results_to_save, f, indent=2) + + print(f"Results saved to {output_path}") + # * print the length of saved results + print(f"Saved {len(self.response_data)} results in total.") + + def print_results(self): + print('-' * 80) + if self.total_predictions - self.invalid_responses == 0: + average_score = 0 # * no results and get error + else: + average_score = self.total_scores / (self.total_predictions - self.invalid_responses) + print("Results:") + print(f"Average Score: {average_score:.2f}") + print(f"Total Predictions: {self.total_predictions}") + print(f"Invalid Responses: {self.invalid_responses}") + print(f"Prompt Tokens: {self.prompt_tokens}") + print(f"Completion Tokens: {self.completion_tokens}") + + self.print_costs() + + +def start_evaluation(results, output_dir, output_file, eval_type="open-free-form-classification", model_type="gpt-3.5-turbo-0613", + parallel=True, num_workers=20): + """ + Args: + results: dict or file path to the json file containing the dict + output_file: the path the final evaluation results to be saved. 
+ """ + if isinstance(results, str): + with open(results, 'r') as fp: + results = json.load(fp) + + if eval_type == "open-free-form-classification": + evaluator = OpenAIOpenFreeFormClsEvaluator(results, output_dir, output_file, model_type=model_type) + elif eval_type == "modelnet-close-set-classification": + evaluator = OpenAICloseSetClsEvaluator(results, output_dir, output_file, model_type=model_type) + elif eval_type == "object-captioning": + evaluator = OpenAIObjectCaptioningEvaluator(results, output_dir, output_file, model_type=model_type) + else: + raise NotImplementedError(f"eval_type {eval_type} not supported.") + + if parallel: + evaluator.parallel_evaluate(num_workers=num_workers) + else: + evaluator.evaluate() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + + parser.add_argument("--results_path", type=str, \ + default="", help="Path to the results file.") + parser.add_argument("--output_dir", type=str, default=None, help="Path to the output directory.") + parser.add_argument("--model_type", type=str, default="gpt-4-0613", choices=["gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-4-0613", "gpt-4-1106-preview"], help="Type of the model used to evaluate.") + parser.add_argument("--parallel", default=True, action="store_true", help="Whether to use parallel evaluation.") + parser.add_argument("--num_workers", type=int, default=15, help="Number of workers to use for parallel evaluation.") + parser.add_argument("--eval_type", type=str, choices=["modelnet-close-set-classification", "open-free-form-classification", "object-captioning"], default="object-captioning") + + args = parser.parse_args() + + if args.output_dir is None: + args.output_dir = os.path.dirname(args.results_path) + + output_file = os.path.basename(args.results_path).replace(".json", f"_evaluated_{args.model_type}.json") + + # if exists, then exit + if os.path.exists(os.path.join(args.output_dir, output_file)): + print(f"[INFO] Evaulated results already exists in {os.path.join(args.output_dir, output_file)}.") + exit() + + start_evaluation(results=args.results_path, output_dir=args.output_dir, output_file=output_file, eval_type=args.eval_type, model_type=args.model_type, + parallel=args.parallel, num_workers=args.num_workers) + \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/eval/traditional_evaluator.py b/ThirdParty/PointLLM/pointllm/eval/traditional_evaluator.py new file mode 100644 index 0000000000000000000000000000000000000000..29a4c337b251fac5016bb49f9593b12ec2c7ff95 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/traditional_evaluator.py @@ -0,0 +1,179 @@ +import argparse +import json +import os +import random +random.seed(0) + +import nltk +nltk.download('wordnet') +from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction +from nltk.translate.meteor_score import meteor_score +from rouge import Rouge +from sentence_transformers import SentenceTransformer, util +from scipy.spatial.distance import cosine +from transformers import AutoModel, AutoTokenizer +import torch + + +import numpy as np +from tqdm import tqdm + +class TraditionalMetricEvaluator(): + def __init__(self, inputs, output_dir, output_file): + self.results = inputs['results'] + self.inference_prompt = inputs['prompt'] + self.output_dir = output_dir + self.output_file = output_file + self.rouge = Rouge() + self.response_data = [] + + self.ground_truths = [] + self.generated_captions = [] + + self.sbert_model = SentenceTransformer('all-mpnet-base-v2') + + self.simcse_tokenizer = 
AutoTokenizer.from_pretrained("princeton-nlp/sup-simcse-roberta-large") + self.simcse_model = AutoModel.from_pretrained("princeton-nlp/sup-simcse-roberta-large") + + self.scores = { + 'bleu-1': [], + 'bleu-2': [], + 'bleu-3': [], + 'bleu-4': [], + 'rouge-1': [], + 'rouge-2': [], + 'rouge-l': [], + 'meteor': [], + 'sbert_similarity': [], + 'simcse_similarity': [] + } + + def evaluate_result(self, result): + object_id = result['object_id'] + ground_truth = result['ground_truth'] + model_output = result['model_output'] + + if model_output == "": + # * all score should be 0 + model_output = "##" + + # create a SmoothingFunction object + smoothing_function = SmoothingFunction().method1 # * used to deal with non-overlap n-gram + + # calculate BLEU-1 score with smoothing function + bleu_1_score = sentence_bleu([ground_truth.split()], model_output.split(), weights=(1, 0, 0, 0), smoothing_function=smoothing_function) + + # calculate BLEU-2, BLEU-3, and BLEU-4 scores + bleu_2_score = sentence_bleu([ground_truth.split()], model_output.split(), weights=(0.5, 0.5, 0, 0), smoothing_function=smoothing_function) + bleu_3_score = sentence_bleu([ground_truth.split()], model_output.split(), weights=(0.33, 0.33, 0.33, 0), smoothing_function=smoothing_function) + bleu_4_score = sentence_bleu([ground_truth.split()], model_output.split(), weights=(0.25, 0.25, 0.25, 0.25), smoothing_function=smoothing_function) + + # calculate ROUGE-L score + rouge_scores_l = self.rouge.get_scores(model_output, ground_truth)[0]['rouge-l'] + rouge_scores_1 = self.rouge.get_scores(model_output, ground_truth)[0]['rouge-1'] + rouge_scores_2 = self.rouge.get_scores(model_output, ground_truth)[0]['rouge-2'] + + # calculate METEOR score + meteor_scores = meteor_score([ground_truth.split()], model_output.split()) + + # Calculate SBERT similarity + embeddings = self.sbert_model.encode([ground_truth, model_output]) + sbert_similarity = util.cos_sim(embeddings[0], embeddings[1])[0][0].item() + + # calculate SimCSE similarity + # Tokenize input texts + inputs = self.simcse_tokenizer([ground_truth, model_output], padding=True, truncation=True, return_tensors="pt") + + # Get the embeddings + with torch.no_grad(): + embeddings = self.simcse_model(**inputs, output_hidden_states=True, return_dict=True).pooler_output + + # Calculate cosine similarity + simcse_similarity = 1 - cosine(embeddings[0], embeddings[1]) # * consine actually calculates consine distance, which is 1 - consine similarity + + scores = { + 'bleu-1': bleu_1_score * 100, + 'bleu-2': bleu_2_score * 100, + 'bleu-3': bleu_3_score * 100, + 'bleu-4': bleu_4_score * 100, + 'rouge-l': rouge_scores_l['f'] * 100, + 'rouge-1': rouge_scores_1['f'] * 100, + 'rouge-2': rouge_scores_2['f'] * 100, + 'meteor': meteor_scores * 100, + 'sbert_similarity': sbert_similarity * 100, + 'simcse_similarity': simcse_similarity * 100 + } + + return object_id, model_output, ground_truth, scores + + def evaluate(self): + print("Starting evaluation...") + + for result in tqdm(self.results, desc="Evaluating"): + object_id, model_output, ground_truth, scores = self.evaluate_result(result) + + # save the object_id, model_output, ground_truth, and scores for each result + self.response_data.append({ + 'object_id': object_id, + 'ground_truth': ground_truth, + 'model_output': model_output, + 'scores': scores, + }) + + # save the scores for overall results + for metric, score in scores.items(): + self.scores[metric].append(score) + + print("Evaluation finished.") + self.save_results() + self.print_results() + + def 
save_results(self): + output_path = os.path.join(self.output_dir, self.output_file) + + with open(output_path, 'w') as f: + results_to_save = { + 'inference_prompt': self.inference_prompt, + 'overall_scores': {metric: f"{np.mean(scores):.4f}" for metric, scores in self.scores.items()}, + 'results': self.response_data, + } + json.dump(results_to_save, f, indent=2) + + print(f"Results saved to {output_path}") + + def print_results(self): + print('-' * 80) + print("Results:") + for metric, scores in self.scores.items(): + print(f"Average {metric.upper()} Score: {np.mean(scores):.4f}") + +def start_evaluation(results, output_dir, output_file, + parallel=True, num_workers=20): + """ + Args: + results: dict or file path to the json file containing the dict + output_file: the path the final evaluation results to be saved. + """ + if isinstance(results, str): + with open(results, 'r') as fp: + results = json.load(fp) + + evaluator = TraditionalMetricEvaluator(results, output_dir, output_file) + evaluator.evaluate() + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + + parser.add_argument("--results_path", type=str, \ + default="", help="Path to the results file.") + parser.add_argument("--output_dir", type=str, default=None, help="Path to the output directory.") + + args = parser.parse_args() + + if args.output_dir is None: + args.output_dir = os.path.dirname(args.results_path) + + output_file = os.path.basename(args.results_path).replace(".json", f"_evaluated_traditional.json") + + start_evaluation(results=args.results_path, output_dir=args.output_dir, output_file=output_file) + \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/eval/utils.py b/ThirdParty/PointLLM/pointllm/eval/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1ee145fac7ffd185128ae08f4447d7102ee72a62 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/eval/utils.py @@ -0,0 +1,69 @@ +import openai +import time +import random +import os + +def retry_with_exponential_backoff( + func, + initial_delay: float = 1, + exponential_base: float = 2, + jitter: bool = True, + max_retries: int = 40, + max_delay: int = 30, + errors: tuple = (openai.error.RateLimitError, openai.error.ServiceUnavailableError, openai.error.Timeout), +): + """Retry a function with exponential backoff.""" + def wrapper(*args, **kwargs): + num_retries = 0 + delay = initial_delay + + while True: + try: + return func(*args, **kwargs) + except errors as e: + # * print the error info + num_retries += 1 + if num_retries > max_retries: + print(f"[OPENAI] Encounter error: {e}.") + raise Exception( + f"[OPENAI] Maximum number of retries ({max_retries}) exceeded." 
+ ) + delay *= exponential_base * (1 + jitter * random.random()) + time.sleep(min(delay, max_delay)) + except Exception as e: + raise e + return wrapper + +class OpenAIGPT(): + def __init__(self, model="gpt-3.5-turbo-0613", temperature=1, top_p=1, max_tokens=2048, **kwargs) -> None: + setup_openai(model) + self.default_chat_parameters = { + "model": model, + "temperature": temperature, + "top_p": top_p, + "max_tokens": max_tokens, + **kwargs + } + + @retry_with_exponential_backoff + def safe_chat_complete(self, messages, content_only=True, **kwargs): + chat_parameters = self.default_chat_parameters.copy() + if len(kwargs) > 0: + chat_parameters.update(**kwargs) + + response = openai.ChatCompletion.create( + messages=messages, + **chat_parameters + ) + + if content_only: + response = response['choices'][0]["message"]['content'] + + return response + +def setup_openai(model_name): + # Setup OpenAI API Key + print("[OPENAI] Setting OpenAI api_key...") + openai.api_key = os.getenv('OPENAI_API_KEY') + print(f"[OPENAI] OpenAI organization: {openai.organization}") + print(f"[OPENAI] Using MODEL: {model_name}") \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/__init__.py b/ThirdParty/PointLLM/pointllm/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2ea5e0477ba727cf099c4fcfb89e9dcf59ec2be0 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/__init__.py @@ -0,0 +1,2 @@ +# from .pointllm import PointLLMLlamaForCausalLM, PointLLMConfig +from .pointbert.point_encoder import PointTransformer \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml b/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a90473b82e3afaa9654c2f7127c8e01d11006e4c --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml @@ -0,0 +1,16 @@ +model : { + NAME: PointTransformer, + trans_dim: 384, + depth: 12, + drop_path_rate: 0.1, + cls_dim: 40, + num_heads: 6, + group_size: 32, + num_group: 512, + encoder_dims: 256, + point_dims: 3, + projection_hidden_layer: 2, + projection_hidden_dim: [1024, 2048], + use_max_pool: false +} +npoints: 8192 \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_base_8192point.yaml b/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_base_8192point.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ac9db169433888af8cb9eed641f327eb8b00536d --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_base_8192point.yaml @@ -0,0 +1,13 @@ +model : { + NAME: PointTransformer, + trans_dim: 1152, # * point feature dims (hidden state) + depth: 12, + drop_path_rate: 0.1, + cls_dim: 40, + num_heads: 12, + group_size: 48, + num_group: 512, + encoder_dims: 512, # * point group tokens feature + point_input_dims: 3, +} +npoints: 8192 \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/checkpoint.py b/ThirdParty/PointLLM/pointllm/model/pointbert/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..2ac680ab712235a4b8f4cc74f4c36b969ad6e57b --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/checkpoint.py @@ -0,0 +1,126 @@ +from collections import defaultdict +import torch.nn as nn + +from typing import Any +from typing import Optional, List, Dict, NamedTuple, 
Tuple, Iterable + +from termcolor import colored + +def get_missing_parameters_message(keys: List[str]) -> str: + """ + Get a logging-friendly message to report parameter names (keys) that are in + the model but not found in a checkpoint. + Args: + keys (list[str]): List of keys that were not found in the checkpoint. + Returns: + str: message. + """ + groups = _group_checkpoint_keys(keys) + msg = "Some model parameters or buffers are not found in the checkpoint:\n" + msg += "\n".join( + " " + colored(k + _group_to_str(v), "blue") for k, v in groups.items() + ) + return msg + + +def get_unexpected_parameters_message(keys: List[str]) -> str: + """ + Get a logging-friendly message to report parameter names (keys) that are in + the checkpoint but not found in the model. + Args: + keys (list[str]): List of keys that were not found in the model. + Returns: + str: message. + """ + groups = _group_checkpoint_keys(keys) + msg = "The checkpoint state_dict contains keys that are not used by the model:\n" + msg += "\n".join( + " " + colored(k + _group_to_str(v), "magenta") for k, v in groups.items() + ) + return msg + + +def _strip_prefix_if_present(state_dict: Dict[str, Any], prefix: str) -> None: + """ + Strip the prefix in metadata, if any. + Args: + state_dict (OrderedDict): a state-dict to be loaded to the model. + prefix (str): prefix. + """ + keys = sorted(state_dict.keys()) + if not all(len(key) == 0 or key.startswith(prefix) for key in keys): + return + + for key in keys: + newkey = key[len(prefix):] + state_dict[newkey] = state_dict.pop(key) + + # also strip the prefix in metadata, if any.. + try: + metadata = state_dict._metadata # pyre-ignore + except AttributeError: + pass + else: + for key in list(metadata.keys()): + # for the metadata dict, the key can be: + # '': for the DDP module, which we want to remove. + # 'module': for the actual model. + # 'module.xx.xx': for the rest. + + if len(key) == 0: + continue + newkey = key[len(prefix):] + metadata[newkey] = metadata.pop(key) + + +def _group_checkpoint_keys(keys: List[str]) -> Dict[str, List[str]]: + """ + Group keys based on common prefixes. A prefix is the string up to the final + "." in each key. + Args: + keys (list[str]): list of parameter names, i.e. keys in the model + checkpoint dict. + Returns: + dict[list]: keys with common prefixes are grouped into lists. + """ + groups = defaultdict(list) + for key in keys: + pos = key.rfind(".") + if pos >= 0: + head, tail = key[:pos], [key[pos + 1:]] + else: + head, tail = key, [] + groups[head].extend(tail) + return groups + + +def _group_to_str(group: List[str]) -> str: + """ + Format a group of parameter name suffixes into a loggable string. + Args: + group (list[str]): list of parameter name suffixes. + Returns: + str: formated string. + """ + if len(group) == 0: + return "" + + if len(group) == 1: + return "." + group[0] + + return ".{" + ", ".join(group) + "}" + + +def _named_modules_with_dup( + model: nn.Module, prefix: str = "" +) -> Iterable[Tuple[str, nn.Module]]: + """ + The same as `model.named_modules()`, except that it includes + duplicated modules that have more than one name. + """ + yield prefix, model + for name, module in model._modules.items(): # pyre-ignore + if module is None: + continue + submodule_prefix = prefix + ("." 
if prefix else "") + name + yield from _named_modules_with_dup(module, submodule_prefix) \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/dvae.py b/ThirdParty/PointLLM/pointllm/model/pointbert/dvae.py new file mode 100644 index 0000000000000000000000000000000000000000..056c025bbc9b0ba0eab61ba1163c79745214fe81 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/dvae.py @@ -0,0 +1,355 @@ +import torch.nn as nn +import torch +import torch.nn.functional as F +from . import misc + +# from knn_cuda import KNN + +# knn = KNN(k=4, transpose_mode=False) + + +class DGCNN(nn.Module): + def __init__(self, encoder_channel, output_channel): + super().__init__() + ''' + K has to be 16 + ''' + self.input_trans = nn.Conv1d(encoder_channel, 128, 1) + + self.layer1 = nn.Sequential(nn.Conv2d(256, 256, kernel_size=1, bias=False), + nn.GroupNorm(4, 256), + nn.LeakyReLU(negative_slope=0.2) + ) + + self.layer2 = nn.Sequential(nn.Conv2d(512, 512, kernel_size=1, bias=False), + nn.GroupNorm(4, 512), + nn.LeakyReLU(negative_slope=0.2) + ) + + self.layer3 = nn.Sequential(nn.Conv2d(1024, 512, kernel_size=1, bias=False), + nn.GroupNorm(4, 512), + nn.LeakyReLU(negative_slope=0.2) + ) + + self.layer4 = nn.Sequential(nn.Conv2d(1024, 1024, kernel_size=1, bias=False), + nn.GroupNorm(4, 1024), + nn.LeakyReLU(negative_slope=0.2) + ) + + self.layer5 = nn.Sequential(nn.Conv1d(2304, output_channel, kernel_size=1, bias=False), + nn.GroupNorm(4, output_channel), + nn.LeakyReLU(negative_slope=0.2) + ) + + @staticmethod + def get_graph_feature(coor_q, x_q, coor_k, x_k): + # coor: bs, 3, np, x: bs, c, np + + k = 4 + batch_size = x_k.size(0) + num_points_k = x_k.size(2) + num_points_q = x_q.size(2) + + with torch.no_grad(): + _, idx = knn(coor_k, coor_q) # bs k np + assert idx.shape[1] == k + idx_base = torch.arange(0, batch_size, device=x_q.device).view(-1, 1, 1) * num_points_k + idx = idx + idx_base + idx = idx.view(-1) + num_dims = x_k.size(1) + x_k = x_k.transpose(2, 1).contiguous() + feature = x_k.view(batch_size * num_points_k, -1)[idx, :] + feature = feature.view(batch_size, k, num_points_q, num_dims).permute(0, 3, 2, 1).contiguous() + x_q = x_q.view(batch_size, num_dims, num_points_q, 1).expand(-1, -1, -1, k) + feature = torch.cat((feature - x_q, x_q), dim=1) + return feature + + def forward(self, f, coor): + # f: B G C + # coor: B G 3 + + # bs 3 N bs C N + feature_list = [] + coor = coor.transpose(1, 2).contiguous() # B 3 N + f = f.transpose(1, 2).contiguous() # B C N + f = self.input_trans(f) # B 128 N + + f = self.get_graph_feature(coor, f, coor, f) # B 256 N k + f = self.layer1(f) # B 256 N k + f = f.max(dim=-1, keepdim=False)[0] # B 256 N + feature_list.append(f) + + f = self.get_graph_feature(coor, f, coor, f) # B 512 N k + f = self.layer2(f) # B 512 N k + f = f.max(dim=-1, keepdim=False)[0] # B 512 N + feature_list.append(f) + + f = self.get_graph_feature(coor, f, coor, f) # B 1024 N k + f = self.layer3(f) # B 512 N k + f = f.max(dim=-1, keepdim=False)[0] # B 512 N + feature_list.append(f) + + f = self.get_graph_feature(coor, f, coor, f) # B 1024 N k + f = self.layer4(f) # B 1024 N k + f = f.max(dim=-1, keepdim=False)[0] # B 1024 N + feature_list.append(f) + + f = torch.cat(feature_list, dim=1) # B 2304 N + + f = self.layer5(f) # B C' N + + f = f.transpose(-1, -2) + + return f + + +### ref https://github.com/Strawberry-Eat-Mango/PCT_Pytorch/blob/main/util.py ### +def knn_point(nsample, xyz, new_xyz): + """ + Input: + nsample: max sample number in local region + xyz: all 
points, [B, N, C] + new_xyz: query points, [B, S, C] + Return: + group_idx: grouped points index, [B, S, nsample] + """ + sqrdists = square_distance(new_xyz, xyz) + _, group_idx = torch.topk(sqrdists, nsample, dim=-1, largest=False, sorted=False) + return group_idx + + +def square_distance(src, dst): + """ + Calculate Euclid distance between each two points. + src^T * dst = xn * xm + yn * ym + zn * zm; + sum(src^2, dim=-1) = xn*xn + yn*yn + zn*zn; + sum(dst^2, dim=-1) = xm*xm + ym*ym + zm*zm; + dist = (xn - xm)^2 + (yn - ym)^2 + (zn - zm)^2 + = sum(src**2,dim=-1)+sum(dst**2,dim=-1)-2*src^T*dst + Input: + src: source points, [B, N, C] + dst: target points, [B, M, C] + Output: + dist: per-point square distance, [B, N, M] + """ + B, N, _ = src.shape + _, M, _ = dst.shape + dist = -2 * torch.matmul(src, dst.permute(0, 2, 1)) + dist += torch.sum(src ** 2, -1).view(B, N, 1) + dist += torch.sum(dst ** 2, -1).view(B, 1, M) + return dist + + +class Group(nn.Module): + def __init__(self, num_group, group_size): + super().__init__() + self.num_group = num_group + self.group_size = group_size + # self.knn = KNN(k=self.group_size, transpose_mode=True) + + def forward(self, xyz): + ''' + input: B N 3 + --------------------------- + output: B G M 3 + center : B G 3 + ''' + B, N, C = xyz.shape + if C > 3: + data = xyz + xyz = data[:,:,:3] + rgb = data[:, :, 3:] + batch_size, num_points, _ = xyz.shape + # fps the centers out + center = misc.fps(xyz, self.num_group) # B G 3 + + # knn to get the neighborhood + # _, idx = self.knn(xyz, center) # B G M + idx = knn_point(self.group_size, xyz, center) # B G M + assert idx.size(1) == self.num_group + assert idx.size(2) == self.group_size + idx_base = torch.arange(0, batch_size, device=xyz.device).view(-1, 1, 1) * num_points + idx = idx + idx_base + idx = idx.view(-1) + + neighborhood_xyz = xyz.view(batch_size * num_points, -1)[idx, :] + neighborhood_xyz = neighborhood_xyz.view(batch_size, self.num_group, self.group_size, 3).contiguous() + if C > 3: + neighborhood_rgb = rgb.view(batch_size * num_points, -1)[idx, :] + neighborhood_rgb = neighborhood_rgb.view(batch_size, self.num_group, self.group_size, -1).contiguous() + + # normalize xyz + neighborhood_xyz = neighborhood_xyz - center.unsqueeze(2) + if C > 3: + neighborhood = torch.cat((neighborhood_xyz, neighborhood_rgb), dim=-1) + else: + neighborhood = neighborhood_xyz + return neighborhood, center + +class Encoder(nn.Module): + def __init__(self, encoder_channel, point_input_dims=3): + super().__init__() + self.encoder_channel = encoder_channel + self.point_input_dims = point_input_dims + self.first_conv = nn.Sequential( + nn.Conv1d(self.point_input_dims, 128, 1), + nn.BatchNorm1d(128), + nn.ReLU(inplace=True), + nn.Conv1d(128, 256, 1) + ) + self.second_conv = nn.Sequential( + nn.Conv1d(512, 512, 1), + nn.BatchNorm1d(512), + nn.ReLU(inplace=True), + nn.Conv1d(512, self.encoder_channel, 1) + ) + + def forward(self, point_groups): + ''' + point_groups : B G N 3 + ----------------- + feature_global : B G C + ''' + bs, g, n, c = point_groups.shape + point_groups = point_groups.reshape(bs * g, n, c) + # encoder + feature = self.first_conv(point_groups.transpose(2, 1)) # BG 256 n + feature_global = torch.max(feature, dim=2, keepdim=True)[0] # BG 256 1 + feature = torch.cat([feature_global.expand(-1, -1, n), feature], dim=1) # BG 512 n + feature = self.second_conv(feature) # BG 1024 n + feature_global = torch.max(feature, dim=2, keepdim=False)[0] # BG 1024 + return feature_global.reshape(bs, g, self.encoder_channel) + 
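
Note: the Group/Encoder pair above acts as the point-cloud tokenizer — Group picks `num_group` centers by farthest-point sampling and gathers `group_size` kNN neighbors around each, and Encoder runs a small shared PointNet over every neighborhood. A minimal shape-level sketch of how they compose (the batch size, point count, and hyperparameter values here are illustrative placeholders, not taken from any shipped config):

import torch

# Illustrative sizes: 2 clouds of 8192 xyz points, 512 groups of 32 points each.
pts = torch.rand(2, 8192, 3)                    # B N 3
grouper = Group(num_group=512, group_size=32)   # FPS centers + kNN neighborhoods
encoder = Encoder(encoder_channel=256)          # shared per-group mini-PointNet

neighborhood, center = grouper(pts)             # (2, 512, 32, 3) and (2, 512, 3)
tokens = encoder(neighborhood)                  # (2, 512, 256): one feature per group
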
+ +class Decoder(nn.Module): + def __init__(self, encoder_channel, num_fine): + super().__init__() + self.num_fine = num_fine + self.grid_size = 2 + self.num_coarse = self.num_fine // 4 + assert num_fine % 4 == 0 + + self.mlp = nn.Sequential( + nn.Linear(encoder_channel, 1024), + nn.ReLU(inplace=True), + nn.Linear(1024, 1024), + nn.ReLU(inplace=True), + nn.Linear(1024, 3 * self.num_coarse) + ) + self.final_conv = nn.Sequential( + nn.Conv1d(encoder_channel + 3 + 2, 512, 1), + nn.BatchNorm1d(512), + nn.ReLU(inplace=True), + nn.Conv1d(512, 512, 1), + nn.BatchNorm1d(512), + nn.ReLU(inplace=True), + nn.Conv1d(512, 3, 1) + ) + a = torch.linspace(-0.05, 0.05, steps=self.grid_size, dtype=torch.float).view(1, self.grid_size).expand( + self.grid_size, self.grid_size).reshape(1, -1) + b = torch.linspace(-0.05, 0.05, steps=self.grid_size, dtype=torch.float).view(self.grid_size, 1).expand( + self.grid_size, self.grid_size).reshape(1, -1) + self.folding_seed = torch.cat([a, b], dim=0).view(1, 2, self.grid_size ** 2) # 1 2 S + + def forward(self, feature_global): + ''' + feature_global : B G C + ------- + coarse : B G M 3 + fine : B G N 3 + + ''' + bs, g, c = feature_global.shape + feature_global = feature_global.reshape(bs * g, c) + + coarse = self.mlp(feature_global).reshape(bs * g, self.num_coarse, 3) # BG M 3 + + point_feat = coarse.unsqueeze(2).expand(-1, -1, self.grid_size ** 2, -1) # BG (M) S 3 + point_feat = point_feat.reshape(bs * g, self.num_fine, 3).transpose(2, 1) # BG 3 N + + seed = self.folding_seed.unsqueeze(2).expand(bs * g, -1, self.num_coarse, -1) # BG 2 M (S) + seed = seed.reshape(bs * g, -1, self.num_fine).to(feature_global.device) # BG 2 N + + feature_global = feature_global.unsqueeze(2).expand(-1, -1, self.num_fine) # BG 1024 N + feat = torch.cat([feature_global, seed, point_feat], dim=1) # BG C N + + center = coarse.unsqueeze(2).expand(-1, -1, self.grid_size ** 2, -1) # BG (M) S 3 + center = center.reshape(bs * g, self.num_fine, 3).transpose(2, 1) # BG 3 N + + fine = self.final_conv(feat) + center # BG 3 N + fine = fine.reshape(bs, g, 3, self.num_fine).transpose(-1, -2) + coarse = coarse.reshape(bs, g, self.num_coarse, 3) + return coarse, fine + + +class DiscreteVAE(nn.Module): + def __init__(self, config, **kwargs): + super().__init__() + self.group_size = config.group_size + self.num_group = config.num_group + self.encoder_dims = config.encoder_dims + self.tokens_dims = config.tokens_dims + + self.decoder_dims = config.decoder_dims + self.num_tokens = config.num_tokens + + self.group_divider = Group(num_group=self.num_group, group_size=self.group_size) + self.encoder = Encoder(encoder_channel=self.encoder_dims) + self.dgcnn_1 = DGCNN(encoder_channel=self.encoder_dims, output_channel=self.num_tokens) + self.codebook = nn.Parameter(torch.randn(self.num_tokens, self.tokens_dims)) + + self.dgcnn_2 = DGCNN(encoder_channel=self.tokens_dims, output_channel=self.decoder_dims) + self.decoder = Decoder(encoder_channel=self.decoder_dims, num_fine=self.group_size) + # self.build_loss_func() + + # def build_loss_func(self): + # self.loss_func_cdl1 = ChamferDistanceL1().cuda() + # self.loss_func_cdl2 = ChamferDistanceL2().cuda() + # self.loss_func_emd = emd().cuda() + + def recon_loss(self, ret, gt): + whole_coarse, whole_fine, coarse, fine, group_gt, _ = ret + + bs, g, _, _ = coarse.shape + + coarse = coarse.reshape(bs * g, -1, 3).contiguous() + fine = fine.reshape(bs * g, -1, 3).contiguous() + group_gt = group_gt.reshape(bs * g, -1, 3).contiguous() + + loss_coarse_block = 
self.loss_func_cdl1(coarse, group_gt) + loss_fine_block = self.loss_func_cdl1(fine, group_gt) + + loss_recon = loss_coarse_block + loss_fine_block + + return loss_recon + + def get_loss(self, ret, gt): + # reconstruction loss + loss_recon = self.recon_loss(ret, gt) + # kl divergence + logits = ret[-1] # B G N + softmax = F.softmax(logits, dim=-1) + mean_softmax = softmax.mean(dim=1) + log_qy = torch.log(mean_softmax) + log_uniform = torch.log(torch.tensor([1. / self.num_tokens], device=gt.device)) + loss_klv = F.kl_div(log_qy, log_uniform.expand(log_qy.size(0), log_qy.size(1)), None, None, 'batchmean', + log_target=True) + + return loss_recon, loss_klv + + def forward(self, inp, temperature=1., hard=False, **kwargs): + neighborhood, center = self.group_divider(inp) + logits = self.encoder(neighborhood) # B G C + logits = self.dgcnn_1(logits, center) # B G N + soft_one_hot = F.gumbel_softmax(logits, tau=temperature, dim=2, hard=hard) # B G N + sampled = torch.einsum('b g n, n c -> b g c', soft_one_hot, self.codebook) # B G C + feature = self.dgcnn_2(sampled, center) + coarse, fine = self.decoder(feature) + + with torch.no_grad(): + whole_fine = (fine + center.unsqueeze(2)).reshape(inp.size(0), -1, 3) + whole_coarse = (coarse + center.unsqueeze(2)).reshape(inp.size(0), -1, 3) + + assert fine.size(2) == self.group_size + ret = (whole_coarse, whole_fine, coarse, fine, neighborhood, logits) + return ret \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/logger.py b/ThirdParty/PointLLM/pointllm/model/pointbert/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..847c1c7a2f50f310cd5daf96b928838c1c293525 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/logger.py @@ -0,0 +1,127 @@ +import logging +import torch.distributed as dist + +logger_initialized = {} + +def get_root_logger(log_file=None, log_level=logging.INFO, name='main'): + """Get root logger and add a keyword filter to it. + The logger will be initialized if it has not been initialized. By default a + StreamHandler will be added. If `log_file` is specified, a FileHandler will + also be added. The name of the root logger is the top-level package name, + e.g., "mmdet3d". + Args: + log_file (str, optional): File path of log. Defaults to None. + log_level (int, optional): The level of logger. + Defaults to logging.INFO. + name (str, optional): The name of the root logger, also used as a + filter keyword. Defaults to 'mmdet3d'. + Returns: + :obj:`logging.Logger`: The obtained logger + """ + logger = get_logger(name=name, log_file=log_file, log_level=log_level) + # add a logging filter + logging_filter = logging.Filter(name) + logging_filter.filter = lambda record: record.find(name) != -1 + + return logger + + +def get_logger(name, log_file=None, log_level=logging.INFO, file_mode='w'): + """Initialize and get a logger by name. + If the logger has not been initialized, this method will initialize the + logger by adding one or two handlers, otherwise the initialized logger will + be directly returned. During initialization, a StreamHandler will always be + added. If `log_file` is specified and the process rank is 0, a FileHandler + will also be added. + Args: + name (str): Logger name. + log_file (str | None): The log filename. If specified, a FileHandler + will be added to the logger. + log_level (int): The logger level. Note that only the process of + rank 0 is affected, and other processes will set the level to + "Error" thus be silent most of the time. 
+ file_mode (str): The file mode used in opening log file. + Defaults to 'w'. + Returns: + logging.Logger: The expected logger. + """ + logger = logging.getLogger(name) + if name in logger_initialized: + return logger + # handle hierarchical names + # e.g., logger "a" is initialized, then logger "a.b" will skip the + # initialization since it is a child of "a". + for logger_name in logger_initialized: + if name.startswith(logger_name): + return logger + + # handle duplicate logs to the console + # Starting in 1.8.0, PyTorch DDP attaches a StreamHandler (NOTSET) + # to the root logger. As logger.propagate is True by default, this root + # level handler causes logging messages from rank>0 processes to + # unexpectedly show up on the console, creating much unwanted clutter. + # To fix this issue, we set the root logger's StreamHandler, if any, to log + # at the ERROR level. + for handler in logger.root.handlers: + if type(handler) is logging.StreamHandler: + handler.setLevel(logging.ERROR) + + stream_handler = logging.StreamHandler() + handlers = [stream_handler] + + if dist.is_available() and dist.is_initialized(): + rank = dist.get_rank() + else: + rank = 0 + + # only rank 0 will add a FileHandler + if rank == 0 and log_file is not None: + # Here, the default behaviour of the official logger is 'a'. Thus, we + # provide an interface to change the file mode to the default + # behaviour. + file_handler = logging.FileHandler(log_file, file_mode) + handlers.append(file_handler) + + formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s') + for handler in handlers: + handler.setFormatter(formatter) + handler.setLevel(log_level) + logger.addHandler(handler) + + if rank == 0: + logger.setLevel(log_level) + else: + logger.setLevel(logging.ERROR) + + logger_initialized[name] = True + + + return logger + + +def print_log(msg, logger=None, level=logging.INFO): + """Print a log message. + Args: + msg (str): The message to be logged. + logger (logging.Logger | str | None): The logger to be used. + Some special loggers are: + - "silent": no message will be printed. + - other str: the logger obtained with `get_root_logger(logger)`. + - None: The `print()` method will be used to print log messages. + level (int): Logging level. Only available when `logger` is a Logger + object or "root". 
+ """ + if logger is None: + print(msg) + elif isinstance(logger, logging.Logger): + logger.log(level, msg) + elif logger == 'silent': + pass + elif isinstance(logger, str): + _logger = get_logger(logger) + _logger.log(level, msg) + else: + raise TypeError( + 'logger should be either a logging.Logger object, str, ' + f'"silent" or None, but got {type(logger)}') \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/misc.py b/ThirdParty/PointLLM/pointllm/model/pointbert/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..02071cb2e4f70b143c86c617f16d5922a88f24f6 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/misc.py @@ -0,0 +1,287 @@ +import numpy as np +import matplotlib.pyplot as plt +from mpl_toolkits.mplot3d import Axes3D +import random +import torch +import torch.nn as nn +import torch.nn.functional as F +import os +from collections import abc +# from pointnet2_ops import pointnet2_utils + + +# def fps(data, number): +# ''' +# data B N 3 +# number int +# ''' +# fps_idx = pointnet2_utils.furthest_point_sample(data, number) +# fps_data = pointnet2_utils.gather_operation(data.transpose(1, 2).contiguous(), fps_idx).transpose(1,2).contiguous() +# return fps_data + +def index_points(points, idx): + """ + Input: + points: input points data, [B, N, C] + idx: sample index data, [B, S] + Return: + new_points:, indexed points data, [B, S, C] + """ + device = points.device + B = points.shape[0] + view_shape = list(idx.shape) + view_shape[1:] = [1] * (len(view_shape) - 1) + repeat_shape = list(idx.shape) + repeat_shape[0] = 1 + batch_indices = torch.arange(B, dtype=torch.long).to(device).view(view_shape).repeat(repeat_shape) + new_points = points[batch_indices, idx, :] + return new_points + +def fps(xyz, npoint): + """ + Input: + xyz: pointcloud data, [B, N, 3] + npoint: number of samples + Return: + centroids: sampled pointcloud index, [B, npoint] + """ + device = xyz.device + B, N, C = xyz.shape + centroids = torch.zeros(B, npoint, dtype=torch.long).to(device) + distance = torch.ones(B, N).to(device) * 1e10 + farthest = torch.randint(0, N, (B,), dtype=torch.long).to(device) + batch_indices = torch.arange(B, dtype=torch.long).to(device) + for i in range(npoint): + centroids[:, i] = farthest + centroid = xyz[batch_indices, farthest, :].view(B, 1, 3) + dist = torch.sum((xyz - centroid) ** 2, -1) + distance = torch.min(distance, dist) + farthest = torch.max(distance, -1)[1] + return index_points(xyz, centroids) + +def worker_init_fn(worker_id): + np.random.seed(np.random.get_state()[1][0] + worker_id) + +def build_lambda_sche(opti, config): + if config.get('decay_step') is not None: + lr_lbmd = lambda e: max(config.lr_decay ** (e / config.decay_step), config.lowest_decay) + scheduler = torch.optim.lr_scheduler.LambdaLR(opti, lr_lbmd) + else: + raise NotImplementedError() + return scheduler + +def build_lambda_bnsche(model, config): + if config.get('decay_step') is not None: + bnm_lmbd = lambda e: max(config.bn_momentum * config.bn_decay ** (e / config.decay_step), config.lowest_decay) + bnm_scheduler = BNMomentumScheduler(model, bnm_lmbd) + else: + raise NotImplementedError() + return bnm_scheduler + +def set_random_seed(seed, deterministic=False): + """Set random seed. + Args: + seed (int): Seed to be used. + deterministic (bool): Whether to set the deterministic option for + CUDNN backend, i.e., set `torch.backends.cudnn.deterministic` + to True and `torch.backends.cudnn.benchmark` to False. + Default: False. 
+ + # Speed-reproducibility tradeoff https://pytorch.org/docs/stable/notes/randomness.html + if cuda_deterministic: # slower, more reproducible + cudnn.deterministic = True + cudnn.benchmark = False + else: # faster, less reproducible + cudnn.deterministic = False + cudnn.benchmark = True + + """ + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + if deterministic: + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + +def is_seq_of(seq, expected_type, seq_type=None): + """Check whether it is a sequence of some type. + Args: + seq (Sequence): The sequence to be checked. + expected_type (type): Expected type of sequence items. + seq_type (type, optional): Expected sequence type. + Returns: + bool: Whether the sequence is valid. + """ + if seq_type is None: + exp_seq_type = abc.Sequence + else: + assert isinstance(seq_type, type) + exp_seq_type = seq_type + if not isinstance(seq, exp_seq_type): + return False + for item in seq: + if not isinstance(item, expected_type): + return False + return True + + +def set_bn_momentum_default(bn_momentum): + def fn(m): + if isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)): + m.momentum = bn_momentum + return fn + +class BNMomentumScheduler(object): + + def __init__( + self, model, bn_lambda, last_epoch=-1, + setter=set_bn_momentum_default + ): + if not isinstance(model, nn.Module): + raise RuntimeError( + "Class '{}' is not a PyTorch nn Module".format( + type(model).__name__ + ) + ) + + self.model = model + self.setter = setter + self.lmbd = bn_lambda + + self.step(last_epoch + 1) + self.last_epoch = last_epoch + + def step(self, epoch=None): + if epoch is None: + epoch = self.last_epoch + 1 + + self.last_epoch = epoch + self.model.apply(self.setter(self.lmbd(epoch))) + + def get_momentum(self, epoch=None): + if epoch is None: + epoch = self.last_epoch + 1 + return self.lmbd(epoch) + + + +def seprate_point_cloud(xyz, num_points, crop, fixed_points = None, padding_zeros = False): + ''' + seprate point cloud: usage : using to generate the incomplete point cloud with a setted number. 
+ ''' + _,n,c = xyz.shape + + assert n == num_points + assert c == 3 + if crop == num_points: + return xyz, None + + INPUT = [] + CROP = [] + for points in xyz: + if isinstance(crop,list): + num_crop = random.randint(crop[0],crop[1]) + else: + num_crop = crop + + points = points.unsqueeze(0) + + if fixed_points is None: + center = F.normalize(torch.randn(1,1,3),p=2,dim=-1).cuda() + else: + if isinstance(fixed_points,list): + fixed_point = random.sample(fixed_points,1)[0] + else: + fixed_point = fixed_points + center = fixed_point.reshape(1,1,3).cuda() + + distance_matrix = torch.norm(center.unsqueeze(2) - points.unsqueeze(1), p =2 ,dim = -1) # 1 1 2048 + + idx = torch.argsort(distance_matrix,dim=-1, descending=False)[0,0] # 2048 + + if padding_zeros: + input_data = points.clone() + input_data[0, idx[:num_crop]] = input_data[0,idx[:num_crop]] * 0 + + else: + input_data = points.clone()[0, idx[num_crop:]].unsqueeze(0) # 1 N 3 + + crop_data = points.clone()[0, idx[:num_crop]].unsqueeze(0) + + if isinstance(crop,list): + INPUT.append(fps(input_data,2048)) + CROP.append(fps(crop_data,2048)) + else: + INPUT.append(input_data) + CROP.append(crop_data) + + input_data = torch.cat(INPUT,dim=0)# B N 3 + crop_data = torch.cat(CROP,dim=0)# B M 3 + + return input_data.contiguous(), crop_data.contiguous() + +def get_ptcloud_img(ptcloud): + fig = plt.figure(figsize=(8, 8)) + + x, z, y = ptcloud.transpose(1, 0) + ax = fig.gca(projection=Axes3D.name, adjustable='box') + ax.axis('off') + # ax.axis('scaled') + ax.view_init(30, 45) + max, min = np.max(ptcloud), np.min(ptcloud) + ax.set_xbound(min, max) + ax.set_ybound(min, max) + ax.set_zbound(min, max) + ax.scatter(x, y, z, zdir='z', c=x, cmap='jet') + + fig.canvas.draw() + img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') + img = img.reshape(fig.canvas.get_width_height()[::-1] + (3, )) + return img + + + +def visualize_KITTI(path, data_list, titles = ['input','pred'], cmap=['bwr','autumn'], zdir='y', + xlim=(-1, 1), ylim=(-1, 1), zlim=(-1, 1) ): + fig = plt.figure(figsize=(6*len(data_list),6)) + cmax = data_list[-1][:,0].max() + + for i in range(len(data_list)): + data = data_list[i][:-2048] if i == 1 else data_list[i] + color = data[:,0] /cmax + ax = fig.add_subplot(1, len(data_list) , i + 1, projection='3d') + ax.view_init(30, -120) + b = ax.scatter(data[:, 0], data[:, 1], data[:, 2], zdir=zdir, c=color,vmin=-1,vmax=1 ,cmap = cmap[0],s=4,linewidth=0.05, edgecolors = 'black') + ax.set_title(titles[i]) + + ax.set_axis_off() + ax.set_xlim(xlim) + ax.set_ylim(ylim) + ax.set_zlim(zlim) + plt.subplots_adjust(left=0, right=1, bottom=0, top=1, wspace=0.2, hspace=0) + if not os.path.exists(path): + os.makedirs(path) + + pic_path = path + '.png' + fig.savefig(pic_path) + + np.save(os.path.join(path, 'input.npy'), data_list[0].numpy()) + np.save(os.path.join(path, 'pred.npy'), data_list[1].numpy()) + plt.close(fig) + + +def random_dropping(pc, e): + up_num = max(64, 768 // (e//50 + 1)) + pc = pc + random_num = torch.randint(1, up_num, (1,1))[0,0] + pc = fps(pc, random_num) + padding = torch.zeros(pc.size(0), 2048 - pc.size(1), 3).to(pc.device) + pc = torch.cat([pc, padding], dim = 1) + return pc + + +def random_scale(partial, scale_range=[0.8, 1.2]): + scale = torch.rand(1).cuda() * (scale_range[1] - scale_range[0]) + scale_range[0] + return partial * scale diff --git a/ThirdParty/PointLLM/pointllm/model/pointbert/point_encoder.py b/ThirdParty/PointLLM/pointllm/model/pointbert/point_encoder.py new file mode 100644 index 
0000000000000000000000000000000000000000..e01a0186bdb6d18bc64f0c9838043854d635c645 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointbert/point_encoder.py @@ -0,0 +1,189 @@ +import torch +import torch.nn as nn +from timm.models.layers import DropPath +from .dvae import Group +from .dvae import Encoder +from .logger import print_log +from collections import OrderedDict + +from .checkpoint import get_missing_parameters_message, get_unexpected_parameters_message + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class Block(nn.Module): + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.norm1 = norm_layer(dim) + + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.attn = Attention( + dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop) + + def forward(self, x): + x = x + self.drop_path(self.attn(self.norm1(x))) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + + +class TransformerEncoder(nn.Module): + """ Transformer Encoder without hierarchical structure + """ + + def __init__(self, embed_dim=768, depth=4, num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, + drop_rate=0., attn_drop_rate=0., drop_path_rate=0.): + super().__init__() + + self.blocks = nn.ModuleList([ + Block( + dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale, + drop=drop_rate, attn_drop=attn_drop_rate, + drop_path=drop_path_rate[i] if isinstance(drop_path_rate, list) else drop_path_rate + ) + for i in range(depth)]) + + def forward(self, x, pos): + for _, block in enumerate(self.blocks): + x = block(x + pos) + return x + + +class PointTransformer(nn.Module): + def __init__(self, config, use_max_pool=True): + super().__init__() + self.config = config + + self.use_max_pool = use_max_pool # * whethet to max pool the features of different tokens + + self.trans_dim = config.trans_dim + self.depth = config.depth + self.drop_path_rate = config.drop_path_rate + self.cls_dim = config.cls_dim + self.num_heads = config.num_heads + + self.group_size = config.group_size + self.num_group = config.num_group + self.point_dims = config.point_dims + # grouper + self.group_divider = Group(num_group=self.num_group, group_size=self.group_size) + # define the encoder + self.encoder_dims = config.encoder_dims + self.encoder = Encoder(encoder_channel=self.encoder_dims, point_input_dims=self.point_dims) + # bridge encoder and transformer + self.reduce_dim = nn.Linear(self.encoder_dims, self.trans_dim) + + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.trans_dim)) + self.cls_pos = nn.Parameter(torch.randn(1, 1, self.trans_dim)) + + self.pos_embed = nn.Sequential( + nn.Linear(3, 128), + nn.GELU(), + nn.Linear(128, self.trans_dim) + ) + + dpr = [x.item() for x in torch.linspace(0, self.drop_path_rate, self.depth)] + self.blocks = TransformerEncoder( + embed_dim=self.trans_dim, + depth=self.depth, + drop_path_rate=dpr, + num_heads=self.num_heads + ) + + self.norm = nn.LayerNorm(self.trans_dim) + + def load_checkpoint(self, bert_ckpt_path): + ckpt = torch.load(bert_ckpt_path, map_location='cpu') + state_dict = OrderedDict() + for k, v in ckpt['state_dict'].items(): + if k.startswith('module.point_encoder.'): + state_dict[k.replace('module.point_encoder.', '')] = v + + incompatible = self.load_state_dict(state_dict, strict=False) + + if incompatible.missing_keys: + print_log('missing_keys', logger='Transformer') + print_log( + get_missing_parameters_message(incompatible.missing_keys), + logger='Transformer' + ) + if incompatible.unexpected_keys: + print_log('unexpected_keys', logger='Transformer') + print_log( + get_unexpected_parameters_message(incompatible.unexpected_keys), + logger='Transformer' + ) + if not incompatible.missing_keys and not incompatible.unexpected_keys: + # * print successful loading + print_log("PointBERT's weights are successfully loaded from {}".format(bert_ckpt_path), logger='Transformer') + + def forward(self, pts): + # divide the point cloud in the same form. 
This is important + neighborhood, center = self.group_divider(pts) + # encoder the input cloud blocks + group_input_tokens = self.encoder(neighborhood) # B G N + group_input_tokens = self.reduce_dim(group_input_tokens) + # prepare cls + cls_tokens = self.cls_token.expand(group_input_tokens.size(0), -1, -1) + cls_pos = self.cls_pos.expand(group_input_tokens.size(0), -1, -1) + # add pos embedding + pos = self.pos_embed(center) + # final input + x = torch.cat((cls_tokens, group_input_tokens), dim=1) + pos = torch.cat((cls_pos, pos), dim=1) + # transformer + x = self.blocks(x, pos) + x = self.norm(x) # * B, G + 1(cls token)(513), C(384) + if not self.use_max_pool: + return x + concat_f = torch.cat([x[:, 0], x[:, 1:].max(1)[0]], dim=-1).unsqueeze(1) # * concat the cls token and max pool the features of different tokens, make it B, 1, C + return concat_f # * B, 1, C(384 + 384) \ No newline at end of file diff --git a/ThirdParty/PointLLM/pointllm/model/pointllm.py b/ThirdParty/PointLLM/pointllm/model/pointllm.py new file mode 100644 index 0000000000000000000000000000000000000000..0ef8111218fe7e17c3bee2a958063afc57ac080a --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/pointllm.py @@ -0,0 +1,353 @@ +# Copyright 2023 Runsen Xu + +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from torch.nn import CrossEntropyLoss +from .utils import * +from ThirdParty.PointLLM.pointllm.utils import * + +from contextlib import nullcontext +from transformers import AutoConfig, AutoModelForCausalLM, \ + LlamaConfig, LlamaModel, LlamaForCausalLM + +from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast + +import os + +# * add logger +import logging +logger = logging.getLogger(__name__) + +class PointLLMConfig(LlamaConfig): + model_type = "pointllm" + +class PointLLMLlamaModel(LlamaModel): + config_class = PointLLMConfig + + def __init__(self, config: LlamaConfig): + super(PointLLMLlamaModel, self).__init__(config) + + self.point_backbone_type = config.point_backbone + logger.info(f"Using {self.point_backbone_type}.") + + if self.point_backbone_type == "PointBERT": + from pointllm.model import PointTransformer + # address of config file, in the same dir of this file + point_bert_config_name = getattr(config, "point_backbone_config_name", "PointTransformer_8192point_2layer") # * default for v1.2, v1.1 uses PointTransformer_base_8192point.yaml + point_bert_config_addr = os.path.join(os.path.dirname(__file__), "pointbert", f"{point_bert_config_name}.yaml") + print(f"Loading PointBERT config from {point_bert_config_addr}.") + point_bert_config = cfg_from_yaml_file(point_bert_config_addr) + if getattr(config, "use_color", False): + point_bert_config.model.point_dims = 6 + use_max_pool = getattr(point_bert_config.model, "use_max_pool", False) # * default is false + + self.point_backbone = PointTransformer(point_bert_config.model, use_max_pool=use_max_pool) + logger.info(f"Using {self.point_backbone.point_dims} dim of points.") + + self.point_backbone_config = { + "point_cloud_dim": point_bert_config.model.point_dims, + "backbone_output_dim": point_bert_config.model.trans_dim if not use_max_pool else point_bert_config.model.trans_dim * 2, + "project_output_dim": self.config.hidden_size, + "point_token_len": point_bert_config.model.num_group + 1 if not use_max_pool else 1, # * number of output features, with cls token + "mm_use_point_start_end": self.config.mm_use_point_start_end, + "projection_hidden_layer": 
point_bert_config.model.get('projection_hidden_layer', 0), + "use_max_pool": use_max_pool + } + if point_bert_config.model.get('projection_hidden_layer', 0) > 0: + self.point_backbone_config["projection_hidden_dim"] = point_bert_config.model.projection_hidden_dim # a list + + logger.info(f"Use max pool is {use_max_pool}. Number of point token is {self.point_backbone_config['point_token_len']}.") + + # * print relevant info with projection layers + backbone_output_dim = self.point_backbone_config["backbone_output_dim"] + logger.info(f"Point backbone output dim: {backbone_output_dim}.") + logger.info(f"Use {self.point_backbone_config['projection_hidden_layer']} projection hiddent layers.") + if self.point_backbone_config['projection_hidden_layer'] > 0: + # Add projection layer with linear layers and GELU activation + projection_layers = [] + last_dim = backbone_output_dim + for i in range(point_bert_config.model.projection_hidden_layer): + projection_layers.append(nn.Linear(last_dim, self.point_backbone_config["projection_hidden_dim"][i])) + projection_layers.append(nn.GELU()) + last_dim = self.point_backbone_config["projection_hidden_dim"][i] + + projection_layers.append(nn.Linear(last_dim, self.point_backbone_config["project_output_dim"])) + self.point_proj = nn.Sequential(*projection_layers) + logger.info(f"Each layer with {point_bert_config.model.projection_hidden_dim} hidden units.") + else: + # Single layer + self.point_proj = nn.Linear(backbone_output_dim, self.point_backbone_config['project_output_dim']) + logger.info(f"Point projector output dim: {self.point_backbone_config['project_output_dim']}.") + + self.fix_pointnet = False + self.fix_llm = False + + def load_point_backbone_checkpoint(self, checkpoint_path=None): + self.point_backbone.load_checkpoint(self.config.point_backbone_ckpt if checkpoint_path is None else checkpoint_path) + + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + point_clouds: Optional[torch.FloatTensor] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + + # HACK: replace back original embeddings for pretraining + orig_embeds_params = getattr(self, 'orig_embeds_params', None) + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + + point_backbone = getattr(self, 'point_backbone', None) + point_backbone_config = getattr(self, 'point_backbone_config', None) + + if point_backbone is not None and (input_ids.shape[1] != 1 or self.training) and point_clouds is not None: + # * enter when training or the first generation step of inference + with torch.no_grad() if self.fix_pointnet else nullcontext(): + if self.fix_pointnet: + self.point_backbone.eval() + if type(point_clouds) is list: + # * variable numbers of points + point_features = [] + for point_cloud in point_clouds: # * iterate over batch + point_feature = self.point_backbone(point_cloud.unsqueeze(0))[0] + point_features.append(point_feature) + else: + point_features = self.point_backbone(point_clouds) + + if type(point_clouds) is list: + point_features = [self.point_proj(point_feature) for point_feature in point_features] + else: + point_features = self.point_proj(point_features) + + dummy_point_features = 
torch.zeros(point_backbone_config['point_token_len'], point_backbone_config['backbone_output_dim'], device=inputs_embeds.device, dtype=inputs_embeds.dtype) + dummy_point_features = self.point_proj(dummy_point_features) + + new_input_embeds = [] + cur_point_idx = 0 + for cur_input_ids, cur_input_embeds in zip(input_ids, inputs_embeds): # * input_ids: B, L; input_embeds: B, L, C + if (cur_input_ids == point_backbone_config['point_patch_token']).sum() == 0: + # multimodal LLM, but the current sample is not multimodal + cur_input_embeds = cur_input_embeds + (0. * dummy_point_features).sum() # * do nothing + new_input_embeds.append(cur_input_embeds) + cur_point_idx += 1 + continue + cur_point_features = point_features[cur_point_idx].to(device=cur_input_embeds.device) + num_patches = cur_point_features.shape[0] # * number of point tokens + if point_backbone_config['mm_use_point_start_end']: + if (cur_input_ids == point_backbone_config["point_start_token"]).sum() != (cur_input_ids == point_backbone_config["point_end_token"]).sum(): + raise ValueError("The number of point start tokens and point end tokens should be the same.") + point_start_tokens = torch.where(cur_input_ids == point_backbone_config["point_start_token"])[0] + for point_start_token_pos in point_start_tokens: + if cur_input_ids[point_start_token_pos + num_patches + 1] != point_backbone_config["point_end_token"]: + raise ValueError("The point end token should follow the point start token.") + if orig_embeds_params is not None: # * will not update the original embeddings except for POINT_START_TOKEN and POINT_END_TOKEN + cur_new_input_embeds = torch.cat((cur_input_embeds[:point_start_token_pos].detach(), cur_input_embeds[point_start_token_pos:point_start_token_pos+1], cur_point_features, cur_input_embeds[point_start_token_pos + num_patches + 1:point_start_token_pos + num_patches + 2], cur_input_embeds[point_start_token_pos + num_patches + 2:].detach()), dim=0) + else: + cur_new_input_embeds = torch.cat((cur_input_embeds[:point_start_token_pos+1], cur_point_features, cur_input_embeds[point_start_token_pos + num_patches + 1:]), dim=0) + cur_point_idx += 1 + new_input_embeds.append(cur_new_input_embeds) + else: + if (cur_input_ids == point_backbone_config["point_patch_token"]).sum() != num_patches: + raise ValueError("The number of point patch tokens should be the same as the number of point patches.") + masked_indices = torch.where(cur_input_ids == point_backbone_config["point_patch_token"])[0] + mask_index_start = masked_indices[0] + if (masked_indices != torch.arange(mask_index_start, mask_index_start+num_patches, device=masked_indices.device, dtype=masked_indices.dtype)).any(): + raise ValueError("The point patch tokens should be consecutive.") + if orig_embeds_params is not None: + cur_new_input_embeds = torch.cat((cur_input_embeds[:mask_index_start].detach(), cur_point_features, cur_input_embeds[mask_index_start+num_patches:].detach()), dim=0) + else: + cur_new_input_embeds = torch.cat((cur_input_embeds[:mask_index_start], cur_point_features, cur_input_embeds[mask_index_start+num_patches:]), dim=0) + new_input_embeds.append(cur_new_input_embeds) + cur_point_idx += 1 + inputs_embeds = torch.stack(new_input_embeds, dim=0) + + return super(PointLLMLlamaModel, self).forward( + input_ids=None, attention_mask=attention_mask, past_key_values=past_key_values, + inputs_embeds=inputs_embeds, use_cache=use_cache, + output_attentions=output_attentions, output_hidden_states=output_hidden_states, + return_dict=return_dict + ) + + +class 
PointLLMLlamaForCausalLM(LlamaForCausalLM): + config_class = PointLLMConfig + + def __init__(self, config): + super(LlamaForCausalLM, self).__init__(config) + self.model = PointLLMLlamaModel(config) + + self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_model(self): + return self.model + + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, # * control whether to return past_key_values + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + point_clouds: Optional[torch.FloatTensor] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, CausalLMOutputWithPast]: + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + point_clouds=point_clouds + ) + + hidden_states = outputs[0] + logits = self.lm_head(hidden_states) + + loss = None + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() # * B, L, V(32003) + shift_labels = labels[..., 1:].contiguous() # * B, L + # Flatten the tokens + loss_fct = CrossEntropyLoss() + shift_logits = shift_logits.view(-1, self.config.vocab_size) + shift_labels = shift_labels.view(-1) + # Enable model/pipeline parallelism + shift_labels = shift_labels.to(shift_logits.device) + loss = loss_fct(shift_logits, shift_labels) + + if not return_dict: + output = (logits,) + outputs[1:] + return (loss,) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation( + self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs + ): + if past_key_values: + input_ids = input_ids[:, -1:] + + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and past_key_values is None: + model_inputs = {"inputs_embeds": inputs_embeds} + else: + model_inputs = {"input_ids": input_ids} + + model_inputs.update( + { + "past_key_values": past_key_values, + "use_cache": kwargs.get("use_cache"), + "attention_mask": attention_mask, + "point_clouds": kwargs.get("point_clouds", None), + } + ) + return model_inputs + + def initialize_tokenizer_point_backbone_config_wo_embedding(self, tokenizer): + # * called when stage2 or inference or inference without pre-training, assume tokenizer has point tokens + config = self.config + point_backbone_config = self.get_model().point_backbone_config + mm_use_point_start_end = 
point_backbone_config['mm_use_point_start_end'] = config.mm_use_point_start_end + + default_point_patch_token = config.DEFAULT_POINT_PATCH_TOKEN + + tokenizer.add_tokens([default_point_patch_token], special_tokens=True) + + # * assert tokenizer has the default_point_patch_token + point_backbone_config['default_point_patch_token'] = default_point_patch_token + point_backbone_config['point_patch_token'] = tokenizer.convert_tokens_to_ids([default_point_patch_token])[0] + + if mm_use_point_start_end: + default_point_start_token = config.DEFAULT_POINT_START_TOKEN + default_point_end_token = config.DEFAULT_POINT_END_TOKEN + tokenizer.add_tokens([default_point_start_token, default_point_end_token], special_tokens=True) + + point_backbone_config['default_point_start_token'] = default_point_start_token + point_backbone_config['default_point_end_token'] = default_point_end_token + + point_backbone_config["point_start_token"] = tokenizer.convert_tokens_to_ids([default_point_start_token])[0] + point_backbone_config["point_end_token"] = tokenizer.convert_tokens_to_ids([default_point_end_token])[0] + + def initialize_tokenizer_point_backbone_config(self, tokenizer, device, fix_llm=True): + + config = self.config + point_backbone_config = self.get_model().point_backbone_config + mm_use_point_start_end = point_backbone_config['mm_use_point_start_end'] = config.mm_use_point_start_end + + default_point_patch_token = config.DEFAULT_POINT_PATCH_TOKEN + point_backbone_config['default_point_patch_token'] = default_point_patch_token + tokenizer.add_tokens([default_point_patch_token], special_tokens=True) # * no need to update embed since it will be replaced + self.resize_token_embeddings(len(tokenizer)) # ! resize_token_embeddings will make the tokens trainable again + point_backbone_config['point_patch_token'] = tokenizer.convert_tokens_to_ids([default_point_patch_token])[0] + + if mm_use_point_start_end: + default_point_start_token = config.DEFAULT_POINT_START_TOKEN + default_point_end_token = config.DEFAULT_POINT_END_TOKEN + point_backbone_config['default_point_start_token'] = default_point_start_token + point_backbone_config['default_point_end_token'] = default_point_end_token + + num_new_tokens = tokenizer.add_tokens([default_point_start_token, default_point_end_token], special_tokens=True) + self.resize_token_embeddings(len(tokenizer)) + point_backbone_config["point_start_token"] = tokenizer.convert_tokens_to_ids([default_point_start_token])[0] + point_backbone_config["point_end_token"] = tokenizer.convert_tokens_to_ids([default_point_end_token])[0] + + if num_new_tokens > 0: + input_embeddings = self.get_input_embeddings().weight.data + output_embeddings = self.get_output_embeddings().weight.data + + input_embeddings_avg = input_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + output_embeddings_avg = output_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + + input_embeddings[-num_new_tokens:] = input_embeddings_avg + output_embeddings[-num_new_tokens:] = output_embeddings_avg + + # need to update the input embeding, but no need to update the output embedding + for p in self.get_input_embeddings().parameters(): + p.requires_grad = True + if fix_llm: + self.get_model().orig_embeds_params = [self.get_input_embeddings().weight.data.clone().to(device=device)] # * only tuning the new embeddings + for p in self.get_output_embeddings().parameters(): # * the llm head + p.requires_grad = False + print(f"Setting output embeddings fixed and {num_new_tokens} new tokens' input embeddings 
trainable.") + else: + self.get_model().orig_embeds_params = None + for p in self.get_output_embeddings().parameters(): + p.requires_grad = True + print("Setting output embeddings and all input embeddings trainable.") + +AutoConfig.register("pointllm", PointLLMConfig) +AutoModelForCausalLM.register(PointLLMConfig, PointLLMLlamaForCausalLM) diff --git a/ThirdParty/PointLLM/pointllm/model/utils.py b/ThirdParty/PointLLM/pointllm/model/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b78741ca050c66d3c3891a236715f30652130c97 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/model/utils.py @@ -0,0 +1,24 @@ +import torch +from transformers import StoppingCriteria + +class KeywordsStoppingCriteria(StoppingCriteria): + def __init__(self, keywords, tokenizer, input_ids): + self.keywords = keywords + self.keyword_ids = [tokenizer(keyword).input_ids for keyword in keywords] + self.keyword_ids = [keyword_id[0] for keyword_id in self.keyword_ids if type(keyword_id) is list and len(keyword_id) == 1] + self.tokenizer = tokenizer + self.start_len = None + self.input_ids = input_ids + + def __call__(self, output_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool: + if self.start_len is None: + self.start_len = self.input_ids.shape[1] + else: + for keyword_id in self.keyword_ids: + if output_ids[0, -1] == keyword_id: + return True + outputs = self.tokenizer.batch_decode(output_ids[:, self.start_len:], skip_special_tokens=True)[0] + for keyword in self.keywords: + if keyword in outputs: + return True + return False diff --git a/ThirdParty/PointLLM/pointllm/train/llama_flash_attn_monkey_patch.py b/ThirdParty/PointLLM/pointllm/train/llama_flash_attn_monkey_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..fcd3ba7f9361649b5ba0e5a9db312e002c1cac44 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/train/llama_flash_attn_monkey_patch.py @@ -0,0 +1,107 @@ +# Adopted from https://github.com/lm-sys/FastChat. 
Below is the original copyright: +from typing import List, Optional, Tuple +from cv2 import exp + +import torch +from torch import nn + +import transformers +from transformers.models.llama.modeling_llama import apply_rotary_pos_emb + +from einops import rearrange + +# * some version is changed to flash_attn_varlen_qkvpacked_func, so need to check +try: + from flash_attn.flash_attn_interface import flash_attn_unpadded_qkvpacked_func +except: + from flash_attn.flash_attn_interface import flash_attn_varlen_qkvpacked_func as flash_attn_unpadded_qkvpacked_func +from flash_attn.bert_padding import unpad_input, pad_input + +def forward( + self, + hidden_states: torch.Tensor, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + attention_mask: Optional[torch.Tensor] = None, + output_attentions: bool = False, + use_cache: bool = False, +) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + """Input shape: Batch x Time x Channel + + attention_mask: [bsz, q_len] + """ + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states).view( + bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + key_states = self.k_proj(hidden_states).view( + bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + value_states = self.v_proj(hidden_states).view( + bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + # [bsz, q_len, nh, hd] + # [bsz, nh, q_len, hd] + + kv_seq_len = key_states.shape[-2] + offset = 0 + if past_key_value is not None: + offset = past_key_value[0].shape[-2] + kv_seq_len += offset + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, + key_states, + cos, + sin, + offset=offset) + # [bsz, nh, t, hd] + assert not output_attentions, "output_attentions is not supported" + assert not use_cache, "use_cache is not supported" + assert past_key_value is None, "past_key_value is not supported" + + # Flash attention codes from + # https://github.com/HazyResearch/flash-attention/blob/main/flash_attn/flash_attention.py + + # transform the data into the format required by flash attention + qkv = torch.stack([query_states, key_states, value_states], dim=2) # [bsz, nh, 3, q_len, hd] + qkv = qkv.transpose(1, 3) # [bsz, q_len, 3, nh, hd] + # We have disabled _prepare_decoder_attention_mask in LlamaModel + # the attention_mask should be the same as the key_padding_mask + key_padding_mask = attention_mask + + + if key_padding_mask is None: + qkv = rearrange(qkv, 'b s ... -> (b s) ...') + max_s = q_len + cu_q_lens = torch.arange(0, (bsz + 1) * q_len, step=q_len, dtype=torch.int32, + device=qkv.device) + output = flash_attn_unpadded_qkvpacked_func( + qkv, cu_q_lens, max_s, 0.0, + softmax_scale=None, causal=True + ) + output = rearrange(output, '(b s) ... 
-> b s ...', b=bsz) + else: + nheads = qkv.shape[-2] + x = rearrange(qkv, 'b s three h d -> b s (three h d)') + x_unpad, indices, cu_q_lens, max_s = unpad_input(x, key_padding_mask) + x_unpad = rearrange(x_unpad, 'nnz (three h d) -> nnz three h d', three=3, h=nheads) + output_unpad = flash_attn_unpadded_qkvpacked_func( + x_unpad, cu_q_lens, max_s, 0.0, + softmax_scale=None, causal=True + ) + output = rearrange(pad_input(rearrange(output_unpad, 'nnz h d -> nnz (h d)'), + indices, bsz, q_len), + 'b s (h d) -> b s h d', h=nheads) + return self.o_proj(rearrange(output, + 'b s h d -> b s (h d)')), None, None + + +# Disable the transformation of the attention mask in LlamaModel as the flash attention +# requires the attention mask to be the same as the key_padding_mask +def _prepare_decoder_attention_mask(self, attention_mask, input_shape, + inputs_embeds, past_key_values_length): + # [bsz, seq_len] + return attention_mask + + +def replace_llama_attn_with_flash_attn(): + transformers.models.llama.modeling_llama.LlamaModel._prepare_decoder_attention_mask = _prepare_decoder_attention_mask + transformers.models.llama.modeling_llama.LlamaAttention.forward = forward diff --git a/ThirdParty/PointLLM/pointllm/train/pointllm_trainer.py b/ThirdParty/PointLLM/pointllm/train/pointllm_trainer.py new file mode 100644 index 0000000000000000000000000000000000000000..096fa75d673e8f3f51b8cc33997c76cc927c1e9f --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/train/pointllm_trainer.py @@ -0,0 +1,49 @@ +import os +import torch +import torch.nn as nn + +from transformers import Trainer +from typing import Optional + + +def unwrap_model(model: nn.Module) -> nn.Module: + """ + Recursively unwraps a model from potential containers (as used in distributed training). + + Args: + model (`torch.nn.Module`): The model to unwrap. + """ + # since there could be multiple levels of wrapping, unwrap recursively + if hasattr(model, "module"): + return unwrap_model(model.module) + else: + return model + + +class PointLLMTrainer(Trainer): + + def _save(self, output_dir: Optional[str] = None, state_dict=None): + if getattr(self.args, 'tune_mm_mlp_adapter', False): + # Save the model + _state_dict = state_dict + if _state_dict is None: + # Only save the model itself if we are using distributed training + model_to_save = unwrap_model(self.model) + _state_dict = model_to_save.state_dict() + + weight_to_save = {} + keys_to_match = ['point_proj', 'embed_tokens', 'embed_in'] + for k, v in _state_dict.items(): + if any(key_match in k for key_match in keys_to_match): + weight_to_save[k] = v + + current_folder = output_dir.split('/')[-1] + parent_folder = os.path.dirname(output_dir) + if current_folder.startswith('checkpoint-'): + mm_projector_folder = os.path.join(parent_folder, "point_proj") + os.makedirs(mm_projector_folder, exist_ok=True) + torch.save(weight_to_save, os.path.join(mm_projector_folder, f'{current_folder}.bin')) + else: + torch.save(weight_to_save, os.path.join(output_dir, f'point_proj.bin')) + + super(PointLLMTrainer, self)._save(output_dir, state_dict) diff --git a/ThirdParty/PointLLM/pointllm/train/train.py b/ThirdParty/PointLLM/pointllm/train/train.py new file mode 100644 index 0000000000000000000000000000000000000000..6c0f07f6980930fc991a05ac7a5aebf456f62879 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/train/train.py @@ -0,0 +1,216 @@ +# Adopted from https://github.com/lm-sys/FastChat. Below is the original copyright: +# Adopted from tatsu-lab@stanford_alpaca. 
Below is the original copyright: +# Copyright 2023 Rohan Taori, Ishaan Gulrajani, Tianyi Zhang, Yann Dubois, Xuechen Li +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass, field +import pathlib +from typing import Optional, List + + +import transformers +from pointllm.train.pointllm_trainer import PointLLMTrainer + +from pointllm import conversation as conversation_lib +from pointllm.model import * +from pointllm.data import make_object_point_data_module + +# * logger +from pointllm.utils import build_logger + +IGNORE_INDEX = -100 + +DEFAULT_PAD_TOKEN = "[PAD]" +DEFAULT_EOS_TOKEN = "" +DEFAULT_BOS_TOKEN = "" +DEFAULT_UNK_TOKEN = "" + + +@dataclass +class ModelArguments: + model_name_or_path: Optional[str] = field(default="") + version: Optional[str] = field(default="v1") + +@dataclass +class DataArguments: + data_path: str = field(default="ScanNet", metadata={"help": "Path to the training data."}) + anno_path: str = field(default=None, metadata={"help": "Path to the utterance data. If None, will use referit3d by defautl."}) + use_color: bool = field(default=False, metadata={"help": "Whether to use color."}) + data_debug_num: int = field(default=0, metadata={"help": "Number of data to use in debug mode. If larger than 0, use debug mode, else use the whole data"}) + split_train_val: bool = field(default=False, metadata={"help": "Whether to split train and val."}) + split_ratio: float = field(default=0.9, metadata={"help": "Ratio of train and val."}) + pointnum: int = field(default=8192, metadata={"help": "Number of points."}) + conversation_types: List[str] = field(default_factory=lambda: ["simple_description"], metadata={"help": "Conversation types to use."}) + is_multimodal: bool = True + +@dataclass +class TrainingArguments(transformers.TrainingArguments): + # * can refer to https://huggingface.co/docs/transformers/v4.28.1/en/main_classes/trainer#transformers.TrainingArgument + cache_dir: Optional[str] = field(default=None) + optim: str = field(default="adamw_torch") + model_max_length: int = field( + default=2048, + metadata={"help": "Maximum sequence length. 
Sequences will be right padded (and possibly truncated)."}, + ) + model_debug: bool = field(default=False, metadata={"help": "Whether to use small model."}) # * whether to load checkpoints at the mo + fix_llm: bool = field(default=True, metadata={"help": "Whether to fix the LLM."}) + fix_pointnet: bool = field(default=True, metadata={"help": "Whether to fix the PointNet."}) + + remove_unused_columns: bool = field(default=False) + force_fsdp: bool = field(default=False) + + # * for two stage training + tune_mm_mlp_adapter: bool = field(default=True) # * set True when pre-training, and false when fine-tuning + stage_2: bool = field(default=False) # * set True when fine-tuning + pretrained_mm_mlp_adapter: Optional[str] = field(default=None) # * path to the pre-trained projector & output_embed & input_embed + detatch_point_token: bool = field(default=False) # * deprecated + # * point backbone ckpt path + point_backbone_ckpt: str = field(default=None) + +def safe_save_model_for_hf_trainer(trainer: transformers.Trainer, + output_dir: str): + """Collects the state dict and dump to disk.""" + state_dict = trainer.model.state_dict() + if trainer.args.should_save: + cpu_state_dict = { + key: value.cpu() + for key, value in state_dict.items() + } + del state_dict + trainer._save(output_dir, state_dict=cpu_state_dict) # noqa + + +def train(): + parser = transformers.HfArgumentParser( + (ModelArguments, DataArguments, TrainingArguments)) + model_args, data_args, training_args = parser.parse_args_into_dataclasses() + + training_args.log_level = "info" # * default is passive(warning) + # * build logger + logger = build_logger(__name__, training_args.output_dir + '/train.log') + + if training_args.model_debug: + # * do not load checkpoint, load from config + config = transformers.AutoConfig.from_pretrained( + model_args.model_name_or_path, + cache_dir=training_args.cache_dir, + ) + model = PointLLMLlamaForCausalLM._from_config(config) + else: + model = PointLLMLlamaForCausalLM.from_pretrained( + model_args.model_name_or_path, + cache_dir=training_args.cache_dir, + ) + + model.config.use_cache = False + + if training_args.fix_llm: + # * This will fix all the parameters + logger.info("LLM is fixed. Fix_llm flag is set to True") + # * fix llama, lm_head, pointnet, projection layer here + model.requires_grad_(False) + model.get_model().fix_llm = True + model.get_model().point_proj.requires_grad_(True) + model.get_model().point_backbone.requires_grad_(True) # * set as True for fsdp, use fix_pointnet flag to control + else: + model.get_model().fix_llm = False + logger.warning("LLM is trainable. Fix_llm flag is set to False") + + tokenizer = transformers.AutoTokenizer.from_pretrained( + model_args.model_name_or_path, + cache_dir=training_args.cache_dir, + model_max_length=training_args.model_max_length, + padding_side="right", + use_fast=False, + ) + + if model_args.version == "v0" or "v0" in model_args.model_name_or_path: + raise ValueError("v0 is deprecated.") + else: + tokenizer.pad_token = tokenizer.unk_token + conversation_lib.default_conversation = conversation_lib.conv_templates["vicuna_v1_1"] + + if not training_args.fix_pointnet: + # * not fix pointnet + logger.info("Point backbone is trainable. Fix_pointnet flag is set to False, pointnet grad will be recorded.") + model.get_model().fix_pointnet = False + else: + logger.info("Point backbone is fixed. 
Fix_pointnet flag is set to True, pointnet grad will not be recorded.") + model.get_model().fix_pointnet = True # * use with torch.inference_mode to control, not requires_grad for fsdp for second stage + if not training_args.stage_2: + logger.info("Set requires_grad of point backbone to False") + model.get_model().point_backbone.requires_grad_(False) # * fix pointnet for first stage, need for fsdp in stage2 + + if training_args.tune_mm_mlp_adapter: + # * do not fix the projection layer + # * may need to set the embed_tokens to require_grad = True if added new tokens + # * this is done in initialize_tokenizer_point_backbone_config + logger.info("Point projection layer is trainable.") + else: + model.get_model().point_proj.requires_grad_(False) + logger.info("Point projection layer is fixed.") + + if not training_args.stage_2: + # * we assume in stage2, llm, point_backbone, and projection layer can be loaded from the model checkpoint + print(f"Default point_backbone_ckpt is {training_args.point_backbone_ckpt}.") + model.get_model().load_point_backbone_checkpoint(training_args.point_backbone_ckpt) + model.initialize_tokenizer_point_backbone_config(tokenizer=tokenizer, device=training_args.device, fix_llm=training_args.fix_llm) + else: + # * stage2 + model.initialize_tokenizer_point_backbone_config_wo_embedding(tokenizer=tokenizer) + + point_backbone_config = model.get_model().point_backbone_config + + data_args.point_token_len = point_backbone_config['point_token_len'] + data_args.mm_use_point_start_end = point_backbone_config['mm_use_point_start_end'] + data_args.point_backbone_config = point_backbone_config + + params_no_grad = [n for n, p in model.named_parameters() if not p.requires_grad] + if len(params_no_grad) > 0: + if training_args.fsdp is not None and len(training_args.fsdp) > 0: + if len(params_no_grad) < 10: + print('[WARNING] Attempting to use FSDP while {} parameters do not require gradients: {}'. format(len(params_no_grad), params_no_grad)) + else: + print('[WARNING] Attempting to use FSDP while {} parameters do not require gradients: {}...(omitted)'. format(len(params_no_grad), ', '.join(params_no_grad[:10]))) + print("[WARNING] Attempting to use FSDP with partially frozen parameters, this is experimental.") + print("[WARNING] As of 4/30/23, this feature requires PyTorch-nightly build. 
See here for details: https://github.com/haotian-liu/LLaVA#experimental-use-fsdp-to-save-memory-in-pretraining") + + from torch.distributed.fsdp.fully_sharded_data_parallel import FullyShardedDataParallel as FSDP + def patch_FSDP_use_orig_params(func): + def wrap_func(*args, **kwargs): + use_orig_params = kwargs.pop('use_orig_params', True) + return func(*args, **kwargs, use_orig_params=use_orig_params) + return wrap_func + + FSDP.__init__ = patch_FSDP_use_orig_params(FSDP.__init__) + + data_module = make_object_point_data_module(tokenizer=tokenizer, + data_args=data_args) + + trainer = PointLLMTrainer(model=model, + tokenizer=tokenizer, + args=training_args, + **data_module) + + if list(pathlib.Path(training_args.output_dir).glob("checkpoint-*")): + trainer.train(resume_from_checkpoint=True) + else: + trainer.train() + trainer.save_state() + safe_save_model_for_hf_trainer(trainer=trainer, + output_dir=training_args.output_dir) + + +if __name__ == "__main__": + train() diff --git a/ThirdParty/PointLLM/pointllm/train/train_mem.py b/ThirdParty/PointLLM/pointllm/train/train_mem.py new file mode 100644 index 0000000000000000000000000000000000000000..67d8035750cd9a463547eac788dc856e79375ad2 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/train/train_mem.py @@ -0,0 +1,13 @@ +# Adopted from https://github.com/lm-sys/FastChat. Below is the original copyright: +# Adopted from tatsu-lab@stanford_alpaca. Below is the original copyright: +# Make it more memory efficient by monkey patching the LLaMA model with FlashAttn. + +# Need to call this before importing transformers. +from pointllm.train.llama_flash_attn_monkey_patch import replace_llama_attn_with_flash_attn + +replace_llama_attn_with_flash_attn() + +from pointllm.train.train import train + +if __name__ == "__main__": + train() diff --git a/ThirdParty/PointLLM/pointllm/utils.py b/ThirdParty/PointLLM/pointllm/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..95a35e802b162dde6d4b83d50100b515113a3719 --- /dev/null +++ b/ThirdParty/PointLLM/pointllm/utils.py @@ -0,0 +1,154 @@ +import logging +import logging.handlers +import os +import sys + +import requests + +import yaml +from easydict import EasyDict + +server_error_msg = "**NETWORK ERROR DUE TO HIGH TRAFFIC. PLEASE REGENERATE OR REFRESH THIS PAGE.**" +moderation_msg = "YOUR INPUT VIOLATES OUR CONTENT MODERATION GUIDELINES. PLEASE TRY AGAIN." 
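+# Shared log-file handler: build_logger() below creates it lazily on first call and attaches it to every logger registered at that point.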
+ +handler = None + + +def merge_new_config(config, new_config): + for key, val in new_config.items(): + if not isinstance(val, dict): + if key == '_base_': + with open(new_config['_base_'], 'r') as f: + try: + val = yaml.load(f, Loader=yaml.FullLoader) + except: + val = yaml.load(f) + config[key] = EasyDict() + merge_new_config(config[key], val) + else: + config[key] = val + continue + if key not in config: + config[key] = EasyDict() + merge_new_config(config[key], val) + return config + +def cfg_from_yaml_file(cfg_file): + config = EasyDict() + with open(cfg_file, 'r') as f: + new_config = yaml.load(f, Loader=yaml.FullLoader) + merge_new_config(config=config, new_config=new_config) + return config + + +def build_logger(logger_name, logger_filepath): + global handler + + formatter = logging.Formatter( + fmt="%(asctime)s - %(levelname)s - %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + # Set the format of root handlers + if not logging.getLogger().handlers: + logging.basicConfig(level=logging.INFO) + logging.getLogger().handlers[0].setFormatter(formatter) + + # Redirect stdout and stderr to loggers + stdout_logger = logging.getLogger("stdout") + stdout_logger.setLevel(logging.INFO) + sl = StreamToLogger(stdout_logger, logging.INFO) + sys.stdout = sl + + stderr_logger = logging.getLogger("stderr") + stderr_logger.setLevel(logging.ERROR) + sl = StreamToLogger(stderr_logger, logging.ERROR) + sys.stderr = sl + + # Get logger + logger = logging.getLogger(logger_name) + logger.setLevel(logging.INFO) + + # Add a file handler for all loggers + if handler is None: + # * get the logger_file's directory, and create it if not exist + logger_filedir = os.path.dirname(logger_filepath) + os.makedirs(logger_filedir, exist_ok=True) + handler = logging.handlers.TimedRotatingFileHandler( + logger_filepath, when='D', utc=True) + handler.setFormatter(formatter) + + for name, item in logging.root.manager.loggerDict.items(): + if isinstance(item, logging.Logger): + item.addHandler(handler) + + return logger + + +class StreamToLogger(object): + """ + Fake file-like stream object that redirects writes to a logger instance. + """ + def __init__(self, logger, log_level=logging.INFO): + self.terminal = sys.stdout + self.logger = logger + self.log_level = log_level + self.linebuf = '' + + def __getattr__(self, attr): + return getattr(self.terminal, attr) + + def write(self, buf): + temp_linebuf = self.linebuf + buf + self.linebuf = '' + for line in temp_linebuf.splitlines(True): + # From the io.TextIOWrapper docs: + # On output, if newline is None, any '\n' characters written + # are translated to the system default line separator. + # By default sys.stdout.write() expects '\n' newlines and then + # translates them so this is still cross platform. + if line[-1] == '\n': + self.logger.log(self.log_level, line.rstrip()) + else: + self.linebuf += line + + def flush(self): + if self.linebuf != '': + self.logger.log(self.log_level, self.linebuf.rstrip()) + self.linebuf = '' + + +def disable_torch_init(): + """ + Disable the redundant torch default initialization to accelerate model creation. + """ + import torch + setattr(torch.nn.Linear, "reset_parameters", lambda self: None) + setattr(torch.nn.LayerNorm, "reset_parameters", lambda self: None) + + +def violates_moderation(text): + """ + Check whether the text violates OpenAI moderation API. 
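+    Returns False if the request fails or the response cannot be parsed.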
+ """ + url = "https://api.openai.com/v1/moderations" + headers = {"Content-Type": "application/json", + "Authorization": "Bearer " + os.environ["OPENAI_API_KEY"]} + text = text.replace("\n", "") + data = "{" + '"input": ' + f'"{text}"' + "}" + data = data.encode("utf-8") + try: + ret = requests.post(url, headers=headers, data=data, timeout=5) + flagged = ret.json()["results"][0]["flagged"] + except requests.exceptions.RequestException as e: + flagged = False + except KeyError as e: + flagged = False + + return flagged + + +def pretty_print_semaphore(semaphore): + if semaphore is None: + return "None" + return f"Semaphore(value={semaphore._value}, locked={semaphore.locked()})" diff --git a/ThirdParty/PointLLM/pyproject.toml b/ThirdParty/PointLLM/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..4660f79fb94ad9363c9072525e3f876dda29c9e5 --- /dev/null +++ b/ThirdParty/PointLLM/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = ["setuptools>=61.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "pointllm" +version = "0.1.2" +description = "Empower large language models to understand point clouds." +readme = "README.md" +requires-python = ">=3.8" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", +] +dependencies = [ + "accelerate", "einops", "fastapi", "gradio", "markdown2[all]", "numpy", + "requests", "sentencepiece", "tokenizers==0.12.1", + "torch>=2.0", "torchvision", "uvicorn", "wandb", + "shortuuid", + "deepspeed", "peft", + "transformers @ git+https://github.com/huggingface/transformers.git@cae78c46", + "openai", "tqdm", + "easydict", "timm==0.4.12", "ftfy==6.0.1", "regex", "open3d==0.16.0", "h5py", "termcolor", + "plyfile", "nltk", "rouge", "scikit-learn", "py-rouge" +] + +[tool.setuptools.packages.find] +exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"] + +[tool.wheel] +exclude = ["assets*", "benchmark*", "docs", "dist*", "playground*", "scripts*", "tests*"] diff --git a/ThirdParty/PointLLM/scripts/PointLLM_train_stage1.sh b/ThirdParty/PointLLM/scripts/PointLLM_train_stage1.sh new file mode 100755 index 0000000000000000000000000000000000000000..392f9fec15811df40889690a21b15a9e29b61b3c --- /dev/null +++ b/ThirdParty/PointLLM/scripts/PointLLM_train_stage1.sh @@ -0,0 +1,43 @@ +master_port=$((RANDOM % (65535 - 49152 + 1) + 49152)) +# Get the filename without extension +filename=$(basename "$0" | cut -f 1 -d '.') + +dir_path=PointLLM +model_name_or_path=checkpoints/PointLLM_7B_v1.1_init +data_path=data/objaverse_data +anno_path=data/anno_data/PointLLM_brief_description_660K_filtered.json # or PointLLM_brief_description_660K.json (including val sets) +output_dir=outputs/PointLLM_train_stage1/$filename +point_backbone_ckpt=$model_name_or_path/point_bert_v1.2.pt + +cd $dir_path + +PYTHONPATH=$dir_path:$PYTHONPATH \ +torchrun --nnodes=1 --nproc_per_node=8 --master_port=$master_port pointllm/train/train_mem.py \ + --model_name_or_path $model_name_or_path \ + --data_path $data_path \ + --anno_path $anno_path \ + --output_dir $output_dir \ + --version v1 \ + --model_max_length 2048 \ + --num_train_epochs 3 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 4 \ + --gradient_accumulation_steps 1 \ + --evaluation_strategy "no" \ + --save_strategy "no" \ + --save_steps 2400 \ + --save_total_limit 1 \ + --learning_rate 2e-3 \ + --weight_decay 0. 
\ + --warmup_ratio 0.03 \ + --lr_scheduler_type "cosine" \ + --logging_steps 1 \ + --bf16 True \ + --evaluation_strategy "no" \ + --fix_llm True \ + --fix_pointnet True \ + --gradient_checkpointing True \ + --report_to wandb \ + --run_name $filename \ + --point_backbone_ckpt $point_backbone_ckpt \ + --use_color True \ No newline at end of file diff --git a/ThirdParty/PointLLM/scripts/PointLLM_train_stage2.sh b/ThirdParty/PointLLM/scripts/PointLLM_train_stage2.sh new file mode 100755 index 0000000000000000000000000000000000000000..eda415daf5c96b6367aab51e26468ae49f06ed60 --- /dev/null +++ b/ThirdParty/PointLLM/scripts/PointLLM_train_stage2.sh @@ -0,0 +1,46 @@ +master_port=$((RANDOM % (65535 - 49152 + 1) + 49152)) +# Get the filename without extension +filename=$(basename "$0" | cut -f 1 -d '.') + +dir_path=PointLLM + +model_name_or_path=outputs/PointLLM_train_stage1/PointLLM_train_stage1 # Path to the output dir of stage 1 training +data_path=data/objaverse_data +anno_path=data/anno_data/PointLLM_complex_instruction_70K.json +output_dir=outputs/PointLLM_train_stage2/$filename + +cd $dir_path + +PYTHONPATH=$dir_path:$PYTHONPATH \ +torchrun --nnodes=1 --nproc_per_node=8 --master_port=$master_port pointllm/train/train_mem.py \ + --model_name_or_path $model_name_or_path \ + --data_path $data_path \ + --anno_path $anno_path \ + --output_dir $output_dir \ + --version v1 \ + --model_max_length 2048 \ + --num_train_epochs 3 \ + --per_device_train_batch_size 4 \ + --per_device_eval_batch_size 1 \ + --gradient_accumulation_steps 1 \ + --evaluation_strategy "no" \ + --eval_steps 100 \ + --save_strategy "no" \ + --save_steps 2400 \ + --save_total_limit 1 \ + --learning_rate 2e-5 \ + --weight_decay 0. \ + --warmup_ratio 0.03 \ + --lr_scheduler_type "cosine" \ + --logging_steps 1 \ + --bf16 True \ + --fix_llm False \ + --fix_pointnet True \ + --report_to wandb \ + --run_name $filename \ + --gradient_checkpointing True \ + --stage_2 True \ + --fsdp "full_shard auto_wrap" \ + --fsdp_transformer_layer_cls_to_wrap 'LlamaDecoderLayer' \ + --conversation_types "detailed_description" "single_round" "multi_round" \ + --use_color True \ No newline at end of file diff --git a/ThirdParty/Rignet_utils/Rignet_loss.py b/ThirdParty/Rignet_utils/Rignet_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..b0803afe7d3d2c5b35830400825d8139acc28310 --- /dev/null +++ b/ThirdParty/Rignet_utils/Rignet_loss.py @@ -0,0 +1,163 @@ +#------------------------------------------------------------------------------- +# Name: utils.py +# Purpose: utilize for Loss function in RigNet +# RigNet Copyright 2020 University of Massachusetts +# RigNet is made available under General Public License Version 3 (GPLv3), or under a Commercial License. +# Please see the LICENSE README.txt file in the main directory for more information and instruction on using and licensing RigNet. 
+#------------------------------------------------------------------------------- + + +from apted import APTED, Config +import numpy as np + +class CustomConfig(Config): + valuecls = float + + def rename(self, node1, node2): + """Compares attribute .value of trees""" + # return 1 if node1.value != node2.value else 0 + # if not node1 or not node2: + # return 1.0 + # return np.sqrt(np.sum((np.array(node1.pos) - np.array(node2.pos))**2)) + return 0 + + def children(self, node): + """Get left and right children of binary tree""" + # return [x for x in (node.left, node.right) if x] + if not node: + return list() + else: + return node.children + + +def getJointNum(skel): + this_level = [skel.root] + n_joint = 1 + while this_level: + next_level = [] + for p_node in this_level: + n_joint += len(p_node.children) + next_level += p_node.children + this_level = next_level + return n_joint + + +def dist_pts2bone(pts, pos_1, pos_2): + l2 = np.sum((pos_2 - pos_1) ** 2) + if l2 < 1e-10: + dist_to_lineseg = np.linalg.norm(pts - pos_1, axis=1) + dist_proj = np.linalg.norm(pts - pos_1, axis=1) + else: + t_ = np.sum((pts - pos_1[np.newaxis, :]) * (pos_2 - pos_1), axis=1) / l2 + t = np.clip(t_, 0, 1) + t_pos = pos_1[np.newaxis, :] + t[:, np.newaxis] * (pos_2 - pos_1)[np.newaxis, :] + lineseg_len = np.linalg.norm(pos_2 - pos_1) + dist_proj = np.zeros(len(t_)) + dist_proj[np.argwhere(t_ < 0.5).squeeze()] = np.abs(t_[np.argwhere(t_ < 0.5).squeeze()] - 0.0) * lineseg_len + dist_proj[np.argwhere(t_ >= 0.5).squeeze()] = np.abs(t_[np.argwhere(t_ >= 0.5).squeeze()] - 1.0) * lineseg_len + dist_to_lineseg = np.linalg.norm(pts - t_pos, axis=1) + return dist_to_lineseg, dist_proj + + +def chamfer_dist(pt1, pt2): + pt1 = pt1[np.newaxis, :, :] + pt2 = pt2[:, np.newaxis, :] + dist = np.sqrt(np.sum((pt1 - pt2) ** 2, axis=2)) + min_left = np.mean(np.min(dist, axis=0)) + min_right = np.mean(np.min(dist, axis=1)) + #print(min_left, min_right) + return (min_left + min_right) / 2 + + +def oneway_chamfer(pt_src, pt_dst): + pt1 = pt_src[np.newaxis, :, :] + pt2 = pt_dst[:, np.newaxis, :] + dist = np.sqrt(np.sum((pt1 - pt2) ** 2, axis=2)) + avg_dist = np.mean(np.min(dist, axis=0)) + return avg_dist + + +def getJointArr(skel): + joints = [] + this_level = [skel.root] + while this_level: + next_level = [] + for p_node in this_level: + joint_ = np.array(p_node.pos) + joint_ = joint_[np.newaxis, :] + joints.append(joint_) + next_level += p_node.children + this_level = next_level + joints = np.concatenate(joints, axis=0) + return joints + + +def edit_dist(tree1, tree2): + #n_joint1 = getJointNum(tree2) + #n_joint2 = getJointNum(tree2) + apted = APTED(tree1.root, tree2.root, CustomConfig()) + ted = apted.compute_edit_distance() + #ted /= max(n_joint1, n_joint2) + return ted + + +def tree_dist(tree1, tree2, ted_weight): + # get edit distance + ted = edit_dist(tree1, tree2) + + # get chamfer distance + joint_arr_1 = getJointArr(tree1) + joint_arr_2 = getJointArr(tree2) + cd = chamfer_dist(joint_arr_1, joint_arr_2) + + return (1-ted_weight)*cd + ted_weight * ted + + +def sample_bone(p_pos, ch_pos): + ray = ch_pos - p_pos + bone_length = np.sqrt(np.sum((p_pos - ch_pos) ** 2)) + num_step = np.round(bone_length / 0.005) + i_step = np.arange(0, num_step + 1) + unit_step = ray / (num_step + 1e-30) + unit_step = np.repeat(unit_step, num_step+1, axis=0) + res = p_pos + unit_step * i_step[:, np.newaxis] + return res + + +def sample_skel(skel): + bone_sample = [] + this_level = [skel.root] + while this_level: + next_level = [] + for p_node in 
this_level: + p_pos = np.array([p_node.pos]) + next_level += p_node.children + for c_node in p_node.children: + ch_pos = np.array([c_node.pos]) + res = sample_bone(p_pos, ch_pos) + bone_sample.append(res) + this_level = next_level + bone_sample = np.concatenate(bone_sample, axis=0) + return bone_sample + + +def bone2bone_chamfer_dist(skel_1, skel_2): + bone_sample_1 = sample_skel(skel_1) + bone_sample_2 = sample_skel(skel_2) + pt1 = bone_sample_1[np.newaxis, :, :] + pt2 = bone_sample_2[:, np.newaxis, :] + dist = np.sqrt(np.sum((pt1 - pt2) ** 2, axis=2)) + min_left = np.mean(np.min(dist, axis=0)) + min_right = np.mean(np.min(dist, axis=1)) + # print(min_left, min_right) + return (min_left + min_right) / 2 + + +def joint2bone_chamfer_dist(skel1, skel2): + bone_sample_1 = sample_skel(skel1) + bone_sample_2 = sample_skel(skel2) + joint_1 = getJointArr(skel1) + joint_2 = getJointArr(skel2) + dist1 = oneway_chamfer(joint_1, bone_sample_2) + dist2 = oneway_chamfer(joint_2, bone_sample_1) + return (dist1 + dist2) / 2 \ No newline at end of file diff --git a/ThirdParty/Rignet_utils/__init__.py b/ThirdParty/Rignet_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ThirdParty/Rignet_utils/__pycache__/__init__.cpython-310.pyc b/ThirdParty/Rignet_utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0abf79f5166fad1a19e8c11107ffb93608c3451f Binary files /dev/null and b/ThirdParty/Rignet_utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/Rignet_utils/__pycache__/binvox_rw.cpython-310.pyc b/ThirdParty/Rignet_utils/__pycache__/binvox_rw.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6b29eaa16a722bb7a1216b6dd954d7dedbbe608 Binary files /dev/null and b/ThirdParty/Rignet_utils/__pycache__/binvox_rw.cpython-310.pyc differ diff --git a/ThirdParty/Rignet_utils/binvox_rw.py b/ThirdParty/Rignet_utils/binvox_rw.py new file mode 100644 index 0000000000000000000000000000000000000000..3e42024802c0428438d4ecf42a07d68cf285008f --- /dev/null +++ b/ThirdParty/Rignet_utils/binvox_rw.py @@ -0,0 +1,246 @@ +# Copyright (C) 2012 Daniel Maturana +# This file is part of binvox-rw-py. +# +# binvox-rw-py is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# binvox-rw-py is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with binvox-rw-py. If not, see . +# + + +import numpy as np +import struct + + +class Voxels(object): + """ Holds a binvox model. + data is either a three-dimensional numpy boolean array (dense representation) + or a two-dimensional numpy float array (coordinate representation). + + dims, translate and scale are the model metadata. + + dims are the voxel dimensions, e.g. [32, 32, 32] for a 32x32x32 model. + + scale and translate relate the voxels to the original model coordinates. 
+ + To translate voxel coordinates i, j, k to original coordinates x, y, z: + + x_n = (i+.5)/dims[0] + y_n = (j+.5)/dims[1] + z_n = (k+.5)/dims[2] + x = scale*x_n + translate[0] + y = scale*y_n + translate[1] + z = scale*z_n + translate[2] + + """ + + def __init__(self, data, dims, translate, scale, axis_order): + self.data = data + self.dims = dims + self.translate = translate + self.scale = scale + assert (axis_order in ('xzy', 'xyz')) + self.axis_order = axis_order + + def clone(self): + data = self.data.copy() + dims = self.dims[:] + translate = self.translate[:] + return Voxels(data, dims, translate, self.scale, self.axis_order) + + def write(self, fp): + write(self, fp) + +def read_header(fp): + """ Read binvox header. Mostly meant for internal use. + """ + line = fp.readline().strip() + if not line.startswith(b'#binvox'): + raise IOError('Not a binvox file') + dims = list(map(int, fp.readline().strip().split(b' ')[1:])) + translate = list(map(float, fp.readline().strip().split(b' ')[1:])) + scale = list(map(float, fp.readline().strip().split(b' ')[1:]))[0] + line = fp.readline() + + return dims, translate, scale + +def read_as_3d_array(fp, fix_coords=True): + """ Read binary binvox format as array. + + Returns the model with accompanying metadata. + + Voxels are stored in a three-dimensional numpy array, which is simple and + direct, but may use a lot of memory for large models. (Storage requirements + are 8*(d^3) bytes, where d is the dimensions of the binvox model. Numpy + boolean arrays use a byte per element). + + Doesn't do any checks on input except for the '#binvox' line. + """ + dims, translate, scale = read_header(fp) + raw_data = np.frombuffer(fp.read(), dtype=np.uint8) + # if just using reshape() on the raw data: + # indexing the array as array[i,j,k], the indices map into the + # coords as: + # i -> x + # j -> z + # k -> y + # if fix_coords is true, then data is rearranged so that + # mapping is + # i -> x + # j -> y + # k -> z + values, counts = raw_data[::2], raw_data[1::2] + data = np.repeat(values, counts).astype(bool) + data = data.reshape(dims) + if fix_coords: + # xzy to xyz TODO the right thing + data = np.transpose(data, (0, 2, 1)) + axis_order = 'xyz' + else: + axis_order = 'xzy' + return Voxels(data, dims, translate, scale, axis_order) + +def read_as_coord_array(fp, fix_coords=True): + """ Read binary binvox format as coordinates. + + Returns binvox model with voxels in a "coordinate" representation, i.e. an + 3 x N array where N is the number of nonzero voxels. Each column + corresponds to a nonzero voxel and the 3 rows are the (x, z, y) coordinates + of the voxel. (The odd ordering is due to the way binvox format lays out + data). Note that coordinates refer to the binvox voxels, without any + scaling or translation. + + Use this to save memory if your model is very sparse (mostly empty). + + Doesn't do any checks on input except for the '#binvox' line. + """ + dims, translate, scale = read_header(fp) + raw_data = np.frombuffer(fp.read(), dtype=np.uint8) + + values, counts = raw_data[::2], raw_data[1::2] + + sz = np.prod(dims) + index, end_index = 0, 0 + end_indices = np.cumsum(counts) + indices = np.concatenate(([0], end_indices[:-1])).astype(end_indices.dtype) + + values = values.astype(bool) + indices = indices[values] + end_indices = end_indices[values] + + nz_voxels = [] + for index, end_index in zip(indices, end_indices): + nz_voxels.extend(range(index, end_index)) + nz_voxels = np.array(nz_voxels) + # TODO are these dims correct? 
+ # according to docs, + # index = x * wxh + z * width + y; // wxh = width * height = d * d + + x = nz_voxels / (dims[0]*dims[1]) + zwpy = nz_voxels % (dims[0]*dims[1]) # z*w + y + z = zwpy / dims[0] + y = zwpy % dims[0] + if fix_coords: + data = np.vstack((x, y, z)) + axis_order = 'xyz' + else: + data = np.vstack((x, z, y)) + axis_order = 'xzy' + + #return Voxels(data, dims, translate, scale, axis_order) + return Voxels(np.ascontiguousarray(data), dims, translate, scale, axis_order) + +def dense_to_sparse(voxel_data, dtype=int): + """ From dense representation to sparse (coordinate) representation. + No coordinate reordering. + """ + if voxel_data.ndim!=3: + raise ValueError('voxel_data is wrong shape; should be 3D array.') + return np.asarray(np.nonzero(voxel_data), dtype) + +def sparse_to_dense(voxel_data, dims, dtype=bool): + if voxel_data.ndim!=2 or voxel_data.shape[0]!=3: + raise ValueError('voxel_data is wrong shape; should be 3xN array.') + if np.isscalar(dims): + dims = [dims]*3 + dims = np.atleast_2d(dims).T + # truncate to integers + xyz = voxel_data.astype(int) + # discard voxels that fall outside dims + valid_ix = ~np.any((xyz < 0) | (xyz >= dims), 0) + xyz = xyz[:,valid_ix] + out = np.zeros(dims.flatten(), dtype=dtype) + out[tuple(xyz)] = True + return out + +#def get_linear_index(x, y, z, dims): + #""" Assuming xzy order. (y increasing fastest. + #TODO ensure this is right when dims are not all same + #""" + #return x*(dims[1]*dims[2]) + z*dims[1] + y + +def bwrite(fp,s): + fp.write(s.encode()) + +def write_pair(fp,state, ctr): + fp.write(struct.pack('B',state)) + fp.write(struct.pack('B',ctr)) + +def write(voxel_model, fp): + """ Write binary binvox format. + + Note that when saving a model in sparse (coordinate) format, it is first + converted to dense format. + + Doesn't check if the model is 'sane'. 
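+
+    Example (hypothetical file names; assumes a 'model.binvox' exists on disk):
+        with open('model.binvox', 'rb') as f:
+            vox = read_as_3d_array(f)
+        with open('copy.binvox', 'wb') as f:
+            write(vox, f)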
+ + """ + if voxel_model.data.ndim==2: + # TODO avoid conversion to dense + dense_voxel_data = sparse_to_dense(voxel_model.data, voxel_model.dims) + else: + dense_voxel_data = voxel_model.data + + bwrite(fp, '#binvox 1\n') + bwrite(fp, 'dim ' + ' '.join(map(str, voxel_model.dims)) + '\n') + bwrite(fp, 'translate ' + ' '.join(map(str, voxel_model.translate)) + '\n') + bwrite(fp, 'scale ' + str(voxel_model.scale) + '\n') + bwrite(fp, 'data\n') + if not voxel_model.axis_order in ('xzy', 'xyz'): + raise ValueError('Unsupported voxel model axis order') + + if voxel_model.axis_order=='xzy': + voxels_flat = dense_voxel_data.flatten() + elif voxel_model.axis_order=='xyz': + voxels_flat = np.transpose(dense_voxel_data, (0, 2, 1)).flatten() + + # keep a sort of state machine for writing run length encoding + state = voxels_flat[0] + ctr = 0 + for c in voxels_flat: + if c==state: + ctr += 1 + # if ctr hits max, dump + if ctr==255: + write_pair(fp, state, ctr) + ctr = 0 + else: + # if switch state, dump + write_pair(fp, state, ctr) + state = c + ctr = 1 + # flush out remainders + if ctr > 0: + write_pair(fp, state, ctr) + +if __name__ == '__main__': + import doctest + doctest.testmod() diff --git a/ThirdParty/Rignet_utils/mst_utils.py b/ThirdParty/Rignet_utils/mst_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..cf8b50e32f612736618cf93698750095efec2897 --- /dev/null +++ b/ThirdParty/Rignet_utils/mst_utils.py @@ -0,0 +1,179 @@ +#------------------------------------------------------------------------------- +# Name: mst_utils.py +# Purpose: utilize functions for skeleton generation +# RigNet Copyright 2020 University of Massachusetts +# RigNet is made available under General Public License Version 3 (GPLv3), or under a Commercial License. +# Please see the LICENSE README.txt file in the main directory for more information and instruction on using and licensing RigNet. +#------------------------------------------------------------------------------- + +import sys +import numpy as np +from .rig_parser import TreeNode +from .rig_parser import Skel +import torch + +def inside_check(pts, vox): + """ + Check where points are inside or outside the mesh based on its voxelization. + :param pts: points to be checked + :param vox: voxelized mesh + :return: internal points, and index of them in the input array. + """ + vc = (pts - vox.translate) / vox.scale * vox.dims[0] + vc = np.round(vc).astype(int) + ind1 = np.logical_and(np.all(vc >= 0, axis=1), np.all(vc < 88, axis=1)) + vc = np.clip(vc, 0, 87) + ind2 = vox.data[vc[:, 0], vc[:, 1], vc[:, 2]] + ind = np.logical_and(ind1, ind2) + pts = pts[ind] + return pts, np.argwhere(ind).squeeze() + + +def sample_on_bone(p_pos, ch_pos): + """ + sample points on a bone + :param p_pos: parent joint position + :param ch_pos: child joint position + :return: a array of samples on this bone. + """ + ray = ch_pos - p_pos + bone_length = np.sqrt(np.sum((p_pos - ch_pos) ** 2)) + num_step = np.round(bone_length / 0.01) + i_step = np.arange(1, num_step + 1) + unit_step = ray / (num_step + 1e-30) + unit_step = np.repeat(unit_step[np.newaxis, :], num_step, axis=0) + res = p_pos + unit_step * i_step[:, np.newaxis] + return res + + +def minKey(key, mstSet, nV): + # Initilaize min value + min = sys.maxsize + for v in range(nV): + if key[v] < min and mstSet[v] == False: + min = key[v] + min_index = v + return min_index + +def primMST_normal(graph, init_id, normal_matrix): + """ + Modified Prim's algorithm to generate a minimum spanning tree (MST). 
+ :param graph: pairwise cost matrix + :param init_id: init node ID as root + :return: parent array, key array, init_id + """ + nV = graph.shape[0] + key = [sys.maxsize] * nV + parent = [None] * nV + mstSet = [False] * nV + key[init_id] = 0 + parent[init_id] = -1 + previous_normal = np.zeros((nV, 3)) + + while not all(mstSet): + u = minKey(key, mstSet, nV) + mstSet[u] = True + if parent[u] >= 0: + previous_normal[u] = normal_matrix[u, parent[u]] + updated_normal = np.dot(previous_normal[u], normal_matrix[u, :].T) #1*n + updated_normal[updated_normal<0]=0 + # print('updated_normal',updated_normal.shape) + graph[u, :] = graph[u, :] +(1e8*updated_normal**2+1) + graph[:, u] = graph[:, u] +(1e8*updated_normal**2+1) + + for v in range(nV): + + if graph[u, v] > 0 and mstSet[v] is False and key[v] > graph[u, v]: + key[v] = graph[u, v] + parent[v] = u + + + return parent, key, init_id + + +def loadSkel_recur(p_node, parent_id, joint_name, joint_pos, parent): + """ + Converts Prim's algorithm result to our skel/info format recursively + :param p_node: Root node + :param parent_id: parent name of current step of recursion. + :param joint_name: list of joint names + :param joint_pos: joint positions + :param parent: parent indices returned by Prim's algorithm. + :return: p_node (root) will be expanded and linked with all joints + """ + for i in range(len(parent)): + if parent[i] == parent_id: + if joint_name is not None: + ch_node = TreeNode(joint_name[i], tuple(joint_pos[i])) + else: + ch_node = TreeNode('joint_{}'.format(i), tuple(joint_pos[i])) + p_node.children.append(ch_node) + ch_node.parent = p_node + loadSkel_recur(ch_node, i, joint_name, joint_pos, parent) + + +def unique_rows(a): + """ + remove repeated rows from a numpy array + """ + a = np.ascontiguousarray(a) + unique_a = np.unique(a.view([('', a.dtype)]*a.shape[1])) + return unique_a.view(a.dtype).reshape((unique_a.shape[0], a.shape[1])) + + +def increase_cost_for_outside_bone(cost_matrix, joint_pos, vox): + """ + increase connectivity cost for bones that lie outside the mesh + """ + for i in range(len(joint_pos)): + for j in range(i+1, len(joint_pos)): + bone_samples = sample_on_bone(joint_pos[i], joint_pos[j]) + bone_samples_vox = (bone_samples - vox.translate) / vox.scale * vox.dims[0] + bone_samples_vox = np.round(bone_samples_vox).astype(int) + + ind1 = np.logical_and(np.all(bone_samples_vox >= 0, axis=1), np.all(bone_samples_vox < vox.dims[0], axis=1)) + bone_samples_vox = np.clip(bone_samples_vox, 0, vox.dims[0]-1) + ind2 = vox.data[bone_samples_vox[:, 0], bone_samples_vox[:, 1], bone_samples_vox[:, 2]] + in_flags = np.logical_and(ind1, ind2) + outside_bone_sample = np.sum(in_flags == False) + + if outside_bone_sample > 1: + cost_matrix[i, j] = 2 * outside_bone_sample + cost_matrix[j, i] = 2 * outside_bone_sample + if np.abs(joint_pos[i, 0]) < 2e-2 and np.abs(joint_pos[j, 0]) < 2e-2: + cost_matrix[i, j] *= 0.5 + cost_matrix[j, i] *= 0.5 + return cost_matrix + +def increase_cost_for_outside_bone_tensor(cost_matrix, joint_pos, vox,resolution=64): + """ + increase connectivity cost for bones that lie outside the mesh + vox is a tensor of size (N, 3), where N is the number of voxels inside the mesh, with coordinates in range (0, 64) + """ + + vox = torch.clamp(vox, 0, resolution-1).long() + for i in range(len(joint_pos)): + for j in range(i+1, len(joint_pos)): + bone_samples = sample_on_bone(joint_pos[i], joint_pos[j]) # return coordinates of points on the bone + bone_samples_vox = bone_samples * (resolution/2) + (resolution/2) + bone_samples_vox = np.round(bone_samples_vox).astype(int) 
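+            # clip the sampled points into the voxel grid so the occupancy lookup below stays in bounds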
+ bone_samples_vox = np.clip(bone_samples_vox, 0, resolution-1) + + vox_remap = torch.zeros((resolution,resolution,resolution)) + vox_remap[vox[:,0],vox[:,1],vox[:,2]] = 1 + vox_remap = vox_remap.numpy() + inside_index = vox_remap[bone_samples_vox[:,0],bone_samples_vox[:,1],bone_samples_vox[:,2]] + outside_bone_sample = np.sum(inside_index == 0) + + + # check the intersection of the bone with the mesh + + if outside_bone_sample > 1: + cost_matrix[i, j] = 2 * outside_bone_sample + cost_matrix[j, i] = 2 * outside_bone_sample + if np.abs(joint_pos[i, 0]) < 2e-2 and np.abs(joint_pos[j, 0]) < 2e-2: + cost_matrix[i, j] *= 0.5 + cost_matrix[j, i] *= 0.5 + return cost_matrix + + diff --git a/ThirdParty/Rignet_utils/rig_parser.py b/ThirdParty/Rignet_utils/rig_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..af6c2ff6659abc88bdd0adc43931d17457b63b19 --- /dev/null +++ b/ThirdParty/Rignet_utils/rig_parser.py @@ -0,0 +1,268 @@ +#------------------------------------------------------------------------------- +# Name: rig_parser.py +# Purpose: classes for skeleton and rig +# RigNet Copyright 2020 University of Massachusetts +# RigNet is made available under General Public License Version 3 (GPLv3), or under a Commercial License. +# Please see the LICENSE README.txt file in the main directory for more information and instruction on using and licensing RigNet. +#------------------------------------------------------------------------------- + +import numpy as np + +try: + import Queue as Q # ver. < 3.0 +except ImportError: + import queue as Q + +class Node(object): + def __init__(self, name, pos): + self.name = name + self.pos = pos + + +class TreeNode(Node): + def __init__(self, name, pos): + super(TreeNode, self).__init__(name, pos) + self.children = [] + self.parent = None + +class Info: + """ + Wrap class for rig information + """ + def __init__(self, filename=None): + self.joint_pos = {} + self.joint_skin = [] + self.root = None + if filename is not None: + self.load(filename) + + def load(self, filename): + with open(filename, 'r') as f_txt: + lines = f_txt.readlines() + for line in lines: + word = line.split() + if word[0] == 'joints': + self.joint_pos[word[1]] = [float(word[2]), float(word[3]), float(word[4])] + elif word[0] == 'root': + root_pos = self.joint_pos[word[1]] + self.root = TreeNode(word[1], (root_pos[0], root_pos[1], root_pos[2])) + elif word[0] == 'skin': + skin_item = word[1:] + self.joint_skin.append(skin_item) + self.loadHierarchy_recur(self.root, lines, self.joint_pos) + + def loadHierarchy_recur(self, node, lines, joint_pos): + for li in lines: + if li.split()[0] == 'hier' and li.split()[1] == node.name: + pos = joint_pos[li.split()[2]] + ch_node = TreeNode(li.split()[2], tuple(pos)) + node.children.append(ch_node) + ch_node.parent = node + self.loadHierarchy_recur(ch_node, lines, joint_pos) + + def save(self, filename): + with open(filename, 'w') as file_info: + for key, val in self.joint_pos.items(): + file_info.write( + 'joints {0} {1:.8f} {2:.8f} {3:.8f}\n'.format(key, val[0], val[1], val[2])) + file_info.write('root {}\n'.format(self.root.name)) + + for skw in self.joint_skin: + cur_line = 'skin {0} '.format(skw[0]) + for cur_j in range(1, len(skw), 2): + cur_line += '{0} {1:.4f} '.format(skw[cur_j], float(skw[cur_j+1])) + cur_line += '\n' + file_info.write(cur_line) + + this_level = self.root.children + while this_level: + next_level = [] + for p_node in this_level: + file_info.write('hier {0} {1}\n'.format(p_node.parent.name, p_node.name)) + 
next_level += p_node.children + this_level = next_level + # return a numpy array skin_relation, where skin_relation[i, j] = 1 if joint i is skinned to joint j + + def get_skin_dict(self, filename): + skinning_dict = {} + with open (filename, 'r') as f: + lines = f.readlines() + skin_lines = [line for line in lines if line.startswith('skin')] + vertex_num = len(skin_lines) + for line in skin_lines: + word = line.split() + word = word[1:] + skin_vertex = {} + for i in range(1,len(word),2): + skin_vertex[word[i]] = float(word[i+1]) + skinning_dict[word[0]] = skin_vertex + return skinning_dict,vertex_num + + def save_as_skel_format(self, filename): + fout = open(filename, 'w') + this_level = [self.root] + hier_level = 1 + while this_level: + next_level = [] + for p_node in this_level: + pos = p_node.pos + parent = p_node.parent.name if p_node.parent is not None else 'None' + line = '{0} {1} {2:8f} {3:8f} {4:8f} {5}\n'.format(hier_level, p_node.name, pos[0], pos[1], pos[2], + parent) + fout.write(line) + for c_node in p_node.children: + next_level.append(c_node) + this_level = next_level + hier_level += 1 + fout.close() + + def normalize(self, scale, trans): + for k, v in self.joint_pos.items(): + self.joint_pos[k] /= scale + self.joint_pos[k] -= trans + + + this_level = [self.root] + while this_level: + next_level = [] + for node in this_level: + node.pos /= scale + node.pos = (node.pos[0] - trans[0], node.pos[1] - trans[1], node.pos[2] - trans[2]) + for ch in node.children: + next_level.append(ch) + this_level = next_level + + def get_joint_dict(self): + joint_dict = {} + this_level = [self.root] + while this_level: + next_level = [] + for node in this_level: + joint_dict[node.name] = node.pos + next_level += node.children + this_level = next_level + return joint_dict + + def adjacent_matrix(self): + joint_pos = self.get_joint_dict() + joint_name_list = list(joint_pos.keys()) + num_joint = len(joint_pos) + adj_matrix = np.zeros((num_joint, num_joint)) + this_level = [self.root] + while this_level: + next_level = [] + for p_node in this_level: + for c_node in p_node.children: + index_parent = joint_name_list.index(p_node.name) + index_children = joint_name_list.index(c_node.name) + adj_matrix[index_parent, index_children] = 1. 
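+                    # edges are recorded parent-to-child here and symmetrized after the traversal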
+ next_level += p_node.children + this_level = next_level + adj_matrix = adj_matrix + adj_matrix.transpose() + return adj_matrix + + +class Skel: + """ + Wrap class for skeleton topology + """ + def __init__(self, filename=None): + self.root = None + if filename is not None: + self.load(filename) + + def load(self, filename): + with open(filename, 'r') as fin: + lines = fin.readlines() + for li in lines: + words = li.split() + if words[5] == "None": + self.root = TreeNode(words[1], (float(words[2]), float(words[3]), float(words[4]))) + if len(words) == 7: + has_order = True + self.root.order = int(words[6]) + else: + has_order = False + break + self.loadSkel_recur(self.root, lines, has_order) + + def loadSkel_recur(self, node, lines, has_order): + if has_order: + ch_queue = Q.PriorityQueue() + for li in lines: + words = li.split() + if words[5] == node.name: + ch_queue.put((int(li.split()[6]), li)) + while not ch_queue.empty(): + item = ch_queue.get() + li = item[1] + ch_node = TreeNode(li.split()[1], (float(li.split()[2]), float(li.split()[3]), float(li.split()[4]))) + ch_node.order = int(li.split()[6]) + node.children.append(ch_node) + ch_node.parent = node + self.loadSkel_recur(ch_node, lines, has_order) + else: + for li in lines: + words = li.split() + if words[5] == node.name: + ch_node = TreeNode(words[1], (float(words[2]), float(words[3]), float(words[4]))) + node.children.append(ch_node) + ch_node.parent = node + self.loadSkel_recur(ch_node, lines, has_order) + + def save(self, filename): + fout = open(filename, 'w') + this_level = [self.root] + hier_level = 1 + while this_level: + next_level = [] + for p_node in this_level: + pos = p_node.pos + parent = p_node.parent.name if p_node.parent is not None else 'None' + line = '{0} {1} {2:8f} {3:8f} {4:8f} {5}\n'.format(hier_level, p_node.name, pos[0], pos[1], pos[2], parent) + fout.write(line) + for c_node in p_node.children: + next_level.append(c_node) + this_level = next_level + hier_level += 1 + fout.close() + + def normalize(self, scale, trans): + this_level = [self.root] + while this_level: + next_level = [] + for node in this_level: + node.pos /= scale + node.pos = (node.pos[0] - trans[0], node.pos[1] - trans[1], node.pos[2] - trans[2]) + for ch in node.children: + next_level.append(ch) + this_level = next_level + + def get_joint_pos(self): + joint_pos = {} + this_level = [self.root] + while this_level: + next_level = [] + for node in this_level: + joint_pos[node.name] = node.pos + next_level += node.children + this_level = next_level + return joint_pos + + def adjacent_matrix(self): + joint_pos = self.get_joint_pos() + joint_name_list = list(joint_pos.keys()) + num_joint = len(joint_pos) + adj_matrix = np.zeros((num_joint, num_joint)) + this_level = [self.root] + while this_level: + next_level = [] + for p_node in this_level: + for c_node in p_node.children: + index_parent = joint_name_list.index(p_node.name) + index_children = joint_name_list.index(c_node.name) + adj_matrix[index_parent, index_children] = 1. 
+ next_level += p_node.children + this_level = next_level + adj_matrix = adj_matrix + adj_matrix.transpose() + return adj_matrix diff --git a/ThirdParty/Rignet_utils/utils.py b/ThirdParty/Rignet_utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..75dc9bea7edb077149b0ddc5bf0e0a1abf0498e8 --- /dev/null +++ b/ThirdParty/Rignet_utils/utils.py @@ -0,0 +1,55 @@ +#------------------------------------------------------------------------------- +# Name: utils.py +# Purpose: utility functions for skeleton generation +# RigNet Copyright 2020 University of Massachusetts +# RigNet is made available under General Public License Version 3 (GPLv3), or under a Commercial License. +# Please see the LICENSE README.txt file in the main directory for more information and instruction on using and licensing RigNet. +#------------------------------------------------------------------------------- + +import numpy as np +from .rig_parser import Info, TreeNode +from .mst_utils import increase_cost_for_outside_bone, loadSkel_recur,primMST_normal, increase_cost_for_outside_bone_tensor +import trimesh +import torch + +def get_skel(pred_joints, prob_matrix,vox): + "Use the predicted connection probabilities between joints to build a skeleton; the root is the joint with the highest self-connection probability." + root_id = np.argmax(np.diag(prob_matrix)) + # set the diagonal to 0 and normalize the prob_matrix + np.fill_diagonal(prob_matrix, 0) + prob_matrix = prob_matrix / (np.sum(prob_matrix, axis=1, keepdims=True)+1e-6) + + cost_matrix = -np.log(prob_matrix + 1e-10) + if torch.is_tensor(vox): + cost_matrix = increase_cost_for_outside_bone_tensor(cost_matrix, pred_joints, vox) + else: + cost_matrix = increase_cost_for_outside_bone(cost_matrix, pred_joints, vox) + + pred_joints = np.array(pred_joints) + + # Create a matrix of shape (n, n, 3) where each element is the difference pred_joints[j] - pred_joints[i] + diff_matrix = pred_joints[:, np.newaxis, :] - pred_joints[np.newaxis, :, :] + norms = np.linalg.norm(diff_matrix, axis=2, keepdims=True) + norms[norms == 0] = 1 + normal_matrix = diff_matrix / norms + np.fill_diagonal(normal_matrix[:, :, 0], 0) + np.fill_diagonal(normal_matrix[:, :, 1], 0) + np.fill_diagonal(normal_matrix[:, :, 2], 0) + + pred_skel = Info() + + parent, key, root_id = primMST_normal(cost_matrix, root_id, normal_matrix) + + for i in range(len(parent)): + if parent[i] == -1: + pred_skel.root = TreeNode('root', tuple(pred_joints[i])) + break + loadSkel_recur(pred_skel.root, i, None, pred_joints, parent) + pred_skel.joint_pos = pred_skel.get_joint_dict() + # normal_matrix above is the n*n*3 matrix of normal vectors between joint pairs + + return pred_skel, parent + + + + diff --git a/ThirdParty/__init__.py b/ThirdParty/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ThirdParty/__pycache__/__init__.cpython-310.pyc b/ThirdParty/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a92dd289073f9cfd045f734e5dec8b9347fb84f Binary files /dev/null and b/ThirdParty/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/__init__.py b/ThirdParty/eg3d/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ThirdParty/eg3d/__pycache__/__init__.cpython-310.pyc b/ThirdParty/eg3d/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..e7d8a4ddea086130cde36cd3a29574f216cc894d Binary files /dev/null and b/ThirdParty/eg3d/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/calc_metrics.py b/ThirdParty/eg3d/calc_metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..d401b22554e142a4146a0eb0fc952cc20742e3e7 --- /dev/null +++ b/ThirdParty/eg3d/calc_metrics.py @@ -0,0 +1,190 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Calculate quality metrics for previous training run or pretrained network pickle.""" + +import os +import click +import json +import tempfile +import copy +import torch + +import dnnlib +import legacy +from metrics import metric_main +from metrics import metric_utils +from torch_utils import training_stats +from torch_utils import custom_ops +from torch_utils import misc +from torch_utils.ops import conv2d_gradfix + +#---------------------------------------------------------------------------- + +def subprocess_fn(rank, args, temp_dir): + dnnlib.util.Logger(should_flush=True) + + # Init torch.distributed. + if args.num_gpus > 1: + init_file = os.path.abspath(os.path.join(temp_dir, '.torch_distributed_init')) + if os.name == 'nt': + init_method = 'file:///' + init_file.replace('\\', '/') + torch.distributed.init_process_group(backend='gloo', init_method=init_method, rank=rank, world_size=args.num_gpus) + else: + init_method = f'file://{init_file}' + torch.distributed.init_process_group(backend='nccl', init_method=init_method, rank=rank, world_size=args.num_gpus) + + # Init torch_utils. + sync_device = torch.device('cuda', rank) if args.num_gpus > 1 else None + training_stats.init_multiprocessing(rank=rank, sync_device=sync_device) + if rank != 0 or not args.verbose: + custom_ops.verbosity = 'none' + + # Configure torch. + device = torch.device('cuda', rank) + torch.backends.cuda.matmul.allow_tf32 = False + torch.backends.cudnn.allow_tf32 = False + conv2d_gradfix.enabled = True + + # Print network summary. + G = copy.deepcopy(args.G).eval().requires_grad_(False).to(device) + if rank == 0 and args.verbose: + z = torch.empty([1, G.z_dim], device=device) + c = torch.empty([1, G.c_dim], device=device) + misc.print_module_summary(G, [z, c]) + + # Calculate each metric. + for metric in args.metrics: + if rank == 0 and args.verbose: + print(f'Calculating {metric}...') + progress = metric_utils.ProgressMonitor(verbose=args.verbose) + result_dict = metric_main.calc_metric(metric=metric, G=G, dataset_kwargs=args.dataset_kwargs, + num_gpus=args.num_gpus, rank=rank, device=device, progress=progress) + if rank == 0: + metric_main.report_metric(result_dict, run_dir=args.run_dir, snapshot_pkl=args.network_pkl) + if rank == 0 and args.verbose: + print() + + # Done. 
+ if rank == 0 and args.verbose: + print('Exiting...') + +#---------------------------------------------------------------------------- + +def parse_comma_separated_list(s): + if isinstance(s, list): + return s + if s is None or s.lower() == 'none' or s == '': + return [] + return s.split(',') + +#---------------------------------------------------------------------------- + +@click.command() +@click.pass_context +@click.option('network_pkl', '--network', help='Network pickle filename or URL', metavar='PATH', required=True) +@click.option('--metrics', help='Quality metrics', metavar='[NAME|A,B,C|none]', type=parse_comma_separated_list, default='fid50k_full', show_default=True) +@click.option('--data', help='Dataset to evaluate against [default: look up]', metavar='[ZIP|DIR]') +@click.option('--mirror', help='Enable dataset x-flips [default: look up]', type=bool, metavar='BOOL') +@click.option('--gpus', help='Number of GPUs to use', type=int, default=1, metavar='INT', show_default=True) +@click.option('--verbose', help='Print optional information', type=bool, default=True, metavar='BOOL', show_default=True) + +def calc_metrics(ctx, network_pkl, metrics, data, mirror, gpus, verbose): + """Calculate quality metrics for previous training run or pretrained network pickle. + + Examples: + + \b + # Previous training run: look up options automatically, save result to JSONL file. + python calc_metrics.py --metrics=eqt50k_int,eqr50k \\ + --network=~/training-runs/00000-stylegan3-r-mydataset/network-snapshot-000000.pkl + + \b + # Pre-trained network pickle: specify dataset explicitly, print result to stdout. + python calc_metrics.py --metrics=fid50k_full --data=~/datasets/ffhq-1024x1024.zip --mirror=1 \\ + --network=https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/stylegan3-t-ffhq-1024x1024.pkl + + \b + Recommended metrics: + fid50k_full Frechet inception distance against the full dataset. + kid50k_full Kernel inception distance against the full dataset. + pr50k3_full Precision and recall againt the full dataset. + ppl2_wend Perceptual path length in W, endpoints, full image. + eqt50k_int Equivariance w.r.t. integer translation (EQ-T). + eqt50k_frac Equivariance w.r.t. fractional translation (EQ-T_frac). + eqr50k Equivariance w.r.t. rotation (EQ-R). + + \b + Legacy metrics: + fid50k Frechet inception distance against 50k real images. + kid50k Kernel inception distance against 50k real images. + pr50k3 Precision and recall against 50k real images. + is50k Inception score for CIFAR-10. + """ + dnnlib.util.Logger(should_flush=True) + + # Validate arguments. + args = dnnlib.EasyDict(metrics=metrics, num_gpus=gpus, network_pkl=network_pkl, verbose=verbose) + if not all(metric_main.is_valid_metric(metric) for metric in args.metrics): + ctx.fail('\n'.join(['--metrics can only contain the following values:'] + metric_main.list_valid_metrics())) + if not args.num_gpus >= 1: + ctx.fail('--gpus must be at least 1') + + # Load network. + if not dnnlib.util.is_url(network_pkl, allow_file_urls=True) and not os.path.isfile(network_pkl): + ctx.fail('--network must point to a file or URL') + if args.verbose: + print(f'Loading network from "{network_pkl}"...') + with dnnlib.util.open_url(network_pkl, verbose=args.verbose) as f: + network_dict = legacy.load_network_pkl(f) + args.G = network_dict['G_ema'] # subclass of torch.nn.Module + + # Initialize dataset options. 
+ if data is not None: + args.dataset_kwargs = dnnlib.EasyDict(class_name='training.dataset.ImageFolderDataset', path=data) + elif network_dict['training_set_kwargs'] is not None: + args.dataset_kwargs = dnnlib.EasyDict(network_dict['training_set_kwargs']) + else: + ctx.fail('Could not look up dataset options; please specify --data') + + # Finalize dataset options. + args.dataset_kwargs.resolution = args.G.img_resolution + args.dataset_kwargs.use_labels = (args.G.c_dim != 0) + if mirror is not None: + args.dataset_kwargs.xflip = mirror + + # Print dataset options. + if args.verbose: + print('Dataset options:') + print(json.dumps(args.dataset_kwargs, indent=2)) + + # Locate run dir. + args.run_dir = None + if os.path.isfile(network_pkl): + pkl_dir = os.path.dirname(network_pkl) + if os.path.isfile(os.path.join(pkl_dir, 'training_options.json')): + args.run_dir = pkl_dir + + # Launch processes. + if args.verbose: + print('Launching processes...') + torch.multiprocessing.set_start_method('spawn') + with tempfile.TemporaryDirectory() as temp_dir: + if args.num_gpus == 1: + subprocess_fn(rank=0, args=args, temp_dir=temp_dir) + else: + torch.multiprocessing.spawn(fn=subprocess_fn, args=(args, temp_dir), nprocs=args.num_gpus) + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + calc_metrics() # pylint: disable=no-value-for-parameter + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/camera_utils.py b/ThirdParty/eg3d/camera_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4d4be88a575b4f43cce42f71222215e9b912d9f9 --- /dev/null +++ b/ThirdParty/eg3d/camera_utils.py @@ -0,0 +1,149 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +""" +Helper functions for constructing camera parameter matrices. Primarily used in visualization and inference scripts. +""" + +import math + +import torch +import torch.nn as nn + +from training.volumetric_rendering import math_utils + +class GaussianCameraPoseSampler: + """ + Samples pitch and yaw from a Gaussian distribution and returns a camera pose. + Camera is specified as looking at the origin. + If horizontal and vertical stddev (specified in radians) are zero, gives a + deterministic camera pose with yaw=horizontal_mean, pitch=vertical_mean. + The coordinate system is specified with y-up, z-forward, x-left. + Horizontal mean is the azimuthal angle (rotation around y axis) in radians, + vertical mean is the polar angle (angle from the y axis) in radians. + A point along the z-axis has azimuthal_angle=0, polar_angle=pi/2. 
+ + Example: + For a camera pose looking at the origin with the camera at position [0, 0, 1]: + cam2world = GaussianCameraPoseSampler.sample(math.pi/2, math.pi/2, radius=1) + """ + + @staticmethod + def sample(horizontal_mean, vertical_mean, horizontal_stddev=0, vertical_stddev=0, radius=1, batch_size=1, device='cpu'): + h = torch.randn((batch_size, 1), device=device) * horizontal_stddev + horizontal_mean + v = torch.randn((batch_size, 1), device=device) * vertical_stddev + vertical_mean + v = torch.clamp(v, 1e-5, math.pi - 1e-5) + + theta = h + v = v / math.pi + phi = torch.arccos(1 - 2*v) + + camera_origins = torch.zeros((batch_size, 3), device=device) + + camera_origins[:, 0:1] = radius*torch.sin(phi) * torch.cos(math.pi-theta) + camera_origins[:, 2:3] = radius*torch.sin(phi) * torch.sin(math.pi-theta) + camera_origins[:, 1:2] = radius*torch.cos(phi) + + forward_vectors = math_utils.normalize_vecs(-camera_origins) + return create_cam2world_matrix(forward_vectors, camera_origins) + + +class LookAtPoseSampler: + """ + Same as GaussianCameraPoseSampler, except the + camera is specified as looking at 'lookat_position', a 3-vector. + + Example: + For a camera pose looking at the origin with the camera at position [0, 0, 1]: + cam2world = LookAtPoseSampler.sample(math.pi/2, math.pi/2, torch.tensor([0, 0, 0]), radius=1) + """ + + @staticmethod + def sample(horizontal_mean, vertical_mean, lookat_position, horizontal_stddev=0, vertical_stddev=0, radius=1, batch_size=1, device='cpu'): + h = torch.randn((batch_size, 1), device=device) * horizontal_stddev + horizontal_mean + v = torch.randn((batch_size, 1), device=device) * vertical_stddev + vertical_mean + v = torch.clamp(v, 1e-5, math.pi - 1e-5) + + theta = h + v = v / math.pi + phi = torch.arccos(1 - 2*v) + + camera_origins = torch.zeros((batch_size, 3), device=device) + + camera_origins[:, 0:1] = radius*torch.sin(phi) * torch.cos(math.pi-theta) + camera_origins[:, 2:3] = radius*torch.sin(phi) * torch.sin(math.pi-theta) + camera_origins[:, 1:2] = radius*torch.cos(phi) + + # forward_vectors = math_utils.normalize_vecs(-camera_origins) + forward_vectors = math_utils.normalize_vecs(lookat_position - camera_origins) + return create_cam2world_matrix(forward_vectors, camera_origins) + +class UniformCameraPoseSampler: + """ + Same as GaussianCameraPoseSampler, except the + pose is sampled from a uniform distribution with range +-[horizontal/vertical]_stddev. 
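+    Note that horizontal_stddev and vertical_stddev act as half-widths of the uniform ranges
+    here (samples fall in [mean - stddev, mean + stddev]), not as standard deviations.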
+ + Example: + For a batch of random camera poses looking at the origin with yaw sampled from [-pi/2, +pi/2] radians: + + cam2worlds = UniformCameraPoseSampler.sample(math.pi/2, math.pi/2, horizontal_stddev=math.pi/2, radius=1, batch_size=16) + """ + + @staticmethod + def sample(horizontal_mean, vertical_mean, horizontal_stddev=0, vertical_stddev=0, radius=1, batch_size=1, device='cpu'): + h = (torch.rand((batch_size, 1), device=device) * 2 - 1) * horizontal_stddev + horizontal_mean + v = (torch.rand((batch_size, 1), device=device) * 2 - 1) * vertical_stddev + vertical_mean + v = torch.clamp(v, 1e-5, math.pi - 1e-5) + + theta = h + v = v / math.pi + phi = torch.arccos(1 - 2*v) + + camera_origins = torch.zeros((batch_size, 3), device=device) + + camera_origins[:, 0:1] = radius*torch.sin(phi) * torch.cos(math.pi-theta) + camera_origins[:, 2:3] = radius*torch.sin(phi) * torch.sin(math.pi-theta) + camera_origins[:, 1:2] = radius*torch.cos(phi) + + forward_vectors = math_utils.normalize_vecs(-camera_origins) + return create_cam2world_matrix(forward_vectors, camera_origins) + +def create_cam2world_matrix(forward_vector, origin): + """ + Takes in the direction the camera is pointing and the camera origin and returns a cam2world matrix. + Works on batches of forward_vectors, origins. Assumes y-axis is up and that there is no camera roll. + """ + + forward_vector = math_utils.normalize_vecs(forward_vector) + up_vector = torch.tensor([0, 1, 0], dtype=torch.float, device=origin.device).expand_as(forward_vector) + + right_vector = -math_utils.normalize_vecs(torch.cross(up_vector, forward_vector, dim=-1)) + up_vector = math_utils.normalize_vecs(torch.cross(forward_vector, right_vector, dim=-1)) + + rotation_matrix = torch.eye(4, device=origin.device).unsqueeze(0).repeat(forward_vector.shape[0], 1, 1) + rotation_matrix[:, :3, :3] = torch.stack((right_vector, up_vector, forward_vector), axis=-1) + + translation_matrix = torch.eye(4, device=origin.device).unsqueeze(0).repeat(forward_vector.shape[0], 1, 1) + translation_matrix[:, :3, 3] = origin + cam2world = (translation_matrix @ rotation_matrix)[:, :, :] + assert(cam2world.shape[1:] == (4, 4)) + return cam2world + + +def FOV_to_intrinsics(fov_degrees, device='cpu'): + """ + Creates a 3x3 camera intrinsics matrix from the camera field of view, specified in degrees. + Note the intrinsics are returned as normalized by image size, rather than in pixel units. + Assumes principal point is at image center. + """ + + focal_length = float(1 / (math.tan(fov_degrees * 3.14159 / 360) * 1.414)) + intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]], device=device) + return intrinsics \ No newline at end of file diff --git a/ThirdParty/eg3d/dataset_tool.py b/ThirdParty/eg3d/dataset_tool.py new file mode 100644 index 0000000000000000000000000000000000000000..a400f770fa477ef09adf4804235be4d67898765a --- /dev/null +++ b/ThirdParty/eg3d/dataset_tool.py @@ -0,0 +1,458 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
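Taken together, the samplers and FOV_to_intrinsics in camera_utils.py produce the 25-dimensional camera label (a flattened 4x4 cam2world matrix plus a flattened 3x3 intrinsics matrix) that the generation scripts later in this diff pass as conditioning. A minimal usage sketch, with illustrative pivot, radius and FOV values mirroring the defaults in gen_samples.py, and assuming it runs from the eg3d source root so that camera_utils is importable:

import math
import torch
from camera_utils import LookAtPoseSampler, FOV_to_intrinsics

device = 'cpu'
# Camera at radius 2.7 looking at the pivot [0, 0, 0.2], front-facing pose.
cam2world = LookAtPoseSampler.sample(math.pi / 2, math.pi / 2,
                                     torch.tensor([0, 0, 0.2], device=device),
                                     radius=2.7, device=device)       # shape [1, 4, 4]
intrinsics = FOV_to_intrinsics(18.837, device=device)                 # shape [3, 3]
# 16 + 9 = 25 values, the conditioning vector 'c' expected by the generator.
camera_params = torch.cat([cam2world.reshape(-1, 16), intrinsics.reshape(-1, 9)], dim=1)
assert camera_params.shape == (1, 25)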
+ +"""Tool for creating ZIP/PNG based datasets.""" + +import functools +import gzip +import io +import json +import os +import pickle +import re +import sys +import tarfile +import zipfile +from pathlib import Path +from typing import Callable, Optional, Tuple, Union + +import click +import numpy as np +import PIL.Image +from tqdm import tqdm + +#---------------------------------------------------------------------------- + +def error(msg): + print('Error: ' + msg) + sys.exit(1) + +#---------------------------------------------------------------------------- + +def parse_tuple(s: str) -> Tuple[int, int]: + '''Parse a 'M,N' or 'MxN' integer tuple. + + Example: + '4x2' returns (4,2) + '0,1' returns (0,1) + ''' + if m := re.match(r'^(\d+)[x,](\d+)$', s): + return (int(m.group(1)), int(m.group(2))) + raise ValueError(f'cannot parse tuple {s}') + +#---------------------------------------------------------------------------- + +def maybe_min(a: int, b: Optional[int]) -> int: + if b is not None: + return min(a, b) + return a + +#---------------------------------------------------------------------------- + +def file_ext(name: Union[str, Path]) -> str: + return str(name).split('.')[-1] + +#---------------------------------------------------------------------------- + +def is_image_ext(fname: Union[str, Path]) -> bool: + ext = file_ext(fname).lower() + return f'.{ext}' in PIL.Image.EXTENSION # type: ignore + +#---------------------------------------------------------------------------- + +def open_image_folder(source_dir, *, max_images: Optional[int]): + input_images = [str(f) for f in sorted(Path(source_dir).rglob('*')) if is_image_ext(f) and os.path.isfile(f)] + + # Load labels. + labels = {} + meta_fname = os.path.join(source_dir, 'dataset.json') + if os.path.isfile(meta_fname): + with open(meta_fname, 'r') as file: + labels = json.load(file)['labels'] + if labels is not None: + labels = { x[0]: x[1] for x in labels } + else: + labels = {} + + max_idx = maybe_min(len(input_images), max_images) + + def iterate_images(): + for idx, fname in enumerate(input_images): + arch_fname = os.path.relpath(fname, source_dir) + arch_fname = arch_fname.replace('\\', '/') + img = np.array(PIL.Image.open(fname)) + yield dict(img=img, label=labels.get(arch_fname)) + if idx >= max_idx-1: + break + return max_idx, iterate_images() + +#---------------------------------------------------------------------------- + +def open_image_zip(source, *, max_images: Optional[int]): + with zipfile.ZipFile(source, mode='r') as z: + input_images = [str(f) for f in sorted(z.namelist()) if is_image_ext(f)] + + # Load labels. 
+ labels = {} + if 'dataset.json' in z.namelist(): + with z.open('dataset.json', 'r') as file: + labels = json.load(file)['labels'] + if labels is not None: + labels = { x[0]: x[1] for x in labels } + else: + labels = {} + + max_idx = maybe_min(len(input_images), max_images) + + def iterate_images(): + with zipfile.ZipFile(source, mode='r') as z: + for idx, fname in enumerate(input_images): + with z.open(fname, 'r') as file: + img = PIL.Image.open(file) # type: ignore + img = np.array(img) + yield dict(img=img, label=labels.get(fname)) + if idx >= max_idx-1: + break + return max_idx, iterate_images() + +#---------------------------------------------------------------------------- + +def open_lmdb(lmdb_dir: str, *, max_images: Optional[int]): + import cv2 # pip install opencv-python # pylint: disable=import-error + import lmdb # pip install lmdb # pylint: disable=import-error + + with lmdb.open(lmdb_dir, readonly=True, lock=False).begin(write=False) as txn: + max_idx = maybe_min(txn.stat()['entries'], max_images) + + def iterate_images(): + with lmdb.open(lmdb_dir, readonly=True, lock=False).begin(write=False) as txn: + for idx, (_key, value) in enumerate(txn.cursor()): + try: + try: + img = cv2.imdecode(np.frombuffer(value, dtype=np.uint8), 1) + if img is None: + raise IOError('cv2.imdecode failed') + img = img[:, :, ::-1] # BGR => RGB + except IOError: + img = np.array(PIL.Image.open(io.BytesIO(value))) + yield dict(img=img, label=None) + if idx >= max_idx-1: + break + except: + print(sys.exc_info()[1]) + + return max_idx, iterate_images() + +#---------------------------------------------------------------------------- + +def open_cifar10(tarball: str, *, max_images: Optional[int]): + images = [] + labels = [] + + with tarfile.open(tarball, 'r:gz') as tar: + for batch in range(1, 6): + member = tar.getmember(f'cifar-10-batches-py/data_batch_{batch}') + with tar.extractfile(member) as file: + data = pickle.load(file, encoding='latin1') + images.append(data['data'].reshape(-1, 3, 32, 32)) + labels.append(data['labels']) + + images = np.concatenate(images) + labels = np.concatenate(labels) + images = images.transpose([0, 2, 3, 1]) # NCHW -> NHWC + assert images.shape == (50000, 32, 32, 3) and images.dtype == np.uint8 + assert labels.shape == (50000,) and labels.dtype in [np.int32, np.int64] + assert np.min(images) == 0 and np.max(images) == 255 + assert np.min(labels) == 0 and np.max(labels) == 9 + + max_idx = maybe_min(len(images), max_images) + + def iterate_images(): + for idx, img in enumerate(images): + yield dict(img=img, label=int(labels[idx])) + if idx >= max_idx-1: + break + + return max_idx, iterate_images() + +#---------------------------------------------------------------------------- + +def open_mnist(images_gz: str, *, max_images: Optional[int]): + labels_gz = images_gz.replace('-images-idx3-ubyte.gz', '-labels-idx1-ubyte.gz') + assert labels_gz != images_gz + images = [] + labels = [] + + with gzip.open(images_gz, 'rb') as f: + images = np.frombuffer(f.read(), np.uint8, offset=16) + with gzip.open(labels_gz, 'rb') as f: + labels = np.frombuffer(f.read(), np.uint8, offset=8) + + images = images.reshape(-1, 28, 28) + images = np.pad(images, [(0,0), (2,2), (2,2)], 'constant', constant_values=0) + assert images.shape == (60000, 32, 32) and images.dtype == np.uint8 + assert labels.shape == (60000,) and labels.dtype == np.uint8 + assert np.min(images) == 0 and np.max(images) == 255 + assert np.min(labels) == 0 and np.max(labels) == 9 + + max_idx = maybe_min(len(images), max_images) 
+ + def iterate_images(): + for idx, img in enumerate(images): + yield dict(img=img, label=int(labels[idx])) + if idx >= max_idx-1: + break + + return max_idx, iterate_images() + +#---------------------------------------------------------------------------- + +def make_transform( + transform: Optional[str], + output_width: Optional[int], + output_height: Optional[int] +) -> Callable[[np.ndarray], Optional[np.ndarray]]: + def scale(width, height, img): + w = img.shape[1] + h = img.shape[0] + if width == w and height == h: + return img + img = PIL.Image.fromarray(img) + ww = width if width is not None else w + hh = height if height is not None else h + img = img.resize((ww, hh), PIL.Image.LANCZOS) + return np.array(img) + + def center_crop(width, height, img): + crop = np.min(img.shape[:2]) + img = img[(img.shape[0] - crop) // 2 : (img.shape[0] + crop) // 2, (img.shape[1] - crop) // 2 : (img.shape[1] + crop) // 2] + img = PIL.Image.fromarray(img, 'RGB') + img = img.resize((width, height), PIL.Image.LANCZOS) + return np.array(img) + + def center_crop_wide(width, height, img): + ch = int(np.round(width * img.shape[0] / img.shape[1])) + if img.shape[1] < width or ch < height: + return None + + img = img[(img.shape[0] - ch) // 2 : (img.shape[0] + ch) // 2] + img = PIL.Image.fromarray(img, 'RGB') + img = img.resize((width, height), PIL.Image.LANCZOS) + img = np.array(img) + + canvas = np.zeros([width, width, 3], dtype=np.uint8) + canvas[(width - height) // 2 : (width + height) // 2, :] = img + return canvas + + if transform is None: + return functools.partial(scale, output_width, output_height) + if transform == 'center-crop': + if (output_width is None) or (output_height is None): + error ('must specify --resolution=WxH when using ' + transform + 'transform') + return functools.partial(center_crop, output_width, output_height) + if transform == 'center-crop-wide': + if (output_width is None) or (output_height is None): + error ('must specify --resolution=WxH when using ' + transform + ' transform') + return functools.partial(center_crop_wide, output_width, output_height) + assert False, 'unknown transform' + +#---------------------------------------------------------------------------- + +def open_dataset(source, *, max_images: Optional[int]): + if os.path.isdir(source): + if source.rstrip('/').endswith('_lmdb'): + return open_lmdb(source, max_images=max_images) + else: + return open_image_folder(source, max_images=max_images) + elif os.path.isfile(source): + if os.path.basename(source) == 'cifar-10-python.tar.gz': + return open_cifar10(source, max_images=max_images) + elif os.path.basename(source) == 'train-images-idx3-ubyte.gz': + return open_mnist(source, max_images=max_images) + elif file_ext(source) == 'zip': + return open_image_zip(source, max_images=max_images) + else: + assert False, 'unknown archive type' + else: + error(f'Missing input file or directory: {source}') + +#---------------------------------------------------------------------------- + +def open_dest(dest: str) -> Tuple[str, Callable[[str, Union[bytes, str]], None], Callable[[], None]]: + dest_ext = file_ext(dest) + + if dest_ext == 'zip': + if os.path.dirname(dest) != '': + os.makedirs(os.path.dirname(dest), exist_ok=True) + zf = zipfile.ZipFile(file=dest, mode='w', compression=zipfile.ZIP_STORED) + def zip_write_bytes(fname: str, data: Union[bytes, str]): + zf.writestr(fname, data) + return '', zip_write_bytes, zf.close + else: + # If the output folder already exists, check that is is + # empty. 
+ # + # Note: creating the output directory is not strictly + # necessary as folder_write_bytes() also mkdirs, but it's better + # to give an error message earlier in case the dest folder + # somehow cannot be created. + if os.path.isdir(dest) and len(os.listdir(dest)) != 0: + error('--dest folder must be empty') + os.makedirs(dest, exist_ok=True) + + def folder_write_bytes(fname: str, data: Union[bytes, str]): + os.makedirs(os.path.dirname(fname), exist_ok=True) + with open(fname, 'wb') as fout: + if isinstance(data, str): + data = data.encode('utf8') + fout.write(data) + return dest, folder_write_bytes, lambda: None + +#---------------------------------------------------------------------------- + +@click.command() +@click.pass_context +@click.option('--source', help='Directory or archive name for input dataset', required=True, metavar='PATH') +@click.option('--dest', help='Output directory or archive name for output dataset', required=True, metavar='PATH') +@click.option('--max-images', help='Output only up to `max-images` images', type=int, default=None) +@click.option('--transform', help='Input crop/resize mode', type=click.Choice(['center-crop', 'center-crop-wide'])) +@click.option('--resolution', help='Output resolution (e.g., \'512x512\')', metavar='WxH', type=parse_tuple) +def convert_dataset( + ctx: click.Context, + source: str, + dest: str, + max_images: Optional[int], + transform: Optional[str], + resolution: Optional[Tuple[int, int]] +): + """Convert an image dataset into a dataset archive usable with StyleGAN2 ADA PyTorch. + + The input dataset format is guessed from the --source argument: + + \b + --source *_lmdb/ Load LSUN dataset + --source cifar-10-python.tar.gz Load CIFAR-10 dataset + --source train-images-idx3-ubyte.gz Load MNIST dataset + --source path/ Recursively load all images from path/ + --source dataset.zip Recursively load all images from dataset.zip + + Specifying the output format and path: + + \b + --dest /path/to/dir Save output files under /path/to/dir + --dest /path/to/dataset.zip Save output files into /path/to/dataset.zip + + The output dataset format can be either an image folder or an uncompressed zip archive. + Zip archives makes it easier to move datasets around file servers and clusters, and may + offer better training performance on network file systems. + + Images within the dataset archive will be stored as uncompressed PNG. + Uncompressed PNGs can be efficiently decoded in the training loop. + + Class labels are stored in a file called 'dataset.json' that is stored at the + dataset root folder. This file has the following structure: + + \b + { + "labels": [ + ["00000/img00000000.png",6], + ["00000/img00000001.png",9], + ... repeated for every image in the dataset + ["00049/img00049999.png",1] + ] + } + + If the 'dataset.json' file cannot be found, the dataset is interpreted as + not containing class labels. + + Image scale/crop and resolution requirements: + + Output images must be square-shaped and they must all have the same power-of-two + dimensions. + + To scale arbitrary input image size to a specific width and height, use the + --resolution option. Output resolution will be either the original + input resolution (if resolution was not specified) or the one specified with + --resolution option. + + Use the --transform=center-crop or --transform=center-crop-wide options to apply a + center crop transform on the input image. These options should be used with the + --resolution option. 
For example: + + \b + python dataset_tool.py --source LSUN/raw/cat_lmdb --dest /tmp/lsun_cat \\ + --transform=center-crop-wide --resolution=512x384 + """ + + PIL.Image.init() # type: ignore + + if dest == '': + ctx.fail('--dest output filename or directory must not be an empty string') + + num_files, input_iter = open_dataset(source, max_images=max_images) + archive_root_dir, save_bytes, close_dest = open_dest(dest) + + if resolution is None: resolution = (None, None) + transform_image = make_transform(transform, *resolution) + + dataset_attrs = None + + labels = [] + for idx, image in tqdm(enumerate(input_iter), total=num_files): + idx_str = f'{idx:08d}' + archive_fname = f'{idx_str[:5]}/img{idx_str}.png' + + # Apply crop and resize. + img = transform_image(image['img']) + + # Transform may drop images. + if img is None: + continue + + # Error check to require uniform image attributes across + # the whole dataset. + channels = img.shape[2] if img.ndim == 3 else 1 + cur_image_attrs = { + 'width': img.shape[1], + 'height': img.shape[0], + 'channels': channels + } + if dataset_attrs is None: + dataset_attrs = cur_image_attrs + width = dataset_attrs['width'] + height = dataset_attrs['height'] + if width != height: + error(f'Image dimensions after scale and crop are required to be square. Got {width}x{height}') + if dataset_attrs['channels'] not in [1, 3, 4]: + error('Input images must be stored as RGB or grayscale') + if width != 2 ** int(np.floor(np.log2(width))): + error('Image width/height after scale and crop are required to be power-of-two') + elif dataset_attrs != cur_image_attrs: + err = [f' dataset {k}/cur image {k}: {dataset_attrs[k]}/{cur_image_attrs[k]}' for k in dataset_attrs.keys()] # pylint: disable=unsubscriptable-object + error(f'Image {archive_fname} attributes must be equal across all images of the dataset. Got:\n' + '\n'.join(err)) + + # Save the image as an uncompressed PNG. + img = PIL.Image.fromarray(img, { 1: 'L', 3: 'RGB', 4: 'RGBA'}[channels]) + if channels == 4: img = img.convert('RGB') + image_bits = io.BytesIO() + img.save(image_bits, format='png', compress_level=0, optimize=False) + save_bytes(os.path.join(archive_root_dir, archive_fname), image_bits.getbuffer()) + labels.append([archive_fname, image['label']] if image['label'] is not None else None) + + metadata = { + 'labels': labels if all(x is not None for x in labels) else None + } + save_bytes(os.path.join(archive_root_dir, 'dataset.json'), json.dumps(metadata)) + close_dest() + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + convert_dataset() # pylint: disable=no-value-for-parameter diff --git a/ThirdParty/eg3d/dnnlib/__init__.py b/ThirdParty/eg3d/dnnlib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dd91ed142e955581e83948455fb71cd837215f61 --- /dev/null +++ b/ThirdParty/eg3d/dnnlib/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
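Because dataset_tool.py writes class labels into dataset.json exactly as shown in the docstring above, a freshly created archive can be sanity-checked with the standard library alone. A minimal sketch, assuming a hypothetical archive at datasets/example.zip produced by a command like the one above:

import json
import zipfile

with zipfile.ZipFile('datasets/example.zip') as z:
    names = [n for n in z.namelist() if n.endswith('.png')]
    meta = json.loads(z.read('dataset.json'))
    labels = meta['labels']                  # None when the source had no labels
    print(f'{len(names)} images, labels present: {labels is not None}')
    if labels:
        print('first entry:', labels[0])     # e.g. ['00000/img00000000.png', 6]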
+ +from .util import EasyDict, make_cache_dir_path diff --git a/ThirdParty/eg3d/dnnlib/__pycache__/__init__.cpython-310.pyc b/ThirdParty/eg3d/dnnlib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b69694f8dff325adb4e7176d784e2e68dfd451c4 Binary files /dev/null and b/ThirdParty/eg3d/dnnlib/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/dnnlib/__pycache__/util.cpython-310.pyc b/ThirdParty/eg3d/dnnlib/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..09c2f46daed7598d146e9060f7ac6f251451b7da Binary files /dev/null and b/ThirdParty/eg3d/dnnlib/__pycache__/util.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/dnnlib/util.py b/ThirdParty/eg3d/dnnlib/util.py new file mode 100644 index 0000000000000000000000000000000000000000..80b67c4e312cd1b847ca21fd3b929802a57e6f6d --- /dev/null +++ b/ThirdParty/eg3d/dnnlib/util.py @@ -0,0 +1,493 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Miscellaneous utility classes and functions.""" + +import ctypes +import fnmatch +import importlib +import inspect +import numpy as np +import os +import shutil +import sys +import types +import io +import pickle +import re +import requests +import html +import hashlib +import glob +import tempfile +import urllib +import urllib.request +import uuid + +from distutils.util import strtobool +from typing import Any, List, Tuple, Union + + +# Util classes +# ------------------------------------------------------------------------------------------ + + +class EasyDict(dict): + """Convenience class that behaves like a dict but allows access with the attribute syntax.""" + + def __getattr__(self, name: str) -> Any: + try: + return self[name] + except KeyError: + raise AttributeError(name) + + def __setattr__(self, name: str, value: Any) -> None: + self[name] = value + + def __delattr__(self, name: str) -> None: + del self[name] + + +class Logger(object): + """Redirect stderr to stdout, optionally print stdout to a file, and optionally force flushing on both stdout and the file.""" + + def __init__(self, file_name: str = None, file_mode: str = "w", should_flush: bool = True): + self.file = None + + if file_name is not None: + self.file = open(file_name, file_mode) + + self.should_flush = should_flush + self.stdout = sys.stdout + self.stderr = sys.stderr + + sys.stdout = self + sys.stderr = self + + def __enter__(self) -> "Logger": + return self + + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: + self.close() + + def write(self, text: Union[str, bytes]) -> None: + """Write text to stdout (and a file) and optionally flush.""" + if isinstance(text, bytes): + text = text.decode() + if len(text) == 0: # workaround for a bug in VSCode debugger: sys.stdout.write(''); sys.stdout.flush() => crash + return + + if self.file is not None: + self.file.write(text) + + self.stdout.write(text) + + if self.should_flush: + self.flush() + + def flush(self) -> None: + 
"""Flush written text to both stdout and a file, if open.""" + if self.file is not None: + self.file.flush() + + self.stdout.flush() + + def close(self) -> None: + """Flush, close possible files, and remove stdout/stderr mirroring.""" + self.flush() + + # if using multiple loggers, prevent closing in wrong order + if sys.stdout is self: + sys.stdout = self.stdout + if sys.stderr is self: + sys.stderr = self.stderr + + if self.file is not None: + self.file.close() + self.file = None + + +# Cache directories +# ------------------------------------------------------------------------------------------ + +_dnnlib_cache_dir = None + +def set_cache_dir(path: str) -> None: + global _dnnlib_cache_dir + _dnnlib_cache_dir = path + +def make_cache_dir_path(*paths: str) -> str: + if _dnnlib_cache_dir is not None: + return os.path.join(_dnnlib_cache_dir, *paths) + if 'DNNLIB_CACHE_DIR' in os.environ: + return os.path.join(os.environ['DNNLIB_CACHE_DIR'], *paths) + if 'HOME' in os.environ: + return os.path.join(os.environ['HOME'], '.cache', 'dnnlib', *paths) + if 'USERPROFILE' in os.environ: + return os.path.join(os.environ['USERPROFILE'], '.cache', 'dnnlib', *paths) + return os.path.join(tempfile.gettempdir(), '.cache', 'dnnlib', *paths) + +# Small util functions +# ------------------------------------------------------------------------------------------ + + +def format_time(seconds: Union[int, float]) -> str: + """Convert the seconds to human readable string with days, hours, minutes and seconds.""" + s = int(np.rint(seconds)) + + if s < 60: + return "{0}s".format(s) + elif s < 60 * 60: + return "{0}m {1:02}s".format(s // 60, s % 60) + elif s < 24 * 60 * 60: + return "{0}h {1:02}m {2:02}s".format(s // (60 * 60), (s // 60) % 60, s % 60) + else: + return "{0}d {1:02}h {2:02}m".format(s // (24 * 60 * 60), (s // (60 * 60)) % 24, (s // 60) % 60) + + +def format_time_brief(seconds: Union[int, float]) -> str: + """Convert the seconds to human readable string with days, hours, minutes and seconds.""" + s = int(np.rint(seconds)) + + if s < 60: + return "{0}s".format(s) + elif s < 60 * 60: + return "{0}m {1:02}s".format(s // 60, s % 60) + elif s < 24 * 60 * 60: + return "{0}h {1:02}m".format(s // (60 * 60), (s // 60) % 60) + else: + return "{0}d {1:02}h".format(s // (24 * 60 * 60), (s // (60 * 60)) % 24) + + +def ask_yes_no(question: str) -> bool: + """Ask the user the question until the user inputs a valid answer.""" + while True: + try: + print("{0} [y/n]".format(question)) + return strtobool(input().lower()) + except ValueError: + pass + + +def tuple_product(t: Tuple) -> Any: + """Calculate the product of the tuple elements.""" + result = 1 + + for v in t: + result *= v + + return result + + +_str_to_ctype = { + "uint8": ctypes.c_ubyte, + "uint16": ctypes.c_uint16, + "uint32": ctypes.c_uint32, + "uint64": ctypes.c_uint64, + "int8": ctypes.c_byte, + "int16": ctypes.c_int16, + "int32": ctypes.c_int32, + "int64": ctypes.c_int64, + "float32": ctypes.c_float, + "float64": ctypes.c_double +} + + +def get_dtype_and_ctype(type_obj: Any) -> Tuple[np.dtype, Any]: + """Given a type name string (or an object having a __name__ attribute), return matching Numpy and ctypes types that have the same size in bytes.""" + type_str = None + + if isinstance(type_obj, str): + type_str = type_obj + elif hasattr(type_obj, "__name__"): + type_str = type_obj.__name__ + elif hasattr(type_obj, "name"): + type_str = type_obj.name + else: + raise RuntimeError("Cannot infer type name from input") + + assert type_str in _str_to_ctype.keys() 
+ + my_dtype = np.dtype(type_str) + my_ctype = _str_to_ctype[type_str] + + assert my_dtype.itemsize == ctypes.sizeof(my_ctype) + + return my_dtype, my_ctype + + +def is_pickleable(obj: Any) -> bool: + try: + with io.BytesIO() as stream: + pickle.dump(obj, stream) + return True + except: + return False + + +# Functionality to import modules/objects by name, and call functions by name +# ------------------------------------------------------------------------------------------ + +def get_module_from_obj_name(obj_name: str) -> Tuple[types.ModuleType, str]: + """Searches for the underlying module behind the name to some python object. + Returns the module and the object name (original name with module part removed).""" + + # allow convenience shorthands, substitute them by full names + obj_name = re.sub("^np.", "numpy.", obj_name) + obj_name = re.sub("^tf.", "tensorflow.", obj_name) + + # list alternatives for (module_name, local_obj_name) + parts = obj_name.split(".") + name_pairs = [(".".join(parts[:i]), ".".join(parts[i:])) for i in range(len(parts), 0, -1)] + + # try each alternative in turn + for module_name, local_obj_name in name_pairs: + try: + module = importlib.import_module(module_name) # may raise ImportError + get_obj_from_module(module, local_obj_name) # may raise AttributeError + return module, local_obj_name + except: + pass + + # maybe some of the modules themselves contain errors? + for module_name, _local_obj_name in name_pairs: + try: + importlib.import_module(module_name) # may raise ImportError + except ImportError: + if not str(sys.exc_info()[1]).startswith("No module named '" + module_name + "'"): + raise + + # maybe the requested attribute is missing? + for module_name, local_obj_name in name_pairs: + try: + module = importlib.import_module(module_name) # may raise ImportError + get_obj_from_module(module, local_obj_name) # may raise AttributeError + except ImportError: + pass + + # we are out of luck, but we have no idea why + raise ImportError(obj_name) + + +def get_obj_from_module(module: types.ModuleType, obj_name: str) -> Any: + """Traverses the object name and returns the last (rightmost) python object.""" + if obj_name == '': + return module + obj = module + for part in obj_name.split("."): + obj = getattr(obj, part) + return obj + + +def get_obj_by_name(name: str) -> Any: + """Finds the python object with the given name.""" + module, obj_name = get_module_from_obj_name(name) + return get_obj_from_module(module, obj_name) + + +def call_func_by_name(*args, func_name: str = None, **kwargs) -> Any: + """Finds the python object with the given name and calls it as a function.""" + assert func_name is not None + func_obj = get_obj_by_name(func_name) + assert callable(func_obj) + return func_obj(*args, **kwargs) + + +def construct_class_by_name(*args, class_name: str = None, **kwargs) -> Any: + """Finds the python class with the given name and constructs it with the given arguments.""" + return call_func_by_name(*args, func_name=class_name, **kwargs) + + +def get_module_dir_by_obj_name(obj_name: str) -> str: + """Get the directory path of the module containing the given object name.""" + module, _ = get_module_from_obj_name(obj_name) + return os.path.dirname(inspect.getfile(module)) + + +def is_top_level_function(obj: Any) -> bool: + """Determine whether the given object is a top-level function, i.e., defined at module scope using 'def'.""" + return callable(obj) and obj.__name__ in sys.modules[obj.__module__].__dict__ + + +def get_top_level_function_name(obj: Any) -> 
str: + """Return the fully-qualified name of a top-level function.""" + assert is_top_level_function(obj) + module = obj.__module__ + if module == '__main__': + module = os.path.splitext(os.path.basename(sys.modules[module].__file__))[0] + return module + "." + obj.__name__ + + +# File system helpers +# ------------------------------------------------------------------------------------------ + +def list_dir_recursively_with_ignore(dir_path: str, ignores: List[str] = None, add_base_to_relative: bool = False) -> List[Tuple[str, str]]: + """List all files recursively in a given directory while ignoring given file and directory names. + Returns list of tuples containing both absolute and relative paths.""" + assert os.path.isdir(dir_path) + base_name = os.path.basename(os.path.normpath(dir_path)) + + if ignores is None: + ignores = [] + + result = [] + + for root, dirs, files in os.walk(dir_path, topdown=True): + for ignore_ in ignores: + dirs_to_remove = [d for d in dirs if fnmatch.fnmatch(d, ignore_)] + + # dirs need to be edited in-place + for d in dirs_to_remove: + dirs.remove(d) + + files = [f for f in files if not fnmatch.fnmatch(f, ignore_)] + + absolute_paths = [os.path.join(root, f) for f in files] + relative_paths = [os.path.relpath(p, dir_path) for p in absolute_paths] + + if add_base_to_relative: + relative_paths = [os.path.join(base_name, p) for p in relative_paths] + + assert len(absolute_paths) == len(relative_paths) + result += zip(absolute_paths, relative_paths) + + return result + + +def copy_files_and_create_dirs(files: List[Tuple[str, str]]) -> None: + """Takes in a list of tuples of (src, dst) paths and copies files. + Will create all necessary directories.""" + for file in files: + target_dir_name = os.path.dirname(file[1]) + + # will create all intermediate-level directories + if not os.path.exists(target_dir_name): + os.makedirs(target_dir_name) + + shutil.copyfile(file[0], file[1]) + + +# URL helpers +# ------------------------------------------------------------------------------------------ + +def is_url(obj: Any, allow_file_urls: bool = False) -> bool: + """Determine whether the given object is a valid URL string.""" + if not isinstance(obj, str) or not "://" in obj: + return False + if allow_file_urls and obj.startswith('file://'): + return True + try: + res = requests.compat.urlparse(obj) + if not res.scheme or not res.netloc or not "." in res.netloc: + return False + res = requests.compat.urlparse(requests.compat.urljoin(obj, "/")) + if not res.scheme or not res.netloc or not "." in res.netloc: + return False + except: + return False + return True + + +def open_url(url: str, cache_dir: str = None, num_attempts: int = 10, verbose: bool = True, return_filename: bool = False, cache: bool = True) -> Any: + """Download the given URL and return a binary-mode file object to access the data.""" + assert num_attempts >= 1 + assert not (return_filename and (not cache)) + + # Doesn't look like an URL scheme so interpret it as a local filename. + if not re.match('^[a-z]+://', url): + return url if return_filename else open(url, "rb") + + # Handle file URLs. This code handles unusual file:// patterns that + # arise on Windows: + # + # file:///c:/foo.txt + # + # which would translate to a local '/c:/foo.txt' filename that's + # invalid. Drop the forward slash for such pathnames. + # + # If you touch this code path, you should test it on both Linux and + # Windows. 
+ # + # Some internet resources suggest using urllib.request.url2pathname() but + # but that converts forward slashes to backslashes and this causes + # its own set of problems. + if url.startswith('file://'): + filename = urllib.parse.urlparse(url).path + if re.match(r'^/[a-zA-Z]:', filename): + filename = filename[1:] + return filename if return_filename else open(filename, "rb") + + assert is_url(url) + + # Lookup from cache. + if cache_dir is None: + cache_dir = make_cache_dir_path('downloads') + + url_md5 = hashlib.md5(url.encode("utf-8")).hexdigest() + if cache: + cache_files = glob.glob(os.path.join(cache_dir, url_md5 + "_*")) + if len(cache_files) == 1: + filename = cache_files[0] + return filename if return_filename else open(filename, "rb") + + # Download. + url_name = None + url_data = None + with requests.Session() as session: + if verbose: + print("Downloading %s ..." % url, end="", flush=True) + for attempts_left in reversed(range(num_attempts)): + try: + with session.get(url) as res: + res.raise_for_status() + if len(res.content) == 0: + raise IOError("No data received") + + if len(res.content) < 8192: + content_str = res.content.decode("utf-8") + if "download_warning" in res.headers.get("Set-Cookie", ""): + links = [html.unescape(link) for link in content_str.split('"') if "export=download" in link] + if len(links) == 1: + url = requests.compat.urljoin(url, links[0]) + raise IOError("Google Drive virus checker nag") + if "Google Drive - Quota exceeded" in content_str: + raise IOError("Google Drive download quota exceeded -- please try again later") + + match = re.search(r'filename="([^"]*)"', res.headers.get("Content-Disposition", "")) + url_name = match[1] if match else url + url_data = res.content + if verbose: + print(" done") + break + except KeyboardInterrupt: + raise + except: + if not attempts_left: + if verbose: + print(" failed") + raise + if verbose: + print(".", end="", flush=True) + + # Save to cache. + if cache: + safe_name = re.sub(r"[^0-9a-zA-Z-._]", "_", url_name) + cache_file = os.path.join(cache_dir, url_md5 + "_" + safe_name) + temp_file = os.path.join(cache_dir, "tmp_" + uuid.uuid4().hex + "_" + url_md5 + "_" + safe_name) + os.makedirs(cache_dir, exist_ok=True) + with open(temp_file, "wb") as f: + f.write(url_data) + os.replace(temp_file, cache_file) # atomic + if return_filename: + return cache_file + + # Return data as file object. + assert not return_filename + return io.BytesIO(url_data) diff --git a/ThirdParty/eg3d/environment.yml b/ThirdParty/eg3d/environment.yml new file mode 100644 index 0000000000000000000000000000000000000000..082bcaf51b257b8dfe6148fac2fcac263fee3f15 --- /dev/null +++ b/ThirdParty/eg3d/environment.yml @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
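Elsewhere in this codebase (calc_metrics above is one example) these dnnlib helpers are combined by packing constructor arguments into an EasyDict whose class_name is later resolved and instantiated by construct_class_by_name. A small sketch with illustrative kwargs only, assuming it runs from the eg3d root so that training.dataset is importable:

import dnnlib
import dnnlib.util

# Attribute-style access works because EasyDict overrides __getattr__/__setattr__.
dataset_kwargs = dnnlib.EasyDict(class_name='training.dataset.ImageFolderDataset',
                                 path='datasets/example.zip', use_labels=True)
print(dataset_kwargs.path)

# construct_class_by_name() resolves the dotted class name via importlib and
# forwards the remaining keyword arguments to that class's constructor.
dataset = dnnlib.util.construct_class_by_name(**dataset_kwargs)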
+ +name: eg3d +channels: + - pytorch + - nvidia +dependencies: + - python >= 3.8 + - pip + - numpy>=1.20 + - click>=8.0 + - pillow=8.3.1 + - scipy=1.7.1 + - pytorch=1.11.0 + - cudatoolkit=11.1 + - requests=2.26.0 + - tqdm=4.62.2 + - ninja=1.10.2 + - matplotlib=3.4.2 + - imageio=2.9.0 + - pip: + - imgui==1.3.0 + - glfw==2.2.0 + - pyopengl==3.1.5 + - imageio-ffmpeg==0.4.3 + - pyspng + - psutil + - mrcfile + - tensorboard \ No newline at end of file diff --git a/ThirdParty/eg3d/gen_samples.py b/ThirdParty/eg3d/gen_samples.py new file mode 100644 index 0000000000000000000000000000000000000000..fab4a22cc6f8e557542a0bb26ae0e8b8862c732c --- /dev/null +++ b/ThirdParty/eg3d/gen_samples.py @@ -0,0 +1,230 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Generate images and shapes using pretrained network pickle.""" + +import os +import re +from typing import List, Optional, Tuple, Union + +import click +import dnnlib +import numpy as np +import PIL.Image +import torch +from tqdm import tqdm +import mrcfile + + +import legacy +from camera_utils import LookAtPoseSampler, FOV_to_intrinsics +from torch_utils import misc +from training.triplane import TriPlaneGenerator + + +#---------------------------------------------------------------------------- + +def parse_range(s: Union[str, List]) -> List[int]: + '''Parse a comma separated list of numbers or ranges and return a list of ints. + + Example: '1,2,5-10' returns [1, 2, 5, 6, 7] + ''' + if isinstance(s, list): return s + ranges = [] + range_re = re.compile(r'^(\d+)-(\d+)$') + for p in s.split(','): + if m := range_re.match(p): + ranges.extend(range(int(m.group(1)), int(m.group(2))+1)) + else: + ranges.append(int(p)) + return ranges + +#---------------------------------------------------------------------------- + +def parse_vec2(s: Union[str, Tuple[float, float]]) -> Tuple[float, float]: + '''Parse a floating point 2-vector of syntax 'a,b'. 
+ + Example: + '0,1' returns (0,1) + ''' + if isinstance(s, tuple): return s + parts = s.split(',') + if len(parts) == 2: + return (float(parts[0]), float(parts[1])) + raise ValueError(f'cannot parse 2-vector {s}') + +#---------------------------------------------------------------------------- + +def make_transform(translate: Tuple[float,float], angle: float): + m = np.eye(3) + s = np.sin(angle/360.0*np.pi*2) + c = np.cos(angle/360.0*np.pi*2) + m[0][0] = c + m[0][1] = s + m[0][2] = translate[0] + m[1][0] = -s + m[1][1] = c + m[1][2] = translate[1] + return m + +#---------------------------------------------------------------------------- + +def create_samples(N=256, voxel_origin=[0, 0, 0], cube_length=2.0): + # NOTE: the voxel_origin is actually the (bottom, left, down) corner, not the middle + voxel_origin = np.array(voxel_origin) - cube_length/2 + voxel_size = cube_length / (N - 1) + + overall_index = torch.arange(0, N ** 3, 1, out=torch.LongTensor()) + samples = torch.zeros(N ** 3, 3) + + # transform first 3 columns + # to be the x, y, z index + samples[:, 2] = overall_index % N + samples[:, 1] = (overall_index.float() / N) % N + samples[:, 0] = ((overall_index.float() / N) / N) % N + + # transform first 3 columns + # to be the x, y, z coordinate + samples[:, 0] = (samples[:, 0] * voxel_size) + voxel_origin[2] + samples[:, 1] = (samples[:, 1] * voxel_size) + voxel_origin[1] + samples[:, 2] = (samples[:, 2] * voxel_size) + voxel_origin[0] + + num_samples = N ** 3 + + return samples.unsqueeze(0), voxel_origin, voxel_size + +#---------------------------------------------------------------------------- + +@click.command() +@click.option('--network', 'network_pkl', help='Network pickle filename', required=True) +@click.option('--seeds', type=parse_range, help='List of random seeds (e.g., \'0,1,4-6\')', required=True) +@click.option('--trunc', 'truncation_psi', type=float, help='Truncation psi', default=1, show_default=True) +@click.option('--trunc-cutoff', 'truncation_cutoff', type=int, help='Truncation cutoff', default=14, show_default=True) +@click.option('--class', 'class_idx', type=int, help='Class label (unconditional if not specified)') +@click.option('--outdir', help='Where to save the output images', type=str, required=True, metavar='DIR') +@click.option('--shapes', help='Export shapes as .mrc files viewable in ChimeraX', type=bool, required=False, metavar='BOOL', default=False, show_default=True) +@click.option('--shape-res', help='', type=int, required=False, metavar='int', default=512, show_default=True) +@click.option('--fov-deg', help='Field of View of camera in degrees', type=int, required=False, metavar='float', default=18.837, show_default=True) +@click.option('--shape-format', help='Shape Format', type=click.Choice(['.mrc', '.ply']), default='.mrc') +@click.option('--reload_modules', help='Overload persistent modules?', type=bool, required=False, metavar='BOOL', default=False, show_default=True) +def generate_images( + network_pkl: str, + seeds: List[int], + truncation_psi: float, + truncation_cutoff: int, + outdir: str, + shapes: bool, + shape_res: int, + fov_deg: float, + shape_format: str, + class_idx: Optional[int], + reload_modules: bool, +): + """Generate images using pretrained network pickle. + + Examples: + + \b + # Generate an image using pre-trained FFHQ model. + python gen_samples.py --outdir=output --trunc=0.7 --seeds=0-5 --shapes=True\\ + --network=ffhq-rebalanced-128.pkl + """ + + print('Loading networks from "%s"...' 
% network_pkl) + device = torch.device('cuda') + with dnnlib.util.open_url(network_pkl) as f: + G = legacy.load_network_pkl(f)['G_ema'].to(device) # type: ignore + + # Specify reload_modules=True if you want code modifications to take effect; otherwise uses pickled code + if reload_modules: + print("Reloading Modules!") + G_new = TriPlaneGenerator(*G.init_args, **G.init_kwargs).eval().requires_grad_(False).to(device) + misc.copy_params_and_buffers(G, G_new, require_all=True) + G_new.neural_rendering_resolution = G.neural_rendering_resolution + G_new.rendering_kwargs = G.rendering_kwargs + G = G_new + + os.makedirs(outdir, exist_ok=True) + + cam2world_pose = LookAtPoseSampler.sample(3.14/2, 3.14/2, torch.tensor([0, 0, 0.2], device=device), radius=2.7, device=device) + intrinsics = FOV_to_intrinsics(fov_deg, device=device) + + # Generate images. + for seed_idx, seed in enumerate(seeds): + print('Generating image for seed %d (%d/%d) ...' % (seed, seed_idx, len(seeds))) + z = torch.from_numpy(np.random.RandomState(seed).randn(1, G.z_dim)).to(device) + + imgs = [] + angle_p = -0.2 + for angle_y, angle_p in [(.4, angle_p), (0, angle_p), (-.4, angle_p)]: + cam_pivot = torch.tensor(G.rendering_kwargs.get('avg_camera_pivot', [0, 0, 0]), device=device) + cam_radius = G.rendering_kwargs.get('avg_camera_radius', 2.7) + cam2world_pose = LookAtPoseSampler.sample(np.pi/2 + angle_y, np.pi/2 + angle_p, cam_pivot, radius=cam_radius, device=device) + conditioning_cam2world_pose = LookAtPoseSampler.sample(np.pi/2, np.pi/2, cam_pivot, radius=cam_radius, device=device) + camera_params = torch.cat([cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + conditioning_params = torch.cat([conditioning_cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + + ws = G.mapping(z, conditioning_params, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff) + img = G.synthesis(ws, camera_params)['image'] + + img = (img.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(torch.uint8) + imgs.append(img) + + img = torch.cat(imgs, dim=2) + + PIL.Image.fromarray(img[0].cpu().numpy(), 'RGB').save(f'{outdir}/seed{seed:04d}.png') + + if shapes: + # extract a shape.mrc with marching cubes. You can view the .mrc file using ChimeraX from UCSF. 
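# In more detail: create_samples() lays out a shape_res**3 grid of query points spanning
# the rendering volume (G.rendering_kwargs['box_warp']); the while loop evaluates the
# generator's density G.sample(...)['sigma'] in chunks of max_batch points; the result is
# reshaped into a (shape_res, shape_res, shape_res) volume and flipped along the first
# axis; the border is padded with a large negative value to suppress spurious density;
# and the volume is written either as a .ply mesh (marching cubes in shape_utils) or as
# a raw .mrc file.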
+ max_batch=1000000 + + samples, voxel_origin, voxel_size = create_samples(N=shape_res, voxel_origin=[0, 0, 0], cube_length=G.rendering_kwargs['box_warp'] * 1)#.reshape(1, -1, 3) + samples = samples.to(z.device) + sigmas = torch.zeros((samples.shape[0], samples.shape[1], 1), device=z.device) + transformed_ray_directions_expanded = torch.zeros((samples.shape[0], max_batch, 3), device=z.device) + transformed_ray_directions_expanded[..., -1] = -1 + + head = 0 + with tqdm(total = samples.shape[1]) as pbar: + with torch.no_grad(): + while head < samples.shape[1]: + torch.manual_seed(0) + sigma = G.sample(samples[:, head:head+max_batch], transformed_ray_directions_expanded[:, :samples.shape[1]-head], z, conditioning_params, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, noise_mode='const')['sigma'] + sigmas[:, head:head+max_batch] = sigma + head += max_batch + pbar.update(max_batch) + + sigmas = sigmas.reshape((shape_res, shape_res, shape_res)).cpu().numpy() + sigmas = np.flip(sigmas, 0) + + # Trim the border of the extracted cube + pad = int(30 * shape_res / 256) + pad_value = -1000 + sigmas[:pad] = pad_value + sigmas[-pad:] = pad_value + sigmas[:, :pad] = pad_value + sigmas[:, -pad:] = pad_value + sigmas[:, :, :pad] = pad_value + sigmas[:, :, -pad:] = pad_value + + if shape_format == '.ply': + from shape_utils import convert_sdf_samples_to_ply + convert_sdf_samples_to_ply(np.transpose(sigmas, (2, 1, 0)), [0, 0, 0], 1, os.path.join(outdir, f'seed{seed:04d}.ply'), level=10) + elif shape_format == '.mrc': # output mrc + with mrcfile.new_mmap(os.path.join(outdir, f'seed{seed:04d}.mrc'), overwrite=True, shape=sigmas.shape, mrc_mode=2) as mrc: + mrc.data[:] = sigmas + + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + generate_images() # pylint: disable=no-value-for-parameter + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gen_videos.py b/ThirdParty/eg3d/gen_videos.py new file mode 100644 index 0000000000000000000000000000000000000000..de03d44c66f89999590979932792f6770c51fe69 --- /dev/null +++ b/ThirdParty/eg3d/gen_videos.py @@ -0,0 +1,331 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
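The exported .mrc files can be opened directly in ChimeraX as the comment above notes; if you would rather mesh the density volume yourself, a minimal sketch using scikit-image (an assumed extra dependency, not listed in environment.yml; the output path is hypothetical) reuses the same iso-level as the .ply export path:

import mrcfile
import numpy as np
from skimage import measure  # assumed extra dependency, not part of this repo

with mrcfile.open('out/seed0000.mrc') as mrc:   # hypothetical path from --outdir=out
    sigmas = np.asarray(mrc.data)

# level=10 matches the value passed to convert_sdf_samples_to_ply in gen_samples.py.
verts, faces, normals, values = measure.marching_cubes(sigmas, level=10)
print(f'{len(verts)} vertices, {len(faces)} faces')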
+ +"""Generate lerp videos using pretrained network pickle.""" + +import os +import re +from typing import List, Optional, Tuple, Union + +import click +import dnnlib +import imageio +import numpy as np +import scipy.interpolate +import torch +from tqdm import tqdm +import mrcfile + +import legacy + +from camera_utils import LookAtPoseSampler +from torch_utils import misc +#---------------------------------------------------------------------------- + +def layout_grid(img, grid_w=None, grid_h=1, float_to_uint8=True, chw_to_hwc=True, to_numpy=True): + batch_size, channels, img_h, img_w = img.shape + if grid_w is None: + grid_w = batch_size // grid_h + assert batch_size == grid_w * grid_h + if float_to_uint8: + img = (img * 127.5 + 128).clamp(0, 255).to(torch.uint8) + img = img.reshape(grid_h, grid_w, channels, img_h, img_w) + img = img.permute(2, 0, 3, 1, 4) + img = img.reshape(channels, grid_h * img_h, grid_w * img_w) + if chw_to_hwc: + img = img.permute(1, 2, 0) + if to_numpy: + img = img.cpu().numpy() + return img + +def create_samples(N=256, voxel_origin=[0, 0, 0], cube_length=2.0): + # NOTE: the voxel_origin is actually the (bottom, left, down) corner, not the middle + voxel_origin = np.array(voxel_origin) - cube_length/2 + voxel_size = cube_length / (N - 1) + + overall_index = torch.arange(0, N ** 3, 1, out=torch.LongTensor()) + samples = torch.zeros(N ** 3, 3) + + # transform first 3 columns + # to be the x, y, z index + samples[:, 2] = overall_index % N + samples[:, 1] = (overall_index.float() / N) % N + samples[:, 0] = ((overall_index.float() / N) / N) % N + + # transform first 3 columns + # to be the x, y, z coordinate + samples[:, 0] = (samples[:, 0] * voxel_size) + voxel_origin[2] + samples[:, 1] = (samples[:, 1] * voxel_size) + voxel_origin[1] + samples[:, 2] = (samples[:, 2] * voxel_size) + voxel_origin[0] + + num_samples = N ** 3 + + return samples.unsqueeze(0), voxel_origin, voxel_size + +#---------------------------------------------------------------------------- + +def gen_interp_video(G, mp4: str, seeds, shuffle_seed=None, w_frames=60*4, kind='cubic', grid_dims=(1,1), num_keyframes=None, wraps=2, psi=1, truncation_cutoff=14, cfg='FFHQ', image_mode='image', gen_shapes=False, device=torch.device('cuda'), **video_kwargs): + grid_w = grid_dims[0] + grid_h = grid_dims[1] + + if num_keyframes is None: + if len(seeds) % (grid_w*grid_h) != 0: + raise ValueError('Number of input seeds must be divisible by grid W*H') + num_keyframes = len(seeds) // (grid_w*grid_h) + + all_seeds = np.zeros(num_keyframes*grid_h*grid_w, dtype=np.int64) + for idx in range(num_keyframes*grid_h*grid_w): + all_seeds[idx] = seeds[idx % len(seeds)] + + if shuffle_seed is not None: + rng = np.random.RandomState(seed=shuffle_seed) + rng.shuffle(all_seeds) + + camera_lookat_point = torch.tensor(G.rendering_kwargs['avg_camera_pivot'], device=device) + zs = torch.from_numpy(np.stack([np.random.RandomState(seed).randn(G.z_dim) for seed in all_seeds])).to(device) + cam2world_pose = LookAtPoseSampler.sample(3.14/2, 3.14/2, camera_lookat_point, radius=G.rendering_kwargs['avg_camera_radius'], device=device) + focal_length = 4.2647 if cfg != 'Shapenet' else 1.7074 # shapenet has higher FOV + intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]], device=device) + c = torch.cat([cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + c = c.repeat(len(zs), 1) + ws = G.mapping(z=zs, c=c, truncation_psi=psi, truncation_cutoff=truncation_cutoff) + _ = G.synthesis(ws[:1], c[:1]) # 
warm up + ws = ws.reshape(grid_h, grid_w, num_keyframes, *ws.shape[1:]) + + # Interpolation. + grid = [] + for yi in range(grid_h): + row = [] + for xi in range(grid_w): + x = np.arange(-num_keyframes * wraps, num_keyframes * (wraps + 1)) + y = np.tile(ws[yi][xi].cpu().numpy(), [wraps * 2 + 1, 1, 1]) + interp = scipy.interpolate.interp1d(x, y, kind=kind, axis=0) + row.append(interp) + grid.append(row) + + # Render video. + max_batch = 10000000 + voxel_resolution = 512 + video_out = imageio.get_writer(mp4, mode='I', fps=60, codec='libx264', **video_kwargs) + + if gen_shapes: + outdir = 'interpolation_{}_{}/'.format(all_seeds[0], all_seeds[1]) + os.makedirs(outdir, exist_ok=True) + all_poses = [] + for frame_idx in tqdm(range(num_keyframes * w_frames)): + imgs = [] + for yi in range(grid_h): + for xi in range(grid_w): + pitch_range = 0.25 + yaw_range = 0.35 + cam2world_pose = LookAtPoseSampler.sample(3.14/2 + yaw_range * np.sin(2 * 3.14 * frame_idx / (num_keyframes * w_frames)), + 3.14/2 -0.05 + pitch_range * np.cos(2 * 3.14 * frame_idx / (num_keyframes * w_frames)), + camera_lookat_point, radius=G.rendering_kwargs['avg_camera_radius'], device=device) + all_poses.append(cam2world_pose.squeeze().cpu().numpy()) + focal_length = 4.2647 if cfg != 'Shapenet' else 1.7074 # shapenet has higher FOV + intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]], device=device) + c = torch.cat([cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + + interp = grid[yi][xi] + w = torch.from_numpy(interp(frame_idx / w_frames)).to(device) + + entangle = 'camera' + if entangle == 'conditioning': + c_forward = torch.cat([LookAtPoseSampler.sample(3.14/2, + 3.14/2, + camera_lookat_point, + radius=G.rendering_kwargs['avg_camera_radius'], device=device).reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + w_c = G.mapping(z=zs[0:1], c=c[0:1], truncation_psi=psi, truncation_cutoff=truncation_cutoff) + img = G.synthesis(ws=w_c, c=c_forward, noise_mode='const')[image_mode][0] + elif entangle == 'camera': + img = G.synthesis(ws=w.unsqueeze(0), c=c[0:1], noise_mode='const')[image_mode][0] + elif entangle == 'both': + w_c = G.mapping(z=zs[0:1], c=c[0:1], truncation_psi=psi, truncation_cutoff=truncation_cutoff) + img = G.synthesis(ws=w_c, c=c[0:1], noise_mode='const')[image_mode][0] + + if image_mode == 'image_depth': + img = -img + img = (img - img.min()) / (img.max() - img.min()) * 2 - 1 + + imgs.append(img) + + if gen_shapes: + # generate shapes + print('Generating shape for frame %d / %d ...' 
% (frame_idx, num_keyframes * w_frames)) + + samples, voxel_origin, voxel_size = create_samples(N=voxel_resolution, voxel_origin=[0, 0, 0], cube_length=G.rendering_kwargs['box_warp']) + samples = samples.to(device) + sigmas = torch.zeros((samples.shape[0], samples.shape[1], 1), device=device) + transformed_ray_directions_expanded = torch.zeros((samples.shape[0], max_batch, 3), device=device) + transformed_ray_directions_expanded[..., -1] = -1 + + head = 0 + with tqdm(total = samples.shape[1]) as pbar: + with torch.no_grad(): + while head < samples.shape[1]: + torch.manual_seed(0) + sigma = G.sample_mixed(samples[:, head:head+max_batch], transformed_ray_directions_expanded[:, :samples.shape[1]-head], w.unsqueeze(0), truncation_psi=psi, noise_mode='const')['sigma'] + sigmas[:, head:head+max_batch] = sigma + head += max_batch + pbar.update(max_batch) + + sigmas = sigmas.reshape((voxel_resolution, voxel_resolution, voxel_resolution)).cpu().numpy() + sigmas = np.flip(sigmas, 0) + + pad = int(30 * voxel_resolution / 256) + pad_top = int(38 * voxel_resolution / 256) + sigmas[:pad] = 0 + sigmas[-pad:] = 0 + sigmas[:, :pad] = 0 + sigmas[:, -pad_top:] = 0 + sigmas[:, :, :pad] = 0 + sigmas[:, :, -pad:] = 0 + + output_ply = True + if output_ply: + from shape_utils import convert_sdf_samples_to_ply + convert_sdf_samples_to_ply(np.transpose(sigmas, (2, 1, 0)), [0, 0, 0], 1, os.path.join(outdir, f'{frame_idx:04d}_shape.ply'), level=10) + else: # output mrc + with mrcfile.new_mmap(outdir + f'{frame_idx:04d}_shape.mrc', overwrite=True, shape=sigmas.shape, mrc_mode=2) as mrc: + mrc.data[:] = sigmas + + video_out.append_data(layout_grid(torch.stack(imgs), grid_w=grid_w, grid_h=grid_h)) + video_out.close() + all_poses = np.stack(all_poses) + + if gen_shapes: + print(all_poses.shape) + with open(mp4.replace('.mp4', '_trajectory.npy'), 'wb') as f: + np.save(f, all_poses) + +#---------------------------------------------------------------------------- + +def parse_range(s: Union[str, List[int]]) -> List[int]: + '''Parse a comma separated list of numbers or ranges and return a list of ints. + + Example: '1,2,5-10' returns [1, 2, 5, 6, 7] + ''' + if isinstance(s, list): return s + ranges = [] + range_re = re.compile(r'^(\d+)-(\d+)$') + for p in s.split(','): + if m := range_re.match(p): + ranges.extend(range(int(m.group(1)), int(m.group(2))+1)) + else: + ranges.append(int(p)) + return ranges + +#---------------------------------------------------------------------------- + +def parse_tuple(s: Union[str, Tuple[int,int]]) -> Tuple[int, int]: + '''Parse a 'M,N' or 'MxN' integer tuple. + + Example: + '4x2' returns (4,2) + '0,1' returns (0,1) + ''' + if isinstance(s, tuple): return s + if m := re.match(r'^(\d+)[x,](\d+)$', s): + return (int(m.group(1)), int(m.group(2))) + raise ValueError(f'cannot parse tuple {s}') + +#---------------------------------------------------------------------------- + +@click.command() +@click.option('--network', 'network_pkl', help='Network pickle filename', required=True) +@click.option('--seeds', type=parse_range, help='List of random seeds', required=True) +@click.option('--shuffle-seed', type=int, help='Random seed to use for shuffling seed order', default=None) +@click.option('--grid', type=parse_tuple, help='Grid width/height, e.g. \'4x3\' (default: 1x1)', default=(1,1)) +@click.option('--num-keyframes', type=int, help='Number of seeds to interpolate through. 
If not specified, determine based on the length of the seeds array given by --seeds.', default=None) +@click.option('--w-frames', type=int, help='Number of frames to interpolate between latents', default=120) +@click.option('--trunc', 'truncation_psi', type=float, help='Truncation psi', default=1, show_default=True) +@click.option('--trunc-cutoff', 'truncation_cutoff', type=int, help='Truncation cutoff', default=14, show_default=True) +@click.option('--outdir', help='Output directory', type=str, required=True, metavar='DIR') +@click.option('--reload_modules', help='Overload persistent modules?', type=bool, required=False, metavar='BOOL', default=False, show_default=True) +@click.option('--cfg', help='Config', type=click.Choice(['FFHQ', 'AFHQ', 'Shapenet']), required=False, metavar='STR', default='FFHQ', show_default=True) +@click.option('--image_mode', help='Image mode', type=click.Choice(['image', 'image_depth', 'image_raw']), required=False, metavar='STR', default='image', show_default=True) +@click.option('--sample_mult', 'sampling_multiplier', type=float, help='Multiplier for depth sampling in volume rendering', default=2, show_default=True) +@click.option('--nrr', type=int, help='Neural rendering resolution override', default=None, show_default=True) +@click.option('--shapes', type=bool, help='Gen shapes for shape interpolation', default=False, show_default=True) +@click.option('--interpolate', type=bool, help='Interpolate between seeds', default=True, show_default=True) + +def generate_images( + network_pkl: str, + seeds: List[int], + shuffle_seed: Optional[int], + truncation_psi: float, + truncation_cutoff: int, + grid: Tuple[int,int], + num_keyframes: Optional[int], + w_frames: int, + outdir: str, + reload_modules: bool, + cfg: str, + image_mode: str, + sampling_multiplier: float, + nrr: Optional[int], + shapes: bool, + interpolate: bool, +): + """Render a latent vector interpolation video. + + Examples: + + \b + # Render a 4x2 grid of interpolations for seeds 0 through 31. + python gen_video.py --output=lerp.mp4 --trunc=1 --seeds=0-31 --grid=4x2 \\ + --network=https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/stylegan3-r-afhqv2-512x512.pkl + + Animation length and seed keyframes: + + The animation length is either determined based on the --seeds value or explicitly + specified using the --num-keyframes option. + + When num keyframes is specified with --num-keyframes, the output video length + will be 'num_keyframes*w_frames' frames. + + If --num-keyframes is not specified, the number of seeds given with + --seeds must be divisible by grid size W*H (--grid). In this case the + output video length will be '# seeds/(w*h)*w_frames' frames. + """ + + if not os.path.exists(outdir): + os.makedirs(outdir, exist_ok=True) + + print('Loading networks from "%s"...' 
% network_pkl) + device = torch.device('cuda') + with dnnlib.util.open_url(network_pkl) as f: + G = legacy.load_network_pkl(f)['G_ema'].to(device) # type: ignore + + + G.rendering_kwargs['depth_resolution'] = int(G.rendering_kwargs['depth_resolution'] * sampling_multiplier) + G.rendering_kwargs['depth_resolution_importance'] = int(G.rendering_kwargs['depth_resolution_importance'] * sampling_multiplier) + if nrr is not None: G.neural_rendering_resolution = nrr + + if truncation_cutoff == 0: + truncation_psi = 1.0 # truncation cutoff of 0 means no truncation anyways + if truncation_psi == 1.0: + truncation_cutoff = 14 # no truncation so doesn't matter where we cutoff + + if interpolate: + output = os.path.join(outdir, 'interpolation.mp4') + gen_interp_video(G=G, mp4=output, bitrate='10M', grid_dims=grid, num_keyframes=num_keyframes, w_frames=w_frames, seeds=seeds, shuffle_seed=shuffle_seed, psi=truncation_psi, truncation_cutoff=truncation_cutoff, cfg=cfg, image_mode=image_mode, gen_shapes=shapes) + else: + for seed in seeds: + output = os.path.join(outdir, f'{seed}.mp4') + seeds_ = [seed] + gen_interp_video(G=G, mp4=output, bitrate='10M', grid_dims=grid, num_keyframes=num_keyframes, w_frames=w_frames, seeds=seeds_, shuffle_seed=shuffle_seed, psi=truncation_psi, truncation_cutoff=truncation_cutoff, cfg=cfg, image_mode=image_mode) + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + generate_images() # pylint: disable=no-value-for-parameter + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gui_utils/__init__.py b/ThirdParty/eg3d/gui_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +# empty diff --git a/ThirdParty/eg3d/gui_utils/gl_utils.py b/ThirdParty/eg3d/gui_utils/gl_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1312f027c23bbb80eb489bba7a0f9014d95ac5b0 --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/gl_utils.py @@ -0,0 +1,376 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
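+# The helpers below are thin OpenGL wrappers (pixel transfer, textures, framebuffers,
+# and simple 2D shape drawing) used by the GUI utilities in this package. A minimal
+# headless-use sketch (assumes an EGL-capable driver; the import path is illustrative):
+#
+#   import os
+#   os.environ['PYOPENGL_PLATFORM'] = 'egl'  # must be set before OpenGL is imported
+#   from gui_utils import gl_utils
+#   gl_utils.init_egl()                      # creates a 1x1 pbuffer GL context
+#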
+ +import os +import functools +import contextlib +import numpy as np +import OpenGL.GL as gl +import OpenGL.GL.ARB.texture_float +import dnnlib + +#---------------------------------------------------------------------------- + +def init_egl(): + assert os.environ['PYOPENGL_PLATFORM'] == 'egl' # Must be set before importing OpenGL. + import OpenGL.EGL as egl + import ctypes + + # Initialize EGL. + display = egl.eglGetDisplay(egl.EGL_DEFAULT_DISPLAY) + assert display != egl.EGL_NO_DISPLAY + major = ctypes.c_int32() + minor = ctypes.c_int32() + ok = egl.eglInitialize(display, major, minor) + assert ok + assert major.value * 10 + minor.value >= 14 + + # Choose config. + config_attribs = [ + egl.EGL_RENDERABLE_TYPE, egl.EGL_OPENGL_BIT, + egl.EGL_SURFACE_TYPE, egl.EGL_PBUFFER_BIT, + egl.EGL_NONE + ] + configs = (ctypes.c_int32 * 1)() + num_configs = ctypes.c_int32() + ok = egl.eglChooseConfig(display, config_attribs, configs, 1, num_configs) + assert ok + assert num_configs.value == 1 + config = configs[0] + + # Create dummy pbuffer surface. + surface_attribs = [ + egl.EGL_WIDTH, 1, + egl.EGL_HEIGHT, 1, + egl.EGL_NONE + ] + surface = egl.eglCreatePbufferSurface(display, config, surface_attribs) + assert surface != egl.EGL_NO_SURFACE + + # Setup GL context. + ok = egl.eglBindAPI(egl.EGL_OPENGL_API) + assert ok + context = egl.eglCreateContext(display, config, egl.EGL_NO_CONTEXT, None) + assert context != egl.EGL_NO_CONTEXT + ok = egl.eglMakeCurrent(display, surface, surface, context) + assert ok + +#---------------------------------------------------------------------------- + +_texture_formats = { + ('uint8', 1): dnnlib.EasyDict(type=gl.GL_UNSIGNED_BYTE, format=gl.GL_LUMINANCE, internalformat=gl.GL_LUMINANCE8), + ('uint8', 2): dnnlib.EasyDict(type=gl.GL_UNSIGNED_BYTE, format=gl.GL_LUMINANCE_ALPHA, internalformat=gl.GL_LUMINANCE8_ALPHA8), + ('uint8', 3): dnnlib.EasyDict(type=gl.GL_UNSIGNED_BYTE, format=gl.GL_RGB, internalformat=gl.GL_RGB8), + ('uint8', 4): dnnlib.EasyDict(type=gl.GL_UNSIGNED_BYTE, format=gl.GL_RGBA, internalformat=gl.GL_RGBA8), + ('float32', 1): dnnlib.EasyDict(type=gl.GL_FLOAT, format=gl.GL_LUMINANCE, internalformat=OpenGL.GL.ARB.texture_float.GL_LUMINANCE32F_ARB), + ('float32', 2): dnnlib.EasyDict(type=gl.GL_FLOAT, format=gl.GL_LUMINANCE_ALPHA, internalformat=OpenGL.GL.ARB.texture_float.GL_LUMINANCE_ALPHA32F_ARB), + ('float32', 3): dnnlib.EasyDict(type=gl.GL_FLOAT, format=gl.GL_RGB, internalformat=gl.GL_RGB32F), + ('float32', 4): dnnlib.EasyDict(type=gl.GL_FLOAT, format=gl.GL_RGBA, internalformat=gl.GL_RGBA32F), +} + +def get_texture_format(dtype, channels): + return _texture_formats[(np.dtype(dtype).name, int(channels))] + +#---------------------------------------------------------------------------- + +def prepare_texture_data(image): + image = np.asarray(image) + if image.ndim == 2: + image = image[:, :, np.newaxis] + if image.dtype.name == 'float64': + image = image.astype('float32') + return image + +#---------------------------------------------------------------------------- + +def draw_pixels(image, *, pos=0, zoom=1, align=0, rint=True): + pos = np.broadcast_to(np.asarray(pos, dtype='float32'), [2]) + zoom = np.broadcast_to(np.asarray(zoom, dtype='float32'), [2]) + align = np.broadcast_to(np.asarray(align, dtype='float32'), [2]) + image = prepare_texture_data(image) + height, width, channels = image.shape + size = zoom * [width, height] + pos = pos - size * align + if rint: + pos = np.rint(pos) + fmt = get_texture_format(image.dtype, channels) + + 
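+    # The calls below hand the array to glDrawPixels: the raster position is the
+    # aligned top-left corner, and the negative Y pixel zoom makes the rows draw
+    # top-down so numpy's row-major layout matches the window's y-down projection.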
gl.glPushAttrib(gl.GL_CURRENT_BIT | gl.GL_PIXEL_MODE_BIT) + gl.glPushClientAttrib(gl.GL_CLIENT_PIXEL_STORE_BIT) + gl.glRasterPos2f(pos[0], pos[1]) + gl.glPixelZoom(zoom[0], -zoom[1]) + gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1) + gl.glDrawPixels(width, height, fmt.format, fmt.type, image) + gl.glPopClientAttrib() + gl.glPopAttrib() + +#---------------------------------------------------------------------------- + +def read_pixels(width, height, *, pos=0, dtype='uint8', channels=3): + pos = np.broadcast_to(np.asarray(pos, dtype='float32'), [2]) + dtype = np.dtype(dtype) + fmt = get_texture_format(dtype, channels) + image = np.empty([height, width, channels], dtype=dtype) + + gl.glPushClientAttrib(gl.GL_CLIENT_PIXEL_STORE_BIT) + gl.glPixelStorei(gl.GL_PACK_ALIGNMENT, 1) + gl.glReadPixels(int(np.round(pos[0])), int(np.round(pos[1])), width, height, fmt.format, fmt.type, image) + gl.glPopClientAttrib() + return np.flipud(image) + +#---------------------------------------------------------------------------- + +class Texture: + def __init__(self, *, image=None, width=None, height=None, channels=None, dtype=None, bilinear=True, mipmap=True): + self.gl_id = None + self.bilinear = bilinear + self.mipmap = mipmap + + # Determine size and dtype. + if image is not None: + image = prepare_texture_data(image) + self.height, self.width, self.channels = image.shape + self.dtype = image.dtype + else: + assert width is not None and height is not None + self.width = width + self.height = height + self.channels = channels if channels is not None else 3 + self.dtype = np.dtype(dtype) if dtype is not None else np.uint8 + + # Validate size and dtype. + assert isinstance(self.width, int) and self.width >= 0 + assert isinstance(self.height, int) and self.height >= 0 + assert isinstance(self.channels, int) and self.channels >= 1 + assert self.is_compatible(width=width, height=height, channels=channels, dtype=dtype) + + # Create texture object. 
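+        # The parameters below clamp sampling at the texture edges, use bilinear
+        # magnification when requested, and trilinear minification when mipmaps are
+        # enabled; update() then uploads the pixel data (and rebuilds mipmaps).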
+ self.gl_id = gl.glGenTextures(1) + with self.bind(): + gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP_TO_EDGE) + gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP_TO_EDGE) + gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, gl.GL_LINEAR if self.bilinear else gl.GL_NEAREST) + gl.glTexParameterf(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR_MIPMAP_LINEAR if self.mipmap else gl.GL_NEAREST) + self.update(image) + + def delete(self): + if self.gl_id is not None: + gl.glDeleteTextures([self.gl_id]) + self.gl_id = None + + def __del__(self): + try: + self.delete() + except: + pass + + @contextlib.contextmanager + def bind(self): + prev_id = gl.glGetInteger(gl.GL_TEXTURE_BINDING_2D) + gl.glBindTexture(gl.GL_TEXTURE_2D, self.gl_id) + yield + gl.glBindTexture(gl.GL_TEXTURE_2D, prev_id) + + def update(self, image): + if image is not None: + image = prepare_texture_data(image) + assert self.is_compatible(image=image) + with self.bind(): + fmt = get_texture_format(self.dtype, self.channels) + gl.glPushClientAttrib(gl.GL_CLIENT_PIXEL_STORE_BIT) + gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1) + gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, fmt.internalformat, self.width, self.height, 0, fmt.format, fmt.type, image) + if self.mipmap: + gl.glGenerateMipmap(gl.GL_TEXTURE_2D) + gl.glPopClientAttrib() + + def draw(self, *, pos=0, zoom=1, align=0, rint=False, color=1, alpha=1, rounding=0): + zoom = np.broadcast_to(np.asarray(zoom, dtype='float32'), [2]) + size = zoom * [self.width, self.height] + with self.bind(): + gl.glPushAttrib(gl.GL_ENABLE_BIT) + gl.glEnable(gl.GL_TEXTURE_2D) + draw_rect(pos=pos, size=size, align=align, rint=rint, color=color, alpha=alpha, rounding=rounding) + gl.glPopAttrib() + + def is_compatible(self, *, image=None, width=None, height=None, channels=None, dtype=None): # pylint: disable=too-many-return-statements + if image is not None: + if image.ndim != 3: + return False + ih, iw, ic = image.shape + if not self.is_compatible(width=iw, height=ih, channels=ic, dtype=image.dtype): + return False + if width is not None and self.width != width: + return False + if height is not None and self.height != height: + return False + if channels is not None and self.channels != channels: + return False + if dtype is not None and self.dtype != dtype: + return False + return True + +#---------------------------------------------------------------------------- + +class Framebuffer: + def __init__(self, *, texture=None, width=None, height=None, channels=None, dtype=None, msaa=0): + self.texture = texture + self.gl_id = None + self.gl_color = None + self.gl_depth_stencil = None + self.msaa = msaa + + # Determine size and dtype. + if texture is not None: + assert isinstance(self.texture, Texture) + self.width = texture.width + self.height = texture.height + self.channels = texture.channels + self.dtype = texture.dtype + else: + assert width is not None and height is not None + self.width = width + self.height = height + self.channels = channels if channels is not None else 4 + self.dtype = np.dtype(dtype) if dtype is not None else np.float32 + + # Validate size and dtype. 
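+        # Besides basic type checks, the asserts below ensure that any explicitly passed
+        # width/height/channels/dtype agree with the attached texture, since the texture
+        # (when given) is what the color attachment created further down renders into.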
+ assert isinstance(self.width, int) and self.width >= 0 + assert isinstance(self.height, int) and self.height >= 0 + assert isinstance(self.channels, int) and self.channels >= 1 + assert width is None or width == self.width + assert height is None or height == self.height + assert channels is None or channels == self.channels + assert dtype is None or dtype == self.dtype + + # Create framebuffer object. + self.gl_id = gl.glGenFramebuffers(1) + with self.bind(): + + # Setup color buffer. + if self.texture is not None: + assert self.msaa == 0 + gl.glFramebufferTexture2D(gl.GL_FRAMEBUFFER, gl.GL_COLOR_ATTACHMENT0, gl.GL_TEXTURE_2D, self.texture.gl_id, 0) + else: + fmt = get_texture_format(self.dtype, self.channels) + self.gl_color = gl.glGenRenderbuffers(1) + gl.glBindRenderbuffer(gl.GL_RENDERBUFFER, self.gl_color) + gl.glRenderbufferStorageMultisample(gl.GL_RENDERBUFFER, self.msaa, fmt.internalformat, self.width, self.height) + gl.glFramebufferRenderbuffer(gl.GL_FRAMEBUFFER, gl.GL_COLOR_ATTACHMENT0, gl.GL_RENDERBUFFER, self.gl_color) + + # Setup depth/stencil buffer. + self.gl_depth_stencil = gl.glGenRenderbuffers(1) + gl.glBindRenderbuffer(gl.GL_RENDERBUFFER, self.gl_depth_stencil) + gl.glRenderbufferStorageMultisample(gl.GL_RENDERBUFFER, self.msaa, gl.GL_DEPTH24_STENCIL8, self.width, self.height) + gl.glFramebufferRenderbuffer(gl.GL_FRAMEBUFFER, gl.GL_DEPTH_STENCIL_ATTACHMENT, gl.GL_RENDERBUFFER, self.gl_depth_stencil) + + def delete(self): + if self.gl_id is not None: + gl.glDeleteFramebuffers([self.gl_id]) + self.gl_id = None + if self.gl_color is not None: + gl.glDeleteRenderbuffers(1, [self.gl_color]) + self.gl_color = None + if self.gl_depth_stencil is not None: + gl.glDeleteRenderbuffers(1, [self.gl_depth_stencil]) + self.gl_depth_stencil = None + + def __del__(self): + try: + self.delete() + except: + pass + + @contextlib.contextmanager + def bind(self): + prev_fbo = gl.glGetInteger(gl.GL_FRAMEBUFFER_BINDING) + prev_rbo = gl.glGetInteger(gl.GL_RENDERBUFFER_BINDING) + gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.gl_id) + if self.width is not None and self.height is not None: + gl.glViewport(0, 0, self.width, self.height) + yield + gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, prev_fbo) + gl.glBindRenderbuffer(gl.GL_RENDERBUFFER, prev_rbo) + + def blit(self, dst=None): + assert dst is None or isinstance(dst, Framebuffer) + with self.bind(): + gl.glBindFramebuffer(gl.GL_DRAW_FRAMEBUFFER, 0 if dst is None else dst.fbo) + gl.glBlitFramebuffer(0, 0, self.width, self.height, 0, 0, self.width, self.height, gl.GL_COLOR_BUFFER_BIT, gl.GL_NEAREST) + +#---------------------------------------------------------------------------- + +def draw_shape(vertices, *, mode=gl.GL_TRIANGLE_FAN, pos=0, size=1, color=1, alpha=1): + assert vertices.ndim == 2 and vertices.shape[1] == 2 + pos = np.broadcast_to(np.asarray(pos, dtype='float32'), [2]) + size = np.broadcast_to(np.asarray(size, dtype='float32'), [2]) + color = np.broadcast_to(np.asarray(color, dtype='float32'), [3]) + alpha = np.clip(np.broadcast_to(np.asarray(alpha, dtype='float32'), []), 0, 1) + + gl.glPushClientAttrib(gl.GL_CLIENT_VERTEX_ARRAY_BIT) + gl.glPushAttrib(gl.GL_CURRENT_BIT | gl.GL_TRANSFORM_BIT) + gl.glMatrixMode(gl.GL_MODELVIEW) + gl.glPushMatrix() + + gl.glEnableClientState(gl.GL_VERTEX_ARRAY) + gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) + gl.glVertexPointer(2, gl.GL_FLOAT, 0, vertices) + gl.glTexCoordPointer(2, gl.GL_FLOAT, 0, vertices) + gl.glTranslate(pos[0], pos[1], 0) + gl.glScale(size[0], size[1], 1) + gl.glColor4f(color[0] * 
alpha, color[1] * alpha, color[2] * alpha, alpha) + gl.glDrawArrays(mode, 0, vertices.shape[0]) + + gl.glPopMatrix() + gl.glPopAttrib() + gl.glPopClientAttrib() + +#---------------------------------------------------------------------------- + +def draw_rect(*, pos=0, pos2=None, size=None, align=0, rint=False, color=1, alpha=1, rounding=0): + assert pos2 is None or size is None + pos = np.broadcast_to(np.asarray(pos, dtype='float32'), [2]) + pos2 = np.broadcast_to(np.asarray(pos2, dtype='float32'), [2]) if pos2 is not None else None + size = np.broadcast_to(np.asarray(size, dtype='float32'), [2]) if size is not None else None + size = size if size is not None else pos2 - pos if pos2 is not None else np.array([1, 1], dtype='float32') + pos = pos - size * align + if rint: + pos = np.rint(pos) + rounding = np.broadcast_to(np.asarray(rounding, dtype='float32'), [2]) + rounding = np.minimum(np.abs(rounding) / np.maximum(np.abs(size), 1e-8), 0.5) + if np.min(rounding) == 0: + rounding *= 0 + vertices = _setup_rect(float(rounding[0]), float(rounding[1])) + draw_shape(vertices, mode=gl.GL_TRIANGLE_FAN, pos=pos, size=size, color=color, alpha=alpha) + +@functools.lru_cache(maxsize=10000) +def _setup_rect(rx, ry): + t = np.linspace(0, np.pi / 2, 1 if max(rx, ry) == 0 else 64) + s = 1 - np.sin(t); c = 1 - np.cos(t) + x = [c * rx, 1 - s * rx, 1 - c * rx, s * rx] + y = [s * ry, c * ry, 1 - s * ry, 1 - c * ry] + v = np.stack([x, y], axis=-1).reshape(-1, 2) + return v.astype('float32') + +#---------------------------------------------------------------------------- + +def draw_circle(*, center=0, radius=100, hole=0, color=1, alpha=1): + hole = np.broadcast_to(np.asarray(hole, dtype='float32'), []) + vertices = _setup_circle(float(hole)) + draw_shape(vertices, mode=gl.GL_TRIANGLE_STRIP, pos=center, size=radius, color=color, alpha=alpha) + +@functools.lru_cache(maxsize=10000) +def _setup_circle(hole): + t = np.linspace(0, np.pi * 2, 128) + s = np.sin(t); c = np.cos(t) + v = np.stack([c, s, c * hole, s * hole], axis=-1).reshape(-1, 2) + return v.astype('float32') + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gui_utils/glfw_window.py b/ThirdParty/eg3d/gui_utils/glfw_window.py new file mode 100644 index 0000000000000000000000000000000000000000..aeb96e8707db91c620825541c9b3c846b7362407 --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/glfw_window.py @@ -0,0 +1,231 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import time +import glfw +import OpenGL.GL as gl +from . 
import gl_utils + +#---------------------------------------------------------------------------- + +class GlfwWindow: # pylint: disable=too-many-public-methods + def __init__(self, *, title='GlfwWindow', window_width=1920, window_height=1080, deferred_show=True, close_on_esc=True): + self._glfw_window = None + self._drawing_frame = False + self._frame_start_time = None + self._frame_delta = 0 + self._fps_limit = None + self._vsync = None + self._skip_frames = 0 + self._deferred_show = deferred_show + self._close_on_esc = close_on_esc + self._esc_pressed = False + self._drag_and_drop_paths = None + self._capture_next_frame = False + self._captured_frame = None + + # Create window. + glfw.init() + glfw.window_hint(glfw.VISIBLE, False) + self._glfw_window = glfw.create_window(width=window_width, height=window_height, title=title, monitor=None, share=None) + self._attach_glfw_callbacks() + self.make_context_current() + + # Adjust window. + self.set_vsync(False) + self.set_window_size(window_width, window_height) + if not self._deferred_show: + glfw.show_window(self._glfw_window) + + def close(self): + if self._drawing_frame: + self.end_frame() + if self._glfw_window is not None: + glfw.destroy_window(self._glfw_window) + self._glfw_window = None + #glfw.terminate() # Commented out to play it nice with other glfw clients. + + def __del__(self): + try: + self.close() + except: + pass + + @property + def window_width(self): + return self.content_width + + @property + def window_height(self): + return self.content_height + self.title_bar_height + + @property + def content_width(self): + width, _height = glfw.get_window_size(self._glfw_window) + return width + + @property + def content_height(self): + _width, height = glfw.get_window_size(self._glfw_window) + return height + + @property + def title_bar_height(self): + _left, top, _right, _bottom = glfw.get_window_frame_size(self._glfw_window) + return top + + @property + def monitor_width(self): + _, _, width, _height = glfw.get_monitor_workarea(glfw.get_primary_monitor()) + return width + + @property + def monitor_height(self): + _, _, _width, height = glfw.get_monitor_workarea(glfw.get_primary_monitor()) + return height + + @property + def frame_delta(self): + return self._frame_delta + + def set_title(self, title): + glfw.set_window_title(self._glfw_window, title) + + def set_window_size(self, width, height): + width = min(width, self.monitor_width) + height = min(height, self.monitor_height) + glfw.set_window_size(self._glfw_window, width, max(height - self.title_bar_height, 0)) + if width == self.monitor_width and height == self.monitor_height: + self.maximize() + + def set_content_size(self, width, height): + self.set_window_size(width, height + self.title_bar_height) + + def maximize(self): + glfw.maximize_window(self._glfw_window) + + def set_position(self, x, y): + glfw.set_window_pos(self._glfw_window, x, y + self.title_bar_height) + + def center(self): + self.set_position((self.monitor_width - self.window_width) // 2, (self.monitor_height - self.window_height) // 2) + + def set_vsync(self, vsync): + vsync = bool(vsync) + if vsync != self._vsync: + glfw.swap_interval(1 if vsync else 0) + self._vsync = vsync + + def set_fps_limit(self, fps_limit): + self._fps_limit = int(fps_limit) + + def should_close(self): + return glfw.window_should_close(self._glfw_window) or (self._close_on_esc and self._esc_pressed) + + def skip_frame(self): + self.skip_frames(1) + + def skip_frames(self, num): # Do not update window for the next N frames. 
+ self._skip_frames = max(self._skip_frames, int(num)) + + def is_skipping_frames(self): + return self._skip_frames > 0 + + def capture_next_frame(self): + self._capture_next_frame = True + + def pop_captured_frame(self): + frame = self._captured_frame + self._captured_frame = None + return frame + + def pop_drag_and_drop_paths(self): + paths = self._drag_and_drop_paths + self._drag_and_drop_paths = None + return paths + + def draw_frame(self): # To be overridden by subclass. + self.begin_frame() + # Rendering code goes here. + self.end_frame() + + def make_context_current(self): + if self._glfw_window is not None: + glfw.make_context_current(self._glfw_window) + + def begin_frame(self): + # End previous frame. + if self._drawing_frame: + self.end_frame() + + # Apply FPS limit. + if self._frame_start_time is not None and self._fps_limit is not None: + delay = self._frame_start_time - time.perf_counter() + 1 / self._fps_limit + if delay > 0: + time.sleep(delay) + cur_time = time.perf_counter() + if self._frame_start_time is not None: + self._frame_delta = cur_time - self._frame_start_time + self._frame_start_time = cur_time + + # Process events. + glfw.poll_events() + + # Begin frame. + self._drawing_frame = True + self.make_context_current() + + # Initialize GL state. + gl.glViewport(0, 0, self.content_width, self.content_height) + gl.glMatrixMode(gl.GL_PROJECTION) + gl.glLoadIdentity() + gl.glTranslate(-1, 1, 0) + gl.glScale(2 / max(self.content_width, 1), -2 / max(self.content_height, 1), 1) + gl.glMatrixMode(gl.GL_MODELVIEW) + gl.glLoadIdentity() + gl.glEnable(gl.GL_BLEND) + gl.glBlendFunc(gl.GL_ONE, gl.GL_ONE_MINUS_SRC_ALPHA) # Pre-multiplied alpha. + + # Clear. + gl.glClearColor(0, 0, 0, 1) + gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) + + def end_frame(self): + assert self._drawing_frame + self._drawing_frame = False + + # Skip frames if requested. + if self._skip_frames > 0: + self._skip_frames -= 1 + return + + # Capture frame if requested. + if self._capture_next_frame: + self._captured_frame = gl_utils.read_pixels(self.content_width, self.content_height) + self._capture_next_frame = False + + # Update window. + if self._deferred_show: + glfw.show_window(self._glfw_window) + self._deferred_show = False + glfw.swap_buffers(self._glfw_window) + + def _attach_glfw_callbacks(self): + glfw.set_key_callback(self._glfw_window, self._glfw_key_callback) + glfw.set_drop_callback(self._glfw_window, self._glfw_drop_callback) + + def _glfw_key_callback(self, _window, key, _scancode, action, _mods): + if action == glfw.PRESS and key == glfw.KEY_ESCAPE: + self._esc_pressed = True + + def _glfw_drop_callback(self, _window, paths): + self._drag_and_drop_paths = paths + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gui_utils/imgui_utils.py b/ThirdParty/eg3d/gui_utils/imgui_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..05a8357caf20493956769984f32776441beefd27 --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/imgui_utils.py @@ -0,0 +1,171 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import contextlib +import imgui + +#---------------------------------------------------------------------------- + +def set_default_style(color_scheme='dark', spacing=9, indent=23, scrollbar=27): + s = imgui.get_style() + s.window_padding = [spacing, spacing] + s.item_spacing = [spacing, spacing] + s.item_inner_spacing = [spacing, spacing] + s.columns_min_spacing = spacing + s.indent_spacing = indent + s.scrollbar_size = scrollbar + s.frame_padding = [4, 3] + s.window_border_size = 1 + s.child_border_size = 1 + s.popup_border_size = 1 + s.frame_border_size = 1 + s.window_rounding = 0 + s.child_rounding = 0 + s.popup_rounding = 3 + s.frame_rounding = 3 + s.scrollbar_rounding = 3 + s.grab_rounding = 3 + + getattr(imgui, f'style_colors_{color_scheme}')(s) + c0 = s.colors[imgui.COLOR_MENUBAR_BACKGROUND] + c1 = s.colors[imgui.COLOR_FRAME_BACKGROUND] + s.colors[imgui.COLOR_POPUP_BACKGROUND] = [x * 0.7 + y * 0.3 for x, y in zip(c0, c1)][:3] + [1] + +#---------------------------------------------------------------------------- + +@contextlib.contextmanager +def grayed_out(cond=True): + if cond: + s = imgui.get_style() + text = s.colors[imgui.COLOR_TEXT_DISABLED] + grab = s.colors[imgui.COLOR_SCROLLBAR_GRAB] + back = s.colors[imgui.COLOR_MENUBAR_BACKGROUND] + imgui.push_style_color(imgui.COLOR_TEXT, *text) + imgui.push_style_color(imgui.COLOR_CHECK_MARK, *grab) + imgui.push_style_color(imgui.COLOR_SLIDER_GRAB, *grab) + imgui.push_style_color(imgui.COLOR_SLIDER_GRAB_ACTIVE, *grab) + imgui.push_style_color(imgui.COLOR_FRAME_BACKGROUND, *back) + imgui.push_style_color(imgui.COLOR_FRAME_BACKGROUND_HOVERED, *back) + imgui.push_style_color(imgui.COLOR_FRAME_BACKGROUND_ACTIVE, *back) + imgui.push_style_color(imgui.COLOR_BUTTON, *back) + imgui.push_style_color(imgui.COLOR_BUTTON_HOVERED, *back) + imgui.push_style_color(imgui.COLOR_BUTTON_ACTIVE, *back) + imgui.push_style_color(imgui.COLOR_HEADER, *back) + imgui.push_style_color(imgui.COLOR_HEADER_HOVERED, *back) + imgui.push_style_color(imgui.COLOR_HEADER_ACTIVE, *back) + imgui.push_style_color(imgui.COLOR_POPUP_BACKGROUND, *back) + yield + imgui.pop_style_color(14) + else: + yield + +#---------------------------------------------------------------------------- + +@contextlib.contextmanager +def item_width(width=None): + if width is not None: + imgui.push_item_width(width) + yield + imgui.pop_item_width() + else: + yield + +#---------------------------------------------------------------------------- + +def scoped_by_object_id(method): + def decorator(self, *args, **kwargs): + imgui.push_id(str(id(self))) + res = method(self, *args, **kwargs) + imgui.pop_id() + return res + return decorator + +#---------------------------------------------------------------------------- + +def button(label, width=0, enabled=True): + with grayed_out(not enabled): + clicked = imgui.button(label, width=width) + clicked = clicked and enabled + return clicked + +#---------------------------------------------------------------------------- + +def collapsing_header(text, visible=None, flags=0, default=False, enabled=True, show=True): + expanded = False + if show: + if default: + flags |= imgui.TREE_NODE_DEFAULT_OPEN + if not enabled: + flags |= imgui.TREE_NODE_LEAF + with grayed_out(not enabled): + expanded, visible = imgui.collapsing_header(text, visible=visible, 
flags=flags) + expanded = expanded and enabled + return expanded, visible + +#---------------------------------------------------------------------------- + +def popup_button(label, width=0, enabled=True): + if button(label, width, enabled): + imgui.open_popup(label) + opened = imgui.begin_popup(label) + return opened + +#---------------------------------------------------------------------------- + +def input_text(label, value, buffer_length, flags, width=None, help_text=''): + old_value = value + color = list(imgui.get_style().colors[imgui.COLOR_TEXT]) + if value == '': + color[-1] *= 0.5 + with item_width(width): + imgui.push_style_color(imgui.COLOR_TEXT, *color) + value = value if value != '' else help_text + changed, value = imgui.input_text(label, value, buffer_length, flags) + value = value if value != help_text else '' + imgui.pop_style_color(1) + if not flags & imgui.INPUT_TEXT_ENTER_RETURNS_TRUE: + changed = (value != old_value) + return changed, value + +#---------------------------------------------------------------------------- + +def drag_previous_control(enabled=True): + dragging = False + dx = 0 + dy = 0 + if imgui.begin_drag_drop_source(imgui.DRAG_DROP_SOURCE_NO_PREVIEW_TOOLTIP): + if enabled: + dragging = True + dx, dy = imgui.get_mouse_drag_delta() + imgui.reset_mouse_drag_delta() + imgui.end_drag_drop_source() + return dragging, dx, dy + +#---------------------------------------------------------------------------- + +def drag_button(label, width=0, enabled=True): + clicked = button(label, width=width, enabled=enabled) + dragging, dx, dy = drag_previous_control(enabled=enabled) + return clicked, dragging, dx, dy + +#---------------------------------------------------------------------------- + +def drag_hidden_window(label, x, y, width, height, enabled=True): + imgui.push_style_color(imgui.COLOR_WINDOW_BACKGROUND, 0, 0, 0, 0) + imgui.push_style_color(imgui.COLOR_BORDER, 0, 0, 0, 0) + imgui.set_next_window_position(x, y) + imgui.set_next_window_size(width, height) + imgui.begin(label, closable=False, flags=(imgui.WINDOW_NO_TITLE_BAR | imgui.WINDOW_NO_RESIZE | imgui.WINDOW_NO_MOVE)) + dragging, dx, dy = drag_previous_control(enabled=enabled) + imgui.end() + imgui.pop_style_color(2) + return dragging, dx, dy + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gui_utils/imgui_window.py b/ThirdParty/eg3d/gui_utils/imgui_window.py new file mode 100644 index 0000000000000000000000000000000000000000..0e1a6382b41c593c5ea4d9d2888c716282e575ec --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/imgui_window.py @@ -0,0 +1,105 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import os +import imgui +import imgui.integrations.glfw + +from . import glfw_window +from . import imgui_utils +from . 
import text_utils + +#---------------------------------------------------------------------------- + +class ImguiWindow(glfw_window.GlfwWindow): + def __init__(self, *, title='ImguiWindow', font=None, font_sizes=range(14,24), **glfw_kwargs): + if font is None: + font = text_utils.get_default_font() + font_sizes = {int(size) for size in font_sizes} + super().__init__(title=title, **glfw_kwargs) + + # Init fields. + self._imgui_context = None + self._imgui_renderer = None + self._imgui_fonts = None + self._cur_font_size = max(font_sizes) + + # Delete leftover imgui.ini to avoid unexpected behavior. + if os.path.isfile('imgui.ini'): + os.remove('imgui.ini') + + # Init ImGui. + self._imgui_context = imgui.create_context() + self._imgui_renderer = _GlfwRenderer(self._glfw_window) + self._attach_glfw_callbacks() + imgui.get_io().ini_saving_rate = 0 # Disable creating imgui.ini at runtime. + imgui.get_io().mouse_drag_threshold = 0 # Improve behavior with imgui_utils.drag_custom(). + self._imgui_fonts = {size: imgui.get_io().fonts.add_font_from_file_ttf(font, size) for size in font_sizes} + self._imgui_renderer.refresh_font_texture() + + def close(self): + self.make_context_current() + self._imgui_fonts = None + if self._imgui_renderer is not None: + self._imgui_renderer.shutdown() + self._imgui_renderer = None + if self._imgui_context is not None: + #imgui.destroy_context(self._imgui_context) # Commented out to avoid creating imgui.ini at the end. + self._imgui_context = None + super().close() + + def _glfw_key_callback(self, *args): + super()._glfw_key_callback(*args) + self._imgui_renderer.keyboard_callback(*args) + + @property + def font_size(self): + return self._cur_font_size + + @property + def spacing(self): + return round(self._cur_font_size * 0.4) + + def set_font_size(self, target): # Applied on next frame. + self._cur_font_size = min((abs(key - target), key) for key in self._imgui_fonts.keys())[1] + + def begin_frame(self): + # Begin glfw frame. + super().begin_frame() + + # Process imgui events. + self._imgui_renderer.mouse_wheel_multiplier = self._cur_font_size / 10 + if self.content_width > 0 and self.content_height > 0: + self._imgui_renderer.process_inputs() + + # Begin imgui frame. + imgui.new_frame() + imgui.push_font(self._imgui_fonts[self._cur_font_size]) + imgui_utils.set_default_style(spacing=self.spacing, indent=self.font_size, scrollbar=self.font_size+4) + + def end_frame(self): + imgui.pop_font() + imgui.render() + imgui.end_frame() + self._imgui_renderer.render(imgui.get_draw_data()) + super().end_frame() + +#---------------------------------------------------------------------------- +# Wrapper class for GlfwRenderer to fix a mouse wheel bug on Linux. + +class _GlfwRenderer(imgui.integrations.glfw.GlfwRenderer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.mouse_wheel_multiplier = 1 + + def scroll_callback(self, window, x_offset, y_offset): + self.io.mouse_wheel += y_offset * self.mouse_wheel_multiplier + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/gui_utils/text_utils.py b/ThirdParty/eg3d/gui_utils/text_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e64a34d1287d58960141fa06a8e76446cd9cebc8 --- /dev/null +++ b/ThirdParty/eg3d/gui_utils/text_utils.py @@ -0,0 +1,125 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import functools +from typing import Optional + +import dnnlib +import numpy as np +import PIL.Image +import PIL.ImageFont +import scipy.ndimage + +from . import gl_utils + +#---------------------------------------------------------------------------- + +def get_default_font(): + url = 'http://fonts.gstatic.com/s/opensans/v17/mem8YaGs126MiZpBA-U1UpcaXcl0Aw.ttf' # Open Sans regular + return dnnlib.util.open_url(url, return_filename=True) + +#---------------------------------------------------------------------------- + +@functools.lru_cache(maxsize=None) +def get_pil_font(font=None, size=32): + if font is None: + font = get_default_font() + return PIL.ImageFont.truetype(font=font, size=size) + +#---------------------------------------------------------------------------- + +def get_array(string, *, dropshadow_radius: int=None, **kwargs): + if dropshadow_radius is not None: + offset_x = int(np.ceil(dropshadow_radius*2/3)) + offset_y = int(np.ceil(dropshadow_radius*2/3)) + return _get_array_priv(string, dropshadow_radius=dropshadow_radius, offset_x=offset_x, offset_y=offset_y, **kwargs) + else: + return _get_array_priv(string, **kwargs) + +@functools.lru_cache(maxsize=10000) +def _get_array_priv( + string: str, *, + size: int = 32, + max_width: Optional[int]=None, + max_height: Optional[int]=None, + min_size=10, + shrink_coef=0.8, + dropshadow_radius: int=None, + offset_x: int=None, + offset_y: int=None, + **kwargs +): + cur_size = size + array = None + while True: + if dropshadow_radius is not None: + # separate implementation for dropshadow text rendering + array = _get_array_impl_dropshadow(string, size=cur_size, radius=dropshadow_radius, offset_x=offset_x, offset_y=offset_y, **kwargs) + else: + array = _get_array_impl(string, size=cur_size, **kwargs) + height, width, _ = array.shape + if (max_width is None or width <= max_width) and (max_height is None or height <= max_height) or (cur_size <= min_size): + break + cur_size = max(int(cur_size * shrink_coef), min_size) + return array + +#---------------------------------------------------------------------------- + +@functools.lru_cache(maxsize=10000) +def _get_array_impl(string, *, font=None, size=32, outline=0, outline_pad=3, outline_coef=3, outline_exp=2, line_pad: int=None): + pil_font = get_pil_font(font=font, size=size) + lines = [pil_font.getmask(line, 'L') for line in string.split('\n')] + lines = [np.array(line, dtype=np.uint8).reshape([line.size[1], line.size[0]]) for line in lines] + width = max(line.shape[1] for line in lines) + lines = [np.pad(line, ((0, 0), (0, width - line.shape[1])), mode='constant') for line in lines] + line_spacing = line_pad if line_pad is not None else size // 2 + lines = [np.pad(line, ((0, line_spacing), (0, 0)), mode='constant') for line in lines[:-1]] + lines[-1:] + mask = np.concatenate(lines, axis=0) + alpha = mask + if outline > 0: + mask = np.pad(mask, int(np.ceil(outline * outline_pad)), mode='constant', constant_values=0) + alpha = mask.astype(np.float32) / 255 + alpha = scipy.ndimage.gaussian_filter(alpha, outline) + alpha = 1 - np.maximum(1 - 
alpha * outline_coef, 0) ** outline_exp + alpha = (alpha * 255 + 0.5).clip(0, 255).astype(np.uint8) + alpha = np.maximum(alpha, mask) + return np.stack([mask, alpha], axis=-1) + +#---------------------------------------------------------------------------- + +@functools.lru_cache(maxsize=10000) +def _get_array_impl_dropshadow(string, *, font=None, size=32, radius: int, offset_x: int, offset_y: int, line_pad: int=None, **kwargs): + assert (offset_x > 0) and (offset_y > 0) + pil_font = get_pil_font(font=font, size=size) + lines = [pil_font.getmask(line, 'L') for line in string.split('\n')] + lines = [np.array(line, dtype=np.uint8).reshape([line.size[1], line.size[0]]) for line in lines] + width = max(line.shape[1] for line in lines) + lines = [np.pad(line, ((0, 0), (0, width - line.shape[1])), mode='constant') for line in lines] + line_spacing = line_pad if line_pad is not None else size // 2 + lines = [np.pad(line, ((0, line_spacing), (0, 0)), mode='constant') for line in lines[:-1]] + lines[-1:] + mask = np.concatenate(lines, axis=0) + alpha = mask + + mask = np.pad(mask, 2*radius + max(abs(offset_x), abs(offset_y)), mode='constant', constant_values=0) + alpha = mask.astype(np.float32) / 255 + alpha = scipy.ndimage.gaussian_filter(alpha, radius) + alpha = 1 - np.maximum(1 - alpha * 1.5, 0) ** 1.4 + alpha = (alpha * 255 + 0.5).clip(0, 255).astype(np.uint8) + alpha = np.pad(alpha, [(offset_y, 0), (offset_x, 0)], mode='constant')[:-offset_y, :-offset_x] + alpha = np.maximum(alpha, mask) + return np.stack([mask, alpha], axis=-1) + +#---------------------------------------------------------------------------- + +@functools.lru_cache(maxsize=10000) +def get_texture(string, bilinear=True, mipmap=True, **kwargs): + return gl_utils.Texture(image=get_array(string, **kwargs), bilinear=bilinear, mipmap=mipmap) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/legacy.py b/ThirdParty/eg3d/legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..f30944a15c8f7da114c3b1d94da8c31b1ed13ae8 --- /dev/null +++ b/ThirdParty/eg3d/legacy.py @@ -0,0 +1,325 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Converting legacy network pickle into the new format.""" + +import click +import pickle +import re +import copy +import numpy as np +import torch +import dnnlib +from torch_utils import misc + +#---------------------------------------------------------------------------- + +def load_network_pkl(f, force_fp16=False): + data = _LegacyUnpickler(f).load() + + # Legacy TensorFlow pickle => convert. + if isinstance(data, tuple) and len(data) == 3 and all(isinstance(net, _TFNetworkStub) for net in data): + tf_G, tf_D, tf_Gs = data + G = convert_tf_generator(tf_G) + D = convert_tf_discriminator(tf_D) + G_ema = convert_tf_generator(tf_Gs) + data = dict(G=G, D=D, G_ema=G_ema) + + # Add missing fields. 
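+    # Older pickles may predate these keys; default them here so callers can rely on
+    # data['training_set_kwargs'] and data['augment_pipe'] always being present
+    # (possibly None), which the validation block below also checks.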
+ if 'training_set_kwargs' not in data: + data['training_set_kwargs'] = None + if 'augment_pipe' not in data: + data['augment_pipe'] = None + + # Validate contents. + assert isinstance(data['G'], torch.nn.Module) + assert isinstance(data['D'], torch.nn.Module) + assert isinstance(data['G_ema'], torch.nn.Module) + assert isinstance(data['training_set_kwargs'], (dict, type(None))) + assert isinstance(data['augment_pipe'], (torch.nn.Module, type(None))) + + # Force FP16. + if force_fp16: + for key in ['G', 'D', 'G_ema']: + old = data[key] + kwargs = copy.deepcopy(old.init_kwargs) + fp16_kwargs = kwargs.get('synthesis_kwargs', kwargs) + fp16_kwargs.num_fp16_res = 4 + fp16_kwargs.conv_clamp = 256 + if kwargs != old.init_kwargs: + new = type(old)(**kwargs).eval().requires_grad_(False) + misc.copy_params_and_buffers(old, new, require_all=True) + data[key] = new + return data + +#---------------------------------------------------------------------------- + +class _TFNetworkStub(dnnlib.EasyDict): + pass + +class _LegacyUnpickler(pickle.Unpickler): + def find_class(self, module, name): + if module == 'dnnlib.tflib.network' and name == 'Network': + return _TFNetworkStub + return super().find_class(module, name) + +#---------------------------------------------------------------------------- + +def _collect_tf_params(tf_net): + # pylint: disable=protected-access + tf_params = dict() + def recurse(prefix, tf_net): + for name, value in tf_net.variables: + tf_params[prefix + name] = value + for name, comp in tf_net.components.items(): + recurse(prefix + name + '/', comp) + recurse('', tf_net) + return tf_params + +#---------------------------------------------------------------------------- + +def _populate_module_params(module, *patterns): + for name, tensor in misc.named_params_and_buffers(module): + found = False + value = None + for pattern, value_fn in zip(patterns[0::2], patterns[1::2]): + match = re.fullmatch(pattern, name) + if match: + found = True + if value_fn is not None: + value = value_fn(*match.groups()) + break + try: + assert found + if value is not None: + tensor.copy_(torch.from_numpy(np.array(value))) + except: + print(name, list(tensor.shape)) + raise + +#---------------------------------------------------------------------------- + +def convert_tf_generator(tf_G): + if tf_G.version < 4: + raise ValueError('TensorFlow pickle version too low') + + # Collect kwargs. + tf_kwargs = tf_G.static_kwargs + known_kwargs = set() + def kwarg(tf_name, default=None, none=None): + known_kwargs.add(tf_name) + val = tf_kwargs.get(tf_name, default) + return val if val is not None else none + + # Convert kwargs. 
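+    # The block below translates TensorFlow static_kwargs into constructor arguments for
+    # the PyTorch networks_stylegan2.Generator. Training-only TF kwargs (truncation,
+    # style mixing, etc.) are read but ignored further down, so that any genuinely
+    # unknown kwarg still trips the ValueError check.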
+ from training import networks_stylegan2 + network_class = networks_stylegan2.Generator + kwargs = dnnlib.EasyDict( + z_dim = kwarg('latent_size', 512), + c_dim = kwarg('label_size', 0), + w_dim = kwarg('dlatent_size', 512), + img_resolution = kwarg('resolution', 1024), + img_channels = kwarg('num_channels', 3), + channel_base = kwarg('fmap_base', 16384) * 2, + channel_max = kwarg('fmap_max', 512), + num_fp16_res = kwarg('num_fp16_res', 0), + conv_clamp = kwarg('conv_clamp', None), + architecture = kwarg('architecture', 'skip'), + resample_filter = kwarg('resample_kernel', [1,3,3,1]), + use_noise = kwarg('use_noise', True), + activation = kwarg('nonlinearity', 'lrelu'), + mapping_kwargs = dnnlib.EasyDict( + num_layers = kwarg('mapping_layers', 8), + embed_features = kwarg('label_fmaps', None), + layer_features = kwarg('mapping_fmaps', None), + activation = kwarg('mapping_nonlinearity', 'lrelu'), + lr_multiplier = kwarg('mapping_lrmul', 0.01), + w_avg_beta = kwarg('w_avg_beta', 0.995, none=1), + ), + ) + + # Check for unknown kwargs. + kwarg('truncation_psi') + kwarg('truncation_cutoff') + kwarg('style_mixing_prob') + kwarg('structure') + kwarg('conditioning') + kwarg('fused_modconv') + unknown_kwargs = list(set(tf_kwargs.keys()) - known_kwargs) + if len(unknown_kwargs) > 0: + raise ValueError('Unknown TensorFlow kwarg', unknown_kwargs[0]) + + # Collect params. + tf_params = _collect_tf_params(tf_G) + for name, value in list(tf_params.items()): + match = re.fullmatch(r'ToRGB_lod(\d+)/(.*)', name) + if match: + r = kwargs.img_resolution // (2 ** int(match.group(1))) + tf_params[f'{r}x{r}/ToRGB/{match.group(2)}'] = value + kwargs.synthesis.kwargs.architecture = 'orig' + #for name, value in tf_params.items(): print(f'{name:<50s}{list(value.shape)}') + + # Convert params. 
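+    # _populate_module_params matches each PyTorch parameter/buffer name against the
+    # regex patterns below and copies in the corresponding TF tensor. Convolution
+    # weights are transposed from TF's [kh, kw, in, out] layout to PyTorch's
+    # [out, in, kh, kw], and patterns mapped to None (resample/act filters) are
+    # deliberately left at their freshly initialized defaults.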
+ G = network_class(**kwargs).eval().requires_grad_(False) + # pylint: disable=unnecessary-lambda + # pylint: disable=f-string-without-interpolation + _populate_module_params(G, + r'mapping\.w_avg', lambda: tf_params[f'dlatent_avg'], + r'mapping\.embed\.weight', lambda: tf_params[f'mapping/LabelEmbed/weight'].transpose(), + r'mapping\.embed\.bias', lambda: tf_params[f'mapping/LabelEmbed/bias'], + r'mapping\.fc(\d+)\.weight', lambda i: tf_params[f'mapping/Dense{i}/weight'].transpose(), + r'mapping\.fc(\d+)\.bias', lambda i: tf_params[f'mapping/Dense{i}/bias'], + r'synthesis\.b4\.const', lambda: tf_params[f'synthesis/4x4/Const/const'][0], + r'synthesis\.b4\.conv1\.weight', lambda: tf_params[f'synthesis/4x4/Conv/weight'].transpose(3, 2, 0, 1), + r'synthesis\.b4\.conv1\.bias', lambda: tf_params[f'synthesis/4x4/Conv/bias'], + r'synthesis\.b4\.conv1\.noise_const', lambda: tf_params[f'synthesis/noise0'][0, 0], + r'synthesis\.b4\.conv1\.noise_strength', lambda: tf_params[f'synthesis/4x4/Conv/noise_strength'], + r'synthesis\.b4\.conv1\.affine\.weight', lambda: tf_params[f'synthesis/4x4/Conv/mod_weight'].transpose(), + r'synthesis\.b4\.conv1\.affine\.bias', lambda: tf_params[f'synthesis/4x4/Conv/mod_bias'] + 1, + r'synthesis\.b(\d+)\.conv0\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/weight'][::-1, ::-1].transpose(3, 2, 0, 1), + r'synthesis\.b(\d+)\.conv0\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/bias'], + r'synthesis\.b(\d+)\.conv0\.noise_const', lambda r: tf_params[f'synthesis/noise{int(np.log2(int(r)))*2-5}'][0, 0], + r'synthesis\.b(\d+)\.conv0\.noise_strength', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/noise_strength'], + r'synthesis\.b(\d+)\.conv0\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/mod_weight'].transpose(), + r'synthesis\.b(\d+)\.conv0\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv0_up/mod_bias'] + 1, + r'synthesis\.b(\d+)\.conv1\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/weight'].transpose(3, 2, 0, 1), + r'synthesis\.b(\d+)\.conv1\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/bias'], + r'synthesis\.b(\d+)\.conv1\.noise_const', lambda r: tf_params[f'synthesis/noise{int(np.log2(int(r)))*2-4}'][0, 0], + r'synthesis\.b(\d+)\.conv1\.noise_strength', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/noise_strength'], + r'synthesis\.b(\d+)\.conv1\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/mod_weight'].transpose(), + r'synthesis\.b(\d+)\.conv1\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/Conv1/mod_bias'] + 1, + r'synthesis\.b(\d+)\.torgb\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/weight'].transpose(3, 2, 0, 1), + r'synthesis\.b(\d+)\.torgb\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/bias'], + r'synthesis\.b(\d+)\.torgb\.affine\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/mod_weight'].transpose(), + r'synthesis\.b(\d+)\.torgb\.affine\.bias', lambda r: tf_params[f'synthesis/{r}x{r}/ToRGB/mod_bias'] + 1, + r'synthesis\.b(\d+)\.skip\.weight', lambda r: tf_params[f'synthesis/{r}x{r}/Skip/weight'][::-1, ::-1].transpose(3, 2, 0, 1), + r'.*\.resample_filter', None, + r'.*\.act_filter', None, + ) + return G + +#---------------------------------------------------------------------------- + +def convert_tf_discriminator(tf_D): + if tf_D.version < 4: + raise ValueError('TensorFlow pickle version too low') + + # Collect kwargs. 
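+    # Same recipe as convert_tf_generator: record every kwarg that gets read so the
+    # unknown-kwarg check further down can flag anything this converter does not handle.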
+ tf_kwargs = tf_D.static_kwargs + known_kwargs = set() + def kwarg(tf_name, default=None): + known_kwargs.add(tf_name) + return tf_kwargs.get(tf_name, default) + + # Convert kwargs. + kwargs = dnnlib.EasyDict( + c_dim = kwarg('label_size', 0), + img_resolution = kwarg('resolution', 1024), + img_channels = kwarg('num_channels', 3), + architecture = kwarg('architecture', 'resnet'), + channel_base = kwarg('fmap_base', 16384) * 2, + channel_max = kwarg('fmap_max', 512), + num_fp16_res = kwarg('num_fp16_res', 0), + conv_clamp = kwarg('conv_clamp', None), + cmap_dim = kwarg('mapping_fmaps', None), + block_kwargs = dnnlib.EasyDict( + activation = kwarg('nonlinearity', 'lrelu'), + resample_filter = kwarg('resample_kernel', [1,3,3,1]), + freeze_layers = kwarg('freeze_layers', 0), + ), + mapping_kwargs = dnnlib.EasyDict( + num_layers = kwarg('mapping_layers', 0), + embed_features = kwarg('mapping_fmaps', None), + layer_features = kwarg('mapping_fmaps', None), + activation = kwarg('nonlinearity', 'lrelu'), + lr_multiplier = kwarg('mapping_lrmul', 0.1), + ), + epilogue_kwargs = dnnlib.EasyDict( + mbstd_group_size = kwarg('mbstd_group_size', None), + mbstd_num_channels = kwarg('mbstd_num_features', 1), + activation = kwarg('nonlinearity', 'lrelu'), + ), + ) + + # Check for unknown kwargs. + kwarg('structure') + kwarg('conditioning') + unknown_kwargs = list(set(tf_kwargs.keys()) - known_kwargs) + if len(unknown_kwargs) > 0: + raise ValueError('Unknown TensorFlow kwarg', unknown_kwargs[0]) + + # Collect params. + tf_params = _collect_tf_params(tf_D) + for name, value in list(tf_params.items()): + match = re.fullmatch(r'FromRGB_lod(\d+)/(.*)', name) + if match: + r = kwargs.img_resolution // (2 ** int(match.group(1))) + tf_params[f'{r}x{r}/FromRGB/{match.group(2)}'] = value + kwargs.architecture = 'orig' + #for name, value in tf_params.items(): print(f'{name:<50s}{list(value.shape)}') + + # Convert params. 
+ from training import networks_stylegan2 + D = networks_stylegan2.Discriminator(**kwargs).eval().requires_grad_(False) + # pylint: disable=unnecessary-lambda + # pylint: disable=f-string-without-interpolation + _populate_module_params(D, + r'b(\d+)\.fromrgb\.weight', lambda r: tf_params[f'{r}x{r}/FromRGB/weight'].transpose(3, 2, 0, 1), + r'b(\d+)\.fromrgb\.bias', lambda r: tf_params[f'{r}x{r}/FromRGB/bias'], + r'b(\d+)\.conv(\d+)\.weight', lambda r, i: tf_params[f'{r}x{r}/Conv{i}{["","_down"][int(i)]}/weight'].transpose(3, 2, 0, 1), + r'b(\d+)\.conv(\d+)\.bias', lambda r, i: tf_params[f'{r}x{r}/Conv{i}{["","_down"][int(i)]}/bias'], + r'b(\d+)\.skip\.weight', lambda r: tf_params[f'{r}x{r}/Skip/weight'].transpose(3, 2, 0, 1), + r'mapping\.embed\.weight', lambda: tf_params[f'LabelEmbed/weight'].transpose(), + r'mapping\.embed\.bias', lambda: tf_params[f'LabelEmbed/bias'], + r'mapping\.fc(\d+)\.weight', lambda i: tf_params[f'Mapping{i}/weight'].transpose(), + r'mapping\.fc(\d+)\.bias', lambda i: tf_params[f'Mapping{i}/bias'], + r'b4\.conv\.weight', lambda: tf_params[f'4x4/Conv/weight'].transpose(3, 2, 0, 1), + r'b4\.conv\.bias', lambda: tf_params[f'4x4/Conv/bias'], + r'b4\.fc\.weight', lambda: tf_params[f'4x4/Dense0/weight'].transpose(), + r'b4\.fc\.bias', lambda: tf_params[f'4x4/Dense0/bias'], + r'b4\.out\.weight', lambda: tf_params[f'Output/weight'].transpose(), + r'b4\.out\.bias', lambda: tf_params[f'Output/bias'], + r'.*\.resample_filter', None, + ) + return D + +#---------------------------------------------------------------------------- + +@click.command() +@click.option('--source', help='Input pickle', required=True, metavar='PATH') +@click.option('--dest', help='Output pickle', required=True, metavar='PATH') +@click.option('--force-fp16', help='Force the networks to use FP16', type=bool, default=False, metavar='BOOL', show_default=True) +def convert_network_pickle(source, dest, force_fp16): + """Convert legacy network pickle into the native PyTorch format. + + The tool is able to load the main network configurations exported using the TensorFlow version of StyleGAN2 or StyleGAN2-ADA. + It does not support e.g. StyleGAN2-ADA comparison methods, StyleGAN2 configs A-D, or StyleGAN1 networks. + + Example: + + \b + python legacy.py \\ + --source=https://nvlabs-fi-cdn.nvidia.com/stylegan2/networks/stylegan2-cat-config-f.pkl \\ + --dest=stylegan2-cat-config-f.pkl + """ + print(f'Loading "{source}"...') + with dnnlib.util.open_url(source) as f: + data = load_network_pkl(f, force_fp16=force_fp16) + print(f'Saving "{dest}"...') + with open(dest, 'wb') as f: + pickle.dump(data, f) + print('Done.') + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + convert_network_pickle() # pylint: disable=no-value-for-parameter + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/__init__.py b/ThirdParty/eg3d/metrics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/metrics/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +# empty diff --git a/ThirdParty/eg3d/metrics/equivariance.py b/ThirdParty/eg3d/metrics/equivariance.py new file mode 100644 index 0000000000000000000000000000000000000000..4609296593dd60cf0a1afa28ae4abb17d5b23576 --- /dev/null +++ b/ThirdParty/eg3d/metrics/equivariance.py @@ -0,0 +1,269 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Equivariance metrics (EQ-T, EQ-T_frac, and EQ-R) from the paper +"Alias-Free Generative Adversarial Networks".""" + +import copy +import numpy as np +import torch +import torch.fft +from torch_utils.ops import upfirdn2d +from . import metric_utils + +#---------------------------------------------------------------------------- +# Utilities. + +def sinc(x): + y = (x * np.pi).abs() + z = torch.sin(y) / y.clamp(1e-30, float('inf')) + return torch.where(y < 1e-30, torch.ones_like(x), z) + +def lanczos_window(x, a): + x = x.abs() / a + return torch.where(x < 1, sinc(x), torch.zeros_like(x)) + +def rotation_matrix(angle): + angle = torch.as_tensor(angle).to(torch.float32) + mat = torch.eye(3, device=angle.device) + mat[0, 0] = angle.cos() + mat[0, 1] = angle.sin() + mat[1, 0] = -angle.sin() + mat[1, 1] = angle.cos() + return mat + +#---------------------------------------------------------------------------- +# Apply integer translation to a batch of 2D images. Corresponds to the +# operator T_x in Appendix E.1. + +def apply_integer_translation(x, tx, ty): + _N, _C, H, W = x.shape + tx = torch.as_tensor(tx * W).to(dtype=torch.float32, device=x.device) + ty = torch.as_tensor(ty * H).to(dtype=torch.float32, device=x.device) + ix = tx.round().to(torch.int64) + iy = ty.round().to(torch.int64) + + z = torch.zeros_like(x) + m = torch.zeros_like(x) + if abs(ix) < W and abs(iy) < H: + y = x[:, :, max(-iy,0) : H+min(-iy,0), max(-ix,0) : W+min(-ix,0)] + z[:, :, max(iy,0) : H+min(iy,0), max(ix,0) : W+min(ix,0)] = y + m[:, :, max(iy,0) : H+min(iy,0), max(ix,0) : W+min(ix,0)] = 1 + return z, m + +#---------------------------------------------------------------------------- +# Apply integer translation to a batch of 2D images. Corresponds to the +# operator T_x in Appendix E.2. 
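+# (The function below performs *fractional*, i.e. sub-pixel, translation: the integer
+# part of the shift is applied as an index offset and the remaining fraction is
+# realized with a separable windowed-sinc filter.)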
+ +def apply_fractional_translation(x, tx, ty, a=3): + _N, _C, H, W = x.shape + tx = torch.as_tensor(tx * W).to(dtype=torch.float32, device=x.device) + ty = torch.as_tensor(ty * H).to(dtype=torch.float32, device=x.device) + ix = tx.floor().to(torch.int64) + iy = ty.floor().to(torch.int64) + fx = tx - ix + fy = ty - iy + b = a - 1 + + z = torch.zeros_like(x) + zx0 = max(ix - b, 0) + zy0 = max(iy - b, 0) + zx1 = min(ix + a, 0) + W + zy1 = min(iy + a, 0) + H + if zx0 < zx1 and zy0 < zy1: + taps = torch.arange(a * 2, device=x.device) - b + filter_x = (sinc(taps - fx) * sinc((taps - fx) / a)).unsqueeze(0) + filter_y = (sinc(taps - fy) * sinc((taps - fy) / a)).unsqueeze(1) + y = x + y = upfirdn2d.filter2d(y, filter_x / filter_x.sum(), padding=[b,a,0,0]) + y = upfirdn2d.filter2d(y, filter_y / filter_y.sum(), padding=[0,0,b,a]) + y = y[:, :, max(b-iy,0) : H+b+a+min(-iy-a,0), max(b-ix,0) : W+b+a+min(-ix-a,0)] + z[:, :, zy0:zy1, zx0:zx1] = y + + m = torch.zeros_like(x) + mx0 = max(ix + a, 0) + my0 = max(iy + a, 0) + mx1 = min(ix - b, 0) + W + my1 = min(iy - b, 0) + H + if mx0 < mx1 and my0 < my1: + m[:, :, my0:my1, mx0:mx1] = 1 + return z, m + +#---------------------------------------------------------------------------- +# Construct an oriented low-pass filter that applies the appropriate +# bandlimit with respect to the input and output of the given affine 2D +# image transformation. + +def construct_affine_bandlimit_filter(mat, a=3, amax=16, aflt=64, up=4, cutoff_in=1, cutoff_out=1): + assert a <= amax < aflt + mat = torch.as_tensor(mat).to(torch.float32) + + # Construct 2D filter taps in input & output coordinate spaces. + taps = ((torch.arange(aflt * up * 2 - 1, device=mat.device) + 1) / up - aflt).roll(1 - aflt * up) + yi, xi = torch.meshgrid(taps, taps) + xo, yo = (torch.stack([xi, yi], dim=2) @ mat[:2, :2].t()).unbind(2) + + # Convolution of two oriented 2D sinc filters. + fi = sinc(xi * cutoff_in) * sinc(yi * cutoff_in) + fo = sinc(xo * cutoff_out) * sinc(yo * cutoff_out) + f = torch.fft.ifftn(torch.fft.fftn(fi) * torch.fft.fftn(fo)).real + + # Convolution of two oriented 2D Lanczos windows. + wi = lanczos_window(xi, a) * lanczos_window(yi, a) + wo = lanczos_window(xo, a) * lanczos_window(yo, a) + w = torch.fft.ifftn(torch.fft.fftn(wi) * torch.fft.fftn(wo)).real + + # Construct windowed FIR filter. + f = f * w + + # Finalize. + c = (aflt - amax) * up + f = f.roll([aflt * up - 1] * 2, dims=[0,1])[c:-c, c:-c] + f = torch.nn.functional.pad(f, [0, 1, 0, 1]).reshape(amax * 2, up, amax * 2, up) + f = f / f.sum([0,2], keepdim=True) / (up ** 2) + f = f.reshape(amax * 2 * up, amax * 2 * up)[:-1, :-1] + return f + +#---------------------------------------------------------------------------- +# Apply the given affine transformation to a batch of 2D images. + +def apply_affine_transformation(x, mat, up=4, **filter_kwargs): + _N, _C, H, W = x.shape + mat = torch.as_tensor(mat).to(dtype=torch.float32, device=x.device) + + # Construct filter. + f = construct_affine_bandlimit_filter(mat, up=up, **filter_kwargs) + assert f.ndim == 2 and f.shape[0] == f.shape[1] and f.shape[0] % 2 == 1 + p = f.shape[0] // 2 + + # Construct sampling grid. + theta = mat.inverse() + theta[:2, 2] *= 2 + theta[0, 2] += 1 / up / W + theta[1, 2] += 1 / up / H + theta[0, :] *= W / (W + p / up * 2) + theta[1, :] *= H / (H + p / up * 2) + theta = theta[:2, :3].unsqueeze(0).repeat([x.shape[0], 1, 1]) + g = torch.nn.functional.affine_grid(theta, x.shape, align_corners=False) + + # Resample image. 
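+ # Upsample by `up` with the bandlimiting filter, then sample the affine-transformed
+ # grid bilinearly at the higher resolution so that interpolation error stays small.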
+ y = upfirdn2d.upsample2d(x=x, f=f, up=up, padding=p) + z = torch.nn.functional.grid_sample(y, g, mode='bilinear', padding_mode='zeros', align_corners=False) + + # Form mask. + m = torch.zeros_like(y) + c = p * 2 + 1 + m[:, :, c:-c, c:-c] = 1 + m = torch.nn.functional.grid_sample(m, g, mode='nearest', padding_mode='zeros', align_corners=False) + return z, m + +#---------------------------------------------------------------------------- +# Apply fractional rotation to a batch of 2D images. Corresponds to the +# operator R_\alpha in Appendix E.3. + +def apply_fractional_rotation(x, angle, a=3, **filter_kwargs): + angle = torch.as_tensor(angle).to(dtype=torch.float32, device=x.device) + mat = rotation_matrix(angle) + return apply_affine_transformation(x, mat, a=a, amax=a*2, **filter_kwargs) + +#---------------------------------------------------------------------------- +# Modify the frequency content of a batch of 2D images as if they had undergo +# fractional rotation -- but without actually rotating them. Corresponds to +# the operator R^*_\alpha in Appendix E.3. + +def apply_fractional_pseudo_rotation(x, angle, a=3, **filter_kwargs): + angle = torch.as_tensor(angle).to(dtype=torch.float32, device=x.device) + mat = rotation_matrix(-angle) + f = construct_affine_bandlimit_filter(mat, a=a, amax=a*2, up=1, **filter_kwargs) + y = upfirdn2d.filter2d(x=x, f=f) + m = torch.zeros_like(y) + c = f.shape[0] // 2 + m[:, :, c:-c, c:-c] = 1 + return y, m + +#---------------------------------------------------------------------------- +# Compute the selected equivariance metrics for the given generator. + +def compute_equivariance_metrics(opts, num_samples, batch_size, translate_max=0.125, rotate_max=1, compute_eqt_int=False, compute_eqt_frac=False, compute_eqr=False): + assert compute_eqt_int or compute_eqt_frac or compute_eqr + + # Setup generator and labels. + G = copy.deepcopy(opts.G).eval().requires_grad_(False).to(opts.device) + I = torch.eye(3, device=opts.device) + M = getattr(getattr(getattr(G, 'synthesis', None), 'input', None), 'transform', None) + if M is None: + raise ValueError('Cannot compute equivariance metrics; the given generator does not support user-specified image transformations') + c_iter = metric_utils.iterate_random_labels(opts=opts, batch_size=batch_size) + + # Sampling loop. + sums = None + progress = opts.progress.sub(tag='eq sampling', num_items=num_samples) + for batch_start in range(0, num_samples, batch_size * opts.num_gpus): + progress.update(batch_start) + s = [] + + # Randomize noise buffers, if any. + for name, buf in G.named_buffers(): + if name.endswith('.noise_const'): + buf.copy_(torch.randn_like(buf)) + + # Run mapping network. + z = torch.randn([batch_size, G.z_dim], device=opts.device) + c = next(c_iter) + ws = G.mapping(z=z, c=c) + + # Generate reference image. + M[:] = I + orig = G.synthesis(ws=ws, noise_mode='const', **opts.G_kwargs) + + # Integer translation (EQ-T). + if compute_eqt_int: + t = (torch.rand(2, device=opts.device) * 2 - 1) * translate_max + t = (t * G.img_resolution).round() / G.img_resolution + M[:] = I + M[:2, 2] = -t + img = G.synthesis(ws=ws, noise_mode='const', **opts.G_kwargs) + ref, mask = apply_integer_translation(orig, t[0], t[1]) + s += [(ref - img).square() * mask, mask] + + # Fractional translation (EQ-T_frac). 
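+ # Same procedure as the integer case above, except that the offset is not snapped
+ # to the pixel grid and the reference is produced with apply_fractional_translation().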
+ if compute_eqt_frac: + t = (torch.rand(2, device=opts.device) * 2 - 1) * translate_max + M[:] = I + M[:2, 2] = -t + img = G.synthesis(ws=ws, noise_mode='const', **opts.G_kwargs) + ref, mask = apply_fractional_translation(orig, t[0], t[1]) + s += [(ref - img).square() * mask, mask] + + # Rotation (EQ-R). + if compute_eqr: + angle = (torch.rand([], device=opts.device) * 2 - 1) * (rotate_max * np.pi) + M[:] = rotation_matrix(-angle) + img = G.synthesis(ws=ws, noise_mode='const', **opts.G_kwargs) + ref, ref_mask = apply_fractional_rotation(orig, angle) + pseudo, pseudo_mask = apply_fractional_pseudo_rotation(img, angle) + mask = ref_mask * pseudo_mask + s += [(ref - pseudo).square() * mask, mask] + + # Accumulate results. + s = torch.stack([x.to(torch.float64).sum() for x in s]) + sums = sums + s if sums is not None else s + progress.update(num_samples) + + # Compute PSNRs. + if opts.num_gpus > 1: + torch.distributed.all_reduce(sums) + sums = sums.cpu() + mses = sums[0::2] / sums[1::2] + psnrs = np.log10(2) * 20 - mses.log10() * 10 + psnrs = tuple(psnrs.numpy()) + return psnrs[0] if len(psnrs) == 1 else psnrs + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/frechet_inception_distance.py b/ThirdParty/eg3d/metrics/frechet_inception_distance.py new file mode 100644 index 0000000000000000000000000000000000000000..c2944eb21dbb88d2f383991ff88f557513b38168 --- /dev/null +++ b/ThirdParty/eg3d/metrics/frechet_inception_distance.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Frechet Inception Distance (FID) from the paper +"GANs trained by a two time-scale update rule converge to a local Nash +equilibrium". Matches the original implementation by Heusel et al. at +https://github.com/bioinf-jku/TTUR/blob/master/fid.py""" + +import numpy as np +import scipy.linalg +from . import metric_utils + +#---------------------------------------------------------------------------- + +def compute_fid(opts, max_real, num_gen): + # Direct TorchScript translation of http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz + detector_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/inception-2015-12-05.pkl' + detector_kwargs = dict(return_features=True) # Return raw features before the softmax layer. 
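+ # FID as computed below: ||mu_g - mu_r||^2 + Tr(Sigma_g + Sigma_r - 2 * (Sigma_g Sigma_r)^(1/2)),
+ # using the Inception feature mean and covariance of the real and generated images.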
+ + mu_real, sigma_real = metric_utils.compute_feature_stats_for_dataset( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=0, capture_mean_cov=True, max_items=max_real).get_mean_cov() + + mu_gen, sigma_gen = metric_utils.compute_feature_stats_for_generator( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=1, capture_mean_cov=True, max_items=num_gen).get_mean_cov() + + if opts.rank != 0: + return float('nan') + + m = np.square(mu_gen - mu_real).sum() + s, _ = scipy.linalg.sqrtm(np.dot(sigma_gen, sigma_real), disp=False) # pylint: disable=no-member + fid = np.real(m + np.trace(sigma_gen + sigma_real - s * 2)) + return float(fid) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/inception_score.py b/ThirdParty/eg3d/metrics/inception_score.py new file mode 100644 index 0000000000000000000000000000000000000000..1e5e247280f76471819550295bf2fc5ea3f7b42e --- /dev/null +++ b/ThirdParty/eg3d/metrics/inception_score.py @@ -0,0 +1,40 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Inception Score (IS) from the paper "Improved techniques for training +GANs". Matches the original implementation by Salimans et al. at +https://github.com/openai/improved-gan/blob/master/inception_score/model.py""" + +import numpy as np +from . import metric_utils + +#---------------------------------------------------------------------------- + +def compute_is(opts, num_gen, num_splits): + # Direct TorchScript translation of http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz + detector_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/inception-2015-12-05.pkl' + detector_kwargs = dict(no_output_bias=True) # Match the original implementation by not applying bias in the softmax layer. + + gen_probs = metric_utils.compute_feature_stats_for_generator( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + capture_all=True, max_items=num_gen).get_all() + + if opts.rank != 0: + return float('nan'), float('nan') + + scores = [] + for i in range(num_splits): + part = gen_probs[i * num_gen // num_splits : (i + 1) * num_gen // num_splits] + kl = part * (np.log(part) - np.log(np.mean(part, axis=0, keepdims=True))) + kl = np.mean(np.sum(kl, axis=1)) + scores.append(np.exp(kl)) + return float(np.mean(scores)), float(np.std(scores)) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/kernel_inception_distance.py b/ThirdParty/eg3d/metrics/kernel_inception_distance.py new file mode 100644 index 0000000000000000000000000000000000000000..48906eba23a7d29ba912b7d209f83fba6d0b9f37 --- /dev/null +++ b/ThirdParty/eg3d/metrics/kernel_inception_distance.py @@ -0,0 +1,48 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Kernel Inception Distance (KID) from the paper "Demystifying MMD +GANs". Matches the original implementation by Binkowski et al. at +https://github.com/mbinkowski/MMD-GAN/blob/master/gan/compute_scores.py""" + +import numpy as np +from . import metric_utils + +#---------------------------------------------------------------------------- + +def compute_kid(opts, max_real, num_gen, num_subsets, max_subset_size): + # Direct TorchScript translation of http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz + detector_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/inception-2015-12-05.pkl' + detector_kwargs = dict(return_features=True) # Return raw features before the softmax layer. + + real_features = metric_utils.compute_feature_stats_for_dataset( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=0, capture_all=True, max_items=max_real).get_all() + + gen_features = metric_utils.compute_feature_stats_for_generator( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=1, capture_all=True, max_items=num_gen).get_all() + + if opts.rank != 0: + return float('nan') + + n = real_features.shape[1] + m = min(min(real_features.shape[0], gen_features.shape[0]), max_subset_size) + t = 0 + for _subset_idx in range(num_subsets): + x = gen_features[np.random.choice(gen_features.shape[0], m, replace=False)] + y = real_features[np.random.choice(real_features.shape[0], m, replace=False)] + a = (x @ x.T / n + 1) ** 3 + (y @ y.T / n + 1) ** 3 + b = (x @ y.T / n + 1) ** 3 + t += (a.sum() - np.diag(a).sum()) / (m - 1) - b.sum() * 2 / m + kid = t / num_subsets / m + return float(kid) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/metric_main.py b/ThirdParty/eg3d/metrics/metric_main.py new file mode 100644 index 0000000000000000000000000000000000000000..52318ee48a523f30e7eace0b62b936c7826ffc56 --- /dev/null +++ b/ThirdParty/eg3d/metrics/metric_main.py @@ -0,0 +1,155 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Main API for computing and reporting quality metrics.""" + +import os +import time +import json +import torch +import dnnlib + +from . import metric_utils +from . import frechet_inception_distance +from . import kernel_inception_distance +from . import precision_recall +from . import perceptual_path_length +from . import inception_score +from . 
import equivariance + +#---------------------------------------------------------------------------- + +_metric_dict = dict() # name => fn + +def register_metric(fn): + assert callable(fn) + _metric_dict[fn.__name__] = fn + return fn + +def is_valid_metric(metric): + return metric in _metric_dict + +def list_valid_metrics(): + return list(_metric_dict.keys()) + +#---------------------------------------------------------------------------- + +def calc_metric(metric, **kwargs): # See metric_utils.MetricOptions for the full list of arguments. + assert is_valid_metric(metric) + opts = metric_utils.MetricOptions(**kwargs) + + # Calculate. + start_time = time.time() + results = _metric_dict[metric](opts) + total_time = time.time() - start_time + + # Broadcast results. + for key, value in list(results.items()): + if opts.num_gpus > 1: + value = torch.as_tensor(value, dtype=torch.float64, device=opts.device) + torch.distributed.broadcast(tensor=value, src=0) + value = float(value.cpu()) + results[key] = value + + # Decorate with metadata. + return dnnlib.EasyDict( + results = dnnlib.EasyDict(results), + metric = metric, + total_time = total_time, + total_time_str = dnnlib.util.format_time(total_time), + num_gpus = opts.num_gpus, + ) + +#---------------------------------------------------------------------------- + +def report_metric(result_dict, run_dir=None, snapshot_pkl=None): + metric = result_dict['metric'] + assert is_valid_metric(metric) + if run_dir is not None and snapshot_pkl is not None: + snapshot_pkl = os.path.relpath(snapshot_pkl, run_dir) + + jsonl_line = json.dumps(dict(result_dict, snapshot_pkl=snapshot_pkl, timestamp=time.time())) + print(jsonl_line) + if run_dir is not None and os.path.isdir(run_dir): + with open(os.path.join(run_dir, f'metric-{metric}.jsonl'), 'at') as f: + f.write(jsonl_line + '\n') + +#---------------------------------------------------------------------------- +# Recommended metrics. 
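+# Naming convention: e.g. fid50k_full evaluates 50k generated images against the full
+# training set with mirror augmentation disabled (max_size=None, xflip=False), while the
+# legacy *50k variants further below also cap the number of real images at 50k.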
+ +@register_metric +def fid50k_full(opts): + opts.dataset_kwargs.update(max_size=None, xflip=False) + fid = frechet_inception_distance.compute_fid(opts, max_real=None, num_gen=50000) + return dict(fid50k_full=fid) + +@register_metric +def kid50k_full(opts): + opts.dataset_kwargs.update(max_size=None, xflip=False) + kid = kernel_inception_distance.compute_kid(opts, max_real=1000000, num_gen=50000, num_subsets=100, max_subset_size=1000) + return dict(kid50k_full=kid) + +@register_metric +def pr50k3_full(opts): + opts.dataset_kwargs.update(max_size=None, xflip=False) + precision, recall = precision_recall.compute_pr(opts, max_real=200000, num_gen=50000, nhood_size=3, row_batch_size=10000, col_batch_size=10000) + return dict(pr50k3_full_precision=precision, pr50k3_full_recall=recall) + +@register_metric +def ppl2_wend(opts): + ppl = perceptual_path_length.compute_ppl(opts, num_samples=50000, epsilon=1e-4, space='w', sampling='end', crop=False, batch_size=2) + return dict(ppl2_wend=ppl) + +@register_metric +def eqt50k_int(opts): + opts.G_kwargs.update(force_fp32=True) + psnr = equivariance.compute_equivariance_metrics(opts, num_samples=50000, batch_size=4, compute_eqt_int=True) + return dict(eqt50k_int=psnr) + +@register_metric +def eqt50k_frac(opts): + opts.G_kwargs.update(force_fp32=True) + psnr = equivariance.compute_equivariance_metrics(opts, num_samples=50000, batch_size=4, compute_eqt_frac=True) + return dict(eqt50k_frac=psnr) + +@register_metric +def eqr50k(opts): + opts.G_kwargs.update(force_fp32=True) + psnr = equivariance.compute_equivariance_metrics(opts, num_samples=50000, batch_size=4, compute_eqr=True) + return dict(eqr50k=psnr) + +#---------------------------------------------------------------------------- +# Legacy metrics. + +@register_metric +def fid50k(opts): + opts.dataset_kwargs.update(max_size=None) + fid = frechet_inception_distance.compute_fid(opts, max_real=50000, num_gen=50000) + return dict(fid50k=fid) + +@register_metric +def kid50k(opts): + opts.dataset_kwargs.update(max_size=None) + kid = kernel_inception_distance.compute_kid(opts, max_real=50000, num_gen=50000, num_subsets=100, max_subset_size=1000) + return dict(kid50k=kid) + +@register_metric +def pr50k3(opts): + opts.dataset_kwargs.update(max_size=None) + precision, recall = precision_recall.compute_pr(opts, max_real=50000, num_gen=50000, nhood_size=3, row_batch_size=10000, col_batch_size=10000) + return dict(pr50k3_precision=precision, pr50k3_recall=recall) + +@register_metric +def is50k(opts): + opts.dataset_kwargs.update(max_size=None, xflip=False) + mean, std = inception_score.compute_is(opts, num_gen=50000, num_splits=10) + return dict(is50k_mean=mean, is50k_std=std) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/metric_utils.py b/ThirdParty/eg3d/metrics/metric_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..212cb7d38fabf6c7b60c55a0fa0a07560ac602b2 --- /dev/null +++ b/ThirdParty/eg3d/metrics/metric_utils.py @@ -0,0 +1,281 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Miscellaneous utilities used internally by the quality metrics.""" + +import os +import time +import hashlib +import pickle +import copy +import uuid +import numpy as np +import torch +import dnnlib + +#---------------------------------------------------------------------------- + +class MetricOptions: + def __init__(self, G=None, G_kwargs={}, dataset_kwargs={}, num_gpus=1, rank=0, device=None, progress=None, cache=True): + assert 0 <= rank < num_gpus + self.G = G + self.G_kwargs = dnnlib.EasyDict(G_kwargs) + self.dataset_kwargs = dnnlib.EasyDict(dataset_kwargs) + self.num_gpus = num_gpus + self.rank = rank + self.device = device if device is not None else torch.device('cuda', rank) + self.progress = progress.sub() if progress is not None and rank == 0 else ProgressMonitor() + self.cache = cache + +#---------------------------------------------------------------------------- + +_feature_detector_cache = dict() + +def get_feature_detector_name(url): + return os.path.splitext(url.split('/')[-1])[0] + +def get_feature_detector(url, device=torch.device('cpu'), num_gpus=1, rank=0, verbose=False): + assert 0 <= rank < num_gpus + key = (url, device) + if key not in _feature_detector_cache: + is_leader = (rank == 0) + if not is_leader and num_gpus > 1: + torch.distributed.barrier() # leader goes first + with dnnlib.util.open_url(url, verbose=(verbose and is_leader)) as f: + _feature_detector_cache[key] = pickle.load(f).to(device) + if is_leader and num_gpus > 1: + torch.distributed.barrier() # others follow + return _feature_detector_cache[key] + +#---------------------------------------------------------------------------- + +def iterate_random_labels(opts, batch_size): + if opts.G.c_dim == 0: + c = torch.zeros([batch_size, opts.G.c_dim], device=opts.device) + while True: + yield c + else: + dataset = dnnlib.util.construct_class_by_name(**opts.dataset_kwargs) + while True: + c = [dataset.get_label(np.random.randint(len(dataset))) for _i in range(batch_size)] + c = torch.from_numpy(np.stack(c)).pin_memory().to(opts.device) + yield c + +#---------------------------------------------------------------------------- + +class FeatureStats: + def __init__(self, capture_all=False, capture_mean_cov=False, max_items=None): + self.capture_all = capture_all + self.capture_mean_cov = capture_mean_cov + self.max_items = max_items + self.num_items = 0 + self.num_features = None + self.all_features = None + self.raw_mean = None + self.raw_cov = None + + def set_num_features(self, num_features): + if self.num_features is not None: + assert num_features == self.num_features + else: + self.num_features = num_features + self.all_features = [] + self.raw_mean = np.zeros([num_features], dtype=np.float64) + self.raw_cov = np.zeros([num_features, num_features], dtype=np.float64) + + def is_full(self): + return (self.max_items is not None) and (self.num_items >= self.max_items) + + def append(self, x): + x = np.asarray(x, dtype=np.float32) + assert x.ndim == 2 + if (self.max_items is not None) and (self.num_items + x.shape[0] > self.max_items): + if self.num_items >= self.max_items: + return + x = x[:self.max_items - self.num_items] + + self.set_num_features(x.shape[1]) + self.num_items += x.shape[0] + if self.capture_all: + self.all_features.append(x) + if self.capture_mean_cov: + x64 = x.astype(np.float64) + 
self.raw_mean += x64.sum(axis=0) + self.raw_cov += x64.T @ x64 + + def append_torch(self, x, num_gpus=1, rank=0): + assert isinstance(x, torch.Tensor) and x.ndim == 2 + assert 0 <= rank < num_gpus + if num_gpus > 1: + ys = [] + for src in range(num_gpus): + y = x.clone() + torch.distributed.broadcast(y, src=src) + ys.append(y) + x = torch.stack(ys, dim=1).flatten(0, 1) # interleave samples + self.append(x.cpu().numpy()) + + def get_all(self): + assert self.capture_all + return np.concatenate(self.all_features, axis=0) + + def get_all_torch(self): + return torch.from_numpy(self.get_all()) + + def get_mean_cov(self): + assert self.capture_mean_cov + mean = self.raw_mean / self.num_items + cov = self.raw_cov / self.num_items + cov = cov - np.outer(mean, mean) + return mean, cov + + def save(self, pkl_file): + with open(pkl_file, 'wb') as f: + pickle.dump(self.__dict__, f) + + @staticmethod + def load(pkl_file): + with open(pkl_file, 'rb') as f: + s = dnnlib.EasyDict(pickle.load(f)) + obj = FeatureStats(capture_all=s.capture_all, max_items=s.max_items) + obj.__dict__.update(s) + return obj + +#---------------------------------------------------------------------------- + +class ProgressMonitor: + def __init__(self, tag=None, num_items=None, flush_interval=1000, verbose=False, progress_fn=None, pfn_lo=0, pfn_hi=1000, pfn_total=1000): + self.tag = tag + self.num_items = num_items + self.verbose = verbose + self.flush_interval = flush_interval + self.progress_fn = progress_fn + self.pfn_lo = pfn_lo + self.pfn_hi = pfn_hi + self.pfn_total = pfn_total + self.start_time = time.time() + self.batch_time = self.start_time + self.batch_items = 0 + if self.progress_fn is not None: + self.progress_fn(self.pfn_lo, self.pfn_total) + + def update(self, cur_items): + assert (self.num_items is None) or (cur_items <= self.num_items) + if (cur_items < self.batch_items + self.flush_interval) and (self.num_items is None or cur_items < self.num_items): + return + cur_time = time.time() + total_time = cur_time - self.start_time + time_per_item = (cur_time - self.batch_time) / max(cur_items - self.batch_items, 1) + if (self.verbose) and (self.tag is not None): + print(f'{self.tag:<19s} items {cur_items:<7d} time {dnnlib.util.format_time(total_time):<12s} ms/item {time_per_item*1e3:.2f}') + self.batch_time = cur_time + self.batch_items = cur_items + + if (self.progress_fn is not None) and (self.num_items is not None): + self.progress_fn(self.pfn_lo + (self.pfn_hi - self.pfn_lo) * (cur_items / self.num_items), self.pfn_total) + + def sub(self, tag=None, num_items=None, flush_interval=1000, rel_lo=0, rel_hi=1): + return ProgressMonitor( + tag = tag, + num_items = num_items, + flush_interval = flush_interval, + verbose = self.verbose, + progress_fn = self.progress_fn, + pfn_lo = self.pfn_lo + (self.pfn_hi - self.pfn_lo) * rel_lo, + pfn_hi = self.pfn_lo + (self.pfn_hi - self.pfn_lo) * rel_hi, + pfn_total = self.pfn_total, + ) + +#---------------------------------------------------------------------------- + +def compute_feature_stats_for_dataset(opts, detector_url, detector_kwargs, rel_lo=0, rel_hi=1, batch_size=64, data_loader_kwargs=None, max_items=None, **stats_kwargs): + dataset = dnnlib.util.construct_class_by_name(**opts.dataset_kwargs) + if data_loader_kwargs is None: + data_loader_kwargs = dict(pin_memory=True, num_workers=3, prefetch_factor=2) + + # Try to lookup from cache. + cache_file = None + if opts.cache: + # Choose cache file name. 
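+ # The cache tag combines the dataset name, the detector name and an MD5 of the
+ # dataset/detector/stats kwargs, so cached statistics are recomputed whenever any of them change.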
+ args = dict(dataset_kwargs=opts.dataset_kwargs, detector_url=detector_url, detector_kwargs=detector_kwargs, stats_kwargs=stats_kwargs) + md5 = hashlib.md5(repr(sorted(args.items())).encode('utf-8')) + cache_tag = f'{dataset.name}-{get_feature_detector_name(detector_url)}-{md5.hexdigest()}' + cache_file = dnnlib.make_cache_dir_path('gan-metrics', cache_tag + '.pkl') + + # Check if the file exists (all processes must agree). + flag = os.path.isfile(cache_file) if opts.rank == 0 else False + if opts.num_gpus > 1: + flag = torch.as_tensor(flag, dtype=torch.float32, device=opts.device) + torch.distributed.broadcast(tensor=flag, src=0) + flag = (float(flag.cpu()) != 0) + + # Load. + if flag: + return FeatureStats.load(cache_file) + + # Initialize. + num_items = len(dataset) + if max_items is not None: + num_items = min(num_items, max_items) + stats = FeatureStats(max_items=num_items, **stats_kwargs) + progress = opts.progress.sub(tag='dataset features', num_items=num_items, rel_lo=rel_lo, rel_hi=rel_hi) + detector = get_feature_detector(url=detector_url, device=opts.device, num_gpus=opts.num_gpus, rank=opts.rank, verbose=progress.verbose) + + # Main loop. + item_subset = [(i * opts.num_gpus + opts.rank) % num_items for i in range((num_items - 1) // opts.num_gpus + 1)] + for images, _labels in torch.utils.data.DataLoader(dataset=dataset, sampler=item_subset, batch_size=batch_size, **data_loader_kwargs): + if images.shape[1] == 1: + images = images.repeat([1, 3, 1, 1]) + features = detector(images.to(opts.device), **detector_kwargs) + stats.append_torch(features, num_gpus=opts.num_gpus, rank=opts.rank) + progress.update(stats.num_items) + + # Save to cache. + if cache_file is not None and opts.rank == 0: + os.makedirs(os.path.dirname(cache_file), exist_ok=True) + temp_file = cache_file + '.' + uuid.uuid4().hex + stats.save(temp_file) + os.replace(temp_file, cache_file) # atomic + return stats + +#---------------------------------------------------------------------------- + +def compute_feature_stats_for_generator(opts, detector_url, detector_kwargs, rel_lo=0, rel_hi=1, batch_size=64, batch_gen=None, **stats_kwargs): + if batch_gen is None: + batch_gen = min(batch_size, 4) + assert batch_size % batch_gen == 0 + + # Setup generator and labels. + G = copy.deepcopy(opts.G).eval().requires_grad_(False).to(opts.device) + c_iter = iterate_random_labels(opts=opts, batch_size=batch_gen) + + # Initialize. + stats = FeatureStats(**stats_kwargs) + assert stats.max_items is not None + progress = opts.progress.sub(tag='generator features', num_items=stats.max_items, rel_lo=rel_lo, rel_hi=rel_hi) + detector = get_feature_detector(url=detector_url, device=opts.device, num_gpus=opts.num_gpus, rank=opts.rank, verbose=progress.verbose) + + # Main loop. 
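+ # Synthesize images in chunks of `batch_gen`, map them from [-1, 1] to uint8 [0, 255],
+ # replicate grayscale to 3 channels, and feed them to the detector until `max_items`
+ # feature vectors have been collected.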
+ while not stats.is_full(): + images = [] + for _i in range(batch_size // batch_gen): + z = torch.randn([batch_gen, G.z_dim], device=opts.device) + img = G(z=z, c=next(c_iter), **opts.G_kwargs)['image'] + img = (img * 127.5 + 128).clamp(0, 255).to(torch.uint8) + images.append(img) + images = torch.cat(images) + if images.shape[1] == 1: + images = images.repeat([1, 3, 1, 1]) + features = detector(images, **detector_kwargs) + stats.append_torch(features, num_gpus=opts.num_gpus, rank=opts.rank) + progress.update(stats.num_items) + return stats + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/perceptual_path_length.py b/ThirdParty/eg3d/metrics/perceptual_path_length.py new file mode 100644 index 0000000000000000000000000000000000000000..5e58dac3317733e2ace6d64ee1f97cafa0a38225 --- /dev/null +++ b/ThirdParty/eg3d/metrics/perceptual_path_length.py @@ -0,0 +1,127 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Perceptual Path Length (PPL) from the paper "A Style-Based Generator +Architecture for Generative Adversarial Networks". Matches the original +implementation by Karras et al. at +https://github.com/NVlabs/stylegan/blob/master/metrics/perceptual_path_length.py""" + +import copy +import numpy as np +import torch +from . import metric_utils + +#---------------------------------------------------------------------------- + +# Spherical interpolation of a batch of vectors. +def slerp(a, b, t): + a = a / a.norm(dim=-1, keepdim=True) + b = b / b.norm(dim=-1, keepdim=True) + d = (a * b).sum(dim=-1, keepdim=True) + p = t * torch.acos(d) + c = b - d * a + c = c / c.norm(dim=-1, keepdim=True) + d = a * torch.cos(p) + c * torch.sin(p) + d = d / d.norm(dim=-1, keepdim=True) + return d + +#---------------------------------------------------------------------------- + +class PPLSampler(torch.nn.Module): + def __init__(self, G, G_kwargs, epsilon, space, sampling, crop, vgg16): + assert space in ['z', 'w'] + assert sampling in ['full', 'end'] + super().__init__() + self.G = copy.deepcopy(G) + self.G_kwargs = G_kwargs + self.epsilon = epsilon + self.space = space + self.sampling = sampling + self.crop = crop + self.vgg16 = copy.deepcopy(vgg16) + + def forward(self, c): + # Generate random latents and interpolation t-values. + t = torch.rand([c.shape[0]], device=c.device) * (1 if self.sampling == 'full' else 0) + z0, z1 = torch.randn([c.shape[0] * 2, self.G.z_dim], device=c.device).chunk(2) + + # Interpolate in W or Z. + if self.space == 'w': + w0, w1 = self.G.mapping(z=torch.cat([z0,z1]), c=torch.cat([c,c])).chunk(2) + wt0 = w0.lerp(w1, t.unsqueeze(1).unsqueeze(2)) + wt1 = w0.lerp(w1, t.unsqueeze(1).unsqueeze(2) + self.epsilon) + else: # space == 'z' + zt0 = slerp(z0, z1, t.unsqueeze(1)) + zt1 = slerp(z0, z1, t.unsqueeze(1) + self.epsilon) + wt0, wt1 = self.G.mapping(z=torch.cat([zt0,zt1]), c=torch.cat([c,c])).chunk(2) + + # Randomize noise buffers. 
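+ # Noise buffers are re-randomized once per batch; both endpoints of each interpolation
+ # pair are synthesized in the same call and therefore share the same noise, so the
+ # LPIPS difference reflects only the epsilon-sized latent perturbation.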
+ for name, buf in self.G.named_buffers(): + if name.endswith('.noise_const'): + buf.copy_(torch.randn_like(buf)) + + # Generate images. + img = self.G.synthesis(ws=torch.cat([wt0,wt1]), noise_mode='const', force_fp32=True, **self.G_kwargs) + + # Center crop. + if self.crop: + assert img.shape[2] == img.shape[3] + c = img.shape[2] // 8 + img = img[:, :, c*3 : c*7, c*2 : c*6] + + # Downsample to 256x256. + factor = self.G.img_resolution // 256 + if factor > 1: + img = img.reshape([-1, img.shape[1], img.shape[2] // factor, factor, img.shape[3] // factor, factor]).mean([3, 5]) + + # Scale dynamic range from [-1,1] to [0,255]. + img = (img + 1) * (255 / 2) + if self.G.img_channels == 1: + img = img.repeat([1, 3, 1, 1]) + + # Evaluate differential LPIPS. + lpips_t0, lpips_t1 = self.vgg16(img, resize_images=False, return_lpips=True).chunk(2) + dist = (lpips_t0 - lpips_t1).square().sum(1) / self.epsilon ** 2 + return dist + +#---------------------------------------------------------------------------- + +def compute_ppl(opts, num_samples, epsilon, space, sampling, crop, batch_size): + vgg16_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/vgg16.pkl' + vgg16 = metric_utils.get_feature_detector(vgg16_url, num_gpus=opts.num_gpus, rank=opts.rank, verbose=opts.progress.verbose) + + # Setup sampler and labels. + sampler = PPLSampler(G=opts.G, G_kwargs=opts.G_kwargs, epsilon=epsilon, space=space, sampling=sampling, crop=crop, vgg16=vgg16) + sampler.eval().requires_grad_(False).to(opts.device) + c_iter = metric_utils.iterate_random_labels(opts=opts, batch_size=batch_size) + + # Sampling loop. + dist = [] + progress = opts.progress.sub(tag='ppl sampling', num_items=num_samples) + for batch_start in range(0, num_samples, batch_size * opts.num_gpus): + progress.update(batch_start) + x = sampler(next(c_iter)) + for src in range(opts.num_gpus): + y = x.clone() + if opts.num_gpus > 1: + torch.distributed.broadcast(y, src=src) + dist.append(y) + progress.update(num_samples) + + # Compute PPL. + if opts.rank != 0: + return float('nan') + dist = torch.cat(dist)[:num_samples].cpu().numpy() + lo = np.percentile(dist, 1, interpolation='lower') + hi = np.percentile(dist, 99, interpolation='higher') + ppl = np.extract(np.logical_and(dist >= lo, dist <= hi), dist).mean() + return float(ppl) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/metrics/precision_recall.py b/ThirdParty/eg3d/metrics/precision_recall.py new file mode 100644 index 0000000000000000000000000000000000000000..e33e85f64de81fa211135edaf3863c2fe851a6f4 --- /dev/null +++ b/ThirdParty/eg3d/metrics/precision_recall.py @@ -0,0 +1,64 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Precision/Recall (PR) from the paper "Improved Precision and Recall +Metric for Assessing Generative Models". Matches the original implementation +by Kynkaanniemi et al. 
at +https://github.com/kynkaat/improved-precision-and-recall-metric/blob/master/precision_recall.py""" + +import torch +from . import metric_utils + +#---------------------------------------------------------------------------- + +def compute_distances(row_features, col_features, num_gpus, rank, col_batch_size): + assert 0 <= rank < num_gpus + num_cols = col_features.shape[0] + num_batches = ((num_cols - 1) // col_batch_size // num_gpus + 1) * num_gpus + col_batches = torch.nn.functional.pad(col_features, [0, 0, 0, -num_cols % num_batches]).chunk(num_batches) + dist_batches = [] + for col_batch in col_batches[rank :: num_gpus]: + dist_batch = torch.cdist(row_features.unsqueeze(0), col_batch.unsqueeze(0))[0] + for src in range(num_gpus): + dist_broadcast = dist_batch.clone() + if num_gpus > 1: + torch.distributed.broadcast(dist_broadcast, src=src) + dist_batches.append(dist_broadcast.cpu() if rank == 0 else None) + return torch.cat(dist_batches, dim=1)[:, :num_cols] if rank == 0 else None + +#---------------------------------------------------------------------------- + +def compute_pr(opts, max_real, num_gen, nhood_size, row_batch_size, col_batch_size): + detector_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/vgg16.pkl' + detector_kwargs = dict(return_features=True) + + real_features = metric_utils.compute_feature_stats_for_dataset( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=0, capture_all=True, max_items=max_real).get_all_torch().to(torch.float16).to(opts.device) + + gen_features = metric_utils.compute_feature_stats_for_generator( + opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, + rel_lo=0, rel_hi=1, capture_all=True, max_items=num_gen).get_all_torch().to(torch.float16).to(opts.device) + + results = dict() + for name, manifold, probes in [('precision', real_features, gen_features), ('recall', gen_features, real_features)]: + kth = [] + for manifold_batch in manifold.split(row_batch_size): + dist = compute_distances(row_features=manifold_batch, col_features=manifold, num_gpus=opts.num_gpus, rank=opts.rank, col_batch_size=col_batch_size) + kth.append(dist.to(torch.float32).kthvalue(nhood_size + 1).values.to(torch.float16) if opts.rank == 0 else None) + kth = torch.cat(kth) if opts.rank == 0 else None + pred = [] + for probes_batch in probes.split(row_batch_size): + dist = compute_distances(row_features=probes_batch, col_features=manifold, num_gpus=opts.num_gpus, rank=opts.rank, col_batch_size=col_batch_size) + pred.append((dist <= kth).any(dim=1) if opts.rank == 0 else None) + results[name] = float(torch.cat(pred).to(torch.float32).mean() if opts.rank == 0 else 'nan') + return results['precision'], results['recall'] + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/shape_utils.py b/ThirdParty/eg3d/shape_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e16f6cc82a59d9d3e455ba334abf68b576fdc10f --- /dev/null +++ b/ThirdParty/eg3d/shape_utils.py @@ -0,0 +1,124 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + + +""" +Utils for extracting 3D shapes using marching cubes. Based on code from DeepSDF (Park et al.) + +Takes as input an .mrc file and extracts a mesh. + +Ex. + python shape_utils.py my_shape.mrc +Ex. + python shape_utils.py myshapes_directory --level=12 +""" + + +import time +import plyfile +import glob +import logging +import numpy as np +import os +import random +import torch +import torch.utils.data +import trimesh +import skimage.measure +import argparse +import mrcfile +from tqdm import tqdm + + +def convert_sdf_samples_to_ply( + numpy_3d_sdf_tensor, + voxel_grid_origin, + voxel_size, + ply_filename_out, + offset=None, + scale=None, + level=0.0 +): + """ + Convert sdf samples to .ply + :param pytorch_3d_sdf_tensor: a torch.FloatTensor of shape (n,n,n) + :voxel_grid_origin: a list of three floats: the bottom, left, down origin of the voxel grid + :voxel_size: float, the size of the voxels + :ply_filename_out: string, path of the filename to save to + This function adapted from: https://github.com/RobotLocomotion/spartan + """ + start_time = time.time() + + verts, faces, normals, values = np.zeros((0, 3)), np.zeros((0, 3)), np.zeros((0, 3)), np.zeros(0) + # try: + verts, faces, normals, values = skimage.measure.marching_cubes( + numpy_3d_sdf_tensor, level=level, spacing=[voxel_size] * 3 + ) + # except: + # pass + + # transform from voxel coordinates to camera coordinates + # note x and y are flipped in the output of marching_cubes + mesh_points = np.zeros_like(verts) + mesh_points[:, 0] = voxel_grid_origin[0] + verts[:, 0] + mesh_points[:, 1] = voxel_grid_origin[1] + verts[:, 1] + mesh_points[:, 2] = voxel_grid_origin[2] + verts[:, 2] + + # apply additional offset and scale + if scale is not None: + mesh_points = mesh_points / scale + if offset is not None: + mesh_points = mesh_points - offset + + # try writing to the ply file + + num_verts = verts.shape[0] + num_faces = faces.shape[0] + + verts_tuple = np.zeros((num_verts,), dtype=[("x", "f4"), ("y", "f4"), ("z", "f4")]) + + for i in range(0, num_verts): + verts_tuple[i] = tuple(mesh_points[i, :]) + + faces_building = [] + for i in range(0, num_faces): + faces_building.append(((faces[i, :].tolist(),))) + faces_tuple = np.array(faces_building, dtype=[("vertex_indices", "i4", (3,))]) + + el_verts = plyfile.PlyElement.describe(verts_tuple, "vertex") + el_faces = plyfile.PlyElement.describe(faces_tuple, "face") + + ply_data = plyfile.PlyData([el_verts, el_faces]) + ply_data.write(ply_filename_out) + print(f"wrote to {ply_filename_out}") + + +def convert_mrc(input_filename, output_filename, isosurface_level=1): + with mrcfile.open(input_filename) as mrc: + convert_sdf_samples_to_ply(np.transpose(mrc.data, (2, 1, 0)), [0, 0, 0], 1, output_filename, level=isosurface_level) + +if __name__ == '__main__': + start_time = time.time() + parser = argparse.ArgumentParser() + parser.add_argument('input_mrc_path') + parser.add_argument('--level', type=float, default=10, help="The isosurface level for marching cubes") + args = parser.parse_args() + + if os.path.isfile(args.input_mrc_path) and args.input_mrc_path.split('.')[-1] == 'ply': + output_obj_path = args.input_mrc_path.split('.mrc')[0] + '.ply' + convert_mrc(args.input_mrc_path, output_obj_path, isosurface_level=1) + + print(f"{time.time() - start_time:02f} s") + else: + assert 
os.path.isdir(args.input_mrc_path) + + for mrc_path in tqdm(glob.glob(os.path.join(args.input_mrc_path, '*.mrc'))): + output_obj_path = mrc_path.split('.mrc')[0] + '.ply' + convert_mrc(mrc_path, output_obj_path, isosurface_level=args.level) \ No newline at end of file diff --git a/ThirdParty/eg3d/torch_utils/__init__.py b/ThirdParty/eg3d/torch_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +# empty diff --git a/ThirdParty/eg3d/torch_utils/__pycache__/__init__.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..661f7dde7f0e738919bc34eacc21c8ebbd342c11 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/__pycache__/custom_ops.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/__pycache__/custom_ops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ff9cedc2f46d31d5d333222afd19b843641fb83 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/__pycache__/custom_ops.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/__pycache__/misc.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/__pycache__/misc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b6ea02a1dcb1d5f87ce09beb9f33c044c339d87 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/__pycache__/misc.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/__pycache__/persistence.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/__pycache__/persistence.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c5af3cb936b8a5f48c947cc545279e082235e38 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/__pycache__/persistence.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/custom_ops.py b/ThirdParty/eg3d/torch_utils/custom_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..ed2524f47ab3d5b8750cfb868cc14012f424acc8 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/custom_ops.py @@ -0,0 +1,159 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
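+
+# Helpers for compiling and loading custom C++/CUDA PyTorch extensions on demand via
+# torch.utils.cpp_extension, with MSVC auto-detection on Windows and an MD5-keyed
+# cache of build directories for fast incremental rebuilds.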
+ +import glob +import hashlib +import importlib +import os +import re +import shutil +import uuid + +import torch +import torch.utils.cpp_extension +from torch.utils.file_baton import FileBaton + +#---------------------------------------------------------------------------- +# Global options. + +verbosity = 'brief' # Verbosity level: 'none', 'brief', 'full' + +#---------------------------------------------------------------------------- +# Internal helper funcs. + +def _find_compiler_bindir(): + patterns = [ + 'C:/Program Files (x86)/Microsoft Visual Studio/*/Professional/VC/Tools/MSVC/*/bin/Hostx64/x64', + 'C:/Program Files (x86)/Microsoft Visual Studio/*/BuildTools/VC/Tools/MSVC/*/bin/Hostx64/x64', + 'C:/Program Files (x86)/Microsoft Visual Studio/*/Community/VC/Tools/MSVC/*/bin/Hostx64/x64', + 'C:/Program Files (x86)/Microsoft Visual Studio */vc/bin', + ] + for pattern in patterns: + matches = sorted(glob.glob(pattern)) + if len(matches): + return matches[-1] + return None + +#---------------------------------------------------------------------------- + +def _get_mangled_gpu_name(): + name = torch.cuda.get_device_name().lower() + out = [] + for c in name: + if re.match('[a-z0-9_-]+', c): + out.append(c) + else: + out.append('-') + return ''.join(out) + +#---------------------------------------------------------------------------- +# Main entry point for compiling and loading C++/CUDA plugins. + +_cached_plugins = dict() + +def get_plugin(module_name, sources, headers=None, source_dir=None, **build_kwargs): + assert verbosity in ['none', 'brief', 'full'] + if headers is None: + headers = [] + if source_dir is not None: + sources = [os.path.join(source_dir, fname) for fname in sources] + headers = [os.path.join(source_dir, fname) for fname in headers] + + # Already cached? + if module_name in _cached_plugins: + return _cached_plugins[module_name] + + # Print status. + if verbosity == 'full': + print(f'Setting up PyTorch plugin "{module_name}"...') + elif verbosity == 'brief': + print(f'Setting up PyTorch plugin "{module_name}"... ', end='', flush=True) + verbose_build = (verbosity == 'full') + + # Compile and load. + try: # pylint: disable=too-many-nested-blocks + # Make sure we can find the necessary compiler binaries. + if os.name == 'nt' and os.system("where cl.exe >nul 2>nul") != 0: + compiler_bindir = _find_compiler_bindir() + if compiler_bindir is None: + raise RuntimeError(f'Could not find MSVC/GCC/CLANG installation on this computer. Check _find_compiler_bindir() in "{__file__}".') + os.environ['PATH'] += ';' + compiler_bindir + + # Some containers set TORCH_CUDA_ARCH_LIST to a list that can either + # break the build or unnecessarily restrict what's available to nvcc. + # Unset it to let nvcc decide based on what's available on the + # machine. + os.environ['TORCH_CUDA_ARCH_LIST'] = '' + + # Incremental build md5sum trickery. Copies all the input source files + # into a cached build directory under a combined md5 digest of the input + # source files. Copying is done only if the combined digest has changed. + # This keeps input file timestamps and filenames the same as in previous + # extension builds, allowing for fast incremental rebuilds. + # + # This optimization is done only in case all the source files reside in + # a single directory (just for simplicity) and if the TORCH_EXTENSIONS_DIR + # environment variable is set (we take this as a signal that the user + # actually cares about this.) 
+ # + # EDIT: We now do it regardless of TORCH_EXTENSIOS_DIR, in order to work + # around the *.cu dependency bug in ninja config. + # + all_source_files = sorted(sources + headers) + all_source_dirs = set(os.path.dirname(fname) for fname in all_source_files) + if len(all_source_dirs) == 1: # and ('TORCH_EXTENSIONS_DIR' in os.environ): + + # Compute combined hash digest for all source files. + hash_md5 = hashlib.md5() + for src in all_source_files: + with open(src, 'rb') as f: + hash_md5.update(f.read()) + + # Select cached build directory name. + source_digest = hash_md5.hexdigest() + build_top_dir = torch.utils.cpp_extension._get_build_directory(module_name, verbose=verbose_build) # pylint: disable=protected-access + cached_build_dir = os.path.join(build_top_dir, f'{source_digest}-{_get_mangled_gpu_name()}') + + if not os.path.isdir(cached_build_dir): + tmpdir = f'{build_top_dir}/srctmp-{uuid.uuid4().hex}' + os.makedirs(tmpdir) + for src in all_source_files: + shutil.copyfile(src, os.path.join(tmpdir, os.path.basename(src))) + try: + os.replace(tmpdir, cached_build_dir) # atomic + except OSError: + # source directory already exists, delete tmpdir and its contents. + shutil.rmtree(tmpdir) + if not os.path.isdir(cached_build_dir): raise + + # Compile. + cached_sources = [os.path.join(cached_build_dir, os.path.basename(fname)) for fname in sources] + torch.utils.cpp_extension.load(name=module_name, build_directory=cached_build_dir, + verbose=verbose_build, sources=cached_sources, **build_kwargs) + else: + torch.utils.cpp_extension.load(name=module_name, verbose=verbose_build, sources=sources, **build_kwargs) + + # Load. + module = importlib.import_module(module_name) + + except: + if verbosity == 'brief': + print('Failed!') + raise + + # Print status and add to cache dict. + if verbosity == 'full': + print(f'Done setting up PyTorch plugin "{module_name}".') + elif verbosity == 'brief': + print('Done.') + _cached_plugins[module_name] = module + return module + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/misc.py b/ThirdParty/eg3d/torch_utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..2f15d37235fcf5458b27302c278209754bc83965 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/misc.py @@ -0,0 +1,268 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import re +import contextlib +import numpy as np +import torch +import warnings +import ThirdParty.eg3d.dnnlib + +#---------------------------------------------------------------------------- +# Cached construction of constant tensors. Avoids CPU=>GPU copy when the +# same constant is used multiple times. 
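+#
+# Illustrative usage (assuming this module is imported as `misc`; repeated calls with
+# identical arguments return the same cached tensor):
+#   f = misc.constant([1, 3, 3, 1], device=device)
+#   g = misc.constant([1, 3, 3, 1], device=device)   # same object as f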
+ +_constant_cache = dict() + +def constant(value, shape=None, dtype=None, device=None, memory_format=None): + value = np.asarray(value) + if shape is not None: + shape = tuple(shape) + if dtype is None: + dtype = torch.get_default_dtype() + if device is None: + device = torch.device('cpu') + if memory_format is None: + memory_format = torch.contiguous_format + + key = (value.shape, value.dtype, value.tobytes(), shape, dtype, device, memory_format) + tensor = _constant_cache.get(key, None) + if tensor is None: + tensor = torch.as_tensor(value.copy(), dtype=dtype, device=device) + if shape is not None: + tensor, _ = torch.broadcast_tensors(tensor, torch.empty(shape)) + tensor = tensor.contiguous(memory_format=memory_format) + _constant_cache[key] = tensor + return tensor + +#---------------------------------------------------------------------------- +# Replace NaN/Inf with specified numerical values. + +try: + nan_to_num = torch.nan_to_num # 1.8.0a0 +except AttributeError: + def nan_to_num(input, nan=0.0, posinf=None, neginf=None, *, out=None): # pylint: disable=redefined-builtin + assert isinstance(input, torch.Tensor) + if posinf is None: + posinf = torch.finfo(input.dtype).max + if neginf is None: + neginf = torch.finfo(input.dtype).min + assert nan == 0 + return torch.clamp(input.unsqueeze(0).nansum(0), min=neginf, max=posinf, out=out) + +#---------------------------------------------------------------------------- +# Symbolic assert. + +try: + symbolic_assert = torch._assert # 1.8.0a0 # pylint: disable=protected-access +except AttributeError: + symbolic_assert = torch.Assert # 1.7.0 + +#---------------------------------------------------------------------------- +# Context manager to temporarily suppress known warnings in torch.jit.trace(). +# Note: Cannot use catch_warnings because of https://bugs.python.org/issue29672 + +@contextlib.contextmanager +def suppress_tracer_warnings(): + flt = ('ignore', None, torch.jit.TracerWarning, None, 0) + warnings.filters.insert(0, flt) + yield + warnings.filters.remove(flt) + +#---------------------------------------------------------------------------- +# Assert that the shape of a tensor matches the given list of integers. +# None indicates that the size of a dimension is allowed to vary. +# Performs symbolic assertion when used in torch.jit.trace(). + +def assert_shape(tensor, ref_shape): + if tensor.ndim != len(ref_shape): + raise AssertionError(f'Wrong number of dimensions: got {tensor.ndim}, expected {len(ref_shape)}') + for idx, (size, ref_size) in enumerate(zip(tensor.shape, ref_shape)): + if ref_size is None: + pass + elif isinstance(ref_size, torch.Tensor): + with suppress_tracer_warnings(): # as_tensor results are registered as constants + symbolic_assert(torch.equal(torch.as_tensor(size), ref_size), f'Wrong size for dimension {idx}') + elif isinstance(size, torch.Tensor): + with suppress_tracer_warnings(): # as_tensor results are registered as constants + symbolic_assert(torch.equal(size, torch.as_tensor(ref_size)), f'Wrong size for dimension {idx}: expected {ref_size}') + elif size != ref_size: + raise AssertionError(f'Wrong size for dimension {idx}: got {size}, expected {ref_size}') + +#---------------------------------------------------------------------------- +# Function decorator that calls torch.autograd.profiler.record_function(). 
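+# Example (illustrative):
+#     @profiled_function
+#     def generator_step(z):
+#         return G(z)
+# Each call then appears as a named "generator_step" range in torch.autograd.profiler traces.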
+ +def profiled_function(fn): + def decorator(*args, **kwargs): + with torch.autograd.profiler.record_function(fn.__name__): + return fn(*args, **kwargs) + decorator.__name__ = fn.__name__ + return decorator + +#---------------------------------------------------------------------------- +# Sampler for torch.utils.data.DataLoader that loops over the dataset +# indefinitely, shuffling items as it goes. + +class InfiniteSampler(torch.utils.data.Sampler): + def __init__(self, dataset, rank=0, num_replicas=1, shuffle=True, seed=0, window_size=0.5): + assert len(dataset) > 0 + assert num_replicas > 0 + assert 0 <= rank < num_replicas + assert 0 <= window_size <= 1 + super().__init__(dataset) + self.dataset = dataset + self.rank = rank + self.num_replicas = num_replicas + self.shuffle = shuffle + self.seed = seed + self.window_size = window_size + + def __iter__(self): + order = np.arange(len(self.dataset)) + rnd = None + window = 0 + if self.shuffle: + rnd = np.random.RandomState(self.seed) + rnd.shuffle(order) + window = int(np.rint(order.size * self.window_size)) + + idx = 0 + while True: + i = idx % order.size + if idx % self.num_replicas == self.rank: + yield order[i] + if window >= 2: + j = (i - rnd.randint(window)) % order.size + order[i], order[j] = order[j], order[i] + idx += 1 + +#---------------------------------------------------------------------------- +# Utilities for operating with torch.nn.Module parameters and buffers. + +def params_and_buffers(module): + assert isinstance(module, torch.nn.Module) + return list(module.parameters()) + list(module.buffers()) + +def named_params_and_buffers(module): + assert isinstance(module, torch.nn.Module) + return list(module.named_parameters()) + list(module.named_buffers()) + +def copy_params_and_buffers(src_module, dst_module, require_all=False): + assert isinstance(src_module, torch.nn.Module) + assert isinstance(dst_module, torch.nn.Module) + src_tensors = dict(named_params_and_buffers(src_module)) + for name, tensor in named_params_and_buffers(dst_module): + assert (name in src_tensors) or (not require_all) + if name in src_tensors: + tensor.copy_(src_tensors[name].detach()).requires_grad_(tensor.requires_grad) + +#---------------------------------------------------------------------------- +# Context manager for easily enabling/disabling DistributedDataParallel +# synchronization. + +@contextlib.contextmanager +def ddp_sync(module, sync): + assert isinstance(module, torch.nn.Module) + if sync or not isinstance(module, torch.nn.parallel.DistributedDataParallel): + yield + else: + with module.no_sync(): + yield + +#---------------------------------------------------------------------------- +# Check DistributedDataParallel consistency across processes. + +def check_ddp_consistency(module, ignore_regex=None): + assert isinstance(module, torch.nn.Module) + for name, tensor in named_params_and_buffers(module): + fullname = type(module).__name__ + '.' + name + if ignore_regex is not None and re.fullmatch(ignore_regex, fullname): + continue + tensor = tensor.detach() + if tensor.is_floating_point(): + tensor = nan_to_num(tensor) + other = tensor.clone() + torch.distributed.broadcast(tensor=other, src=0) + assert (tensor == other).all(), fullname + +#---------------------------------------------------------------------------- +# Print summary table of module hierarchy. 
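+# Example (illustrative): print_module_summary(net, [torch.zeros([1, 3, 64, 64])])
+# runs one forward pass with hooks attached, prints per-submodule parameter and
+# buffer counts plus output shapes, and returns the forward outputs unchanged.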
+ +def print_module_summary(module, inputs, max_nesting=3, skip_redundant=True): + assert isinstance(module, torch.nn.Module) + assert not isinstance(module, torch.jit.ScriptModule) + assert isinstance(inputs, (tuple, list)) + + # Register hooks. + entries = [] + nesting = [0] + def pre_hook(_mod, _inputs): + nesting[0] += 1 + def post_hook(mod, _inputs, outputs): + nesting[0] -= 1 + if nesting[0] <= max_nesting: + outputs = list(outputs) if isinstance(outputs, (tuple, list)) else [outputs] + outputs = [t for t in outputs if isinstance(t, torch.Tensor)] + entries.append(dnnlib.EasyDict(mod=mod, outputs=outputs)) + hooks = [mod.register_forward_pre_hook(pre_hook) for mod in module.modules()] + hooks += [mod.register_forward_hook(post_hook) for mod in module.modules()] + + # Run module. + outputs = module(*inputs) + for hook in hooks: + hook.remove() + + # Identify unique outputs, parameters, and buffers. + tensors_seen = set() + for e in entries: + e.unique_params = [t for t in e.mod.parameters() if id(t) not in tensors_seen] + e.unique_buffers = [t for t in e.mod.buffers() if id(t) not in tensors_seen] + e.unique_outputs = [t for t in e.outputs if id(t) not in tensors_seen] + tensors_seen |= {id(t) for t in e.unique_params + e.unique_buffers + e.unique_outputs} + + # Filter out redundant entries. + if skip_redundant: + entries = [e for e in entries if len(e.unique_params) or len(e.unique_buffers) or len(e.unique_outputs)] + + # Construct table. + rows = [[type(module).__name__, 'Parameters', 'Buffers', 'Output shape', 'Datatype']] + rows += [['---'] * len(rows[0])] + param_total = 0 + buffer_total = 0 + submodule_names = {mod: name for name, mod in module.named_modules()} + for e in entries: + name = '' if e.mod is module else submodule_names[e.mod] + param_size = sum(t.numel() for t in e.unique_params) + buffer_size = sum(t.numel() for t in e.unique_buffers) + output_shapes = [str(list(t.shape)) for t in e.outputs] + output_dtypes = [str(t.dtype).split('.')[-1] for t in e.outputs] + rows += [[ + name + (':0' if len(e.outputs) >= 2 else ''), + str(param_size) if param_size else '-', + str(buffer_size) if buffer_size else '-', + (output_shapes + ['-'])[0], + (output_dtypes + ['-'])[0], + ]] + for idx in range(1, len(e.outputs)): + rows += [[name + f':{idx}', '-', '-', output_shapes[idx], output_dtypes[idx]]] + param_total += param_size + buffer_total += buffer_size + rows += [['---'] * len(rows[0])] + rows += [['Total', str(param_total), str(buffer_total), '-', '-']] + + # Print table. + widths = [max(len(cell) for cell in column) for column in zip(*rows)] + print() + for row in rows: + print(' '.join(cell + ' ' * (width - len(cell)) for cell, width in zip(row, widths))) + print() + return outputs + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/__init__.py b/ThirdParty/eg3d/torch_utils/ops/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +# empty diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/__init__.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cebdd06b975c8a96eb7ac2b5d011646686d6e2f9 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/bias_act.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/bias_act.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee5016e66dc88b0e44f4effd84501c008974783a Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/bias_act.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..180dd45663d68ff402594da070f529257e5e0661 Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_gradfix.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_resample.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_resample.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44dd2d08423f4c022cc4a4dc30d739e73cec45ab Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/conv2d_resample.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/fma.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/fma.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b02e77fcc850ee3bfbf9c2d74b02144eff7fecb Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/fma.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/__pycache__/upfirdn2d.cpython-310.pyc b/ThirdParty/eg3d/torch_utils/ops/__pycache__/upfirdn2d.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e534a4f1f89f726d86099adda068ba10b6c955ca Binary files /dev/null and b/ThirdParty/eg3d/torch_utils/ops/__pycache__/upfirdn2d.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/torch_utils/ops/bias_act.cpp b/ThirdParty/eg3d/torch_utils/ops/bias_act.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ee6f6d0caaf4f84b94851d223e384344e1109cdc --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/bias_act.cpp @@ -0,0 +1,103 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. 
+ */
+
+#include <torch/extension.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include "bias_act.h"
+
+//------------------------------------------------------------------------
+
+static bool has_same_layout(torch::Tensor x, torch::Tensor y)
+{
+    if (x.dim() != y.dim())
+        return false;
+    for (int64_t i = 0; i < x.dim(); i++)
+    {
+        if (x.size(i) != y.size(i))
+            return false;
+        if (x.size(i) >= 2 && x.stride(i) != y.stride(i))
+            return false;
+    }
+    return true;
+}
+
+//------------------------------------------------------------------------
+
+static torch::Tensor bias_act(torch::Tensor x, torch::Tensor b, torch::Tensor xref, torch::Tensor yref, torch::Tensor dy, int grad, int dim, int act, float alpha, float gain, float clamp)
+{
+    // Validate arguments.
+    TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+    TORCH_CHECK(b.numel() == 0 || (b.dtype() == x.dtype() && b.device() == x.device()), "b must have the same dtype and device as x");
+    TORCH_CHECK(xref.numel() == 0 || (xref.sizes() == x.sizes() && xref.dtype() == x.dtype() && xref.device() == x.device()), "xref must have the same shape, dtype, and device as x");
+    TORCH_CHECK(yref.numel() == 0 || (yref.sizes() == x.sizes() && yref.dtype() == x.dtype() && yref.device() == x.device()), "yref must have the same shape, dtype, and device as x");
+    TORCH_CHECK(dy.numel() == 0 || (dy.sizes() == x.sizes() && dy.dtype() == x.dtype() && dy.device() == x.device()), "dy must have the same dtype and device as x");
+    TORCH_CHECK(x.numel() <= INT_MAX, "x is too large");
+    TORCH_CHECK(b.dim() == 1, "b must have rank 1");
+    TORCH_CHECK(b.numel() == 0 || (dim >= 0 && dim < x.dim()), "dim is out of bounds");
+    TORCH_CHECK(b.numel() == 0 || b.numel() == x.size(dim), "b has wrong number of elements");
+    TORCH_CHECK(grad >= 0, "grad must be non-negative");
+
+    // Validate layout.
+    TORCH_CHECK(x.is_non_overlapping_and_dense(), "x must be non-overlapping and dense");
+    TORCH_CHECK(b.is_contiguous(), "b must be contiguous");
+    TORCH_CHECK(xref.numel() == 0 || has_same_layout(xref, x), "xref must have the same layout as x");
+    TORCH_CHECK(yref.numel() == 0 || has_same_layout(yref, x), "yref must have the same layout as x");
+    TORCH_CHECK(dy.numel() == 0 || has_same_layout(dy, x), "dy must have the same layout as x");
+
+    // Create output tensor.
+    const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+    torch::Tensor y = torch::empty_like(x);
+    TORCH_CHECK(has_same_layout(y, x), "y must have the same layout as x");
+
+    // Initialize CUDA kernel parameters.
+    bias_act_kernel_params p;
+    p.x = x.data_ptr();
+    p.b = (b.numel()) ? b.data_ptr() : NULL;
+    p.xref = (xref.numel()) ? xref.data_ptr() : NULL;
+    p.yref = (yref.numel()) ? yref.data_ptr() : NULL;
+    p.dy = (dy.numel()) ? dy.data_ptr() : NULL;
+    p.y = y.data_ptr();
+    p.grad = grad;
+    p.act = act;
+    p.alpha = alpha;
+    p.gain = gain;
+    p.clamp = clamp;
+    p.sizeX = (int)x.numel();
+    p.sizeB = (int)b.numel();
+    p.stepB = (b.numel()) ? (int)x.stride(dim) : 1;
+
+    // Choose CUDA kernel.
+    void* kernel;
+    AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "upfirdn2d_cuda", [&]
+    {
+        kernel = choose_bias_act_kernel<scalar_t>(p);
+    });
+    TORCH_CHECK(kernel, "no CUDA kernel found for the specified activation func");
+
+    // Launch CUDA kernel.
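+    // (Note) Each thread processes p.loopX consecutive elements; with 128 threads per
+    // block, the grid size below works out to ceil(sizeX / (loopX * blockSize)).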
+    p.loopX = 4;
+    int blockSize = 4 * 32;
+    int gridSize = (p.sizeX - 1) / (p.loopX * blockSize) + 1;
+    void* args[] = {&p};
+    AT_CUDA_CHECK(cudaLaunchKernel(kernel, gridSize, blockSize, args, 0, at::cuda::getCurrentCUDAStream()));
+    return y;
+}
+
+//------------------------------------------------------------------------
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m)
+{
+    m.def("bias_act", &bias_act);
+}
+
+//------------------------------------------------------------------------
diff --git a/ThirdParty/eg3d/torch_utils/ops/bias_act.cu b/ThirdParty/eg3d/torch_utils/ops/bias_act.cu
new file mode 100644
index 0000000000000000000000000000000000000000..71ca3900deda41e62d80044f0e409875f4c794b5
--- /dev/null
+++ b/ThirdParty/eg3d/torch_utils/ops/bias_act.cu
@@ -0,0 +1,177 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <c10/util/Half.h>
+#include "bias_act.h"
+
+//------------------------------------------------------------------------
+// Helpers.
+
+template <class T> struct InternalType;
+template <> struct InternalType<double>     { typedef double scalar_t; };
+template <> struct InternalType<float>      { typedef float scalar_t; };
+template <> struct InternalType<c10::Half>  { typedef float scalar_t; };
+
+//------------------------------------------------------------------------
+// CUDA kernel.
+
+template <class T, int A>
+__global__ void bias_act_kernel(bias_act_kernel_params p)
+{
+    typedef typename InternalType<T>::scalar_t scalar_t;
+    int G = p.grad;
+    scalar_t alpha = (scalar_t)p.alpha;
+    scalar_t gain = (scalar_t)p.gain;
+    scalar_t clamp = (scalar_t)p.clamp;
+    scalar_t one = (scalar_t)1;
+    scalar_t two = (scalar_t)2;
+    scalar_t expRange = (scalar_t)80;
+    scalar_t halfExpRange = (scalar_t)40;
+    scalar_t seluScale = (scalar_t)1.0507009873554804934193349852946;
+    scalar_t seluAlpha = (scalar_t)1.6732632423543772848170429916717;
+
+    // Loop over elements.
+    int xi = blockIdx.x * p.loopX * blockDim.x + threadIdx.x;
+    for (int loopIdx = 0; loopIdx < p.loopX && xi < p.sizeX; loopIdx++, xi += blockDim.x)
+    {
+        // Load.
+        scalar_t x = (scalar_t)((const T*)p.x)[xi];
+        scalar_t b = (p.b) ? (scalar_t)((const T*)p.b)[(xi / p.stepB) % p.sizeB] : 0;
+        scalar_t xref = (p.xref) ? (scalar_t)((const T*)p.xref)[xi] : 0;
+        scalar_t yref = (p.yref) ? (scalar_t)((const T*)p.yref)[xi] : 0;
+        scalar_t dy = (p.dy) ? (scalar_t)((const T*)p.dy)[xi] : one;
+        scalar_t yy = (gain != 0) ? yref / gain : 0;
+        scalar_t y = 0;
+
+        // Apply bias.
+        ((G == 0) ? x : xref) += b;
+
+        // linear
+        if (A == 1)
+        {
+            if (G == 0) y = x;
+            if (G == 1) y = x;
+        }
+
+        // relu
+        if (A == 2)
+        {
+            if (G == 0) y = (x > 0) ? x : 0;
+            if (G == 1) y = (yy > 0) ? x : 0;
+        }
+
+        // lrelu
+        if (A == 3)
+        {
+            if (G == 0) y = (x > 0) ? x : x * alpha;
+            if (G == 1) y = (yy > 0) ? x : x * alpha;
+        }
+
+        // tanh
+        if (A == 4)
+        {
+            if (G == 0) { scalar_t c = exp(x); scalar_t d = one / c; y = (x < -expRange) ? -one : (x > expRange) ? one : (c - d) / (c + d); }
+            if (G == 1) y = x * (one - yy * yy);
+            if (G == 2) y = x * (one - yy * yy) * (-two * yy);
+        }
+
+        // sigmoid
+        if (A == 5)
+        {
+            if (G == 0) y = (x < -expRange) ? 0 : one / (exp(-x) + one);
+            if (G == 1) y = x * yy * (one - yy);
+            if (G == 2) y = x * yy * (one - yy) * (one - two * yy);
+        }
+
+        // elu
+        if (A == 6)
+        {
+            if (G == 0) y = (x >= 0) ? x : exp(x) - one;
+            if (G == 1) y = (yy >= 0) ? x : x * (yy + one);
+            if (G == 2) y = (yy >= 0) ? 0 : x * (yy + one);
+        }
+
+        // selu
+        if (A == 7)
+        {
+            if (G == 0) y = (x >= 0) ? seluScale * x : (seluScale * seluAlpha) * (exp(x) - one);
+            if (G == 1) y = (yy >= 0) ? x * seluScale : x * (yy + seluScale * seluAlpha);
+            if (G == 2) y = (yy >= 0) ? 0 : x * (yy + seluScale * seluAlpha);
+        }
+
+        // softplus
+        if (A == 8)
+        {
+            if (G == 0) y = (x > expRange) ? x : log(exp(x) + one);
+            if (G == 1) y = x * (one - exp(-yy));
+            if (G == 2) { scalar_t c = exp(-yy); y = x * c * (one - c); }
+        }
+
+        // swish
+        if (A == 9)
+        {
+            if (G == 0)
+                y = (x < -expRange) ? 0 : x / (exp(-x) + one);
+            else
+            {
+                scalar_t c = exp(xref);
+                scalar_t d = c + one;
+                if (G == 1)
+                    y = (xref > halfExpRange) ? x : x * c * (xref + d) / (d * d);
+                else
+                    y = (xref > halfExpRange) ? 0 : x * c * (xref * (two - d) + two * d) / (d * d * d);
+                yref = (xref < -expRange) ? 0 : xref / (exp(-xref) + one) * gain;
+            }
+        }
+
+        // Apply gain.
+        y *= gain * dy;
+
+        // Clamp.
+        if (clamp >= 0)
+        {
+            if (G == 0)
+                y = (y > -clamp & y < clamp) ? y : (y >= 0) ? clamp : -clamp;
+            else
+                y = (yref > -clamp & yref < clamp) ? y : 0;
+        }
+
+        // Store.
+        ((T*)p.y)[xi] = (T)y;
+    }
+}
+
+//------------------------------------------------------------------------
+// CUDA kernel selection.
+
+template <class T> void* choose_bias_act_kernel(const bias_act_kernel_params& p)
+{
+    if (p.act == 1) return (void*)bias_act_kernel<T, 1>;
+    if (p.act == 2) return (void*)bias_act_kernel<T, 2>;
+    if (p.act == 3) return (void*)bias_act_kernel<T, 3>;
+    if (p.act == 4) return (void*)bias_act_kernel<T, 4>;
+    if (p.act == 5) return (void*)bias_act_kernel<T, 5>;
+    if (p.act == 6) return (void*)bias_act_kernel<T, 6>;
+    if (p.act == 7) return (void*)bias_act_kernel<T, 7>;
+    if (p.act == 8) return (void*)bias_act_kernel<T, 8>;
+    if (p.act == 9) return (void*)bias_act_kernel<T, 9>;
+    return NULL;
+}
+
+//------------------------------------------------------------------------
+// Template specializations.
+
+template void* choose_bias_act_kernel<double>    (const bias_act_kernel_params& p);
+template void* choose_bias_act_kernel<float>     (const bias_act_kernel_params& p);
+template void* choose_bias_act_kernel<c10::Half> (const bias_act_kernel_params& p);
+
+//------------------------------------------------------------------------
diff --git a/ThirdParty/eg3d/torch_utils/ops/bias_act.h b/ThirdParty/eg3d/torch_utils/ops/bias_act.h
new file mode 100644
index 0000000000000000000000000000000000000000..8994bfb4e9cae790865348e08de5f685152d3344
--- /dev/null
+++ b/ThirdParty/eg3d/torch_utils/ops/bias_act.h
@@ -0,0 +1,42 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */ + +//------------------------------------------------------------------------ +// CUDA kernel parameters. + +struct bias_act_kernel_params +{ + const void* x; // [sizeX] + const void* b; // [sizeB] or NULL + const void* xref; // [sizeX] or NULL + const void* yref; // [sizeX] or NULL + const void* dy; // [sizeX] or NULL + void* y; // [sizeX] + + int grad; + int act; + float alpha; + float gain; + float clamp; + + int sizeX; + int sizeB; + int stepB; + int loopX; +}; + +//------------------------------------------------------------------------ +// CUDA kernel selection. + +template void* choose_bias_act_kernel(const bias_act_kernel_params& p); + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/bias_act.py b/ThirdParty/eg3d/torch_utils/ops/bias_act.py new file mode 100644 index 0000000000000000000000000000000000000000..d46ca82fed202efe31b615698981c76d935f9e72 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/bias_act.py @@ -0,0 +1,211 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Custom PyTorch ops for efficient bias and activation.""" + +import os +import numpy as np +import torch +from ThirdParty.eg3d import dnnlib + +from .. import custom_ops +from .. import misc + +#---------------------------------------------------------------------------- + +activation_funcs = { + 'linear': dnnlib.EasyDict(func=lambda x, **_: x, def_alpha=0, def_gain=1, cuda_idx=1, ref='', has_2nd_grad=False), + 'relu': dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.relu(x), def_alpha=0, def_gain=np.sqrt(2), cuda_idx=2, ref='y', has_2nd_grad=False), + 'lrelu': dnnlib.EasyDict(func=lambda x, alpha, **_: torch.nn.functional.leaky_relu(x, alpha), def_alpha=0.2, def_gain=np.sqrt(2), cuda_idx=3, ref='y', has_2nd_grad=False), + 'tanh': dnnlib.EasyDict(func=lambda x, **_: torch.tanh(x), def_alpha=0, def_gain=1, cuda_idx=4, ref='y', has_2nd_grad=True), + 'sigmoid': dnnlib.EasyDict(func=lambda x, **_: torch.sigmoid(x), def_alpha=0, def_gain=1, cuda_idx=5, ref='y', has_2nd_grad=True), + 'elu': dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.elu(x), def_alpha=0, def_gain=1, cuda_idx=6, ref='y', has_2nd_grad=True), + 'selu': dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.selu(x), def_alpha=0, def_gain=1, cuda_idx=7, ref='y', has_2nd_grad=True), + 'softplus': dnnlib.EasyDict(func=lambda x, **_: torch.nn.functional.softplus(x), def_alpha=0, def_gain=1, cuda_idx=8, ref='y', has_2nd_grad=True), + 'swish': dnnlib.EasyDict(func=lambda x, **_: torch.sigmoid(x) * x, def_alpha=0, def_gain=np.sqrt(2), cuda_idx=9, ref='x', has_2nd_grad=True), +} + +#---------------------------------------------------------------------------- + +_plugin = None +_null_tensor = torch.empty([0]) + +def _init(): + global _plugin + if _plugin is None: + _plugin = custom_ops.get_plugin( + module_name='bias_act_plugin', + sources=['bias_act.cpp', 'bias_act.cu'], + headers=['bias_act.h'], + source_dir=os.path.dirname(__file__), + 
extra_cuda_cflags=['--use_fast_math'], + ) + return True + +#---------------------------------------------------------------------------- + +def bias_act(x, b=None, dim=1, act='linear', alpha=None, gain=None, clamp=None, impl='cuda'): + r"""Fused bias and activation function. + + Adds bias `b` to activation tensor `x`, evaluates activation function `act`, + and scales the result by `gain`. Each of the steps is optional. In most cases, + the fused op is considerably more efficient than performing the same calculation + using standard PyTorch ops. It supports first and second order gradients, + but not third order gradients. + + Args: + x: Input activation tensor. Can be of any shape. + b: Bias vector, or `None` to disable. Must be a 1D tensor of the same type + as `x`. The shape must be known, and it must match the dimension of `x` + corresponding to `dim`. + dim: The dimension in `x` corresponding to the elements of `b`. + The value of `dim` is ignored if `b` is not specified. + act: Name of the activation function to evaluate, or `"linear"` to disable. + Can be e.g. `"relu"`, `"lrelu"`, `"tanh"`, `"sigmoid"`, `"swish"`, etc. + See `activation_funcs` for a full list. `None` is not allowed. + alpha: Shape parameter for the activation function, or `None` to use the default. + gain: Scaling factor for the output tensor, or `None` to use default. + See `activation_funcs` for the default scaling of each activation function. + If unsure, consider specifying 1. + clamp: Clamp the output values to `[-clamp, +clamp]`, or `None` to disable + the clamping (default). + impl: Name of the implementation to use. Can be `"ref"` or `"cuda"` (default). + + Returns: + Tensor of the same shape and datatype as `x`. + """ + assert isinstance(x, torch.Tensor) + assert impl in ['ref', 'cuda'] + if impl == 'cuda' and x.device.type == 'cuda' and _init(): + return _bias_act_cuda(dim=dim, act=act, alpha=alpha, gain=gain, clamp=clamp).apply(x, b) + return _bias_act_ref(x=x, b=b, dim=dim, act=act, alpha=alpha, gain=gain, clamp=clamp) + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def _bias_act_ref(x, b=None, dim=1, act='linear', alpha=None, gain=None, clamp=None): + """Slow reference implementation of `bias_act()` using standard TensorFlow ops. + """ + assert isinstance(x, torch.Tensor) + assert clamp is None or clamp >= 0 + spec = activation_funcs[act] + alpha = float(alpha if alpha is not None else spec.def_alpha) + gain = float(gain if gain is not None else spec.def_gain) + clamp = float(clamp if clamp is not None else -1) + + # Add bias. + if b is not None: + assert isinstance(b, torch.Tensor) and b.ndim == 1 + assert 0 <= dim < x.ndim + assert b.shape[0] == x.shape[dim] + x = x + b.reshape([-1 if i == dim else 1 for i in range(x.ndim)]) + + # Evaluate activation function. + alpha = float(alpha) + x = spec.func(x, alpha=alpha) + + # Scale by gain. + gain = float(gain) + if gain != 1: + x = x * gain + + # Clamp. + if clamp >= 0: + x = x.clamp(-clamp, clamp) # pylint: disable=invalid-unary-operand-type + return x + +#---------------------------------------------------------------------------- + +_bias_act_cuda_cache = dict() + +def _bias_act_cuda(dim=1, act='linear', alpha=None, gain=None, clamp=None): + """Fast CUDA implementation of `bias_act()` using custom ops. + """ + # Parse arguments. 
+ assert clamp is None or clamp >= 0 + spec = activation_funcs[act] + alpha = float(alpha if alpha is not None else spec.def_alpha) + gain = float(gain if gain is not None else spec.def_gain) + clamp = float(clamp if clamp is not None else -1) + + # Lookup from cache. + key = (dim, act, alpha, gain, clamp) + if key in _bias_act_cuda_cache: + return _bias_act_cuda_cache[key] + + # Forward op. + class BiasActCuda(torch.autograd.Function): + @staticmethod + def forward(ctx, x, b): # pylint: disable=arguments-differ + ctx.memory_format = torch.channels_last if x.ndim > 2 and x.stride(1) == 1 else torch.contiguous_format + x = x.contiguous(memory_format=ctx.memory_format) + b = b.contiguous() if b is not None else _null_tensor + y = x + if act != 'linear' or gain != 1 or clamp >= 0 or b is not _null_tensor: + y = _plugin.bias_act(x, b, _null_tensor, _null_tensor, _null_tensor, 0, dim, spec.cuda_idx, alpha, gain, clamp) + ctx.save_for_backward( + x if 'x' in spec.ref or spec.has_2nd_grad else _null_tensor, + b if 'x' in spec.ref or spec.has_2nd_grad else _null_tensor, + y if 'y' in spec.ref else _null_tensor) + return y + + @staticmethod + def backward(ctx, dy): # pylint: disable=arguments-differ + dy = dy.contiguous(memory_format=ctx.memory_format) + x, b, y = ctx.saved_tensors + dx = None + db = None + + if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]: + dx = dy + if act != 'linear' or gain != 1 or clamp >= 0: + dx = BiasActCudaGrad.apply(dy, x, b, y) + + if ctx.needs_input_grad[1]: + db = dx.sum([i for i in range(dx.ndim) if i != dim]) + + return dx, db + + # Backward op. + class BiasActCudaGrad(torch.autograd.Function): + @staticmethod + def forward(ctx, dy, x, b, y): # pylint: disable=arguments-differ + ctx.memory_format = torch.channels_last if dy.ndim > 2 and dy.stride(1) == 1 else torch.contiguous_format + dx = _plugin.bias_act(dy, b, x, y, _null_tensor, 1, dim, spec.cuda_idx, alpha, gain, clamp) + ctx.save_for_backward( + dy if spec.has_2nd_grad else _null_tensor, + x, b, y) + return dx + + @staticmethod + def backward(ctx, d_dx): # pylint: disable=arguments-differ + d_dx = d_dx.contiguous(memory_format=ctx.memory_format) + dy, x, b, y = ctx.saved_tensors + d_dy = None + d_x = None + d_b = None + d_y = None + + if ctx.needs_input_grad[0]: + d_dy = BiasActCudaGrad.apply(d_dx, x, b, y) + + if spec.has_2nd_grad and (ctx.needs_input_grad[1] or ctx.needs_input_grad[2]): + d_x = _plugin.bias_act(d_dx, b, x, y, dy, 2, dim, spec.cuda_idx, alpha, gain, clamp) + + if spec.has_2nd_grad and ctx.needs_input_grad[2]: + d_b = d_x.sum([i for i in range(d_x.ndim) if i != dim]) + + return d_dy, d_x, d_b, d_y + + # Add to cache. + _bias_act_cuda_cache[key] = BiasActCuda + return BiasActCuda + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/conv2d_gradfix.py b/ThirdParty/eg3d/torch_utils/ops/conv2d_gradfix.py new file mode 100644 index 0000000000000000000000000000000000000000..9a177cc1c0b6eabf16908cf9afaa4387e7716b72 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/conv2d_gradfix.py @@ -0,0 +1,199 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Custom replacement for `torch.nn.functional.conv2d` that supports +arbitrarily high order gradients with zero performance penalty.""" + +import contextlib +import torch + +# pylint: disable=redefined-builtin +# pylint: disable=arguments-differ +# pylint: disable=protected-access + +#---------------------------------------------------------------------------- + +enabled = False # Enable the custom op by setting this to true. +weight_gradients_disabled = False # Forcefully disable computation of gradients with respect to the weights. + +@contextlib.contextmanager +def no_weight_gradients(disable=True): + global weight_gradients_disabled + old = weight_gradients_disabled + if disable: + weight_gradients_disabled = True + yield + weight_gradients_disabled = old + +#---------------------------------------------------------------------------- + +def conv2d(input, weight, bias=None, stride=1, padding=0, dilation=1, groups=1): + if _should_use_custom_op(input): + return _conv2d_gradfix(transpose=False, weight_shape=weight.shape, stride=stride, padding=padding, output_padding=0, dilation=dilation, groups=groups).apply(input, weight, bias) + return torch.nn.functional.conv2d(input=input, weight=weight, bias=bias, stride=stride, padding=padding, dilation=dilation, groups=groups) + +def conv_transpose2d(input, weight, bias=None, stride=1, padding=0, output_padding=0, groups=1, dilation=1): + if _should_use_custom_op(input): + return _conv2d_gradfix(transpose=True, weight_shape=weight.shape, stride=stride, padding=padding, output_padding=output_padding, groups=groups, dilation=dilation).apply(input, weight, bias) + return torch.nn.functional.conv_transpose2d(input=input, weight=weight, bias=bias, stride=stride, padding=padding, output_padding=output_padding, groups=groups, dilation=dilation) + +#---------------------------------------------------------------------------- + +def _should_use_custom_op(input): + assert isinstance(input, torch.Tensor) + if (not enabled) or (not torch.backends.cudnn.enabled): + return False + if input.device.type != 'cuda': + return False + return True + +def _tuple_of_ints(xs, ndim): + xs = tuple(xs) if isinstance(xs, (tuple, list)) else (xs,) * ndim + assert len(xs) == ndim + assert all(isinstance(x, int) for x in xs) + return xs + +#---------------------------------------------------------------------------- + +_conv2d_gradfix_cache = dict() +_null_tensor = torch.empty([0]) + +def _conv2d_gradfix(transpose, weight_shape, stride, padding, output_padding, dilation, groups): + # Parse arguments. + ndim = 2 + weight_shape = tuple(weight_shape) + stride = _tuple_of_ints(stride, ndim) + padding = _tuple_of_ints(padding, ndim) + output_padding = _tuple_of_ints(output_padding, ndim) + dilation = _tuple_of_ints(dilation, ndim) + + # Lookup from cache. + key = (transpose, weight_shape, stride, padding, output_padding, dilation, groups) + if key in _conv2d_gradfix_cache: + return _conv2d_gradfix_cache[key] + + # Validate arguments. 
+ assert groups >= 1 + assert len(weight_shape) == ndim + 2 + assert all(stride[i] >= 1 for i in range(ndim)) + assert all(padding[i] >= 0 for i in range(ndim)) + assert all(dilation[i] >= 0 for i in range(ndim)) + if not transpose: + assert all(output_padding[i] == 0 for i in range(ndim)) + else: # transpose + assert all(0 <= output_padding[i] < max(stride[i], dilation[i]) for i in range(ndim)) + + # Helpers. + common_kwargs = dict(stride=stride, padding=padding, dilation=dilation, groups=groups) + def calc_output_padding(input_shape, output_shape): + if transpose: + return [0, 0] + return [ + input_shape[i + 2] + - (output_shape[i + 2] - 1) * stride[i] + - (1 - 2 * padding[i]) + - dilation[i] * (weight_shape[i + 2] - 1) + for i in range(ndim) + ] + + # Forward & backward. + class Conv2d(torch.autograd.Function): + @staticmethod + def forward(ctx, input, weight, bias): + assert weight.shape == weight_shape + ctx.save_for_backward( + input if weight.requires_grad else _null_tensor, + weight if input.requires_grad else _null_tensor, + ) + ctx.input_shape = input.shape + + # Simple 1x1 convolution => cuBLAS (only on Volta, not on Ampere). + if weight_shape[2:] == stride == dilation == (1, 1) and padding == (0, 0) and torch.cuda.get_device_capability(input.device) < (8, 0): + a = weight.reshape(groups, weight_shape[0] // groups, weight_shape[1]) + b = input.reshape(input.shape[0], groups, input.shape[1] // groups, -1) + c = (a.transpose(1, 2) if transpose else a) @ b.permute(1, 2, 0, 3).flatten(2) + c = c.reshape(-1, input.shape[0], *input.shape[2:]).transpose(0, 1) + c = c if bias is None else c + bias.unsqueeze(0).unsqueeze(2).unsqueeze(3) + return c.contiguous(memory_format=(torch.channels_last if input.stride(1) == 1 else torch.contiguous_format)) + + # General case => cuDNN. + if transpose: + return torch.nn.functional.conv_transpose2d(input=input, weight=weight, bias=bias, output_padding=output_padding, **common_kwargs) + return torch.nn.functional.conv2d(input=input, weight=weight, bias=bias, **common_kwargs) + + @staticmethod + def backward(ctx, grad_output): + input, weight = ctx.saved_tensors + input_shape = ctx.input_shape + grad_input = None + grad_weight = None + grad_bias = None + + if ctx.needs_input_grad[0]: + p = calc_output_padding(input_shape=input_shape, output_shape=grad_output.shape) + op = _conv2d_gradfix(transpose=(not transpose), weight_shape=weight_shape, output_padding=p, **common_kwargs) + grad_input = op.apply(grad_output, weight, None) + assert grad_input.shape == input_shape + + if ctx.needs_input_grad[1] and not weight_gradients_disabled: + grad_weight = Conv2dGradWeight.apply(grad_output, input, weight) + assert grad_weight.shape == weight_shape + + if ctx.needs_input_grad[2]: + grad_bias = grad_output.sum([0, 2, 3]) + + return grad_input, grad_weight, grad_bias + + # Gradient with respect to the weights. + class Conv2dGradWeight(torch.autograd.Function): + @staticmethod + def forward(ctx, grad_output, input, weight): + ctx.save_for_backward( + grad_output if input.requires_grad else _null_tensor, + input if grad_output.requires_grad else _null_tensor, + ) + ctx.grad_output_shape = grad_output.shape + ctx.input_shape = input.shape + + # Simple 1x1 convolution => cuBLAS (on both Volta and Ampere). 
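+            # For a 1x1 kernel with unit stride/dilation and zero padding, the weight
+            # gradient reduces to a per-group matrix product between the flattened
+            # grad_output and input, which maps directly onto a cuBLAS GEMM below.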
+ if weight_shape[2:] == stride == dilation == (1, 1) and padding == (0, 0): + a = grad_output.reshape(grad_output.shape[0], groups, grad_output.shape[1] // groups, -1).permute(1, 2, 0, 3).flatten(2) + b = input.reshape(input.shape[0], groups, input.shape[1] // groups, -1).permute(1, 2, 0, 3).flatten(2) + c = (b @ a.transpose(1, 2) if transpose else a @ b.transpose(1, 2)).reshape(weight_shape) + return c.contiguous(memory_format=(torch.channels_last if input.stride(1) == 1 else torch.contiguous_format)) + + # General case => cuDNN. + return torch.ops.aten.convolution_backward(grad_output=grad_output, input=input, weight=weight, bias_sizes=None, stride=stride, padding=padding, dilation=dilation, transposed=transpose, output_padding=output_padding, groups=groups, output_mask=[False, True, False])[1] + + + @staticmethod + def backward(ctx, grad2_grad_weight): + grad_output, input = ctx.saved_tensors + grad_output_shape = ctx.grad_output_shape + input_shape = ctx.input_shape + grad2_grad_output = None + grad2_input = None + + if ctx.needs_input_grad[0]: + grad2_grad_output = Conv2d.apply(input, grad2_grad_weight, None) + assert grad2_grad_output.shape == grad_output_shape + + if ctx.needs_input_grad[1]: + p = calc_output_padding(input_shape=input_shape, output_shape=grad_output_shape) + op = _conv2d_gradfix(transpose=(not transpose), weight_shape=weight_shape, output_padding=p, **common_kwargs) + grad2_input = op.apply(grad_output, grad2_grad_weight, None) + assert grad2_input.shape == input_shape + + return grad2_grad_output, grad2_input + + _conv2d_gradfix_cache[key] = Conv2d + return Conv2d + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/conv2d_resample.py b/ThirdParty/eg3d/torch_utils/ops/conv2d_resample.py new file mode 100644 index 0000000000000000000000000000000000000000..d46f4ddd85606b9032d08efe3556ecad4676cee5 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/conv2d_resample.py @@ -0,0 +1,145 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""2D convolution with optional up/downsampling.""" + +import torch + +from .. import misc +from . import conv2d_gradfix +from . import upfirdn2d +from .upfirdn2d import _parse_padding +from .upfirdn2d import _get_filter_size + +#---------------------------------------------------------------------------- + +def _get_weight_shape(w): + with misc.suppress_tracer_warnings(): # this value will be treated as a constant + shape = [int(sz) for sz in w.shape] + misc.assert_shape(w, shape) + return shape + +#---------------------------------------------------------------------------- + +def _conv2d_wrapper(x, w, stride=1, padding=0, groups=1, transpose=False, flip_weight=True): + """Wrapper for the underlying `conv2d()` and `conv_transpose2d()` implementations. + """ + _out_channels, _in_channels_per_group, kh, kw = _get_weight_shape(w) + + # Flip weight if requested. 
+ # Note: conv2d() actually performs correlation (flip_weight=True) not convolution (flip_weight=False). + if not flip_weight and (kw > 1 or kh > 1): + w = w.flip([2, 3]) + + # Execute using conv2d_gradfix. + op = conv2d_gradfix.conv_transpose2d if transpose else conv2d_gradfix.conv2d + return op(x, w, stride=stride, padding=padding, groups=groups) + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def conv2d_resample(x, w, f=None, up=1, down=1, padding=0, groups=1, flip_weight=True, flip_filter=False): + r"""2D convolution with optional up/downsampling. + + Padding is performed only once at the beginning, not between the operations. + + Args: + x: Input tensor of shape + `[batch_size, in_channels, in_height, in_width]`. + w: Weight tensor of shape + `[out_channels, in_channels//groups, kernel_height, kernel_width]`. + f: Low-pass filter for up/downsampling. Must be prepared beforehand by + calling upfirdn2d.setup_filter(). None = identity (default). + up: Integer upsampling factor (default: 1). + down: Integer downsampling factor (default: 1). + padding: Padding with respect to the upsampled image. Can be a single number + or a list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + groups: Split input channels into N groups (default: 1). + flip_weight: False = convolution, True = correlation (default: True). + flip_filter: False = convolution, True = correlation (default: False). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. + """ + # Validate arguments. + assert isinstance(x, torch.Tensor) and (x.ndim == 4) + assert isinstance(w, torch.Tensor) and (w.ndim == 4) and (w.dtype == x.dtype) + assert f is None or (isinstance(f, torch.Tensor) and f.ndim in [1, 2] and f.dtype == torch.float32) + assert isinstance(up, int) and (up >= 1) + assert isinstance(down, int) and (down >= 1) + assert isinstance(groups, int) and (groups >= 1) + out_channels, in_channels_per_group, kh, kw = _get_weight_shape(w) + fw, fh = _get_filter_size(f) + px0, px1, py0, py1 = _parse_padding(padding) + + # Adjust padding to account for up/downsampling. + if up > 1: + px0 += (fw + up - 1) // 2 + px1 += (fw - up) // 2 + py0 += (fh + up - 1) // 2 + py1 += (fh - up) // 2 + if down > 1: + px0 += (fw - down + 1) // 2 + px1 += (fw - down) // 2 + py0 += (fh - down + 1) // 2 + py1 += (fh - down) // 2 + + # Fast path: 1x1 convolution with downsampling only => downsample first, then convolve. + if kw == 1 and kh == 1 and (down > 1 and up == 1): + x = upfirdn2d.upfirdn2d(x=x, f=f, down=down, padding=[px0,px1,py0,py1], flip_filter=flip_filter) + x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight) + return x + + # Fast path: 1x1 convolution with upsampling only => convolve first, then upsample. + if kw == 1 and kh == 1 and (up > 1 and down == 1): + x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight) + x = upfirdn2d.upfirdn2d(x=x, f=f, up=up, padding=[px0,px1,py0,py1], gain=up**2, flip_filter=flip_filter) + return x + + # Fast path: downsampling only => use strided convolution. + if down > 1 and up == 1: + x = upfirdn2d.upfirdn2d(x=x, f=f, padding=[px0,px1,py0,py1], flip_filter=flip_filter) + x = _conv2d_wrapper(x=x, w=w, stride=down, groups=groups, flip_weight=flip_weight) + return x + + # Fast path: upsampling with optional downsampling => use transpose strided convolution. 
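+    # (Sketch of the idea) The weight is transposed per group so that conv_transpose2d
+    # performs the upsampling, and the requested padding is split between that
+    # transposed convolution and the upfirdn2d filtering step that follows it.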
+    if up > 1:
+        if groups == 1:
+            w = w.transpose(0, 1)
+        else:
+            w = w.reshape(groups, out_channels // groups, in_channels_per_group, kh, kw)
+            w = w.transpose(1, 2)
+            w = w.reshape(groups * in_channels_per_group, out_channels // groups, kh, kw)
+        px0 -= kw - 1
+        px1 -= kw - up
+        py0 -= kh - 1
+        py1 -= kh - up
+        pxt = max(min(-px0, -px1), 0)
+        pyt = max(min(-py0, -py1), 0)
+        x = _conv2d_wrapper(x=x, w=w, stride=up, padding=[pyt,pxt], groups=groups, transpose=True, flip_weight=(not flip_weight))
+        x = upfirdn2d.upfirdn2d(x=x, f=f, padding=[px0+pxt,px1+pxt,py0+pyt,py1+pyt], gain=up**2, flip_filter=flip_filter)
+        if down > 1:
+            x = upfirdn2d.upfirdn2d(x=x, f=f, down=down, flip_filter=flip_filter)
+        return x
+
+    # Fast path: no up/downsampling, padding supported by the underlying implementation => use plain conv2d.
+    if up == 1 and down == 1:
+        if px0 == px1 and py0 == py1 and px0 >= 0 and py0 >= 0:
+            return _conv2d_wrapper(x=x, w=w, padding=[py0,px0], groups=groups, flip_weight=flip_weight)
+
+    # Fallback: Generic reference implementation.
+    x = upfirdn2d.upfirdn2d(x=x, f=(f if up > 1 else None), up=up, padding=[px0,px1,py0,py1], gain=up**2, flip_filter=flip_filter)
+    x = _conv2d_wrapper(x=x, w=w, groups=groups, flip_weight=flip_weight)
+    if down > 1:
+        x = upfirdn2d.upfirdn2d(x=x, f=f, down=down, flip_filter=flip_filter)
+    return x
+
+#----------------------------------------------------------------------------
diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cpp b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..4f55466235a020b0f5e150350bfdcd8b2a1e579d
--- /dev/null
+++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cpp
@@ -0,0 +1,304 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <torch/extension.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include "filtered_lrelu.h"
+
+//------------------------------------------------------------------------
+
+static std::tuple<torch::Tensor, torch::Tensor, int> filtered_lrelu(
+    torch::Tensor x, torch::Tensor fu, torch::Tensor fd, torch::Tensor b, torch::Tensor si,
+    int up, int down, int px0, int px1, int py0, int py1, int sx, int sy, float gain, float slope, float clamp, bool flip_filters, bool writeSigns)
+{
+    // Set CUDA device.
+    TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+    const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+
+    // Validate arguments.
+ TORCH_CHECK(fu.device() == x.device() && fd.device() == x.device() && b.device() == x.device(), "all input tensors must reside on the same device"); + TORCH_CHECK(fu.dtype() == torch::kFloat && fd.dtype() == torch::kFloat, "fu and fd must be float32"); + TORCH_CHECK(b.dtype() == x.dtype(), "x and b must have the same dtype"); + TORCH_CHECK(x.dtype() == torch::kHalf || x.dtype() == torch::kFloat, "x and b must be float16 or float32"); + TORCH_CHECK(x.dim() == 4, "x must be rank 4"); + TORCH_CHECK(x.size(0) * x.size(1) <= INT_MAX && x.size(2) <= INT_MAX && x.size(3) <= INT_MAX, "x is too large"); + TORCH_CHECK(x.numel() > 0, "x is empty"); + TORCH_CHECK((fu.dim() == 1 || fu.dim() == 2) && (fd.dim() == 1 || fd.dim() == 2), "fu and fd must be rank 1 or 2"); + TORCH_CHECK(fu.size(0) <= INT_MAX && fu.size(-1) <= INT_MAX, "fu is too large"); + TORCH_CHECK(fd.size(0) <= INT_MAX && fd.size(-1) <= INT_MAX, "fd is too large"); + TORCH_CHECK(fu.numel() > 0, "fu is empty"); + TORCH_CHECK(fd.numel() > 0, "fd is empty"); + TORCH_CHECK(b.dim() == 1 && b.size(0) == x.size(1), "b must be a vector with the same number of channels as x"); + TORCH_CHECK(up >= 1 && down >= 1, "up and down must be at least 1"); + + // Figure out how much shared memory is available on the device. + int maxSharedBytes = 0; + AT_CUDA_CHECK(cudaDeviceGetAttribute(&maxSharedBytes, cudaDevAttrMaxSharedMemoryPerBlockOptin, x.device().index())); + int sharedKB = maxSharedBytes >> 10; + + // Populate enough launch parameters to check if a CUDA kernel exists. + filtered_lrelu_kernel_params p; + p.up = up; + p.down = down; + p.fuShape = make_int2((int)fu.size(-1), fu.dim() == 2 ? (int)fu.size(0) : 0); // shape [n, 0] indicates separable filter. + p.fdShape = make_int2((int)fd.size(-1), fd.dim() == 2 ? (int)fd.size(0) : 0); + filtered_lrelu_kernel_spec test_spec = choose_filtered_lrelu_kernel(p, sharedKB); + if (!test_spec.exec) + { + // No kernel found - return empty tensors and indicate missing kernel with return code of -1. + return std::make_tuple(torch::Tensor(), torch::Tensor(), -1); + } + + // Input/output element size. + int64_t sz = (x.dtype() == torch::kHalf) ? 2 : 4; + + // Input sizes. + int64_t xw = (int)x.size(3); + int64_t xh = (int)x.size(2); + int64_t fut_w = (int)fu.size(-1) - 1; + int64_t fut_h = (int)fu.size(0) - 1; + int64_t fdt_w = (int)fd.size(-1) - 1; + int64_t fdt_h = (int)fd.size(0) - 1; + + // Logical size of upsampled buffer. + int64_t cw = xw * up + (px0 + px1) - fut_w; + int64_t ch = xh * up + (py0 + py1) - fut_h; + TORCH_CHECK(cw > fdt_w && ch > fdt_h, "upsampled buffer must be at least the size of downsampling filter"); + TORCH_CHECK(cw <= INT_MAX && ch <= INT_MAX, "upsampled buffer is too large"); + + // Compute output size and allocate. + int64_t yw = (cw - fdt_w + (down - 1)) / down; + int64_t yh = (ch - fdt_h + (down - 1)) / down; + TORCH_CHECK(yw > 0 && yh > 0, "output must be at least 1x1"); + TORCH_CHECK(yw <= INT_MAX && yh <= INT_MAX, "output is too large"); + torch::Tensor y = torch::empty({x.size(0), x.size(1), yh, yw}, x.options(), x.suggest_memory_format()); + + // Allocate sign tensor. + torch::Tensor so; + torch::Tensor s = si; + bool readSigns = !!s.numel(); + int64_t sw_active = 0; // Active width of sign tensor. + if (writeSigns) + { + sw_active = yw * down - (down - 1) + fdt_w; // Active width in elements. + int64_t sh = yh * down - (down - 1) + fdt_h; // Height = active height. + int64_t sw = (sw_active + 15) & ~15; // Width = active width in elements, rounded up to multiple of 16. 
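+        // Sign data is stored packed at 2 bits per element, which is why the tensor
+        // allocated below is sw >> 2 bytes wide.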
+ TORCH_CHECK(sh <= INT_MAX && (sw >> 2) <= INT_MAX, "signs is too large"); + s = so = torch::empty({x.size(0), x.size(1), sh, sw >> 2}, x.options().dtype(torch::kUInt8), at::MemoryFormat::Contiguous); + } + else if (readSigns) + sw_active = s.size(3) << 2; + + // Validate sign tensor if in use. + if (readSigns || writeSigns) + { + TORCH_CHECK(s.is_contiguous(), "signs must be contiguous"); + TORCH_CHECK(s.dtype() == torch::kUInt8, "signs must be uint8"); + TORCH_CHECK(s.device() == x.device(), "signs must reside on the same device as x"); + TORCH_CHECK(s.dim() == 4, "signs must be rank 4"); + TORCH_CHECK(s.size(0) == x.size(0) && s.size(1) == x.size(1), "signs must have same batch & channels as x"); + TORCH_CHECK(s.size(2) <= INT_MAX && s.size(3) <= INT_MAX, "signs is too large"); + } + + // Populate rest of CUDA kernel parameters. + p.x = x.data_ptr(); + p.y = y.data_ptr(); + p.b = b.data_ptr(); + p.s = (readSigns || writeSigns) ? s.data_ptr() : 0; + p.fu = fu.data_ptr(); + p.fd = fd.data_ptr(); + p.pad0 = make_int2(px0, py0); + p.gain = gain; + p.slope = slope; + p.clamp = clamp; + p.flip = (flip_filters) ? 1 : 0; + p.xShape = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0)); + p.yShape = make_int4((int)y.size(3), (int)y.size(2), (int)y.size(1), (int)y.size(0)); + p.sShape = (readSigns || writeSigns) ? make_int2((int)s.size(3), (int)s.size(2)) : make_int2(0, 0); // Width is in bytes. Contiguous. + p.sOfs = make_int2(sx, sy); + p.swLimit = (sw_active + 3) >> 2; // Rounded up to bytes. + + // x, y, b strides are in bytes. + p.xStride = make_longlong4(sz * x.stride(3), sz * x.stride(2), sz * x.stride(1), sz * x.stride(0)); + p.yStride = make_longlong4(sz * y.stride(3), sz * y.stride(2), sz * y.stride(1), sz * y.stride(0)); + p.bStride = sz * b.stride(0); + + // fu, fd strides are in elements. + p.fuStride = make_longlong3(fu.stride(-1), fu.dim() == 2 ? fu.stride(0) : 0, 0); + p.fdStride = make_longlong3(fd.stride(-1), fd.dim() == 2 ? fd.stride(0) : 0, 0); + + // Determine if indices don't fit in int32. Support negative strides although Torch currently never produces those. + bool index64b = false; + if (std::abs(p.bStride * x.size(1)) > INT_MAX) index64b = true; + if (std::min(x.size(0) * p.xStride.w, 0ll) + std::min(x.size(1) * p.xStride.z, 0ll) + std::min(x.size(2) * p.xStride.y, 0ll) + std::min(x.size(3) * p.xStride.x, 0ll) < -INT_MAX) index64b = true; + if (std::max(x.size(0) * p.xStride.w, 0ll) + std::max(x.size(1) * p.xStride.z, 0ll) + std::max(x.size(2) * p.xStride.y, 0ll) + std::max(x.size(3) * p.xStride.x, 0ll) > INT_MAX) index64b = true; + if (std::min(y.size(0) * p.yStride.w, 0ll) + std::min(y.size(1) * p.yStride.z, 0ll) + std::min(y.size(2) * p.yStride.y, 0ll) + std::min(y.size(3) * p.yStride.x, 0ll) < -INT_MAX) index64b = true; + if (std::max(y.size(0) * p.yStride.w, 0ll) + std::max(y.size(1) * p.yStride.z, 0ll) + std::max(y.size(2) * p.yStride.y, 0ll) + std::max(y.size(3) * p.yStride.x, 0ll) > INT_MAX) index64b = true; + if (s.numel() > INT_MAX) index64b = true; + + // Choose CUDA kernel. + filtered_lrelu_kernel_spec spec = { 0 }; + AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "filtered_lrelu_cuda", [&] + { + if constexpr (sizeof(scalar_t) <= 4) // Exclude doubles. constexpr prevents template instantiation. + { + // Choose kernel based on index type, datatype and sign read/write modes. 
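+            // Template arguments below are <scalar type, index type, sign write, sign read>.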
+            if      (!index64b &&  writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, true,  false>(p, sharedKB);
+            else if (!index64b && !writeSigns &&  readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, false, true >(p, sharedKB);
+            else if (!index64b && !writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int32_t, false, false>(p, sharedKB);
+            else if ( index64b &&  writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, true,  false>(p, sharedKB);
+            else if ( index64b && !writeSigns &&  readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, false, true >(p, sharedKB);
+            else if ( index64b && !writeSigns && !readSigns) spec = choose_filtered_lrelu_kernel<scalar_t, int64_t, false, false>(p, sharedKB);
+        }
+    });
+    TORCH_CHECK(spec.exec, "internal error - CUDA kernel not found"); // This should not happen because we tested earlier that kernel exists.
+
+    // Launch CUDA kernel.
+    void* args[] = {&p};
+    int bx = spec.numWarps * 32;
+    int gx = (p.yShape.x - 1) / spec.tileOut.x + 1;
+    int gy = (p.yShape.y - 1) / spec.tileOut.y + 1;
+    int gz = p.yShape.z * p.yShape.w;
+
+    // Repeat multiple horizontal tiles in a CTA?
+    if (spec.xrep)
+    {
+        p.tilesXrep = spec.xrep;
+        p.tilesXdim = gx;
+
+        gx = (gx + p.tilesXrep - 1) / p.tilesXrep;
+        std::swap(gx, gy);
+    }
+    else
+    {
+        p.tilesXrep = 0;
+        p.tilesXdim = 0;
+    }
+
+    // Launch filter setup kernel.
+    AT_CUDA_CHECK(cudaLaunchKernel(spec.setup, 1, 1024, args, 0, at::cuda::getCurrentCUDAStream()));
+
+    // Copy kernels to constant memory.
+    if      ( writeSigns && !readSigns) AT_CUDA_CHECK((copy_filters<true,  false>(at::cuda::getCurrentCUDAStream())));
+    else if (!writeSigns &&  readSigns) AT_CUDA_CHECK((copy_filters<false, true >(at::cuda::getCurrentCUDAStream())));
+    else if (!writeSigns && !readSigns) AT_CUDA_CHECK((copy_filters<false, false>(at::cuda::getCurrentCUDAStream())));
+
+    // Set cache and shared memory configurations for main kernel.
+    AT_CUDA_CHECK(cudaFuncSetCacheConfig(spec.exec, cudaFuncCachePreferShared));
+    if (spec.dynamicSharedKB) // Need dynamically allocated shared memory?
+        AT_CUDA_CHECK(cudaFuncSetAttribute(spec.exec, cudaFuncAttributeMaxDynamicSharedMemorySize, spec.dynamicSharedKB << 10));
+    AT_CUDA_CHECK(cudaFuncSetSharedMemConfig(spec.exec, cudaSharedMemBankSizeFourByte));
+
+    // Launch main kernel.
+    const int maxSubGz = 65535; // CUDA maximum for block z dimension.
+    for (int zofs=0; zofs < gz; zofs += maxSubGz) // Do multiple launches if gz is too big.
+    {
+        p.blockZofs = zofs;
+        int subGz = std::min(maxSubGz, gz - zofs);
+        AT_CUDA_CHECK(cudaLaunchKernel(spec.exec, dim3(gx, gy, subGz), bx, args, spec.dynamicSharedKB << 10, at::cuda::getCurrentCUDAStream()));
+    }
+
+    // Done.
+    return std::make_tuple(y, so, 0);
+}
+
+//------------------------------------------------------------------------
+
+static torch::Tensor filtered_lrelu_act(torch::Tensor x, torch::Tensor si, int sx, int sy, float gain, float slope, float clamp, bool writeSigns)
+{
+    // Set CUDA device.
+    TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device");
+    const at::cuda::OptionalCUDAGuard device_guard(device_of(x));
+
+    // Validate arguments.
+    TORCH_CHECK(x.dim() == 4, "x must be rank 4");
+    TORCH_CHECK(x.size(0) * x.size(1) <= INT_MAX && x.size(2) <= INT_MAX && x.size(3) <= INT_MAX, "x is too large");
+    TORCH_CHECK(x.numel() > 0, "x is empty");
+    TORCH_CHECK(x.dtype() == torch::kHalf || x.dtype() == torch::kFloat || x.dtype() == torch::kDouble, "x must be float16, float32 or float64");
+
+    // Output signs if we don't have sign input.
+    torch::Tensor so;
+    torch::Tensor s = si;
+    bool readSigns = !!s.numel();
+    if (writeSigns)
+    {
+        int64_t sw = x.size(3);
+        sw = (sw + 15) & ~15; // Round to a multiple of 16 for coalescing.
+        s = so = torch::empty({x.size(0), x.size(1), x.size(2), sw >> 2}, x.options().dtype(torch::kUInt8), at::MemoryFormat::Contiguous);
+    }
+
+    // Validate sign tensor if in use.
+    if (readSigns || writeSigns)
+    {
+        TORCH_CHECK(s.is_contiguous(), "signs must be contiguous");
+        TORCH_CHECK(s.dtype() == torch::kUInt8, "signs must be uint8");
+        TORCH_CHECK(s.device() == x.device(), "signs must reside on the same device as x");
+        TORCH_CHECK(s.dim() == 4, "signs must be rank 4");
+        TORCH_CHECK(s.size(0) == x.size(0) && s.size(1) == x.size(1), "signs must have same batch & channels as x");
+        TORCH_CHECK(s.size(2) <= INT_MAX && (s.size(3) << 2) <= INT_MAX, "signs tensor is too large");
+    }
+
+    // Initialize CUDA kernel parameters.
+    filtered_lrelu_act_kernel_params p;
+    p.x       = x.data_ptr();
+    p.s       = (readSigns || writeSigns) ? s.data_ptr<unsigned char>() : 0;
+    p.gain    = gain;
+    p.slope   = slope;
+    p.clamp   = clamp;
+    p.xShape  = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0));
+    p.xStride = make_longlong4(x.stride(3), x.stride(2), x.stride(1), x.stride(0));
+    p.sShape  = (readSigns || writeSigns) ? make_int2((int)s.size(3) << 2, (int)s.size(2)) : make_int2(0, 0); // Width is in elements. Contiguous.
+    p.sOfs    = make_int2(sx, sy);
+
+    // Choose CUDA kernel.
+    void* func = 0;
+    AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "filtered_lrelu_act_cuda", [&]
+    {
+        if (writeSigns)
+            func = choose_filtered_lrelu_act_kernel<scalar_t, true, false>();
+        else if (readSigns)
+            func = choose_filtered_lrelu_act_kernel<scalar_t, false, true>();
+        else
+            func = choose_filtered_lrelu_act_kernel<scalar_t, false, false>();
+    });
+    TORCH_CHECK(func, "internal error - CUDA kernel not found");
+
+    // Launch CUDA kernel.
+    void* args[] = {&p};
+    int bx = 128; // 4 warps per block.
+
+    // Logical size of launch = writeSigns ? p.s : p.x
+    uint32_t gx = writeSigns ? p.sShape.x : p.xShape.x;
+    uint32_t gy = writeSigns ? p.sShape.y : p.xShape.y;
+    uint32_t gz = p.xShape.z * p.xShape.w; // Same as in p.sShape if signs are in use.
+    gx = (gx - 1) / bx + 1;
+
+    // Make sure grid y and z dimensions are within CUDA launch limits. Kernel loops internally to do the rest.
+    const uint32_t gmax = 65535;
+    gy = std::min(gy, gmax);
+    gz = std::min(gz, gmax);
+
+    // Launch.
+    AT_CUDA_CHECK(cudaLaunchKernel(func, dim3(gx, gy, gz), bx, args, 0, at::cuda::getCurrentCUDAStream()));
+    return so;
+}
+
+//------------------------------------------------------------------------
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m)
+{
+    m.def("filtered_lrelu",      &filtered_lrelu);      // The whole thing.
+    m.def("filtered_lrelu_act_", &filtered_lrelu_act);  // Activation and sign tensor handling only. Modifies data tensor in-place.
+}
+
+//------------------------------------------------------------------------
diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cu b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cu
new file mode 100644
index 0000000000000000000000000000000000000000..aaac95408365f023ffaa4cb89348d499d3b948f0
--- /dev/null
+++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.cu
@@ -0,0 +1,1288 @@
+/*
+ * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+ * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+ *
+ * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+ * property and proprietary rights in and to this material, related
+ * documentation and any modifications thereto. Any use, reproduction,
+ * disclosure or distribution of this material and related documentation
+ * without an express license agreement from NVIDIA CORPORATION or
+ * its affiliates is strictly prohibited.
+ */
+
+#include <c10/util/Half.h>
+#include "filtered_lrelu.h"
+#include <cstdint>
+
+//------------------------------------------------------------------------
+// Helpers.
+
+enum // Filter modes.
+{
+    MODE_SUSD = 0,  // Separable upsampling, separable downsampling.
+    MODE_FUSD = 1,  // Full upsampling, separable downsampling.
+    MODE_SUFD = 2,  // Separable upsampling, full downsampling.
+    MODE_FUFD = 3,  // Full upsampling, full downsampling.
+};
+
+template <class T> struct InternalType;
+template <> struct InternalType<double>
+{
+    typedef double scalar_t; typedef double2 vec2_t; typedef double4 vec4_t;
+    __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_double2(0, 0); }
+    __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_double4(0, 0, 0, 0); }
+    __device__ __forceinline__ static double clamp(double x, double c) { return fmin(fmax(x, -c), c); }
+};
+template <> struct InternalType<float>
+{
+    typedef float scalar_t; typedef float2 vec2_t; typedef float4 vec4_t;
+    __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_float2(0, 0); }
+    __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_float4(0, 0, 0, 0); }
+    __device__ __forceinline__ static float clamp(float x, float c) { return fminf(fmaxf(x, -c), c); }
+};
+template <> struct InternalType<c10::Half>
+{
+    typedef float scalar_t; typedef float2 vec2_t; typedef float4 vec4_t;
+    __device__ __forceinline__ static vec2_t zero_vec2(void) { return make_float2(0, 0); }
+    __device__ __forceinline__ static vec4_t zero_vec4(void) { return make_float4(0, 0, 0, 0); }
+    __device__ __forceinline__ static float clamp(float x, float c) { return fminf(fmaxf(x, -c), c); }
+};
+
+#define MIN(A, B)       ((A) < (B) ? (A) : (B))
+#define MAX(A, B)       ((A) > (B) ? (A) : (B))
+#define CEIL_DIV(A, B) (((B)==1) ? (A) : \
+                        ((B)==2) ? ((int)((A)+1) >> 1) : \
+                        ((B)==4) ? ((int)((A)+3) >> 2) : \
+                        (((A) + ((A) > 0 ? (B) - 1 : 0)) / (B)))
+
+// This works only up to blocks of size 256 x 256 and for all N that are powers of two.
+template <int N> __device__ __forceinline__ void fast_div_mod(int& x, int& y, unsigned int i)
+{
+    if ((N & (N-1)) && N <= 256)
+        y = (i * ((1<<24)/N + 1)) >> 24; // Assumes N <= 256, i < N*256.
+    else
+        y = i/N;
+
+    x = i - y*N;
+}
+
+// Type cast stride before reading it.
+template <class T> __device__ __forceinline__ T get_stride(const int64_t& x)
+{
+    return *reinterpret_cast<const T*>(&x);
+}
+
+//------------------------------------------------------------------------
+// Filters, setup kernel, copying function.
+
+#define MAX_FILTER_SIZE 32
+
+// Combined up/down filter buffers so that transfer can be done with one copy.
+__device__ float g_fbuf[2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE]; // Filters in global memory, written by setup kernel.
+__device__ __constant__ float c_fbuf[2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE]; // Filters in constant memory, read by main kernel.
+
+// Accessors to combined buffers to index up/down filters individually.
+#define c_fu (c_fbuf)
+#define c_fd (c_fbuf + MAX_FILTER_SIZE * MAX_FILTER_SIZE)
+#define g_fu (g_fbuf)
+#define g_fd (g_fbuf + MAX_FILTER_SIZE * MAX_FILTER_SIZE)
+
+// Set up filters into global memory buffer.
+static __global__ void setup_filters_kernel(filtered_lrelu_kernel_params p)
+{
+    for (int idx = threadIdx.x; idx < MAX_FILTER_SIZE * MAX_FILTER_SIZE; idx += blockDim.x)
+    {
+        int x, y;
+        fast_div_mod<MAX_FILTER_SIZE>(x, y, idx);
+
+        int fu_x = p.flip ? x : (p.fuShape.x - 1 - x);
+        int fu_y = p.flip ? y : (p.fuShape.y - 1 - y);
+        if (p.fuShape.y > 0)
+            g_fu[idx] = (x >= p.fuShape.x || y >= p.fuShape.y) ? 0.0f : p.fu[fu_x * p.fuStride.x + fu_y * p.fuStride.y];
+        else
+            g_fu[idx] = (x >= p.fuShape.x || y > 0) ? 0.0f : p.fu[fu_x * p.fuStride.x];
+
+        int fd_x = p.flip ? x : (p.fdShape.x - 1 - x);
+        int fd_y = p.flip ? y : (p.fdShape.y - 1 - y);
+        if (p.fdShape.y > 0)
+            g_fd[idx] = (x >= p.fdShape.x || y >= p.fdShape.y) ? 0.0f : p.fd[fd_x * p.fdStride.x + fd_y * p.fdStride.y];
+        else
+            g_fd[idx] = (x >= p.fdShape.x || y > 0) ? 0.0f : p.fd[fd_x * p.fdStride.x];
+    }
+}
+
+// Host function to copy filters written by setup kernel into constant buffer for main kernel.
+template <bool, bool> static cudaError_t copy_filters(cudaStream_t stream)
+{
+    void* src = 0;
+    cudaError_t err = cudaGetSymbolAddress(&src, g_fbuf);
+    if (err) return err;
+    return cudaMemcpyToSymbolAsync(c_fbuf, src, 2 * MAX_FILTER_SIZE * MAX_FILTER_SIZE * sizeof(float), 0, cudaMemcpyDeviceToDevice, stream);
+}
+
+//------------------------------------------------------------------------
+// Coordinate spaces:
+// - Relative to input tensor:   inX, inY, tileInX, tileInY
+// - Relative to input tile:     relInX, relInY, tileInW, tileInH
+// - Relative to upsampled tile: relUpX, relUpY, tileUpW, tileUpH
+// - Relative to output tile:    relOutX, relOutY, tileOutW, tileOutH
+// - Relative to output tensor:  outX, outY, tileOutX, tileOutY
+//
+// Relationships between coordinate spaces:
+// - inX = tileInX + relInX
+// - inY = tileInY + relInY
+// - relUpX = relInX * up + phaseInX
+// - relUpY = relInY * up + phaseInY
+// - relUpX = relOutX * down
+// - relUpY = relOutY * down
+// - outX = tileOutX + relOutX
+// - outY = tileOutY + relOutY
+
+extern __shared__ char s_buf_raw[]; // When sharedKB <= 48, allocate shared memory statically inside the kernel, otherwise use the externally allocated shared memory buffer.
+
+template <class T, class index_t, int sharedKB, bool signWrite, bool signRead, int filterMode, int up, int fuSize, int down, int fdSize, int tileOutW, int tileOutH, int threadsPerBlock, bool enableXrep, bool enableWriteSkip>
+static __global__ void filtered_lrelu_kernel(filtered_lrelu_kernel_params p)
+{
+    // Check that we don't try to support non-existing filter modes.
+ static_assert(up == 1 || up == 2 || up == 4, "only up=1, up=2, up=4 scales supported"); + static_assert(down == 1 || down == 2 || down == 4, "only down=1, down=2, down=4 scales supported"); + static_assert(fuSize >= up, "upsampling filter size must be at least upsampling factor"); + static_assert(fdSize >= down, "downsampling filter size must be at least downsampling factor"); + static_assert(fuSize % up == 0, "upsampling filter size must be divisible with upsampling factor"); + static_assert(fdSize % down == 0, "downsampling filter size must be divisible with downsampling factor"); + static_assert(fuSize <= MAX_FILTER_SIZE && fdSize <= MAX_FILTER_SIZE, "filter size greater than MAX_FILTER_SIZE"); + static_assert(up != 1 || (fuSize == 1 && (filterMode == MODE_FUFD || filterMode == MODE_FUSD)), "up=1 supported only for 1x1 full filters"); + static_assert(down != 1 || (fdSize == 1 && (filterMode == MODE_FUFD || filterMode == MODE_SUFD)), "down=1 supported only for 1x1 full filters"); + static_assert(!(up == 4 && (filterMode == MODE_FUFD || filterMode == MODE_FUSD)), "full filters not supported for up=4"); + static_assert(!(down == 4 && (filterMode == MODE_FUFD || filterMode == MODE_SUFD)), "full filters not supported for down=4"); + + // Static definitions. + typedef typename InternalType::scalar_t scalar_t; + typedef typename InternalType::vec2_t vec2_t; + typedef typename InternalType::vec4_t vec4_t; + const int tileUpW = (tileOutW * down + (fdSize - 1) - (down - 1) + 3) & ~3; // Upsampled tile width, rounded up to multiple of 4. + const int tileUpH = tileOutH * down + (fdSize - 1) - (down - 1); // Upsampled tile height. + const int tileInW = CEIL_DIV(tileUpW + (fuSize - 1), up); // Input tile width. + const int tileInH = CEIL_DIV(tileUpH + (fuSize - 1), up); // Input tile height. + const int tileUpH_up = CEIL_DIV(tileUpH, up) * up; // Upsampled tile height rounded up to a multiple of up. + const int tileInH_up = CEIL_DIV(tileUpH_up + (fuSize - 1), up); // For allocations only, to avoid shared memory read overruns with up=2 and up=4. + + // Merge 1x1 downsampling into last upsampling step for upf1 and ups2. + const bool downInline = (down == 1) && ((up == 1 && filterMode == MODE_FUFD) || (up == 2 && filterMode == MODE_SUFD)); + + // Sizes of logical buffers. + const int szIn = tileInH_up * tileInW; + const int szUpX = tileInH_up * tileUpW; + const int szUpXY = downInline ? 0 : (tileUpH * tileUpW); + const int szDownX = tileUpH * tileOutW; + + // Sizes for shared memory arrays. + const int s_buf0_size_base = + (filterMode == MODE_SUSD) ? MAX(szIn, szUpXY) : + (filterMode == MODE_FUSD) ? MAX(szIn, szDownX) : + (filterMode == MODE_SUFD) ? MAX(szIn, szUpXY) : + (filterMode == MODE_FUFD) ? szIn : + -1; + const int s_buf1_size_base = + (filterMode == MODE_SUSD) ? MAX(szUpX, szDownX) : + (filterMode == MODE_FUSD) ? szUpXY : + (filterMode == MODE_SUFD) ? szUpX : + (filterMode == MODE_FUFD) ? szUpXY : + -1; + + // Ensure U128 alignment. + const int s_buf0_size = (s_buf0_size_base + 3) & ~3; + const int s_buf1_size = (s_buf1_size_base + 3) & ~3; + + // Check at compile time that we don't use too much shared memory. + static_assert((s_buf0_size + s_buf1_size) * sizeof(scalar_t) <= (sharedKB << 10), "shared memory overflow"); + + // Declare shared memory arrays. + scalar_t* s_buf0; + scalar_t* s_buf1; + if (sharedKB <= 48) + { + // Allocate shared memory arrays here. + __shared__ scalar_t s_buf0_st[(sharedKB > 48) ? 
(1<<24) : (s_buf0_size + s_buf1_size)]; // Prevent launching if this isn't optimized away when unused. + s_buf0 = s_buf0_st; + s_buf1 = s_buf0 + s_buf0_size; + } + else + { + // Use the dynamically allocated shared memory array. + s_buf0 = (scalar_t*)s_buf_raw; + s_buf1 = s_buf0 + s_buf0_size; + } + + // Pointers to the buffers. + scalar_t* s_tileIn; // Input tile: [relInX * tileInH + relInY] + scalar_t* s_tileUpX; // After horizontal upsampling: [relInY * tileUpW + relUpX] + scalar_t* s_tileUpXY; // After upsampling: [relUpY * tileUpW + relUpX] + scalar_t* s_tileDownX; // After horizontal downsampling: [relUpY * tileOutW + relOutX] + if (filterMode == MODE_SUSD) + { + s_tileIn = s_buf0; + s_tileUpX = s_buf1; + s_tileUpXY = s_buf0; + s_tileDownX = s_buf1; + } + else if (filterMode == MODE_FUSD) + { + s_tileIn = s_buf0; + s_tileUpXY = s_buf1; + s_tileDownX = s_buf0; + } + else if (filterMode == MODE_SUFD) + { + s_tileIn = s_buf0; + s_tileUpX = s_buf1; + s_tileUpXY = s_buf0; + } + else if (filterMode == MODE_FUFD) + { + s_tileIn = s_buf0; + s_tileUpXY = s_buf1; + } + + // Allow large grids in z direction via per-launch offset. + int channelIdx = blockIdx.z + p.blockZofs; + int batchIdx = channelIdx / p.yShape.z; + channelIdx -= batchIdx * p.yShape.z; + + // Offset to output feature map. In bytes. + index_t mapOfsOut = channelIdx * get_stride(p.yStride.z) + batchIdx * get_stride(p.yStride.w); + + // Sign shift amount. + uint32_t signXo = ((threadIdx.x + p.sOfs.x) << 1) & 6; + + // Inner tile loop. + #pragma unroll 1 + for (int tileIdx = 0; !enableXrep || (tileIdx < MIN(p.tilesXrep, p.tilesXdim - p.tilesXrep * blockIdx.y)); tileIdx++) + { + // Locate output tile. + int tileX = enableXrep ? blockIdx.y * p.tilesXrep + tileIdx : blockIdx.x; + int tileOutX = tileX * tileOutW; + int tileOutY = (enableXrep ? blockIdx.x : blockIdx.y) * tileOutH; + + // Locate input tile. + int tmpX = tileOutX * down - p.pad0.x; + int tmpY = tileOutY * down - p.pad0.y; + int tileInX = CEIL_DIV(tmpX, up); + int tileInY = CEIL_DIV(tmpY, up); + const int phaseInX = tileInX * up - tmpX; + const int phaseInY = tileInY * up - tmpY; + + // Extra sync if input and output buffers are the same and we are not on first tile. + if (enableXrep && tileIdx > 0 && (filterMode == MODE_FUSD || (filterMode == MODE_SUFD && !downInline) || (filterMode == MODE_FUFD && downInline))) + __syncthreads(); + + // Load input tile & apply bias. Unrolled. + scalar_t b = (scalar_t)*(const T*)((const char*)p.b + (channelIdx * get_stride(p.bStride))); + index_t mapOfsIn = channelIdx * get_stride(p.xStride.z) + batchIdx * get_stride(p.xStride.w); + int idx = threadIdx.x; + const int loopCountIN = CEIL_DIV(tileInW * tileInH, threadsPerBlock); + #pragma unroll + for (int loop = 0; loop < loopCountIN; loop++) + { + int relInX, relInY; + fast_div_mod(relInX, relInY, idx); + int inX = tileInX + relInX; + int inY = tileInY + relInY; + scalar_t v = 0; + + if ((uint32_t)inX < p.xShape.x && (uint32_t)inY < p.xShape.y) + v = (scalar_t)*((const T*)((const char*)p.x + (inX * get_stride(p.xStride.x) + inY * get_stride(p.xStride.y) + mapOfsIn))) + b; + + bool skip = (loop == loopCountIN-1) && (idx >= tileInW * tileInH); + if (!skip) + s_tileIn[idx] = v; + + idx += threadsPerBlock; + } + + if (filterMode == MODE_SUSD || filterMode == MODE_SUFD) // Separable upsampling filter. + { + // Horizontal upsampling. 
+ __syncthreads(); + if (up == 4) + { + for (int idx = threadIdx.x*up; idx < tileUpW * tileInH; idx += blockDim.x*up) + { + int relUpX0, relInY; + fast_div_mod(relUpX0, relInY, idx); + int relInX0 = relUpX0 / up; + int src0 = relInX0 + tileInW * relInY; + int dst = relInY * tileUpW + relUpX0; + vec4_t v = InternalType::zero_vec4(); + scalar_t a = s_tileIn[src0]; + if (phaseInX == 0) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + v.y += a * (scalar_t)c_fu[step * up + 3]; + v.z += a * (scalar_t)c_fu[step * up + 2]; + v.w += a * (scalar_t)c_fu[step * up + 1]; + } + } + else if (phaseInX == 1) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 1]; + v.y += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + v.z += a * (scalar_t)c_fu[step * up + 3]; + v.w += a * (scalar_t)c_fu[step * up + 2]; + } + } + else if (phaseInX == 2) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 2]; + v.y += a * (scalar_t)c_fu[step * up + 1]; + v.z += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + v.w += a * (scalar_t)c_fu[step * up + 3]; + } + } + else // (phaseInX == 3) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 3]; + v.y += a * (scalar_t)c_fu[step * up + 2]; + v.z += a * (scalar_t)c_fu[step * up + 1]; + v.w += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + } + } + s_tileUpX[dst+0] = v.x; + s_tileUpX[dst+1] = v.y; + s_tileUpX[dst+2] = v.z; + s_tileUpX[dst+3] = v.w; + } + } + else if (up == 2) + { + bool p0 = (phaseInX == 0); + for (int idx = threadIdx.x*up; idx < tileUpW * tileInH; idx += blockDim.x*up) + { + int relUpX0, relInY; + fast_div_mod(relUpX0, relInY, idx); + int relInX0 = relUpX0 / up; + int src0 = relInX0 + tileInW * relInY; + int dst = relInY * tileUpW + relUpX0; + vec2_t v = InternalType::zero_vec2(); + scalar_t a = s_tileIn[src0]; + if (p0) // (phaseInX == 0) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + v.y += a * (scalar_t)c_fu[step * up + 1]; + } + } + else // (phaseInX == 1) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 1]; + v.y += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileIn[src0 + step + 1]; + } + } + s_tileUpX[dst+0] = v.x; + s_tileUpX[dst+1] = v.y; + } + } + + // Vertical upsampling & nonlinearity. + + __syncthreads(); + int groupMask = 15 << ((threadIdx.x & 31) & ~3); + int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH : 0; // Skip already written signs. + int sShapeMaxY = MIN(p.sShape.y, tileOutY * down + tileUpH); // Avoid out-of-tile sign writes. + if (up == 4) + { + minY -= 3; // Adjust according to block height. 
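+            // Each thread below produces four vertically adjacent upsampled rows (v.x .. v.w)
+            // from one column of the horizontally upsampled tile; phaseInY selects which
+            // polyphase of the upsampling filter feeds each row, hence the four unrolled cases.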
+ for (int idx = threadIdx.x; idx < tileUpW * tileUpH_up / up; idx += blockDim.x) + { + int relUpX, relInY0; + fast_div_mod(relUpX, relInY0, idx); + int relUpY0 = relInY0 * up; + int src0 = relInY0 * tileUpW + relUpX; + int dst = relUpY0 * tileUpW + relUpX; + vec4_t v = InternalType::zero_vec4(); + + scalar_t a = s_tileUpX[src0]; + if (phaseInY == 0) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + v.y += a * (scalar_t)c_fu[step * up + 3]; + v.z += a * (scalar_t)c_fu[step * up + 2]; + v.w += a * (scalar_t)c_fu[step * up + 1]; + } + } + else if (phaseInY == 1) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 1]; + v.y += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + v.z += a * (scalar_t)c_fu[step * up + 3]; + v.w += a * (scalar_t)c_fu[step * up + 2]; + } + } + else if (phaseInY == 2) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 2]; + v.y += a * (scalar_t)c_fu[step * up + 1]; + v.z += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + v.w += a * (scalar_t)c_fu[step * up + 3]; + } + } + else // (phaseInY == 3) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 3]; + v.y += a * (scalar_t)c_fu[step * up + 2]; + v.z += a * (scalar_t)c_fu[step * up + 1]; + v.w += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + } + } + + int x = tileOutX * down + relUpX; + int y = tileOutY * down + relUpY0; + int signX = x + p.sOfs.x; + int signY = y + p.sOfs.y; + int signZ = blockIdx.z + p.blockZofs; + int signXb = signX >> 2; + index_t si0 = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ); + index_t si1 = si0 + p.sShape.x; + index_t si2 = si0 + p.sShape.x * 2; + index_t si3 = si0 + p.sShape.x * 3; + + v.x *= (scalar_t)((float)up * (float)up * p.gain); + v.y *= (scalar_t)((float)up * (float)up * p.gain); + v.z *= (scalar_t)((float)up * (float)up * p.gain); + v.w *= (scalar_t)((float)up * (float)up * p.gain); + + if (signWrite) + { + if (!enableWriteSkip) + { + // Determine and write signs. + int sx = __float_as_uint(v.x) >> 31 << 0; + int sy = __float_as_uint(v.y) >> 31 << 8; + int sz = __float_as_uint(v.z) >> 31 << 16; + int sw = __float_as_uint(v.w) >> 31 << 24; + if (sx) v.x *= p.slope; + if (sy) v.y *= p.slope; + if (sz) v.z *= p.slope; + if (sw) v.w *= p.slope; + if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType::clamp(v.x, p.clamp); } + if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType::clamp(v.y, p.clamp); } + if (fabsf(v.z) > p.clamp) { sz = 2 << 16; v.z = InternalType::clamp(v.z, p.clamp); } + if (fabsf(v.w) > p.clamp) { sw = 2 << 24; v.w = InternalType::clamp(v.w, p.clamp); } + + if ((uint32_t)signXb < p.swLimit && signY >= minY) + { + // Combine signs. + uint32_t s = sx + sy + sw + sz; + s <<= (signX & 3) << 1; + s |= __shfl_xor_sync(groupMask, s, 1); + s |= __shfl_xor_sync(groupMask, s, 2); + + // Write signs. 
+ if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); } + if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); } + if ((uint32_t)(signY + 2) < sShapeMaxY) { p.s[si2] = (unsigned char)(s >> 16); } + if ((uint32_t)(signY + 3) < sShapeMaxY) { p.s[si3] = (unsigned char)(s >> 24); } + } + } + else + { + // Determine and write signs. + if ((uint32_t)signXb < p.swLimit && signY >= minY) + { + int sx = __float_as_uint(v.x) >> 31 << 0; + int sy = __float_as_uint(v.y) >> 31 << 8; + int sz = __float_as_uint(v.z) >> 31 << 16; + int sw = __float_as_uint(v.w) >> 31 << 24; + if (sx) v.x *= p.slope; + if (sy) v.y *= p.slope; + if (sz) v.z *= p.slope; + if (sw) v.w *= p.slope; + if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType::clamp(v.x, p.clamp); } + if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType::clamp(v.y, p.clamp); } + if (fabsf(v.z) > p.clamp) { sz = 2 << 16; v.z = InternalType::clamp(v.z, p.clamp); } + if (fabsf(v.w) > p.clamp) { sw = 2 << 24; v.w = InternalType::clamp(v.w, p.clamp); } + + // Combine signs. + uint32_t s = sx + sy + sw + sz; + s <<= (signX & 3) << 1; + s |= __shfl_xor_sync(groupMask, s, 1); + s |= __shfl_xor_sync(groupMask, s, 2); + + // Write signs. + if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); } + if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); } + if ((uint32_t)(signY + 2) < sShapeMaxY) { p.s[si2] = (unsigned char)(s >> 16); } + if ((uint32_t)(signY + 3) < sShapeMaxY) { p.s[si3] = (unsigned char)(s >> 24); } + } + else + { + // Just compute the values. + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + if (v.z < 0.f) v.z *= p.slope; v.z = InternalType::clamp(v.z, p.clamp); + if (v.w < 0.f) v.w *= p.slope; v.w = InternalType::clamp(v.w, p.clamp); + } + } + } + else if (signRead) // Read signs and apply. + { + if ((uint32_t)signXb < p.swLimit) + { + int ss = (signX & 3) << 1; + if ((uint32_t)(signY + 0) < p.sShape.y) { int s = p.s[si0] >> ss; if (s & 1) v.x *= p.slope; if (s & 2) v.x = 0.f; } + if ((uint32_t)(signY + 1) < p.sShape.y) { int s = p.s[si1] >> ss; if (s & 1) v.y *= p.slope; if (s & 2) v.y = 0.f; } + if ((uint32_t)(signY + 2) < p.sShape.y) { int s = p.s[si2] >> ss; if (s & 1) v.z *= p.slope; if (s & 2) v.z = 0.f; } + if ((uint32_t)(signY + 3) < p.sShape.y) { int s = p.s[si3] >> ss; if (s & 1) v.w *= p.slope; if (s & 2) v.w = 0.f; } + } + } + else // Forward pass with no sign write. + { + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + if (v.z < 0.f) v.z *= p.slope; v.z = InternalType::clamp(v.z, p.clamp); + if (v.w < 0.f) v.w *= p.slope; v.w = InternalType::clamp(v.w, p.clamp); + } + + s_tileUpXY[dst + 0 * tileUpW] = v.x; + if (relUpY0 + 1 < tileUpH) s_tileUpXY[dst + 1 * tileUpW] = v.y; + if (relUpY0 + 2 < tileUpH) s_tileUpXY[dst + 2 * tileUpW] = v.z; + if (relUpY0 + 3 < tileUpH) s_tileUpXY[dst + 3 * tileUpW] = v.w; + } + } + else if (up == 2) + { + minY -= 1; // Adjust according to block height. 
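+            // signXo (precomputed from threadIdx.x and the sign offset) is the bit position of
+            // this thread's two sign bits inside the packed sign byte; adjacent threads merge
+            // their bits with __shfl_xor_sync before a single byte is written.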
+ for (int idx = threadIdx.x; idx < tileUpW * tileUpH_up / up; idx += blockDim.x) + { + int relUpX, relInY0; + fast_div_mod(relUpX, relInY0, idx); + int relUpY0 = relInY0 * up; + int src0 = relInY0 * tileUpW + relUpX; + int dst = relUpY0 * tileUpW + relUpX; + vec2_t v = InternalType::zero_vec2(); + + scalar_t a = s_tileUpX[src0]; + if (phaseInY == 0) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + v.y += a * (scalar_t)c_fu[step * up + 1]; + } + } + else // (phaseInY == 1) + { + #pragma unroll + for (int step = 0; step < fuSize / up; step++) + { + v.x += a * (scalar_t)c_fu[step * up + 1]; + v.y += a * (scalar_t)c_fu[step * up + 0]; + a = s_tileUpX[src0 + (step + 1) * tileUpW]; + } + } + + int x = tileOutX * down + relUpX; + int y = tileOutY * down + relUpY0; + int signX = x + p.sOfs.x; + int signY = y + p.sOfs.y; + int signZ = blockIdx.z + p.blockZofs; + int signXb = signX >> 2; + index_t si0 = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ); + index_t si1 = si0 + p.sShape.x; + + v.x *= (scalar_t)((float)up * (float)up * p.gain); + v.y *= (scalar_t)((float)up * (float)up * p.gain); + + if (signWrite) + { + if (!enableWriteSkip) + { + // Determine and write signs. + int sx = __float_as_uint(v.x) >> 31 << 0; + int sy = __float_as_uint(v.y) >> 31 << 8; + if (sx) v.x *= p.slope; + if (sy) v.y *= p.slope; + if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType::clamp(v.x, p.clamp); } + if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType::clamp(v.y, p.clamp); } + + if ((uint32_t)signXb < p.swLimit && signY >= minY) + { + // Combine signs. + int s = sx + sy; + s <<= signXo; + s |= __shfl_xor_sync(groupMask, s, 1); + s |= __shfl_xor_sync(groupMask, s, 2); + + // Write signs. + if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); } + if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); } + } + } + else + { + // Determine and write signs. + if ((uint32_t)signXb < p.swLimit && signY >= minY) + { + int sx = __float_as_uint(v.x) >> 31 << 0; + int sy = __float_as_uint(v.y) >> 31 << 8; + if (sx) v.x *= p.slope; + if (sy) v.y *= p.slope; + if (fabsf(v.x) > p.clamp) { sx = 2 << 0; v.x = InternalType::clamp(v.x, p.clamp); } + if (fabsf(v.y) > p.clamp) { sy = 2 << 8; v.y = InternalType::clamp(v.y, p.clamp); } + + // Combine signs. + int s = sx + sy; + s <<= signXo; + s |= __shfl_xor_sync(groupMask, s, 1); + s |= __shfl_xor_sync(groupMask, s, 2); + + // Write signs. + if ((uint32_t)(signY + 0) < sShapeMaxY) { p.s[si0] = (unsigned char)(s >> 0); } + if ((uint32_t)(signY + 1) < sShapeMaxY) { p.s[si1] = (unsigned char)(s >> 8); } + } + else + { + // Just compute the values. + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + } + } + } + else if (signRead) // Read signs and apply. + { + if ((uint32_t)signXb < p.swLimit) + { + if ((uint32_t)(signY + 0) < p.sShape.y) { int s = p.s[si0] >> signXo; if (s & 1) v.x *= p.slope; if (s & 2) v.x = 0.f; } + if ((uint32_t)(signY + 1) < p.sShape.y) { int s = p.s[si1] >> signXo; if (s & 1) v.y *= p.slope; if (s & 2) v.y = 0.f; } + } + } + else // Forward pass with no sign write. + { + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + } + + if (!downInline) + { + // Write into temporary buffer. 
+ s_tileUpXY[dst] = v.x; + if (relUpY0 < tileUpH - 1) + s_tileUpXY[dst + tileUpW] = v.y; + } + else + { + // Write directly into output buffer. + if ((uint32_t)x < p.yShape.x) + { + int ymax = MIN(p.yShape.y, tileUpH + tileOutY * down); + index_t ofs = x * get_stride(p.yStride.x) + y * get_stride(p.yStride.y) + mapOfsOut; + if ((uint32_t)y + 0 < p.yShape.y) *((T*)((char*)p.y + ofs)) = (T)(v.x * (scalar_t)c_fd[0]); + if ((uint32_t)y + 1 < ymax) *((T*)((char*)p.y + ofs + get_stride(p.yStride.y))) = (T)(v.y * (scalar_t)c_fd[0]); + } + } + } + } + } + else if (filterMode == MODE_FUSD || filterMode == MODE_FUFD) + { + // Full upsampling filter. + + if (up == 2) + { + // 2 x 2-wide. + __syncthreads(); + int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH + p.sOfs.y : 0; // Skip already written signs. + for (int idx = threadIdx.x * 4; idx < tileUpW * tileUpH; idx += blockDim.x * 4) + { + int relUpX0, relUpY0; + fast_div_mod(relUpX0, relUpY0, idx); + int relInX0 = CEIL_DIV(relUpX0 - phaseInX, up); + int relInY0 = CEIL_DIV(relUpY0 - phaseInY, up); + int src0 = relInX0 + tileInW * relInY0; + int tap0y = (relInY0 * up + phaseInY - relUpY0); + + #define X_LOOP(TAPY, PX) \ + for (int sx = 0; sx < fuSize / up; sx++) \ + { \ + v.x += a * (scalar_t)c_fu[(sx * up + (((PX) - 0) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; \ + v.z += b * (scalar_t)c_fu[(sx * up + (((PX) - 0) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; if ((PX) == 0) { a = b; b = s_tileIn[src0 + 2 + sx + sy * tileInW]; } \ + v.y += a * (scalar_t)c_fu[(sx * up + (((PX) - 1) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; \ + v.w += b * (scalar_t)c_fu[(sx * up + (((PX) - 1) & (up - 1))) + (sy * up + (TAPY)) * MAX_FILTER_SIZE]; if ((PX) == 1) { a = b; b = s_tileIn[src0 + 2 + sx + sy * tileInW]; } \ + } + + vec4_t v = InternalType::zero_vec4(); + if (tap0y == 0 && phaseInX == 0) + #pragma unroll + for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1]; + #pragma unroll + X_LOOP(0, 0) } + if (tap0y == 0 && phaseInX == 1) + #pragma unroll + for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1]; + #pragma unroll + X_LOOP(0, 1) } + if (tap0y == 1 && phaseInX == 0) + #pragma unroll + for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1]; + #pragma unroll + X_LOOP(1, 0) } + if (tap0y == 1 && phaseInX == 1) + #pragma unroll + for (int sy = 0; sy < fuSize / up; sy++) { scalar_t a = s_tileIn[src0 + sy * tileInW]; scalar_t b = s_tileIn[src0 + sy * tileInW + 1]; + #pragma unroll + X_LOOP(1, 1) } + + #undef X_LOOP + + int x = tileOutX * down + relUpX0; + int y = tileOutY * down + relUpY0; + int signX = x + p.sOfs.x; + int signY = y + p.sOfs.y; + int signZ = blockIdx.z + p.blockZofs; + int signXb = signX >> 2; + index_t si = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ); + + v.x *= (scalar_t)((float)up * (float)up * p.gain); + v.y *= (scalar_t)((float)up * (float)up * p.gain); + v.z *= (scalar_t)((float)up * (float)up * p.gain); + v.w *= (scalar_t)((float)up * (float)up * p.gain); + + if (signWrite) + { + if (!enableWriteSkip) + { + // Determine and write signs. 
+ int sx = __float_as_uint(v.x) >> 31; + int sy = __float_as_uint(v.y) >> 31; + int sz = __float_as_uint(v.z) >> 31; + int sw = __float_as_uint(v.w) >> 31; + if (sx) v.x *= p.slope; if (fabsf(v.x) > p.clamp) { sx = 2; v.x = InternalType::clamp(v.x, p.clamp); } + if (sy) v.y *= p.slope; if (fabsf(v.y) > p.clamp) { sy = 2; v.y = InternalType::clamp(v.y, p.clamp); } + if (sz) v.z *= p.slope; if (fabsf(v.z) > p.clamp) { sz = 2; v.z = InternalType::clamp(v.z, p.clamp); } + if (sw) v.w *= p.slope; if (fabsf(v.w) > p.clamp) { sw = 2; v.w = InternalType::clamp(v.w, p.clamp); } + + if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY) + { + p.s[si] = sx + (sy << 2) + (sz << 4) + (sw << 6); + } + } + else + { + // Determine and write signs. + if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY) + { + int sx = __float_as_uint(v.x) >> 31; + int sy = __float_as_uint(v.y) >> 31; + int sz = __float_as_uint(v.z) >> 31; + int sw = __float_as_uint(v.w) >> 31; + if (sx) v.x *= p.slope; if (fabsf(v.x) > p.clamp) { sx = 2; v.x = InternalType::clamp(v.x, p.clamp); } + if (sy) v.y *= p.slope; if (fabsf(v.y) > p.clamp) { sy = 2; v.y = InternalType::clamp(v.y, p.clamp); } + if (sz) v.z *= p.slope; if (fabsf(v.z) > p.clamp) { sz = 2; v.z = InternalType::clamp(v.z, p.clamp); } + if (sw) v.w *= p.slope; if (fabsf(v.w) > p.clamp) { sw = 2; v.w = InternalType::clamp(v.w, p.clamp); } + + p.s[si] = sx + (sy << 2) + (sz << 4) + (sw << 6); + } + else + { + // Just compute the values. + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + if (v.z < 0.f) v.z *= p.slope; v.z = InternalType::clamp(v.z, p.clamp); + if (v.w < 0.f) v.w *= p.slope; v.w = InternalType::clamp(v.w, p.clamp); + } + } + } + else if (signRead) // Read sign and apply. + { + if ((uint32_t)signY < p.sShape.y) + { + int s = 0; + if ((uint32_t)signXb < p.swLimit) s = p.s[si]; + if ((uint32_t)signXb + 1 < p.swLimit) s |= p.s[si + 1] << 8; + s >>= (signX & 3) << 1; + if (s & 0x01) v.x *= p.slope; if (s & 0x02) v.x = 0.f; + if (s & 0x04) v.y *= p.slope; if (s & 0x08) v.y = 0.f; + if (s & 0x10) v.z *= p.slope; if (s & 0x20) v.z = 0.f; + if (s & 0x40) v.w *= p.slope; if (s & 0x80) v.w = 0.f; + } + } + else // Forward pass with no sign write. + { + if (v.x < 0.f) v.x *= p.slope; v.x = InternalType::clamp(v.x, p.clamp); + if (v.y < 0.f) v.y *= p.slope; v.y = InternalType::clamp(v.y, p.clamp); + if (v.z < 0.f) v.z *= p.slope; v.z = InternalType::clamp(v.z, p.clamp); + if (v.w < 0.f) v.w *= p.slope; v.w = InternalType::clamp(v.w, p.clamp); + } + + s_tileUpXY[idx + 0] = v.x; + s_tileUpXY[idx + 1] = v.y; + s_tileUpXY[idx + 2] = v.z; + s_tileUpXY[idx + 3] = v.w; + } + } + else if (up == 1) + { + __syncthreads(); + uint32_t groupMask = 15 << ((threadIdx.x & 31) & ~3); + int minY = tileOutY ? (tileOutY - tileOutH) * down + tileUpH : 0; // Skip already written signs. + for (int idx = threadIdx.x; idx < tileUpW * tileUpH; idx += blockDim.x) + { + int relUpX0, relUpY0; + fast_div_mod(relUpX0, relUpY0, idx); + scalar_t v = s_tileIn[idx] * (scalar_t)c_fu[0]; // 1x1 filter. 
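+                // With up == 1 the upsampling convolution reduces to a scale by the single
+                // filter tap; gain, leaky ReLU and the optional clamp are applied to v below,
+                // exactly as in the multi-tap paths.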
+ + int x = tileOutX * down + relUpX0; + int y = tileOutY * down + relUpY0; + int signX = x + p.sOfs.x; + int signY = y + p.sOfs.y; + int signZ = blockIdx.z + p.blockZofs; + int signXb = signX >> 2; + index_t si = signXb + p.sShape.x * (signY + (index_t)p.sShape.y * signZ); + v *= (scalar_t)((float)up * (float)up * p.gain); + + if (signWrite) + { + if (!enableWriteSkip) + { + // Determine and write sign. + uint32_t s = 0; + uint32_t signXbit = (1u << signXo); + if (v < 0.f) + { + s = signXbit; + v *= p.slope; + } + if (fabsf(v) > p.clamp) + { + s = signXbit * 2; + v = InternalType::clamp(v, p.clamp); + } + if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY) + { + s += __shfl_xor_sync(groupMask, s, 1); // Coalesce. + s += __shfl_xor_sync(groupMask, s, 2); // Coalesce. + p.s[si] = s; // Write. + } + } + else + { + // Determine and write sign. + if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y && signY >= minY) + { + uint32_t s = 0; + uint32_t signXbit = (1u << signXo); + if (v < 0.f) + { + s = signXbit; + v *= p.slope; + } + if (fabsf(v) > p.clamp) + { + s = signXbit * 2; + v = InternalType::clamp(v, p.clamp); + } + s += __shfl_xor_sync(groupMask, s, 1); // Coalesce. + s += __shfl_xor_sync(groupMask, s, 2); // Coalesce. + p.s[si] = s; // Write. + } + else + { + // Just compute the value. + if (v < 0.f) v *= p.slope; + v = InternalType::clamp(v, p.clamp); + } + } + } + else if (signRead) + { + // Read sign and apply if within sign tensor bounds. + if ((uint32_t)signXb < p.swLimit && (uint32_t)signY < p.sShape.y) + { + int s = p.s[si]; + s >>= signXo; + if (s & 1) v *= p.slope; + if (s & 2) v = 0.f; + } + } + else // Forward pass with no sign write. + { + if (v < 0.f) v *= p.slope; + v = InternalType::clamp(v, p.clamp); + } + + if (!downInline) // Write into temporary buffer. + s_tileUpXY[idx] = v; + else if ((uint32_t)x < p.yShape.x && (uint32_t)y < p.yShape.y) // Write directly into output buffer + *((T*)((char*)p.y + (x * get_stride(p.yStride.x) + y * get_stride(p.yStride.y) + mapOfsOut))) = (T)(v * (scalar_t)c_fd[0]); + } + } + } + + // Downsampling. + if (filterMode == MODE_SUSD || filterMode == MODE_FUSD) + { + // Horizontal downsampling. + __syncthreads(); + if (down == 4 && tileOutW % 4 == 0) + { + // Calculate 4 pixels at a time. + for (int idx = threadIdx.x * 4; idx < tileOutW * tileUpH; idx += blockDim.x * 4) + { + int relOutX0, relUpY; + fast_div_mod(relOutX0, relUpY, idx); + int relUpX0 = relOutX0 * down; + int src0 = relUpY * tileUpW + relUpX0; + vec4_t v = InternalType::zero_vec4(); + #pragma unroll + for (int step = 0; step < fdSize; step++) + { + v.x += s_tileUpXY[src0 + 0 + step] * (scalar_t)c_fd[step]; + v.y += s_tileUpXY[src0 + 4 + step] * (scalar_t)c_fd[step]; + v.z += s_tileUpXY[src0 + 8 + step] * (scalar_t)c_fd[step]; + v.w += s_tileUpXY[src0 + 12 + step] * (scalar_t)c_fd[step]; + } + s_tileDownX[idx+0] = v.x; + s_tileDownX[idx+1] = v.y; + s_tileDownX[idx+2] = v.z; + s_tileDownX[idx+3] = v.w; + } + } + else if ((down == 2 || down == 4) && (tileOutW % 2 == 0)) + { + // Calculate 2 pixels at a time. 
+ for (int idx = threadIdx.x * 2; idx < tileOutW * tileUpH; idx += blockDim.x * 2) + { + int relOutX0, relUpY; + fast_div_mod(relOutX0, relUpY, idx); + int relUpX0 = relOutX0 * down; + int src0 = relUpY * tileUpW + relUpX0; + vec2_t v = InternalType::zero_vec2(); + #pragma unroll + for (int step = 0; step < fdSize; step++) + { + v.x += s_tileUpXY[src0 + 0 + step] * (scalar_t)c_fd[step]; + v.y += s_tileUpXY[src0 + down + step] * (scalar_t)c_fd[step]; + } + s_tileDownX[idx+0] = v.x; + s_tileDownX[idx+1] = v.y; + } + } + else + { + // Calculate 1 pixel at a time. + for (int idx = threadIdx.x; idx < tileOutW * tileUpH; idx += blockDim.x) + { + int relOutX0, relUpY; + fast_div_mod(relOutX0, relUpY, idx); + int relUpX0 = relOutX0 * down; + int src = relUpY * tileUpW + relUpX0; + scalar_t v = 0.f; + #pragma unroll + for (int step = 0; step < fdSize; step++) + v += s_tileUpXY[src + step] * (scalar_t)c_fd[step]; + s_tileDownX[idx] = v; + } + } + + // Vertical downsampling & store output tile. + __syncthreads(); + for (int idx = threadIdx.x; idx < tileOutW * tileOutH; idx += blockDim.x) + { + int relOutX, relOutY0; + fast_div_mod(relOutX, relOutY0, idx); + int relUpY0 = relOutY0 * down; + int src0 = relUpY0 * tileOutW + relOutX; + scalar_t v = 0; + #pragma unroll + for (int step = 0; step < fdSize; step++) + v += s_tileDownX[src0 + step * tileOutW] * (scalar_t)c_fd[step]; + + int outX = tileOutX + relOutX; + int outY = tileOutY + relOutY0; + + if (outX < p.yShape.x & outY < p.yShape.y) + *((T*)((char*)p.y + (outX * get_stride(p.yStride.x) + outY * get_stride(p.yStride.y) + mapOfsOut))) = (T)v; + } + } + else if (filterMode == MODE_SUFD || filterMode == MODE_FUFD) + { + // Full downsampling filter. + if (down == 2) + { + // 2-wide. + __syncthreads(); + for (int idx = threadIdx.x * 2; idx < tileOutW * tileOutH; idx += blockDim.x * 2) + { + int relOutX0, relOutY0; + fast_div_mod(relOutX0, relOutY0, idx); + int relUpX0 = relOutX0 * down; + int relUpY0 = relOutY0 * down; + int src0 = relUpY0 * tileUpW + relUpX0; + vec2_t v = InternalType::zero_vec2(); + #pragma unroll + for (int sy = 0; sy < fdSize; sy++) + #pragma unroll + for (int sx = 0; sx < fdSize; sx++) + { + v.x += s_tileUpXY[src0 + 0 + sx + sy * tileUpW] * (scalar_t)c_fd[sx + sy * MAX_FILTER_SIZE]; + v.y += s_tileUpXY[src0 + 2 + sx + sy * tileUpW] * (scalar_t)c_fd[sx + sy * MAX_FILTER_SIZE]; + } + + int outX = tileOutX + relOutX0; + int outY = tileOutY + relOutY0; + if ((uint32_t)outY < p.yShape.y) + { + index_t ofs = outX * get_stride(p.yStride.x) + outY * get_stride(p.yStride.y) + mapOfsOut; + if (outX + 0 < p.yShape.x) *((T*)((char*)p.y + ofs)) = (T)v.x; + if (outX + 1 < p.yShape.x) *((T*)((char*)p.y + ofs + get_stride(p.yStride.x))) = (T)v.y; + } + } + } + else if (down == 1 && !downInline) + { + // Thread per pixel. + __syncthreads(); + for (int idx = threadIdx.x; idx < tileOutW * tileOutH; idx += blockDim.x) + { + int relOutX0, relOutY0; + fast_div_mod(relOutX0, relOutY0, idx); + scalar_t v = s_tileUpXY[idx] * (scalar_t)c_fd[0]; // 1x1 filter. + + int outX = tileOutX + relOutX0; + int outY = tileOutY + relOutY0; + if ((uint32_t)outX < p.yShape.x && (uint32_t)outY < p.yShape.y) + *((T*)((char*)p.y + (outX * get_stride(p.yStride.x) + outY * get_stride(p.yStride.y) + mapOfsOut))) = (T)v; + } + } + } + + if (!enableXrep) + break; + } +} + +//------------------------------------------------------------------------ +// Compute activation function and signs for upsampled data tensor, modifying data tensor in-place. 
Used for accelerating the generic variant. +// Sign tensor is known to be contiguous, and p.x and p.s have the same z, w dimensions. 64-bit indexing is always used. + +template +static __global__ void filtered_lrelu_act_kernel(filtered_lrelu_act_kernel_params p) +{ + typedef typename InternalType::scalar_t scalar_t; + + // Indexing. + int32_t x = threadIdx.x + blockIdx.x * blockDim.x; + int32_t ymax = signWrite ? p.sShape.y : p.xShape.y; + int32_t qmax = p.xShape.z * p.xShape.w; // Combined minibatch*channel maximum index. + + // Loop to accommodate oversized tensors. + for (int32_t q = blockIdx.z; q < qmax; q += gridDim.z) + for (int32_t y = blockIdx.y; y < ymax; y += gridDim.y) + { + // Extract z and w (channel, minibatch index). + int32_t w = q / p.xShape.z; + int32_t z = q - w * p.xShape.z; + + // Choose behavior based on sign read/write mode. + if (signWrite) + { + // Process value if in p.x. + uint32_t s = 0; + if (x < p.xShape.x && y < p.xShape.y) + { + int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w; + T* pv = ((T*)p.x) + ix; + scalar_t v = (scalar_t)(*pv); + + // Gain, LReLU, clamp. + v *= p.gain; + if (v < 0.f) + { + v *= p.slope; + s = 1; // Sign. + } + if (fabsf(v) > p.clamp) + { + v = InternalType::clamp(v, p.clamp); + s = 2; // Clamp. + } + + *pv = (T)v; // Write value. + } + + // Coalesce into threads 0 and 16 of warp. + uint32_t m = (threadIdx.x & 16) ? 0xffff0000u : 0x0000ffffu; + s <<= ((threadIdx.x & 15) << 1); // Shift into place. + s |= __shfl_xor_sync(m, s, 1); // Distribute. + s |= __shfl_xor_sync(m, s, 2); + s |= __shfl_xor_sync(m, s, 4); + s |= __shfl_xor_sync(m, s, 8); + + // Write signs if leader and in p.s. + if (!(threadIdx.x & 15) && x < p.sShape.x) // y is always in. + { + uint64_t is = x + p.sShape.x * (y + (int64_t)p.sShape.y * q); // Contiguous. + ((uint32_t*)p.s)[is >> 4] = s; + } + } + else if (signRead) + { + // Process value if in p.x. + if (x < p.xShape.x) // y is always in. + { + int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w; + T* pv = ((T*)p.x) + ix; + scalar_t v = (scalar_t)(*pv); + v *= p.gain; + + // Apply sign buffer offset. + uint32_t sx = x + p.sOfs.x; + uint32_t sy = y + p.sOfs.y; + + // Read and apply signs if we land inside valid region of sign buffer. + if (sx < p.sShape.x && sy < p.sShape.y) + { + uint64_t is = (sx >> 2) + (p.sShape.x >> 2) * (sy + (uint64_t)p.sShape.y * q); // Contiguous. + unsigned char s = p.s[is]; + s >>= (sx & 3) << 1; // Shift into place. + if (s & 1) // Sign? + v *= p.slope; + if (s & 2) // Clamp? + v = 0.f; + } + + *pv = (T)v; // Write value. + } + } + else + { + // Forward pass with no sign write. Process value if in p.x. + if (x < p.xShape.x) // y is always in. + { + int64_t ix = x * p.xStride.x + y * p.xStride.y + z * p.xStride.z + w * p.xStride.w; + T* pv = ((T*)p.x) + ix; + scalar_t v = (scalar_t)(*pv); + v *= p.gain; + if (v < 0.f) + v *= p.slope; + if (fabsf(v) > p.clamp) + v = InternalType::clamp(v, p.clamp); + *pv = (T)v; // Write value. + } + } + } +} + +template void* choose_filtered_lrelu_act_kernel(void) +{ + return (void*)filtered_lrelu_act_kernel; +} + +//------------------------------------------------------------------------ +// CUDA kernel selection. + +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB) +{ + filtered_lrelu_kernel_spec s = { 0 }; + + // Return the first matching kernel. 
+#define CASE(SH, U, FU, D, FD, MODE, TW, TH, W, XR, WS) \ + if (sharedKB >= SH) \ + if ((p.fuShape.y == 0 && (MODE == MODE_SUSD || MODE == MODE_SUFD)) || (p.fuShape.y > 0 && (MODE == MODE_FUSD || MODE == MODE_FUFD))) \ + if ((p.fdShape.y == 0 && (MODE == MODE_SUSD || MODE == MODE_FUSD)) || (p.fdShape.y > 0 && (MODE == MODE_SUFD || MODE == MODE_FUFD))) \ + if (p.up == U && p.fuShape.x <= FU && p.fuShape.y <= FU && p.down == D && p.fdShape.x <= FD && p.fdShape.y <= FD) \ + { \ + static_assert((D*TW % 4) == 0, "down * tileWidth must be divisible by 4"); \ + static_assert(FU % U == 0, "upscaling filter size must be multiple of upscaling factor"); \ + static_assert(FD % D == 0, "downscaling filter size must be multiple of downscaling factor"); \ + s.setup = (void*)setup_filters_kernel; \ + s.exec = (void*)filtered_lrelu_kernel; \ + s.tileOut = make_int2(TW, TH); \ + s.numWarps = W; \ + s.xrep = XR; \ + s.dynamicSharedKB = (SH == 48) ? 0 : SH; \ + return s; \ + } + + // Launch parameters for various kernel specializations. + // Small filters must be listed before large filters, otherwise the kernel for larger filter will always match first. + // Kernels that use more shared memory must be listed before those that use less, for the same reason. + + CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/1,1, /*mode*/MODE_FUFD, /*tw,th,warps,xrep,wskip*/64, 178, 32, 0, 0) // 1t-upf1-downf1 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/152, 95, 16, 0, 0) // 4t-ups2-downf1 + CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,8, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/56, 22, 16, 0, 0) // 4t-upf1-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/56, 29, 16, 11, 0) // 4t-ups2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/60, 28, 16, 0, 0) // 4t-upf2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/2,8, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/56, 28, 16, 0, 0) // 4t-ups2-downf2 + CASE(/*sharedKB*/48, /*up,fu*/4,16, /*down,fd*/2,8, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/56, 31, 16, 11, 0) // 4t-ups4-downs2 + CASE(/*sharedKB*/48, /*up,fu*/4,16, /*down,fd*/2,8, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/56, 36, 16, 0, 0) // 4t-ups4-downf2 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/4,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 22, 16, 12, 0) // 4t-ups2-downs4 + CASE(/*sharedKB*/48, /*up,fu*/2,8, /*down,fd*/4,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/29, 15, 16, 0, 0) // 4t-upf2-downs4 + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/96, 150, 28, 0, 0) // 6t-ups2-downf1 + CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,12, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/32, 35, 24, 0, 0) // 6t-upf1-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 46, 16, 10, 0) // 6t-ups2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/58, 28, 24, 8, 0) // 6t-upf2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/2,12, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/52, 28, 16, 0, 0) // 6t-ups2-downf2 + CASE(/*sharedKB*/48, /*up,fu*/4,24, /*down,fd*/2,12, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 51, 16, 5, 0) // 6t-ups4-downs2 + CASE(/*sharedKB*/48, /*up,fu*/4,24, /*down,fd*/2,12, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 56, 16, 6, 0) // 
6t-ups4-downf2 + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 18, 16, 12, 0) // 6t-ups2-downs4 + CASE(/*sharedKB*/96, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/27, 31, 32, 6, 0) // 6t-upf2-downs4 96kB + CASE(/*sharedKB*/48, /*up,fu*/2,12, /*down,fd*/4,24, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/27, 13, 24, 0, 0) // 6t-upf2-downs4 + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/1,1, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/148, 89, 24, 0, 0) // 8t-ups2-downf1 + CASE(/*sharedKB*/48, /*up,fu*/1,1, /*down,fd*/2,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/32, 31, 16, 5, 0) // 8t-upf1-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 41, 16, 9, 0) // 8t-ups2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/56, 26, 24, 0, 0) // 8t-upf2-downs2 + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/2,16, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 40, 16, 0, 0) // 8t-ups2-downf2 + CASE(/*sharedKB*/48, /*up,fu*/4,32, /*down,fd*/2,16, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/32, 46, 24, 5, 0) // 8t-ups4-downs2 + CASE(/*sharedKB*/48, /*up,fu*/4,32, /*down,fd*/2,16, /*mode*/MODE_SUFD, /*tw,th,warps,xrep,wskip*/32, 50, 16, 0, 0) // 8t-ups4-downf2 + CASE(/*sharedKB*/96, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/24, 24, 32, 12, 1) // 8t-ups2-downs4 96kB + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_SUSD, /*tw,th,warps,xrep,wskip*/16, 13, 16, 10, 1) // 8t-ups2-downs4 + CASE(/*sharedKB*/96, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/25, 28, 28, 4, 0) // 8t-upf2-downs4 96kB + CASE(/*sharedKB*/48, /*up,fu*/2,16, /*down,fd*/4,32, /*mode*/MODE_FUSD, /*tw,th,warps,xrep,wskip*/25, 10, 24, 0, 0) // 8t-upf2-downs4 + + #undef CASE + return s; // No kernel found. +} + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.h b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.h new file mode 100644 index 0000000000000000000000000000000000000000..f2bfd1dd537909de9cd3b14765a482056391683b --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.h @@ -0,0 +1,94 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include + +//------------------------------------------------------------------------ +// CUDA kernel parameters. + +struct filtered_lrelu_kernel_params +{ + // These parameters decide which kernel to use. + int up; // upsampling ratio (1, 2, 4) + int down; // downsampling ratio (1, 2, 4) + int2 fuShape; // [size, 1] | [size, size] + int2 fdShape; // [size, 1] | [size, size] + + int _dummy; // Alignment. + + // Rest of the parameters. + const void* x; // Input tensor. + void* y; // Output tensor. + const void* b; // Bias tensor. + unsigned char* s; // Sign tensor in/out. NULL if unused. 
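+    // Sign encoding: two bits per upsampled element (bit 0 = negative pre-activation,
+    // bit 1 = clamped), packed four elements per byte.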
+ const float* fu; // Upsampling filter. + const float* fd; // Downsampling filter. + + int2 pad0; // Left/top padding. + float gain; // Additional gain factor. + float slope; // Leaky ReLU slope on negative side. + float clamp; // Clamp after nonlinearity. + int flip; // Filter kernel flip for gradient computation. + + int tilesXdim; // Original number of horizontal output tiles. + int tilesXrep; // Number of horizontal tiles per CTA. + int blockZofs; // Block z offset to support large minibatch, channel dimensions. + + int4 xShape; // [width, height, channel, batch] + int4 yShape; // [width, height, channel, batch] + int2 sShape; // [width, height] - width is in bytes. Contiguous. Zeros if unused. + int2 sOfs; // [ofs_x, ofs_y] - offset between upsampled data and sign tensor. + int swLimit; // Active width of sign tensor in bytes. + + longlong4 xStride; // Strides of all tensors except signs, same component order as shapes. + longlong4 yStride; // + int64_t bStride; // + longlong3 fuStride; // + longlong3 fdStride; // +}; + +struct filtered_lrelu_act_kernel_params +{ + void* x; // Input/output, modified in-place. + unsigned char* s; // Sign tensor in/out. NULL if unused. + + float gain; // Additional gain factor. + float slope; // Leaky ReLU slope on negative side. + float clamp; // Clamp after nonlinearity. + + int4 xShape; // [width, height, channel, batch] + longlong4 xStride; // Input/output tensor strides, same order as in shape. + int2 sShape; // [width, height] - width is in elements. Contiguous. Zeros if unused. + int2 sOfs; // [ofs_x, ofs_y] - offset between upsampled data and sign tensor. +}; + +//------------------------------------------------------------------------ +// CUDA kernel specialization. + +struct filtered_lrelu_kernel_spec +{ + void* setup; // Function for filter kernel setup. + void* exec; // Function for main operation. + int2 tileOut; // Width/height of launch tile. + int numWarps; // Number of warps per thread block, determines launch block size. + int xrep; // For processing multiple horizontal tiles per thread block. + int dynamicSharedKB; // How much dynamic shared memory the exec kernel wants. +}; + +//------------------------------------------------------------------------ +// CUDA kernel selection. + +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template void* choose_filtered_lrelu_act_kernel(void); +template cudaError_t copy_filters(cudaStream_t stream); + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.py b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.py new file mode 100644 index 0000000000000000000000000000000000000000..2047b7e19320e8d03e444ca1cb03fe00d0c5e96e --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu.py @@ -0,0 +1,276 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import os +import numpy as np +import torch +import warnings + +from .. 
import custom_ops +from .. import misc +from . import upfirdn2d +from . import bias_act + +#---------------------------------------------------------------------------- + +_plugin = None + +def _init(): + global _plugin + if _plugin is None: + _plugin = custom_ops.get_plugin( + module_name='filtered_lrelu_plugin', + sources=['filtered_lrelu.cpp', 'filtered_lrelu_wr.cu', 'filtered_lrelu_rd.cu', 'filtered_lrelu_ns.cu'], + headers=['filtered_lrelu.h', 'filtered_lrelu.cu'], + source_dir=os.path.dirname(__file__), + extra_cuda_cflags=['--use_fast_math'], + ) + return True + +def _get_filter_size(f): + if f is None: + return 1, 1 + assert isinstance(f, torch.Tensor) + assert 1 <= f.ndim <= 2 + return f.shape[-1], f.shape[0] # width, height + +def _parse_padding(padding): + if isinstance(padding, int): + padding = [padding, padding] + assert isinstance(padding, (list, tuple)) + assert all(isinstance(x, (int, np.integer)) for x in padding) + padding = [int(x) for x in padding] + if len(padding) == 2: + px, py = padding + padding = [px, px, py, py] + px0, px1, py0, py1 = padding + return px0, px1, py0, py1 + +#---------------------------------------------------------------------------- + +def filtered_lrelu(x, fu=None, fd=None, b=None, up=1, down=1, padding=0, gain=np.sqrt(2), slope=0.2, clamp=None, flip_filter=False, impl='cuda'): + r"""Filtered leaky ReLU for a batch of 2D images. + + Performs the following sequence of operations for each channel: + + 1. Add channel-specific bias if provided (`b`). + + 2. Upsample the image by inserting N-1 zeros after each pixel (`up`). + + 3. Pad the image with the specified number of zeros on each side (`padding`). + Negative padding corresponds to cropping the image. + + 4. Convolve the image with the specified upsampling FIR filter (`fu`), shrinking it + so that the footprint of all output pixels lies within the input image. + + 5. Multiply each value by the provided gain factor (`gain`). + + 6. Apply leaky ReLU activation function to each value. + + 7. Clamp each value between -clamp and +clamp, if `clamp` parameter is provided. + + 8. Convolve the image with the specified downsampling FIR filter (`fd`), shrinking + it so that the footprint of all output pixels lies within the input image. + + 9. Downsample the image by keeping every Nth pixel (`down`). + + The fused op is considerably more efficient than performing the same calculation + using standard PyTorch ops. It supports gradients of arbitrary order. + + Args: + x: Float32/float16/float64 input tensor of the shape + `[batch_size, num_channels, in_height, in_width]`. + fu: Float32 upsampling FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + fd: Float32 downsampling FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + b: Bias vector, or `None` to disable. Must be a 1D tensor of the same type + as `x`. The length of vector must must match the channel dimension of `x`. + up: Integer upsampling factor (default: 1). + down: Integer downsampling factor. (default: 1). + padding: Padding with respect to the upsampled image. Can be a single number + or a list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + gain: Overall scaling factor for signal magnitude (default: sqrt(2)). + slope: Slope on the negative side of leaky ReLU (default: 0.2). + clamp: Maximum magnitude for leaky ReLU output (default: None). 
+ flip_filter: False = convolution, True = correlation (default: False). + impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. + """ + assert isinstance(x, torch.Tensor) + assert impl in ['ref', 'cuda'] + if impl == 'cuda' and x.device.type == 'cuda' and _init(): + return _filtered_lrelu_cuda(up=up, down=down, padding=padding, gain=gain, slope=slope, clamp=clamp, flip_filter=flip_filter).apply(x, fu, fd, b, None, 0, 0) + return _filtered_lrelu_ref(x, fu=fu, fd=fd, b=b, up=up, down=down, padding=padding, gain=gain, slope=slope, clamp=clamp, flip_filter=flip_filter) + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def _filtered_lrelu_ref(x, fu=None, fd=None, b=None, up=1, down=1, padding=0, gain=np.sqrt(2), slope=0.2, clamp=None, flip_filter=False): + """Slow and memory-inefficient reference implementation of `filtered_lrelu()` using + existing `upfirdn2n()` and `bias_act()` ops. + """ + assert isinstance(x, torch.Tensor) and x.ndim == 4 + fu_w, fu_h = _get_filter_size(fu) + fd_w, fd_h = _get_filter_size(fd) + if b is not None: + assert isinstance(b, torch.Tensor) and b.dtype == x.dtype + misc.assert_shape(b, [x.shape[1]]) + assert isinstance(up, int) and up >= 1 + assert isinstance(down, int) and down >= 1 + px0, px1, py0, py1 = _parse_padding(padding) + assert gain == float(gain) and gain > 0 + assert slope == float(slope) and slope >= 0 + assert clamp is None or (clamp == float(clamp) and clamp >= 0) + + # Calculate output size. + batch_size, channels, in_h, in_w = x.shape + in_dtype = x.dtype + out_w = (in_w * up + (px0 + px1) - (fu_w - 1) - (fd_w - 1) + (down - 1)) // down + out_h = (in_h * up + (py0 + py1) - (fu_h - 1) - (fd_h - 1) + (down - 1)) // down + + # Compute using existing ops. + x = bias_act.bias_act(x=x, b=b) # Apply bias. + x = upfirdn2d.upfirdn2d(x=x, f=fu, up=up, padding=[px0, px1, py0, py1], gain=up**2, flip_filter=flip_filter) # Upsample. + x = bias_act.bias_act(x=x, act='lrelu', alpha=slope, gain=gain, clamp=clamp) # Bias, leaky ReLU, clamp. + x = upfirdn2d.upfirdn2d(x=x, f=fd, down=down, flip_filter=flip_filter) # Downsample. + + # Check output shape & dtype. + misc.assert_shape(x, [batch_size, channels, out_h, out_w]) + assert x.dtype == in_dtype + return x + +#---------------------------------------------------------------------------- + +_filtered_lrelu_cuda_cache = dict() + +def _filtered_lrelu_cuda(up=1, down=1, padding=0, gain=np.sqrt(2), slope=0.2, clamp=None, flip_filter=False): + """Fast CUDA implementation of `filtered_lrelu()` using custom ops. + """ + assert isinstance(up, int) and up >= 1 + assert isinstance(down, int) and down >= 1 + px0, px1, py0, py1 = _parse_padding(padding) + assert gain == float(gain) and gain > 0 + gain = float(gain) + assert slope == float(slope) and slope >= 0 + slope = float(slope) + assert clamp is None or (clamp == float(clamp) and clamp >= 0) + clamp = float(clamp if clamp is not None else 'inf') + + # Lookup from cache. + key = (up, down, px0, px1, py0, py1, gain, slope, clamp, flip_filter) + if key in _filtered_lrelu_cuda_cache: + return _filtered_lrelu_cuda_cache[key] + + # Forward op. 
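+    # Note: the class below is specialized for one fixed combination of
+    # (up, down, padding, gain, slope, clamp, flip_filter); _filtered_lrelu_cuda()
+    # caches one such class per combination in _filtered_lrelu_cuda_cache, so
+    # repeated calls with identical settings reuse it. Call pattern, mirroring
+    # filtered_lrelu() above (illustrative only, not additional API):
+    #   _filtered_lrelu_cuda(up=2, down=2, padding=1).apply(x, fu, fd, b, None, 0, 0)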
+ class FilteredLReluCuda(torch.autograd.Function): + @staticmethod + def forward(ctx, x, fu, fd, b, si, sx, sy): # pylint: disable=arguments-differ + assert isinstance(x, torch.Tensor) and x.ndim == 4 + + # Replace empty up/downsample kernels with full 1x1 kernels (faster than separable). + if fu is None: + fu = torch.ones([1, 1], dtype=torch.float32, device=x.device) + if fd is None: + fd = torch.ones([1, 1], dtype=torch.float32, device=x.device) + assert 1 <= fu.ndim <= 2 + assert 1 <= fd.ndim <= 2 + + # Replace separable 1x1 kernels with full 1x1 kernels when scale factor is 1. + if up == 1 and fu.ndim == 1 and fu.shape[0] == 1: + fu = fu.square()[None] + if down == 1 and fd.ndim == 1 and fd.shape[0] == 1: + fd = fd.square()[None] + + # Missing sign input tensor. + if si is None: + si = torch.empty([0]) + + # Missing bias tensor. + if b is None: + b = torch.zeros([x.shape[1]], dtype=x.dtype, device=x.device) + + # Construct internal sign tensor only if gradients are needed. + write_signs = (si.numel() == 0) and (x.requires_grad or b.requires_grad) + + # Warn if input storage strides are not in decreasing order due to e.g. channels-last layout. + strides = [x.stride(i) for i in range(x.ndim) if x.size(i) > 1] + if any(a < b for a, b in zip(strides[:-1], strides[1:])): + warnings.warn("low-performance memory layout detected in filtered_lrelu input", RuntimeWarning) + + # Call C++/Cuda plugin if datatype is supported. + if x.dtype in [torch.float16, torch.float32]: + if torch.cuda.current_stream(x.device) != torch.cuda.default_stream(x.device): + warnings.warn("filtered_lrelu called with non-default cuda stream but concurrent execution is not supported", RuntimeWarning) + y, so, return_code = _plugin.filtered_lrelu(x, fu, fd, b, si, up, down, px0, px1, py0, py1, sx, sy, gain, slope, clamp, flip_filter, write_signs) + else: + return_code = -1 + + # No Cuda kernel found? Fall back to generic implementation. Still more memory efficient than the reference implementation because + # only the bit-packed sign tensor is retained for gradient computation. + if return_code < 0: + warnings.warn("filtered_lrelu called with parameters that have no optimized CUDA kernel, using generic fallback", RuntimeWarning) + + y = x.add(b.unsqueeze(-1).unsqueeze(-1)) # Add bias. + y = upfirdn2d.upfirdn2d(x=y, f=fu, up=up, padding=[px0, px1, py0, py1], gain=up**2, flip_filter=flip_filter) # Upsample. + so = _plugin.filtered_lrelu_act_(y, si, sx, sy, gain, slope, clamp, write_signs) # Activation function and sign handling. Modifies y in-place. + y = upfirdn2d.upfirdn2d(x=y, f=fd, down=down, flip_filter=flip_filter) # Downsample. + + # Prepare for gradient computation. 
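+            # Note: only the two FIR filters and the bit-packed sign tensor are saved
+            # for backward; the activations themselves are not retained, which is the
+            # memory advantage over the reference implementation.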
+ ctx.save_for_backward(fu, fd, (si if si.numel() else so)) + ctx.x_shape = x.shape + ctx.y_shape = y.shape + ctx.s_ofs = sx, sy + return y + + @staticmethod + def backward(ctx, dy): # pylint: disable=arguments-differ + fu, fd, si = ctx.saved_tensors + _, _, xh, xw = ctx.x_shape + _, _, yh, yw = ctx.y_shape + sx, sy = ctx.s_ofs + dx = None # 0 + dfu = None; assert not ctx.needs_input_grad[1] + dfd = None; assert not ctx.needs_input_grad[2] + db = None # 3 + dsi = None; assert not ctx.needs_input_grad[4] + dsx = None; assert not ctx.needs_input_grad[5] + dsy = None; assert not ctx.needs_input_grad[6] + + if ctx.needs_input_grad[0] or ctx.needs_input_grad[3]: + pp = [ + (fu.shape[-1] - 1) + (fd.shape[-1] - 1) - px0, + xw * up - yw * down + px0 - (up - 1), + (fu.shape[0] - 1) + (fd.shape[0] - 1) - py0, + xh * up - yh * down + py0 - (up - 1), + ] + gg = gain * (up ** 2) / (down ** 2) + ff = (not flip_filter) + sx = sx - (fu.shape[-1] - 1) + px0 + sy = sy - (fu.shape[0] - 1) + py0 + dx = _filtered_lrelu_cuda(up=down, down=up, padding=pp, gain=gg, slope=slope, clamp=None, flip_filter=ff).apply(dy, fd, fu, None, si, sx, sy) + + if ctx.needs_input_grad[3]: + db = dx.sum([0, 2, 3]) + + return dx, dfu, dfd, db, dsi, dsx, dsy + + # Add to cache. + _filtered_lrelu_cuda_cache[key] = FilteredLReluCuda + return FilteredLReluCuda + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_ns.cu b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_ns.cu new file mode 100644 index 0000000000000000000000000000000000000000..8a3eae46215c3babea2c54e3ae255b05f4d777af --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_ns.cu @@ -0,0 +1,31 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include "filtered_lrelu.cu" + +// Template/kernel specializations for no signs mode (no gradients required). + +// Full op, 32-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Full op, 64-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Activation/signs only for generic variant. 64-bit indexing. +template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); + +// Copy filters to constant memory. 
+template cudaError_t copy_filters(cudaStream_t stream); diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_rd.cu b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_rd.cu new file mode 100644 index 0000000000000000000000000000000000000000..3cd43ec0648d3db05e5808299fc0ee318e5ceaa6 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_rd.cu @@ -0,0 +1,31 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include "filtered_lrelu.cu" + +// Template/kernel specializations for sign read mode. + +// Full op, 32-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Full op, 64-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Activation/signs only for generic variant. 64-bit indexing. +template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); + +// Copy filters to constant memory. +template cudaError_t copy_filters(cudaStream_t stream); diff --git a/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_wr.cu b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_wr.cu new file mode 100644 index 0000000000000000000000000000000000000000..bc2fa06912eb703dd77ca64533208428bdf373ac --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/filtered_lrelu_wr.cu @@ -0,0 +1,31 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include "filtered_lrelu.cu" + +// Template/kernel specializations for sign write mode. + +// Full op, 32-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Full op, 64-bit indexing. +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); +template filtered_lrelu_kernel_spec choose_filtered_lrelu_kernel(const filtered_lrelu_kernel_params& p, int sharedKB); + +// Activation/signs only for generic variant. 64-bit indexing. 
+template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); +template void* choose_filtered_lrelu_act_kernel(void); + +// Copy filters to constant memory. +template cudaError_t copy_filters(cudaStream_t stream); diff --git a/ThirdParty/eg3d/torch_utils/ops/fma.py b/ThirdParty/eg3d/torch_utils/ops/fma.py new file mode 100644 index 0000000000000000000000000000000000000000..5458116d0b6f8b133608456bbe9003aa0283ac85 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/fma.py @@ -0,0 +1,62 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Fused multiply-add, with slightly faster gradients than `torch.addcmul()`.""" + +import torch + +#---------------------------------------------------------------------------- + +def fma(a, b, c): # => a * b + c + return _FusedMultiplyAdd.apply(a, b, c) + +#---------------------------------------------------------------------------- + +class _FusedMultiplyAdd(torch.autograd.Function): # a * b + c + @staticmethod + def forward(ctx, a, b, c): # pylint: disable=arguments-differ + out = torch.addcmul(c, a, b) + ctx.save_for_backward(a, b) + ctx.c_shape = c.shape + return out + + @staticmethod + def backward(ctx, dout): # pylint: disable=arguments-differ + a, b = ctx.saved_tensors + c_shape = ctx.c_shape + da = None + db = None + dc = None + + if ctx.needs_input_grad[0]: + da = _unbroadcast(dout * b, a.shape) + + if ctx.needs_input_grad[1]: + db = _unbroadcast(dout * a, b.shape) + + if ctx.needs_input_grad[2]: + dc = _unbroadcast(dout, c_shape) + + return da, db, dc + +#---------------------------------------------------------------------------- + +def _unbroadcast(x, shape): + extra_dims = x.ndim - len(shape) + assert extra_dims >= 0 + dim = [i for i in range(x.ndim) if x.shape[i] > 1 and (i < extra_dims or shape[i - extra_dims] == 1)] + if len(dim): + x = x.sum(dim=dim, keepdim=True) + if extra_dims: + x = x.reshape(-1, *x.shape[extra_dims+1:]) + assert x.shape == shape + return x + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/grid_sample_gradfix.py b/ThirdParty/eg3d/torch_utils/ops/grid_sample_gradfix.py new file mode 100644 index 0000000000000000000000000000000000000000..35d94724136ba162d8416803b1ad00d6da0db99f --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/grid_sample_gradfix.py @@ -0,0 +1,79 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +"""Custom replacement for `torch.nn.functional.grid_sample` that +supports arbitrarily high order gradients between the input and output. +Only works on 2D images and assumes +`mode='bilinear'`, `padding_mode='zeros'`, `align_corners=False`.""" + +import torch + +# pylint: disable=redefined-builtin +# pylint: disable=arguments-differ +# pylint: disable=protected-access + +#---------------------------------------------------------------------------- + +enabled = False # Enable the custom op by setting this to true. + +#---------------------------------------------------------------------------- + +def grid_sample(input, grid): + if _should_use_custom_op(): + return _GridSample2dForward.apply(input, grid) + return torch.nn.functional.grid_sample(input=input, grid=grid, mode='bilinear', padding_mode='zeros', align_corners=False) + +#---------------------------------------------------------------------------- + +def _should_use_custom_op(): + return enabled + +#---------------------------------------------------------------------------- + +class _GridSample2dForward(torch.autograd.Function): + @staticmethod + def forward(ctx, input, grid): + assert input.ndim == 4 + assert grid.ndim == 4 + output = torch.nn.functional.grid_sample(input=input, grid=grid, mode='bilinear', padding_mode='zeros', align_corners=False) + ctx.save_for_backward(input, grid) + return output + + @staticmethod + def backward(ctx, grad_output): + input, grid = ctx.saved_tensors + grad_input, grad_grid = _GridSample2dBackward.apply(grad_output, input, grid) + return grad_input, grad_grid + +#---------------------------------------------------------------------------- + +class _GridSample2dBackward(torch.autograd.Function): + @staticmethod + def forward(ctx, grad_output, input, grid): + op = torch._C._jit_get_operation('aten::grid_sampler_2d_backward') + grad_input, grad_grid = op(grad_output, input, grid, 0, 0, False) + ctx.save_for_backward(grid) + return grad_input, grad_grid + + @staticmethod + def backward(ctx, grad2_grad_input, grad2_grad_grid): + _ = grad2_grad_grid # unused + grid, = ctx.saved_tensors + grad2_grad_output = None + grad2_input = None + grad2_grid = None + + if ctx.needs_input_grad[0]: + grad2_grad_output = _GridSample2dForward.apply(grad2_grad_input, grid) + + assert not ctx.needs_input_grad[2] + return grad2_grad_output, grad2_input, grad2_grid + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cpp b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c1769c3cbe4dd04f76f9ccef726680720e6f39c8 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cpp @@ -0,0 +1,111 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. 
+ */ + +#include +#include +#include +#include "upfirdn2d.h" + +//------------------------------------------------------------------------ + +static torch::Tensor upfirdn2d(torch::Tensor x, torch::Tensor f, int upx, int upy, int downx, int downy, int padx0, int padx1, int pady0, int pady1, bool flip, float gain) +{ + // Validate arguments. + TORCH_CHECK(x.is_cuda(), "x must reside on CUDA device"); + TORCH_CHECK(f.device() == x.device(), "f must reside on the same device as x"); + TORCH_CHECK(f.dtype() == torch::kFloat, "f must be float32"); + TORCH_CHECK(x.numel() <= INT_MAX, "x is too large"); + TORCH_CHECK(f.numel() <= INT_MAX, "f is too large"); + TORCH_CHECK(x.numel() > 0, "x has zero size"); + TORCH_CHECK(f.numel() > 0, "f has zero size"); + TORCH_CHECK(x.dim() == 4, "x must be rank 4"); + TORCH_CHECK(f.dim() == 2, "f must be rank 2"); + TORCH_CHECK((x.size(0)-1)*x.stride(0) + (x.size(1)-1)*x.stride(1) + (x.size(2)-1)*x.stride(2) + (x.size(3)-1)*x.stride(3) <= INT_MAX, "x memory footprint is too large"); + TORCH_CHECK(f.size(0) >= 1 && f.size(1) >= 1, "f must be at least 1x1"); + TORCH_CHECK(upx >= 1 && upy >= 1, "upsampling factor must be at least 1"); + TORCH_CHECK(downx >= 1 && downy >= 1, "downsampling factor must be at least 1"); + + // Create output tensor. + const at::cuda::OptionalCUDAGuard device_guard(device_of(x)); + int outW = ((int)x.size(3) * upx + padx0 + padx1 - (int)f.size(1) + downx) / downx; + int outH = ((int)x.size(2) * upy + pady0 + pady1 - (int)f.size(0) + downy) / downy; + TORCH_CHECK(outW >= 1 && outH >= 1, "output must be at least 1x1"); + torch::Tensor y = torch::empty({x.size(0), x.size(1), outH, outW}, x.options(), x.suggest_memory_format()); + TORCH_CHECK(y.numel() <= INT_MAX, "output is too large"); + TORCH_CHECK((y.size(0)-1)*y.stride(0) + (y.size(1)-1)*y.stride(1) + (y.size(2)-1)*y.stride(2) + (y.size(3)-1)*y.stride(3) <= INT_MAX, "output memory footprint is too large"); + + // Initialize CUDA kernel parameters. + upfirdn2d_kernel_params p; + p.x = x.data_ptr(); + p.f = f.data_ptr(); + p.y = y.data_ptr(); + p.up = make_int2(upx, upy); + p.down = make_int2(downx, downy); + p.pad0 = make_int2(padx0, pady0); + p.flip = (flip) ? 1 : 0; + p.gain = gain; + p.inSize = make_int4((int)x.size(3), (int)x.size(2), (int)x.size(1), (int)x.size(0)); + p.inStride = make_int4((int)x.stride(3), (int)x.stride(2), (int)x.stride(1), (int)x.stride(0)); + p.filterSize = make_int2((int)f.size(1), (int)f.size(0)); + p.filterStride = make_int2((int)f.stride(1), (int)f.stride(0)); + p.outSize = make_int4((int)y.size(3), (int)y.size(2), (int)y.size(1), (int)y.size(0)); + p.outStride = make_int4((int)y.stride(3), (int)y.stride(2), (int)y.stride(1), (int)y.stride(0)); + p.sizeMajor = (p.inStride.z == 1) ? p.inSize.w : p.inSize.w * p.inSize.z; + p.sizeMinor = (p.inStride.z == 1) ? p.inSize.z : 1; + + // Choose CUDA kernel. + upfirdn2d_kernel_spec spec; + AT_DISPATCH_FLOATING_TYPES_AND_HALF(x.scalar_type(), "upfirdn2d_cuda", [&] + { + spec = choose_upfirdn2d_kernel(p); + }); + + // Set looping options. + p.loopMajor = (p.sizeMajor - 1) / 16384 + 1; + p.loopMinor = spec.loopMinor; + p.loopX = spec.loopX; + p.launchMinor = (p.sizeMinor - 1) / p.loopMinor + 1; + p.launchMajor = (p.sizeMajor - 1) / p.loopMajor + 1; + + // Compute grid size. 
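+    // A negative spec.tileOutW selects the generic "large" kernel, which runs with a
+    // fixed 4x32 thread block and loops over output rows; otherwise the specialized
+    // "small" kernel is launched with 256 threads per block, each block covering one
+    // tileOutW x tileOutH output tile (times loopX tiles in x).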
+ dim3 blockSize, gridSize; + if (spec.tileOutW < 0) // large + { + blockSize = dim3(4, 32, 1); + gridSize = dim3( + ((p.outSize.y - 1) / blockSize.x + 1) * p.launchMinor, + (p.outSize.x - 1) / (blockSize.y * p.loopX) + 1, + p.launchMajor); + } + else // small + { + blockSize = dim3(256, 1, 1); + gridSize = dim3( + ((p.outSize.y - 1) / spec.tileOutH + 1) * p.launchMinor, + (p.outSize.x - 1) / (spec.tileOutW * p.loopX) + 1, + p.launchMajor); + } + + // Launch CUDA kernel. + void* args[] = {&p}; + AT_CUDA_CHECK(cudaLaunchKernel(spec.kernel, gridSize, blockSize, args, 0, at::cuda::getCurrentCUDAStream())); + return y; +} + +//------------------------------------------------------------------------ + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) +{ + m.def("upfirdn2d", &upfirdn2d); +} + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cu b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cu new file mode 100644 index 0000000000000000000000000000000000000000..7d182d7b86a9058d0c007b13716d6e7f08207f42 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.cu @@ -0,0 +1,388 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include +#include "upfirdn2d.h" + +//------------------------------------------------------------------------ +// Helpers. + +template struct InternalType; +template <> struct InternalType { typedef double scalar_t; }; +template <> struct InternalType { typedef float scalar_t; }; +template <> struct InternalType { typedef float scalar_t; }; + +static __device__ __forceinline__ int floor_div(int a, int b) +{ + int t = 1 - a / b; + return (a + t * b) / b - t; +} + +//------------------------------------------------------------------------ +// Generic CUDA implementation for large filters. + +template static __global__ void upfirdn2d_kernel_large(upfirdn2d_kernel_params p) +{ + typedef typename InternalType::scalar_t scalar_t; + + // Calculate thread index. + int minorBase = blockIdx.x * blockDim.x + threadIdx.x; + int outY = minorBase / p.launchMinor; + minorBase -= outY * p.launchMinor; + int outXBase = blockIdx.y * p.loopX * blockDim.y + threadIdx.y; + int majorBase = blockIdx.z * p.loopMajor; + if (outXBase >= p.outSize.x | outY >= p.outSize.y | majorBase >= p.sizeMajor) + return; + + // Setup Y receptive field. + int midY = outY * p.down.y + p.up.y - 1 - p.pad0.y; + int inY = min(max(floor_div(midY, p.up.y), 0), p.inSize.y); + int h = min(max(floor_div(midY + p.filterSize.y, p.up.y), 0), p.inSize.y) - inY; + int filterY = midY + p.filterSize.y - (inY + 1) * p.up.y; + if (p.flip) + filterY = p.filterSize.y - 1 - filterY; + + // Loop over major, minor, and X. 
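+    // "major" spans batch (and channels when the layout is contiguous), while
+    // "minor" spans channels in the channels_last layout; see how sizeMajor and
+    // sizeMinor are derived from inStride.z on the host side.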
+ for (int majorIdx = 0, major = majorBase; majorIdx < p.loopMajor & major < p.sizeMajor; majorIdx++, major++) + for (int minorIdx = 0, minor = minorBase; minorIdx < p.loopMinor & minor < p.sizeMinor; minorIdx++, minor += p.launchMinor) + { + int nc = major * p.sizeMinor + minor; + int n = nc / p.inSize.z; + int c = nc - n * p.inSize.z; + for (int loopX = 0, outX = outXBase; loopX < p.loopX & outX < p.outSize.x; loopX++, outX += blockDim.y) + { + // Setup X receptive field. + int midX = outX * p.down.x + p.up.x - 1 - p.pad0.x; + int inX = min(max(floor_div(midX, p.up.x), 0), p.inSize.x); + int w = min(max(floor_div(midX + p.filterSize.x, p.up.x), 0), p.inSize.x) - inX; + int filterX = midX + p.filterSize.x - (inX + 1) * p.up.x; + if (p.flip) + filterX = p.filterSize.x - 1 - filterX; + + // Initialize pointers. + const T* xp = &((const T*)p.x)[inX * p.inStride.x + inY * p.inStride.y + c * p.inStride.z + n * p.inStride.w]; + const float* fp = &p.f[filterX * p.filterStride.x + filterY * p.filterStride.y]; + int filterStepX = ((p.flip) ? p.up.x : -p.up.x) * p.filterStride.x; + int filterStepY = ((p.flip) ? p.up.y : -p.up.y) * p.filterStride.y; + + // Inner loop. + scalar_t v = 0; + for (int y = 0; y < h; y++) + { + for (int x = 0; x < w; x++) + { + v += (scalar_t)(*xp) * (scalar_t)(*fp); + xp += p.inStride.x; + fp += filterStepX; + } + xp += p.inStride.y - w * p.inStride.x; + fp += filterStepY - w * filterStepX; + } + + // Store result. + v *= p.gain; + ((T*)p.y)[outX * p.outStride.x + outY * p.outStride.y + c * p.outStride.z + n * p.outStride.w] = (T)v; + } + } +} + +//------------------------------------------------------------------------ +// Specialized CUDA implementation for small filters. + +template +static __global__ void upfirdn2d_kernel_small(upfirdn2d_kernel_params p) +{ + typedef typename InternalType::scalar_t scalar_t; + const int tileInW = ((tileOutW - 1) * downx + filterW - 1) / upx + 1; + const int tileInH = ((tileOutH - 1) * downy + filterH - 1) / upy + 1; + __shared__ volatile scalar_t sf[filterH][filterW]; + __shared__ volatile scalar_t sx[tileInH][tileInW][loopMinor]; + + // Calculate tile index. + int minorBase = blockIdx.x; + int tileOutY = minorBase / p.launchMinor; + minorBase -= tileOutY * p.launchMinor; + minorBase *= loopMinor; + tileOutY *= tileOutH; + int tileOutXBase = blockIdx.y * p.loopX * tileOutW; + int majorBase = blockIdx.z * p.loopMajor; + if (tileOutXBase >= p.outSize.x | tileOutY >= p.outSize.y | majorBase >= p.sizeMajor) + return; + + // Load filter (flipped). + for (int tapIdx = threadIdx.x; tapIdx < filterH * filterW; tapIdx += blockDim.x) + { + int fy = tapIdx / filterW; + int fx = tapIdx - fy * filterW; + scalar_t v = 0; + if (fx < p.filterSize.x & fy < p.filterSize.y) + { + int ffx = (p.flip) ? fx : p.filterSize.x - 1 - fx; + int ffy = (p.flip) ? fy : p.filterSize.y - 1 - fy; + v = (scalar_t)p.f[ffx * p.filterStride.x + ffy * p.filterStride.y]; + } + sf[fy][fx] = v; + } + + // Loop over major and X. + for (int majorIdx = 0, major = majorBase; majorIdx < p.loopMajor & major < p.sizeMajor; majorIdx++, major++) + { + int baseNC = major * p.sizeMinor + minorBase; + int n = baseNC / p.inSize.z; + int baseC = baseNC - n * p.inSize.z; + for (int loopX = 0, tileOutX = tileOutXBase; loopX < p.loopX & tileOutX < p.outSize.x; loopX++, tileOutX += tileOutW) + { + // Load input pixels. 
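+            // All threads of the block cooperatively stage the required input tile
+            // (and loopMinor channels) into shared memory sx[]; the flipped filter is
+            // already resident in sf[], so the output loop below reads shared memory only.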
+ int tileMidX = tileOutX * downx + upx - 1 - p.pad0.x; + int tileMidY = tileOutY * downy + upy - 1 - p.pad0.y; + int tileInX = floor_div(tileMidX, upx); + int tileInY = floor_div(tileMidY, upy); + __syncthreads(); + for (int inIdx = threadIdx.x; inIdx < tileInH * tileInW * loopMinor; inIdx += blockDim.x) + { + int relC = inIdx; + int relInX = relC / loopMinor; + int relInY = relInX / tileInW; + relC -= relInX * loopMinor; + relInX -= relInY * tileInW; + int c = baseC + relC; + int inX = tileInX + relInX; + int inY = tileInY + relInY; + scalar_t v = 0; + if (inX >= 0 & inY >= 0 & inX < p.inSize.x & inY < p.inSize.y & c < p.inSize.z) + v = (scalar_t)((const T*)p.x)[inX * p.inStride.x + inY * p.inStride.y + c * p.inStride.z + n * p.inStride.w]; + sx[relInY][relInX][relC] = v; + } + + // Loop over output pixels. + __syncthreads(); + for (int outIdx = threadIdx.x; outIdx < tileOutH * tileOutW * loopMinor; outIdx += blockDim.x) + { + int relC = outIdx; + int relOutX = relC / loopMinor; + int relOutY = relOutX / tileOutW; + relC -= relOutX * loopMinor; + relOutX -= relOutY * tileOutW; + int c = baseC + relC; + int outX = tileOutX + relOutX; + int outY = tileOutY + relOutY; + + // Setup receptive field. + int midX = tileMidX + relOutX * downx; + int midY = tileMidY + relOutY * downy; + int inX = floor_div(midX, upx); + int inY = floor_div(midY, upy); + int relInX = inX - tileInX; + int relInY = inY - tileInY; + int filterX = (inX + 1) * upx - midX - 1; // flipped + int filterY = (inY + 1) * upy - midY - 1; // flipped + + // Inner loop. + if (outX < p.outSize.x & outY < p.outSize.y & c < p.outSize.z) + { + scalar_t v = 0; + #pragma unroll + for (int y = 0; y < filterH / upy; y++) + #pragma unroll + for (int x = 0; x < filterW / upx; x++) + v += sx[relInY + y][relInX + x][relC] * sf[filterY + y * upy][filterX + x * upx]; + v *= p.gain; + ((T*)p.y)[outX * p.outStride.x + outY * p.outStride.y + c * p.outStride.z + n * p.outStride.w] = (T)v; + } + } + } + } +} + +//------------------------------------------------------------------------ +// CUDA kernel selection. + +template upfirdn2d_kernel_spec choose_upfirdn2d_kernel(const upfirdn2d_kernel_params& p) +{ + int s = p.inStride.z, fx = p.filterSize.x, fy = p.filterSize.y; + upfirdn2d_kernel_spec spec = {(void*)upfirdn2d_kernel_large, -1,-1,1, 4}; // contiguous + if (s == 1) spec = {(void*)upfirdn2d_kernel_large, -1,-1,4, 1}; // channels_last + + // No up/downsampling. 
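+    // Each `if` below overwrites `spec` for progressively tighter filter-size bounds,
+    // so the most specialized matching kernel is the one that finally wins; s != 1
+    // corresponds to the contiguous layout and s == 1 to channels_last.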
+ if (p.up.x == 1 && p.up.y == 1 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + if (s != 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + if (s != 1 && fx <= 7 && fy <= 7 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 5 && fy <= 5 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 3 && fy <= 3 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 24 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 16 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 8 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s != 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s != 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + // channels_last + if (s == 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s == 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s == 1 && fx <= 7 && fy <= 7 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 5 && fy <= 5 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 3 && fy <= 3 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 24 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 16 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 8 && fy <= 1 ) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + if (s == 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + if (s == 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + } + + // 2x upsampling. 
+ if (p.up.x == 2 && p.up.y == 2 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + if (s != 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + if (s != 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + if (s != 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small, 64,16,1, 1}; + // channels_last + if (s == 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s == 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s == 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + if (s == 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small, 16,16,8, 1}; + } + if (p.up.x == 2 && p.up.y == 1 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + // channels_last + if (s == 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + } + if (p.up.x == 1 && p.up.y == 2 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s != 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s != 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + // channels_last + if (s == 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + if (s == 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + if (s == 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + } + + // 2x downsampling. 
+ if (p.up.x == 1 && p.up.y == 1 && p.down.x == 2 && p.down.y == 2) + { + // contiguous + if (s != 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,16,1, 1}; + if (s != 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,16,1, 1}; + if (s != 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + if (s != 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + if (s != 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + if (s != 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + // channels_last + if (s == 1 && fx <= 24 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 16,16,1, 1}; + if (s == 1 && fx <= 16 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 16,16,1, 1}; + if (s == 1 && fx <= 8 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 8,8,8, 1}; + if (s == 1 && fx <= 6 && fy <= 6 ) spec = {(void*)upfirdn2d_kernel_small, 8,8,8, 1}; + if (s == 1 && fx <= 4 && fy <= 4 ) spec = {(void*)upfirdn2d_kernel_small, 8,8,8, 1}; + if (s == 1 && fx <= 2 && fy <= 2 ) spec = {(void*)upfirdn2d_kernel_small, 8,8,8, 1}; + } + if (p.up.x == 1 && p.up.y == 1 && p.down.x == 2 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,8,1, 1}; + if (s != 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,8,1, 1}; + if (s != 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,8,1, 1}; + // channels_last + if (s == 1 && fx <= 24 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,1,8, 1}; + if (s == 1 && fx <= 16 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,1,8, 1}; + if (s == 1 && fx <= 8 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 64,1,8, 1}; + } + if (p.up.x == 1 && p.up.y == 1 && p.down.x == 1 && p.down.y == 2) + { + // contiguous + if (s != 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 32,16,1, 1}; + if (s != 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 32,16,1, 1}; + if (s != 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 32,16,1, 1}; + // channels_last + if (s == 1 && fx <= 1 && fy <= 24) spec = {(void*)upfirdn2d_kernel_small, 1,64,8, 1}; + if (s == 1 && fx <= 1 && fy <= 16) spec = {(void*)upfirdn2d_kernel_small, 1,64,8, 1}; + if (s == 1 && fx <= 1 && fy <= 8 ) spec = {(void*)upfirdn2d_kernel_small, 1,64,8, 1}; + } + + // 4x upsampling. 
+ if (p.up.x == 4 && p.up.y == 4 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 48 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + if (s != 1 && fx <= 32 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 64,32,1, 1}; + // channels_last + if (s == 1 && fx <= 48 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s == 1 && fx <= 32 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + } + if (p.up.x == 4 && p.up.y == 1 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 48 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + if (s != 1 && fx <= 32 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,8,1, 1}; + // channels_last + if (s == 1 && fx <= 48 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + if (s == 1 && fx <= 32 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 128,1,16, 1}; + } + if (p.up.x == 1 && p.up.y == 4 && p.down.x == 1 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 1 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + if (s != 1 && fx <= 1 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 32,32,1, 1}; + // channels_last + if (s == 1 && fx <= 1 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + if (s == 1 && fx <= 1 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 1,128,16, 1}; + } + + // 4x downsampling (inefficient). + if (p.up.x == 1 && p.up.y == 1 && p.down.x == 4 && p.down.y == 1) + { + // contiguous + if (s != 1 && fx <= 48 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + if (s != 1 && fx <= 32 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + // channels_last + if (s == 1 && fx <= 48 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 32,1,8, 1}; + if (s == 1 && fx <= 32 && fy <= 1) spec = {(void*)upfirdn2d_kernel_small, 32,1,8, 1}; + } + if (p.up.x == 1 && p.up.y == 1 && p.down.x == 1 && p.down.y == 4) + { + // contiguous + if (s != 1 && fx <= 1 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + if (s != 1 && fx <= 1 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 32,8,1, 1}; + // channels_last + if (s == 1 && fx <= 1 && fy <= 48) spec = {(void*)upfirdn2d_kernel_small, 1,32,8, 1}; + if (s == 1 && fx <= 1 && fy <= 32) spec = {(void*)upfirdn2d_kernel_small, 1,32,8, 1}; + } + return spec; +} + +//------------------------------------------------------------------------ +// Template specializations. + +template upfirdn2d_kernel_spec choose_upfirdn2d_kernel (const upfirdn2d_kernel_params& p); +template upfirdn2d_kernel_spec choose_upfirdn2d_kernel (const upfirdn2d_kernel_params& p); +template upfirdn2d_kernel_spec choose_upfirdn2d_kernel(const upfirdn2d_kernel_params& p); + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.h b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.h new file mode 100644 index 0000000000000000000000000000000000000000..d5de893d6489921d4689ac1e2cdb45da9a253f18 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.h @@ -0,0 +1,63 @@ +/* + * SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + * SPDX-License-Identifier: LicenseRef-NvidiaProprietary + * + * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual + * property and proprietary rights in and to this material, related + * documentation and any modifications thereto. 
Any use, reproduction, + * disclosure or distribution of this material and related documentation + * without an express license agreement from NVIDIA CORPORATION or + * its affiliates is strictly prohibited. + */ + +#include + +//------------------------------------------------------------------------ +// CUDA kernel parameters. + +struct upfirdn2d_kernel_params +{ + const void* x; + const float* f; + void* y; + + int2 up; + int2 down; + int2 pad0; + int flip; + float gain; + + int4 inSize; // [width, height, channel, batch] + int4 inStride; + int2 filterSize; // [width, height] + int2 filterStride; + int4 outSize; // [width, height, channel, batch] + int4 outStride; + int sizeMinor; + int sizeMajor; + + int loopMinor; + int loopMajor; + int loopX; + int launchMinor; + int launchMajor; +}; + +//------------------------------------------------------------------------ +// CUDA kernel specialization. + +struct upfirdn2d_kernel_spec +{ + void* kernel; + int tileOutW; + int tileOutH; + int loopMinor; + int loopX; +}; + +//------------------------------------------------------------------------ +// CUDA kernel selection. + +template upfirdn2d_kernel_spec choose_upfirdn2d_kernel(const upfirdn2d_kernel_params& p); + +//------------------------------------------------------------------------ diff --git a/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.py b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.py new file mode 100644 index 0000000000000000000000000000000000000000..5d634714167043daf63ec7f643ddd85d98d926dc --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/ops/upfirdn2d.py @@ -0,0 +1,391 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Custom PyTorch ops for efficient resampling of 2D images.""" + +import os +import numpy as np +import torch + +from .. import custom_ops +from .. import misc +from . 
import conv2d_gradfix + +#---------------------------------------------------------------------------- + +_plugin = None + +def _init(): + global _plugin + if _plugin is None: + _plugin = custom_ops.get_plugin( + module_name='upfirdn2d_plugin', + sources=['upfirdn2d.cpp', 'upfirdn2d.cu'], + headers=['upfirdn2d.h'], + source_dir=os.path.dirname(__file__), + extra_cuda_cflags=['--use_fast_math'], + ) + return True + +def _parse_scaling(scaling): + if isinstance(scaling, int): + scaling = [scaling, scaling] + assert isinstance(scaling, (list, tuple)) + assert all(isinstance(x, int) for x in scaling) + sx, sy = scaling + assert sx >= 1 and sy >= 1 + return sx, sy + +def _parse_padding(padding): + if isinstance(padding, int): + padding = [padding, padding] + assert isinstance(padding, (list, tuple)) + assert all(isinstance(x, int) for x in padding) + if len(padding) == 2: + padx, pady = padding + padding = [padx, padx, pady, pady] + padx0, padx1, pady0, pady1 = padding + return padx0, padx1, pady0, pady1 + +def _get_filter_size(f): + if f is None: + return 1, 1 + assert isinstance(f, torch.Tensor) and f.ndim in [1, 2] + fw = f.shape[-1] + fh = f.shape[0] + with misc.suppress_tracer_warnings(): + fw = int(fw) + fh = int(fh) + misc.assert_shape(f, [fh, fw][:f.ndim]) + assert fw >= 1 and fh >= 1 + return fw, fh + +#---------------------------------------------------------------------------- + +def setup_filter(f, device=torch.device('cpu'), normalize=True, flip_filter=False, gain=1, separable=None): + r"""Convenience function to setup 2D FIR filter for `upfirdn2d()`. + + Args: + f: Torch tensor, numpy array, or python list of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), + `[]` (impulse), or + `None` (identity). + device: Result device (default: cpu). + normalize: Normalize the filter so that it retains the magnitude + for constant input signal (DC)? (default: True). + flip_filter: Flip the filter? (default: False). + gain: Overall scaling factor for signal magnitude (default: 1). + separable: Return a separable filter? (default: select automatically). + + Returns: + Float32 tensor of the shape + `[filter_height, filter_width]` (non-separable) or + `[filter_taps]` (separable). + """ + # Validate. + if f is None: + f = 1 + f = torch.as_tensor(f, dtype=torch.float32) + assert f.ndim in [0, 1, 2] + assert f.numel() > 0 + if f.ndim == 0: + f = f[np.newaxis] + + # Separable? + if separable is None: + separable = (f.ndim == 1 and f.numel() >= 8) + if f.ndim == 1 and not separable: + f = f.ger(f) + assert f.ndim == (1 if separable else 2) + + # Apply normalize, flip, gain, and device. + if normalize: + f /= f.sum() + if flip_filter: + f = f.flip(list(range(f.ndim))) + f = f * (gain ** (f.ndim / 2)) + f = f.to(device=device) + return f + +#---------------------------------------------------------------------------- + +def upfirdn2d(x, f, up=1, down=1, padding=0, flip_filter=False, gain=1, impl='cuda'): + r"""Pad, upsample, filter, and downsample a batch of 2D images. + + Performs the following sequence of operations for each channel: + + 1. Upsample the image by inserting N-1 zeros after each pixel (`up`). + + 2. Pad the image with the specified number of zeros on each side (`padding`). + Negative padding corresponds to cropping the image. + + 3. Convolve the image with the specified 2D FIR filter (`f`), shrinking it + so that the footprint of all output pixels lies within the input image. + + 4. Downsample the image by keeping every Nth pixel (`down`). 
+ + This sequence of operations bears close resemblance to scipy.signal.upfirdn(). + The fused op is considerably more efficient than performing the same calculation + using standard PyTorch ops. It supports gradients of arbitrary order. + + Args: + x: Float32/float64/float16 input tensor of the shape + `[batch_size, num_channels, in_height, in_width]`. + f: Float32 FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + up: Integer upsampling factor. Can be a single int or a list/tuple + `[x, y]` (default: 1). + down: Integer downsampling factor. Can be a single int or a list/tuple + `[x, y]` (default: 1). + padding: Padding with respect to the upsampled image. Can be a single number + or a list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + flip_filter: False = convolution, True = correlation (default: False). + gain: Overall scaling factor for signal magnitude (default: 1). + impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. + """ + assert isinstance(x, torch.Tensor) + assert impl in ['ref', 'cuda'] + if impl == 'cuda' and x.device.type == 'cuda' and _init(): + return _upfirdn2d_cuda(up=up, down=down, padding=padding, flip_filter=flip_filter, gain=gain).apply(x, f) + return _upfirdn2d_ref(x, f, up=up, down=down, padding=padding, flip_filter=flip_filter, gain=gain) + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def _upfirdn2d_ref(x, f, up=1, down=1, padding=0, flip_filter=False, gain=1): + """Slow reference implementation of `upfirdn2d()` using standard PyTorch ops. + """ + # Validate arguments. + assert isinstance(x, torch.Tensor) and x.ndim == 4 + if f is None: + f = torch.ones([1, 1], dtype=torch.float32, device=x.device) + assert isinstance(f, torch.Tensor) and f.ndim in [1, 2] + assert f.dtype == torch.float32 and not f.requires_grad + batch_size, num_channels, in_height, in_width = x.shape + upx, upy = _parse_scaling(up) + downx, downy = _parse_scaling(down) + padx0, padx1, pady0, pady1 = _parse_padding(padding) + + # Check that upsampled buffer is not smaller than the filter. + upW = in_width * upx + padx0 + padx1 + upH = in_height * upy + pady0 + pady1 + assert upW >= f.shape[-1] and upH >= f.shape[0] + + # Upsample by inserting zeros. + x = x.reshape([batch_size, num_channels, in_height, 1, in_width, 1]) + x = torch.nn.functional.pad(x, [0, upx - 1, 0, 0, 0, upy - 1]) + x = x.reshape([batch_size, num_channels, in_height * upy, in_width * upx]) + + # Pad or crop. + x = torch.nn.functional.pad(x, [max(padx0, 0), max(padx1, 0), max(pady0, 0), max(pady1, 0)]) + x = x[:, :, max(-pady0, 0) : x.shape[2] - max(-pady1, 0), max(-padx0, 0) : x.shape[3] - max(-padx1, 0)] + + # Setup filter. + f = f * (gain ** (f.ndim / 2)) + f = f.to(x.dtype) + if not flip_filter: + f = f.flip(list(range(f.ndim))) + + # Convolve with the filter. + f = f[np.newaxis, np.newaxis].repeat([num_channels, 1] + [1] * f.ndim) + if f.ndim == 4: + x = conv2d_gradfix.conv2d(input=x, weight=f, groups=num_channels) + else: + x = conv2d_gradfix.conv2d(input=x, weight=f.unsqueeze(2), groups=num_channels) + x = conv2d_gradfix.conv2d(input=x, weight=f.unsqueeze(3), groups=num_channels) + + # Downsample by throwing away pixels. 
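+    # Strided slicing keeps the top-left sample of every downy x downx block.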
+ x = x[:, :, ::downy, ::downx] + return x + +#---------------------------------------------------------------------------- + +_upfirdn2d_cuda_cache = dict() + +def _upfirdn2d_cuda(up=1, down=1, padding=0, flip_filter=False, gain=1): + """Fast CUDA implementation of `upfirdn2d()` using custom ops. + """ + # Parse arguments. + upx, upy = _parse_scaling(up) + downx, downy = _parse_scaling(down) + padx0, padx1, pady0, pady1 = _parse_padding(padding) + + # Lookup from cache. + key = (upx, upy, downx, downy, padx0, padx1, pady0, pady1, flip_filter, gain) + if key in _upfirdn2d_cuda_cache: + return _upfirdn2d_cuda_cache[key] + + # Forward op. + class Upfirdn2dCuda(torch.autograd.Function): + @staticmethod + def forward(ctx, x, f): # pylint: disable=arguments-differ + assert isinstance(x, torch.Tensor) and x.ndim == 4 + if f is None: + f = torch.ones([1, 1], dtype=torch.float32, device=x.device) + if f.ndim == 1 and f.shape[0] == 1: + f = f.square().unsqueeze(0) # Convert separable-1 into full-1x1. + assert isinstance(f, torch.Tensor) and f.ndim in [1, 2] + y = x + if f.ndim == 2: + y = _plugin.upfirdn2d(y, f, upx, upy, downx, downy, padx0, padx1, pady0, pady1, flip_filter, gain) + else: + y = _plugin.upfirdn2d(y, f.unsqueeze(0), upx, 1, downx, 1, padx0, padx1, 0, 0, flip_filter, 1.0) + y = _plugin.upfirdn2d(y, f.unsqueeze(1), 1, upy, 1, downy, 0, 0, pady0, pady1, flip_filter, gain) + ctx.save_for_backward(f) + ctx.x_shape = x.shape + return y + + @staticmethod + def backward(ctx, dy): # pylint: disable=arguments-differ + f, = ctx.saved_tensors + _, _, ih, iw = ctx.x_shape + _, _, oh, ow = dy.shape + fw, fh = _get_filter_size(f) + p = [ + fw - padx0 - 1, + iw * upx - ow * downx + padx0 - upx + 1, + fh - pady0 - 1, + ih * upy - oh * downy + pady0 - upy + 1, + ] + dx = None + df = None + + if ctx.needs_input_grad[0]: + dx = _upfirdn2d_cuda(up=down, down=up, padding=p, flip_filter=(not flip_filter), gain=gain).apply(dy, f) + + assert not ctx.needs_input_grad[1] + return dx, df + + # Add to cache. + _upfirdn2d_cuda_cache[key] = Upfirdn2dCuda + return Upfirdn2dCuda + +#---------------------------------------------------------------------------- + +def filter2d(x, f, padding=0, flip_filter=False, gain=1, impl='cuda'): + r"""Filter a batch of 2D images using the given 2D FIR filter. + + By default, the result is padded so that its shape matches the input. + User-specified padding is applied on top of that, with negative values + indicating cropping. Pixels outside the image are assumed to be zero. + + Args: + x: Float32/float64/float16 input tensor of the shape + `[batch_size, num_channels, in_height, in_width]`. + f: Float32 FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + padding: Padding with respect to the output. Can be a single number or a + list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + flip_filter: False = convolution, True = correlation (default: False). + gain: Overall scaling factor for signal magnitude (default: 1). + impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. 
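+
+    Example (illustrative):
+        # Hypothetical normalized 3x3 box filter; output keeps the input's spatial size.
+        f = torch.ones([3, 3]) / 9
+        y = filter2d(x, f.to(x.device))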
+ """ + padx0, padx1, pady0, pady1 = _parse_padding(padding) + fw, fh = _get_filter_size(f) + p = [ + padx0 + fw // 2, + padx1 + (fw - 1) // 2, + pady0 + fh // 2, + pady1 + (fh - 1) // 2, + ] + return upfirdn2d(x, f, padding=p, flip_filter=flip_filter, gain=gain, impl=impl) + +#---------------------------------------------------------------------------- + +def upsample2d(x, f, up=2, padding=0, flip_filter=False, gain=1, impl='cuda'): + r"""Upsample a batch of 2D images using the given 2D FIR filter. + + By default, the result is padded so that its shape is a multiple of the input. + User-specified padding is applied on top of that, with negative values + indicating cropping. Pixels outside the image are assumed to be zero. + + Args: + x: Float32/float64/float16 input tensor of the shape + `[batch_size, num_channels, in_height, in_width]`. + f: Float32 FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + up: Integer upsampling factor. Can be a single int or a list/tuple + `[x, y]` (default: 1). + padding: Padding with respect to the output. Can be a single number or a + list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + flip_filter: False = convolution, True = correlation (default: False). + gain: Overall scaling factor for signal magnitude (default: 1). + impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. + """ + upx, upy = _parse_scaling(up) + padx0, padx1, pady0, pady1 = _parse_padding(padding) + fw, fh = _get_filter_size(f) + p = [ + padx0 + (fw + upx - 1) // 2, + padx1 + (fw - upx) // 2, + pady0 + (fh + upy - 1) // 2, + pady1 + (fh - upy) // 2, + ] + return upfirdn2d(x, f, up=up, padding=p, flip_filter=flip_filter, gain=gain*upx*upy, impl=impl) + +#---------------------------------------------------------------------------- + +def downsample2d(x, f, down=2, padding=0, flip_filter=False, gain=1, impl='cuda'): + r"""Downsample a batch of 2D images using the given 2D FIR filter. + + By default, the result is padded so that its shape is a fraction of the input. + User-specified padding is applied on top of that, with negative values + indicating cropping. Pixels outside the image are assumed to be zero. + + Args: + x: Float32/float64/float16 input tensor of the shape + `[batch_size, num_channels, in_height, in_width]`. + f: Float32 FIR filter of the shape + `[filter_height, filter_width]` (non-separable), + `[filter_taps]` (separable), or + `None` (identity). + down: Integer downsampling factor. Can be a single int or a list/tuple + `[x, y]` (default: 1). + padding: Padding with respect to the input. Can be a single number or a + list/tuple `[x, y]` or `[x_before, x_after, y_before, y_after]` + (default: 0). + flip_filter: False = convolution, True = correlation (default: False). + gain: Overall scaling factor for signal magnitude (default: 1). + impl: Implementation to use. Can be `'ref'` or `'cuda'` (default: `'cuda'`). + + Returns: + Tensor of the shape `[batch_size, num_channels, out_height, out_width]`. 
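+
+    Example (illustrative sketch added by the editor, not part of the original
+    docstring; assumes `setup_filter` from this module):
+
+        f = setup_filter([1, 3, 3, 1])     # normalized separable low-pass filter
+        y = downsample2d(x, f, down=2)     # halves height and width for even inputs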
+ """ + downx, downy = _parse_scaling(down) + padx0, padx1, pady0, pady1 = _parse_padding(padding) + fw, fh = _get_filter_size(f) + p = [ + padx0 + (fw - downx + 1) // 2, + padx1 + (fw - downx) // 2, + pady0 + (fh - downy + 1) // 2, + pady1 + (fh - downy) // 2, + ] + return upfirdn2d(x, f, down=down, padding=p, flip_filter=flip_filter, gain=gain, impl=impl) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/persistence.py b/ThirdParty/eg3d/torch_utils/persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..610b220e47469ade9cb575e1a1f9f87649bee7a5 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/persistence.py @@ -0,0 +1,253 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Facilities for pickling Python code alongside other data. + +The pickled code is automatically imported into a separate Python module +during unpickling. This way, any previously exported pickles will remain +usable even if the original code is no longer available, or if the current +version of the code is not consistent with what was originally pickled.""" + +import sys +import pickle +import io +import inspect +import copy +import uuid +import types +import ThirdParty.eg3d.dnnlib + +#---------------------------------------------------------------------------- + +_version = 6 # internal version number +_decorators = set() # {decorator_class, ...} +_import_hooks = [] # [hook_function, ...] +_module_to_src_dict = dict() # {module: src, ...} +_src_to_module_dict = dict() # {src: module, ...} + +#---------------------------------------------------------------------------- + +def persistent_class(orig_class): + r"""Class decorator that extends a given class to save its source code + when pickled. + + Example: + + from torch_utils import persistence + + @persistence.persistent_class + class MyNetwork(torch.nn.Module): + def __init__(self, num_inputs, num_outputs): + super().__init__() + self.fc = MyLayer(num_inputs, num_outputs) + ... + + @persistence.persistent_class + class MyLayer(torch.nn.Module): + ... + + When pickled, any instance of `MyNetwork` and `MyLayer` will save its + source code alongside other internal state (e.g., parameters, buffers, + and submodules). This way, any previously exported pickle will remain + usable even if the class definitions have been modified or are no + longer available. + + The decorator saves the source code of the entire Python module + containing the decorated class. It does *not* save the source code of + any imported modules. Thus, the imported modules must be available + during unpickling, also including `torch_utils.persistence` itself. + + It is ok to call functions defined in the same module from the + decorated class. However, if the decorated class depends on other + classes defined in the same module, they must be decorated as well. + This is illustrated in the above example in the case of `MyLayer`. 
+ + It is also possible to employ the decorator just-in-time before + calling the constructor. For example: + + cls = MyLayer + if want_to_make_it_persistent: + cls = persistence.persistent_class(cls) + layer = cls(num_inputs, num_outputs) + + As an additional feature, the decorator also keeps track of the + arguments that were used to construct each instance of the decorated + class. The arguments can be queried via `obj.init_args` and + `obj.init_kwargs`, and they are automatically pickled alongside other + object state. A typical use case is to first unpickle a previous + instance of a persistent class, and then upgrade it to use the latest + version of the source code: + + with open('old_pickle.pkl', 'rb') as f: + old_net = pickle.load(f) + new_net = MyNetwork(*old_obj.init_args, **old_obj.init_kwargs) + misc.copy_params_and_buffers(old_net, new_net, require_all=True) + """ + assert isinstance(orig_class, type) + if is_persistent(orig_class): + return orig_class + + assert orig_class.__module__ in sys.modules + orig_module = sys.modules[orig_class.__module__] + orig_module_src = _module_to_src(orig_module) + + class Decorator(orig_class): + _orig_module_src = orig_module_src + _orig_class_name = orig_class.__name__ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._init_args = copy.deepcopy(args) + self._init_kwargs = copy.deepcopy(kwargs) + assert orig_class.__name__ in orig_module.__dict__ + _check_pickleable(self.__reduce__()) + + @property + def init_args(self): + return copy.deepcopy(self._init_args) + + @property + def init_kwargs(self): + return dnnlib.EasyDict(copy.deepcopy(self._init_kwargs)) + + def __reduce__(self): + fields = list(super().__reduce__()) + fields += [None] * max(3 - len(fields), 0) + if fields[0] is not _reconstruct_persistent_obj: + meta = dict(type='class', version=_version, module_src=self._orig_module_src, class_name=self._orig_class_name, state=fields[2]) + fields[0] = _reconstruct_persistent_obj # reconstruct func + fields[1] = (meta,) # reconstruct args + fields[2] = None # state dict + return tuple(fields) + + Decorator.__name__ = orig_class.__name__ + _decorators.add(Decorator) + return Decorator + +#---------------------------------------------------------------------------- + +def is_persistent(obj): + r"""Test whether the given object or class is persistent, i.e., + whether it will save its source code when pickled. + """ + try: + if obj in _decorators: + return True + except TypeError: + pass + return type(obj) in _decorators # pylint: disable=unidiomatic-typecheck + +#---------------------------------------------------------------------------- + +def import_hook(hook): + r"""Register an import hook that is called whenever a persistent object + is being unpickled. A typical use case is to patch the pickled source + code to avoid errors and inconsistencies when the API of some imported + module has changed. + + The hook should have the following signature: + + hook(meta) -> modified meta + + `meta` is an instance of `dnnlib.EasyDict` with the following fields: + + type: Type of the persistent object, e.g. `'class'`. + version: Internal version number of `torch_utils.persistence`. + module_src Original source code of the Python module. + class_name: Class name in the original Python module. + state: Internal state of the object. + + Example: + + @persistence.import_hook + def wreck_my_network(meta): + if meta.class_name == 'MyNetwork': + print('MyNetwork is being imported. 
I will wreck it!') + meta.module_src = meta.module_src.replace("True", "False") + return meta + """ + assert callable(hook) + _import_hooks.append(hook) + +#---------------------------------------------------------------------------- + +def _reconstruct_persistent_obj(meta): + r"""Hook that is called internally by the `pickle` module to unpickle + a persistent object. + """ + meta = dnnlib.EasyDict(meta) + meta.state = dnnlib.EasyDict(meta.state) + for hook in _import_hooks: + meta = hook(meta) + assert meta is not None + + assert meta.version == _version + module = _src_to_module(meta.module_src) + + assert meta.type == 'class' + orig_class = module.__dict__[meta.class_name] + decorator_class = persistent_class(orig_class) + obj = decorator_class.__new__(decorator_class) + + setstate = getattr(obj, '__setstate__', None) + if callable(setstate): + setstate(meta.state) # pylint: disable=not-callable + else: + obj.__dict__.update(meta.state) + return obj + +#---------------------------------------------------------------------------- + +def _module_to_src(module): + r"""Query the source code of a given Python module. + """ + src = _module_to_src_dict.get(module, None) + if src is None: + src = inspect.getsource(module) + _module_to_src_dict[module] = src + _src_to_module_dict[src] = module + return src + +def _src_to_module(src): + r"""Get or create a Python module for the given source code. + """ + module = _src_to_module_dict.get(src, None) + if module is None: + module_name = "_imported_module_" + uuid.uuid4().hex + module = types.ModuleType(module_name) + sys.modules[module_name] = module + _module_to_src_dict[module] = src + _src_to_module_dict[src] = module + exec(src, module.__dict__) # pylint: disable=exec-used + return module + +#---------------------------------------------------------------------------- + +def _check_pickleable(obj): + r"""Check that the given object is pickleable, raising an exception if + it is not. This function is expected to be considerably more efficient + than actually pickling the object. + """ + def recurse(obj): + if isinstance(obj, (list, tuple, set)): + return [recurse(x) for x in obj] + if isinstance(obj, dict): + return [[recurse(x), recurse(y)] for x, y in obj.items()] + if isinstance(obj, (str, int, float, bool, bytes, bytearray)): + return None # Python primitive types are pickleable. + if f'{type(obj).__module__}.{type(obj).__name__}' in ['numpy.ndarray', 'torch.Tensor', 'torch.nn.parameter.Parameter']: + return None # NumPy arrays and PyTorch tensors are pickleable. + if is_persistent(obj): + return None # Persistent objects are pickleable, by virtue of the constructor check. + return obj + with io.BytesIO() as f: + pickle.dump(recurse(obj), f) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/torch_utils/training_stats.py b/ThirdParty/eg3d/torch_utils/training_stats.py new file mode 100644 index 0000000000000000000000000000000000000000..636dd7f9919632c84795265b7c472f1138c901b2 --- /dev/null +++ b/ThirdParty/eg3d/torch_utils/training_stats.py @@ -0,0 +1,270 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. 
Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Facilities for reporting and collecting training statistics across +multiple processes and devices. The interface is designed to minimize +synchronization overhead as well as the amount of boilerplate in user +code.""" + +import re +import numpy as np +import torch +import dnnlib + +from . import misc + +#---------------------------------------------------------------------------- + +_num_moments = 3 # [num_scalars, sum_of_scalars, sum_of_squares] +_reduce_dtype = torch.float32 # Data type to use for initial per-tensor reduction. +_counter_dtype = torch.float64 # Data type to use for the internal counters. +_rank = 0 # Rank of the current process. +_sync_device = None # Device to use for multiprocess communication. None = single-process. +_sync_called = False # Has _sync() been called yet? +_counters = dict() # Running counters on each device, updated by report(): name => device => torch.Tensor +_cumulative = dict() # Cumulative counters on the CPU, updated by _sync(): name => torch.Tensor + +#---------------------------------------------------------------------------- + +def init_multiprocessing(rank, sync_device): + r"""Initializes `torch_utils.training_stats` for collecting statistics + across multiple processes. + + This function must be called after + `torch.distributed.init_process_group()` and before `Collector.update()`. + The call is not necessary if multi-process collection is not needed. + + Args: + rank: Rank of the current process. + sync_device: PyTorch device to use for inter-process + communication, or None to disable multi-process + collection. Typically `torch.device('cuda', rank)`. + """ + global _rank, _sync_device + assert not _sync_called + _rank = rank + _sync_device = sync_device + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def report(name, value): + r"""Broadcasts the given set of scalars to all interested instances of + `Collector`, across device and process boundaries. + + This function is expected to be extremely cheap and can be safely + called from anywhere in the training loop, loss function, or inside a + `torch.nn.Module`. + + Warning: The current implementation expects the set of unique names to + be consistent across processes. Please make sure that `report()` is + called at least once for each unique name by each process, and in the + same order. If a given process has no scalars to broadcast, it can do + `report(name, [])` (empty list). + + Args: + name: Arbitrary string specifying the name of the statistic. + Averages are accumulated separately for each unique name. + value: Arbitrary set of scalars. Can be a list, tuple, + NumPy array, PyTorch tensor, or Python scalar. + + Returns: + The same `value` that was passed in. 
+ """ + if name not in _counters: + _counters[name] = dict() + + elems = torch.as_tensor(value) + if elems.numel() == 0: + return value + + elems = elems.detach().flatten().to(_reduce_dtype) + moments = torch.stack([ + torch.ones_like(elems).sum(), + elems.sum(), + elems.square().sum(), + ]) + assert moments.ndim == 1 and moments.shape[0] == _num_moments + moments = moments.to(_counter_dtype) + + device = moments.device + if device not in _counters[name]: + _counters[name][device] = torch.zeros_like(moments) + _counters[name][device].add_(moments) + return value + +#---------------------------------------------------------------------------- + +def report0(name, value): + r"""Broadcasts the given set of scalars by the first process (`rank = 0`), + but ignores any scalars provided by the other processes. + See `report()` for further details. + """ + report(name, value if _rank == 0 else []) + return value + +#---------------------------------------------------------------------------- + +class Collector: + r"""Collects the scalars broadcasted by `report()` and `report0()` and + computes their long-term averages (mean and standard deviation) over + user-defined periods of time. + + The averages are first collected into internal counters that are not + directly visible to the user. They are then copied to the user-visible + state as a result of calling `update()` and can then be queried using + `mean()`, `std()`, `as_dict()`, etc. Calling `update()` also resets the + internal counters for the next round, so that the user-visible state + effectively reflects averages collected between the last two calls to + `update()`. + + Args: + regex: Regular expression defining which statistics to + collect. The default is to collect everything. + keep_previous: Whether to retain the previous averages if no + scalars were collected on a given round + (default: True). + """ + def __init__(self, regex='.*', keep_previous=True): + self._regex = re.compile(regex) + self._keep_previous = keep_previous + self._cumulative = dict() + self._moments = dict() + self.update() + self._moments.clear() + + def names(self): + r"""Returns the names of all statistics broadcasted so far that + match the regular expression specified at construction time. + """ + return [name for name in _counters if self._regex.fullmatch(name)] + + def update(self): + r"""Copies current values of the internal counters to the + user-visible state and resets them for the next round. + + If `keep_previous=True` was specified at construction time, the + operation is skipped for statistics that have received no scalars + since the last update, retaining their previous averages. + + This method performs a number of GPU-to-CPU transfers and one + `torch.distributed.all_reduce()`. It is intended to be called + periodically in the main training loop, typically once every + N training steps. + """ + if not self._keep_previous: + self._moments.clear() + for name, cumulative in _sync(self.names()): + if name not in self._cumulative: + self._cumulative[name] = torch.zeros([_num_moments], dtype=_counter_dtype) + delta = cumulative - self._cumulative[name] + self._cumulative[name].copy_(cumulative) + if float(delta[0]) != 0: + self._moments[name] = delta + + def _get_delta(self, name): + r"""Returns the raw moments that were accumulated for the given + statistic between the last two calls to `update()`, or zero if + no scalars were collected. 
+ """ + assert self._regex.fullmatch(name) + if name not in self._moments: + self._moments[name] = torch.zeros([_num_moments], dtype=_counter_dtype) + return self._moments[name] + + def num(self, name): + r"""Returns the number of scalars that were accumulated for the given + statistic between the last two calls to `update()`, or zero if + no scalars were collected. + """ + delta = self._get_delta(name) + return int(delta[0]) + + def mean(self, name): + r"""Returns the mean of the scalars that were accumulated for the + given statistic between the last two calls to `update()`, or NaN if + no scalars were collected. + """ + delta = self._get_delta(name) + if int(delta[0]) == 0: + return float('nan') + return float(delta[1] / delta[0]) + + def std(self, name): + r"""Returns the standard deviation of the scalars that were + accumulated for the given statistic between the last two calls to + `update()`, or NaN if no scalars were collected. + """ + delta = self._get_delta(name) + if int(delta[0]) == 0 or not np.isfinite(float(delta[1])): + return float('nan') + if int(delta[0]) == 1: + return float(0) + mean = float(delta[1] / delta[0]) + raw_var = float(delta[2] / delta[0]) + return np.sqrt(max(raw_var - np.square(mean), 0)) + + def as_dict(self): + r"""Returns the averages accumulated between the last two calls to + `update()` as an `dnnlib.EasyDict`. The contents are as follows: + + dnnlib.EasyDict( + NAME = dnnlib.EasyDict(num=FLOAT, mean=FLOAT, std=FLOAT), + ... + ) + """ + stats = dnnlib.EasyDict() + for name in self.names(): + stats[name] = dnnlib.EasyDict(num=self.num(name), mean=self.mean(name), std=self.std(name)) + return stats + + def __getitem__(self, name): + r"""Convenience getter. + `collector[name]` is a synonym for `collector.mean(name)`. + """ + return self.mean(name) + +#---------------------------------------------------------------------------- + +def _sync(names): + r"""Synchronize the global cumulative counters across devices and + processes. Called internally by `Collector.update()`. + """ + if len(names) == 0: + return [] + global _sync_called + _sync_called = True + + # Collect deltas within current rank. + deltas = [] + device = _sync_device if _sync_device is not None else torch.device('cpu') + for name in names: + delta = torch.zeros([_num_moments], dtype=_counter_dtype, device=device) + for counter in _counters[name].values(): + delta.add_(counter.to(device)) + counter.copy_(torch.zeros_like(counter)) + deltas.append(delta) + deltas = torch.stack(deltas) + + # Sum deltas across ranks. + if _sync_device is not None: + torch.distributed.all_reduce(deltas) + + # Update cumulative values. + deltas = deltas.cpu() + for idx, name in enumerate(names): + if name not in _cumulative: + _cumulative[name] = torch.zeros([_num_moments], dtype=_counter_dtype) + _cumulative[name].add_(deltas[idx]) + + # Return name-value pairs. + return [(name, _cumulative[name]) for name in names] + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/train.py b/ThirdParty/eg3d/train.py new file mode 100644 index 0000000000000000000000000000000000000000..7201e0aef2b982d1af94a0783288aea25dacc5eb --- /dev/null +++ b/ThirdParty/eg3d/train.py @@ -0,0 +1,398 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Train a GAN using the techniques described in the paper +"Efficient Geometry-aware 3D Generative Adversarial Networks." + +Code adapted from +"Alias-Free Generative Adversarial Networks".""" + +import os +import click +import re +import json +import tempfile +import torch + +import dnnlib +from training import training_loop +from metrics import metric_main +from torch_utils import training_stats +from torch_utils import custom_ops + +#---------------------------------------------------------------------------- + +def subprocess_fn(rank, c, temp_dir): + dnnlib.util.Logger(file_name=os.path.join(c.run_dir, 'log.txt'), file_mode='a', should_flush=True) + + # Init torch.distributed. + if c.num_gpus > 1: + init_file = os.path.abspath(os.path.join(temp_dir, '.torch_distributed_init')) + if os.name == 'nt': + init_method = 'file:///' + init_file.replace('\\', '/') + torch.distributed.init_process_group(backend='gloo', init_method=init_method, rank=rank, world_size=c.num_gpus) + else: + init_method = f'file://{init_file}' + torch.distributed.init_process_group(backend='nccl', init_method=init_method, rank=rank, world_size=c.num_gpus) + + # Init torch_utils. + sync_device = torch.device('cuda', rank) if c.num_gpus > 1 else None + training_stats.init_multiprocessing(rank=rank, sync_device=sync_device) + if rank != 0: + custom_ops.verbosity = 'none' + + # Execute training loop. + training_loop.training_loop(rank=rank, **c) + +#---------------------------------------------------------------------------- + +def launch_training(c, desc, outdir, dry_run): + dnnlib.util.Logger(should_flush=True) + + # Pick output directory. + prev_run_dirs = [] + if os.path.isdir(outdir): + prev_run_dirs = [x for x in os.listdir(outdir) if os.path.isdir(os.path.join(outdir, x))] + prev_run_ids = [re.match(r'^\d+', x) for x in prev_run_dirs] + prev_run_ids = [int(x.group()) for x in prev_run_ids if x is not None] + cur_run_id = max(prev_run_ids, default=-1) + 1 + c.run_dir = os.path.join(outdir, f'{cur_run_id:05d}-{desc}') + assert not os.path.exists(c.run_dir) + + # Print options. + print() + print('Training options:') + print(json.dumps(c, indent=2)) + print() + print(f'Output directory: {c.run_dir}') + print(f'Number of GPUs: {c.num_gpus}') + print(f'Batch size: {c.batch_size} images') + print(f'Training duration: {c.total_kimg} kimg') + print(f'Dataset path: {c.training_set_kwargs.path}') + print(f'Dataset size: {c.training_set_kwargs.max_size} images') + print(f'Dataset resolution: {c.training_set_kwargs.resolution}') + print(f'Dataset labels: {c.training_set_kwargs.use_labels}') + print(f'Dataset x-flips: {c.training_set_kwargs.xflip}') + print() + + # Dry run? + if dry_run: + print('Dry run; exiting.') + return + + # Create output directory. + print('Creating output directory...') + os.makedirs(c.run_dir) + with open(os.path.join(c.run_dir, 'training_options.json'), 'wt') as f: + json.dump(c, f, indent=2) + + # Launch processes. 
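+    # (Editor's note.) torch.multiprocessing.spawn() invokes subprocess_fn with the
+    # process index as its first positional argument, so only (c, temp_dir) are
+    # forwarded via `args`; the single-GPU path below simply calls
+    # subprocess_fn(rank=0, ...) in-process instead of spawning workers.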
+ print('Launching processes...') + torch.multiprocessing.set_start_method('spawn') + with tempfile.TemporaryDirectory() as temp_dir: + if c.num_gpus == 1: + subprocess_fn(rank=0, c=c, temp_dir=temp_dir) + else: + torch.multiprocessing.spawn(fn=subprocess_fn, args=(c, temp_dir), nprocs=c.num_gpus) + +#---------------------------------------------------------------------------- + +def init_dataset_kwargs(data): + try: + dataset_kwargs = dnnlib.EasyDict(class_name='training.dataset.ImageFolderDataset', path=data, use_labels=True, max_size=None, xflip=False) + dataset_obj = dnnlib.util.construct_class_by_name(**dataset_kwargs) # Subclass of training.dataset.Dataset. + dataset_kwargs.resolution = dataset_obj.resolution # Be explicit about resolution. + dataset_kwargs.use_labels = dataset_obj.has_labels # Be explicit about labels. + dataset_kwargs.max_size = len(dataset_obj) # Be explicit about dataset size. + return dataset_kwargs, dataset_obj.name + except IOError as err: + raise click.ClickException(f'--data: {err}') + +#---------------------------------------------------------------------------- + +def parse_comma_separated_list(s): + if isinstance(s, list): + return s + if s is None or s.lower() == 'none' or s == '': + return [] + return s.split(',') + +#---------------------------------------------------------------------------- + +@click.command() + +# Required. +@click.option('--outdir', help='Where to save the results', metavar='DIR', required=True) +@click.option('--cfg', help='Base configuration', type=str, required=True) +@click.option('--data', help='Training data', metavar='[ZIP|DIR]', type=str, required=True) +@click.option('--gpus', help='Number of GPUs to use', metavar='INT', type=click.IntRange(min=1), required=True) +@click.option('--batch', help='Total batch size', metavar='INT', type=click.IntRange(min=1), required=True) +@click.option('--gamma', help='R1 regularization weight', metavar='FLOAT', type=click.FloatRange(min=0), required=True) + +# Optional features. +@click.option('--cond', help='Train conditional model', metavar='BOOL', type=bool, default=True, show_default=True) +@click.option('--mirror', help='Enable dataset x-flips', metavar='BOOL', type=bool, default=False, show_default=True) +@click.option('--aug', help='Augmentation mode', type=click.Choice(['noaug', 'ada', 'fixed']), default='noaug', show_default=True) +@click.option('--resume', help='Resume from given network pickle', metavar='[PATH|URL]', type=str) +@click.option('--freezed', help='Freeze first layers of D', metavar='INT', type=click.IntRange(min=0), default=0, show_default=True) + +# Misc hyperparameters. +@click.option('--p', help='Probability for --aug=fixed', metavar='FLOAT', type=click.FloatRange(min=0, max=1), default=0.2, show_default=True) +@click.option('--target', help='Target value for --aug=ada', metavar='FLOAT', type=click.FloatRange(min=0, max=1), default=0.6, show_default=True) +@click.option('--batch-gpu', help='Limit batch size per GPU', metavar='INT', type=click.IntRange(min=1)) +@click.option('--cbase', help='Capacity multiplier', metavar='INT', type=click.IntRange(min=1), default=32768, show_default=True) +@click.option('--cmax', help='Max. 
feature maps', metavar='INT', type=click.IntRange(min=1), default=512, show_default=True) +@click.option('--glr', help='G learning rate [default: varies]', metavar='FLOAT', type=click.FloatRange(min=0)) +@click.option('--dlr', help='D learning rate', metavar='FLOAT', type=click.FloatRange(min=0), default=0.002, show_default=True) +@click.option('--map-depth', help='Mapping network depth [default: varies]', metavar='INT', type=click.IntRange(min=1), default=2, show_default=True) +@click.option('--mbstd-group', help='Minibatch std group size', metavar='INT', type=click.IntRange(min=1), default=4, show_default=True) + +# Misc settings. +@click.option('--desc', help='String to include in result dir name', metavar='STR', type=str) +@click.option('--metrics', help='Quality metrics', metavar='[NAME|A,B,C|none]', type=parse_comma_separated_list, default='fid50k_full', show_default=True) +@click.option('--kimg', help='Total training duration', metavar='KIMG', type=click.IntRange(min=1), default=25000, show_default=True) +@click.option('--tick', help='How often to print progress', metavar='KIMG', type=click.IntRange(min=1), default=4, show_default=True) +@click.option('--snap', help='How often to save snapshots', metavar='TICKS', type=click.IntRange(min=1), default=50, show_default=True) +@click.option('--seed', help='Random seed', metavar='INT', type=click.IntRange(min=0), default=0, show_default=True) +# @click.option('--fp32', help='Disable mixed-precision', metavar='BOOL', type=bool, default=False, show_default=True) +@click.option('--nobench', help='Disable cuDNN benchmarking', metavar='BOOL', type=bool, default=False, show_default=True) +@click.option('--workers', help='DataLoader worker processes', metavar='INT', type=click.IntRange(min=1), default=3, show_default=True) +@click.option('-n','--dry-run', help='Print training options and exit', is_flag=True) + +# @click.option('--sr_module', help='Superresolution module', metavar='STR', type=str, required=True) +@click.option('--neural_rendering_resolution_initial', help='Resolution to render at', metavar='INT', type=click.IntRange(min=1), default=64, required=False) +@click.option('--neural_rendering_resolution_final', help='Final resolution to render at, if blending', metavar='INT', type=click.IntRange(min=1), required=False, default=None) +@click.option('--neural_rendering_resolution_fade_kimg', help='Kimg to blend resolution over', metavar='INT', type=click.IntRange(min=0), required=False, default=1000, show_default=True) + +@click.option('--blur_fade_kimg', help='Blur over how many', metavar='INT', type=click.IntRange(min=1), required=False, default=200) +@click.option('--gen_pose_cond', help='If true, enable generator pose conditioning.', metavar='BOOL', type=bool, required=False, default=False) +@click.option('--c-scale', help='Scale factor for generator pose conditioning.', metavar='FLOAT', type=click.FloatRange(min=0), required=False, default=1) +@click.option('--c-noise', help='Add noise for generator pose conditioning.', metavar='FLOAT', type=click.FloatRange(min=0), required=False, default=0) +@click.option('--gpc_reg_prob', help='Strength of swapping regularization. None means no generator pose conditioning, i.e. 
condition with zeros.', metavar='FLOAT', type=click.FloatRange(min=0), required=False, default=0.5) +@click.option('--gpc_reg_fade_kimg', help='Length of swapping prob fade', metavar='INT', type=click.IntRange(min=0), required=False, default=1000) +@click.option('--disc_c_noise', help='Strength of discriminator pose conditioning regularization, in standard deviations.', metavar='FLOAT', type=click.FloatRange(min=0), required=False, default=0) +@click.option('--sr_noise_mode', help='Type of noise for superresolution', metavar='STR', type=click.Choice(['random', 'none']), required=False, default='none') +@click.option('--resume_blur', help='Enable to blur even on resume', metavar='BOOL', type=bool, required=False, default=False) +@click.option('--sr_num_fp16_res', help='Number of fp16 layers in superresolution', metavar='INT', type=click.IntRange(min=0), default=4, required=False, show_default=True) +@click.option('--g_num_fp16_res', help='Number of fp16 layers in generator', metavar='INT', type=click.IntRange(min=0), default=0, required=False, show_default=True) +@click.option('--d_num_fp16_res', help='Number of fp16 layers in discriminator', metavar='INT', type=click.IntRange(min=0), default=4, required=False, show_default=True) +@click.option('--sr_first_cutoff', help='First cutoff for AF superresolution', metavar='INT', type=click.IntRange(min=2), default=2, required=False, show_default=True) +@click.option('--sr_first_stopband', help='First cutoff for AF superresolution', metavar='FLOAT', type=click.FloatRange(min=2), default=2**2.1, required=False, show_default=True) +@click.option('--style_mixing_prob', help='Style-mixing regularization probability for training.', metavar='FLOAT', type=click.FloatRange(min=0, max=1), default=0, required=False, show_default=True) +@click.option('--sr-module', help='Superresolution module override', metavar='STR', type=str, required=False, default=None) +@click.option('--density_reg', help='Density regularization strength.', metavar='FLOAT', type=click.FloatRange(min=0), default=0.25, required=False, show_default=True) +@click.option('--density_reg_every', help='lazy density reg', metavar='int', type=click.FloatRange(min=1), default=4, required=False, show_default=True) +@click.option('--density_reg_p_dist', help='density regularization strength.', metavar='FLOAT', type=click.FloatRange(min=0), default=0.004, required=False, show_default=True) +@click.option('--reg_type', help='Type of regularization', metavar='STR', type=click.Choice(['l1', 'l1-alt', 'monotonic-detach', 'monotonic-fixed', 'total-variation']), required=False, default='l1') +@click.option('--decoder_lr_mul', help='decoder learning rate multiplier.', metavar='FLOAT', type=click.FloatRange(min=0), default=1, required=False, show_default=True) + +def main(**kwargs): + """Train a GAN using the techniques described in the paper + "Alias-Free Generative Adversarial Networks". + + Examples: + + \b + # Train StyleGAN3-T for AFHQv2 using 8 GPUs. + python train.py --outdir=~/training-runs --cfg=stylegan3-t --data=~/datasets/afhqv2-512x512.zip \\ + --gpus=8 --batch=32 --gamma=8.2 --mirror=1 + + \b + # Fine-tune StyleGAN3-R for MetFaces-U using 1 GPU, starting from the pre-trained FFHQ-U pickle. 
+ python train.py --outdir=~/training-runs --cfg=stylegan3-r --data=~/datasets/metfacesu-1024x1024.zip \\ + --gpus=8 --batch=32 --gamma=6.6 --mirror=1 --kimg=5000 --snap=5 \\ + --resume=https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/stylegan3-r-ffhqu-1024x1024.pkl + + \b + # Train StyleGAN2 for FFHQ at 1024x1024 resolution using 8 GPUs. + python train.py --outdir=~/training-runs --cfg=stylegan2 --data=~/datasets/ffhq-1024x1024.zip \\ + --gpus=8 --batch=32 --gamma=10 --mirror=1 --aug=noaug + """ + + # Initialize config. + opts = dnnlib.EasyDict(kwargs) # Command line arguments. + c = dnnlib.EasyDict() # Main config dict. + c.G_kwargs = dnnlib.EasyDict(class_name=None, z_dim=512, w_dim=512, mapping_kwargs=dnnlib.EasyDict()) + c.D_kwargs = dnnlib.EasyDict(class_name='training.networks_stylegan2.Discriminator', block_kwargs=dnnlib.EasyDict(), mapping_kwargs=dnnlib.EasyDict(), epilogue_kwargs=dnnlib.EasyDict()) + c.G_opt_kwargs = dnnlib.EasyDict(class_name='torch.optim.Adam', betas=[0,0.99], eps=1e-8) + c.D_opt_kwargs = dnnlib.EasyDict(class_name='torch.optim.Adam', betas=[0,0.99], eps=1e-8) + c.loss_kwargs = dnnlib.EasyDict(class_name='training.loss.StyleGAN2Loss') + c.data_loader_kwargs = dnnlib.EasyDict(pin_memory=True, prefetch_factor=2) + + # Training set. + c.training_set_kwargs, dataset_name = init_dataset_kwargs(data=opts.data) + if opts.cond and not c.training_set_kwargs.use_labels: + raise click.ClickException('--cond=True requires labels specified in dataset.json') + c.training_set_kwargs.use_labels = opts.cond + c.training_set_kwargs.xflip = opts.mirror + + # Hyperparameters & settings. + c.num_gpus = opts.gpus + c.batch_size = opts.batch + c.batch_gpu = opts.batch_gpu or opts.batch // opts.gpus + c.G_kwargs.channel_base = c.D_kwargs.channel_base = opts.cbase + c.G_kwargs.channel_max = c.D_kwargs.channel_max = opts.cmax + c.G_kwargs.mapping_kwargs.num_layers = opts.map_depth + c.D_kwargs.block_kwargs.freeze_layers = opts.freezed + c.D_kwargs.epilogue_kwargs.mbstd_group_size = opts.mbstd_group + c.loss_kwargs.r1_gamma = opts.gamma + c.G_opt_kwargs.lr = (0.002 if opts.cfg == 'stylegan2' else 0.0025) if opts.glr is None else opts.glr + c.D_opt_kwargs.lr = opts.dlr + c.metrics = opts.metrics + c.total_kimg = opts.kimg + c.kimg_per_tick = opts.tick + c.image_snapshot_ticks = c.network_snapshot_ticks = opts.snap + c.random_seed = c.training_set_kwargs.random_seed = opts.seed + c.data_loader_kwargs.num_workers = opts.workers + + # Sanity checks. + if c.batch_size % c.num_gpus != 0: + raise click.ClickException('--batch must be a multiple of --gpus') + if c.batch_size % (c.num_gpus * c.batch_gpu) != 0: + raise click.ClickException('--batch must be a multiple of --gpus times --batch-gpu') + if c.batch_gpu < c.D_kwargs.epilogue_kwargs.mbstd_group_size: + raise click.ClickException('--batch-gpu cannot be smaller than --mbstd') + if any(not metric_main.is_valid_metric(metric) for metric in c.metrics): + raise click.ClickException('\n'.join(['--metrics can only contain the following values:'] + metric_main.list_valid_metrics())) + + # Base configuration. + c.ema_kimg = c.batch_size * 10 / 32 + c.G_kwargs.class_name = 'training.triplane.TriPlaneGenerator' + c.D_kwargs.class_name = 'training.dual_discriminator.DualDiscriminator' + c.G_kwargs.fused_modconv_default = 'inference_only' # Speed up training by using regular convolutions instead of grouped convolutions. 
+ c.loss_kwargs.filter_mode = 'antialiased' # Filter mode for raw images ['antialiased', 'none', float [0-1]] + c.D_kwargs.disc_c_noise = opts.disc_c_noise # Regularization for discriminator pose conditioning + + if c.training_set_kwargs.resolution == 512: + sr_module = 'training.superresolution.SuperresolutionHybrid8XDC' + elif c.training_set_kwargs.resolution == 256: + sr_module = 'training.superresolution.SuperresolutionHybrid4X' + elif c.training_set_kwargs.resolution == 128: + sr_module = 'training.superresolution.SuperresolutionHybrid2X' + else: + assert False, f"Unsupported resolution {c.training_set_kwargs.resolution}; make a new superresolution module" + + if opts.sr_module != None: + sr_module = opts.sr_module + + rendering_options = { + 'image_resolution': c.training_set_kwargs.resolution, + 'disparity_space_sampling': False, + 'clamp_mode': 'softplus', + 'superresolution_module': sr_module, + 'c_gen_conditioning_zero': not opts.gen_pose_cond, # if true, fill generator pose conditioning label with dummy zero vector + 'gpc_reg_prob': opts.gpc_reg_prob if opts.gen_pose_cond else None, + 'c_scale': opts.c_scale, # mutliplier for generator pose conditioning label + 'superresolution_noise_mode': opts.sr_noise_mode, # [random or none], whether to inject pixel noise into super-resolution layers + 'density_reg': opts.density_reg, # strength of density regularization + 'density_reg_p_dist': opts.density_reg_p_dist, # distance at which to sample perturbed points for density regularization + 'reg_type': opts.reg_type, # for experimenting with variations on density regularization + 'decoder_lr_mul': opts.decoder_lr_mul, # learning rate multiplier for decoder + 'sr_antialias': True, + } + + if opts.cfg == 'ffhq': + rendering_options.update({ + 'depth_resolution': 48, # number of uniform samples to take per ray. + 'depth_resolution_importance': 48, # number of importance samples to take per ray. + 'ray_start': 2.25, # near point along each ray to start taking samples. + 'ray_end': 3.3, # far point along each ray to stop taking samples. + 'box_warp': 1, # the side-length of the bounding box spanned by the tri-planes; box_warp=1 means [-0.5, -0.5, -0.5] -> [0.5, 0.5, 0.5]. + 'avg_camera_radius': 2.7, # used only in the visualizer to specify camera orbit radius. + 'avg_camera_pivot': [0, 0, 0.2], # used only in the visualizer to control center of camera rotation. + }) + elif opts.cfg == 'afhq': + rendering_options.update({ + 'depth_resolution': 48, + 'depth_resolution_importance': 48, + 'ray_start': 2.25, + 'ray_end': 3.3, + 'box_warp': 1, + 'avg_camera_radius': 2.7, + 'avg_camera_pivot': [0, 0, -0.06], + }) + elif opts.cfg == 'shapenet': + rendering_options.update({ + 'depth_resolution': 64, + 'depth_resolution_importance': 64, + 'ray_start': 0.1, + 'ray_end': 2.6, + 'box_warp': 1.6, + 'white_back': True, + 'avg_camera_radius': 1.7, + 'avg_camera_pivot': [0, 0, 0], + }) + else: + assert False, "Need to specify config" + + + + if opts.density_reg > 0: + c.G_reg_interval = opts.density_reg_every + c.G_kwargs.rendering_kwargs = rendering_options + c.G_kwargs.num_fp16_res = 0 + c.loss_kwargs.blur_init_sigma = 10 # Blur the images seen by the discriminator. + c.loss_kwargs.blur_fade_kimg = c.batch_size * opts.blur_fade_kimg / 32 # Fade out the blur during the first N kimg. 
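+    # (Editor's worked example, not in the original source.) With --batch=32 and
+    # --blur_fade_kimg=200 the discriminator blur fades over 32 * 200 / 32 = 200 kimg,
+    # and ema_kimg above is 32 * 10 / 32 = 10; both scale linearly with batch size.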
+ + c.loss_kwargs.gpc_reg_prob = opts.gpc_reg_prob if opts.gen_pose_cond else None + c.loss_kwargs.gpc_reg_fade_kimg = opts.gpc_reg_fade_kimg + c.loss_kwargs.dual_discrimination = True + c.loss_kwargs.neural_rendering_resolution_initial = opts.neural_rendering_resolution_initial + c.loss_kwargs.neural_rendering_resolution_final = opts.neural_rendering_resolution_final + c.loss_kwargs.neural_rendering_resolution_fade_kimg = opts.neural_rendering_resolution_fade_kimg + c.G_kwargs.sr_num_fp16_res = opts.sr_num_fp16_res + + c.G_kwargs.sr_kwargs = dnnlib.EasyDict(channel_base=opts.cbase, channel_max=opts.cmax, fused_modconv_default='inference_only') + + c.loss_kwargs.style_mixing_prob = opts.style_mixing_prob + + # Augmentation. + if opts.aug != 'noaug': + c.augment_kwargs = dnnlib.EasyDict(class_name='training.augment.AugmentPipe', xflip=1, rotate90=1, xint=1, scale=1, rotate=1, aniso=1, xfrac=1, brightness=1, contrast=1, lumaflip=1, hue=1, saturation=1) + if opts.aug == 'ada': + c.ada_target = opts.target + if opts.aug == 'fixed': + c.augment_p = opts.p + + # Resume. + if opts.resume is not None: + c.resume_pkl = opts.resume + c.ada_kimg = 100 # Make ADA react faster at the beginning. + c.ema_rampup = None # Disable EMA rampup. + if not opts.resume_blur: + c.loss_kwargs.blur_init_sigma = 0 # Disable blur rampup. + c.loss_kwargs.gpc_reg_fade_kimg = 0 # Disable swapping rampup + + # Performance-related toggles. + # if opts.fp32: + # c.G_kwargs.num_fp16_res = c.D_kwargs.num_fp16_res = 0 + # c.G_kwargs.conv_clamp = c.D_kwargs.conv_clamp = None + c.G_kwargs.num_fp16_res = opts.g_num_fp16_res + c.G_kwargs.conv_clamp = 256 if opts.g_num_fp16_res > 0 else None + c.D_kwargs.num_fp16_res = opts.d_num_fp16_res + c.D_kwargs.conv_clamp = 256 if opts.d_num_fp16_res > 0 else None + + if opts.nobench: + c.cudnn_benchmark = False + + # Description string. + desc = f'{opts.cfg:s}-{dataset_name:s}-gpus{c.num_gpus:d}-batch{c.batch_size:d}-gamma{c.loss_kwargs.r1_gamma:g}' + if opts.desc is not None: + desc += f'-{opts.desc}' + + # Launch. + launch_training(c=c, desc=desc, outdir=opts.outdir, dry_run=opts.dry_run) + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + main() # pylint: disable=no-value-for-parameter + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/__init__.py b/ThirdParty/eg3d/training/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/training/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +# empty diff --git a/ThirdParty/eg3d/training/__pycache__/__init__.cpython-310.pyc b/ThirdParty/eg3d/training/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d30d6c73feefa9638fae6dcd808bdc7a3649ba62 Binary files /dev/null and b/ThirdParty/eg3d/training/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/training/__pycache__/networks_stylegan2.cpython-310.pyc b/ThirdParty/eg3d/training/__pycache__/networks_stylegan2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..277ea0b39c606031257cc8b2dbad9b695c6f9bfa Binary files /dev/null and b/ThirdParty/eg3d/training/__pycache__/networks_stylegan2.cpython-310.pyc differ diff --git a/ThirdParty/eg3d/training/augment.py b/ThirdParty/eg3d/training/augment.py new file mode 100644 index 0000000000000000000000000000000000000000..7b00a4ade50459c16e34fa4c132b2cb947cfff28 --- /dev/null +++ b/ThirdParty/eg3d/training/augment.py @@ -0,0 +1,441 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Augmentation pipeline from the paper +"Training Generative Adversarial Networks with Limited Data". +Matches the original implementation by Karras et al. at +https://github.com/NVlabs/stylegan2-ada/blob/main/training/augment.py""" + +import numpy as np +import scipy.signal +import torch +from torch_utils import persistence +from torch_utils import misc +from torch_utils.ops import upfirdn2d +from torch_utils.ops import grid_sample_gradfix +from torch_utils.ops import conv2d_gradfix + +#---------------------------------------------------------------------------- +# Coefficients of various wavelet decomposition low-pass filters. 
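+# (Editor's illustrative note.) Each entry lists the low-pass analysis taps of the
+# named wavelet; for instance 'sym6' below provides the 12-tap filter that
+# AugmentPipe later registers as its geometric lowpass filter via
+# upfirdn2d.setup_filter(wavelets['sym6']).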
+ +wavelets = { + 'haar': [0.7071067811865476, 0.7071067811865476], + 'db1': [0.7071067811865476, 0.7071067811865476], + 'db2': [-0.12940952255092145, 0.22414386804185735, 0.836516303737469, 0.48296291314469025], + 'db3': [0.035226291882100656, -0.08544127388224149, -0.13501102001039084, 0.4598775021193313, 0.8068915093133388, 0.3326705529509569], + 'db4': [-0.010597401784997278, 0.032883011666982945, 0.030841381835986965, -0.18703481171888114, -0.02798376941698385, 0.6308807679295904, 0.7148465705525415, 0.23037781330885523], + 'db5': [0.003335725285001549, -0.012580751999015526, -0.006241490213011705, 0.07757149384006515, -0.03224486958502952, -0.24229488706619015, 0.13842814590110342, 0.7243085284385744, 0.6038292697974729, 0.160102397974125], + 'db6': [-0.00107730108499558, 0.004777257511010651, 0.0005538422009938016, -0.031582039318031156, 0.02752286553001629, 0.09750160558707936, -0.12976686756709563, -0.22626469396516913, 0.3152503517092432, 0.7511339080215775, 0.4946238903983854, 0.11154074335008017], + 'db7': [0.0003537138000010399, -0.0018016407039998328, 0.00042957797300470274, 0.012550998556013784, -0.01657454163101562, -0.03802993693503463, 0.0806126091510659, 0.07130921926705004, -0.22403618499416572, -0.14390600392910627, 0.4697822874053586, 0.7291320908465551, 0.39653931948230575, 0.07785205408506236], + 'db8': [-0.00011747678400228192, 0.0006754494059985568, -0.0003917403729959771, -0.00487035299301066, 0.008746094047015655, 0.013981027917015516, -0.04408825393106472, -0.01736930100202211, 0.128747426620186, 0.00047248457399797254, -0.2840155429624281, -0.015829105256023893, 0.5853546836548691, 0.6756307362980128, 0.3128715909144659, 0.05441584224308161], + 'sym2': [-0.12940952255092145, 0.22414386804185735, 0.836516303737469, 0.48296291314469025], + 'sym3': [0.035226291882100656, -0.08544127388224149, -0.13501102001039084, 0.4598775021193313, 0.8068915093133388, 0.3326705529509569], + 'sym4': [-0.07576571478927333, -0.02963552764599851, 0.49761866763201545, 0.8037387518059161, 0.29785779560527736, -0.09921954357684722, -0.012603967262037833, 0.0322231006040427], + 'sym5': [0.027333068345077982, 0.029519490925774643, -0.039134249302383094, 0.1993975339773936, 0.7234076904024206, 0.6339789634582119, 0.01660210576452232, -0.17532808990845047, -0.021101834024758855, 0.019538882735286728], + 'sym6': [0.015404109327027373, 0.0034907120842174702, -0.11799011114819057, -0.048311742585633, 0.4910559419267466, 0.787641141030194, 0.3379294217276218, -0.07263752278646252, -0.021060292512300564, 0.04472490177066578, 0.0017677118642428036, -0.007800708325034148], + 'sym7': [0.002681814568257878, -0.0010473848886829163, -0.01263630340325193, 0.03051551316596357, 0.0678926935013727, -0.049552834937127255, 0.017441255086855827, 0.5361019170917628, 0.767764317003164, 0.2886296317515146, -0.14004724044296152, -0.10780823770381774, 0.004010244871533663, 0.010268176708511255], + 'sym8': [-0.0033824159510061256, -0.0005421323317911481, 0.03169508781149298, 0.007607487324917605, -0.1432942383508097, -0.061273359067658524, 0.4813596512583722, 0.7771857517005235, 0.3644418948353314, -0.05194583810770904, -0.027219029917056003, 0.049137179673607506, 0.003808752013890615, -0.01495225833704823, -0.0003029205147213668, 0.0018899503327594609], +} + +#---------------------------------------------------------------------------- +# Helpers for constructing transformation matrices. 
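+# (Editor's illustrative example, not part of the original code.) These helpers
+# return homogeneous matrices that can be chained with @; e.g. the augmentation
+# pipeline accumulates an inverse geometric transform roughly as
+#   G_inv = G_inv @ scale2d_inv(s, s) @ rotate2d_inv(-theta) @ translate2d_inv(tx, ty)
+# mapping output pixel coordinates back into the input image.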
+ +def matrix(*rows, device=None): + assert all(len(row) == len(rows[0]) for row in rows) + elems = [x for row in rows for x in row] + ref = [x for x in elems if isinstance(x, torch.Tensor)] + if len(ref) == 0: + return misc.constant(np.asarray(rows), device=device) + assert device is None or device == ref[0].device + elems = [x if isinstance(x, torch.Tensor) else misc.constant(x, shape=ref[0].shape, device=ref[0].device) for x in elems] + return torch.stack(elems, dim=-1).reshape(ref[0].shape + (len(rows), -1)) + +def translate2d(tx, ty, **kwargs): + return matrix( + [1, 0, tx], + [0, 1, ty], + [0, 0, 1], + **kwargs) + +def translate3d(tx, ty, tz, **kwargs): + return matrix( + [1, 0, 0, tx], + [0, 1, 0, ty], + [0, 0, 1, tz], + [0, 0, 0, 1], + **kwargs) + +def scale2d(sx, sy, **kwargs): + return matrix( + [sx, 0, 0], + [0, sy, 0], + [0, 0, 1], + **kwargs) + +def scale3d(sx, sy, sz, **kwargs): + return matrix( + [sx, 0, 0, 0], + [0, sy, 0, 0], + [0, 0, sz, 0], + [0, 0, 0, 1], + **kwargs) + +def rotate2d(theta, **kwargs): + return matrix( + [torch.cos(theta), torch.sin(-theta), 0], + [torch.sin(theta), torch.cos(theta), 0], + [0, 0, 1], + **kwargs) + +def rotate3d(v, theta, **kwargs): + vx = v[..., 0]; vy = v[..., 1]; vz = v[..., 2] + s = torch.sin(theta); c = torch.cos(theta); cc = 1 - c + return matrix( + [vx*vx*cc+c, vx*vy*cc-vz*s, vx*vz*cc+vy*s, 0], + [vy*vx*cc+vz*s, vy*vy*cc+c, vy*vz*cc-vx*s, 0], + [vz*vx*cc-vy*s, vz*vy*cc+vx*s, vz*vz*cc+c, 0], + [0, 0, 0, 1], + **kwargs) + +def translate2d_inv(tx, ty, **kwargs): + return translate2d(-tx, -ty, **kwargs) + +def scale2d_inv(sx, sy, **kwargs): + return scale2d(1 / sx, 1 / sy, **kwargs) + +def rotate2d_inv(theta, **kwargs): + return rotate2d(-theta, **kwargs) + +#---------------------------------------------------------------------------- +# Versatile image augmentation pipeline from the paper +# "Training Generative Adversarial Networks with Limited Data". +# +# All augmentations are disabled by default; individual augmentations can +# be enabled by setting their probability multipliers to 1. + +@persistence.persistent_class +class AugmentPipe(torch.nn.Module): + def __init__(self, + xflip=0, rotate90=0, xint=0, xint_max=0.125, + scale=0, rotate=0, aniso=0, xfrac=0, scale_std=0.2, rotate_max=1, aniso_std=0.2, xfrac_std=0.125, + brightness=0, contrast=0, lumaflip=0, hue=0, saturation=0, brightness_std=0.2, contrast_std=0.5, hue_max=1, saturation_std=1, + imgfilter=0, imgfilter_bands=[1,1,1,1], imgfilter_std=1, + noise=0, cutout=0, noise_std=0.1, cutout_size=0.5, + ): + super().__init__() + self.register_buffer('p', torch.ones([])) # Overall multiplier for augmentation probability. + + # Pixel blitting. + self.xflip = float(xflip) # Probability multiplier for x-flip. + self.rotate90 = float(rotate90) # Probability multiplier for 90 degree rotations. + self.xint = float(xint) # Probability multiplier for integer translation. + self.xint_max = float(xint_max) # Range of integer translation, relative to image dimensions. + + # General geometric transformations. + self.scale = float(scale) # Probability multiplier for isotropic scaling. + self.rotate = float(rotate) # Probability multiplier for arbitrary rotation. + self.aniso = float(aniso) # Probability multiplier for anisotropic scaling. + self.xfrac = float(xfrac) # Probability multiplier for fractional translation. + self.scale_std = float(scale_std) # Log2 standard deviation of isotropic scaling. + self.rotate_max = float(rotate_max) # Range of arbitrary rotation, 1 = full circle. 
+ self.aniso_std = float(aniso_std) # Log2 standard deviation of anisotropic scaling. + self.xfrac_std = float(xfrac_std) # Standard deviation of frational translation, relative to image dimensions. + + # Color transformations. + self.brightness = float(brightness) # Probability multiplier for brightness. + self.contrast = float(contrast) # Probability multiplier for contrast. + self.lumaflip = float(lumaflip) # Probability multiplier for luma flip. + self.hue = float(hue) # Probability multiplier for hue rotation. + self.saturation = float(saturation) # Probability multiplier for saturation. + self.brightness_std = float(brightness_std) # Standard deviation of brightness. + self.contrast_std = float(contrast_std) # Log2 standard deviation of contrast. + self.hue_max = float(hue_max) # Range of hue rotation, 1 = full circle. + self.saturation_std = float(saturation_std) # Log2 standard deviation of saturation. + + # Image-space filtering. + self.imgfilter = float(imgfilter) # Probability multiplier for image-space filtering. + self.imgfilter_bands = list(imgfilter_bands) # Probability multipliers for individual frequency bands. + self.imgfilter_std = float(imgfilter_std) # Log2 standard deviation of image-space filter amplification. + + # Image-space corruptions. + self.noise = float(noise) # Probability multiplier for additive RGB noise. + self.cutout = float(cutout) # Probability multiplier for cutout. + self.noise_std = float(noise_std) # Standard deviation of additive RGB noise. + self.cutout_size = float(cutout_size) # Size of the cutout rectangle, relative to image dimensions. + + # Setup orthogonal lowpass filter for geometric augmentations. + self.register_buffer('Hz_geom', upfirdn2d.setup_filter(wavelets['sym6'])) + + # Construct filter bank for image-space filtering. + Hz_lo = np.asarray(wavelets['sym2']) # H(z) + Hz_hi = Hz_lo * ((-1) ** np.arange(Hz_lo.size)) # H(-z) + Hz_lo2 = np.convolve(Hz_lo, Hz_lo[::-1]) / 2 # H(z) * H(z^-1) / 2 + Hz_hi2 = np.convolve(Hz_hi, Hz_hi[::-1]) / 2 # H(-z) * H(-z^-1) / 2 + Hz_fbank = np.eye(4, 1) # Bandpass(H(z), b_i) + for i in range(1, Hz_fbank.shape[0]): + Hz_fbank = np.dstack([Hz_fbank, np.zeros_like(Hz_fbank)]).reshape(Hz_fbank.shape[0], -1)[:, :-1] + Hz_fbank = scipy.signal.convolve(Hz_fbank, [Hz_lo2]) + Hz_fbank[i, (Hz_fbank.shape[1] - Hz_hi2.size) // 2 : (Hz_fbank.shape[1] + Hz_hi2.size) // 2] += Hz_hi2 + self.register_buffer('Hz_fbank', torch.as_tensor(Hz_fbank, dtype=torch.float32)) + + def forward(self, images, debug_percentile=None): + assert isinstance(images, torch.Tensor) and images.ndim == 4 + batch_size, num_channels, height, width = images.shape + device = images.device + if debug_percentile is not None: + debug_percentile = torch.as_tensor(debug_percentile, dtype=torch.float32, device=device) + + # ------------------------------------- + # Select parameters for pixel blitting. + # ------------------------------------- + + # Initialize inverse homogeneous 2D transform: G_inv @ pixel_out ==> pixel_in + I_3 = torch.eye(3, device=device) + G_inv = I_3 + + # Apply x-flip with probability (xflip * strength). + if self.xflip > 0: + i = torch.floor(torch.rand([batch_size], device=device) * 2) + i = torch.where(torch.rand([batch_size], device=device) < self.xflip * self.p, i, torch.zeros_like(i)) + if debug_percentile is not None: + i = torch.full_like(i, torch.floor(debug_percentile * 2)) + G_inv = G_inv @ scale2d_inv(1 - 2 * i, 1) + + # Apply 90 degree rotations with probability (rotate90 * strength). 
+ if self.rotate90 > 0: + i = torch.floor(torch.rand([batch_size], device=device) * 4) + i = torch.where(torch.rand([batch_size], device=device) < self.rotate90 * self.p, i, torch.zeros_like(i)) + if debug_percentile is not None: + i = torch.full_like(i, torch.floor(debug_percentile * 4)) + G_inv = G_inv @ rotate2d_inv(-np.pi / 2 * i) + + # Apply integer translation with probability (xint * strength). + if self.xint > 0: + t = (torch.rand([batch_size, 2], device=device) * 2 - 1) * self.xint_max + t = torch.where(torch.rand([batch_size, 1], device=device) < self.xint * self.p, t, torch.zeros_like(t)) + if debug_percentile is not None: + t = torch.full_like(t, (debug_percentile * 2 - 1) * self.xint_max) + G_inv = G_inv @ translate2d_inv(torch.round(t[:,0] * width), torch.round(t[:,1] * height)) + + # -------------------------------------------------------- + # Select parameters for general geometric transformations. + # -------------------------------------------------------- + + # Apply isotropic scaling with probability (scale * strength). + if self.scale > 0: + s = torch.exp2(torch.randn([batch_size], device=device) * self.scale_std) + s = torch.where(torch.rand([batch_size], device=device) < self.scale * self.p, s, torch.ones_like(s)) + if debug_percentile is not None: + s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.scale_std)) + G_inv = G_inv @ scale2d_inv(s, s) + + # Apply pre-rotation with probability p_rot. + p_rot = 1 - torch.sqrt((1 - self.rotate * self.p).clamp(0, 1)) # P(pre OR post) = p + if self.rotate > 0: + theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.rotate_max + theta = torch.where(torch.rand([batch_size], device=device) < p_rot, theta, torch.zeros_like(theta)) + if debug_percentile is not None: + theta = torch.full_like(theta, (debug_percentile * 2 - 1) * np.pi * self.rotate_max) + G_inv = G_inv @ rotate2d_inv(-theta) # Before anisotropic scaling. + + # Apply anisotropic scaling with probability (aniso * strength). + if self.aniso > 0: + s = torch.exp2(torch.randn([batch_size], device=device) * self.aniso_std) + s = torch.where(torch.rand([batch_size], device=device) < self.aniso * self.p, s, torch.ones_like(s)) + if debug_percentile is not None: + s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.aniso_std)) + G_inv = G_inv @ scale2d_inv(s, 1 / s) + + # Apply post-rotation with probability p_rot. + if self.rotate > 0: + theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.rotate_max + theta = torch.where(torch.rand([batch_size], device=device) < p_rot, theta, torch.zeros_like(theta)) + if debug_percentile is not None: + theta = torch.zeros_like(theta) + G_inv = G_inv @ rotate2d_inv(-theta) # After anisotropic scaling. + + # Apply fractional translation with probability (xfrac * strength). + if self.xfrac > 0: + t = torch.randn([batch_size, 2], device=device) * self.xfrac_std + t = torch.where(torch.rand([batch_size, 1], device=device) < self.xfrac * self.p, t, torch.zeros_like(t)) + if debug_percentile is not None: + t = torch.full_like(t, torch.erfinv(debug_percentile * 2 - 1) * self.xfrac_std) + G_inv = G_inv @ translate2d_inv(t[:,0] * width, t[:,1] * height) + + # ---------------------------------- + # Execute geometric transformations. + # ---------------------------------- + + # Execute if the transform is not identity. + if G_inv is not I_3: + + # Calculate padding. 
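+            # (The four image corners are pushed through G_inv to bound the region the
+            # transform can sample from; the reflect-padding margin is derived from that
+            # bounding box plus the filter support Hz_pad, clamped to at most one image size.)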
+ cx = (width - 1) / 2 + cy = (height - 1) / 2 + cp = matrix([-cx, -cy, 1], [cx, -cy, 1], [cx, cy, 1], [-cx, cy, 1], device=device) # [idx, xyz] + cp = G_inv @ cp.t() # [batch, xyz, idx] + Hz_pad = self.Hz_geom.shape[0] // 4 + margin = cp[:, :2, :].permute(1, 0, 2).flatten(1) # [xy, batch * idx] + margin = torch.cat([-margin, margin]).max(dim=1).values # [x0, y0, x1, y1] + margin = margin + misc.constant([Hz_pad * 2 - cx, Hz_pad * 2 - cy] * 2, device=device) + margin = margin.max(misc.constant([0, 0] * 2, device=device)) + margin = margin.min(misc.constant([width-1, height-1] * 2, device=device)) + mx0, my0, mx1, my1 = margin.ceil().to(torch.int32) + + # Pad image and adjust origin. + images = torch.nn.functional.pad(input=images, pad=[mx0,mx1,my0,my1], mode='reflect') + G_inv = translate2d((mx0 - mx1) / 2, (my0 - my1) / 2) @ G_inv + + # Upsample. + images = upfirdn2d.upsample2d(x=images, f=self.Hz_geom, up=2) + G_inv = scale2d(2, 2, device=device) @ G_inv @ scale2d_inv(2, 2, device=device) + G_inv = translate2d(-0.5, -0.5, device=device) @ G_inv @ translate2d_inv(-0.5, -0.5, device=device) + + # Execute transformation. + shape = [batch_size, num_channels, (height + Hz_pad * 2) * 2, (width + Hz_pad * 2) * 2] + G_inv = scale2d(2 / images.shape[3], 2 / images.shape[2], device=device) @ G_inv @ scale2d_inv(2 / shape[3], 2 / shape[2], device=device) + grid = torch.nn.functional.affine_grid(theta=G_inv[:,:2,:], size=shape, align_corners=False) + images = grid_sample_gradfix.grid_sample(images, grid) + + # Downsample and crop. + images = upfirdn2d.downsample2d(x=images, f=self.Hz_geom, down=2, padding=-Hz_pad*2, flip_filter=True) + + # -------------------------------------------- + # Select parameters for color transformations. + # -------------------------------------------- + + # Initialize homogeneous 3D transformation matrix: C @ color_in ==> color_out + I_4 = torch.eye(4, device=device) + C = I_4 + + # Apply brightness with probability (brightness * strength). + if self.brightness > 0: + b = torch.randn([batch_size], device=device) * self.brightness_std + b = torch.where(torch.rand([batch_size], device=device) < self.brightness * self.p, b, torch.zeros_like(b)) + if debug_percentile is not None: + b = torch.full_like(b, torch.erfinv(debug_percentile * 2 - 1) * self.brightness_std) + C = translate3d(b, b, b) @ C + + # Apply contrast with probability (contrast * strength). + if self.contrast > 0: + c = torch.exp2(torch.randn([batch_size], device=device) * self.contrast_std) + c = torch.where(torch.rand([batch_size], device=device) < self.contrast * self.p, c, torch.ones_like(c)) + if debug_percentile is not None: + c = torch.full_like(c, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.contrast_std)) + C = scale3d(c, c, c) @ C + + # Apply luma flip with probability (lumaflip * strength). + v = misc.constant(np.asarray([1, 1, 1, 0]) / np.sqrt(3), device=device) # Luma axis. + if self.lumaflip > 0: + i = torch.floor(torch.rand([batch_size, 1, 1], device=device) * 2) + i = torch.where(torch.rand([batch_size, 1, 1], device=device) < self.lumaflip * self.p, i, torch.zeros_like(i)) + if debug_percentile is not None: + i = torch.full_like(i, torch.floor(debug_percentile * 2)) + C = (I_4 - 2 * v.ger(v) * i) @ C # Householder reflection. + + # Apply hue rotation with probability (hue * strength). 
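+        # (Hue rotation is a 3D rotation of RGB about the luma axis v = (1,1,1)/sqrt(3),
+        # which leaves luminance unchanged; it is skipped for single-channel images.)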
+ if self.hue > 0 and num_channels > 1: + theta = (torch.rand([batch_size], device=device) * 2 - 1) * np.pi * self.hue_max + theta = torch.where(torch.rand([batch_size], device=device) < self.hue * self.p, theta, torch.zeros_like(theta)) + if debug_percentile is not None: + theta = torch.full_like(theta, (debug_percentile * 2 - 1) * np.pi * self.hue_max) + C = rotate3d(v, theta) @ C # Rotate around v. + + # Apply saturation with probability (saturation * strength). + if self.saturation > 0 and num_channels > 1: + s = torch.exp2(torch.randn([batch_size, 1, 1], device=device) * self.saturation_std) + s = torch.where(torch.rand([batch_size, 1, 1], device=device) < self.saturation * self.p, s, torch.ones_like(s)) + if debug_percentile is not None: + s = torch.full_like(s, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.saturation_std)) + C = (v.ger(v) + (I_4 - v.ger(v)) * s) @ C + + # ------------------------------ + # Execute color transformations. + # ------------------------------ + + # Execute if the transform is not identity. + if C is not I_4: + images = images.reshape([batch_size, num_channels, height * width]) + if num_channels == 3: + images = C[:, :3, :3] @ images + C[:, :3, 3:] + elif num_channels == 1: + C = C[:, :3, :].mean(dim=1, keepdims=True) + images = images * C[:, :, :3].sum(dim=2, keepdims=True) + C[:, :, 3:] + elif num_channels == 6: + images[:, :3] = C[:, :3, :3] @ images[:, :3] + C[:, :3, 3:] + images[:, 3:] = C[:, :3, :3] @ images[:, 3:] + C[:, :3, 3:] + else: + raise ValueError('Image must be RGB (3 channels) or L (1 channel)') + images = images.reshape([batch_size, num_channels, height, width]) + + # ---------------------- + # Image-space filtering. + # ---------------------- + + if self.imgfilter > 0: + num_bands = self.Hz_fbank.shape[0] + assert len(self.imgfilter_bands) == num_bands + expected_power = misc.constant(np.array([10, 1, 1, 1]) / 13, device=device) # Expected power spectrum (1/f). + + # Apply amplification for each band with probability (imgfilter * strength * band_strength). + g = torch.ones([batch_size, num_bands], device=device) # Global gain vector (identity). + for i, band_strength in enumerate(self.imgfilter_bands): + t_i = torch.exp2(torch.randn([batch_size], device=device) * self.imgfilter_std) + t_i = torch.where(torch.rand([batch_size], device=device) < self.imgfilter * self.p * band_strength, t_i, torch.ones_like(t_i)) + if debug_percentile is not None: + t_i = torch.full_like(t_i, torch.exp2(torch.erfinv(debug_percentile * 2 - 1) * self.imgfilter_std)) if band_strength > 0 else torch.ones_like(t_i) + t = torch.ones([batch_size, num_bands], device=device) # Temporary gain vector. + t[:, i] = t_i # Replace i'th element. + t = t / (expected_power * t.square()).sum(dim=-1, keepdims=True).sqrt() # Normalize power. + g = g * t # Accumulate into global gain. + + # Construct combined amplification filter. + Hz_prime = g @ self.Hz_fbank # [batch, tap] + Hz_prime = Hz_prime.unsqueeze(1).repeat([1, num_channels, 1]) # [batch, channels, tap] + Hz_prime = Hz_prime.reshape([batch_size * num_channels, 1, -1]) # [batch * channels, 1, tap] + + # Apply filter. 
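+            # (Hz_prime is applied separably: images are folded into the channel dimension,
+            # reflect-padded, and convolved with the 1-D kernel along x and then along y
+            # via grouped convolutions, one group per image-channel.)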
+ p = self.Hz_fbank.shape[1] // 2 + images = images.reshape([1, batch_size * num_channels, height, width]) + images = torch.nn.functional.pad(input=images, pad=[p,p,p,p], mode='reflect') + images = conv2d_gradfix.conv2d(input=images, weight=Hz_prime.unsqueeze(2), groups=batch_size*num_channels) + images = conv2d_gradfix.conv2d(input=images, weight=Hz_prime.unsqueeze(3), groups=batch_size*num_channels) + images = images.reshape([batch_size, num_channels, height, width]) + + # ------------------------ + # Image-space corruptions. + # ------------------------ + + # Apply additive RGB noise with probability (noise * strength). + if self.noise > 0: + sigma = torch.randn([batch_size, 1, 1, 1], device=device).abs() * self.noise_std + sigma = torch.where(torch.rand([batch_size, 1, 1, 1], device=device) < self.noise * self.p, sigma, torch.zeros_like(sigma)) + if debug_percentile is not None: + sigma = torch.full_like(sigma, torch.erfinv(debug_percentile) * self.noise_std) + images = images + torch.randn([batch_size, num_channels, height, width], device=device) * sigma + + # Apply cutout with probability (cutout * strength). + if self.cutout > 0: + size = torch.full([batch_size, 2, 1, 1, 1], self.cutout_size, device=device) + size = torch.where(torch.rand([batch_size, 1, 1, 1, 1], device=device) < self.cutout * self.p, size, torch.zeros_like(size)) + center = torch.rand([batch_size, 2, 1, 1, 1], device=device) + if debug_percentile is not None: + size = torch.full_like(size, self.cutout_size) + center = torch.full_like(center, debug_percentile) + coord_x = torch.arange(width, device=device).reshape([1, 1, 1, -1]) + coord_y = torch.arange(height, device=device).reshape([1, 1, -1, 1]) + mask_x = (((coord_x + 0.5) / width - center[:, 0]).abs() >= size[:, 0] / 2) + mask_y = (((coord_y + 0.5) / height - center[:, 1]).abs() >= size[:, 1] / 2) + mask = torch.logical_or(mask_x, mask_y).to(torch.float32) + images = images * mask + + return images + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/crosssection_utils.py b/ThirdParty/eg3d/training/crosssection_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..72d49f29534ae98b6c4cbd807677dcc55fd5e5ce --- /dev/null +++ b/ThirdParty/eg3d/training/crosssection_utils.py @@ -0,0 +1,26 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
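+
+"""Sample a planar cross-section of the generator's density field (sigma) on a regular grid, e.g. for visualizing the learned geometry."""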
+ +import torch + +def sample_cross_section(G, ws, resolution=256, w=1.2): + axis=0 + A, B = torch.meshgrid(torch.linspace(w/2, -w/2, resolution, device=ws.device), torch.linspace(-w/2, w/2, resolution, device=ws.device), indexing='ij') + A, B = A.reshape(-1, 1), B.reshape(-1, 1) + C = torch.zeros_like(A) + coordinates = [A, B] + coordinates.insert(axis, C) + coordinates = torch.cat(coordinates, dim=-1).expand(ws.shape[0], -1, -1) + + sigma = G.sample_mixed(coordinates, torch.randn_like(coordinates), ws)['sigma'] + return sigma.reshape(-1, 1, resolution, resolution) + +# if __name__ == '__main__': +# sample_crossection(None) \ No newline at end of file diff --git a/ThirdParty/eg3d/training/dataset.py b/ThirdParty/eg3d/training/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..b4d7c4fb13d1541f9d11af92a76cc859d71f5547 --- /dev/null +++ b/ThirdParty/eg3d/training/dataset.py @@ -0,0 +1,244 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Streaming images and labels from datasets created with dataset_tool.py.""" + +import os +import numpy as np +import zipfile +import PIL.Image +import json +import torch +import dnnlib + +try: + import pyspng +except ImportError: + pyspng = None + +#---------------------------------------------------------------------------- + +class Dataset(torch.utils.data.Dataset): + def __init__(self, + name, # Name of the dataset. + raw_shape, # Shape of the raw image data (NCHW). + max_size = None, # Artificially limit the size of the dataset. None = no limit. Applied before xflip. + use_labels = False, # Enable conditioning labels? False = label dimension is zero. + xflip = False, # Artificially double the size of the dataset via x-flips. Applied after max_size. + random_seed = 0, # Random seed to use when applying max_size. + ): + self._name = name + self._raw_shape = list(raw_shape) + self._use_labels = use_labels + self._raw_labels = None + self._label_shape = None + + # Apply max_size. + self._raw_idx = np.arange(self._raw_shape[0], dtype=np.int64) + if (max_size is not None) and (self._raw_idx.size > max_size): + np.random.RandomState(random_seed).shuffle(self._raw_idx) + self._raw_idx = np.sort(self._raw_idx[:max_size]) + + # Apply xflip. 
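+        # (x-flips are implemented by duplicating the index list and marking the second
+        # copy in self._xflip; __getitem__ returns a horizontally mirrored copy of the
+        # image for the marked indices.)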
+ self._xflip = np.zeros(self._raw_idx.size, dtype=np.uint8) + if xflip: + self._raw_idx = np.tile(self._raw_idx, 2) + self._xflip = np.concatenate([self._xflip, np.ones_like(self._xflip)]) + + def _get_raw_labels(self): + if self._raw_labels is None: + self._raw_labels = self._load_raw_labels() if self._use_labels else None + if self._raw_labels is None: + self._raw_labels = np.zeros([self._raw_shape[0], 0], dtype=np.float32) + assert isinstance(self._raw_labels, np.ndarray) + assert self._raw_labels.shape[0] == self._raw_shape[0] + assert self._raw_labels.dtype in [np.float32, np.int64] + if self._raw_labels.dtype == np.int64: + assert self._raw_labels.ndim == 1 + assert np.all(self._raw_labels >= 0) + self._raw_labels_std = self._raw_labels.std(0) + return self._raw_labels + + def close(self): # to be overridden by subclass + pass + + def _load_raw_image(self, raw_idx): # to be overridden by subclass + raise NotImplementedError + + def _load_raw_labels(self): # to be overridden by subclass + raise NotImplementedError + + def __getstate__(self): + return dict(self.__dict__, _raw_labels=None) + + def __del__(self): + try: + self.close() + except: + pass + + def __len__(self): + return self._raw_idx.size + + def __getitem__(self, idx): + image = self._load_raw_image(self._raw_idx[idx]) + assert isinstance(image, np.ndarray) + assert list(image.shape) == self.image_shape + assert image.dtype == np.uint8 + if self._xflip[idx]: + assert image.ndim == 3 # CHW + image = image[:, :, ::-1] + return image.copy(), self.get_label(idx) + + def get_label(self, idx): + label = self._get_raw_labels()[self._raw_idx[idx]] + if label.dtype == np.int64: + onehot = np.zeros(self.label_shape, dtype=np.float32) + onehot[label] = 1 + label = onehot + return label.copy() + + def get_details(self, idx): + d = dnnlib.EasyDict() + d.raw_idx = int(self._raw_idx[idx]) + d.xflip = (int(self._xflip[idx]) != 0) + d.raw_label = self._get_raw_labels()[d.raw_idx].copy() + return d + + def get_label_std(self): + return self._raw_labels_std + + @property + def name(self): + return self._name + + @property + def image_shape(self): + return list(self._raw_shape[1:]) + + @property + def num_channels(self): + assert len(self.image_shape) == 3 # CHW + return self.image_shape[0] + + @property + def resolution(self): + assert len(self.image_shape) == 3 # CHW + assert self.image_shape[1] == self.image_shape[2] + return self.image_shape[1] + + @property + def label_shape(self): + if self._label_shape is None: + raw_labels = self._get_raw_labels() + if raw_labels.dtype == np.int64: + self._label_shape = [int(np.max(raw_labels)) + 1] + else: + self._label_shape = raw_labels.shape[1:] + return list(self._label_shape) + + @property + def label_dim(self): + assert len(self.label_shape) == 1 + return self.label_shape[0] + + @property + def has_labels(self): + return any(x != 0 for x in self.label_shape) + + @property + def has_onehot_labels(self): + return self._get_raw_labels().dtype == np.int64 + +#---------------------------------------------------------------------------- + +class ImageFolderDataset(Dataset): + def __init__(self, + path, # Path to directory or zip. + resolution = None, # Ensure specific resolution, None = highest available. + **super_kwargs, # Additional arguments for the Dataset base class. 
+ ): + self._path = path + self._zipfile = None + + if os.path.isdir(self._path): + self._type = 'dir' + self._all_fnames = {os.path.relpath(os.path.join(root, fname), start=self._path) for root, _dirs, files in os.walk(self._path) for fname in files} + elif self._file_ext(self._path) == '.zip': + self._type = 'zip' + self._all_fnames = set(self._get_zipfile().namelist()) + else: + raise IOError('Path must point to a directory or zip') + + PIL.Image.init() + self._image_fnames = sorted(fname for fname in self._all_fnames if self._file_ext(fname) in PIL.Image.EXTENSION) + if len(self._image_fnames) == 0: + raise IOError('No image files found in the specified path') + + name = os.path.splitext(os.path.basename(self._path))[0] + raw_shape = [len(self._image_fnames)] + list(self._load_raw_image(0).shape) + if resolution is not None and (raw_shape[2] != resolution or raw_shape[3] != resolution): + raise IOError('Image files do not match the specified resolution') + super().__init__(name=name, raw_shape=raw_shape, **super_kwargs) + + @staticmethod + def _file_ext(fname): + return os.path.splitext(fname)[1].lower() + + def _get_zipfile(self): + assert self._type == 'zip' + if self._zipfile is None: + self._zipfile = zipfile.ZipFile(self._path) + return self._zipfile + + def _open_file(self, fname): + if self._type == 'dir': + return open(os.path.join(self._path, fname), 'rb') + if self._type == 'zip': + return self._get_zipfile().open(fname, 'r') + return None + + def close(self): + try: + if self._zipfile is not None: + self._zipfile.close() + finally: + self._zipfile = None + + def __getstate__(self): + return dict(super().__getstate__(), _zipfile=None) + + def _load_raw_image(self, raw_idx): + fname = self._image_fnames[raw_idx] + with self._open_file(fname) as f: + if pyspng is not None and self._file_ext(fname) == '.png': + image = pyspng.load(f.read()) + else: + image = np.array(PIL.Image.open(f)) + if image.ndim == 2: + image = image[:, :, np.newaxis] # HW => HWC + image = image.transpose(2, 0, 1) # HWC => CHW + return image + + def _load_raw_labels(self): + fname = 'dataset.json' + if fname not in self._all_fnames: + return None + with self._open_file(fname) as f: + labels = json.load(f)['labels'] + if labels is None: + return None + labels = dict(labels) + labels = [labels[fname.replace('\\', '/')] for fname in self._image_fnames] + labels = np.array(labels) + labels = labels.astype({1: np.int64, 2: np.float32}[labels.ndim]) + return labels + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/dual_discriminator.py b/ThirdParty/eg3d/training/dual_discriminator.py new file mode 100644 index 0000000000000000000000000000000000000000..99bfb5a2a5b3b14c6824813b6977be86b43f7ccc --- /dev/null +++ b/ThirdParty/eg3d/training/dual_discriminator.py @@ -0,0 +1,249 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +"""Discriminator architectures from the paper +"Efficient Geometry-aware 3D Generative Adversarial Networks".""" + +import numpy as np +import torch +from torch_utils import persistence +from torch_utils.ops import upfirdn2d +from training.networks_stylegan2 import DiscriminatorBlock, MappingNetwork, DiscriminatorEpilogue + +@persistence.persistent_class +class SingleDiscriminator(torch.nn.Module): + def __init__(self, + c_dim, # Conditioning label (C) dimensionality. + img_resolution, # Input resolution. + img_channels, # Number of input color channels. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. + channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + cmap_dim = None, # Dimensionality of mapped conditioning label, None = default. + sr_upsample_factor = 1, # Ignored for SingleDiscriminator + block_kwargs = {}, # Arguments for DiscriminatorBlock. + mapping_kwargs = {}, # Arguments for MappingNetwork. + epilogue_kwargs = {}, # Arguments for DiscriminatorEpilogue. + ): + super().__init__() + self.c_dim = c_dim + self.img_resolution = img_resolution + self.img_resolution_log2 = int(np.log2(img_resolution)) + self.img_channels = img_channels + self.block_resolutions = [2 ** i for i in range(self.img_resolution_log2, 2, -1)] + channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions + [4]} + fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8) + + if cmap_dim is None: + cmap_dim = channels_dict[4] + if c_dim == 0: + cmap_dim = 0 + + common_kwargs = dict(img_channels=img_channels, architecture=architecture, conv_clamp=conv_clamp) + cur_layer_idx = 0 + for res in self.block_resolutions: + in_channels = channels_dict[res] if res < img_resolution else 0 + tmp_channels = channels_dict[res] + out_channels = channels_dict[res // 2] + use_fp16 = (res >= fp16_resolution) + block = DiscriminatorBlock(in_channels, tmp_channels, out_channels, resolution=res, + first_layer_idx=cur_layer_idx, use_fp16=use_fp16, **block_kwargs, **common_kwargs) + setattr(self, f'b{res}', block) + cur_layer_idx += block.num_layers + if c_dim > 0: + self.mapping = MappingNetwork(z_dim=0, c_dim=c_dim, w_dim=cmap_dim, num_ws=None, w_avg_beta=None, **mapping_kwargs) + self.b4 = DiscriminatorEpilogue(channels_dict[4], cmap_dim=cmap_dim, resolution=4, **epilogue_kwargs, **common_kwargs) + + def forward(self, img, c, update_emas=False, **block_kwargs): + img = img['image'] + + _ = update_emas # unused + x = None + for res in self.block_resolutions: + block = getattr(self, f'b{res}') + x, img = block(x, img, **block_kwargs) + + cmap = None + if self.c_dim > 0: + cmap = self.mapping(None, c) + x = self.b4(x, img, cmap) + return x + + def extra_repr(self): + return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}' + +#---------------------------------------------------------------------------- + +def filtered_resizing(image_orig_tensor, size, f, filter_mode='antialiased'): + if filter_mode == 'antialiased': + ada_filtered_64 = torch.nn.functional.interpolate(image_orig_tensor, size=(size, size), mode='bilinear', align_corners=False, antialias=True) + elif filter_mode == 'classic': + ada_filtered_64 = upfirdn2d.upsample2d(image_orig_tensor, f, up=2) + ada_filtered_64 
= torch.nn.functional.interpolate(ada_filtered_64, size=(size * 2 + 2, size * 2 + 2), mode='bilinear', align_corners=False) + ada_filtered_64 = upfirdn2d.downsample2d(ada_filtered_64, f, down=2, flip_filter=True, padding=-1) + elif filter_mode == 'none': + ada_filtered_64 = torch.nn.functional.interpolate(image_orig_tensor, size=(size, size), mode='bilinear', align_corners=False) + elif type(filter_mode) == float: + assert 0 < filter_mode < 1 + + filtered = torch.nn.functional.interpolate(image_orig_tensor, size=(size, size), mode='bilinear', align_corners=False, antialias=True) + aliased = torch.nn.functional.interpolate(image_orig_tensor, size=(size, size), mode='bilinear', align_corners=False, antialias=False) + ada_filtered_64 = (1 - filter_mode) * aliased + (filter_mode) * filtered + + return ada_filtered_64 + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class DualDiscriminator(torch.nn.Module): + def __init__(self, + c_dim, # Conditioning label (C) dimensionality. + img_resolution, # Input resolution. + img_channels, # Number of input color channels. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. + channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + cmap_dim = None, # Dimensionality of mapped conditioning label, None = default. + disc_c_noise = 0, # Corrupt camera parameters with X std dev of noise before disc. pose conditioning. + block_kwargs = {}, # Arguments for DiscriminatorBlock. + mapping_kwargs = {}, # Arguments for MappingNetwork. + epilogue_kwargs = {}, # Arguments for DiscriminatorEpilogue. 
+ ): + super().__init__() + img_channels *= 2 + + self.c_dim = c_dim + self.img_resolution = img_resolution + self.img_resolution_log2 = int(np.log2(img_resolution)) + self.img_channels = img_channels + self.block_resolutions = [2 ** i for i in range(self.img_resolution_log2, 2, -1)] + channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions + [4]} + fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8) + + if cmap_dim is None: + cmap_dim = channels_dict[4] + if c_dim == 0: + cmap_dim = 0 + + common_kwargs = dict(img_channels=img_channels, architecture=architecture, conv_clamp=conv_clamp) + cur_layer_idx = 0 + for res in self.block_resolutions: + in_channels = channels_dict[res] if res < img_resolution else 0 + tmp_channels = channels_dict[res] + out_channels = channels_dict[res // 2] + use_fp16 = (res >= fp16_resolution) + block = DiscriminatorBlock(in_channels, tmp_channels, out_channels, resolution=res, + first_layer_idx=cur_layer_idx, use_fp16=use_fp16, **block_kwargs, **common_kwargs) + setattr(self, f'b{res}', block) + cur_layer_idx += block.num_layers + if c_dim > 0: + self.mapping = MappingNetwork(z_dim=0, c_dim=c_dim, w_dim=cmap_dim, num_ws=None, w_avg_beta=None, **mapping_kwargs) + self.b4 = DiscriminatorEpilogue(channels_dict[4], cmap_dim=cmap_dim, resolution=4, **epilogue_kwargs, **common_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + self.disc_c_noise = disc_c_noise + + def forward(self, img, c, update_emas=False, **block_kwargs): + image_raw = filtered_resizing(img['image_raw'], size=img['image'].shape[-1], f=self.resample_filter) + img = torch.cat([img['image'], image_raw], 1) + + _ = update_emas # unused + x = None + for res in self.block_resolutions: + block = getattr(self, f'b{res}') + x, img = block(x, img, **block_kwargs) + + cmap = None + if self.c_dim > 0: + if self.disc_c_noise > 0: c += torch.randn_like(c) * c.std(0) * self.disc_c_noise + cmap = self.mapping(None, c) + x = self.b4(x, img, cmap) + return x + + def extra_repr(self): + return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class DummyDualDiscriminator(torch.nn.Module): + def __init__(self, + c_dim, # Conditioning label (C) dimensionality. + img_resolution, # Input resolution. + img_channels, # Number of input color channels. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. + channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + cmap_dim = None, # Dimensionality of mapped conditioning label, None = default. + block_kwargs = {}, # Arguments for DiscriminatorBlock. + mapping_kwargs = {}, # Arguments for MappingNetwork. + epilogue_kwargs = {}, # Arguments for DiscriminatorEpilogue. 
+ ): + super().__init__() + img_channels *= 2 + + self.c_dim = c_dim + self.img_resolution = img_resolution + self.img_resolution_log2 = int(np.log2(img_resolution)) + self.img_channels = img_channels + self.block_resolutions = [2 ** i for i in range(self.img_resolution_log2, 2, -1)] + channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions + [4]} + fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8) + + if cmap_dim is None: + cmap_dim = channels_dict[4] + if c_dim == 0: + cmap_dim = 0 + + common_kwargs = dict(img_channels=img_channels, architecture=architecture, conv_clamp=conv_clamp) + cur_layer_idx = 0 + for res in self.block_resolutions: + in_channels = channels_dict[res] if res < img_resolution else 0 + tmp_channels = channels_dict[res] + out_channels = channels_dict[res // 2] + use_fp16 = (res >= fp16_resolution) + block = DiscriminatorBlock(in_channels, tmp_channels, out_channels, resolution=res, + first_layer_idx=cur_layer_idx, use_fp16=use_fp16, **block_kwargs, **common_kwargs) + setattr(self, f'b{res}', block) + cur_layer_idx += block.num_layers + if c_dim > 0: + self.mapping = MappingNetwork(z_dim=0, c_dim=c_dim, w_dim=cmap_dim, num_ws=None, w_avg_beta=None, **mapping_kwargs) + self.b4 = DiscriminatorEpilogue(channels_dict[4], cmap_dim=cmap_dim, resolution=4, **epilogue_kwargs, **common_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + + self.raw_fade = 1 + + def forward(self, img, c, update_emas=False, **block_kwargs): + self.raw_fade = max(0, self.raw_fade - 1/(500000/32)) + + image_raw = filtered_resizing(img['image_raw'], size=img['image'].shape[-1], f=self.resample_filter) * self.raw_fade + img = torch.cat([img['image'], image_raw], 1) + + _ = update_emas # unused + x = None + for res in self.block_resolutions: + block = getattr(self, f'b{res}') + x, img = block(x, img, **block_kwargs) + + cmap = None + if self.c_dim > 0: + cmap = self.mapping(None, c) + x = self.b4(x, img, cmap) + return x + + def extra_repr(self): + return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}' + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/loss.py b/ThirdParty/eg3d/training/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..b2c637a6f81bb8d458449c355831c733fcb0cacd --- /dev/null +++ b/ThirdParty/eg3d/training/loss.py @@ -0,0 +1,292 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +"""Loss functions.""" + +import numpy as np +import torch +from torch_utils import training_stats +from torch_utils.ops import conv2d_gradfix +from torch_utils.ops import upfirdn2d +from training.dual_discriminator import filtered_resizing + +#---------------------------------------------------------------------------- + +class Loss: + def accumulate_gradients(self, phase, real_img, real_c, gen_z, gen_c, gain, cur_nimg): # to be overridden by subclass + raise NotImplementedError() + +#---------------------------------------------------------------------------- + +class StyleGAN2Loss(Loss): + def __init__(self, device, G, D, augment_pipe=None, r1_gamma=10, style_mixing_prob=0, pl_weight=0, pl_batch_shrink=2, pl_decay=0.01, pl_no_weight_grad=False, blur_init_sigma=0, blur_fade_kimg=0, r1_gamma_init=0, r1_gamma_fade_kimg=0, neural_rendering_resolution_initial=64, neural_rendering_resolution_final=None, neural_rendering_resolution_fade_kimg=0, gpc_reg_fade_kimg=1000, gpc_reg_prob=None, dual_discrimination=False, filter_mode='antialiased'): + super().__init__() + self.device = device + self.G = G + self.D = D + self.augment_pipe = augment_pipe + self.r1_gamma = r1_gamma + self.style_mixing_prob = style_mixing_prob + self.pl_weight = pl_weight + self.pl_batch_shrink = pl_batch_shrink + self.pl_decay = pl_decay + self.pl_no_weight_grad = pl_no_weight_grad + self.pl_mean = torch.zeros([], device=device) + self.blur_init_sigma = blur_init_sigma + self.blur_fade_kimg = blur_fade_kimg + self.r1_gamma_init = r1_gamma_init + self.r1_gamma_fade_kimg = r1_gamma_fade_kimg + self.neural_rendering_resolution_initial = neural_rendering_resolution_initial + self.neural_rendering_resolution_final = neural_rendering_resolution_final + self.neural_rendering_resolution_fade_kimg = neural_rendering_resolution_fade_kimg + self.gpc_reg_fade_kimg = gpc_reg_fade_kimg + self.gpc_reg_prob = gpc_reg_prob + self.dual_discrimination = dual_discrimination + self.filter_mode = filter_mode + self.resample_filter = upfirdn2d.setup_filter([1,3,3,1], device=device) + self.blur_raw_target = True + assert self.gpc_reg_prob is None or (0 <= self.gpc_reg_prob <= 1) + + def run_G(self, z, c, swapping_prob, neural_rendering_resolution, update_emas=False): + if swapping_prob is not None: + c_swapped = torch.roll(c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand((c.shape[0], 1), device=c.device) < swapping_prob, c_swapped, c) + else: + c_gen_conditioning = torch.zeros_like(c) + + ws = self.G.mapping(z, c_gen_conditioning, update_emas=update_emas) + if self.style_mixing_prob > 0: + with torch.autograd.profiler.record_function('style_mixing'): + cutoff = torch.empty([], dtype=torch.int64, device=ws.device).random_(1, ws.shape[1]) + cutoff = torch.where(torch.rand([], device=ws.device) < self.style_mixing_prob, cutoff, torch.full_like(cutoff, ws.shape[1])) + ws[:, cutoff:] = self.G.mapping(torch.randn_like(z), c, update_emas=False)[:, cutoff:] + gen_output = self.G.synthesis(ws, c, neural_rendering_resolution=neural_rendering_resolution, update_emas=update_emas) + return gen_output, ws + + def run_D(self, img, c, blur_sigma=0, blur_sigma_raw=0, update_emas=False): + blur_size = np.floor(blur_sigma * 3) + if blur_size > 0: + with torch.autograd.profiler.record_function('blur'): + f = torch.arange(-blur_size, blur_size + 1, device=img['image'].device).div(blur_sigma).square().neg().exp2() + img['image'] = upfirdn2d.filter2d(img['image'], f / f.sum()) + + if self.augment_pipe is not None: + augmented_pair = 
self.augment_pipe(torch.cat([img['image'], + torch.nn.functional.interpolate(img['image_raw'], size=img['image'].shape[2:], mode='bilinear', antialias=True)], + dim=1)) + img['image'] = augmented_pair[:, :img['image'].shape[1]] + img['image_raw'] = torch.nn.functional.interpolate(augmented_pair[:, img['image'].shape[1]:], size=img['image_raw'].shape[2:], mode='bilinear', antialias=True) + + logits = self.D(img, c, update_emas=update_emas) + return logits + + def accumulate_gradients(self, phase, real_img, real_c, gen_z, gen_c, gain, cur_nimg): + assert phase in ['Gmain', 'Greg', 'Gboth', 'Dmain', 'Dreg', 'Dboth'] + if self.G.rendering_kwargs.get('density_reg', 0) == 0: + phase = {'Greg': 'none', 'Gboth': 'Gmain'}.get(phase, phase) + if self.r1_gamma == 0: + phase = {'Dreg': 'none', 'Dboth': 'Dmain'}.get(phase, phase) + blur_sigma = max(1 - cur_nimg / (self.blur_fade_kimg * 1e3), 0) * self.blur_init_sigma if self.blur_fade_kimg > 0 else 0 + r1_gamma = self.r1_gamma + + alpha = min(cur_nimg / (self.gpc_reg_fade_kimg * 1e3), 1) if self.gpc_reg_fade_kimg > 0 else 1 + swapping_prob = (1 - alpha) * 1 + alpha * self.gpc_reg_prob if self.gpc_reg_prob is not None else None + + if self.neural_rendering_resolution_final is not None: + alpha = min(cur_nimg / (self.neural_rendering_resolution_fade_kimg * 1e3), 1) + neural_rendering_resolution = int(np.rint(self.neural_rendering_resolution_initial * (1 - alpha) + self.neural_rendering_resolution_final * alpha)) + else: + neural_rendering_resolution = self.neural_rendering_resolution_initial + + real_img_raw = filtered_resizing(real_img, size=neural_rendering_resolution, f=self.resample_filter, filter_mode=self.filter_mode) + + if self.blur_raw_target: + blur_size = np.floor(blur_sigma * 3) + if blur_size > 0: + f = torch.arange(-blur_size, blur_size + 1, device=real_img_raw.device).div(blur_sigma).square().neg().exp2() + real_img_raw = upfirdn2d.filter2d(real_img_raw, f / f.sum()) + + real_img = {'image': real_img, 'image_raw': real_img_raw} + + # Gmain: Maximize logits for generated images. 
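+        # (loss_Gmain = softplus(-D(G(z))) = -log sigmoid(D(G(z))), the non-saturating generator loss.)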
+ if phase in ['Gmain', 'Gboth']: + with torch.autograd.profiler.record_function('Gmain_forward'): + gen_img, _gen_ws = self.run_G(gen_z, gen_c, swapping_prob=swapping_prob, neural_rendering_resolution=neural_rendering_resolution) + gen_logits = self.run_D(gen_img, gen_c, blur_sigma=blur_sigma) + training_stats.report('Loss/scores/fake', gen_logits) + training_stats.report('Loss/signs/fake', gen_logits.sign()) + loss_Gmain = torch.nn.functional.softplus(-gen_logits) + training_stats.report('Loss/G/loss', loss_Gmain) + with torch.autograd.profiler.record_function('Gmain_backward'): + loss_Gmain.mean().mul(gain).backward() + + # Density Regularization + if phase in ['Greg', 'Gboth'] and self.G.rendering_kwargs.get('density_reg', 0) > 0 and self.G.rendering_kwargs['reg_type'] == 'l1': + if swapping_prob is not None: + c_swapped = torch.roll(gen_c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand([], device=gen_c.device) < swapping_prob, c_swapped, gen_c) + else: + c_gen_conditioning = torch.zeros_like(gen_c) + + ws = self.G.mapping(gen_z, c_gen_conditioning, update_emas=False) + if self.style_mixing_prob > 0: + with torch.autograd.profiler.record_function('style_mixing'): + cutoff = torch.empty([], dtype=torch.int64, device=ws.device).random_(1, ws.shape[1]) + cutoff = torch.where(torch.rand([], device=ws.device) < self.style_mixing_prob, cutoff, torch.full_like(cutoff, ws.shape[1])) + ws[:, cutoff:] = self.G.mapping(torch.randn_like(z), c, update_emas=False)[:, cutoff:] + initial_coordinates = torch.rand((ws.shape[0], 1000, 3), device=ws.device) * 2 - 1 + perturbed_coordinates = initial_coordinates + torch.randn_like(initial_coordinates) * self.G.rendering_kwargs['density_reg_p_dist'] + all_coordinates = torch.cat([initial_coordinates, perturbed_coordinates], dim=1) + sigma = self.G.sample_mixed(all_coordinates, torch.randn_like(all_coordinates), ws, update_emas=False)['sigma'] + sigma_initial = sigma[:, :sigma.shape[1]//2] + sigma_perturbed = sigma[:, sigma.shape[1]//2:] + + TVloss = torch.nn.functional.l1_loss(sigma_initial, sigma_perturbed) * self.G.rendering_kwargs['density_reg'] + TVloss.mul(gain).backward() + + # Alternative density regularization + if phase in ['Greg', 'Gboth'] and self.G.rendering_kwargs.get('density_reg', 0) > 0 and self.G.rendering_kwargs['reg_type'] == 'monotonic-detach': + if swapping_prob is not None: + c_swapped = torch.roll(gen_c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand([], device=gen_c.device) < swapping_prob, c_swapped, gen_c) + else: + c_gen_conditioning = torch.zeros_like(gen_c) + + ws = self.G.mapping(gen_z, c_gen_conditioning, update_emas=False) + + initial_coordinates = torch.rand((ws.shape[0], 2000, 3), device=ws.device) * 2 - 1 # Front + + perturbed_coordinates = initial_coordinates + torch.tensor([0, 0, -1], device=ws.device) * (1/256) * self.G.rendering_kwargs['box_warp'] # Behind + all_coordinates = torch.cat([initial_coordinates, perturbed_coordinates], dim=1) + sigma = self.G.sample_mixed(all_coordinates, torch.randn_like(all_coordinates), ws, update_emas=False)['sigma'] + sigma_initial = sigma[:, :sigma.shape[1]//2] + sigma_perturbed = sigma[:, sigma.shape[1]//2:] + + monotonic_loss = torch.relu(sigma_initial.detach() - sigma_perturbed).mean() * 10 + monotonic_loss.mul(gain).backward() + + + if swapping_prob is not None: + c_swapped = torch.roll(gen_c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand([], device=gen_c.device) < swapping_prob, c_swapped, gen_c) + else: + c_gen_conditioning = 
torch.zeros_like(gen_c) + + ws = self.G.mapping(gen_z, c_gen_conditioning, update_emas=False) + if self.style_mixing_prob > 0: + with torch.autograd.profiler.record_function('style_mixing'): + cutoff = torch.empty([], dtype=torch.int64, device=ws.device).random_(1, ws.shape[1]) + cutoff = torch.where(torch.rand([], device=ws.device) < self.style_mixing_prob, cutoff, torch.full_like(cutoff, ws.shape[1])) + ws[:, cutoff:] = self.G.mapping(torch.randn_like(z), c, update_emas=False)[:, cutoff:] + initial_coordinates = torch.rand((ws.shape[0], 1000, 3), device=ws.device) * 2 - 1 + perturbed_coordinates = initial_coordinates + torch.randn_like(initial_coordinates) * (1/256) * self.G.rendering_kwargs['box_warp'] + all_coordinates = torch.cat([initial_coordinates, perturbed_coordinates], dim=1) + sigma = self.G.sample_mixed(all_coordinates, torch.randn_like(all_coordinates), ws, update_emas=False)['sigma'] + sigma_initial = sigma[:, :sigma.shape[1]//2] + sigma_perturbed = sigma[:, sigma.shape[1]//2:] + + TVloss = torch.nn.functional.l1_loss(sigma_initial, sigma_perturbed) * self.G.rendering_kwargs['density_reg'] + TVloss.mul(gain).backward() + + # Alternative density regularization + if phase in ['Greg', 'Gboth'] and self.G.rendering_kwargs.get('density_reg', 0) > 0 and self.G.rendering_kwargs['reg_type'] == 'monotonic-fixed': + if swapping_prob is not None: + c_swapped = torch.roll(gen_c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand([], device=gen_c.device) < swapping_prob, c_swapped, gen_c) + else: + c_gen_conditioning = torch.zeros_like(gen_c) + + ws = self.G.mapping(gen_z, c_gen_conditioning, update_emas=False) + + initial_coordinates = torch.rand((ws.shape[0], 2000, 3), device=ws.device) * 2 - 1 # Front + + perturbed_coordinates = initial_coordinates + torch.tensor([0, 0, -1], device=ws.device) * (1/256) * self.G.rendering_kwargs['box_warp'] # Behind + all_coordinates = torch.cat([initial_coordinates, perturbed_coordinates], dim=1) + sigma = self.G.sample_mixed(all_coordinates, torch.randn_like(all_coordinates), ws, update_emas=False)['sigma'] + sigma_initial = sigma[:, :sigma.shape[1]//2] + sigma_perturbed = sigma[:, sigma.shape[1]//2:] + + monotonic_loss = torch.relu(sigma_initial - sigma_perturbed).mean() * 10 + monotonic_loss.mul(gain).backward() + + + if swapping_prob is not None: + c_swapped = torch.roll(gen_c.clone(), 1, 0) + c_gen_conditioning = torch.where(torch.rand([], device=gen_c.device) < swapping_prob, c_swapped, gen_c) + else: + c_gen_conditioning = torch.zeros_like(gen_c) + + ws = self.G.mapping(gen_z, c_gen_conditioning, update_emas=False) + if self.style_mixing_prob > 0: + with torch.autograd.profiler.record_function('style_mixing'): + cutoff = torch.empty([], dtype=torch.int64, device=ws.device).random_(1, ws.shape[1]) + cutoff = torch.where(torch.rand([], device=ws.device) < self.style_mixing_prob, cutoff, torch.full_like(cutoff, ws.shape[1])) + ws[:, cutoff:] = self.G.mapping(torch.randn_like(z), c, update_emas=False)[:, cutoff:] + initial_coordinates = torch.rand((ws.shape[0], 1000, 3), device=ws.device) * 2 - 1 + perturbed_coordinates = initial_coordinates + torch.randn_like(initial_coordinates) * (1/256) * self.G.rendering_kwargs['box_warp'] + all_coordinates = torch.cat([initial_coordinates, perturbed_coordinates], dim=1) + sigma = self.G.sample_mixed(all_coordinates, torch.randn_like(all_coordinates), ws, update_emas=False)['sigma'] + sigma_initial = sigma[:, :sigma.shape[1]//2] + sigma_perturbed = sigma[:, sigma.shape[1]//2:] + + TVloss = 
torch.nn.functional.l1_loss(sigma_initial, sigma_perturbed) * self.G.rendering_kwargs['density_reg'] + TVloss.mul(gain).backward() + + # Dmain: Minimize logits for generated images. + loss_Dgen = 0 + if phase in ['Dmain', 'Dboth']: + with torch.autograd.profiler.record_function('Dgen_forward'): + gen_img, _gen_ws = self.run_G(gen_z, gen_c, swapping_prob=swapping_prob, neural_rendering_resolution=neural_rendering_resolution, update_emas=True) + gen_logits = self.run_D(gen_img, gen_c, blur_sigma=blur_sigma, update_emas=True) + training_stats.report('Loss/scores/fake', gen_logits) + training_stats.report('Loss/signs/fake', gen_logits.sign()) + loss_Dgen = torch.nn.functional.softplus(gen_logits) + with torch.autograd.profiler.record_function('Dgen_backward'): + loss_Dgen.mean().mul(gain).backward() + + # Dmain: Maximize logits for real images. + # Dr1: Apply R1 regularization. + if phase in ['Dmain', 'Dreg', 'Dboth']: + name = 'Dreal' if phase == 'Dmain' else 'Dr1' if phase == 'Dreg' else 'Dreal_Dr1' + with torch.autograd.profiler.record_function(name + '_forward'): + real_img_tmp_image = real_img['image'].detach().requires_grad_(phase in ['Dreg', 'Dboth']) + real_img_tmp_image_raw = real_img['image_raw'].detach().requires_grad_(phase in ['Dreg', 'Dboth']) + real_img_tmp = {'image': real_img_tmp_image, 'image_raw': real_img_tmp_image_raw} + + real_logits = self.run_D(real_img_tmp, real_c, blur_sigma=blur_sigma) + training_stats.report('Loss/scores/real', real_logits) + training_stats.report('Loss/signs/real', real_logits.sign()) + + loss_Dreal = 0 + if phase in ['Dmain', 'Dboth']: + loss_Dreal = torch.nn.functional.softplus(-real_logits) + training_stats.report('Loss/D/loss', loss_Dgen + loss_Dreal) + + loss_Dr1 = 0 + if phase in ['Dreg', 'Dboth']: + if self.dual_discrimination: + with torch.autograd.profiler.record_function('r1_grads'), conv2d_gradfix.no_weight_gradients(): + r1_grads = torch.autograd.grad(outputs=[real_logits.sum()], inputs=[real_img_tmp['image'], real_img_tmp['image_raw']], create_graph=True, only_inputs=True) + r1_grads_image = r1_grads[0] + r1_grads_image_raw = r1_grads[1] + r1_penalty = r1_grads_image.square().sum([1,2,3]) + r1_grads_image_raw.square().sum([1,2,3]) + else: # single discrimination + with torch.autograd.profiler.record_function('r1_grads'), conv2d_gradfix.no_weight_gradients(): + r1_grads = torch.autograd.grad(outputs=[real_logits.sum()], inputs=[real_img_tmp['image']], create_graph=True, only_inputs=True) + r1_grads_image = r1_grads[0] + r1_penalty = r1_grads_image.square().sum([1,2,3]) + loss_Dr1 = r1_penalty * (r1_gamma / 2) + training_stats.report('Loss/r1_penalty', r1_penalty) + training_stats.report('Loss/D/reg', loss_Dr1) + + with torch.autograd.profiler.record_function(name + '_backward'): + (loss_Dreal + loss_Dr1).mean().mul(gain).backward() + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/networks_stylegan2.py b/ThirdParty/eg3d/training/networks_stylegan2.py new file mode 100644 index 0000000000000000000000000000000000000000..298e6a2017d44bf8fd80bcfe9fd36dcb710668ec --- /dev/null +++ b/ThirdParty/eg3d/training/networks_stylegan2.py @@ -0,0 +1,796 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Network architectures from the paper +"Analyzing and Improving the Image Quality of StyleGAN". +Matches the original implementation of configs E-F by Karras et al. at +https://github.com/NVlabs/stylegan2/blob/master/training/networks_stylegan2.py""" + +import numpy as np +import torch +from ThirdParty.eg3d.torch_utils import misc +from ThirdParty.eg3d.torch_utils import persistence +from ThirdParty.eg3d.torch_utils.ops import conv2d_resample +from ThirdParty.eg3d.torch_utils.ops import upfirdn2d +from ThirdParty.eg3d.torch_utils.ops import bias_act +from ThirdParty.eg3d.torch_utils.ops import fma + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def normalize_2nd_moment(x, dim=1, eps=1e-8): + return x * (x.square().mean(dim=dim, keepdim=True) + eps).rsqrt() + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def modulated_conv2d( + x, # Input tensor of shape [batch_size, in_channels, in_height, in_width]. + weight, # Weight tensor of shape [out_channels, in_channels, kernel_height, kernel_width]. + styles, # Modulation coefficients of shape [batch_size, in_channels]. + noise = None, # Optional noise tensor to add to the output activations. + up = 1, # Integer upsampling factor. + down = 1, # Integer downsampling factor. + padding = 0, # Padding with respect to the upsampled image. + resample_filter = None, # Low-pass filter to apply when resampling activations. Must be prepared beforehand by calling upfirdn2d.setup_filter(). + demodulate = True, # Apply weight demodulation? + flip_weight = True, # False = convolution, True = correlation (matches torch.nn.functional.conv2d). + fused_modconv = True, # Perform modulation, convolution, and demodulation as a single fused operation? +): + batch_size = x.shape[0] + out_channels, in_channels, kh, kw = weight.shape + misc.assert_shape(weight, [out_channels, in_channels, kh, kw]) # [OIkk] + misc.assert_shape(x, [batch_size, in_channels, None, None]) # [NIHW] + misc.assert_shape(styles, [batch_size, in_channels]) # [NI] + + # Pre-normalize inputs to avoid FP16 overflow. + if x.dtype == torch.float16 and demodulate: + weight = weight * (1 / np.sqrt(in_channels * kh * kw) / weight.norm(float('inf'), dim=[1,2,3], keepdim=True)) # max_Ikk + styles = styles / styles.norm(float('inf'), dim=1, keepdim=True) # max_I + + # Calculate per-sample weights and demodulation coefficients. + w = None + dcoefs = None + if demodulate or fused_modconv: + w = weight.unsqueeze(0) # [NOIkk] + w = w * styles.reshape(batch_size, 1, -1, 1, 1) # [NOIkk] + if demodulate: + dcoefs = (w.square().sum(dim=[2,3,4]) + 1e-8).rsqrt() # [NO] + if demodulate and fused_modconv: + w = w * dcoefs.reshape(batch_size, -1, 1, 1, 1) # [NOIkk] + + # Execute by scaling the activations before and after the convolution. 
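+    # (In this path, modulation scales the input activations by the styles and
+    # demodulation scales the output by dcoefs; this is equivalent to modulating the
+    # weights per sample, but avoids materializing a per-sample weight tensor.)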
+ if not fused_modconv: + x = x * styles.to(x.dtype).reshape(batch_size, -1, 1, 1) + x = conv2d_resample.conv2d_resample(x=x, w=weight.to(x.dtype), f=resample_filter, up=up, down=down, padding=padding, flip_weight=flip_weight) + if demodulate and noise is not None: + x = fma.fma(x, dcoefs.to(x.dtype).reshape(batch_size, -1, 1, 1), noise.to(x.dtype)) + elif demodulate: + x = x * dcoefs.to(x.dtype).reshape(batch_size, -1, 1, 1) + elif noise is not None: + x = x.add_(noise.to(x.dtype)) + return x + + # Execute as one fused op using grouped convolution. + with misc.suppress_tracer_warnings(): # this value will be treated as a constant + batch_size = int(batch_size) + misc.assert_shape(x, [batch_size, in_channels, None, None]) + x = x.reshape(1, -1, *x.shape[2:]) + w = w.reshape(-1, in_channels, kh, kw) + x = conv2d_resample.conv2d_resample(x=x, w=w.to(x.dtype), f=resample_filter, up=up, down=down, padding=padding, groups=batch_size, flip_weight=flip_weight) + x = x.reshape(batch_size, -1, *x.shape[2:]) + if noise is not None: + x = x.add_(noise) + return x + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class FullyConnectedLayer(torch.nn.Module): + def __init__(self, + in_features, # Number of input features. + out_features, # Number of output features. + bias = True, # Apply additive bias before the activation function? + activation = 'linear', # Activation function: 'relu', 'lrelu', etc. + lr_multiplier = 1, # Learning rate multiplier. + bias_init = 0, # Initial value for the additive bias. + ): + super().__init__() + self.in_features = in_features + self.out_features = out_features + self.activation = activation + self.weight = torch.nn.Parameter(torch.randn([out_features, in_features]) / lr_multiplier) + self.bias = torch.nn.Parameter(torch.full([out_features], np.float32(bias_init))) if bias else None + self.weight_gain = lr_multiplier / np.sqrt(in_features) + self.bias_gain = lr_multiplier + + def forward(self, x): + w = self.weight.to(x.dtype) * self.weight_gain + b = self.bias + if b is not None: + b = b.to(x.dtype) + if self.bias_gain != 1: + b = b * self.bias_gain + + if self.activation == 'linear' and b is not None: + x = torch.addmm(b.unsqueeze(0), x, w.t()) + else: + x = x.matmul(w.t()) + x = bias_act.bias_act(x, b, act=self.activation) + return x + + def extra_repr(self): + return f'in_features={self.in_features:d}, out_features={self.out_features:d}, activation={self.activation:s}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class Conv2dLayer(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels. + out_channels, # Number of output channels. + kernel_size, # Width and height of the convolution kernel. + bias = True, # Apply additive bias before the activation function? + activation = 'linear', # Activation function: 'relu', 'lrelu', etc. + up = 1, # Integer upsampling factor. + down = 1, # Integer downsampling factor. + resample_filter = [1,3,3,1], # Low-pass filter to apply when resampling activations. + conv_clamp = None, # Clamp the output to +-X, None = disable clamping. + channels_last = False, # Expect the input to have memory_format=channels_last? + trainable = True, # Update the weights of this layer during training? 
+ ): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.activation = activation + self.up = up + self.down = down + self.conv_clamp = conv_clamp + self.register_buffer('resample_filter', upfirdn2d.setup_filter(resample_filter)) + self.padding = kernel_size // 2 + self.weight_gain = 1 / np.sqrt(in_channels * (kernel_size ** 2)) + self.act_gain = bias_act.activation_funcs[activation].def_gain + + memory_format = torch.channels_last if channels_last else torch.contiguous_format + weight = torch.randn([out_channels, in_channels, kernel_size, kernel_size]).to(memory_format=memory_format) + bias = torch.zeros([out_channels]) if bias else None + if trainable: + self.weight = torch.nn.Parameter(weight) + self.bias = torch.nn.Parameter(bias) if bias is not None else None + else: + self.register_buffer('weight', weight) + if bias is not None: + self.register_buffer('bias', bias) + else: + self.bias = None + + def forward(self, x, gain=1): + w = self.weight * self.weight_gain + b = self.bias.to(x.dtype) if self.bias is not None else None + flip_weight = (self.up == 1) # slightly faster + x = conv2d_resample.conv2d_resample(x=x, w=w.to(x.dtype), f=self.resample_filter, up=self.up, down=self.down, padding=self.padding, flip_weight=flip_weight) + + act_gain = self.act_gain * gain + act_clamp = self.conv_clamp * gain if self.conv_clamp is not None else None + x = bias_act.bias_act(x, b, act=self.activation, gain=act_gain, clamp=act_clamp) + return x + + def extra_repr(self): + return ' '.join([ + f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, activation={self.activation:s},', + f'up={self.up}, down={self.down}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class MappingNetwork(torch.nn.Module): + def __init__(self, + z_dim, # Input latent (Z) dimensionality, 0 = no latent. + c_dim, # Conditioning label (C) dimensionality, 0 = no label. + w_dim, # Intermediate latent (W) dimensionality. + num_ws, # Number of intermediate latents to output, None = do not broadcast. + num_layers = 8, # Number of mapping layers. + embed_features = None, # Label embedding dimensionality, None = same as w_dim. + layer_features = None, # Number of intermediate features in the mapping layers, None = same as w_dim. + activation = 'lrelu', # Activation function: 'relu', 'lrelu', etc. + lr_multiplier = 0.01, # Learning rate multiplier for the mapping layers. + w_avg_beta = 0.998, # Decay for tracking the moving average of W during training, None = do not track. 
+ ): + super().__init__() + self.z_dim = z_dim + self.c_dim = c_dim + self.w_dim = w_dim + self.num_ws = num_ws + self.num_layers = num_layers + self.w_avg_beta = w_avg_beta + + if embed_features is None: + embed_features = w_dim + if c_dim == 0: + embed_features = 0 + if layer_features is None: + layer_features = w_dim + features_list = [z_dim + embed_features] + [layer_features] * (num_layers - 1) + [w_dim] + + if c_dim > 0: + self.embed = FullyConnectedLayer(c_dim, embed_features) + for idx in range(num_layers): + in_features = features_list[idx] + out_features = features_list[idx + 1] + layer = FullyConnectedLayer(in_features, out_features, activation=activation, lr_multiplier=lr_multiplier) + setattr(self, f'fc{idx}', layer) + + if num_ws is not None and w_avg_beta is not None: + self.register_buffer('w_avg', torch.zeros([w_dim])) + + def forward(self, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False): + # Embed, normalize, and concat inputs. + x = None + with torch.autograd.profiler.record_function('input'): + if self.z_dim > 0: + misc.assert_shape(z, [None, self.z_dim]) + x = normalize_2nd_moment(z.to(torch.float32)) + if self.c_dim > 0: + misc.assert_shape(c, [None, self.c_dim]) + y = normalize_2nd_moment(self.embed(c.to(torch.float32))) + x = torch.cat([x, y], dim=1) if x is not None else y + + # Main layers. + for idx in range(self.num_layers): + layer = getattr(self, f'fc{idx}') + x = layer(x) + + # Update moving average of W. + if update_emas and self.w_avg_beta is not None: + with torch.autograd.profiler.record_function('update_w_avg'): + self.w_avg.copy_(x.detach().mean(dim=0).lerp(self.w_avg, self.w_avg_beta)) + + # Broadcast. + if self.num_ws is not None: + with torch.autograd.profiler.record_function('broadcast'): + x = x.unsqueeze(1).repeat([1, self.num_ws, 1]) + + # Apply truncation. + if truncation_psi != 1: + with torch.autograd.profiler.record_function('truncate'): + assert self.w_avg_beta is not None + if self.num_ws is None or truncation_cutoff is None: + x = self.w_avg.lerp(x, truncation_psi) + else: + x[:, :truncation_cutoff] = self.w_avg.lerp(x[:, :truncation_cutoff], truncation_psi) + return x + + def extra_repr(self): + return f'z_dim={self.z_dim:d}, c_dim={self.c_dim:d}, w_dim={self.w_dim:d}, num_ws={self.num_ws:d}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisLayer(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels. + out_channels, # Number of output channels. + w_dim, # Intermediate latent (W) dimensionality. + resolution, # Resolution of this layer. + kernel_size = 3, # Convolution kernel size. + up = 1, # Integer upsampling factor. + use_noise = True, # Enable noise input? + activation = 'lrelu', # Activation function: 'relu', 'lrelu', etc. + resample_filter = [1,3,3,1], # Low-pass filter to apply when resampling activations. + conv_clamp = None, # Clamp the output of convolution layers to +-X, None = disable clamping. + channels_last = False, # Use channels_last format for the weights? 
+ ): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.w_dim = w_dim + self.resolution = resolution + self.up = up + self.use_noise = use_noise + self.activation = activation + self.conv_clamp = conv_clamp + self.register_buffer('resample_filter', upfirdn2d.setup_filter(resample_filter)) + self.padding = kernel_size // 2 + self.act_gain = bias_act.activation_funcs[activation].def_gain + + self.affine = FullyConnectedLayer(w_dim, in_channels, bias_init=1) + memory_format = torch.channels_last if channels_last else torch.contiguous_format + self.weight = torch.nn.Parameter(torch.randn([out_channels, in_channels, kernel_size, kernel_size]).to(memory_format=memory_format)) + if use_noise: + self.register_buffer('noise_const', torch.randn([resolution, resolution])) + self.noise_strength = torch.nn.Parameter(torch.zeros([])) + self.bias = torch.nn.Parameter(torch.zeros([out_channels])) + + def forward(self, x, w, noise_mode='random', fused_modconv=True, gain=1): + assert noise_mode in ['random', 'const', 'none'] + in_resolution = self.resolution // self.up + misc.assert_shape(x, [None, self.in_channels, in_resolution, in_resolution]) + styles = self.affine(w) + + noise = None + if self.use_noise and noise_mode == 'random': + noise = torch.randn([x.shape[0], 1, self.resolution, self.resolution], device=x.device) * self.noise_strength + if self.use_noise and noise_mode == 'const': + noise = self.noise_const * self.noise_strength + + flip_weight = (self.up == 1) # slightly faster + x = modulated_conv2d(x=x, weight=self.weight, styles=styles, noise=noise, up=self.up, + padding=self.padding, resample_filter=self.resample_filter, flip_weight=flip_weight, fused_modconv=fused_modconv) + + act_gain = self.act_gain * gain + act_clamp = self.conv_clamp * gain if self.conv_clamp is not None else None + x = bias_act.bias_act(x, self.bias.to(x.dtype), act=self.activation, gain=act_gain, clamp=act_clamp) + return x + + def extra_repr(self): + return ' '.join([ + f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, w_dim={self.w_dim:d},', + f'resolution={self.resolution:d}, up={self.up}, activation={self.activation:s}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class ToRGBLayer(torch.nn.Module): + def __init__(self, in_channels, out_channels, w_dim, kernel_size=1, conv_clamp=None, channels_last=False): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.w_dim = w_dim + self.conv_clamp = conv_clamp + self.affine = FullyConnectedLayer(w_dim, in_channels, bias_init=1) + memory_format = torch.channels_last if channels_last else torch.contiguous_format + self.weight = torch.nn.Parameter(torch.randn([out_channels, in_channels, kernel_size, kernel_size]).to(memory_format=memory_format)) + self.bias = torch.nn.Parameter(torch.zeros([out_channels])) + self.weight_gain = 1 / np.sqrt(in_channels * (kernel_size ** 2)) + + def forward(self, x, w, fused_modconv=True): + styles = self.affine(w) * self.weight_gain + x = modulated_conv2d(x=x, weight=self.weight, styles=styles, demodulate=False, fused_modconv=fused_modconv) + x = bias_act.bias_act(x, self.bias.to(x.dtype), clamp=self.conv_clamp) + return x + + def extra_repr(self): + return f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}, w_dim={self.w_dim:d}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class 
+class SynthesisBlock(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels, 0 = first block. + out_channels, # Number of output channels. + w_dim, # Intermediate latent (W) dimensionality. + resolution, # Resolution of this block. + img_channels, # Number of output color channels. + is_last, # Is this the last block? + architecture = 'skip', # Architecture: 'orig', 'skip', 'resnet'. + resample_filter = [1,3,3,1], # Low-pass filter to apply when resampling activations. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + use_fp16 = False, # Use FP16 for this block? + fp16_channels_last = False, # Use channels-last memory format with FP16? + fused_modconv_default = True, # Default value of fused_modconv. 'inference_only' = True for inference, False for training. + **layer_kwargs, # Arguments for SynthesisLayer. + ): + assert architecture in ['orig', 'skip', 'resnet'] + super().__init__() + self.in_channels = in_channels + self.w_dim = w_dim + self.resolution = resolution + self.img_channels = img_channels + self.is_last = is_last + self.architecture = architecture + self.use_fp16 = use_fp16 + self.channels_last = (use_fp16 and fp16_channels_last) + self.fused_modconv_default = fused_modconv_default + self.register_buffer('resample_filter', upfirdn2d.setup_filter(resample_filter)) + self.num_conv = 0 + self.num_torgb = 0 + + if in_channels == 0: + self.const = torch.nn.Parameter(torch.randn([out_channels, resolution, resolution])) + + if in_channels != 0: + self.conv0 = SynthesisLayer(in_channels, out_channels, w_dim=w_dim, resolution=resolution, up=2, + resample_filter=resample_filter, conv_clamp=conv_clamp, channels_last=self.channels_last, **layer_kwargs) + self.num_conv += 1 + + self.conv1 = SynthesisLayer(out_channels, out_channels, w_dim=w_dim, resolution=resolution, + conv_clamp=conv_clamp, channels_last=self.channels_last, **layer_kwargs) + self.num_conv += 1 + + if is_last or architecture == 'skip': + self.torgb = ToRGBLayer(out_channels, img_channels, w_dim=w_dim, + conv_clamp=conv_clamp, channels_last=self.channels_last) + self.num_torgb += 1 + + if in_channels != 0 and architecture == 'resnet': + self.skip = Conv2dLayer(in_channels, out_channels, kernel_size=1, bias=False, up=2, + resample_filter=resample_filter, channels_last=self.channels_last) + + def forward(self, x, img, ws, force_fp32=False, fused_modconv=None, update_emas=False, **layer_kwargs): + _ = update_emas # unused + misc.assert_shape(ws, [None, self.num_conv + self.num_torgb, self.w_dim]) + w_iter = iter(ws.unbind(dim=1)) + if ws.device.type != 'cuda': + force_fp32 = True + dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32 + memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format + if fused_modconv is None: + fused_modconv = self.fused_modconv_default + if fused_modconv == 'inference_only': + fused_modconv = (not self.training) + + # Input. + if self.in_channels == 0: + x = self.const.to(dtype=dtype, memory_format=memory_format) + x = x.unsqueeze(0).repeat([ws.shape[0], 1, 1, 1]) + else: + misc.assert_shape(x, [None, self.in_channels, self.resolution // 2, self.resolution // 2]) + x = x.to(dtype=dtype, memory_format=memory_format) + + # Main layers. 
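+        # Routing note: the first block (in_channels == 0) starts from the learned constant and
+        # only runs conv1; 'resnet' blocks add a 1x1 skip path, with both paths scaled by
+        # sqrt(0.5) so their sum keeps roughly unit variance; 'orig'/'skip' blocks run conv0
+        # (which upsamples by 2x) followed by conv1.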
+ if self.in_channels == 0: + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + elif self.architecture == 'resnet': + y = self.skip(x, gain=np.sqrt(0.5)) + x = self.conv0(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, gain=np.sqrt(0.5), **layer_kwargs) + x = y.add_(x) + else: + x = self.conv0(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + + # ToRGB. + if img is not None: + misc.assert_shape(img, [None, self.img_channels, self.resolution // 2, self.resolution // 2]) + img = upfirdn2d.upsample2d(img, self.resample_filter) + if self.is_last or self.architecture == 'skip': + y = self.torgb(x, next(w_iter), fused_modconv=fused_modconv) + y = y.to(dtype=torch.float32, memory_format=torch.contiguous_format) + img = img.add_(y) if img is not None else y + + assert x.dtype == dtype + assert img is None or img.dtype == torch.float32 + return x, img + + def extra_repr(self): + return f'resolution={self.resolution:d}, architecture={self.architecture:s}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisNetwork(torch.nn.Module): + def __init__(self, + w_dim, # Intermediate latent (W) dimensionality. + img_resolution, # Output image resolution. + img_channels, # Number of color channels. + channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + **block_kwargs, # Arguments for SynthesisBlock. + ): + assert img_resolution >= 4 and img_resolution & (img_resolution - 1) == 0 + super().__init__() + self.w_dim = w_dim + self.img_resolution = img_resolution + self.img_resolution_log2 = int(np.log2(img_resolution)) + self.img_channels = img_channels + self.num_fp16_res = num_fp16_res + self.block_resolutions = [2 ** i for i in range(2, self.img_resolution_log2 + 1)] + channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions} + fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8) + + self.num_ws = 0 + for res in self.block_resolutions: + in_channels = channels_dict[res // 2] if res > 4 else 0 + out_channels = channels_dict[res] + use_fp16 = (res >= fp16_resolution) + is_last = (res == self.img_resolution) + block = SynthesisBlock(in_channels, out_channels, w_dim=w_dim, resolution=res, + img_channels=img_channels, is_last=is_last, use_fp16=use_fp16, **block_kwargs) + self.num_ws += block.num_conv + if is_last: + self.num_ws += block.num_torgb + setattr(self, f'b{res}', block) + + def forward(self, ws, **block_kwargs): + block_ws = [] + with torch.autograd.profiler.record_function('split_ws'): + misc.assert_shape(ws, [None, self.num_ws, self.w_dim]) + ws = ws.to(torch.float32) + w_idx = 0 + for res in self.block_resolutions: + block = getattr(self, f'b{res}') + block_ws.append(ws.narrow(1, w_idx, block.num_conv + block.num_torgb)) + w_idx += block.num_conv + + x = img = None + for res, cur_ws in zip(self.block_resolutions, block_ws): + block = getattr(self, f'b{res}') + x, img = block(x, img, cur_ws, **block_kwargs) + return img + + def extra_repr(self): + return ' '.join([ + f'w_dim={self.w_dim:d}, num_ws={self.num_ws:d},', + f'img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d},', + 
f'num_fp16_res={self.num_fp16_res:d}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class Generator(torch.nn.Module): + def __init__(self, + z_dim, # Input latent (Z) dimensionality. + c_dim, # Conditioning label (C) dimensionality. + w_dim, # Intermediate latent (W) dimensionality. + img_resolution, # Output resolution. + img_channels, # Number of output color channels. + mapping_kwargs = {}, # Arguments for MappingNetwork. + **synthesis_kwargs, # Arguments for SynthesisNetwork. + ): + super().__init__() + self.z_dim = z_dim + self.c_dim = c_dim + self.w_dim = w_dim + self.img_resolution = img_resolution + self.img_channels = img_channels + self.synthesis = SynthesisNetwork(w_dim=w_dim, img_resolution=img_resolution, img_channels=img_channels, **synthesis_kwargs) + self.num_ws = self.synthesis.num_ws + self.mapping = MappingNetwork(z_dim=z_dim, c_dim=c_dim, w_dim=w_dim, num_ws=self.num_ws, **mapping_kwargs) + + def forward(self, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + img = self.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + return img + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class DiscriminatorBlock(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels, 0 = first block. + tmp_channels, # Number of intermediate channels. + out_channels, # Number of output channels. + resolution, # Resolution of this block. + img_channels, # Number of input color channels. + first_layer_idx, # Index of the first layer. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. + activation = 'lrelu', # Activation function: 'relu', 'lrelu', etc. + resample_filter = [1,3,3,1], # Low-pass filter to apply when resampling activations. + conv_clamp = None, # Clamp the output of convolution layers to +-X, None = disable clamping. + use_fp16 = False, # Use FP16 for this block? + fp16_channels_last = False, # Use channels-last memory format with FP16? + freeze_layers = 0, # Freeze-D: Number of layers to freeze. 
+ ): + assert in_channels in [0, tmp_channels] + assert architecture in ['orig', 'skip', 'resnet'] + super().__init__() + self.in_channels = in_channels + self.resolution = resolution + self.img_channels = img_channels + self.first_layer_idx = first_layer_idx + self.architecture = architecture + self.use_fp16 = use_fp16 + self.channels_last = (use_fp16 and fp16_channels_last) + self.register_buffer('resample_filter', upfirdn2d.setup_filter(resample_filter)) + + self.num_layers = 0 + def trainable_gen(): + while True: + layer_idx = self.first_layer_idx + self.num_layers + trainable = (layer_idx >= freeze_layers) + self.num_layers += 1 + yield trainable + trainable_iter = trainable_gen() + + if in_channels == 0 or architecture == 'skip': + self.fromrgb = Conv2dLayer(img_channels, tmp_channels, kernel_size=1, activation=activation, + trainable=next(trainable_iter), conv_clamp=conv_clamp, channels_last=self.channels_last) + + self.conv0 = Conv2dLayer(tmp_channels, tmp_channels, kernel_size=3, activation=activation, + trainable=next(trainable_iter), conv_clamp=conv_clamp, channels_last=self.channels_last) + + self.conv1 = Conv2dLayer(tmp_channels, out_channels, kernel_size=3, activation=activation, down=2, + trainable=next(trainable_iter), resample_filter=resample_filter, conv_clamp=conv_clamp, channels_last=self.channels_last) + + if architecture == 'resnet': + self.skip = Conv2dLayer(tmp_channels, out_channels, kernel_size=1, bias=False, down=2, + trainable=next(trainable_iter), resample_filter=resample_filter, channels_last=self.channels_last) + + def forward(self, x, img, force_fp32=False): + if (x if x is not None else img).device.type != 'cuda': + force_fp32 = True + dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32 + memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format + + # Input. + if x is not None: + misc.assert_shape(x, [None, self.in_channels, self.resolution, self.resolution]) + x = x.to(dtype=dtype, memory_format=memory_format) + + # FromRGB. + if self.in_channels == 0 or self.architecture == 'skip': + misc.assert_shape(img, [None, self.img_channels, self.resolution, self.resolution]) + img = img.to(dtype=dtype, memory_format=memory_format) + y = self.fromrgb(img) + x = x + y if x is not None else y + img = upfirdn2d.downsample2d(img, self.resample_filter) if self.architecture == 'skip' else None + + # Main layers. + if self.architecture == 'resnet': + y = self.skip(x, gain=np.sqrt(0.5)) + x = self.conv0(x) + x = self.conv1(x, gain=np.sqrt(0.5)) + x = y.add_(x) + else: + x = self.conv0(x) + x = self.conv1(x) + + assert x.dtype == dtype + return x, img + + def extra_repr(self): + return f'resolution={self.resolution:d}, architecture={self.architecture:s}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class MinibatchStdLayer(torch.nn.Module): + def __init__(self, group_size, num_channels=1): + super().__init__() + self.group_size = group_size + self.num_channels = num_channels + + def forward(self, x): + N, C, H, W = x.shape + with misc.suppress_tracer_warnings(): # as_tensor results are registered as constants + G = torch.min(torch.as_tensor(self.group_size), torch.as_tensor(N)) if self.group_size is not None else N + F = self.num_channels + c = C // F + + y = x.reshape(G, -1, F, c, H, W) # [GnFcHW] Split minibatch N into n groups of size G, and channels C into F groups of size c. 
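+        # The lines below compute the per-group standard deviation of every feature and average
+        # it into F scalar maps that are appended to the input as extra channels, giving the
+        # discriminator a direct measure of minibatch diversity.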
+ y = y - y.mean(dim=0) # [GnFcHW] Subtract mean over group. + y = y.square().mean(dim=0) # [nFcHW] Calc variance over group. + y = (y + 1e-8).sqrt() # [nFcHW] Calc stddev over group. + y = y.mean(dim=[2,3,4]) # [nF] Take average over channels and pixels. + y = y.reshape(-1, F, 1, 1) # [nF11] Add missing dimensions. + y = y.repeat(G, 1, H, W) # [NFHW] Replicate over group and pixels. + x = torch.cat([x, y], dim=1) # [NCHW] Append to input as new channels. + return x + + def extra_repr(self): + return f'group_size={self.group_size}, num_channels={self.num_channels:d}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class DiscriminatorEpilogue(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels. + cmap_dim, # Dimensionality of mapped conditioning label, 0 = no label. + resolution, # Resolution of this block. + img_channels, # Number of input color channels. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. + mbstd_group_size = 4, # Group size for the minibatch standard deviation layer, None = entire minibatch. + mbstd_num_channels = 1, # Number of features for the minibatch standard deviation layer, 0 = disable. + activation = 'lrelu', # Activation function: 'relu', 'lrelu', etc. + conv_clamp = None, # Clamp the output of convolution layers to +-X, None = disable clamping. + ): + assert architecture in ['orig', 'skip', 'resnet'] + super().__init__() + self.in_channels = in_channels + self.cmap_dim = cmap_dim + self.resolution = resolution + self.img_channels = img_channels + self.architecture = architecture + + if architecture == 'skip': + self.fromrgb = Conv2dLayer(img_channels, in_channels, kernel_size=1, activation=activation) + self.mbstd = MinibatchStdLayer(group_size=mbstd_group_size, num_channels=mbstd_num_channels) if mbstd_num_channels > 0 else None + self.conv = Conv2dLayer(in_channels + mbstd_num_channels, in_channels, kernel_size=3, activation=activation, conv_clamp=conv_clamp) + self.fc = FullyConnectedLayer(in_channels * (resolution ** 2), in_channels, activation=activation) + self.out = FullyConnectedLayer(in_channels, 1 if cmap_dim == 0 else cmap_dim) + + def forward(self, x, img, cmap, force_fp32=False): + misc.assert_shape(x, [None, self.in_channels, self.resolution, self.resolution]) # [NCHW] + _ = force_fp32 # unused + dtype = torch.float32 + memory_format = torch.contiguous_format + + # FromRGB. + x = x.to(dtype=dtype, memory_format=memory_format) + if self.architecture == 'skip': + misc.assert_shape(img, [None, self.img_channels, self.resolution, self.resolution]) + img = img.to(dtype=dtype, memory_format=memory_format) + x = x + self.fromrgb(img) + + # Main layers. + if self.mbstd is not None: + x = self.mbstd(x) + x = self.conv(x) + x = self.fc(x.flatten(1)) + x = self.out(x) + + # Conditioning. + if self.cmap_dim > 0: + misc.assert_shape(cmap, [None, self.cmap_dim]) + x = (x * cmap).sum(dim=1, keepdim=True) * (1 / np.sqrt(self.cmap_dim)) + + assert x.dtype == dtype + return x + + def extra_repr(self): + return f'resolution={self.resolution:d}, architecture={self.architecture:s}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class Discriminator(torch.nn.Module): + def __init__(self, + c_dim, # Conditioning label (C) dimensionality. + img_resolution, # Input resolution. + img_channels, # Number of input color channels. + architecture = 'resnet', # Architecture: 'orig', 'skip', 'resnet'. 
+ channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + cmap_dim = None, # Dimensionality of mapped conditioning label, None = default. + block_kwargs = {}, # Arguments for DiscriminatorBlock. + mapping_kwargs = {}, # Arguments for MappingNetwork. + epilogue_kwargs = {}, # Arguments for DiscriminatorEpilogue. + ): + super().__init__() + self.c_dim = c_dim + self.img_resolution = img_resolution + self.img_resolution_log2 = int(np.log2(img_resolution)) + self.img_channels = img_channels + self.block_resolutions = [2 ** i for i in range(self.img_resolution_log2, 2, -1)] + channels_dict = {res: min(channel_base // res, channel_max) for res in self.block_resolutions + [4]} + fp16_resolution = max(2 ** (self.img_resolution_log2 + 1 - num_fp16_res), 8) + + if cmap_dim is None: + cmap_dim = channels_dict[4] + if c_dim == 0: + cmap_dim = 0 + + common_kwargs = dict(img_channels=img_channels, architecture=architecture, conv_clamp=conv_clamp) + cur_layer_idx = 0 + for res in self.block_resolutions: + in_channels = channels_dict[res] if res < img_resolution else 0 + tmp_channels = channels_dict[res] + out_channels = channels_dict[res // 2] + use_fp16 = (res >= fp16_resolution) + block = DiscriminatorBlock(in_channels, tmp_channels, out_channels, resolution=res, + first_layer_idx=cur_layer_idx, use_fp16=use_fp16, **block_kwargs, **common_kwargs) + setattr(self, f'b{res}', block) + cur_layer_idx += block.num_layers + if c_dim > 0: + self.mapping = MappingNetwork(z_dim=0, c_dim=c_dim, w_dim=cmap_dim, num_ws=None, w_avg_beta=None, **mapping_kwargs) + self.b4 = DiscriminatorEpilogue(channels_dict[4], cmap_dim=cmap_dim, resolution=4, **epilogue_kwargs, **common_kwargs) + + def forward(self, img, c, update_emas=False, **block_kwargs): + _ = update_emas # unused + x = None + for res in self.block_resolutions: + block = getattr(self, f'b{res}') + x, img = block(x, img, **block_kwargs) + + cmap = None + if self.c_dim > 0: + cmap = self.mapping(None, c) + x = self.b4(x, img, cmap) + return x + + def extra_repr(self): + return f'c_dim={self.c_dim:d}, img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d}' + +#---------------------------------------------------------------------------- \ No newline at end of file diff --git a/ThirdParty/eg3d/training/networks_stylegan3.py b/ThirdParty/eg3d/training/networks_stylegan3.py new file mode 100644 index 0000000000000000000000000000000000000000..40e5508803feb7d4ebdd49ce140051f6a549cf9c --- /dev/null +++ b/ThirdParty/eg3d/training/networks_stylegan3.py @@ -0,0 +1,517 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +"""Generator architecture from the paper +"Alias-Free Generative Adversarial Networks".""" + +import numpy as np +import scipy.signal +import scipy.optimize +import torch +from torch_utils import misc +from torch_utils import persistence +from torch_utils.ops import conv2d_gradfix +from torch_utils.ops import filtered_lrelu +from torch_utils.ops import bias_act + +#---------------------------------------------------------------------------- + +@misc.profiled_function +def modulated_conv2d( + x, # Input tensor: [batch_size, in_channels, in_height, in_width] + w, # Weight tensor: [out_channels, in_channels, kernel_height, kernel_width] + s, # Style tensor: [batch_size, in_channels] + demodulate = True, # Apply weight demodulation? + padding = 0, # Padding: int or [padH, padW] + input_gain = None, # Optional scale factors for the input channels: [], [in_channels], or [batch_size, in_channels] +): + with misc.suppress_tracer_warnings(): # this value will be treated as a constant + batch_size = int(x.shape[0]) + out_channels, in_channels, kh, kw = w.shape + misc.assert_shape(w, [out_channels, in_channels, kh, kw]) # [OIkk] + misc.assert_shape(x, [batch_size, in_channels, None, None]) # [NIHW] + misc.assert_shape(s, [batch_size, in_channels]) # [NI] + + # Pre-normalize inputs. + if demodulate: + w = w * w.square().mean([1,2,3], keepdim=True).rsqrt() + s = s * s.square().mean().rsqrt() + + # Modulate weights. + w = w.unsqueeze(0) # [NOIkk] + w = w * s.unsqueeze(1).unsqueeze(3).unsqueeze(4) # [NOIkk] + + # Demodulate weights. + if demodulate: + dcoefs = (w.square().sum(dim=[2,3,4]) + 1e-8).rsqrt() # [NO] + w = w * dcoefs.unsqueeze(2).unsqueeze(3).unsqueeze(4) # [NOIkk] + + # Apply input scaling. + if input_gain is not None: + input_gain = input_gain.expand(batch_size, in_channels) # [NI] + w = w * input_gain.unsqueeze(1).unsqueeze(3).unsqueeze(4) # [NOIkk] + + # Execute as one fused op using grouped convolution. + x = x.reshape(1, -1, *x.shape[2:]) + w = w.reshape(-1, in_channels, kh, kw) + x = conv2d_gradfix.conv2d(input=x, weight=w.to(x.dtype), padding=padding, groups=batch_size) + x = x.reshape(batch_size, -1, *x.shape[2:]) + return x + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class FullyConnectedLayer(torch.nn.Module): + def __init__(self, + in_features, # Number of input features. + out_features, # Number of output features. + activation = 'linear', # Activation function: 'relu', 'lrelu', etc. + bias = True, # Apply additive bias before the activation function? + lr_multiplier = 1, # Learning rate multiplier. + weight_init = 1, # Initial standard deviation of the weight tensor. + bias_init = 0, # Initial value of the additive bias. 
+ ): + super().__init__() + self.in_features = in_features + self.out_features = out_features + self.activation = activation + self.weight = torch.nn.Parameter(torch.randn([out_features, in_features]) * (weight_init / lr_multiplier)) + bias_init = np.broadcast_to(np.asarray(bias_init, dtype=np.float32), [out_features]) + self.bias = torch.nn.Parameter(torch.from_numpy(bias_init / lr_multiplier)) if bias else None + self.weight_gain = lr_multiplier / np.sqrt(in_features) + self.bias_gain = lr_multiplier + + def forward(self, x): + w = self.weight.to(x.dtype) * self.weight_gain + b = self.bias + if b is not None: + b = b.to(x.dtype) + if self.bias_gain != 1: + b = b * self.bias_gain + if self.activation == 'linear' and b is not None: + x = torch.addmm(b.unsqueeze(0), x, w.t()) + else: + x = x.matmul(w.t()) + x = bias_act.bias_act(x, b, act=self.activation) + return x + + def extra_repr(self): + return f'in_features={self.in_features:d}, out_features={self.out_features:d}, activation={self.activation:s}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class MappingNetwork(torch.nn.Module): + def __init__(self, + z_dim, # Input latent (Z) dimensionality. + c_dim, # Conditioning label (C) dimensionality, 0 = no labels. + w_dim, # Intermediate latent (W) dimensionality. + num_ws, # Number of intermediate latents to output. + num_layers = 2, # Number of mapping layers. + lr_multiplier = 0.01, # Learning rate multiplier for the mapping layers. + w_avg_beta = 0.998, # Decay for tracking the moving average of W during training. + ): + super().__init__() + self.z_dim = z_dim + self.c_dim = c_dim + self.w_dim = w_dim + self.num_ws = num_ws + self.num_layers = num_layers + self.w_avg_beta = w_avg_beta + + # Construct layers. + self.embed = FullyConnectedLayer(self.c_dim, self.w_dim) if self.c_dim > 0 else None + features = [self.z_dim + (self.w_dim if self.c_dim > 0 else 0)] + [self.w_dim] * self.num_layers + for idx, in_features, out_features in zip(range(num_layers), features[:-1], features[1:]): + layer = FullyConnectedLayer(in_features, out_features, activation='lrelu', lr_multiplier=lr_multiplier) + setattr(self, f'fc{idx}', layer) + self.register_buffer('w_avg', torch.zeros([w_dim])) + + def forward(self, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False): + misc.assert_shape(z, [None, self.z_dim]) + if truncation_cutoff is None: + truncation_cutoff = self.num_ws + + # Embed, normalize, and concatenate inputs. + x = z.to(torch.float32) + x = x * (x.square().mean(1, keepdim=True) + 1e-8).rsqrt() + if self.c_dim > 0: + misc.assert_shape(c, [None, self.c_dim]) + y = self.embed(c.to(torch.float32)) + y = y * (y.square().mean(1, keepdim=True) + 1e-8).rsqrt() + x = torch.cat([x, y], dim=1) if x is not None else y + + # Execute layers. + for idx in range(self.num_layers): + x = getattr(self, f'fc{idx}')(x) + + # Update moving average of W. + if update_emas: + self.w_avg.copy_(x.detach().mean(dim=0).lerp(self.w_avg, self.w_avg_beta)) + + # Broadcast and apply truncation. 
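+        # Truncation blends each w towards the tracked average: w' = lerp(w_avg, w, psi),
+        # so truncation_psi < 1 trades sample diversity for fidelity (the "truncation trick").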
+ x = x.unsqueeze(1).repeat([1, self.num_ws, 1]) + if truncation_psi != 1: + x[:, :truncation_cutoff] = self.w_avg.lerp(x[:, :truncation_cutoff], truncation_psi) + return x + + def extra_repr(self): + return f'z_dim={self.z_dim:d}, c_dim={self.c_dim:d}, w_dim={self.w_dim:d}, num_ws={self.num_ws:d}' + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisInput(torch.nn.Module): + def __init__(self, + w_dim, # Intermediate latent (W) dimensionality. + channels, # Number of output channels. + size, # Output spatial size: int or [width, height]. + sampling_rate, # Output sampling rate. + bandwidth, # Output bandwidth. + ): + super().__init__() + self.w_dim = w_dim + self.channels = channels + self.size = np.broadcast_to(np.asarray(size), [2]) + self.sampling_rate = sampling_rate + self.bandwidth = bandwidth + + # Draw random frequencies from uniform 2D disc. + freqs = torch.randn([self.channels, 2]) + radii = freqs.square().sum(dim=1, keepdim=True).sqrt() + freqs /= radii * radii.square().exp().pow(0.25) + freqs *= bandwidth + phases = torch.rand([self.channels]) - 0.5 + + # Setup parameters and buffers. + self.weight = torch.nn.Parameter(torch.randn([self.channels, self.channels])) + self.affine = FullyConnectedLayer(w_dim, 4, weight_init=0, bias_init=[1,0,0,0]) + self.register_buffer('transform', torch.eye(3, 3)) # User-specified inverse transform wrt. resulting image. + self.register_buffer('freqs', freqs) + self.register_buffer('phases', phases) + + def forward(self, w): + # Introduce batch dimension. + transforms = self.transform.unsqueeze(0) # [batch, row, col] + freqs = self.freqs.unsqueeze(0) # [batch, channel, xy] + phases = self.phases.unsqueeze(0) # [batch, channel] + + # Apply learned transformation. + t = self.affine(w) # t = (r_c, r_s, t_x, t_y) + t = t / t[:, :2].norm(dim=1, keepdim=True) # t' = (r'_c, r'_s, t'_x, t'_y) + m_r = torch.eye(3, device=w.device).unsqueeze(0).repeat([w.shape[0], 1, 1]) # Inverse rotation wrt. resulting image. + m_r[:, 0, 0] = t[:, 0] # r'_c + m_r[:, 0, 1] = -t[:, 1] # r'_s + m_r[:, 1, 0] = t[:, 1] # r'_s + m_r[:, 1, 1] = t[:, 0] # r'_c + m_t = torch.eye(3, device=w.device).unsqueeze(0).repeat([w.shape[0], 1, 1]) # Inverse translation wrt. resulting image. + m_t[:, 0, 2] = -t[:, 2] # t'_x + m_t[:, 1, 2] = -t[:, 3] # t'_y + transforms = m_r @ m_t @ transforms # First rotate resulting image, then translate, and finally apply user-specified transform. + + # Transform frequencies. + phases = phases + (freqs @ transforms[:, :2, 2:]).squeeze(2) + freqs = freqs @ transforms[:, :2, :2] + + # Dampen out-of-band frequencies that may occur due to the user-specified transform. + amplitudes = (1 - (freqs.norm(dim=2) - self.bandwidth) / (self.sampling_rate / 2 - self.bandwidth)).clamp(0, 1) + + # Construct sampling grid. + theta = torch.eye(2, 3, device=w.device) + theta[0, 0] = 0.5 * self.size[0] / self.sampling_rate + theta[1, 1] = 0.5 * self.size[1] / self.sampling_rate + grids = torch.nn.functional.affine_grid(theta.unsqueeze(0), [1, 1, self.size[1], self.size[0]], align_corners=False) + + # Compute Fourier features. + x = (grids.unsqueeze(3) @ freqs.permute(0, 2, 1).unsqueeze(1).unsqueeze(2)).squeeze(3) # [batch, height, width, channel] + x = x + phases.unsqueeze(1).unsqueeze(2) + x = torch.sin(x * (np.pi * 2)) + x = x * amplitudes.unsqueeze(1).unsqueeze(2) + + # Apply trainable mapping. + weight = self.weight / np.sqrt(self.channels) + x = x @ weight.t() + + # Ensure correct shape. 
+ x = x.permute(0, 3, 1, 2) # [batch, channel, height, width] + misc.assert_shape(x, [w.shape[0], self.channels, int(self.size[1]), int(self.size[0])]) + return x + + def extra_repr(self): + return '\n'.join([ + f'w_dim={self.w_dim:d}, channels={self.channels:d}, size={list(self.size)},', + f'sampling_rate={self.sampling_rate:g}, bandwidth={self.bandwidth:g}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisLayer(torch.nn.Module): + def __init__(self, + w_dim, # Intermediate latent (W) dimensionality. + is_torgb, # Is this the final ToRGB layer? + is_critically_sampled, # Does this layer use critical sampling? + use_fp16, # Does this layer use FP16? + + # Input & output specifications. + in_channels, # Number of input channels. + out_channels, # Number of output channels. + in_size, # Input spatial size: int or [width, height]. + out_size, # Output spatial size: int or [width, height]. + in_sampling_rate, # Input sampling rate (s). + out_sampling_rate, # Output sampling rate (s). + in_cutoff, # Input cutoff frequency (f_c). + out_cutoff, # Output cutoff frequency (f_c). + in_half_width, # Input transition band half-width (f_h). + out_half_width, # Output Transition band half-width (f_h). + + # Hyperparameters. + conv_kernel = 3, # Convolution kernel size. Ignored for final the ToRGB layer. + filter_size = 6, # Low-pass filter size relative to the lower resolution when up/downsampling. + lrelu_upsampling = 2, # Relative sampling rate for leaky ReLU. Ignored for final the ToRGB layer. + use_radial_filters = False, # Use radially symmetric downsampling filter? Ignored for critically sampled layers. + conv_clamp = 256, # Clamp the output to [-X, +X], None = disable clamping. + magnitude_ema_beta = 0.999, # Decay rate for the moving average of input magnitudes. + ): + super().__init__() + self.w_dim = w_dim + self.is_torgb = is_torgb + self.is_critically_sampled = is_critically_sampled + self.use_fp16 = use_fp16 + self.in_channels = in_channels + self.out_channels = out_channels + self.in_size = np.broadcast_to(np.asarray(in_size), [2]) + self.out_size = np.broadcast_to(np.asarray(out_size), [2]) + self.in_sampling_rate = in_sampling_rate + self.out_sampling_rate = out_sampling_rate + self.tmp_sampling_rate = max(in_sampling_rate, out_sampling_rate) * (1 if is_torgb else lrelu_upsampling) + self.in_cutoff = in_cutoff + self.out_cutoff = out_cutoff + self.in_half_width = in_half_width + self.out_half_width = out_half_width + self.conv_kernel = 1 if is_torgb else conv_kernel + self.conv_clamp = conv_clamp + self.magnitude_ema_beta = magnitude_ema_beta + + # Setup parameters and buffers. + self.affine = FullyConnectedLayer(self.w_dim, self.in_channels, bias_init=1) + self.weight = torch.nn.Parameter(torch.randn([self.out_channels, self.in_channels, self.conv_kernel, self.conv_kernel])) + self.bias = torch.nn.Parameter(torch.zeros([self.out_channels])) + self.register_buffer('magnitude_ema', torch.ones([])) + + # Design upsampling filter. + self.up_factor = int(np.rint(self.tmp_sampling_rate / self.in_sampling_rate)) + assert self.in_sampling_rate * self.up_factor == self.tmp_sampling_rate + self.up_taps = filter_size * self.up_factor if self.up_factor > 1 and not self.is_torgb else 1 + self.register_buffer('up_filter', self.design_lowpass_filter( + numtaps=self.up_taps, cutoff=self.in_cutoff, width=self.in_half_width*2, fs=self.tmp_sampling_rate)) + + # Design downsampling filter. 
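+        # The downsampling filter is designed against the same oversampled temporary rate as the
+        # upsampling filter, but with the output cutoff and transition band, so energy above the
+        # output band is removed before resampling down to out_sampling_rate.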
+ self.down_factor = int(np.rint(self.tmp_sampling_rate / self.out_sampling_rate)) + assert self.out_sampling_rate * self.down_factor == self.tmp_sampling_rate + self.down_taps = filter_size * self.down_factor if self.down_factor > 1 and not self.is_torgb else 1 + self.down_radial = use_radial_filters and not self.is_critically_sampled + self.register_buffer('down_filter', self.design_lowpass_filter( + numtaps=self.down_taps, cutoff=self.out_cutoff, width=self.out_half_width*2, fs=self.tmp_sampling_rate, radial=self.down_radial)) + + # Compute padding. + pad_total = (self.out_size - 1) * self.down_factor + 1 # Desired output size before downsampling. + pad_total -= (self.in_size + self.conv_kernel - 1) * self.up_factor # Input size after upsampling. + pad_total += self.up_taps + self.down_taps - 2 # Size reduction caused by the filters. + pad_lo = (pad_total + self.up_factor) // 2 # Shift sample locations according to the symmetric interpretation (Appendix C.3). + pad_hi = pad_total - pad_lo + self.padding = [int(pad_lo[0]), int(pad_hi[0]), int(pad_lo[1]), int(pad_hi[1])] + + def forward(self, x, w, noise_mode='random', force_fp32=False, update_emas=False): + assert noise_mode in ['random', 'const', 'none'] # unused + misc.assert_shape(x, [None, self.in_channels, int(self.in_size[1]), int(self.in_size[0])]) + misc.assert_shape(w, [x.shape[0], self.w_dim]) + + # Track input magnitude. + if update_emas: + with torch.autograd.profiler.record_function('update_magnitude_ema'): + magnitude_cur = x.detach().to(torch.float32).square().mean() + self.magnitude_ema.copy_(magnitude_cur.lerp(self.magnitude_ema, self.magnitude_ema_beta)) + input_gain = self.magnitude_ema.rsqrt() + + # Execute affine layer. + styles = self.affine(w) + if self.is_torgb: + weight_gain = 1 / np.sqrt(self.in_channels * (self.conv_kernel ** 2)) + styles = styles * weight_gain + + # Execute modulated conv2d. + dtype = torch.float16 if (self.use_fp16 and not force_fp32 and x.device.type == 'cuda') else torch.float32 + x = modulated_conv2d(x=x.to(dtype), w=self.weight, s=styles, + padding=self.conv_kernel-1, demodulate=(not self.is_torgb), input_gain=input_gain) + + # Execute bias, filtered leaky ReLU, and clamping. + gain = 1 if self.is_torgb else np.sqrt(2) + slope = 1 if self.is_torgb else 0.2 + x = filtered_lrelu.filtered_lrelu(x=x, fu=self.up_filter, fd=self.down_filter, b=self.bias.to(x.dtype), + up=self.up_factor, down=self.down_factor, padding=self.padding, gain=gain, slope=slope, clamp=self.conv_clamp) + + # Ensure correct shape and dtype. + misc.assert_shape(x, [None, self.out_channels, int(self.out_size[1]), int(self.out_size[0])]) + assert x.dtype == dtype + return x + + @staticmethod + def design_lowpass_filter(numtaps, cutoff, width, fs, radial=False): + assert numtaps >= 1 + + # Identity filter. + if numtaps == 1: + return None + + # Separable Kaiser low-pass filter. + if not radial: + f = scipy.signal.firwin(numtaps=numtaps, cutoff=cutoff, width=width, fs=fs) + return torch.as_tensor(f, dtype=torch.float32) + + # Radially symmetric jinc-based filter. 
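+        # Build a 2D jinc kernel on a radial grid, window it with a separable Kaiser window
+        # (beta derived from the requested transition width), and normalize it to unit DC gain.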
+ x = (np.arange(numtaps) - (numtaps - 1) / 2) / fs + r = np.hypot(*np.meshgrid(x, x)) + f = scipy.special.j1(2 * cutoff * (np.pi * r)) / (np.pi * r) + beta = scipy.signal.kaiser_beta(scipy.signal.kaiser_atten(numtaps, width / (fs / 2))) + w = np.kaiser(numtaps, beta) + f *= np.outer(w, w) + f /= np.sum(f) + return torch.as_tensor(f, dtype=torch.float32) + + def extra_repr(self): + return '\n'.join([ + f'w_dim={self.w_dim:d}, is_torgb={self.is_torgb},', + f'is_critically_sampled={self.is_critically_sampled}, use_fp16={self.use_fp16},', + f'in_sampling_rate={self.in_sampling_rate:g}, out_sampling_rate={self.out_sampling_rate:g},', + f'in_cutoff={self.in_cutoff:g}, out_cutoff={self.out_cutoff:g},', + f'in_half_width={self.in_half_width:g}, out_half_width={self.out_half_width:g},', + f'in_size={list(self.in_size)}, out_size={list(self.out_size)},', + f'in_channels={self.in_channels:d}, out_channels={self.out_channels:d}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisNetwork(torch.nn.Module): + def __init__(self, + w_dim, # Intermediate latent (W) dimensionality. + img_resolution, # Output image resolution. + img_channels, # Number of color channels. + channel_base = 32768, # Overall multiplier for the number of channels. + channel_max = 512, # Maximum number of channels in any layer. + num_layers = 14, # Total number of layers, excluding Fourier features and ToRGB. + num_critical = 2, # Number of critically sampled layers at the end. + first_cutoff = 2, # Cutoff frequency of the first layer (f_{c,0}). + first_stopband = 2**2.1, # Minimum stopband of the first layer (f_{t,0}). + last_stopband_rel = 2**0.3, # Minimum stopband of the last layer, expressed relative to the cutoff. + margin_size = 10, # Number of additional pixels outside the image. + output_scale = 0.25, # Scale factor for the output image. + num_fp16_res = 4, # Use FP16 for the N highest resolutions. + **layer_kwargs, # Arguments for SynthesisLayer. + ): + super().__init__() + self.w_dim = w_dim + self.num_ws = num_layers + 2 + self.img_resolution = img_resolution + self.img_channels = img_channels + self.num_layers = num_layers + self.num_critical = num_critical + self.margin_size = margin_size + self.output_scale = output_scale + self.num_fp16_res = num_fp16_res + + # Geometric progression of layer cutoffs and min. stopbands. + last_cutoff = self.img_resolution / 2 # f_{c,N} + last_stopband = last_cutoff * last_stopband_rel # f_{t,N} + exponents = np.minimum(np.arange(self.num_layers + 1) / (self.num_layers - self.num_critical), 1) + cutoffs = first_cutoff * (last_cutoff / first_cutoff) ** exponents # f_c[i] + stopbands = first_stopband * (last_stopband / first_stopband) ** exponents # f_t[i] + + # Compute remaining layer parameters. + sampling_rates = np.exp2(np.ceil(np.log2(np.minimum(stopbands * 2, self.img_resolution)))) # s[i] + half_widths = np.maximum(stopbands, sampling_rates / 2) - cutoffs # f_h[i] + sizes = sampling_rates + self.margin_size * 2 + sizes[-2:] = self.img_resolution + channels = np.rint(np.minimum((channel_base / 2) / cutoffs, channel_max)) + channels[-1] = self.img_channels + + # Construct layers. 
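+        # Layers are registered under descriptive attribute names of the form
+        # f'L{idx}_{out_size}_{out_channels}' and recorded in self.layer_names so that
+        # forward() can execute them in order, consuming one w per layer.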
+ self.input = SynthesisInput( + w_dim=self.w_dim, channels=int(channels[0]), size=int(sizes[0]), + sampling_rate=sampling_rates[0], bandwidth=cutoffs[0]) + self.layer_names = [] + for idx in range(self.num_layers + 1): + prev = max(idx - 1, 0) + is_torgb = (idx == self.num_layers) + is_critically_sampled = (idx >= self.num_layers - self.num_critical) + use_fp16 = (sampling_rates[idx] * (2 ** self.num_fp16_res) > self.img_resolution) + layer = SynthesisLayer( + w_dim=self.w_dim, is_torgb=is_torgb, is_critically_sampled=is_critically_sampled, use_fp16=use_fp16, + in_channels=int(channels[prev]), out_channels= int(channels[idx]), + in_size=int(sizes[prev]), out_size=int(sizes[idx]), + in_sampling_rate=int(sampling_rates[prev]), out_sampling_rate=int(sampling_rates[idx]), + in_cutoff=cutoffs[prev], out_cutoff=cutoffs[idx], + in_half_width=half_widths[prev], out_half_width=half_widths[idx], + **layer_kwargs) + name = f'L{idx}_{layer.out_size[0]}_{layer.out_channels}' + setattr(self, name, layer) + self.layer_names.append(name) + + def forward(self, ws, **layer_kwargs): + misc.assert_shape(ws, [None, self.num_ws, self.w_dim]) + ws = ws.to(torch.float32).unbind(dim=1) + + # Execute layers. + x = self.input(ws[0]) + for name, w in zip(self.layer_names, ws[1:]): + x = getattr(self, name)(x, w, **layer_kwargs) + if self.output_scale != 1: + x = x * self.output_scale + + # Ensure correct shape and dtype. + misc.assert_shape(x, [None, self.img_channels, self.img_resolution, self.img_resolution]) + x = x.to(torch.float32) + return x + + def extra_repr(self): + return '\n'.join([ + f'w_dim={self.w_dim:d}, num_ws={self.num_ws:d},', + f'img_resolution={self.img_resolution:d}, img_channels={self.img_channels:d},', + f'num_layers={self.num_layers:d}, num_critical={self.num_critical:d},', + f'margin_size={self.margin_size:d}, num_fp16_res={self.num_fp16_res:d}']) + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class Generator(torch.nn.Module): + def __init__(self, + z_dim, # Input latent (Z) dimensionality. + c_dim, # Conditioning label (C) dimensionality. + w_dim, # Intermediate latent (W) dimensionality. + img_resolution, # Output resolution. + img_channels, # Number of output color channels. + mapping_kwargs = {}, # Arguments for MappingNetwork. + **synthesis_kwargs, # Arguments for SynthesisNetwork. 
+ ): + super().__init__() + self.z_dim = z_dim + self.c_dim = c_dim + self.w_dim = w_dim + self.img_resolution = img_resolution + self.img_channels = img_channels + self.synthesis = SynthesisNetwork(w_dim=w_dim, img_resolution=img_resolution, img_channels=img_channels, **synthesis_kwargs) + self.num_ws = self.synthesis.num_ws + self.mapping = MappingNetwork(z_dim=z_dim, c_dim=c_dim, w_dim=w_dim, num_ws=self.num_ws, **mapping_kwargs) + + def forward(self, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + img = self.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + return img + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/superresolution.py b/ThirdParty/eg3d/training/superresolution.py new file mode 100644 index 0000000000000000000000000000000000000000..43321df26c8b23d324b20e0d4652396c869f8fdf --- /dev/null +++ b/ThirdParty/eg3d/training/superresolution.py @@ -0,0 +1,292 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +"""Superresolution network architectures from the paper +"Efficient Geometry-aware 3D Generative Adversarial Networks".""" + +import torch +from training.networks_stylegan2 import Conv2dLayer, SynthesisLayer, ToRGBLayer +from torch_utils.ops import upfirdn2d +from torch_utils import persistence +from torch_utils import misc + +from training.networks_stylegan2 import SynthesisBlock +import numpy as np +from training.networks_stylegan3 import SynthesisLayer as AFSynthesisLayer + + +#---------------------------------------------------------------------------- + +# for 512x512 generation +@persistence.persistent_class +class SuperresolutionHybrid8X(torch.nn.Module): + def __init__(self, channels, img_resolution, sr_num_fp16_res, sr_antialias, + num_fp16_res=4, conv_clamp=None, channel_base=None, channel_max=None,# IGNORE + **block_kwargs): + super().__init__() + assert img_resolution == 512 + + use_fp16 = sr_num_fp16_res > 0 + self.input_resolution = 128 + self.sr_antialias = sr_antialias + self.block0 = SynthesisBlock(channels, 128, w_dim=512, resolution=256, + img_channels=3, is_last=False, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.block1 = SynthesisBlock(128, 64, w_dim=512, resolution=512, + img_channels=3, is_last=True, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + + def forward(self, rgb, x, ws, **block_kwargs): + ws = ws[:, -1:, :].repeat(1, 3, 1) + + if x.shape[-1] != self.input_resolution: + x = torch.nn.functional.interpolate(x, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + rgb = torch.nn.functional.interpolate(rgb, size=(self.input_resolution, self.input_resolution), + mode='bilinear', 
align_corners=False, antialias=self.sr_antialias) + + x, rgb = self.block0(x, rgb, ws, **block_kwargs) + x, rgb = self.block1(x, rgb, ws, **block_kwargs) + return rgb + +#---------------------------------------------------------------------------- + +# for 256x256 generation +@persistence.persistent_class +class SuperresolutionHybrid4X(torch.nn.Module): + def __init__(self, channels, img_resolution, sr_num_fp16_res, sr_antialias, + num_fp16_res=4, conv_clamp=None, channel_base=None, channel_max=None,# IGNORE + **block_kwargs): + super().__init__() + assert img_resolution == 256 + use_fp16 = sr_num_fp16_res > 0 + self.sr_antialias = sr_antialias + self.input_resolution = 128 + self.block0 = SynthesisBlockNoUp(channels, 128, w_dim=512, resolution=128, + img_channels=3, is_last=False, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.block1 = SynthesisBlock(128, 64, w_dim=512, resolution=256, + img_channels=3, is_last=True, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + + def forward(self, rgb, x, ws, **block_kwargs): + ws = ws[:, -1:, :].repeat(1, 3, 1) + + if x.shape[-1] < self.input_resolution: + x = torch.nn.functional.interpolate(x, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + rgb = torch.nn.functional.interpolate(rgb, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + + x, rgb = self.block0(x, rgb, ws, **block_kwargs) + x, rgb = self.block1(x, rgb, ws, **block_kwargs) + return rgb + +#---------------------------------------------------------------------------- + +# for 128 x 128 generation +@persistence.persistent_class +class SuperresolutionHybrid2X(torch.nn.Module): + def __init__(self, channels, img_resolution, sr_num_fp16_res, sr_antialias, + num_fp16_res=4, conv_clamp=None, channel_base=None, channel_max=None,# IGNORE + **block_kwargs): + super().__init__() + assert img_resolution == 128 + + use_fp16 = sr_num_fp16_res > 0 + self.input_resolution = 64 + self.sr_antialias = sr_antialias + self.block0 = SynthesisBlockNoUp(channels, 128, w_dim=512, resolution=64, + img_channels=3, is_last=False, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.block1 = SynthesisBlock(128, 64, w_dim=512, resolution=128, + img_channels=3, is_last=True, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + + def forward(self, rgb, x, ws, **block_kwargs): + ws = ws[:, -1:, :].repeat(1, 3, 1) + + if x.shape[-1] != self.input_resolution: + x = torch.nn.functional.interpolate(x, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + rgb = torch.nn.functional.interpolate(rgb, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + + x, rgb = self.block0(x, rgb, ws, **block_kwargs) + x, rgb = self.block1(x, rgb, ws, **block_kwargs) + return rgb + +#---------------------------------------------------------------------------- + +# TODO: Delete (here for backwards compatibility with old 256x256 models) +@persistence.persistent_class +class SuperresolutionHybridDeepfp32(torch.nn.Module): + def __init__(self, channels, img_resolution, sr_num_fp16_res, + 
num_fp16_res=4, conv_clamp=None, channel_base=None, channel_max=None,# IGNORE + **block_kwargs): + super().__init__() + assert img_resolution == 256 + use_fp16 = sr_num_fp16_res > 0 + + self.input_resolution = 128 + self.block0 = SynthesisBlockNoUp(channels, 128, w_dim=512, resolution=128, + img_channels=3, is_last=False, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.block1 = SynthesisBlock(128, 64, w_dim=512, resolution=256, + img_channels=3, is_last=True, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.register_buffer('resample_filter', upfirdn2d.setup_filter([1,3,3,1])) + + def forward(self, rgb, x, ws, **block_kwargs): + ws = ws[:, -1:, :].repeat(1, 3, 1) + + if x.shape[-1] < self.input_resolution: + x = torch.nn.functional.interpolate(x, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False) + rgb = torch.nn.functional.interpolate(rgb, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False) + + x, rgb = self.block0(x, rgb, ws, **block_kwargs) + x, rgb = self.block1(x, rgb, ws, **block_kwargs) + return rgb + +#---------------------------------------------------------------------------- + +@persistence.persistent_class +class SynthesisBlockNoUp(torch.nn.Module): + def __init__(self, + in_channels, # Number of input channels, 0 = first block. + out_channels, # Number of output channels. + w_dim, # Intermediate latent (W) dimensionality. + resolution, # Resolution of this block. + img_channels, # Number of output color channels. + is_last, # Is this the last block? + architecture = 'skip', # Architecture: 'orig', 'skip', 'resnet'. + resample_filter = [1,3,3,1], # Low-pass filter to apply when resampling activations. + conv_clamp = 256, # Clamp the output of convolution layers to +-X, None = disable clamping. + use_fp16 = False, # Use FP16 for this block? + fp16_channels_last = False, # Use channels-last memory format with FP16? + fused_modconv_default = True, # Default value of fused_modconv. 'inference_only' = True for inference, False for training. + **layer_kwargs, # Arguments for SynthesisLayer. 
+ ): + assert architecture in ['orig', 'skip', 'resnet'] + super().__init__() + self.in_channels = in_channels + self.w_dim = w_dim + self.resolution = resolution + self.img_channels = img_channels + self.is_last = is_last + self.architecture = architecture + self.use_fp16 = use_fp16 + self.channels_last = (use_fp16 and fp16_channels_last) + self.fused_modconv_default = fused_modconv_default + self.register_buffer('resample_filter', upfirdn2d.setup_filter(resample_filter)) + self.num_conv = 0 + self.num_torgb = 0 + + if in_channels == 0: + self.const = torch.nn.Parameter(torch.randn([out_channels, resolution, resolution])) + + if in_channels != 0: + self.conv0 = SynthesisLayer(in_channels, out_channels, w_dim=w_dim, resolution=resolution, + conv_clamp=conv_clamp, channels_last=self.channels_last, **layer_kwargs) + self.num_conv += 1 + + self.conv1 = SynthesisLayer(out_channels, out_channels, w_dim=w_dim, resolution=resolution, + conv_clamp=conv_clamp, channels_last=self.channels_last, **layer_kwargs) + self.num_conv += 1 + + if is_last or architecture == 'skip': + self.torgb = ToRGBLayer(out_channels, img_channels, w_dim=w_dim, + conv_clamp=conv_clamp, channels_last=self.channels_last) + self.num_torgb += 1 + + if in_channels != 0 and architecture == 'resnet': + self.skip = Conv2dLayer(in_channels, out_channels, kernel_size=1, bias=False, up=2, + resample_filter=resample_filter, channels_last=self.channels_last) + + def forward(self, x, img, ws, force_fp32=False, fused_modconv=None, update_emas=False, **layer_kwargs): + _ = update_emas # unused + misc.assert_shape(ws, [None, self.num_conv + self.num_torgb, self.w_dim]) + w_iter = iter(ws.unbind(dim=1)) + if ws.device.type != 'cuda': + force_fp32 = True + dtype = torch.float16 if self.use_fp16 and not force_fp32 else torch.float32 + memory_format = torch.channels_last if self.channels_last and not force_fp32 else torch.contiguous_format + if fused_modconv is None: + fused_modconv = self.fused_modconv_default + if fused_modconv == 'inference_only': + fused_modconv = (not self.training) + + # Input. + if self.in_channels == 0: + x = self.const.to(dtype=dtype, memory_format=memory_format) + x = x.unsqueeze(0).repeat([ws.shape[0], 1, 1, 1]) + else: + misc.assert_shape(x, [None, self.in_channels, self.resolution, self.resolution]) + x = x.to(dtype=dtype, memory_format=memory_format) + + # Main layers. + if self.in_channels == 0: + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + elif self.architecture == 'resnet': + y = self.skip(x, gain=np.sqrt(0.5)) + x = self.conv0(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, gain=np.sqrt(0.5), **layer_kwargs) + x = y.add_(x) + else: + x = self.conv0(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + x = self.conv1(x, next(w_iter), fused_modconv=fused_modconv, **layer_kwargs) + + # ToRGB. 
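+ # In the default 'skip' architecture each block emits an RGB contribution through its
+ # ToRGBLayer and accumulates it in fp32 onto the running image, roughly img = img + torgb(x, w).
+ # Because this block never upsamples, the usual upsample2d of the incoming image is not
+ # needed and is left commented out below.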
+ # if img is not None: + # misc.assert_shape(img, [None, self.img_channels, self.resolution // 2, self.resolution // 2]) + # img = upfirdn2d.upsample2d(img, self.resample_filter) + if self.is_last or self.architecture == 'skip': + y = self.torgb(x, next(w_iter), fused_modconv=fused_modconv) + y = y.to(dtype=torch.float32, memory_format=torch.contiguous_format) + img = img.add_(y) if img is not None else y + + assert x.dtype == dtype + assert img is None or img.dtype == torch.float32 + return x, img + + def extra_repr(self): + return f'resolution={self.resolution:d}, architecture={self.architecture:s}' + + +#---------------------------------------------------------------------------- + +# for 512x512 generation +@persistence.persistent_class +class SuperresolutionHybrid8XDC(torch.nn.Module): + def __init__(self, channels, img_resolution, sr_num_fp16_res, sr_antialias, + num_fp16_res=4, conv_clamp=None, channel_base=None, channel_max=None,# IGNORE + **block_kwargs): + super().__init__() + assert img_resolution == 512 + + use_fp16 = sr_num_fp16_res > 0 + self.input_resolution = 128 + self.sr_antialias = sr_antialias + self.block0 = SynthesisBlock(channels, 256, w_dim=512, resolution=256, + img_channels=3, is_last=False, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + self.block1 = SynthesisBlock(256, 128, w_dim=512, resolution=512, + img_channels=3, is_last=True, use_fp16=use_fp16, conv_clamp=(256 if use_fp16 else None), **block_kwargs) + + def forward(self, rgb, x, ws, **block_kwargs): + ws = ws[:, -1:, :].repeat(1, 3, 1) + + if x.shape[-1] != self.input_resolution: + x = torch.nn.functional.interpolate(x, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + rgb = torch.nn.functional.interpolate(rgb, size=(self.input_resolution, self.input_resolution), + mode='bilinear', align_corners=False, antialias=self.sr_antialias) + + x, rgb = self.block0(x, rgb, ws, **block_kwargs) + x, rgb = self.block1(x, rgb, ws, **block_kwargs) + return rgb + +#---------------------------------------------------------------------------- \ No newline at end of file diff --git a/ThirdParty/eg3d/training/training_loop.py b/ThirdParty/eg3d/training/training_loop.py new file mode 100644 index 0000000000000000000000000000000000000000..63526bd98e9343a25981f3cd67336d42ddbb2a15 --- /dev/null +++ b/ThirdParty/eg3d/training/training_loop.py @@ -0,0 +1,464 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +"""Main training loop.""" + +import os +import time +import copy +import json +import pickle +import psutil +import PIL.Image +import numpy as np +import torch +import dnnlib +from torch_utils import misc +from torch_utils import training_stats +from torch_utils.ops import conv2d_gradfix +from torch_utils.ops import grid_sample_gradfix + +import legacy +from metrics import metric_main +from camera_utils import LookAtPoseSampler +from training.crosssection_utils import sample_cross_section + +#---------------------------------------------------------------------------- + +def setup_snapshot_image_grid(training_set, random_seed=0): + rnd = np.random.RandomState(random_seed) + gw = np.clip(7680 // training_set.image_shape[2], 7, 32) + gh = np.clip(4320 // training_set.image_shape[1], 4, 32) + + # No labels => show random subset of training samples. + if not training_set.has_labels: + all_indices = list(range(len(training_set))) + rnd.shuffle(all_indices) + grid_indices = [all_indices[i % len(all_indices)] for i in range(gw * gh)] + + else: + # Group training samples by label. + label_groups = dict() # label => [idx, ...] + for idx in range(len(training_set)): + label = tuple(training_set.get_details(idx).raw_label.flat[::-1]) + if label not in label_groups: + label_groups[label] = [] + label_groups[label].append(idx) + + # Reorder. + label_order = list(label_groups.keys()) + rnd.shuffle(label_order) + for label in label_order: + rnd.shuffle(label_groups[label]) + + # Organize into grid. + grid_indices = [] + for y in range(gh): + label = label_order[y % len(label_order)] + indices = label_groups[label] + grid_indices += [indices[x % len(indices)] for x in range(gw)] + label_groups[label] = [indices[(i + gw) % len(indices)] for i in range(len(indices))] + + # Load data. + images, labels = zip(*[training_set[i] for i in grid_indices]) + return (gw, gh), np.stack(images), np.stack(labels) + +#---------------------------------------------------------------------------- + +def save_image_grid(img, fname, drange, grid_size): + lo, hi = drange + img = np.asarray(img, dtype=np.float32) + img = (img - lo) * (255 / (hi - lo)) + img = np.rint(img).clip(0, 255).astype(np.uint8) + + gw, gh = grid_size + _N, C, H, W = img.shape + img = img.reshape([gh, gw, C, H, W]) + img = img.transpose(0, 3, 1, 4, 2) + img = img.reshape([gh * H, gw * W, C]) + + assert C in [1, 3] + if C == 1: + PIL.Image.fromarray(img[:, :, 0], 'L').save(fname) + if C == 3: + PIL.Image.fromarray(img, 'RGB').save(fname) + +#---------------------------------------------------------------------------- + +def training_loop( + run_dir = '.', # Output directory. + training_set_kwargs = {}, # Options for training set. + data_loader_kwargs = {}, # Options for torch.utils.data.DataLoader. + G_kwargs = {}, # Options for generator network. + D_kwargs = {}, # Options for discriminator network. + G_opt_kwargs = {}, # Options for generator optimizer. + D_opt_kwargs = {}, # Options for discriminator optimizer. + augment_kwargs = None, # Options for augmentation pipeline. None = disable. + loss_kwargs = {}, # Options for loss function. + metrics = [], # Metrics to evaluate during training. + random_seed = 0, # Global random seed. + num_gpus = 1, # Number of GPUs participating in the training. + rank = 0, # Rank of the current process in [0, num_gpus[. + batch_size = 4, # Total batch size for one training iteration. Can be larger than batch_gpu * num_gpus. + batch_gpu = 4, # Number of samples processed at a time by one GPU. 
+ ema_kimg = 10, # Half-life of the exponential moving average (EMA) of generator weights. + ema_rampup = 0.05, # EMA ramp-up coefficient. None = no rampup. + G_reg_interval = None, # How often to perform regularization for G? None = disable lazy regularization. + D_reg_interval = 16, # How often to perform regularization for D? None = disable lazy regularization. + augment_p = 0, # Initial value of augmentation probability. + ada_target = None, # ADA target value. None = fixed p. + ada_interval = 4, # How often to perform ADA adjustment? + ada_kimg = 500, # ADA adjustment speed, measured in how many kimg it takes for p to increase/decrease by one unit. + total_kimg = 25000, # Total length of the training, measured in thousands of real images. + kimg_per_tick = 4, # Progress snapshot interval. + image_snapshot_ticks = 50, # How often to save image snapshots? None = disable. + network_snapshot_ticks = 50, # How often to save network snapshots? None = disable. + resume_pkl = None, # Network pickle to resume training from. + resume_kimg = 0, # First kimg to report when resuming training. + cudnn_benchmark = True, # Enable torch.backends.cudnn.benchmark? + abort_fn = None, # Callback function for determining whether to abort training. Must return consistent results across ranks. + progress_fn = None, # Callback function for updating training progress. Called for all ranks. +): + # Initialize. + start_time = time.time() + device = torch.device('cuda', rank) + np.random.seed(random_seed * num_gpus + rank) + torch.manual_seed(random_seed * num_gpus + rank) + torch.backends.cudnn.benchmark = cudnn_benchmark # Improves training speed. + torch.backends.cuda.matmul.allow_tf32 = False # Improves numerical accuracy. + torch.backends.cudnn.allow_tf32 = False # Improves numerical accuracy. + torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = False # Improves numerical accuracy. + conv2d_gradfix.enabled = True # Improves training speed. # TODO: ENABLE + grid_sample_gradfix.enabled = False # Avoids errors with the augmentation pipe. + + # Load training set. + if rank == 0: + print('Loading training set...') + training_set = dnnlib.util.construct_class_by_name(**training_set_kwargs) # subclass of training.dataset.Dataset + training_set_sampler = misc.InfiniteSampler(dataset=training_set, rank=rank, num_replicas=num_gpus, seed=random_seed) + training_set_iterator = iter(torch.utils.data.DataLoader(dataset=training_set, sampler=training_set_sampler, batch_size=batch_size//num_gpus, **data_loader_kwargs)) + if rank == 0: + print() + print('Num images: ', len(training_set)) + print('Image shape:', training_set.image_shape) + print('Label shape:', training_set.label_shape) + print() + + # Construct networks. + if rank == 0: + print('Constructing networks...') + common_kwargs = dict(c_dim=training_set.label_dim, img_resolution=training_set.resolution, img_channels=training_set.num_channels) + G = dnnlib.util.construct_class_by_name(**G_kwargs, **common_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module + G.register_buffer('dataset_label_std', torch.tensor(training_set.get_label_std()).to(device)) + D = dnnlib.util.construct_class_by_name(**D_kwargs, **common_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module + G_ema = copy.deepcopy(G).eval() + + # Resume from existing pickle. 
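+ # Only rank 0 reads the resume pickle; copy_params_and_buffers(..., require_all=False)
+ # copies tensors by name, so a checkpoint whose module layout differs slightly can still
+ # be loaded partially. The per-parameter broadcast in the 'Distribute across GPUs' step
+ # below then propagates the resumed weights to the other ranks.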
+ if (resume_pkl is not None) and (rank == 0): + print(f'Resuming from "{resume_pkl}"') + with dnnlib.util.open_url(resume_pkl) as f: + resume_data = legacy.load_network_pkl(f) + for name, module in [('G', G), ('D', D), ('G_ema', G_ema)]: + misc.copy_params_and_buffers(resume_data[name], module, require_all=False) + + # Print network summary tables. + if rank == 0: + z = torch.empty([batch_gpu, G.z_dim], device=device) + c = torch.empty([batch_gpu, G.c_dim], device=device) + img = misc.print_module_summary(G, [z, c]) + misc.print_module_summary(D, [img, c]) + + # Setup augmentation. + if rank == 0: + print('Setting up augmentation...') + augment_pipe = None + ada_stats = None + if (augment_kwargs is not None) and (augment_p > 0 or ada_target is not None): + augment_pipe = dnnlib.util.construct_class_by_name(**augment_kwargs).train().requires_grad_(False).to(device) # subclass of torch.nn.Module + augment_pipe.p.copy_(torch.as_tensor(augment_p)) + if ada_target is not None: + ada_stats = training_stats.Collector(regex='Loss/signs/real') + + # Distribute across GPUs. + if rank == 0: + print(f'Distributing across {num_gpus} GPUs...') + for module in [G, D, G_ema, augment_pipe]: + if module is not None: + for param in misc.params_and_buffers(module): + if param.numel() > 0 and num_gpus > 1: + torch.distributed.broadcast(param, src=0) + + # Setup training phases. + if rank == 0: + print('Setting up training phases...') + loss = dnnlib.util.construct_class_by_name(device=device, G=G, D=D, augment_pipe=augment_pipe, **loss_kwargs) # subclass of training.loss.Loss + phases = [] + for name, module, opt_kwargs, reg_interval in [('G', G, G_opt_kwargs, G_reg_interval), ('D', D, D_opt_kwargs, D_reg_interval)]: + if reg_interval is None: + opt = dnnlib.util.construct_class_by_name(params=module.parameters(), **opt_kwargs) # subclass of torch.optim.Optimizer + phases += [dnnlib.EasyDict(name=name+'both', module=module, opt=opt, interval=1)] + else: # Lazy regularization. + mb_ratio = reg_interval / (reg_interval + 1) + opt_kwargs = dnnlib.EasyDict(opt_kwargs) + opt_kwargs.lr = opt_kwargs.lr * mb_ratio + opt_kwargs.betas = [beta ** mb_ratio for beta in opt_kwargs.betas] + opt = dnnlib.util.construct_class_by_name(module.parameters(), **opt_kwargs) # subclass of torch.optim.Optimizer + phases += [dnnlib.EasyDict(name=name+'main', module=module, opt=opt, interval=1)] + phases += [dnnlib.EasyDict(name=name+'reg', module=module, opt=opt, interval=reg_interval)] + for phase in phases: + phase.start_event = None + phase.end_event = None + if rank == 0: + phase.start_event = torch.cuda.Event(enable_timing=True) + phase.end_event = torch.cuda.Event(enable_timing=True) + + # Export sample images. + grid_size = None + grid_z = None + grid_c = None + if rank == 0: + print('Exporting sample images...') + grid_size, images, labels = setup_snapshot_image_grid(training_set=training_set) + save_image_grid(images, os.path.join(run_dir, 'reals.png'), drange=[0,255], grid_size=grid_size) + grid_z = torch.randn([labels.shape[0], G.z_dim], device=device).split(batch_gpu) + grid_c = torch.from_numpy(labels).to(device).split(batch_gpu) + + # Initialize logs. 
+ if rank == 0: + print('Initializing logs...') + stats_collector = training_stats.Collector(regex='.*') + stats_metrics = dict() + stats_jsonl = None + stats_tfevents = None + if rank == 0: + stats_jsonl = open(os.path.join(run_dir, 'stats.jsonl'), 'wt') + try: + import torch.utils.tensorboard as tensorboard + stats_tfevents = tensorboard.SummaryWriter(run_dir) + except ImportError as err: + print('Skipping tfevents export:', err) + + # Train. + if rank == 0: + print(f'Training for {total_kimg} kimg...') + print() + cur_nimg = resume_kimg * 1000 + cur_tick = 0 + tick_start_nimg = cur_nimg + tick_start_time = time.time() + maintenance_time = tick_start_time - start_time + batch_idx = 0 + if progress_fn is not None: + progress_fn(0, total_kimg) + while True: + + # Fetch training data. + with torch.autograd.profiler.record_function('data_fetch'): + phase_real_img, phase_real_c = next(training_set_iterator) + phase_real_img = (phase_real_img.to(device).to(torch.float32) / 127.5 - 1).split(batch_gpu) + phase_real_c = phase_real_c.to(device).split(batch_gpu) + all_gen_z = torch.randn([len(phases) * batch_size, G.z_dim], device=device) + all_gen_z = [phase_gen_z.split(batch_gpu) for phase_gen_z in all_gen_z.split(batch_size)] + all_gen_c = [training_set.get_label(np.random.randint(len(training_set))) for _ in range(len(phases) * batch_size)] + all_gen_c = torch.from_numpy(np.stack(all_gen_c)).pin_memory().to(device) + all_gen_c = [phase_gen_c.split(batch_gpu) for phase_gen_c in all_gen_c.split(batch_size)] + + # Execute training phases. + for phase, phase_gen_z, phase_gen_c in zip(phases, all_gen_z, all_gen_c): + if batch_idx % phase.interval != 0: + continue + if phase.start_event is not None: + phase.start_event.record(torch.cuda.current_stream(device)) + + # Accumulate gradients. + phase.opt.zero_grad(set_to_none=True) + phase.module.requires_grad_(True) + for real_img, real_c, gen_z, gen_c in zip(phase_real_img, phase_real_c, phase_gen_z, phase_gen_c): + loss.accumulate_gradients(phase=phase.name, real_img=real_img, real_c=real_c, gen_z=gen_z, gen_c=gen_c, gain=phase.interval, cur_nimg=cur_nimg) + phase.module.requires_grad_(False) + + # Update weights. + with torch.autograd.profiler.record_function(phase.name + '_opt'): + params = [param for param in phase.module.parameters() if param.numel() > 0 and param.grad is not None] + if len(params) > 0: + flat = torch.cat([param.grad.flatten() for param in params]) + if num_gpus > 1: + torch.distributed.all_reduce(flat) + flat /= num_gpus + misc.nan_to_num(flat, nan=0, posinf=1e5, neginf=-1e5, out=flat) + grads = flat.split([param.numel() for param in params]) + for param, grad in zip(params, grads): + param.grad = grad.reshape(param.shape) + phase.opt.step() + + # Phase done. + if phase.end_event is not None: + phase.end_event.record(torch.cuda.current_stream(device)) + + # Update G_ema. + with torch.autograd.profiler.record_function('Gema'): + ema_nimg = ema_kimg * 1000 + if ema_rampup is not None: + ema_nimg = min(ema_nimg, cur_nimg * ema_rampup) + ema_beta = 0.5 ** (batch_size / max(ema_nimg, 1e-8)) + for p_ema, p in zip(G_ema.parameters(), G.parameters()): + p_ema.copy_(p.lerp(p_ema, ema_beta)) + for b_ema, b in zip(G_ema.buffers(), G.buffers()): + b_ema.copy_(b) + G_ema.neural_rendering_resolution = G.neural_rendering_resolution + G_ema.rendering_kwargs = G.rendering_kwargs.copy() + + # Update state. + cur_nimg += batch_size + batch_idx += 1 + + # Execute ADA heuristic. 
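+ # The ADA controller nudges the augmentation probability p so that the mean sign of D's
+ # outputs on real images tracks ada_target. Each adjustment moves p by
+ # sign(measured - target) * (batch_size * ada_interval) / (ada_kimg * 1000), clamped at 0
+ # from below; for example, with batch_size=32, ada_interval=4 and ada_kimg=500 a single
+ # step changes p by 128 / 500000 = 2.56e-4, so p can traverse one full unit in roughly
+ # ada_kimg thousand images.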
+ if (ada_stats is not None) and (batch_idx % ada_interval == 0): + ada_stats.update() + adjust = np.sign(ada_stats['Loss/signs/real'] - ada_target) * (batch_size * ada_interval) / (ada_kimg * 1000) + augment_pipe.p.copy_((augment_pipe.p + adjust).max(misc.constant(0, device=device))) + + # Perform maintenance tasks once per tick. + done = (cur_nimg >= total_kimg * 1000) + if (not done) and (cur_tick != 0) and (cur_nimg < tick_start_nimg + kimg_per_tick * 1000): + continue + + # Print status line, accumulating the same information in training_stats. + tick_end_time = time.time() + fields = [] + fields += [f"tick {training_stats.report0('Progress/tick', cur_tick):<5d}"] + fields += [f"kimg {training_stats.report0('Progress/kimg', cur_nimg / 1e3):<8.1f}"] + fields += [f"time {dnnlib.util.format_time(training_stats.report0('Timing/total_sec', tick_end_time - start_time)):<12s}"] + fields += [f"sec/tick {training_stats.report0('Timing/sec_per_tick', tick_end_time - tick_start_time):<7.1f}"] + fields += [f"sec/kimg {training_stats.report0('Timing/sec_per_kimg', (tick_end_time - tick_start_time) / (cur_nimg - tick_start_nimg) * 1e3):<7.2f}"] + fields += [f"maintenance {training_stats.report0('Timing/maintenance_sec', maintenance_time):<6.1f}"] + fields += [f"cpumem {training_stats.report0('Resources/cpu_mem_gb', psutil.Process(os.getpid()).memory_info().rss / 2**30):<6.2f}"] + fields += [f"gpumem {training_stats.report0('Resources/peak_gpu_mem_gb', torch.cuda.max_memory_allocated(device) / 2**30):<6.2f}"] + fields += [f"reserved {training_stats.report0('Resources/peak_gpu_mem_reserved_gb', torch.cuda.max_memory_reserved(device) / 2**30):<6.2f}"] + torch.cuda.reset_peak_memory_stats() + fields += [f"augment {training_stats.report0('Progress/augment', float(augment_pipe.p.cpu()) if augment_pipe is not None else 0):.3f}"] + training_stats.report0('Timing/total_hours', (tick_end_time - start_time) / (60 * 60)) + training_stats.report0('Timing/total_days', (tick_end_time - start_time) / (24 * 60 * 60)) + if rank == 0: + print(' '.join(fields)) + + # Check for abort. + if (not done) and (abort_fn is not None) and abort_fn(): + done = True + if rank == 0: + print() + print('Aborting...') + + # Save image snapshot. 
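+ # Image snapshots reuse the fixed grid_z / grid_c prepared during setup and render with
+ # noise_mode='const', so successive fakesNNNNNN.png grids are directly comparable across
+ # training; the depth channel is negated before saving so that nearer surfaces appear brighter.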
+ if (rank == 0) and (image_snapshot_ticks is not None) and (done or cur_tick % image_snapshot_ticks == 0): + out = [G_ema(z=z, c=c, noise_mode='const') for z, c in zip(grid_z, grid_c)] + images = torch.cat([o['image'].cpu() for o in out]).numpy() + images_raw = torch.cat([o['image_raw'].cpu() for o in out]).numpy() + images_depth = -torch.cat([o['image_depth'].cpu() for o in out]).numpy() + save_image_grid(images, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}.png'), drange=[-1,1], grid_size=grid_size) + save_image_grid(images_raw, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_raw.png'), drange=[-1,1], grid_size=grid_size) + save_image_grid(images_depth, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_depth.png'), drange=[images_depth.min(), images_depth.max()], grid_size=grid_size) + + #-------------------- + # # Log forward-conditioned images + + # forward_cam2world_pose = LookAtPoseSampler.sample(3.14/2, 3.14/2, torch.tensor([0, 0, 0.2], device=device), radius=2.7, device=device) + # intrinsics = torch.tensor([[4.2647, 0, 0.5], [0, 4.2647, 0.5], [0, 0, 1]], device=device) + # forward_label = torch.cat([forward_cam2world_pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1) + + # grid_ws = [G_ema.mapping(z, forward_label.expand(z.shape[0], -1)) for z, c in zip(grid_z, grid_c)] + # out = [G_ema.synthesis(ws, c=c, noise_mode='const') for ws, c in zip(grid_ws, grid_c)] + + # images = torch.cat([o['image'].cpu() for o in out]).numpy() + # images_raw = torch.cat([o['image_raw'].cpu() for o in out]).numpy() + # images_depth = -torch.cat([o['image_depth'].cpu() for o in out]).numpy() + # save_image_grid(images, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_f.png'), drange=[-1,1], grid_size=grid_size) + # save_image_grid(images_raw, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_raw_f.png'), drange=[-1,1], grid_size=grid_size) + # save_image_grid(images_depth, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_depth_f.png'), drange=[images_depth.min(), images_depth.max()], grid_size=grid_size) + + #-------------------- + # # Log Cross sections + + # grid_ws = [G_ema.mapping(z, c.expand(z.shape[0], -1)) for z, c in zip(grid_z, grid_c)] + # out = [sample_cross_section(G_ema, ws, w=G.rendering_kwargs['box_warp']) for ws, c in zip(grid_ws, grid_c)] + # crossections = torch.cat([o.cpu() for o in out]).numpy() + # save_image_grid(crossections, os.path.join(run_dir, f'fakes{cur_nimg//1000:06d}_crossection.png'), drange=[-50,100], grid_size=grid_size) + + # Save network snapshot. + snapshot_pkl = None + snapshot_data = None + if (network_snapshot_ticks is not None) and (done or cur_tick % network_snapshot_ticks == 0): + snapshot_data = dict(training_set_kwargs=dict(training_set_kwargs)) + for name, module in [('G', G), ('D', D), ('G_ema', G_ema), ('augment_pipe', augment_pipe)]: + if module is not None: + if num_gpus > 1: + misc.check_ddp_consistency(module, ignore_regex=r'.*\.[^.]+_(avg|ema)') + module = copy.deepcopy(module).eval().requires_grad_(False).cpu() + snapshot_data[name] = module + del module # conserve memory + snapshot_pkl = os.path.join(run_dir, f'network-snapshot-{cur_nimg//1000:06d}.pkl') + if rank == 0: + with open(snapshot_pkl, 'wb') as f: + pickle.dump(snapshot_data, f) + + # Evaluate metrics. 
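+ # Metrics are computed from the snapshot copies (snapshot_data['G_ema'] etc.), which were
+ # deep-copied to CPU with gradients disabled above, so evaluation never touches the live
+ # training modules; results are folded into stats_metrics and written to TensorBoard
+ # under 'Metrics/<name>'.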
+ if (snapshot_data is not None) and (len(metrics) > 0): + if rank == 0: + print(run_dir) + print('Evaluating metrics...') + for metric in metrics: + result_dict = metric_main.calc_metric(metric=metric, G=snapshot_data['G_ema'], + dataset_kwargs=training_set_kwargs, num_gpus=num_gpus, rank=rank, device=device) + if rank == 0: + metric_main.report_metric(result_dict, run_dir=run_dir, snapshot_pkl=snapshot_pkl) + stats_metrics.update(result_dict.results) + del snapshot_data # conserve memory + + # Collect statistics. + for phase in phases: + value = [] + if (phase.start_event is not None) and (phase.end_event is not None): + phase.end_event.synchronize() + value = phase.start_event.elapsed_time(phase.end_event) + training_stats.report0('Timing/' + phase.name, value) + stats_collector.update() + stats_dict = stats_collector.as_dict() + + # Update logs. + timestamp = time.time() + if stats_jsonl is not None: + fields = dict(stats_dict, timestamp=timestamp) + stats_jsonl.write(json.dumps(fields) + '\n') + stats_jsonl.flush() + if stats_tfevents is not None: + global_step = int(cur_nimg / 1e3) + walltime = timestamp - start_time + for name, value in stats_dict.items(): + stats_tfevents.add_scalar(name, value.mean, global_step=global_step, walltime=walltime) + for name, value in stats_metrics.items(): + stats_tfevents.add_scalar(f'Metrics/{name}', value, global_step=global_step, walltime=walltime) + stats_tfevents.flush() + if progress_fn is not None: + progress_fn(cur_nimg // 1000, total_kimg) + + # Update state. + cur_tick += 1 + tick_start_nimg = cur_nimg + tick_start_time = time.time() + maintenance_time = tick_start_time - tick_end_time + if done: + break + + # Done. + if rank == 0: + print() + print('Exiting...') + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/training/triplane.py b/ThirdParty/eg3d/training/triplane.py new file mode 100644 index 0000000000000000000000000000000000000000..22c1e16c50247f5e871f33da5d6355a06955c024 --- /dev/null +++ b/ThirdParty/eg3d/training/triplane.py @@ -0,0 +1,138 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import torch +from torch_utils import persistence +from training.networks_stylegan2 import Generator as StyleGAN2Backbone +# from training.volumetric_rendering.renderer import ImportanceRenderer +# from training.volumetric_rendering.ray_sampler import RaySampler +import dnnlib + +@persistence.persistent_class +class TriPlaneGenerator(torch.nn.Module): + def __init__(self, + z_dim, # Input latent (Z) dimensionality. + c_dim, # Conditioning label (C) dimensionality. + w_dim, # Intermediate latent (W) dimensionality. + # img_resolution, # Output resolution. + # img_channels, # Number of output color channels. + # sr_num_fp16_res = 0, + mapping_kwargs = {}, # Arguments for MappingNetwork. + # rendering_kwargs = {}, + # sr_kwargs = {}, + **synthesis_kwargs, # Arguments for SynthesisNetwork. 
+ ): + super().__init__() + self.z_dim=z_dim + self.c_dim=c_dim + self.w_dim=w_dim + # self.img_resolution=img_resolution + # self.img_channels=img_channels + # self.renderer = ImportanceRenderer() + # self.ray_sampler = RaySampler() + self.backbone = StyleGAN2Backbone(z_dim, c_dim, w_dim, img_resolution=256, img_channels=32*3, mapping_kwargs=mapping_kwargs, **synthesis_kwargs) + # self.superresolution = dnnlib.util.construct_class_by_name(class_name=rendering_kwargs['superresolution_module'], channels=32, img_resolution=img_resolution, sr_num_fp16_res=sr_num_fp16_res, sr_antialias=rendering_kwargs['sr_antialias'], **sr_kwargs) + self.decoder = OSGDecoder(32, {'decoder_output_dim': 0}) + # self.neural_rendering_resolution = 64 + # self.rendering_kwargs = rendering_kwargs + + self._last_planes = None + + def mapping(self, z, c=None, truncation_psi=1, truncation_cutoff=None, update_emas=False): + # if self.rendering_kwargs['c_gen_conditioning_zero']: + # c = torch.zeros_like(c) + # return self.backbone.mapping(z, c * self.rendering_kwargs.get('c_scale', 0), truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + return self.backbone.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + + def synthesis(self, ws, c=None, neural_rendering_resolution=None, update_emas=False, cache_backbone=False, use_cached_backbone=False, **synthesis_kwargs): + # cam2world_matrix = c[:, :16].view(-1, 4, 4) + # intrinsics = c[:, 16:25].view(-1, 3, 3) + + # if neural_rendering_resolution is None: + # neural_rendering_resolution = self.neural_rendering_resolution + # else: + # self.neural_rendering_resolution = neural_rendering_resolution + + # Create a batch of rays for volume rendering + # ray_origins, ray_directions = self.ray_sampler(cam2world_matrix, intrinsics, neural_rendering_resolution) + + # Create triplanes by running StyleGAN backbone + # N, M, _ = ray_origins.shape + if use_cached_backbone and self._last_planes is not None: + planes = self._last_planes + else: + planes = self.backbone.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + if cache_backbone: + self._last_planes = planes + + # Reshape output into three 32-channel planes + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return planes + + # Perform volume rendering + feature_samples, depth_samples, weights_samples = self.renderer(planes, self.decoder, ray_origins, ray_directions, self.rendering_kwargs) # channels last + + # Reshape into 'raw' neural-rendered image + H = W = self.neural_rendering_resolution + feature_image = feature_samples.permute(0, 2, 1).reshape(N, feature_samples.shape[-1], H, W).contiguous() + depth_image = depth_samples.permute(0, 2, 1).reshape(N, 1, H, W) + + # Run superresolution to get final image + rgb_image = feature_image[:, :3] + sr_image = self.superresolution(rgb_image, feature_image, ws, noise_mode=self.rendering_kwargs['superresolution_noise_mode'], **{k:synthesis_kwargs[k] for k in synthesis_kwargs.keys() if k != 'noise_mode'}) + + return {'image': sr_image, 'image_raw': rgb_image, 'image_depth': depth_image} + + def sample(self, coordinates, directions, z, c, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + # Compute RGB features, density for arbitrary 3D coordinates. Mostly used for extracting shapes. 
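+ # Note: in this trimmed-down variant __init__ never creates self.renderer or
+ # self.rendering_kwargs (the volume-rendering imports are commented out above), so this
+ # method and sample_mixed() below are effectively unused leftovers from upstream EG3D.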
+ ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + planes = self.backbone.synthesis(ws, update_emas=update_emas, **synthesis_kwargs) + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return self.renderer.run_model(planes, self.decoder, coordinates, directions, self.rendering_kwargs) + + def sample_mixed(self, coordinates, directions, ws, truncation_psi=1, truncation_cutoff=None, update_emas=False, **synthesis_kwargs): + # Same as sample, but expects latent vectors 'ws' instead of Gaussian noise 'z' + planes = self.backbone.synthesis(ws, update_emas = update_emas, **synthesis_kwargs) + planes = planes.view(len(planes), 3, 32, planes.shape[-2], planes.shape[-1]) + return self.renderer.run_model(planes, self.decoder, coordinates, directions, self.rendering_kwargs) + + def forward(self, z, c=None, truncation_psi=1, truncation_cutoff=None, neural_rendering_resolution=None, update_emas=False, cache_backbone=False, use_cached_backbone=False, **synthesis_kwargs): + # Render a batch of generated images. + ws = self.mapping(z, c, truncation_psi=truncation_psi, truncation_cutoff=truncation_cutoff, update_emas=update_emas) + return self.synthesis(ws, c, update_emas=update_emas, neural_rendering_resolution=neural_rendering_resolution, cache_backbone=cache_backbone, use_cached_backbone=use_cached_backbone, **synthesis_kwargs) + + +from training.networks_stylegan2 import FullyConnectedLayer + +class OSGDecoder(torch.nn.Module): + def __init__(self, n_features, options): + super().__init__() + self.hidden_dim = 64 + + self.net = torch.nn.Sequential( + FullyConnectedLayer(n_features, self.hidden_dim), + torch.nn.Softplus(), + FullyConnectedLayer(self.hidden_dim, 1 + options['decoder_output_dim']) + ) + + def forward(self, sampled_features, ray_directions=None): + # Aggregate features + sampled_features = sampled_features.mean(1) + x = sampled_features + + N, M, C = x.shape + x = x.view(N*M, C) + + x = self.net(x) + x = x.view(N, M, -1) + return x  # NOTE: early return of raw per-point features; the rgb/sigma decoding below is unreachable in this variant + rgb = torch.sigmoid(x[..., 1:])*(1 + 2*0.001) - 0.001 # Uses sigmoid clamping from MipNeRF + sigma = x[..., 0:1] + return {'rgb': rgb, 'sigma': sigma} diff --git a/ThirdParty/eg3d/training/volumetric_rendering/__init__.py b/ThirdParty/eg3d/training/volumetric_rendering/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..daba66567a95beabb103f7996198a9675ab20b4a --- /dev/null +++ b/ThirdParty/eg3d/training/volumetric_rendering/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited.
+ +# empty \ No newline at end of file diff --git a/ThirdParty/eg3d/training/volumetric_rendering/math_utils.py b/ThirdParty/eg3d/training/volumetric_rendering/math_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4cf9d2b811e0acbc7923bc9126e010b52cb1a8af --- /dev/null +++ b/ThirdParty/eg3d/training/volumetric_rendering/math_utils.py @@ -0,0 +1,118 @@ +# MIT License + +# Copyright (c) 2022 Petr Kellnhofer + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import torch + +def transform_vectors(matrix: torch.Tensor, vectors4: torch.Tensor) -> torch.Tensor: + """ + Left-multiplies MxM @ NxM. Returns NxM. + """ + res = torch.matmul(vectors4, matrix.T) + return res + + +def normalize_vecs(vectors: torch.Tensor) -> torch.Tensor: + """ + Normalize vector lengths. + """ + return vectors / (torch.norm(vectors, dim=-1, keepdim=True)) + +def torch_dot(x: torch.Tensor, y: torch.Tensor): + """ + Dot product of two tensors. + """ + return (x * y).sum(-1) + + +def get_ray_limits_box(rays_o: torch.Tensor, rays_d: torch.Tensor, box_side_length): + """ + Author: Petr Kellnhofer + Intersects rays with the [-1, 1] NDC volume. + Returns min and max distance of entry. + Returns -1 for no intersection. + https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-box-intersection + """ + o_shape = rays_o.shape + rays_o = rays_o.detach().reshape(-1, 3) + rays_d = rays_d.detach().reshape(-1, 3) + + + bb_min = [-1*(box_side_length/2), -1*(box_side_length/2), -1*(box_side_length/2)] + bb_max = [1*(box_side_length/2), 1*(box_side_length/2), 1*(box_side_length/2)] + bounds = torch.tensor([bb_min, bb_max], dtype=rays_o.dtype, device=rays_o.device) + is_valid = torch.ones(rays_o.shape[:-1], dtype=bool, device=rays_o.device) + + # Precompute inverse for stability. + invdir = 1 / rays_d + sign = (invdir < 0).long() + + # Intersect with YZ plane. + tmin = (bounds.index_select(0, sign[..., 0])[..., 0] - rays_o[..., 0]) * invdir[..., 0] + tmax = (bounds.index_select(0, 1 - sign[..., 0])[..., 0] - rays_o[..., 0]) * invdir[..., 0] + + # Intersect with XZ plane. + tymin = (bounds.index_select(0, sign[..., 1])[..., 1] - rays_o[..., 1]) * invdir[..., 1] + tymax = (bounds.index_select(0, 1 - sign[..., 1])[..., 1] - rays_o[..., 1]) * invdir[..., 1] + + # Resolve parallel rays. + is_valid[torch.logical_or(tmin > tymax, tymin > tmax)] = False + + # Use the shortest intersection. 
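+ # This is the classic slab test: each axis contributes an [entry, exit] interval along the
+ # ray, and the ray hits the box only if the intervals overlap, i.e. the largest per-axis
+ # entry is still smaller than the smallest per-axis exit. Misses are flagged further below
+ # by writing tmin = -1 and tmax = -2 so that tmax < tmin downstream.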
+ tmin = torch.max(tmin, tymin) + tmax = torch.min(tmax, tymax) + + # Intersect with XY plane. + tzmin = (bounds.index_select(0, sign[..., 2])[..., 2] - rays_o[..., 2]) * invdir[..., 2] + tzmax = (bounds.index_select(0, 1 - sign[..., 2])[..., 2] - rays_o[..., 2]) * invdir[..., 2] + + # Resolve parallel rays. + is_valid[torch.logical_or(tmin > tzmax, tzmin > tmax)] = False + + # Use the shortest intersection. + tmin = torch.max(tmin, tzmin) + tmax = torch.min(tmax, tzmax) + + # Mark invalid. + tmin[torch.logical_not(is_valid)] = -1 + tmax[torch.logical_not(is_valid)] = -2 + + return tmin.reshape(*o_shape[:-1], 1), tmax.reshape(*o_shape[:-1], 1) + + +def linspace(start: torch.Tensor, stop: torch.Tensor, num: int): + """ + Creates a tensor of shape [num, *start.shape] whose values are evenly spaced from start to end, inclusive. + Replicates but the multi-dimensional bahaviour of numpy.linspace in PyTorch. + """ + # create a tensor of 'num' steps from 0 to 1 + steps = torch.arange(num, dtype=torch.float32, device=start.device) / (num - 1) + + # reshape the 'steps' tensor to [-1, *([1]*start.ndim)] to allow for broadcastings + # - using 'steps.reshape([-1, *([1]*start.ndim)])' would be nice here but torchscript + # "cannot statically infer the expected size of a list in this contex", hence the code below + for i in range(start.ndim): + steps = steps.unsqueeze(-1) + + # the output starts at 'start' and increments until 'stop' in each dimension + out = start[None] + steps * (stop - start)[None] + + return out diff --git a/ThirdParty/eg3d/training/volumetric_rendering/ray_marcher.py b/ThirdParty/eg3d/training/volumetric_rendering/ray_marcher.py new file mode 100644 index 0000000000000000000000000000000000000000..c2c427f7499adf3d2a456d2a1f2d2724daa04621 --- /dev/null +++ b/ThirdParty/eg3d/training/volumetric_rendering/ray_marcher.py @@ -0,0 +1,63 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +""" +The ray marcher takes the raw output of the implicit representation and uses the volume rendering equation to produce composited colors and depths. +Based off of the implementation in MipNeRF (this one doesn't do any cone tracing though!) +""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + +class MipRayMarcher2(nn.Module): + def __init__(self): + super().__init__() + + + def run_forward(self, colors, densities, depths, rendering_options): + deltas = depths[:, :, 1:] - depths[:, :, :-1] + colors_mid = (colors[:, :, :-1] + colors[:, :, 1:]) / 2 + densities_mid = (densities[:, :, :-1] + densities[:, :, 1:]) / 2 + depths_mid = (depths[:, :, :-1] + depths[:, :, 1:]) / 2 + + + if rendering_options['clamp_mode'] == 'softplus': + densities_mid = F.softplus(densities_mid - 1) # activation bias of -1 makes things initialize better + else: + assert False, "MipRayMarcher only supports `clamp_mode`=`softplus`!" 
+ + density_delta = densities_mid * deltas + + alpha = 1 - torch.exp(-density_delta) + + alpha_shifted = torch.cat([torch.ones_like(alpha[:, :, :1]), 1-alpha + 1e-10], -2) + weights = alpha * torch.cumprod(alpha_shifted, -2)[:, :, :-1] + + composite_rgb = torch.sum(weights * colors_mid, -2) + weight_total = weights.sum(2) + composite_depth = torch.sum(weights * depths_mid, -2) / weight_total + + # clip the composite to min/max range of depths + composite_depth = torch.nan_to_num(composite_depth, float('inf')) + composite_depth = torch.clamp(composite_depth, torch.min(depths), torch.max(depths)) + + if rendering_options.get('white_back', False): + composite_rgb = composite_rgb + 1 - weight_total + + composite_rgb = composite_rgb * 2 - 1 # Scale to (-1, 1) + + return composite_rgb, composite_depth, weights + + + def forward(self, colors, densities, depths, rendering_options): + composite_rgb, composite_depth, weights = self.run_forward(colors, densities, depths, rendering_options) + + return composite_rgb, composite_depth, weights \ No newline at end of file diff --git a/ThirdParty/eg3d/training/volumetric_rendering/ray_sampler.py b/ThirdParty/eg3d/training/volumetric_rendering/ray_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..00dd07b908497bd07bbe0e394d9eac38acce2b50 --- /dev/null +++ b/ThirdParty/eg3d/training/volumetric_rendering/ray_sampler.py @@ -0,0 +1,63 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +""" +The ray sampler is a module that takes in camera matrices and resolution and batches of rays. +Expects cam2world matrices that use the OpenCV camera coordinate system conventions. +""" + +import torch + +class RaySampler(torch.nn.Module): + def __init__(self): + super().__init__() + self.ray_origins_h, self.ray_directions, self.depths, self.image_coords, self.rendering_options = None, None, None, None, None + + + def forward(self, cam2world_matrix, intrinsics, resolution): + """ + Create batches of rays and return origins and directions. 
+ + cam2world_matrix: (N, 4, 4) + intrinsics: (N, 3, 3) + resolution: int + + ray_origins: (N, M, 3) + ray_dirs: (N, M, 2) + """ + N, M = cam2world_matrix.shape[0], resolution**2 + cam_locs_world = cam2world_matrix[:, :3, 3] + fx = intrinsics[:, 0, 0] + fy = intrinsics[:, 1, 1] + cx = intrinsics[:, 0, 2] + cy = intrinsics[:, 1, 2] + sk = intrinsics[:, 0, 1] + + uv = torch.stack(torch.meshgrid(torch.arange(resolution, dtype=torch.float32, device=cam2world_matrix.device), torch.arange(resolution, dtype=torch.float32, device=cam2world_matrix.device), indexing='ij')) * (1./resolution) + (0.5/resolution) + uv = uv.flip(0).reshape(2, -1).transpose(1, 0) + uv = uv.unsqueeze(0).repeat(cam2world_matrix.shape[0], 1, 1) + + x_cam = uv[:, :, 0].view(N, -1) + y_cam = uv[:, :, 1].view(N, -1) + z_cam = torch.ones((N, M), device=cam2world_matrix.device) + + x_lift = (x_cam - cx.unsqueeze(-1) + cy.unsqueeze(-1)*sk.unsqueeze(-1)/fy.unsqueeze(-1) - sk.unsqueeze(-1)*y_cam/fy.unsqueeze(-1)) / fx.unsqueeze(-1) * z_cam + y_lift = (y_cam - cy.unsqueeze(-1)) / fy.unsqueeze(-1) * z_cam + + cam_rel_points = torch.stack((x_lift, y_lift, z_cam, torch.ones_like(z_cam)), dim=-1) + + world_rel_points = torch.bmm(cam2world_matrix, cam_rel_points.permute(0, 2, 1)).permute(0, 2, 1)[:, :, :3] + + ray_dirs = world_rel_points - cam_locs_world[:, None, :] + ray_dirs = torch.nn.functional.normalize(ray_dirs, dim=2) + + ray_origins = cam_locs_world.unsqueeze(1).repeat(1, ray_dirs.shape[1], 1) + + return ray_origins, ray_dirs \ No newline at end of file diff --git a/ThirdParty/eg3d/training/volumetric_rendering/renderer.py b/ThirdParty/eg3d/training/volumetric_rendering/renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..a27aea61be0cc91a8cab14082252ca203b772d2d --- /dev/null +++ b/ThirdParty/eg3d/training/volumetric_rendering/renderer.py @@ -0,0 +1,253 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +""" +The renderer is a module that takes in rays, decides where to sample along each +ray, and computes pixel colors using the volume rendering equation. +""" + +import math +import torch +import torch.nn as nn + +from training.volumetric_rendering.ray_marcher import MipRayMarcher2 +from training.volumetric_rendering import math_utils + +def generate_planes(): + """ + Defines planes by the three vectors that form the "axes" of the + plane. Should work with arbitrary number of planes and planes of + arbitrary orientation. + """ + return torch.tensor([[[1, 0, 0], + [0, 1, 0], + [0, 0, 1]], + [[1, 0, 0], + [0, 0, 1], + [0, 1, 0]], + [[0, 0, 1], + [1, 0, 0], + [0, 1, 0]]], dtype=torch.float32) + +def project_onto_planes(planes, coordinates): + """ + Does a projection of a 3D point onto a batch of 2D planes, + returning 2D plane coordinates. 
+ + Takes plane axes of shape n_planes, 3, 3 + # Takes coordinates of shape N, M, 3 + # returns projections of shape N*n_planes, M, 2 + """ + N, M, C = coordinates.shape + n_planes, _, _ = planes.shape + coordinates = coordinates.unsqueeze(1).expand(-1, n_planes, -1, -1).reshape(N*n_planes, M, 3) + inv_planes = torch.linalg.inv(planes).unsqueeze(0).expand(N, -1, -1, -1).reshape(N*n_planes, 3, 3) + projections = torch.bmm(coordinates, inv_planes) + return projections[..., :2] + +def sample_from_planes(plane_axes, plane_features, coordinates, mode='bilinear', padding_mode='zeros', box_warp=None): + assert padding_mode == 'zeros' + N, n_planes, C, H, W = plane_features.shape + _, M, _ = coordinates.shape + plane_features = plane_features.view(N*n_planes, C, H, W) + + coordinates = (2/box_warp) * coordinates # TODO: add specific box bounds + + projected_coordinates = project_onto_planes(plane_axes, coordinates).unsqueeze(1) + output_features = torch.nn.functional.grid_sample(plane_features, projected_coordinates.float(), mode=mode, padding_mode=padding_mode, align_corners=False).permute(0, 3, 2, 1).reshape(N, n_planes, M, C) + return output_features + +def sample_from_3dgrid(grid, coordinates): + """ + Expects coordinates in shape (batch_size, num_points_per_batch, 3) + Expects grid in shape (1, channels, H, W, D) + (Also works if grid has batch size) + Returns sampled features of shape (batch_size, num_points_per_batch, feature_channels) + """ + batch_size, n_coords, n_dims = coordinates.shape + sampled_features = torch.nn.functional.grid_sample(grid.expand(batch_size, -1, -1, -1, -1), + coordinates.reshape(batch_size, 1, 1, -1, n_dims), + mode='bilinear', padding_mode='zeros', align_corners=False) + N, C, H, W, D = sampled_features.shape + sampled_features = sampled_features.permute(0, 4, 3, 2, 1).reshape(N, H*W*D, C) + return sampled_features + +class ImportanceRenderer(torch.nn.Module): + def __init__(self): + super().__init__() + self.ray_marcher = MipRayMarcher2() + self.plane_axes = generate_planes() + + def forward(self, planes, decoder, ray_origins, ray_directions, rendering_options): + self.plane_axes = self.plane_axes.to(ray_origins.device) + + if rendering_options['ray_start'] == rendering_options['ray_end'] == 'auto': + ray_start, ray_end = math_utils.get_ray_limits_box(ray_origins, ray_directions, box_side_length=rendering_options['box_warp']) + is_ray_valid = ray_end > ray_start + if torch.any(is_ray_valid).item(): + ray_start[~is_ray_valid] = ray_start[is_ray_valid].min() + ray_end[~is_ray_valid] = ray_start[is_ray_valid].max() + depths_coarse = self.sample_stratified(ray_origins, ray_start, ray_end, rendering_options['depth_resolution'], rendering_options['disparity_space_sampling']) + else: + # Create stratified depth samples + depths_coarse = self.sample_stratified(ray_origins, rendering_options['ray_start'], rendering_options['ray_end'], rendering_options['depth_resolution'], rendering_options['disparity_space_sampling']) + + batch_size, num_rays, samples_per_ray, _ = depths_coarse.shape + + # Coarse Pass + sample_coordinates = (ray_origins.unsqueeze(-2) + depths_coarse * ray_directions.unsqueeze(-2)).reshape(batch_size, -1, 3) + sample_directions = ray_directions.unsqueeze(-2).expand(-1, -1, samples_per_ray, -1).reshape(batch_size, -1, 3) + + + out = self.run_model(planes, decoder, sample_coordinates, sample_directions, rendering_options) + colors_coarse = out['rgb'] + densities_coarse = out['sigma'] + colors_coarse = colors_coarse.reshape(batch_size, num_rays, 
samples_per_ray, colors_coarse.shape[-1]) + densities_coarse = densities_coarse.reshape(batch_size, num_rays, samples_per_ray, 1) + + # Fine Pass + N_importance = rendering_options['depth_resolution_importance'] + if N_importance > 0: + _, _, weights = self.ray_marcher(colors_coarse, densities_coarse, depths_coarse, rendering_options) + + depths_fine = self.sample_importance(depths_coarse, weights, N_importance) + + sample_directions = ray_directions.unsqueeze(-2).expand(-1, -1, N_importance, -1).reshape(batch_size, -1, 3) + sample_coordinates = (ray_origins.unsqueeze(-2) + depths_fine * ray_directions.unsqueeze(-2)).reshape(batch_size, -1, 3) + + out = self.run_model(planes, decoder, sample_coordinates, sample_directions, rendering_options) + colors_fine = out['rgb'] + densities_fine = out['sigma'] + colors_fine = colors_fine.reshape(batch_size, num_rays, N_importance, colors_fine.shape[-1]) + densities_fine = densities_fine.reshape(batch_size, num_rays, N_importance, 1) + + all_depths, all_colors, all_densities = self.unify_samples(depths_coarse, colors_coarse, densities_coarse, + depths_fine, colors_fine, densities_fine) + + # Aggregate + rgb_final, depth_final, weights = self.ray_marcher(all_colors, all_densities, all_depths, rendering_options) + else: + rgb_final, depth_final, weights = self.ray_marcher(colors_coarse, densities_coarse, depths_coarse, rendering_options) + + + return rgb_final, depth_final, weights.sum(2) + + def run_model(self, planes, decoder, sample_coordinates, sample_directions, options): + sampled_features = sample_from_planes(self.plane_axes, planes, sample_coordinates, padding_mode='zeros', box_warp=options['box_warp']) + + out = decoder(sampled_features, sample_directions) + if options.get('density_noise', 0) > 0: + out['sigma'] += torch.randn_like(out['sigma']) * options['density_noise'] + return out + + def sort_samples(self, all_depths, all_colors, all_densities): + _, indices = torch.sort(all_depths, dim=-2) + all_depths = torch.gather(all_depths, -2, indices) + all_colors = torch.gather(all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1])) + all_densities = torch.gather(all_densities, -2, indices.expand(-1, -1, -1, 1)) + return all_depths, all_colors, all_densities + + def unify_samples(self, depths1, colors1, densities1, depths2, colors2, densities2): + all_depths = torch.cat([depths1, depths2], dim = -2) + all_colors = torch.cat([colors1, colors2], dim = -2) + all_densities = torch.cat([densities1, densities2], dim = -2) + + _, indices = torch.sort(all_depths, dim=-2) + all_depths = torch.gather(all_depths, -2, indices) + all_colors = torch.gather(all_colors, -2, indices.expand(-1, -1, -1, all_colors.shape[-1])) + all_densities = torch.gather(all_densities, -2, indices.expand(-1, -1, -1, 1)) + + return all_depths, all_colors, all_densities + + def sample_stratified(self, ray_origins, ray_start, ray_end, depth_resolution, disparity_space_sampling=False): + """ + Return depths of approximately uniformly spaced samples along rays. + """ + N, M, _ = ray_origins.shape + if disparity_space_sampling: + depths_coarse = torch.linspace(0, + 1, + depth_resolution, + device=ray_origins.device).reshape(1, 1, depth_resolution, 1).repeat(N, M, 1, 1) + depth_delta = 1/(depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta + depths_coarse = 1./(1./ray_start * (1. 
- depths_coarse) + 1./ray_end * depths_coarse) + else: + if type(ray_start) == torch.Tensor: + depths_coarse = math_utils.linspace(ray_start, ray_end, depth_resolution).permute(1,2,0,3) + depth_delta = (ray_end - ray_start) / (depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta[..., None] + else: + depths_coarse = torch.linspace(ray_start, ray_end, depth_resolution, device=ray_origins.device).reshape(1, 1, depth_resolution, 1).repeat(N, M, 1, 1) + depth_delta = (ray_end - ray_start)/(depth_resolution - 1) + depths_coarse += torch.rand_like(depths_coarse) * depth_delta + + return depths_coarse + + def sample_importance(self, z_vals, weights, N_importance): + """ + Return depths of importance sampled points along rays. See NeRF importance sampling for more. + """ + with torch.no_grad(): + batch_size, num_rays, samples_per_ray, _ = z_vals.shape + + z_vals = z_vals.reshape(batch_size * num_rays, samples_per_ray) + weights = weights.reshape(batch_size * num_rays, -1) # -1 to account for loss of 1 sample in MipRayMarcher + + # smooth weights + weights = torch.nn.functional.max_pool1d(weights.unsqueeze(1).float(), 2, 1, padding=1) + weights = torch.nn.functional.avg_pool1d(weights, 2, 1).squeeze() + weights = weights + 0.01 + + z_vals_mid = 0.5 * (z_vals[: ,:-1] + z_vals[: ,1:]) + importance_z_vals = self.sample_pdf(z_vals_mid, weights[:, 1:-1], + N_importance).detach().reshape(batch_size, num_rays, N_importance, 1) + return importance_z_vals + + def sample_pdf(self, bins, weights, N_importance, det=False, eps=1e-5): + """ + Sample @N_importance samples from @bins with distribution defined by @weights. + Inputs: + bins: (N_rays, N_samples_+1) where N_samples_ is "the number of coarse samples per ray - 2" + weights: (N_rays, N_samples_) + N_importance: the number of samples to draw from the distribution + det: deterministic or not + eps: a small number to prevent division by zero + Outputs: + samples: the sampled samples + """ + N_rays, N_samples_ = weights.shape + weights = weights + eps # prevent division by zero (don't do inplace op!) + pdf = weights / torch.sum(weights, -1, keepdim=True) # (N_rays, N_samples_) + cdf = torch.cumsum(pdf, -1) # (N_rays, N_samples), cumulative distribution function + cdf = torch.cat([torch.zeros_like(cdf[: ,:1]), cdf], -1) # (N_rays, N_samples_+1) + # padded to 0~1 inclusive + + if det: + u = torch.linspace(0, 1, N_importance, device=bins.device) + u = u.expand(N_rays, N_importance) + else: + u = torch.rand(N_rays, N_importance, device=bins.device) + u = u.contiguous() + + inds = torch.searchsorted(cdf, u, right=True) + below = torch.clamp_min(inds-1, 0) + above = torch.clamp_max(inds, N_samples_) + + inds_sampled = torch.stack([below, above], -1).view(N_rays, 2*N_importance) + cdf_g = torch.gather(cdf, 1, inds_sampled).view(N_rays, N_importance, 2) + bins_g = torch.gather(bins, 1, inds_sampled).view(N_rays, N_importance, 2) + + denom = cdf_g[...,1]-cdf_g[...,0] + denom[denom 0: + self._defer_rendering -= 1 + elif self.args.pkl is not None: + self._async_renderer.set_args(**self.args) + result = self._async_renderer.get_result() + if result is not None: + self.result = result + + # Display. 
+ max_w = self.content_width - self.pane_w + max_h = self.content_height + pos = np.array([self.pane_w + max_w / 2, max_h / 2]) + if 'image' in self.result: + if self._tex_img is not self.result.image: + self._tex_img = self.result.image + if self._tex_obj is None or not self._tex_obj.is_compatible(image=self._tex_img): + self._tex_obj = gl_utils.Texture(image=self._tex_img, bilinear=False, mipmap=False) + else: + self._tex_obj.update(self._tex_img) + zoom = min(max_w / self._tex_obj.width, max_h / self._tex_obj.height) + # print(zoom) + zoom = np.floor(zoom) if zoom >= 1 else zoom + # zoom = 1 + self._tex_obj.draw(pos=pos, zoom=zoom, align=0.5, rint=True) + if 'error' in self.result: + self.print_error(self.result.error) + if 'message' not in self.result: + self.result.message = str(self.result.error) + if 'message' in self.result: + tex = text_utils.get_texture(self.result.message, size=self.font_size, max_width=max_w, max_height=max_h, outline=2) + tex.draw(pos=pos, align=0.5, rint=True, color=1) + + # End frame. + self._adjust_font_size() + imgui.end() + self.end_frame() + +#---------------------------------------------------------------------------- + +class AsyncRenderer: + def __init__(self): + self._closed = False + self._is_async = False + self._cur_args = None + self._cur_result = None + self._cur_stamp = 0 + self._renderer_obj = None + self._args_queue = None + self._result_queue = None + self._process = None + + def close(self): + self._closed = True + self._renderer_obj = None + if self._process is not None: + self._process.terminate() + self._process = None + self._args_queue = None + self._result_queue = None + + @property + def is_async(self): + return self._is_async + + def set_async(self, is_async): + self._is_async = is_async + + def set_args(self, **args): + assert not self._closed + if args != self._cur_args: + if self._is_async: + self._set_args_async(**args) + else: + self._set_args_sync(**args) + self._cur_args = args + + def _set_args_async(self, **args): + if self._process is None: + self._args_queue = multiprocessing.Queue() + self._result_queue = multiprocessing.Queue() + try: + multiprocessing.set_start_method('spawn') + except RuntimeError: + pass + self._process = multiprocessing.Process(target=self._process_fn, args=(self._args_queue, self._result_queue), daemon=True) + self._process.start() + self._args_queue.put([args, self._cur_stamp]) + + def _set_args_sync(self, **args): + if self._renderer_obj is None: + self._renderer_obj = renderer.Renderer() + self._cur_result = self._renderer_obj.render(**args) + + def get_result(self): + assert not self._closed + if self._result_queue is not None: + while self._result_queue.qsize() > 0: + result, stamp = self._result_queue.get() + if stamp == self._cur_stamp: + self._cur_result = result + return self._cur_result + + def clear_result(self): + assert not self._closed + self._cur_args = None + self._cur_result = None + self._cur_stamp += 1 + + @staticmethod + def _process_fn(args_queue, result_queue): + renderer_obj = renderer.Renderer() + cur_args = None + cur_stamp = None + while True: + args, stamp = args_queue.get() + while args_queue.qsize() > 0: + args, stamp = args_queue.get() + if args != cur_args or stamp != cur_stamp: + result = renderer_obj.render(**args) + if 'error' in result: + result.error = renderer.CapturedException(result.error) + result_queue.put([result, stamp]) + cur_args = args + cur_stamp = stamp + +#---------------------------------------------------------------------------- + 
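For reference, a minimal self-contained sketch of the latest-wins pattern that AsyncRenderer above implements (illustrative only, not part of the diffed sources): argument dicts are tagged with a stamp, the worker process keeps only the newest request, and the consumer discards results whose stamp is stale. The toy render() stands in for renderer.Renderer().render(**args).

    import multiprocessing as mp

    def render(**args):                       # stand-in for renderer.Renderer().render(**args)
        return {'echo': args}

    def worker(args_queue, result_queue):
        while True:
            args, stamp = args_queue.get()
            while not args_queue.empty():     # drop stale requests, keep only the newest
                args, stamp = args_queue.get()
            result_queue.put((render(**args), stamp))

    if __name__ == '__main__':
        mp.set_start_method('spawn')          # same start method the class above requests
        args_q, result_q = mp.Queue(), mp.Queue()
        mp.Process(target=worker, args=(args_q, result_q), daemon=True).start()
        cur_stamp = 0
        args_q.put(({'yaw': 0.1, 'pitch': 0.0}, cur_stamp))
        result, stamp = result_q.get()
        if stamp == cur_stamp:                # results tagged with an older stamp would be ignored
            print(result)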
+@click.command() +@click.argument('pkls', metavar='PATH', nargs=-1) +@click.option('--capture-dir', help='Where to save screenshot captures', metavar='PATH', default=None) +@click.option('--browse-dir', help='Specify model path for the \'Browse...\' button', metavar='PATH') +def main( + pkls, + capture_dir, + browse_dir +): + """Interactive model visualizer. + + Optional PATH argument can be used specify which .pkl file to load. + """ + viz = Visualizer(capture_dir=capture_dir) + + if browse_dir is not None: + viz.pickle_widget.search_dirs = [browse_dir] + + # List pickles. + pretrained = [ + 'https://api.ngc.nvidia.com/v2/models/nvidia/research/eg3d/versions/1/files/ffhq512-128.pkl', + 'https://api.ngc.nvidia.com/v2/models/nvidia/research/eg3d/versions/1/files/afhqcats512-128.pkl', + 'https://api.ngc.nvidia.com/v2/models/nvidia/research/eg3d/versions/1/files/ffhqrebalanced512-64.pkl', + 'https://api.ngc.nvidia.com/v2/models/nvidia/research/eg3d/versions/1/files/ffhqrebalanced512-128.pkl', + 'https://api.ngc.nvidia.com/v2/models/nvidia/research/eg3d/versions/1/files/shapenetcars128-64.pkl', + ] + + # Populate recent pickles list with pretrained model URLs. + for url in pretrained: + viz.add_recent_pickle(url) + + # Run. + while not viz.should_close(): + viz.draw_frame() + viz.close() + +#---------------------------------------------------------------------------- + +if __name__ == "__main__": + main() + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/__init__.py b/ThirdParty/eg3d/viz/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dfebd04f47e6f6b1b44984c14c23b57d56f72240 --- /dev/null +++ b/ThirdParty/eg3d/viz/__init__.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +# empty diff --git a/ThirdParty/eg3d/viz/backbone_cache_widget.py b/ThirdParty/eg3d/viz/backbone_cache_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..71f3fb444bf48fa948f15054fc8b3aac73b3e1a5 --- /dev/null +++ b/ThirdParty/eg3d/viz/backbone_cache_widget.py @@ -0,0 +1,34 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class BackboneCacheWidget: + def __init__(self, viz): + self.viz = viz + self.cache_backbone = True + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + + if show: + imgui.text('Cache Backbone') + imgui.same_line(viz.label_w + viz.spacing * 4) + _clicked, self.cache_backbone = imgui.checkbox('##backbonecache', self.cache_backbone) + imgui.same_line(viz.label_w + viz.spacing * 10) + imgui.text('Note that when enabled, you may be unable to view intermediate backbone weights below') + + viz.args.do_backbone_caching = self.cache_backbone + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/capture_widget.py b/ThirdParty/eg3d/viz/capture_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..70f214ffae20209795cfb32148a88f4e09091fad --- /dev/null +++ b/ThirdParty/eg3d/viz/capture_widget.py @@ -0,0 +1,89 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import os +import re +import numpy as np +import imgui +import PIL.Image +from gui_utils import imgui_utils +from . import renderer + +#---------------------------------------------------------------------------- + +class CaptureWidget: + def __init__(self, viz): + self.viz = viz + self.path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '_screenshots')) + self.dump_image = False + self.dump_gui = False + self.defer_frames = 0 + self.disabled_time = 0 + + def dump_png(self, image): + viz = self.viz + try: + _height, _width, channels = image.shape + assert channels in [1, 3] + assert image.dtype == np.uint8 + os.makedirs(self.path, exist_ok=True) + file_id = 0 + for entry in os.scandir(self.path): + if entry.is_file(): + match = re.fullmatch(r'(\d+).*', entry.name) + if match: + file_id = max(file_id, int(match.group(1)) + 1) + if channels == 1: + pil_image = PIL.Image.fromarray(image[:, :, 0], 'L') + else: + pil_image = PIL.Image.fromarray(image, 'RGB') + pil_image.save(os.path.join(self.path, f'{file_id:05d}.png')) + except: + viz.result.error = renderer.CapturedException() + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + if show: + with imgui_utils.grayed_out(self.disabled_time != 0): + imgui.text('Capture') + imgui.same_line(viz.label_w) + _changed, self.path = imgui_utils.input_text('##path', self.path, 1024, + flags=(imgui.INPUT_TEXT_AUTO_SELECT_ALL | imgui.INPUT_TEXT_ENTER_RETURNS_TRUE), + width=(-1 - viz.button_w * 2 - viz.spacing * 2), + help_text='PATH') + if imgui.is_item_hovered() and not imgui.is_item_active() and self.path != '': + imgui.set_tooltip(self.path) + imgui.same_line() + if imgui_utils.button('Save image', width=viz.button_w, enabled=(self.disabled_time == 0 and 'image' in viz.result)): + self.dump_image = True + self.defer_frames = 2 + self.disabled_time = 0.5 + imgui.same_line() + if 
imgui_utils.button('Save GUI', width=-1, enabled=(self.disabled_time == 0)): + self.dump_gui = True + self.defer_frames = 2 + self.disabled_time = 0.5 + + self.disabled_time = max(self.disabled_time - viz.frame_delta, 0) + if self.defer_frames > 0: + self.defer_frames -= 1 + elif self.dump_image: + if 'image' in viz.result: + self.dump_png(viz.result.image) + self.dump_image = False + elif self.dump_gui: + viz.capture_next_frame() + self.dump_gui = False + captured_frame = viz.pop_captured_frame() + if captured_frame is not None: + self.dump_png(captured_frame) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/conditioning_pose_widget.py b/ThirdParty/eg3d/viz/conditioning_pose_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..875490104b7e292c01625eb83404aed26f7b70a1 --- /dev/null +++ b/ThirdParty/eg3d/viz/conditioning_pose_widget.py @@ -0,0 +1,57 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import numpy as np +import imgui +import dnnlib +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class ConditioningPoseWidget: + def __init__(self, viz): + self.viz = viz + self.pose = dnnlib.EasyDict(yaw=0, pitch=0, anim=False, speed=0.25) + self.pose_def = dnnlib.EasyDict(self.pose) + + def drag(self, dx, dy): + viz = self.viz + self.pose.yaw += -dx / viz.font_size * 3e-2 + self.pose.pitch += -dy / viz.font_size * 3e-2 + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + if show: + imgui.text('Cond Pose') + imgui.same_line(viz.label_w) + yaw = self.pose.yaw + pitch = self.pose.pitch + with imgui_utils.item_width(viz.font_size * 5): + changed, (new_yaw, new_pitch) = imgui.input_float2('##frac', yaw, pitch, format='%+.2f', flags=imgui.INPUT_TEXT_ENTER_RETURNS_TRUE) + if changed: + self.pose.yaw = new_yaw + self.pose.pitch = new_pitch + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.spacing * 2) + _clicked, dragging, dx, dy = imgui_utils.drag_button('Drag', width=viz.button_w) + if dragging: + self.drag(dx, dy) + imgui.same_line() + snapped = dnnlib.EasyDict(self.pose, yaw=round(self.pose.yaw, 1), pitch=round(self.pose.pitch, 1)) + if imgui_utils.button('Snap', width=viz.button_w, enabled=(self.pose != snapped)): + self.pose = snapped + imgui.same_line() + if imgui_utils.button('Reset', width=-1, enabled=(self.pose != self.pose_def)): + self.pose = dnnlib.EasyDict(self.pose_def) + + viz.args.conditioning_yaw = self.pose.yaw + viz.args.conditioning_pitch = self.pose.pitch + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/latent_widget.py b/ThirdParty/eg3d/viz/latent_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..30ce50c4dd37125934152d9db57d88e36c845f5b --- /dev/null +++ b/ThirdParty/eg3d/viz/latent_widget.py @@ -0,0 +1,80 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & 
AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import numpy as np +import imgui +import dnnlib +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class LatentWidget: + def __init__(self, viz): + self.viz = viz + self.latent = dnnlib.EasyDict(x=1, y=0, anim=False, speed=0.25) + self.latent_def = dnnlib.EasyDict(self.latent) + self.step_y = 100 + + def drag(self, dx, dy): + viz = self.viz + self.latent.x += dx / viz.font_size * 4e-2 + self.latent.y += dy / viz.font_size * 4e-2 + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + if show: + imgui.text('Latent') + imgui.same_line(viz.label_w) + seed = round(self.latent.x) + round(self.latent.y) * self.step_y + with imgui_utils.item_width(viz.font_size * 8): + changed, seed = imgui.input_int('##seed', seed, step=0) + if changed: + self.latent.x = seed + self.latent.y = 0 + imgui.same_line(viz.label_w + viz.font_size * 8 + viz.spacing) + frac_x = self.latent.x - round(self.latent.x) + frac_y = self.latent.y - round(self.latent.y) + with imgui_utils.item_width(viz.font_size * 5): + changed, (new_frac_x, new_frac_y) = imgui.input_float2('##frac', frac_x, frac_y, format='%+.2f', flags=imgui.INPUT_TEXT_ENTER_RETURNS_TRUE) + if changed: + self.latent.x += new_frac_x - frac_x + self.latent.y += new_frac_y - frac_y + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.spacing * 2) + _clicked, dragging, dx, dy = imgui_utils.drag_button('Drag', width=viz.button_w) + if dragging: + self.drag(dx, dy) + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.button_w + viz.spacing * 3) + _clicked, self.latent.anim = imgui.checkbox('Anim', self.latent.anim) + imgui.same_line(round(viz.font_size * 28.7)) + with imgui_utils.item_width(-2 - viz.button_w * 2 - viz.spacing * 2), imgui_utils.grayed_out(not self.latent.anim): + changed, speed = imgui.slider_float('##speed', self.latent.speed, -5, 5, format='Speed %.3f', power=3) + if changed: + self.latent.speed = speed + imgui.same_line() + snapped = dnnlib.EasyDict(self.latent, x=round(self.latent.x), y=round(self.latent.y)) + if imgui_utils.button('Snap', width=viz.button_w, enabled=(self.latent != snapped)): + self.latent = snapped + imgui.same_line() + if imgui_utils.button('Reset', width=-1, enabled=(self.latent != self.latent_def)): + self.latent = dnnlib.EasyDict(self.latent_def) + + if self.latent.anim: + self.latent.x += viz.frame_delta * self.latent.speed + viz.args.w0_seeds = [] # [[seed, weight], ...] 
+ for ofs_x, ofs_y in [[0, 0], [1, 0], [0, 1], [1, 1]]: + seed_x = np.floor(self.latent.x) + ofs_x + seed_y = np.floor(self.latent.y) + ofs_y + seed = (int(seed_x) + int(seed_y) * self.step_y) & ((1 << 32) - 1) + weight = (1 - abs(self.latent.x - seed_x)) * (1 - abs(self.latent.y - seed_y)) + if weight > 0: + viz.args.w0_seeds.append([seed, weight]) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/layer_widget.py b/ThirdParty/eg3d/viz/layer_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..6da25858046af66acbf8521a441d9787a9869137 --- /dev/null +++ b/ThirdParty/eg3d/viz/layer_widget.py @@ -0,0 +1,185 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class LayerWidget: + def __init__(self, viz): + self.viz = viz + self.prev_layers = None + self.cur_layer = None + self.sel_channels = 3 + self.base_channel = 0 + self.img_scale_db = 0 + self.img_normalize = False + self.fft_show = False + self.fft_all = True + self.fft_range_db = 50 + self.fft_beta = 8 + self.refocus = False + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + layers = viz.result.get('layers', []) + if self.prev_layers != layers: + self.prev_layers = layers + self.refocus = True + layer = ([layer for layer in layers if layer.name == self.cur_layer] + [None])[0] + if layer is None and len(layers) > 0: + layer = layers[-1] + self.cur_layer = layer.name + num_channels = layer.shape[1] if layer is not None else 0 + base_channel_max = max(num_channels - self.sel_channels, 0) + + if show: + bg_color = [0.16, 0.29, 0.48, 0.2] + dim_color = list(imgui.get_style().colors[imgui.COLOR_TEXT]) + dim_color[-1] *= 0.5 + + # Begin list. + width = viz.font_size * 28 + height = imgui.get_text_line_height_with_spacing() * 12 + viz.spacing + imgui.push_style_var(imgui.STYLE_FRAME_PADDING, [0, 0]) + imgui.push_style_color(imgui.COLOR_CHILD_BACKGROUND, *bg_color) + imgui.push_style_color(imgui.COLOR_HEADER, 0, 0, 0, 0) + imgui.push_style_color(imgui.COLOR_HEADER_HOVERED, 0.16, 0.29, 0.48, 0.5) + imgui.push_style_color(imgui.COLOR_HEADER_ACTIVE, 0.16, 0.29, 0.48, 0.9) + imgui.begin_child('##list', width=width, height=height, border=True, flags=imgui.WINDOW_ALWAYS_VERTICAL_SCROLLBAR) + + # List items. + for layer in layers: + selected = (self.cur_layer == layer.name) + _opened, selected = imgui.selectable(f'##{layer.name}_selectable', selected) + imgui.same_line(viz.spacing) + _clicked, selected = imgui.checkbox(f'{layer.name}##radio', selected) + if selected: + self.cur_layer = layer.name + if self.refocus: + imgui.set_scroll_here() + viz.skip_frame() # Focus will change on next frame. 
+ self.refocus = False + imgui.same_line(width - viz.font_size * 13) + imgui.text_colored('x'.join(str(x) for x in layer.shape[2:]), *dim_color) + imgui.same_line(width - viz.font_size * 8) + imgui.text_colored(str(layer.shape[1]), *dim_color) + imgui.same_line(width - viz.font_size * 5) + imgui.text_colored(layer.dtype, *dim_color) + + # End list. + if len(layers) == 0: + imgui.text_colored('No layers found', *dim_color) + imgui.end_child() + imgui.pop_style_color(4) + imgui.pop_style_var(1) + + # Begin options. + imgui.same_line() + imgui.begin_child('##options', width=-1, height=height, border=False) + + # RGB & normalize. + rgb = (self.sel_channels == 3) + _clicked, rgb = imgui.checkbox('RGB', rgb) + self.sel_channels = 3 if rgb else 1 + imgui.same_line(viz.font_size * 4) + _clicked, self.img_normalize = imgui.checkbox('Normalize', self.img_normalize) + imgui.same_line(imgui.get_content_region_max()[0] - 1 - viz.button_w) + if imgui_utils.button('Reset##img_flags', width=-1, enabled=(self.sel_channels != 3 or self.img_normalize)): + self.sel_channels = 3 + self.img_normalize = False + + # Image scale. + with imgui_utils.item_width(-1 - viz.button_w - viz.spacing): + _changed, self.img_scale_db = imgui.slider_float('##scale', self.img_scale_db, min_value=-40, max_value=40, format='Scale %+.1f dB') + imgui.same_line() + if imgui_utils.button('Reset##scale', width=-1, enabled=(self.img_scale_db != 0)): + self.img_scale_db = 0 + + # Base channel. + self.base_channel = min(max(self.base_channel, 0), base_channel_max) + narrow_w = imgui.get_text_line_height_with_spacing() + with imgui_utils.grayed_out(base_channel_max == 0): + with imgui_utils.item_width(-1 - viz.button_w - narrow_w * 2 - viz.spacing * 3): + _changed, self.base_channel = imgui.drag_int('##channel', self.base_channel, change_speed=0.05, min_value=0, max_value=base_channel_max, format=f'Channel %d/{num_channels}') + imgui.same_line() + if imgui_utils.button('-##channel', width=narrow_w): + self.base_channel -= 1 + imgui.same_line() + if imgui_utils.button('+##channel', width=narrow_w): + self.base_channel += 1 + imgui.same_line() + self.base_channel = min(max(self.base_channel, 0), base_channel_max) + if imgui_utils.button('Reset##channel', width=-1, enabled=(self.base_channel != 0 and base_channel_max > 0)): + self.base_channel = 0 + + # Stats. + stats = viz.result.get('stats', None) + stats = [f'{stats[idx]:g}' if stats is not None else 'N/A' for idx in range(6)] + rows = [ + ['Statistic', 'All channels', 'Selected'], + ['Mean', stats[0], stats[1]], + ['Std', stats[2], stats[3]], + ['Max', stats[4], stats[5]], + ] + height = imgui.get_text_line_height_with_spacing() * len(rows) + viz.spacing + imgui.push_style_color(imgui.COLOR_CHILD_BACKGROUND, *bg_color) + imgui.begin_child('##stats', width=-1, height=height, border=True) + for y, cols in enumerate(rows): + for x, col in enumerate(cols): + if x != 0: + imgui.same_line(viz.font_size * (4 + (x - 1) * 6)) + if x == 0 or y == 0: + imgui.text_colored(col, *dim_color) + else: + imgui.text(col) + imgui.end_child() + imgui.pop_style_color(1) + + # FFT & all. 
+ _clicked, self.fft_show = imgui.checkbox('FFT', self.fft_show) + imgui.same_line(viz.font_size * 4) + with imgui_utils.grayed_out(not self.fft_show or base_channel_max == 0): + _clicked, self.fft_all = imgui.checkbox('All channels', self.fft_all) + imgui.same_line(imgui.get_content_region_max()[0] - 1 - viz.button_w) + with imgui_utils.grayed_out(not self.fft_show): + if imgui_utils.button('Reset##fft_flags', width=-1, enabled=(self.fft_show or not self.fft_all)): + self.fft_show = False + self.fft_all = True + + # FFT range. + with imgui_utils.grayed_out(not self.fft_show): + with imgui_utils.item_width(-1 - viz.button_w - viz.spacing): + _changed, self.fft_range_db = imgui.slider_float('##fft_range_db', self.fft_range_db, min_value=0.1, max_value=100, format='Range +-%.1f dB') + imgui.same_line() + if imgui_utils.button('Reset##fft_range_db', width=-1, enabled=(self.fft_range_db != 50)): + self.fft_range_db = 50 + + # FFT beta. + with imgui_utils.grayed_out(not self.fft_show): + with imgui_utils.item_width(-1 - viz.button_w - viz.spacing): + _changed, self.fft_beta = imgui.slider_float('##fft_beta', self.fft_beta, min_value=0, max_value=50, format='Kaiser beta %.2f', power=2.63) + imgui.same_line() + if imgui_utils.button('Reset##fft_beta', width=-1, enabled=(self.fft_beta != 8)): + self.fft_beta = 8 + + # End options. + imgui.end_child() + + self.base_channel = min(max(self.base_channel, 0), base_channel_max) + viz.args.layer_name = self.cur_layer if len(layers) > 0 and self.cur_layer != layers[-1].name else None + viz.args.update(sel_channels=self.sel_channels, base_channel=self.base_channel, img_scale_db=self.img_scale_db, img_normalize=self.img_normalize) + viz.args.fft_show = self.fft_show + if self.fft_show: + viz.args.update(fft_all=self.fft_all, fft_range_db=self.fft_range_db, fft_beta=self.fft_beta) + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/performance_widget.py b/ThirdParty/eg3d/viz/performance_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..deb208a741bf14dd57c70012fa23486902d31427 --- /dev/null +++ b/ThirdParty/eg3d/viz/performance_widget.py @@ -0,0 +1,75 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +import array +import numpy as np +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class PerformanceWidget: + def __init__(self, viz): + self.viz = viz + self.gui_times = [float('nan')] * 60 + self.render_times = [float('nan')] * 30 + self.fps_limit = 60 + self.use_vsync = False + self.is_async = False + self.force_fp32 = False + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + self.gui_times = self.gui_times[1:] + [viz.frame_delta] + if 'render_time' in viz.result: + self.render_times = self.render_times[1:] + [viz.result.render_time] + del viz.result.render_time + + if show: + imgui.text('GUI') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 8): + imgui.plot_lines('##gui_times', array.array('f', self.gui_times), scale_min=0) + imgui.same_line(viz.label_w + viz.font_size * 9) + t = [x for x in self.gui_times if x > 0] + t = np.mean(t) if len(t) > 0 else 0 + imgui.text(f'{t*1e3:.1f} ms' if t > 0 else 'N/A') + imgui.same_line(viz.label_w + viz.font_size * 14) + imgui.text(f'{1/t:.1f} FPS' if t > 0 else 'N/A') + imgui.same_line(viz.label_w + viz.font_size * 18 + viz.spacing * 3) + with imgui_utils.item_width(viz.font_size * 6): + _changed, self.fps_limit = imgui.input_int('FPS limit', self.fps_limit, flags=imgui.INPUT_TEXT_ENTER_RETURNS_TRUE) + self.fps_limit = min(max(self.fps_limit, 5), 1000) + imgui.same_line(imgui.get_content_region_max()[0] - 1 - viz.button_w * 2 - viz.spacing) + _clicked, self.use_vsync = imgui.checkbox('Vertical sync', self.use_vsync) + + if show: + imgui.text('Render') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 8): + imgui.plot_lines('##render_times', array.array('f', self.render_times), scale_min=0) + imgui.same_line(viz.label_w + viz.font_size * 9) + t = [x for x in self.render_times if x > 0] + t = np.mean(t) if len(t) > 0 else 0 + imgui.text(f'{t*1e3:.1f} ms' if t > 0 else 'N/A') + imgui.same_line(viz.label_w + viz.font_size * 14) + imgui.text(f'{1/t:.1f} FPS' if t > 0 else 'N/A') + imgui.same_line(viz.label_w + viz.font_size * 18 + viz.spacing * 3) + _clicked, self.is_async = imgui.checkbox('Separate process', self.is_async) + imgui.same_line(imgui.get_content_region_max()[0] - 1 - viz.button_w * 2 - viz.spacing) + _clicked, self.force_fp32 = imgui.checkbox('Force FP32', self.force_fp32) + + viz.set_fps_limit(self.fps_limit) + viz.set_vsync(self.use_vsync) + viz.set_async(self.is_async) + viz.args.force_fp32 = self.force_fp32 + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/pickle_widget.py b/ThirdParty/eg3d/viz/pickle_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..e85a8592aa700b551204b92827c11902b1b54851 --- /dev/null +++ b/ThirdParty/eg3d/viz/pickle_widget.py @@ -0,0 +1,172 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +import glob +import os +import re + +import dnnlib +import imgui +import numpy as np +from gui_utils import imgui_utils + +from . import renderer + +#---------------------------------------------------------------------------- + +def _locate_results(pattern): + return pattern + +#---------------------------------------------------------------------------- + +class PickleWidget: + def __init__(self, viz): + self.viz = viz + self.search_dirs = [] + self.cur_pkl = None + self.user_pkl = '' + self.recent_pkls = [] + self.browse_cache = dict() # {tuple(path, ...): [dnnlib.EasyDict(), ...], ...} + self.browse_refocus = False + self.load('', ignore_errors=True) + + def add_recent(self, pkl, ignore_errors=False): + try: + resolved = self.resolve_pkl(pkl) + if resolved not in self.recent_pkls: + self.recent_pkls.append(resolved) + except: + if not ignore_errors: + raise + + def load(self, pkl, ignore_errors=False): + viz = self.viz + viz.clear_result() + viz.skip_frame() # The input field will change on next frame. + try: + resolved = self.resolve_pkl(pkl) + name = resolved.replace('\\', '/').split('/')[-1] + self.cur_pkl = resolved + self.user_pkl = resolved + viz.result.message = f'Loading {name}...' + viz.defer_rendering() + if resolved in self.recent_pkls: + self.recent_pkls.remove(resolved) + self.recent_pkls.insert(0, resolved) + except: + self.cur_pkl = None + self.user_pkl = pkl + if pkl == '': + viz.result = dnnlib.EasyDict(message='No network pickle loaded') + else: + viz.result = dnnlib.EasyDict(error=renderer.CapturedException()) + if not ignore_errors: + raise + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + recent_pkls = [pkl for pkl in self.recent_pkls if pkl != self.user_pkl] + if show: + imgui.text('Pickle') + imgui.same_line(viz.label_w) + changed, self.user_pkl = imgui_utils.input_text('##pkl', self.user_pkl, 1024, + flags=(imgui.INPUT_TEXT_AUTO_SELECT_ALL | imgui.INPUT_TEXT_ENTER_RETURNS_TRUE), + width=(-1 - viz.button_w * 2 - viz.spacing * 2), + help_text='<PKL> | <URL> | <RUN_DIR> | <RUN_ID> | <RUN_ID>/<KIMG>.pkl') + if changed: + self.load(self.user_pkl, ignore_errors=True) + if imgui.is_item_hovered() and not imgui.is_item_active() and self.user_pkl != '': + imgui.set_tooltip(self.user_pkl) + imgui.same_line() + if imgui_utils.button('Recent...', width=viz.button_w, enabled=(len(recent_pkls) != 0)): + imgui.open_popup('recent_pkls_popup') + imgui.same_line() + if imgui_utils.button('Browse...', enabled=len(self.search_dirs) > 0, width=-1): + imgui.open_popup('browse_pkls_popup') + self.browse_cache.clear() + self.browse_refocus = True + + if imgui.begin_popup('recent_pkls_popup'): + for pkl in recent_pkls: + clicked, _state = imgui.menu_item(pkl) + if clicked: + self.load(pkl, ignore_errors=True) + imgui.end_popup() + + if imgui.begin_popup('browse_pkls_popup'): + def recurse(parents): + key = tuple(parents) + items = self.browse_cache.get(key, None) + if items is None: + items = self.list_runs_and_pkls(parents) + self.browse_cache[key] = items + for item in items: + if item.type == 'run' and imgui.begin_menu(item.name): + recurse([item.path]) + imgui.end_menu() + if item.type == 'pkl': + clicked, _state = imgui.menu_item(item.name) + if clicked: + self.load(item.path, ignore_errors=True) + if len(items) == 0: + with imgui_utils.grayed_out(): + imgui.menu_item('No results found') + recurse(self.search_dirs) + if self.browse_refocus: + imgui.set_scroll_here() + viz.skip_frame() # Focus will change on next frame.
+ self.browse_refocus = False + imgui.end_popup() + + paths = viz.pop_drag_and_drop_paths() + if paths is not None and len(paths) >= 1: + self.load(paths[0], ignore_errors=True) + + viz.args.pkl = self.cur_pkl + + def list_runs_and_pkls(self, parents): + items = [] + run_regex = re.compile(r'\d+-.*') + pkl_regex = re.compile(r'network-snapshot-\d+\.pkl') + for parent in set(parents): + if os.path.isdir(parent): + for entry in os.scandir(parent): + if entry.is_dir() and run_regex.fullmatch(entry.name): + items.append(dnnlib.EasyDict(type='run', name=entry.name, path=os.path.join(parent, entry.name))) + if entry.is_file() and pkl_regex.fullmatch(entry.name): + items.append(dnnlib.EasyDict(type='pkl', name=entry.name, path=os.path.join(parent, entry.name))) + + items = sorted(items, key=lambda item: (item.name.replace('_', ' '), item.path)) + return items + + def resolve_pkl(self, pattern): + assert isinstance(pattern, str) + assert pattern != '' + + # URL => return as is. + if dnnlib.util.is_url(pattern): + return pattern + + # Short-hand pattern => locate. + path = _locate_results(pattern) + + # Run dir => pick the last saved snapshot. + if os.path.isdir(path): + pkl_files = sorted(glob.glob(os.path.join(path, 'network-snapshot-*.pkl'))) + if len(pkl_files) == 0: + raise IOError(f'No network pickle found in "{path}"') + path = pkl_files[-1] + + # Normalize. + path = os.path.abspath(path) + return path + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/pose_widget.py b/ThirdParty/eg3d/viz/pose_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..bcb1f1715e1021adf928df2b931a1f23d336275f --- /dev/null +++ b/ThirdParty/eg3d/viz/pose_widget.py @@ -0,0 +1,92 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
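The pose widget defined next only stores yaw and pitch; renderer.py further down in this diff converts them into the 25-value camera conditioning vector (a flattened 4x4 cam2world pose plus a flattened 3x3 intrinsics matrix). A short sketch of that conversion using the same calls, assuming the ThirdParty/eg3d package (and thus camera_utils) is on the import path; the constants mirror the renderer's defaults:

    import torch
    from camera_utils import LookAtPoseSampler   # ships with ThirdParty/eg3d

    yaw, pitch, focal_length = 0.2, -0.1, 4.2647
    pivot = torch.tensor([0.0, 0.0, 0.2])        # FFHQ-style look-at point
    pose = LookAtPoseSampler.sample(3.14/2 + yaw, 3.14/2 + pitch, pivot, radius=2.7)
    intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]])
    c = torch.cat([pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1)   # shape [1, 25]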
+ +import numpy as np +import imgui +import dnnlib +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class PoseWidget: + def __init__(self, viz): + self.viz = viz + self.pose = dnnlib.EasyDict(yaw=0, pitch=0, anim=False, speed=0.25) + self.pose_def = dnnlib.EasyDict(self.pose) + + self.lookat_point_choice = 0 + self.lookat_point_option = ['auto', 'ffhq', 'shapenet', 'afhq', 'manual'] + self.lookat_point_labels = ['Auto Detect', 'FFHQ Default', 'Shapenet Default', 'AFHQ Default', 'Manual'] + self.lookat_point = (0.0, 0.0, 0.2) + + def drag(self, dx, dy): + viz = self.viz + self.pose.yaw += -dx / viz.font_size * 3e-2 + self.pose.pitch += -dy / viz.font_size * 3e-2 + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + if show: + imgui.text('Pose') + imgui.same_line(viz.label_w) + yaw = self.pose.yaw + pitch = self.pose.pitch + with imgui_utils.item_width(viz.font_size * 5): + changed, (new_yaw, new_pitch) = imgui.input_float2('##pose', yaw, pitch, format='%+.2f', flags=imgui.INPUT_TEXT_ENTER_RETURNS_TRUE) + if changed: + self.pose.yaw = new_yaw + self.pose.pitch = new_pitch + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.spacing * 2) + _clicked, dragging, dx, dy = imgui_utils.drag_button('Drag', width=viz.button_w) + if dragging: + self.drag(dx, dy) + imgui.same_line() + snapped = dnnlib.EasyDict(self.pose, yaw=round(self.pose.yaw, 1), pitch=round(self.pose.pitch, 1)) + if imgui_utils.button('Snap', width=viz.button_w, enabled=(self.pose != snapped)): + self.pose = snapped + imgui.same_line() + if imgui_utils.button('Reset', width=-1, enabled=(self.pose != self.pose_def)): + self.pose = dnnlib.EasyDict(self.pose_def) + + # New line starts here + imgui.text('LookAt Point') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 8): + _clicked, self.lookat_point_choice = imgui.combo('', self.lookat_point_choice, self.lookat_point_labels) + lookat_point = self.lookat_point_option[self.lookat_point_choice] + if lookat_point == 'auto': + self.lookat_point = None + if lookat_point == 'ffhq': + self.lookat_point = (0.0, 0.0, 0.2) + changes_enabled=False + if lookat_point == 'shapenet': + self.lookat_point = (0.0, 0.0, 0.0) + changes_enabled=False + if lookat_point == 'afhq': + self.lookat_point = (0.0, 0.0, 0.0) + changes_enabled=False + if lookat_point == 'manual': + if self.lookat_point is None: + self.lookat_point = (0.0, 0.0, 0.0) + changes_enabled=True + if lookat_point != 'auto': + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.spacing * 2) + with imgui_utils.item_width(viz.font_size * 16): + with imgui_utils.grayed_out(not changes_enabled): + _changed, self.lookat_point = imgui.input_float3('##lookat', *self.lookat_point, format='%.2f', flags=(imgui.INPUT_TEXT_READ_ONLY if not changes_enabled else 0)) + + + viz.args.yaw = self.pose.yaw + viz.args.pitch = self.pose.pitch + + viz.args.lookat_point = self.lookat_point + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/render_depth_sample_widget.py b/ThirdParty/eg3d/viz/render_depth_sample_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..27c48f748e23d465c6200687c8280541df2f28b9 --- /dev/null +++ b/ThirdParty/eg3d/viz/render_depth_sample_widget.py @@ -0,0 +1,40 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. 
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class RenderDepthSampleWidget: + def __init__(self, viz): + self.viz = viz + self.depth_mult = 2 + self.depth_importance_mult = 2 + self.render_types = [.5, 1, 2, 4] + self.labels = ['0.5x', '1x', '2x', '4x'] + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + + if show: + imgui.text('Render Type') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 4): + _clicked, self.depth_mult = imgui.combo('Depth Sample Multiplier', self.depth_mult, self.labels) + imgui.same_line(viz.label_w + viz.font_size * 16 + viz.spacing * 2) + with imgui_utils.item_width(viz.font_size * 4): + _clicked, self.depth_importance_mult = imgui.combo('Depth Sample Importance Multiplier', self.depth_importance_mult, self.labels) + + viz.args.depth_mult = self.render_types[self.depth_mult] + viz.args.depth_importance_mult = self.render_types[self.depth_importance_mult] + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/render_type_widget.py b/ThirdParty/eg3d/viz/render_type_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..6332ef42245603c1a9618612e2302eb33e6b0e11 --- /dev/null +++ b/ThirdParty/eg3d/viz/render_type_widget.py @@ -0,0 +1,35 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. 
+ +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class RenderTypeWidget: + def __init__(self, viz): + self.viz = viz + self.render_type = 0 + self.render_types = ['image', 'image_depth', 'image_raw'] + self.labels = ['RGB Image', 'Depth Image', 'Neural Rendered Image'] + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + + if show: + imgui.text('Render Type') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 10): + _clicked, self.render_type = imgui.combo('', self.render_type, self.labels) + + viz.args.render_type = self.render_types[self.render_type] + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/renderer.py b/ThirdParty/eg3d/viz/renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..c11f05a9e26aabc3a7bfadfd2ad71942973d3ec8 --- /dev/null +++ b/ThirdParty/eg3d/viz/renderer.py @@ -0,0 +1,448 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import sys +import copy +import traceback +import numpy as np +import torch +import torch.fft +import torch.nn +import matplotlib.cm +import dnnlib +from torch_utils.ops import upfirdn2d +import legacy # pylint: disable=import-error + +from camera_utils import LookAtPoseSampler + + + +#---------------------------------------------------------------------------- + +class CapturedException(Exception): + def __init__(self, msg=None): + if msg is None: + _type, value, _traceback = sys.exc_info() + assert value is not None + if isinstance(value, CapturedException): + msg = str(value) + else: + msg = traceback.format_exc() + assert isinstance(msg, str) + super().__init__(msg) + +#---------------------------------------------------------------------------- + +class CaptureSuccess(Exception): + def __init__(self, out): + super().__init__() + self.out = out + +#---------------------------------------------------------------------------- + +def _sinc(x): + y = (x * np.pi).abs() + z = torch.sin(y) / y.clamp(1e-30, float('inf')) + return torch.where(y < 1e-30, torch.ones_like(x), z) + +def _lanczos_window(x, a): + x = x.abs() / a + return torch.where(x < 1, _sinc(x), torch.zeros_like(x)) + +#---------------------------------------------------------------------------- + +def _construct_affine_bandlimit_filter(mat, a=3, amax=16, aflt=64, up=4, cutoff_in=1, cutoff_out=1): + assert a <= amax < aflt + mat = torch.as_tensor(mat).to(torch.float32) + + # Construct 2D filter taps in input & output coordinate spaces. + taps = ((torch.arange(aflt * up * 2 - 1, device=mat.device) + 1) / up - aflt).roll(1 - aflt * up) + yi, xi = torch.meshgrid(taps, taps) + xo, yo = (torch.stack([xi, yi], dim=2) @ mat[:2, :2].t()).unbind(2) + + # Convolution of two oriented 2D sinc filters. 
+ fi = _sinc(xi * cutoff_in) * _sinc(yi * cutoff_in) + fo = _sinc(xo * cutoff_out) * _sinc(yo * cutoff_out) + f = torch.fft.ifftn(torch.fft.fftn(fi) * torch.fft.fftn(fo)).real + + # Convolution of two oriented 2D Lanczos windows. + wi = _lanczos_window(xi, a) * _lanczos_window(yi, a) + wo = _lanczos_window(xo, a) * _lanczos_window(yo, a) + w = torch.fft.ifftn(torch.fft.fftn(wi) * torch.fft.fftn(wo)).real + + # Construct windowed FIR filter. + f = f * w + + # Finalize. + c = (aflt - amax) * up + f = f.roll([aflt * up - 1] * 2, dims=[0,1])[c:-c, c:-c] + f = torch.nn.functional.pad(f, [0, 1, 0, 1]).reshape(amax * 2, up, amax * 2, up) + f = f / f.sum([0,2], keepdim=True) / (up ** 2) + f = f.reshape(amax * 2 * up, amax * 2 * up)[:-1, :-1] + return f + +#---------------------------------------------------------------------------- + +def _apply_affine_transformation(x, mat, up=4, **filter_kwargs): + _N, _C, H, W = x.shape + mat = torch.as_tensor(mat).to(dtype=torch.float32, device=x.device) + + # Construct filter. + f = _construct_affine_bandlimit_filter(mat, up=up, **filter_kwargs) + assert f.ndim == 2 and f.shape[0] == f.shape[1] and f.shape[0] % 2 == 1 + p = f.shape[0] // 2 + + # Construct sampling grid. + theta = mat.inverse() + theta[:2, 2] *= 2 + theta[0, 2] += 1 / up / W + theta[1, 2] += 1 / up / H + theta[0, :] *= W / (W + p / up * 2) + theta[1, :] *= H / (H + p / up * 2) + theta = theta[:2, :3].unsqueeze(0).repeat([x.shape[0], 1, 1]) + g = torch.nn.functional.affine_grid(theta, x.shape, align_corners=False) + + # Resample image. + y = upfirdn2d.upsample2d(x=x, f=f, up=up, padding=p) + z = torch.nn.functional.grid_sample(y, g, mode='bilinear', padding_mode='zeros', align_corners=False) + + # Form mask. + m = torch.zeros_like(y) + c = p * 2 + 1 + m[:, :, c:-c, c:-c] = 1 + m = torch.nn.functional.grid_sample(m, g, mode='nearest', padding_mode='zeros', align_corners=False) + return z, m + +#---------------------------------------------------------------------------- + +class Renderer: + def __init__(self): + self._device = torch.device('cuda') + self._pkl_data = dict() # {pkl: dict | CapturedException, ...} + self._networks = dict() # {cache_key: torch.nn.Module, ...} + self._pinned_bufs = dict() # {(shape, dtype): torch.Tensor, ...} + self._cmaps = dict() # {name: torch.Tensor, ...} + self._is_timing = False + self._start_event = torch.cuda.Event(enable_timing=True) + self._end_event = torch.cuda.Event(enable_timing=True) + self._net_layers = dict() # {cache_key: [dnnlib.EasyDict, ...], ...} + self._last_model_input = None + + def render(self, **args): + self._is_timing = True + self._start_event.record(torch.cuda.current_stream(self._device)) + res = dnnlib.EasyDict() + try: + self._render_impl(res, **args) + except: + res.error = CapturedException() + self._end_event.record(torch.cuda.current_stream(self._device)) + if 'image' in res: + res.image = self.to_cpu(res.image).numpy() + if 'stats' in res: + res.stats = self.to_cpu(res.stats).numpy() + if 'error' in res: + res.error = str(res.error) + if self._is_timing: + self._end_event.synchronize() + res.render_time = self._start_event.elapsed_time(self._end_event) * 1e-3 + self._is_timing = False + return res + + def get_network(self, pkl, key, **tweak_kwargs): + data = self._pkl_data.get(pkl, None) + if data is None: + print(f'Loading "{pkl}"... 
', end='', flush=True) + try: + with dnnlib.util.open_url(pkl, verbose=False) as f: + data = legacy.load_network_pkl(f) + print('Done.') + except: + data = CapturedException() + print('Failed!') + self._pkl_data[pkl] = data + self._ignore_timing() + if isinstance(data, CapturedException): + raise data + + orig_net = data[key] + cache_key = (orig_net, self._device, tuple(sorted(tweak_kwargs.items()))) + net = self._networks.get(cache_key, None) + if net is None: + try: + net = copy.deepcopy(orig_net) + net = self._tweak_network(net, **tweak_kwargs) + net.to(self._device) + except: + net = CapturedException() + self._networks[cache_key] = net + self._ignore_timing() + if isinstance(net, CapturedException): + raise net + return net + + def _tweak_network(self, net): + # Print diagnostics. + + RELOAD_MODULES = False + if RELOAD_MODULES: + from training.triplane import TriPlaneGenerator + from torch_utils import misc + print("Reloading Modules!") + net_new = TriPlaneGenerator(*net.init_args, **net.init_kwargs).eval().requires_grad_(False).to(self._device) + misc.copy_params_and_buffers(net, net_new, require_all=True) + net_new.neural_rendering_resolution = net.neural_rendering_resolution + net_new.rendering_kwargs = net.rendering_kwargs + net = net_new + # net.rendering_kwargs['ray_start'] = 'auto' + # net.rendering_kwargs['ray_end'] = 'auto' + # net.rendering_kwargs['avg_camera_pivot'] = [0, 0, 0] + + return net + + def _get_pinned_buf(self, ref): + key = (tuple(ref.shape), ref.dtype) + buf = self._pinned_bufs.get(key, None) + if buf is None: + buf = torch.empty(ref.shape, dtype=ref.dtype).pin_memory() + self._pinned_bufs[key] = buf + return buf + + def to_device(self, buf): + return self._get_pinned_buf(buf).copy_(buf).to(self._device) + + def to_cpu(self, buf): + return self._get_pinned_buf(buf).copy_(buf).clone() + + def _ignore_timing(self): + self._is_timing = False + + def _apply_cmap(self, x, name='viridis'): + cmap = self._cmaps.get(name, None) + if cmap is None: + cmap = matplotlib.cm.get_cmap(name) + cmap = cmap(np.linspace(0, 1, num=1024), bytes=True)[:, :3] + cmap = self.to_device(torch.from_numpy(cmap)) + self._cmaps[name] = cmap + hi = cmap.shape[0] - 1 + x = (x * hi + 0.5).clamp(0, hi).to(torch.int64) + x = torch.nn.functional.embedding(x, cmap) + return x + + def _render_impl(self, res, + pkl = None, + w0_seeds = [[0, 1]], + stylemix_idx = [], + stylemix_seed = 0, + trunc_psi = 1, + trunc_cutoff = 0, + random_seed = 0, + noise_mode = 'const', + force_fp32 = False, + layer_name = None, + sel_channels = 3, + base_channel = 0, + img_scale_db = 0, + img_normalize = False, + fft_show = False, + fft_all = True, + fft_range_db = 50, + fft_beta = 8, + input_transform = None, + untransform = False, + + yaw = 0, + pitch = 0, + lookat_point = (0, 0, 0.2), + conditioning_yaw = 0, + conditioning_pitch = 0, + focal_length = 4.2647, + render_type = 'image', + + do_backbone_caching = False, + + depth_mult = 1, + depth_importance_mult = 1, + ): + # Dig up network details. 
+ G = self.get_network(pkl, 'G_ema').eval().requires_grad_(False).to('cuda') + res.img_resolution = G.img_resolution + res.num_ws = G.backbone.num_ws + res.has_noise = any('noise_const' in name for name, _buf in G.backbone.named_buffers()) + res.has_input_transform = (hasattr(G.backbone, 'input') and hasattr(G.backbone.input, 'transform')) + + # set G rendering kwargs + if 'depth_resolution_default' not in G.rendering_kwargs: + G.rendering_kwargs['depth_resolution_default'] = G.rendering_kwargs['depth_resolution'] + G.rendering_kwargs['depth_resolution_importance_default'] = G.rendering_kwargs['depth_resolution_importance'] + + G.rendering_kwargs['depth_resolution'] = int(G.rendering_kwargs['depth_resolution_default'] * depth_mult) + G.rendering_kwargs['depth_resolution_importance'] = int(G.rendering_kwargs['depth_resolution_importance_default'] * depth_importance_mult) + + # Set input transform. + if res.has_input_transform: + m = np.eye(3) + try: + if input_transform is not None: + m = np.linalg.inv(np.asarray(input_transform)) + except np.linalg.LinAlgError: + res.error = CapturedException() + G.synthesis.input.transform.copy_(torch.from_numpy(m)) + + # Generate random latents. + all_seeds = [seed for seed, _weight in w0_seeds] + [stylemix_seed] + all_seeds = list(set(all_seeds)) + all_zs = np.zeros([len(all_seeds), G.z_dim], dtype=np.float32) + all_cs = np.zeros([len(all_seeds), G.c_dim], dtype=np.float32) + for idx, seed in enumerate(all_seeds): + rnd = np.random.RandomState(seed) + all_zs[idx] = rnd.randn(G.z_dim) + if lookat_point is None: + camera_pivot = torch.tensor(G.rendering_kwargs.get('avg_camera_pivot', (0, 0, 0))) + else: + # override lookat point provided + camera_pivot = torch.tensor(lookat_point) + camera_radius = G.rendering_kwargs.get('avg_camera_radius', 2.7) + forward_cam2world_pose = LookAtPoseSampler.sample(3.14/2 + conditioning_yaw, 3.14/2 + conditioning_pitch, camera_pivot, radius=camera_radius) + intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]]) + conditioning_params = torch.cat([forward_cam2world_pose.reshape(16), intrinsics.reshape(9)], 0) + all_cs[idx, :] = conditioning_params.numpy() + + + # Run mapping network. + # w_avg = G.mapping.w_avg + w_avg = G.backbone.mapping.w_avg + all_zs = self.to_device(torch.from_numpy(all_zs)) + all_cs = self.to_device(torch.from_numpy(all_cs)) + all_ws = G.mapping(z=all_zs, c=all_cs, truncation_psi=trunc_psi, truncation_cutoff=trunc_cutoff) - w_avg + all_ws = dict(zip(all_seeds, all_ws)) + + # Calculate final W. + w = torch.stack([all_ws[seed] * weight for seed, weight in w0_seeds]).sum(dim=0, keepdim=True) + stylemix_idx = [idx for idx in stylemix_idx if 0 <= idx < G.backbone.num_ws] + if len(stylemix_idx) > 0: + w[:, stylemix_idx] = all_ws[stylemix_seed][np.newaxis, stylemix_idx] + w += w_avg + + # Run synthesis network. 
+ synthesis_kwargs = dnnlib.EasyDict(noise_mode=noise_mode, force_fp32=force_fp32, cache_backbone=do_backbone_caching) + torch.manual_seed(random_seed) + + # Set camera params + pose = LookAtPoseSampler.sample(3.14/2 + yaw, 3.14/2 + pitch, camera_pivot, radius=camera_radius) + intrinsics = torch.tensor([[focal_length, 0, 0.5], [0, focal_length, 0.5], [0, 0, 1]]) + c = torch.cat([pose.reshape(-1, 16), intrinsics.reshape(-1, 9)], 1).to(w.device) + + # Backbone caching + if do_backbone_caching and self._last_model_input is not None and torch.all(self._last_model_input == w): + synthesis_kwargs.use_cached_backbone = True + else: + synthesis_kwargs.use_cached_backbone = False + self._last_model_input = w + out, layers = self.run_synthesis_net(G, w, c, capture_layer=layer_name, **synthesis_kwargs) + + # Update layer list. + cache_key = (G.synthesis, tuple(sorted(synthesis_kwargs.items()))) + if cache_key not in self._net_layers: + if layer_name is not None: + torch.manual_seed(random_seed) + _out, layers = self.run_synthesis_net(G, w, c, **synthesis_kwargs) + self._net_layers[cache_key] = layers + res.layers = self._net_layers[cache_key] + + # Untransform. + if untransform and res.has_input_transform: + out, _mask = _apply_affine_transformation(out.to(torch.float32), G.synthesis.input.transform, amax=6) # Override amax to hit the fast path in upfirdn2d. + + # Select channels and compute statistics. + if type(out) == dict: + # is model output. query render type + out = out[render_type][0].to(torch.float32) + else: + out = out[0].to(torch.float32) + + if sel_channels > out.shape[0]: + sel_channels = 1 + base_channel = max(min(base_channel, out.shape[0] - sel_channels), 0) + sel = out[base_channel : base_channel + sel_channels] + res.stats = torch.stack([ + out.mean(), sel.mean(), + out.std(), sel.std(), + out.norm(float('inf')), sel.norm(float('inf')), + ]) + + # normalize if type is 'image_depth' + if render_type == 'image_depth': + out -= out.min() + out /= out.max() + + out -= .5 + out *= -2 + + # Scale and convert to uint8. + img = sel + if img_normalize: + img = img / img.norm(float('inf'), dim=[1,2], keepdim=True).clip(1e-8, 1e8) + img = img * (10 ** (img_scale_db / 20)) + img = (img * 127.5 + 128).clamp(0, 255).to(torch.uint8).permute(1, 2, 0) + res.image = img + + # FFT. + if fft_show: + sig = out if fft_all else sel + sig = sig.to(torch.float32) + sig = sig - sig.mean(dim=[1,2], keepdim=True) + sig = sig * torch.kaiser_window(sig.shape[1], periodic=False, beta=fft_beta, device=self._device)[None, :, None] + sig = sig * torch.kaiser_window(sig.shape[2], periodic=False, beta=fft_beta, device=self._device)[None, None, :] + fft = torch.fft.fftn(sig, dim=[1,2]).abs().square().sum(dim=0) + fft = fft.roll(shifts=[fft.shape[0] // 2, fft.shape[1] // 2], dims=[0,1]) + fft = (fft / fft.mean()).log10() * 10 # dB + fft = self._apply_cmap((fft / fft_range_db + 1) / 2) + res.image = torch.cat([img.expand_as(fft), fft], dim=1) + + @staticmethod + def run_synthesis_net(net, *args, capture_layer=None, **kwargs): # => out, layers + submodule_names = {mod: name for name, mod in net.named_modules()} + unique_names = set() + layers = [] + + def module_hook(module, _inputs, outputs): + outputs = list(outputs) if isinstance(outputs, (tuple, list)) else [outputs] + outputs = [out for out in outputs if isinstance(out, torch.Tensor) and out.ndim in [4, 5]] + for idx, out in enumerate(outputs): + if out.ndim == 5: # G-CNN => remove group dimension. 
+ out = out.mean(2) + name = submodule_names[module] + if name == '': + name = 'output' + if len(outputs) > 1: + name += f':{idx}' + if name in unique_names: + suffix = 2 + while f'{name}_{suffix}' in unique_names: + suffix += 1 + name += f'_{suffix}' + unique_names.add(name) + shape = [int(x) for x in out.shape] + dtype = str(out.dtype).split('.')[-1] + layers.append(dnnlib.EasyDict(name=name, shape=shape, dtype=dtype)) + if name == capture_layer: + raise CaptureSuccess(out) + + hooks = [module.register_forward_hook(module_hook) for module in net.modules()] + try: + out = net.synthesis(*args, **kwargs) + except CaptureSuccess as e: + out = e.out + for hook in hooks: + hook.remove() + return out, layers + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/stylemix_widget.py b/ThirdParty/eg3d/viz/stylemix_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..0b84d6426b27bc890cfcf7e74a74ce0569d77847 --- /dev/null +++ b/ThirdParty/eg3d/viz/stylemix_widget.py @@ -0,0 +1,68 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class StyleMixingWidget: + def __init__(self, viz): + self.viz = viz + self.seed_def = 1000 + self.seed = self.seed_def + self.animate = False + self.enables = [] + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + num_ws = viz.result.get('num_ws', 0) + num_enables = viz.result.get('num_ws', 18) + self.enables += [False] * max(num_enables - len(self.enables), 0) + + if show: + imgui.text('Stylemix') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 8), imgui_utils.grayed_out(num_ws == 0): + _changed, self.seed = imgui.input_int('##seed', self.seed) + imgui.same_line(viz.label_w + viz.font_size * 8 + viz.spacing) + with imgui_utils.grayed_out(num_ws == 0): + _clicked, self.animate = imgui.checkbox('Anim', self.animate) + + pos2 = imgui.get_content_region_max()[0] - 1 - viz.button_w + pos1 = pos2 - imgui.get_text_line_height() - viz.spacing + pos0 = viz.label_w + viz.font_size * 12 + imgui.push_style_var(imgui.STYLE_FRAME_PADDING, [0, 0]) + for idx in range(num_enables): + imgui.same_line(round(pos0 + (pos1 - pos0) * (idx / (num_enables - 1)))) + if idx == 0: + imgui.set_cursor_pos_y(imgui.get_cursor_pos_y() + 3) + with imgui_utils.grayed_out(num_ws == 0): + _clicked, self.enables[idx] = imgui.checkbox(f'##{idx}', self.enables[idx]) + if imgui.is_item_hovered(): + imgui.set_tooltip(f'{idx}') + imgui.pop_style_var(1) + + imgui.same_line(pos2) + imgui.set_cursor_pos_y(imgui.get_cursor_pos_y() - 3) + with imgui_utils.grayed_out(num_ws == 0): + if imgui_utils.button('Reset', width=-1, enabled=(self.seed != self.seed_def or self.animate or any(self.enables[:num_enables]))): + self.seed = self.seed_def + self.animate = False + self.enables = [False] * num_enables + + if any(self.enables[:num_ws]): + 
viz.args.stylemix_idx = [idx for idx, enable in enumerate(self.enables) if enable] + viz.args.stylemix_seed = self.seed & ((1 << 32) - 1) + if self.animate: + self.seed += 1 + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/trunc_noise_widget.py b/ThirdParty/eg3d/viz/trunc_noise_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..cf19c238f8934a01ef3e99247da3981f938c5336 --- /dev/null +++ b/ThirdParty/eg3d/viz/trunc_noise_widget.py @@ -0,0 +1,77 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +import imgui +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class TruncationNoiseWidget: + def __init__(self, viz): + self.viz = viz + self.prev_num_ws = 0 + self.trunc_psi = 0.7 + self.trunc_cutoff = 7 + self.noise_enable = True + self.noise_seed = 0 + self.noise_anim = False + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + num_ws = viz.result.get('num_ws', 0) + has_noise = viz.result.get('has_noise', False) + if num_ws > 0 and num_ws != self.prev_num_ws: + if self.trunc_cutoff > num_ws or self.trunc_cutoff == self.prev_num_ws: + self.trunc_cutoff = num_ws + self.prev_num_ws = num_ws + + if show: + imgui.text('Truncate') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 10), imgui_utils.grayed_out(num_ws == 0): + _changed, self.trunc_psi = imgui.slider_float('##psi', self.trunc_psi, -1, 2, format='Psi %.2f') + imgui.same_line() + if num_ws == 0: + imgui_utils.button('Cutoff 0', width=(viz.font_size * 8 + viz.spacing), enabled=False) + else: + with imgui_utils.item_width(viz.font_size * 8 + viz.spacing): + changed, new_cutoff = imgui.slider_int('##cutoff', self.trunc_cutoff, 0, num_ws, format='Cutoff %d') + if changed: + self.trunc_cutoff = min(max(new_cutoff, 0), num_ws) + + with imgui_utils.grayed_out(not has_noise): + imgui.same_line() + _clicked, self.noise_enable = imgui.checkbox('Noise##enable', self.noise_enable) + imgui.same_line(viz.font_size * 28.7) + with imgui_utils.grayed_out(not self.noise_enable): + with imgui_utils.item_width(-3 - viz.button_w - viz.spacing - viz.font_size * 4): + _changed, self.noise_seed = imgui.input_int('##seed', self.noise_seed) + imgui.same_line(spacing=0) + _clicked, self.noise_anim = imgui.checkbox('Anim##noise', self.noise_anim) + + is_def_trunc = (self.trunc_psi == 1 and self.trunc_cutoff == num_ws) + is_def_noise = (self.noise_enable and self.noise_seed == 0 and not self.noise_anim) + with imgui_utils.grayed_out(is_def_trunc and not has_noise): + imgui.same_line(imgui.get_content_region_max()[0] - 1 - viz.button_w) + if imgui_utils.button('Reset', width=-1, enabled=(not is_def_trunc or not is_def_noise)): + self.prev_num_ws = num_ws + self.trunc_psi = 0.7 + self.trunc_cutoff = 7 + self.noise_enable = True + self.noise_seed = 0 + self.noise_anim = False + + if self.noise_anim: + self.noise_seed += 1 + 
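+        # psi and cutoff feed G.mapping in the renderer, which applies the standard
+        # truncation trick (w = w_avg + psi * (w - w_avg) on the first `cutoff` layers);
+        # noise_mode below selects const / random / none noise during synthesis.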
viz.args.update(trunc_psi=self.trunc_psi, trunc_cutoff=self.trunc_cutoff, random_seed=self.noise_seed) + viz.args.noise_mode = ('none' if not self.noise_enable else 'const' if self.noise_seed == 0 else 'random') + +#---------------------------------------------------------------------------- diff --git a/ThirdParty/eg3d/viz/zoom_widget.py b/ThirdParty/eg3d/viz/zoom_widget.py new file mode 100644 index 0000000000000000000000000000000000000000..ff749b0e69a01fdafa605fd1f82bfc3980a9f7dd --- /dev/null +++ b/ThirdParty/eg3d/viz/zoom_widget.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: LicenseRef-NvidiaProprietary +# +# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual +# property and proprietary rights in and to this material, related +# documentation and any modifications thereto. Any use, reproduction, +# disclosure or distribution of this material and related documentation +# without an express license agreement from NVIDIA CORPORATION or +# its affiliates is strictly prohibited. + +from inspect import formatargvalues +import numpy as np +import imgui +import dnnlib +from gui_utils import imgui_utils + +#---------------------------------------------------------------------------- + +class ZoomWidget: + def __init__(self, viz): + self.viz = viz + self.fov = 18.837 + self.fov_default = 18.837 + + @imgui_utils.scoped_by_object_id + def __call__(self, show=True): + viz = self.viz + if show: + imgui.text('FOV') + imgui.same_line(viz.label_w) + with imgui_utils.item_width(viz.font_size * 10): + _changed, self.fov = imgui.slider_float('##fov', self.fov, 12, 45, format='%.2f Degrees') + + imgui.same_line(viz.label_w + viz.font_size * 13 + viz.button_w + viz.spacing * 3) + snapped = round(self.fov) + if imgui_utils.button('Snap', width=viz.button_w, enabled=(self.fov != snapped)): + self.fov = snapped + imgui.same_line() + if imgui_utils.button('Reset', width=-1, enabled=(abs(self.fov - self.fov_default)) > .01): + self.fov = self.fov_default + + viz.args.focal_length = float(1 / (np.tan(self.fov * 3.14159 / 360) * 1.414)) +#---------------------------------------------------------------------------- diff --git a/ThirdParty/michelangelo/__init__.py b/ThirdParty/michelangelo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b35ec5d8d3d4855e89dbaa3cd2caeab633bbc239 Binary files /dev/null and b/ThirdParty/michelangelo/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/checkpoints/aligned_shape_latents/.gitkeep b/ThirdParty/michelangelo/checkpoints/aligned_shape_latents/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml b/ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ef2190500086d9e718b2c6943e5ef8b572c95e7b --- /dev/null +++ b/ThirdParty/michelangelo/configs/aligned_shape_latents/shapevae-256.yaml @@ -0,0 +1,46 @@ +model: + target: 
michelangelo.models.tsal.asl_pl_module.AlignedShapeAsLatentPLModule + params: + shape_module_cfg: + target: michelangelo.models.tsal.sal_perceiver.AlignedShapeLatentPerceiver + params: + num_latents: 256 + embed_dim: 64 + point_feats: 3 # normal + num_freqs: 8 + include_pi: false + heads: 12 + width: 768 + num_encoder_layers: 8 + num_decoder_layers: 16 + use_ln_post: true + init_scale: 0.25 + qkv_bias: false + use_checkpoint: true + aligned_module_cfg: + target: michelangelo.models.tsal.clip_asl_module.CLIPAlignedShapeAsLatentModule + params: + clip_model_version: "openai/clip-vit-large-patch14" + + loss_cfg: + target: michelangelo.models.tsal.loss.ContrastKLNearFar + params: + contrast_weight: 0.1 + near_weight: 0.1 + kl_weight: 0.001 + + optimizer_cfg: + optimizer: + target: torch.optim.AdamW + params: + betas: [0.9, 0.99] + eps: 1.e-6 + weight_decay: 1.e-2 + + scheduler: + target: michelangelo.utils.trainings.lr_scheduler.LambdaWarmUpCosineFactorScheduler + params: + warm_up_steps: 5000 + f_start: 1.e-6 + f_min: 1.e-3 + f_max: 1.0 diff --git a/ThirdParty/michelangelo/configs/image_cond_diffuser_asl/image-ASLDM-256.yaml b/ThirdParty/michelangelo/configs/image_cond_diffuser_asl/image-ASLDM-256.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d8d7e27aa16ea47ff346f0d307029df9c789d5b8 --- /dev/null +++ b/ThirdParty/michelangelo/configs/image_cond_diffuser_asl/image-ASLDM-256.yaml @@ -0,0 +1,97 @@ +model: + target: michelangelo.models.asl_diffusion.clip_asl_diffuser_pl_module.ClipASLDiffuser + params: + first_stage_config: + target: michelangelo.models.tsal.asl_pl_module.AlignedShapeAsLatentPLModule + params: + shape_module_cfg: + target: michelangelo.models.tsal.sal_perceiver.AlignedShapeLatentPerceiver + params: + num_latents: &num_latents 256 + embed_dim: &embed_dim 64 + point_feats: 3 # normal + num_freqs: 8 + include_pi: false + heads: 12 + width: 768 + num_encoder_layers: 8 + num_decoder_layers: 16 + use_ln_post: true + init_scale: 0.25 + qkv_bias: false + use_checkpoint: false + aligned_module_cfg: + target: michelangelo.models.tsal.clip_asl_module.CLIPAlignedShapeAsLatentModule + params: + clip_model_version: "openai/clip-vit-large-patch14" + + loss_cfg: + target: torch.nn.Identity + + cond_stage_config: + target: michelangelo.models.conditional_encoders.encoder_factory.FrozenCLIPImageGridEmbedder + params: + version: "openai/clip-vit-large-patch14" + zero_embedding_radio: 0.1 + + first_stage_key: "surface" + cond_stage_key: "image" + scale_by_std: false + + denoiser_cfg: + target: michelangelo.models.asl_diffusion.asl_udt.ConditionalASLUDTDenoiser + params: + input_channels: *embed_dim + output_channels: *embed_dim + n_ctx: *num_latents + width: 768 + layers: 6 # 2 * 6 + 1 = 13 + heads: 12 + context_dim: 1024 + init_scale: 1.0 + skip_ln: true + use_checkpoint: true + + scheduler_cfg: + guidance_scale: 7.5 + num_inference_steps: 50 + eta: 0.0 + + noise: + target: diffusers.schedulers.DDPMScheduler + params: + num_train_timesteps: 1000 + beta_start: 0.00085 + beta_end: 0.012 + beta_schedule: "scaled_linear" + variance_type: "fixed_small" + clip_sample: false + denoise: + target: diffusers.schedulers.DDIMScheduler + params: + num_train_timesteps: 1000 + beta_start: 0.00085 + beta_end: 0.012 + beta_schedule: "scaled_linear" + clip_sample: false # clip sample to -1~1 + set_alpha_to_one: false + steps_offset: 1 + + optimizer_cfg: + optimizer: + target: torch.optim.AdamW + params: + betas: [0.9, 0.99] + eps: 1.e-6 + weight_decay: 1.e-2 + + scheduler: + target: 
michelangelo.utils.trainings.lr_scheduler.LambdaWarmUpCosineFactorScheduler + params: + warm_up_steps: 5000 + f_start: 1.e-6 + f_min: 1.e-3 + f_max: 1.0 + + loss_cfg: + loss_type: "mse" diff --git a/ThirdParty/michelangelo/configs/text_cond_diffuser_asl/text-ASLDM-256.yaml b/ThirdParty/michelangelo/configs/text_cond_diffuser_asl/text-ASLDM-256.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e204d99513bc29a4f863537798580537af7634b1 --- /dev/null +++ b/ThirdParty/michelangelo/configs/text_cond_diffuser_asl/text-ASLDM-256.yaml @@ -0,0 +1,98 @@ +model: + target: michelangelo.models.asl_diffusion.clip_asl_diffuser_pl_module.ClipASLDiffuser + params: + first_stage_config: + target: michelangelo.models.tsal.asl_pl_module.AlignedShapeAsLatentPLModule + params: + shape_module_cfg: + target: michelangelo.models.tsal.sal_perceiver.AlignedShapeLatentPerceiver + params: + num_latents: &num_latents 256 + embed_dim: &embed_dim 64 + point_feats: 3 # normal + num_freqs: 8 + include_pi: false + heads: 12 + width: 768 + num_encoder_layers: 8 + num_decoder_layers: 16 + use_ln_post: true + init_scale: 0.25 + qkv_bias: false + use_checkpoint: true + aligned_module_cfg: + target: michelangelo.models.tsal.clip_asl_module.CLIPAlignedShapeAsLatentModule + params: + clip_model_version: "openai/clip-vit-large-patch14" + + loss_cfg: + target: torch.nn.Identity + + cond_stage_config: + target: michelangelo.models.conditional_encoders.encoder_factory.FrozenAlignedCLIPTextEmbedder + params: + version: "openai/clip-vit-large-patch14" + zero_embedding_radio: 0.1 + max_length: 77 + + first_stage_key: "surface" + cond_stage_key: "text" + scale_by_std: false + + denoiser_cfg: + target: michelangelo.models.asl_diffusion.asl_udt.ConditionalASLUDTDenoiser + params: + input_channels: *embed_dim + output_channels: *embed_dim + n_ctx: *num_latents + width: 768 + layers: 8 # 2 * 6 + 1 = 13 + heads: 12 + context_dim: 768 + init_scale: 1.0 + skip_ln: true + use_checkpoint: true + + scheduler_cfg: + guidance_scale: 7.5 + num_inference_steps: 50 + eta: 0.0 + + noise: + target: diffusers.schedulers.DDPMScheduler + params: + num_train_timesteps: 1000 + beta_start: 0.00085 + beta_end: 0.012 + beta_schedule: "scaled_linear" + variance_type: "fixed_small" + clip_sample: false + denoise: + target: diffusers.schedulers.DDIMScheduler + params: + num_train_timesteps: 1000 + beta_start: 0.00085 + beta_end: 0.012 + beta_schedule: "scaled_linear" + clip_sample: false # clip sample to -1~1 + set_alpha_to_one: false + steps_offset: 1 + + optimizer_cfg: + optimizer: + target: torch.optim.AdamW + params: + betas: [0.9, 0.99] + eps: 1.e-6 + weight_decay: 1.e-2 + + scheduler: + target: michelangelo.utils.trainings.lr_scheduler.LambdaWarmUpCosineFactorScheduler + params: + warm_up_steps: 5000 + f_start: 1.e-6 + f_min: 1.e-3 + f_max: 1.0 + + loss_cfg: + loss_type: "mse" \ No newline at end of file diff --git a/ThirdParty/michelangelo/data/__init__.py b/ThirdParty/michelangelo/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/data/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/data/transforms.py b/ThirdParty/michelangelo/data/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..869fb99cf0382cd35f3069d1e3e861b5c6293e9b --- /dev/null +++ b/ThirdParty/michelangelo/data/transforms.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +import os +import 
time +import numpy as np +import warnings +import random +from omegaconf.listconfig import ListConfig +from webdataset import pipelinefilter +import torch +import torchvision.transforms.functional as TVF +from torchvision.transforms import InterpolationMode +from torchvision.transforms.transforms import _interpolation_modes_from_int +from typing import Sequence + +from ThirdParty.michelangelo.utils import instantiate_from_config + + +def _uid_buffer_pick(buf_dict, rng): + uid_keys = list(buf_dict.keys()) + selected_uid = rng.choice(uid_keys) + buf = buf_dict[selected_uid] + + k = rng.randint(0, len(buf) - 1) + sample = buf[k] + buf[k] = buf[-1] + buf.pop() + + if len(buf) == 0: + del buf_dict[selected_uid] + + return sample + + +def _add_to_buf_dict(buf_dict, sample): + key = sample["__key__"] + uid, uid_sample_id = key.split("_") + if uid not in buf_dict: + buf_dict[uid] = [] + buf_dict[uid].append(sample) + + return buf_dict + + +def _uid_shuffle(data, bufsize=1000, initial=100, rng=None, handler=None): + """Shuffle the data in the stream. + + This uses a buffer of size `bufsize`. Shuffling at + startup is less random; this is traded off against + yielding samples quickly. + + data: iterator + bufsize: buffer size for shuffling + returns: iterator + rng: either random module or random.Random instance + + """ + if rng is None: + rng = random.Random(int((os.getpid() + time.time()) * 1e9)) + initial = min(initial, bufsize) + buf_dict = dict() + current_samples = 0 + for sample in data: + _add_to_buf_dict(buf_dict, sample) + current_samples += 1 + + if current_samples < bufsize: + try: + _add_to_buf_dict(buf_dict, next(data)) # skipcq: PYL-R1708 + current_samples += 1 + except StopIteration: + pass + + if current_samples >= initial: + current_samples -= 1 + yield _uid_buffer_pick(buf_dict, rng) + + while current_samples > 0: + current_samples -= 1 + yield _uid_buffer_pick(buf_dict, rng) + + +uid_shuffle = pipelinefilter(_uid_shuffle) + + +class RandomSample(object): + def __init__(self, + num_volume_samples: int = 1024, + num_near_samples: int = 1024): + + super().__init__() + + self.num_volume_samples = num_volume_samples + self.num_near_samples = num_near_samples + + def __call__(self, sample): + rng = np.random.default_rng() + + # 1. sample surface input + total_surface = sample["surface"] + ind = rng.choice(total_surface.shape[0], replace=False) + surface = total_surface[ind] + + # 2. 
sample volume/near geometric points + vol_points = sample["vol_points"] + vol_label = sample["vol_label"] + near_points = sample["near_points"] + near_label = sample["near_label"] + + ind = rng.choice(vol_points.shape[0], self.num_volume_samples, replace=False) + vol_points = vol_points[ind] + vol_label = vol_label[ind] + vol_points_labels = np.concatenate([vol_points, vol_label[:, np.newaxis]], axis=1) + + ind = rng.choice(near_points.shape[0], self.num_near_samples, replace=False) + near_points = near_points[ind] + near_label = near_label[ind] + near_points_labels = np.concatenate([near_points, near_label[:, np.newaxis]], axis=1) + + # concat sampled volume and near points + geo_points = np.concatenate([vol_points_labels, near_points_labels], axis=0) + + sample = { + "surface": surface, + "geo_points": geo_points + } + + return sample + + +class SplitRandomSample(object): + def __init__(self, + use_surface_sample: bool = False, + num_surface_samples: int = 4096, + num_volume_samples: int = 1024, + num_near_samples: int = 1024): + + super().__init__() + + self.use_surface_sample = use_surface_sample + self.num_surface_samples = num_surface_samples + self.num_volume_samples = num_volume_samples + self.num_near_samples = num_near_samples + + def __call__(self, sample): + + rng = np.random.default_rng() + + # 1. sample surface input + surface = sample["surface"] + + if self.use_surface_sample: + replace = surface.shape[0] < self.num_surface_samples + ind = rng.choice(surface.shape[0], self.num_surface_samples, replace=replace) + surface = surface[ind] + + # 2. sample volume/near geometric points + vol_points = sample["vol_points"] + vol_label = sample["vol_label"] + near_points = sample["near_points"] + near_label = sample["near_label"] + + ind = rng.choice(vol_points.shape[0], self.num_volume_samples, replace=False) + vol_points = vol_points[ind] + vol_label = vol_label[ind] + vol_points_labels = np.concatenate([vol_points, vol_label[:, np.newaxis]], axis=1) + + ind = rng.choice(near_points.shape[0], self.num_near_samples, replace=False) + near_points = near_points[ind] + near_label = near_label[ind] + near_points_labels = np.concatenate([near_points, near_label[:, np.newaxis]], axis=1) + + # concat sampled volume and near points + geo_points = np.concatenate([vol_points_labels, near_points_labels], axis=0) + + sample = { + "surface": surface, + "geo_points": geo_points + } + + return sample + + +class FeatureSelection(object): + + VALID_SURFACE_FEATURE_DIMS = { + "none": [0, 1, 2], # xyz + "watertight_normal": [0, 1, 2, 3, 4, 5], # xyz, normal + "normal": [0, 1, 2, 6, 7, 8] + } + + def __init__(self, surface_feature_type: str): + + self.surface_feature_type = surface_feature_type + self.surface_dims = self.VALID_SURFACE_FEATURE_DIMS[surface_feature_type] + + def __call__(self, sample): + sample["surface"] = sample["surface"][:, self.surface_dims] + return sample + + +class AxisScaleTransform(object): + def __init__(self, interval=(0.75, 1.25), jitter=True, jitter_scale=0.005): + assert isinstance(interval, (tuple, list, ListConfig)) + self.interval = interval + self.min_val = interval[0] + self.max_val = interval[1] + self.inter_size = interval[1] - interval[0] + self.jitter = jitter + self.jitter_scale = jitter_scale + + def __call__(self, sample): + + surface = sample["surface"][..., 0:3] + geo_points = sample["geo_points"][..., 0:3] + + scaling = torch.rand(1, 3) * self.inter_size + self.min_val + # print(scaling) + surface = surface * scaling + geo_points = geo_points * scaling + + 
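+        # Re-normalize so the randomly scaled surface fits back inside [-1, 1]^3;
+        # the same factor is applied to geo_points so the query points stay aligned
+        # with the surface.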
scale = (1 / torch.abs(surface).max().item()) * 0.999999 + surface *= scale + geo_points *= scale + + if self.jitter: + surface += self.jitter_scale * torch.randn_like(surface) + surface.clamp_(min=-1.015, max=1.015) + + sample["surface"][..., 0:3] = surface + sample["geo_points"][..., 0:3] = geo_points + + return sample + + +class ToTensor(object): + + def __init__(self, tensor_keys=("surface", "geo_points", "tex_points")): + self.tensor_keys = tensor_keys + + def __call__(self, sample): + for key in self.tensor_keys: + if key not in sample: + continue + + sample[key] = torch.tensor(sample[key], dtype=torch.float32) + + return sample + + +class AxisScale(object): + def __init__(self, interval=(0.75, 1.25), jitter=True, jitter_scale=0.005): + assert isinstance(interval, (tuple, list, ListConfig)) + self.interval = interval + self.jitter = jitter + self.jitter_scale = jitter_scale + + def __call__(self, surface, *args): + scaling = torch.rand(1, 3) * 0.5 + 0.75 + # print(scaling) + surface = surface * scaling + scale = (1 / torch.abs(surface).max().item()) * 0.999999 + surface *= scale + + args_outputs = [] + for _arg in args: + _arg = _arg * scaling * scale + args_outputs.append(_arg) + + if self.jitter: + surface += self.jitter_scale * torch.randn_like(surface) + surface.clamp_(min=-1, max=1) + + if len(args) == 0: + return surface + else: + return surface, *args_outputs + + +class RandomResize(torch.nn.Module): + """Apply randomly Resize with a given probability.""" + + def __init__( + self, + size, + resize_radio=(0.5, 1), + allow_resize_interpolations=(InterpolationMode.BICUBIC, InterpolationMode.BILINEAR, InterpolationMode.BILINEAR), + interpolation=InterpolationMode.BICUBIC, + max_size=None, + antialias=None, + ): + super().__init__() + if not isinstance(size, (int, Sequence)): + raise TypeError(f"Size should be int or sequence. Got {type(size)}") + if isinstance(size, Sequence) and len(size) not in (1, 2): + raise ValueError("If size is a sequence, it should have 1 or 2 values") + + self.size = size + self.max_size = max_size + # Backward compatibility with integer value + if isinstance(interpolation, int): + warnings.warn( + "Argument 'interpolation' of type int is deprecated since 0.13 and will be removed in 0.15. " + "Please use InterpolationMode enum." 
+ ) + interpolation = _interpolation_modes_from_int(interpolation) + + self.interpolation = interpolation + self.antialias = antialias + + self.resize_radio = resize_radio + self.allow_resize_interpolations = allow_resize_interpolations + + def random_resize_params(self): + radio = torch.rand(1) * (self.resize_radio[1] - self.resize_radio[0]) + self.resize_radio[0] + + if isinstance(self.size, int): + size = int(self.size * radio) + elif isinstance(self.size, Sequence): + size = list(self.size) + size = (int(size[0] * radio), int(size[1] * radio)) + else: + raise RuntimeError() + + interpolation = self.allow_resize_interpolations[ + torch.randint(low=0, high=len(self.allow_resize_interpolations), size=(1,)) + ] + return size, interpolation + + def forward(self, img): + size, interpolation = self.random_resize_params() + img = TVF.resize(img, size, interpolation, self.max_size, self.antialias) + img = TVF.resize(img, self.size, self.interpolation, self.max_size, self.antialias) + return img + + def __repr__(self) -> str: + detail = f"(size={self.size}, interpolation={self.interpolation.value}," + detail += f"max_size={self.max_size}, antialias={self.antialias}), resize_radio={self.resize_radio}" + return f"{self.__class__.__name__}{detail}" + + +class Compose(object): + """Composes several transforms together. This transform does not support torchscript. + Please, see the note below. + + Args: + transforms (list of ``Transform`` objects): list of transforms to compose. + + Example: + >>> transforms.Compose([ + >>> transforms.CenterCrop(10), + >>> transforms.ToTensor(), + >>> ]) + + .. note:: + In order to script the transformations, please use ``torch.nn.Sequential`` as below. + + >>> transforms = torch.nn.Sequential( + >>> transforms.CenterCrop(10), + >>> transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), + >>> ) + >>> scripted_transforms = torch.jit.script(transforms) + + Make sure to use only scriptable transformations, i.e. that work with ``torch.Tensor``, does not require + `lambda` functions or ``PIL.Image``. 
+ + """ + + def __init__(self, transforms): + self.transforms = transforms + + def __call__(self, *args): + for t in self.transforms: + args = t(*args) + return args + + def __repr__(self): + format_string = self.__class__.__name__ + '(' + for t in self.transforms: + format_string += '\n' + format_string += ' {0}'.format(t) + format_string += '\n)' + return format_string + + +def identity(*args, **kwargs): + if len(args) == 1: + return args[0] + else: + return args + + +def build_transforms(cfg): + + if cfg is None: + return identity + + transforms = [] + + for transform_name, cfg_instance in cfg.items(): + transform_instance = instantiate_from_config(cfg_instance) + transforms.append(transform_instance) + print(f"Build transform: {transform_instance}") + + transforms = Compose(transforms) + + return transforms + diff --git a/ThirdParty/michelangelo/data/utils.py b/ThirdParty/michelangelo/data/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..af06ed0c8849819a5d2b72ece805e8ec26079ea9 --- /dev/null +++ b/ThirdParty/michelangelo/data/utils.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- + +import torch +import numpy as np + + +def worker_init_fn(_): + worker_info = torch.utils.data.get_worker_info() + worker_id = worker_info.id + + # dataset = worker_info.dataset + # split_size = dataset.num_records // worker_info.num_workers + # # reset num_records to the true number to retain reliable length information + # dataset.sample_ids = dataset.valid_ids[worker_id * split_size:(worker_id + 1) * split_size] + # current_id = np.random.choice(len(np.random.get_state()[1]), 1) + # return np.random.seed(np.random.get_state()[1][current_id] + worker_id) + + return np.random.seed(np.random.get_state()[1][0] + worker_id) + + +def collation_fn(samples, combine_tensors=True, combine_scalars=True): + """ + + Args: + samples (list[dict]): + combine_tensors: + combine_scalars: + + Returns: + + """ + + result = {} + + keys = samples[0].keys() + + for key in keys: + result[key] = [] + + for sample in samples: + for key in keys: + val = sample[key] + result[key].append(val) + + for key in keys: + val_list = result[key] + if isinstance(val_list[0], (int, float)): + if combine_scalars: + result[key] = np.array(result[key]) + + elif isinstance(val_list[0], torch.Tensor): + if combine_tensors: + result[key] = torch.stack(val_list) + + elif isinstance(val_list[0], np.ndarray): + if combine_tensors: + result[key] = np.stack(val_list) + + return result diff --git a/ThirdParty/michelangelo/get_checkpoints.sh b/ThirdParty/michelangelo/get_checkpoints.sh new file mode 100644 index 0000000000000000000000000000000000000000..66f01ba1af8bf67fa6065ea9cf2efd9531c9bd1c --- /dev/null +++ b/ThirdParty/michelangelo/get_checkpoints.sh @@ -0,0 +1,2 @@ +cd ThirdParty/michelangelo/checkpoints/aligned_shape_latents +wget "https://huggingface.co/Maikou/Michelangelo/resolve/main/checkpoints/aligned_shape_latents/shapevae-256.ckpt?download=true" -O shapevae-256.ckpt \ No newline at end of file diff --git a/ThirdParty/michelangelo/graphics/__init__.py b/ThirdParty/michelangelo/graphics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/graphics/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/graphics/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/graphics/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..299970faf5c83291eca64307c7a8bb65e5364a15 Binary files /dev/null and b/ThirdParty/michelangelo/graphics/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/graphics/primitives/__init__.py b/ThirdParty/michelangelo/graphics/primitives/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cb910878f98a83209a41b562d339d12d39f42e89 --- /dev/null +++ b/ThirdParty/michelangelo/graphics/primitives/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- + +from .volume import generate_dense_grid_points + +from .mesh import ( + MeshOutput, + save_obj, + savemeshtes2 +) diff --git a/ThirdParty/michelangelo/graphics/primitives/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/graphics/primitives/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e28f540caa86b3a6c1df42187822e9363746eab Binary files /dev/null and b/ThirdParty/michelangelo/graphics/primitives/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/graphics/primitives/__pycache__/mesh.cpython-310.pyc b/ThirdParty/michelangelo/graphics/primitives/__pycache__/mesh.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..961dde40fb32eea9f1a9b72a39e0894bc04c6f09 Binary files /dev/null and b/ThirdParty/michelangelo/graphics/primitives/__pycache__/mesh.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/graphics/primitives/__pycache__/volume.cpython-310.pyc b/ThirdParty/michelangelo/graphics/primitives/__pycache__/volume.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5293edbdbbfd4c5cd4bdf8c655247a70a486cf10 Binary files /dev/null and b/ThirdParty/michelangelo/graphics/primitives/__pycache__/volume.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/graphics/primitives/mesh.py b/ThirdParty/michelangelo/graphics/primitives/mesh.py new file mode 100644 index 0000000000000000000000000000000000000000..3e5e8a551378b8e86d041967736cacaf904dbf54 --- /dev/null +++ b/ThirdParty/michelangelo/graphics/primitives/mesh.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +import os +import cv2 +import numpy as np +import PIL.Image +from typing import Optional + +import trimesh + + +def save_obj(pointnp_px3, facenp_fx3, fname): + fid = open(fname, "w") + write_str = "" + for pidx, p in enumerate(pointnp_px3): + pp = p + write_str += "v %f %f %f\n" % (pp[0], pp[1], pp[2]) + + for i, f in enumerate(facenp_fx3): + f1 = f + 1 + write_str += "f %d %d %d\n" % (f1[0], f1[1], f1[2]) + fid.write(write_str) + fid.close() + return + + +def savemeshtes2(pointnp_px3, tcoords_px2, facenp_fx3, facetex_fx3, tex_map, fname): + fol, na = os.path.split(fname) + na, _ = os.path.splitext(na) + + matname = "%s/%s.mtl" % (fol, na) + fid = open(matname, "w") + fid.write("newmtl material_0\n") + fid.write("Kd 1 1 1\n") + fid.write("Ka 0 0 0\n") + fid.write("Ks 0.4 0.4 0.4\n") + fid.write("Ns 10\n") + fid.write("illum 2\n") + fid.write("map_Kd %s.png\n" % na) + fid.close() + #### + + fid = open(fname, "w") + fid.write("mtllib %s.mtl\n" % na) + + for pidx, p in enumerate(pointnp_px3): + pp = p + fid.write("v %f %f %f\n" % (pp[0], pp[1], pp[2])) + + for pidx, p in enumerate(tcoords_px2): + pp = p + fid.write("vt %f %f\n" % (pp[0], pp[1])) + + fid.write("usemtl material_0\n") + for i, f in enumerate(facenp_fx3): + f1 = f + 1 + f2 = facetex_fx3[i] + 1 + fid.write("f %d/%d %d/%d %d/%d\n" % (f1[0], f2[0], f1[1], f2[1], f1[2], 
f2[2])) + fid.close() + + PIL.Image.fromarray(np.ascontiguousarray(tex_map), "RGB").save( + os.path.join(fol, "%s.png" % na)) + + return + + +class MeshOutput(object): + + def __init__(self, + mesh_v: np.ndarray, + mesh_f: np.ndarray, + vertex_colors: Optional[np.ndarray] = None, + uvs: Optional[np.ndarray] = None, + mesh_tex_idx: Optional[np.ndarray] = None, + tex_map: Optional[np.ndarray] = None): + + self.mesh_v = mesh_v + self.mesh_f = mesh_f + self.vertex_colors = vertex_colors + self.uvs = uvs + self.mesh_tex_idx = mesh_tex_idx + self.tex_map = tex_map + + def contain_uv_texture(self): + return (self.uvs is not None) and (self.mesh_tex_idx is not None) and (self.tex_map is not None) + + def contain_vertex_colors(self): + return self.vertex_colors is not None + + def export(self, fname): + + if self.contain_uv_texture(): + savemeshtes2( + self.mesh_v, + self.uvs, + self.mesh_f, + self.mesh_tex_idx, + self.tex_map, + fname + ) + + elif self.contain_vertex_colors(): + mesh_obj = trimesh.Trimesh(vertices=self.mesh_v, faces=self.mesh_f, vertex_colors=self.vertex_colors) + mesh_obj.export(fname) + + else: + save_obj( + self.mesh_v, + self.mesh_f, + fname + ) + + + diff --git a/ThirdParty/michelangelo/graphics/primitives/volume.py b/ThirdParty/michelangelo/graphics/primitives/volume.py new file mode 100644 index 0000000000000000000000000000000000000000..f2ee50e1b66fca951123058fe82f4898f67ff0be --- /dev/null +++ b/ThirdParty/michelangelo/graphics/primitives/volume.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +import numpy as np + + +def generate_dense_grid_points(bbox_min: np.ndarray, + bbox_max: np.ndarray, + octree_depth: int, + indexing: str = "ij"): + length = bbox_max - bbox_min + num_cells = np.exp2(octree_depth) + x = np.linspace(bbox_min[0], bbox_max[0], int(num_cells) + 1, dtype=np.float32) + y = np.linspace(bbox_min[1], bbox_max[1], int(num_cells) + 1, dtype=np.float32) + z = np.linspace(bbox_min[2], bbox_max[2], int(num_cells) + 1, dtype=np.float32) + [xs, ys, zs] = np.meshgrid(x, y, z, indexing=indexing) + xyz = np.stack((xs, ys, zs), axis=-1) + xyz = xyz.reshape(-1, 3) + grid_size = [int(num_cells) + 1, int(num_cells) + 1, int(num_cells) + 1] + + return xyz, grid_size, length + +def generate_inside_grid_points(bbox_min: np.ndarray, + bbox_max: np.ndarray, + octree_depth: int, + indexing: str = "ij"): + length = bbox_max - bbox_min + num_cells = np.exp2(octree_depth) + x = np.linspace(bbox_min[0], bbox_max[0], int(num_cells), dtype=np.float32) + y = np.linspace(bbox_min[1], bbox_max[1], int(num_cells), dtype=np.float32) + z = np.linspace(bbox_min[2], bbox_max[2], int(num_cells), dtype=np.float32) + [xs, ys, zs] = np.meshgrid(x, y, z, indexing=indexing) + xyz = np.stack((xs, ys, zs), axis=-1) + xyz = xyz.reshape(-1, 3) + grid_size = [int(num_cells), int(num_cells), int(num_cells)] + + return xyz, grid_size, length diff --git a/ThirdParty/michelangelo/models/__init__.py b/ThirdParty/michelangelo/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/models/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/models/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..95e297df0d82e3c55aeedebe388e9614a2df6e7d Binary files /dev/null and b/ThirdParty/michelangelo/models/__pycache__/__init__.cpython-310.pyc differ 
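A minimal usage sketch for the primitives above (assumptions: scikit-image is installed, and the sphere field plus output path are placeholders, not part of the repository): it evaluates an implicit field on the grid returned by generate_dense_grid_points, runs marching cubes, maps the vertices back into the bounding box, and writes the mesh with save_obj from mesh.py.

import numpy as np
from skimage import measure  # assumed available for this sketch only
from ThirdParty.michelangelo.graphics.primitives import generate_dense_grid_points, save_obj

bbox_min = np.array([-1.0, -1.0, -1.0], dtype=np.float32)
bbox_max = np.array([1.0, 1.0, 1.0], dtype=np.float32)
xyz, grid_size, length = generate_dense_grid_points(bbox_min, bbox_max, octree_depth=6)

# Placeholder implicit field: signed distance to a sphere of radius 0.8.
values = np.linalg.norm(xyz, axis=1) - 0.8
volume = values.reshape(grid_size)  # (2^depth + 1) samples per axis

# Zero level set in grid-index coordinates, then map back to world coordinates.
verts, faces, _normals, _vals = measure.marching_cubes(volume, level=0.0)
verts = verts / (np.array(grid_size) - 1) * length + bbox_min

save_obj(verts, faces, "sphere_sketch.obj")  # save_obj is defined in mesh.py above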
diff --git a/ThirdParty/michelangelo/models/asl_diffusion/__init__.py b/ThirdParty/michelangelo/models/asl_diffusion/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/models/asl_diffusion/asl_diffuser_pl_module.py b/ThirdParty/michelangelo/models/asl_diffusion/asl_diffuser_pl_module.py new file mode 100644 index 0000000000000000000000000000000000000000..ba5235884b8a07f7ed1bbffd56f2d7211759b1ae --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/asl_diffuser_pl_module.py @@ -0,0 +1,483 @@ +# -*- coding: utf-8 -*- + +from omegaconf import DictConfig +from typing import List, Tuple, Dict, Optional, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.optim import lr_scheduler +import pytorch_lightning as pl +from pytorch_lightning.utilities import rank_zero_only + +from einops import rearrange + +from diffusers.schedulers import ( + DDPMScheduler, + DDIMScheduler, + KarrasVeScheduler, + DPMSolverMultistepScheduler +) + +from ThirdParty.michelangelo.utils import instantiate_from_config +# from ThirdParty.michelangelo.models.tsal.tsal_base import ShapeAsLatentPLModule +from ThirdParty.michelangelo.models.tsal.tsal_base import AlignedShapeAsLatentPLModule +from ThirdParty.michelangelo.models.asl_diffusion.inference_utils import ddim_sample + +SchedulerType = Union[DDIMScheduler, KarrasVeScheduler, DPMSolverMultistepScheduler] + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +class ASLDiffuser(pl.LightningModule): + first_stage_model: Optional[AlignedShapeAsLatentPLModule] + # cond_stage_model: Optional[Union[nn.Module, pl.LightningModule]] + model: nn.Module + + def __init__(self, *, + first_stage_config, + denoiser_cfg, + scheduler_cfg, + optimizer_cfg, + loss_cfg, + first_stage_key: str = "surface", + cond_stage_key: str = "image", + cond_stage_trainable: bool = True, + scale_by_std: bool = False, + z_scale_factor: float = 1.0, + ckpt_path: Optional[str] = None, + ignore_keys: Union[Tuple[str], List[str]] = ()): + + super().__init__() + + self.first_stage_key = first_stage_key + self.cond_stage_key = cond_stage_key + self.cond_stage_trainable = cond_stage_trainable + + # 1. initialize first stage. + # Note: the condition model contained in the first stage model. + self.first_stage_config = first_stage_config + self.first_stage_model = None + # self.instantiate_first_stage(first_stage_config) + + # 2. initialize conditional stage + # self.instantiate_cond_stage(cond_stage_config) + self.cond_stage_model = { + "image": self.encode_image, + "image_unconditional_embedding": self.empty_img_cond, + "text": self.encode_text, + "text_unconditional_embedding": self.empty_text_cond, + "surface": self.encode_surface, + "surface_unconditional_embedding": self.empty_surface_cond, + } + + # 3. diffusion model + self.model = instantiate_from_config( + denoiser_cfg, device=None, dtype=None + ) + + self.optimizer_cfg = optimizer_cfg + + # 4. scheduling strategy + self.scheduler_cfg = scheduler_cfg + + self.noise_scheduler: DDPMScheduler = instantiate_from_config(scheduler_cfg.noise) + self.denoise_scheduler: SchedulerType = instantiate_from_config(scheduler_cfg.denoise) + + # 5. 
loss configures + self.loss_cfg = loss_cfg + + self.scale_by_std = scale_by_std + if scale_by_std: + self.register_buffer("z_scale_factor", torch.tensor(z_scale_factor)) + else: + self.z_scale_factor = z_scale_factor + + self.ckpt_path = ckpt_path + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + def instantiate_first_stage(self, config): + model = instantiate_from_config(config) + self.first_stage_model = model.eval() + self.first_stage_model.train = disabled_train + for param in self.first_stage_model.parameters(): + param.requires_grad = False + + self.first_stage_model = self.first_stage_model.to(self.device) + + # def instantiate_cond_stage(self, config): + # if not self.cond_stage_trainable: + # if config == "__is_first_stage__": + # print("Using first stage also as cond stage.") + # self.cond_stage_model = self.first_stage_model + # elif config == "__is_unconditional__": + # print(f"Training {self.__class__.__name__} as an unconditional model.") + # self.cond_stage_model = None + # # self.be_unconditional = True + # else: + # model = instantiate_from_config(config) + # self.cond_stage_model = model.eval() + # self.cond_stage_model.train = disabled_train + # for param in self.cond_stage_model.parameters(): + # param.requires_grad = False + # else: + # assert config != "__is_first_stage__" + # assert config != "__is_unconditional__" + # model = instantiate_from_config(config) + # self.cond_stage_model = model + + def init_from_ckpt(self, path, ignore_keys=()): + state_dict = torch.load(path, map_location="cpu")["state_dict"] + + keys = list(state_dict.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del state_dict[k] + + missing, unexpected = self.load_state_dict(state_dict, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + print(f"Unexpected Keys: {unexpected}") + + @property + def zero_rank(self): + if self._trainer: + zero_rank = self.trainer.local_rank == 0 + else: + zero_rank = True + + return zero_rank + + def configure_optimizers(self) -> Tuple[List, List]: + + lr = self.learning_rate + + trainable_parameters = list(self.model.parameters()) + # if the conditional encoder is trainable + + # if self.cond_stage_trainable: + # conditioner_params = [p for p in self.cond_stage_model.parameters() if p.requires_grad] + # trainable_parameters += conditioner_params + # print(f"number of trainable conditional parameters: {len(conditioner_params)}.") + + if self.optimizer_cfg is None: + optimizers = [torch.optim.AdamW(trainable_parameters, lr=lr, betas=(0.9, 0.99), weight_decay=1e-3)] + schedulers = [] + else: + optimizer = instantiate_from_config(self.optimizer_cfg.optimizer, params=trainable_parameters) + scheduler_func = instantiate_from_config( + self.optimizer_cfg.scheduler, + max_decay_steps=self.trainer.max_steps, + lr_max=lr + ) + scheduler = { + "scheduler": lr_scheduler.LambdaLR(optimizer, lr_lambda=scheduler_func.schedule), + "interval": "step", + "frequency": 1 + } + optimizers = [optimizer] + schedulers = [scheduler] + + return optimizers, schedulers + + @torch.no_grad() + def encode_text(self, text): + + b = text.shape[0] + text_tokens = rearrange(text, "b t l -> (b t) l") + text_embed = self.first_stage_model.model.encode_text_embed(text_tokens) + text_embed = rearrange(text_embed, "(b t) d -> b t d", b=b) + text_embed = 
text_embed.mean(dim=1) + text_embed = text_embed / text_embed.norm(dim=-1, keepdim=True) + + return text_embed + + @torch.no_grad() + def encode_image(self, img): + + return self.first_stage_model.model.encode_image_embed(img) + + @torch.no_grad() + def encode_surface(self, surface): + + return self.first_stage_model.model.encode_shape_embed(surface, return_latents=False) + + @torch.no_grad() + def empty_text_cond(self, cond): + + return torch.zeros_like(cond, device=cond.device) + + @torch.no_grad() + def empty_img_cond(self, cond): + + return torch.zeros_like(cond, device=cond.device) + + @torch.no_grad() + def empty_surface_cond(self, cond): + + return torch.zeros_like(cond, device=cond.device) + + @torch.no_grad() + def encode_first_stage(self, surface: torch.FloatTensor, sample_posterior=True): + + z_q = self.first_stage_model.encode(surface, sample_posterior) + z_q = self.z_scale_factor * z_q + + return z_q + + @torch.no_grad() + def decode_first_stage(self, z_q: torch.FloatTensor, **kwargs): + + z_q = 1. / self.z_scale_factor * z_q + latents = self.first_stage_model.decode(z_q, **kwargs) + return latents + + @rank_zero_only + @torch.no_grad() + def on_train_batch_start(self, batch, batch_idx): + # only for very first batch + if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 \ + and batch_idx == 0 and self.ckpt_path is None: + # set rescale weight to 1./std of encodings + print("### USING STD-RESCALING ###") + + z_q = self.encode_first_stage(batch[self.first_stage_key]) + z = z_q.detach() + + del self.z_scale_factor + self.register_buffer("z_scale_factor", 1. / z.flatten().std()) + print(f"setting self.z_scale_factor to {self.z_scale_factor}") + + print("### USING STD-RESCALING ###") + + def compute_loss(self, model_outputs, split): + """ + + Args: + model_outputs (dict): + - x_0: + - noise: + - noise_prior: + - noise_pred: + - noise_pred_prior: + + split (str): + + Returns: + + """ + + pred = model_outputs["pred"] + + if self.noise_scheduler.prediction_type == "epsilon": + target = model_outputs["noise"] + elif self.noise_scheduler.prediction_type == "sample": + target = model_outputs["x_0"] + else: + raise NotImplementedError(f"Prediction Type: {self.noise_scheduler.prediction_type} not yet supported.") + + if self.loss_cfg.loss_type == "l1": + simple = F.l1_loss(pred, target, reduction="mean") + elif self.loss_cfg.loss_type in ["mse", "l2"]: + simple = F.mse_loss(pred, target, reduction="mean") + else: + raise NotImplementedError(f"Loss Type: {self.loss_cfg.loss_type} not yet supported.") + + total_loss = simple + + loss_dict = { + f"{split}/total_loss": total_loss.clone().detach(), + f"{split}/simple": simple.detach(), + } + + return total_loss, loss_dict + + def forward(self, batch): + """ + + Args: + batch: + + Returns: + + """ + + if self.first_stage_model is None: + self.instantiate_first_stage(self.first_stage_config) + + latents = self.encode_first_stage(batch[self.first_stage_key]) + + # conditions = self.cond_stage_model.encode(batch[self.cond_stage_key]) + + conditions = self.cond_stage_model[self.cond_stage_key](batch[self.cond_stage_key]).unsqueeze(1) + + mask = torch.rand((len(conditions), 1, 1), device=conditions.device, dtype=conditions.dtype) >= 0.1 + conditions = conditions * mask.to(conditions) + + # Sample noise that we"ll add to the latents + # [batch_size, n_token, latent_dim] + noise = torch.randn_like(latents) + bs = latents.shape[0] + # Sample a random timestep for each motion + timesteps = torch.randint( + 0, + 
self.noise_scheduler.config.num_train_timesteps, + (bs,), + device=latents.device, + ) + timesteps = timesteps.long() + # Add noise to the latents according to the noise magnitude at each timestep + noisy_z = self.noise_scheduler.add_noise(latents, noise, timesteps) + + # diffusion model forward + noise_pred = self.model(noisy_z, timesteps, conditions) + + diffusion_outputs = { + "x_0": noisy_z, + "noise": noise, + "pred": noise_pred + } + + return diffusion_outputs + + def training_step(self, batch: Dict[str, Union[torch.FloatTensor, List[str]]], + batch_idx: int, optimizer_idx: int = 0) -> torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface (torch.FloatTensor): + - image (torch.FloatTensor): if provide, [bs, 3, h, w], item range [0, 1] + - depth (torch.FloatTensor): if provide, [bs, 1, h, w], item range [-1, 1] + - normal (torch.FloatTensor): if provide, [bs, 3, h, w], item range [-1, 1] + - text (list of str): + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + diffusion_outputs = self(batch) + + loss, loss_dict = self.compute_loss(diffusion_outputs, "train") + self.log_dict(loss_dict, prog_bar=True, logger=True, sync_dist=False, rank_zero_only=True) + + return loss + + def validation_step(self, batch: Dict[str, torch.FloatTensor], + batch_idx: int, optimizer_idx: int = 0) -> torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface_pc (torch.FloatTensor): [n_pts, 4] + - surface_feats (torch.FloatTensor): [n_pts, c] + - text (list of str): + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + diffusion_outputs = self(batch) + + loss, loss_dict = self.compute_loss(diffusion_outputs, "val") + self.log_dict(loss_dict, prog_bar=True, logger=True, sync_dist=False, rank_zero_only=True) + + return loss + + @torch.no_grad() + def sample(self, + batch: Dict[str, Union[torch.FloatTensor, List[str]]], + sample_times: int = 1, + steps: Optional[int] = None, + guidance_scale: Optional[float] = None, + eta: float = 0.0, + return_intermediates: bool = False, **kwargs): + + if self.first_stage_model is None: + self.instantiate_first_stage(self.first_stage_config) + + if steps is None: + steps = self.scheduler_cfg.num_inference_steps + + if guidance_scale is None: + guidance_scale = self.scheduler_cfg.guidance_scale + do_classifier_free_guidance = guidance_scale > 0 + + # conditional encode + xc = batch[self.cond_stage_key] + # cond = self.cond_stage_model[self.cond_stage_key](xc) + cond = self.cond_stage_model[self.cond_stage_key](xc).unsqueeze(1) + + if do_classifier_free_guidance: + """ + Note: There are two kinds of uncond for text. 
+ 1: using "" as uncond text; (in SAL diffusion) + 2: zeros_like(cond) as uncond text; (in MDM) + """ + # un_cond = self.cond_stage_model.unconditional_embedding(batch_size=len(xc)) + un_cond = self.cond_stage_model[f"{self.cond_stage_key}_unconditional_embedding"](cond) + # un_cond = torch.zeros_like(cond, device=cond.device) + cond = torch.cat([un_cond, cond], dim=0) + + outputs = [] + latents = None + + if not return_intermediates: + for _ in range(sample_times): + sample_loop = ddim_sample( + self.denoise_scheduler, + self.model, + shape=self.first_stage_model.latent_shape, + cond=cond, + steps=steps, + guidance_scale=guidance_scale, + do_classifier_free_guidance=do_classifier_free_guidance, + device=self.device, + eta=eta, + disable_prog=not self.zero_rank + ) + for sample, t in sample_loop: + latents = sample + outputs.append(self.decode_first_stage(latents, **kwargs)) + else: + + sample_loop = ddim_sample( + self.denoise_scheduler, + self.model, + shape=self.first_stage_model.latent_shape, + cond=cond, + steps=steps, + guidance_scale=guidance_scale, + do_classifier_free_guidance=do_classifier_free_guidance, + device=self.device, + eta=eta, + disable_prog=not self.zero_rank + ) + + iter_size = steps // sample_times + i = 0 + for sample, t in sample_loop: + latents = sample + if i % iter_size == 0 or i == steps - 1: + outputs.append(self.decode_first_stage(latents, **kwargs)) + i += 1 + + return outputs diff --git a/ThirdParty/michelangelo/models/asl_diffusion/asl_udt.py b/ThirdParty/michelangelo/models/asl_diffusion/asl_udt.py new file mode 100644 index 0000000000000000000000000000000000000000..460ecbccbb99eb6ecfa4db8817bfd2a419f334d2 --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/asl_udt.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- + +import torch +import torch.nn as nn +from typing import Optional +from diffusers.models.embeddings import Timesteps +import math + +from ThirdParty.michelangelo.models.modules.transformer_blocks import MLP +from ThirdParty.michelangelo.models.modules.diffusion_transformer import UNetDiffusionTransformer + + +class ConditionalASLUDTDenoiser(nn.Module): + + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + input_channels: int, + output_channels: int, + n_ctx: int, + width: int, + layers: int, + heads: int, + context_dim: int, + context_ln: bool = True, + skip_ln: bool = False, + init_scale: float = 0.25, + flip_sin_to_cos: bool = False, + use_checkpoint: bool = False): + super().__init__() + + self.use_checkpoint = use_checkpoint + + init_scale = init_scale * math.sqrt(1.0 / width) + + self.backbone = UNetDiffusionTransformer( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + layers=layers, + heads=heads, + skip_ln=skip_ln, + init_scale=init_scale, + use_checkpoint=use_checkpoint + ) + self.ln_post = nn.LayerNorm(width, device=device, dtype=dtype) + self.input_proj = nn.Linear(input_channels, width, device=device, dtype=dtype) + self.output_proj = nn.Linear(width, output_channels, device=device, dtype=dtype) + + # timestep embedding + self.time_embed = Timesteps(width, flip_sin_to_cos=flip_sin_to_cos, downscale_freq_shift=0) + self.time_proj = MLP( + device=device, dtype=dtype, width=width, init_scale=init_scale + ) + + self.context_embed = nn.Sequential( + nn.LayerNorm(context_dim, device=device, dtype=dtype), + nn.Linear(context_dim, width, device=device, dtype=dtype), + ) + + if context_ln: + self.context_embed = nn.Sequential( + nn.LayerNorm(context_dim, device=device, 
dtype=dtype), + nn.Linear(context_dim, width, device=device, dtype=dtype), + ) + else: + self.context_embed = nn.Linear(context_dim, width, device=device, dtype=dtype) + + def forward(self, + model_input: torch.FloatTensor, + timestep: torch.LongTensor, + context: torch.FloatTensor): + + r""" + Args: + model_input (torch.FloatTensor): [bs, n_data, c] + timestep (torch.LongTensor): [bs,] + context (torch.FloatTensor): [bs, context_tokens, c] + + Returns: + sample (torch.FloatTensor): [bs, n_data, c] + + """ + + _, n_data, _ = model_input.shape + + # 1. time + t_emb = self.time_proj(self.time_embed(timestep)).unsqueeze(dim=1) + + # 2. conditions projector + context = self.context_embed(context) + + # 3. denoiser + x = self.input_proj(model_input) + x = torch.cat([t_emb, context, x], dim=1) + x = self.backbone(x) + x = self.ln_post(x) + x = x[:, -n_data:] + sample = self.output_proj(x) + + return sample + + diff --git a/ThirdParty/michelangelo/models/asl_diffusion/base.py b/ThirdParty/michelangelo/models/asl_diffusion/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a979197ae9990929aecbca42ce081a2b1aa1f465 --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/base.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- + +import torch +import torch.nn as nn + + +class BaseDenoiser(nn.Module): + + def __init__(self): + super().__init__() + + def forward(self, x, t, context): + raise NotImplementedError diff --git a/ThirdParty/michelangelo/models/asl_diffusion/clip_asl_diffuser_pl_module.py b/ThirdParty/michelangelo/models/asl_diffusion/clip_asl_diffuser_pl_module.py new file mode 100644 index 0000000000000000000000000000000000000000..46ad42b04034517ef8c97768db966869e4857c37 --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/clip_asl_diffuser_pl_module.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- + +from omegaconf import DictConfig +from typing import List, Tuple, Dict, Optional, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.optim import lr_scheduler +import pytorch_lightning as pl +from pytorch_lightning.utilities import rank_zero_only + +from diffusers.schedulers import ( + DDPMScheduler, + DDIMScheduler, + KarrasVeScheduler, + DPMSolverMultistepScheduler +) + +from ThirdParty.michelangelo.utils import instantiate_from_config +from ThirdParty.michelangelo.models.tsal.tsal_base import AlignedShapeAsLatentPLModule +from ThirdParty.michelangelo.models.asl_diffusion.inference_utils import ddim_sample + +SchedulerType = Union[DDIMScheduler, KarrasVeScheduler, DPMSolverMultistepScheduler] + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +class ClipASLDiffuser(pl.LightningModule): + first_stage_model: Optional[AlignedShapeAsLatentPLModule] + cond_stage_model: Optional[Union[nn.Module, pl.LightningModule]] + model: nn.Module + + def __init__(self, *, + first_stage_config, + cond_stage_config, + denoiser_cfg, + scheduler_cfg, + optimizer_cfg, + loss_cfg, + first_stage_key: str = "surface", + cond_stage_key: str = "image", + scale_by_std: bool = False, + z_scale_factor: float = 1.0, + ckpt_path: Optional[str] = None, + ignore_keys: Union[Tuple[str], List[str]] = ()): + + super().__init__() + + self.first_stage_key = first_stage_key + self.cond_stage_key = cond_stage_key + + # 1. lazy initialize first stage + self.instantiate_first_stage(first_stage_config) + + # 2. 
initialize conditional stage + self.instantiate_cond_stage(cond_stage_config) + + # 3. diffusion model + self.model = instantiate_from_config( + denoiser_cfg, device=None, dtype=None + ) + + self.optimizer_cfg = optimizer_cfg + + # 4. scheduling strategy + self.scheduler_cfg = scheduler_cfg + + self.noise_scheduler: DDPMScheduler = instantiate_from_config(scheduler_cfg.noise) + self.denoise_scheduler: SchedulerType = instantiate_from_config(scheduler_cfg.denoise) + + # 5. loss configures + self.loss_cfg = loss_cfg + + self.scale_by_std = scale_by_std + if scale_by_std: + self.register_buffer("z_scale_factor", torch.tensor(z_scale_factor)) + else: + self.z_scale_factor = z_scale_factor + + self.ckpt_path = ckpt_path + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + def instantiate_non_trainable_model(self, config): + model = instantiate_from_config(config) + model = model.eval() + model.train = disabled_train + for param in model.parameters(): + param.requires_grad = False + + return model + + def instantiate_first_stage(self, first_stage_config): + self.first_stage_model = self.instantiate_non_trainable_model(first_stage_config) + self.first_stage_model.set_shape_model_only() + + def instantiate_cond_stage(self, cond_stage_config): + self.cond_stage_model = self.instantiate_non_trainable_model(cond_stage_config) + + def init_from_ckpt(self, path, ignore_keys=()): + state_dict = torch.load(path, map_location="cpu")["state_dict"] + + keys = list(state_dict.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del state_dict[k] + + missing, unexpected = self.load_state_dict(state_dict, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + print(f"Unexpected Keys: {unexpected}") + + @property + def zero_rank(self): + if self._trainer: + zero_rank = self.trainer.local_rank == 0 + else: + zero_rank = True + + return zero_rank + + def configure_optimizers(self) -> Tuple[List, List]: + + lr = self.learning_rate + + trainable_parameters = list(self.model.parameters()) + if self.optimizer_cfg is None: + optimizers = [torch.optim.AdamW(trainable_parameters, lr=lr, betas=(0.9, 0.99), weight_decay=1e-3)] + schedulers = [] + else: + optimizer = instantiate_from_config(self.optimizer_cfg.optimizer, params=trainable_parameters) + scheduler_func = instantiate_from_config( + self.optimizer_cfg.scheduler, + max_decay_steps=self.trainer.max_steps, + lr_max=lr + ) + scheduler = { + "scheduler": lr_scheduler.LambdaLR(optimizer, lr_lambda=scheduler_func.schedule), + "interval": "step", + "frequency": 1 + } + optimizers = [optimizer] + schedulers = [scheduler] + + return optimizers, schedulers + + @torch.no_grad() + def encode_first_stage(self, surface: torch.FloatTensor, sample_posterior=True): + + z_q = self.first_stage_model.encode(surface, sample_posterior) + z_q = self.z_scale_factor * z_q + + return z_q + + @torch.no_grad() + def decode_first_stage(self, z_q: torch.FloatTensor, **kwargs): + + z_q = 1. 
/ self.z_scale_factor * z_q + latents = self.first_stage_model.decode(z_q, **kwargs) + return latents + + @rank_zero_only + @torch.no_grad() + def on_train_batch_start(self, batch, batch_idx): + # only for very first batch + if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 \ + and batch_idx == 0 and self.ckpt_path is None: + # set rescale weight to 1./std of encodings + print("### USING STD-RESCALING ###") + + z_q = self.encode_first_stage(batch[self.first_stage_key]) + z = z_q.detach() + + del self.z_scale_factor + self.register_buffer("z_scale_factor", 1. / z.flatten().std()) + print(f"setting self.z_scale_factor to {self.z_scale_factor}") + + print("### USING STD-RESCALING ###") + + def compute_loss(self, model_outputs, split): + """ + + Args: + model_outputs (dict): + - x_0: + - noise: + - noise_prior: + - noise_pred: + - noise_pred_prior: + + split (str): + + Returns: + + """ + + pred = model_outputs["pred"] + + if self.noise_scheduler.prediction_type == "epsilon": + target = model_outputs["noise"] + elif self.noise_scheduler.prediction_type == "sample": + target = model_outputs["x_0"] + else: + raise NotImplementedError(f"Prediction Type: {self.noise_scheduler.prediction_type} not yet supported.") + + if self.loss_cfg.loss_type == "l1": + simple = F.l1_loss(pred, target, reduction="mean") + elif self.loss_cfg.loss_type in ["mse", "l2"]: + simple = F.mse_loss(pred, target, reduction="mean") + else: + raise NotImplementedError(f"Loss Type: {self.loss_cfg.loss_type} not yet supported.") + + total_loss = simple + + loss_dict = { + f"{split}/total_loss": total_loss.clone().detach(), + f"{split}/simple": simple.detach(), + } + + return total_loss, loss_dict + + def forward(self, batch): + """ + + Args: + batch: + + Returns: + + """ + + latents = self.encode_first_stage(batch[self.first_stage_key]) + conditions = self.cond_stage_model.encode(batch[self.cond_stage_key]) + + # Sample noise that we"ll add to the latents + # [batch_size, n_token, latent_dim] + noise = torch.randn_like(latents) + bs = latents.shape[0] + # Sample a random timestep for each motion + timesteps = torch.randint( + 0, + self.noise_scheduler.config.num_train_timesteps, + (bs,), + device=latents.device, + ) + timesteps = timesteps.long() + # Add noise to the latents according to the noise magnitude at each timestep + noisy_z = self.noise_scheduler.add_noise(latents, noise, timesteps) + + # diffusion model forward + noise_pred = self.model(noisy_z, timesteps, conditions) + + diffusion_outputs = { + "x_0": noisy_z, + "noise": noise, + "pred": noise_pred + } + + return diffusion_outputs + + def training_step(self, batch: Dict[str, Union[torch.FloatTensor, List[str]]], + batch_idx: int, optimizer_idx: int = 0) -> torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface (torch.FloatTensor): + - image (torch.FloatTensor): if provide, [bs, 3, h, w], item range [0, 1] + - depth (torch.FloatTensor): if provide, [bs, 1, h, w], item range [-1, 1] + - normal (torch.FloatTensor): if provide, [bs, 3, h, w], item range [-1, 1] + - text (list of str): + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + diffusion_outputs = self(batch) + + loss, loss_dict = self.compute_loss(diffusion_outputs, "train") + self.log_dict(loss_dict, prog_bar=True, logger=True, sync_dist=False, rank_zero_only=True) + + return loss + + def validation_step(self, batch: Dict[str, torch.FloatTensor], + batch_idx: int, optimizer_idx: int = 0) -> 
torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface_pc (torch.FloatTensor): [n_pts, 4] + - surface_feats (torch.FloatTensor): [n_pts, c] + - text (list of str): + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + diffusion_outputs = self(batch) + + loss, loss_dict = self.compute_loss(diffusion_outputs, "val") + self.log_dict(loss_dict, prog_bar=True, logger=True, sync_dist=False, rank_zero_only=True) + + return loss + + @torch.no_grad() + def sample(self, + batch: Dict[str, Union[torch.FloatTensor, List[str]]], + sample_times: int = 1, + steps: Optional[int] = None, + guidance_scale: Optional[float] = None, + eta: float = 0.0, + return_intermediates: bool = False, **kwargs): + + if steps is None: + steps = self.scheduler_cfg.num_inference_steps + + if guidance_scale is None: + guidance_scale = self.scheduler_cfg.guidance_scale + do_classifier_free_guidance = guidance_scale > 0 + + # conditional encode + xc = batch[self.cond_stage_key] + + # print(self.first_stage_model.device, self.cond_stage_model.device, self.device) + + cond = self.cond_stage_model(xc) + + if do_classifier_free_guidance: + un_cond = self.cond_stage_model.unconditional_embedding(batch_size=len(xc)) + cond = torch.cat([un_cond, cond], dim=0) + + outputs = [] + latents = None + + if not return_intermediates: + for _ in range(sample_times): + sample_loop = ddim_sample( + self.denoise_scheduler, + self.model, + shape=self.first_stage_model.latent_shape, + cond=cond, + steps=steps, + guidance_scale=guidance_scale, + do_classifier_free_guidance=do_classifier_free_guidance, + device=self.device, + eta=eta, + disable_prog=not self.zero_rank + ) + for sample, t in sample_loop: + latents = sample + outputs.append(self.decode_first_stage(latents, **kwargs)) + else: + + sample_loop = ddim_sample( + self.denoise_scheduler, + self.model, + shape=self.first_stage_model.latent_shape, + cond=cond, + steps=steps, + guidance_scale=guidance_scale, + do_classifier_free_guidance=do_classifier_free_guidance, + device=self.device, + eta=eta, + disable_prog=not self.zero_rank + ) + + iter_size = steps // sample_times + i = 0 + for sample, t in sample_loop: + latents = sample + if i % iter_size == 0 or i == steps - 1: + outputs.append(self.decode_first_stage(latents, **kwargs)) + i += 1 + + return outputs diff --git a/ThirdParty/michelangelo/models/asl_diffusion/inference_utils.py b/ThirdParty/michelangelo/models/asl_diffusion/inference_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..967d5c52a8e33a6759d1c4891b0d21d1c9f95442 --- /dev/null +++ b/ThirdParty/michelangelo/models/asl_diffusion/inference_utils.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +import torch +from tqdm import tqdm +from typing import Tuple, List, Union, Optional +from diffusers.schedulers import DDIMScheduler + + +__all__ = ["ddim_sample"] + + +def ddim_sample(ddim_scheduler: DDIMScheduler, + diffusion_model: torch.nn.Module, + shape: Union[List[int], Tuple[int]], + cond: torch.FloatTensor, + steps: int, + eta: float = 0.0, + guidance_scale: float = 3.0, + do_classifier_free_guidance: bool = True, + generator: Optional[torch.Generator] = None, + device: torch.device = "cuda:0", + disable_prog: bool = True): + + assert steps > 0, f"{steps} must > 0." 
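+    # When do_classifier_free_guidance is True, `cond` is expected to arrive with the
+    # unconditional embeddings already stacked in front of the conditional ones along
+    # dim 0 (the calling modules build it that way), so the effective batch size is
+    # cond.shape[0] // 2 and the prediction is later split again with chunk(2).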
+ + # init latents + bsz = cond.shape[0] + if do_classifier_free_guidance: + bsz = bsz // 2 + + latents = torch.randn( + (bsz, *shape), + generator=generator, + device=cond.device, + dtype=cond.dtype, + ) + # scale the initial noise by the standard deviation required by the scheduler + latents = latents * ddim_scheduler.init_noise_sigma + # set timesteps + ddim_scheduler.set_timesteps(steps) + timesteps = ddim_scheduler.timesteps.to(device) + # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature + # eta (η) is only used with the DDIMScheduler, and between [0, 1] + extra_step_kwargs = { + "eta": eta, + "generator": generator + } + + # reverse + for i, t in enumerate(tqdm(timesteps, disable=disable_prog, desc="DDIM Sampling:", leave=False)): + # expand the latents if we are doing classifier free guidance + latent_model_input = ( + torch.cat([latents] * 2) + if do_classifier_free_guidance + else latents + ) + # latent_model_input = scheduler.scale_model_input(latent_model_input, t) + # predict the noise residual + timestep_tensor = torch.tensor([t], dtype=torch.long, device=device) + timestep_tensor = timestep_tensor.expand(latent_model_input.shape[0]) + noise_pred = diffusion_model.forward(latent_model_input, timestep_tensor, cond) + + # perform guidance + if do_classifier_free_guidance: + noise_pred_uncond, noise_pred_text = noise_pred.chunk(2) + noise_pred = noise_pred_uncond + guidance_scale * ( + noise_pred_text - noise_pred_uncond + ) + # text_embeddings_for_guidance = encoder_hidden_states.chunk( + # 2)[1] if do_classifier_free_guidance else encoder_hidden_states + # compute the previous noisy sample x_t -> x_t-1 + latents = ddim_scheduler.step( + noise_pred, t, latents, **extra_step_kwargs + ).prev_sample + + yield latents, t + + +def karra_sample(): + pass diff --git a/ThirdParty/michelangelo/models/conditional_encoders/__init__.py b/ThirdParty/michelangelo/models/conditional_encoders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f644ce0eac101dbd60ffdb0225a7560a5dc25735 --- /dev/null +++ b/ThirdParty/michelangelo/models/conditional_encoders/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +from .clip import CLIPEncoder diff --git a/ThirdParty/michelangelo/models/conditional_encoders/clip.py b/ThirdParty/michelangelo/models/conditional_encoders/clip.py new file mode 100644 index 0000000000000000000000000000000000000000..099b237d543981cca70f92ccbbb0c1c560aa0f2a --- /dev/null +++ b/ThirdParty/michelangelo/models/conditional_encoders/clip.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- + +import torch +import numpy as np +from PIL import Image +from dataclasses import dataclass +from torchvision.transforms import Normalize +from transformers import CLIPModel, CLIPTokenizer +from transformers.utils import ModelOutput +from typing import Iterable, Optional, Union, List + + +ImageType = Union[np.ndarray, torch.Tensor, Image.Image] + + +@dataclass +class CLIPEmbedOutput(ModelOutput): + last_hidden_state: torch.FloatTensor = None + pooler_output: torch.FloatTensor = None + embeds: torch.FloatTensor = None + + +class CLIPEncoder(torch.nn.Module): + + def __init__(self, model_path="openai/clip-vit-base-patch32"): + + super().__init__() + + # Load the CLIP model and processor + self.model: CLIPModel = CLIPModel.from_pretrained(model_path) + self.tokenizer = CLIPTokenizer.from_pretrained(model_path) + self.image_preprocess = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) + + self.model.training = 
False + for p in self.model.parameters(): + p.requires_grad = False + + @torch.no_grad() + def encode_image(self, images: Iterable[Optional[ImageType]]): + pixel_values = self.image_preprocess(images) + + vision_outputs = self.model.vision_model(pixel_values=pixel_values) + + pooler_output = vision_outputs[1] # pooled_output + image_features = self.model.visual_projection(pooler_output) + + visual_embeds = CLIPEmbedOutput( + last_hidden_state=vision_outputs.last_hidden_state, + pooler_output=pooler_output, + embeds=image_features + ) + + return visual_embeds + + @torch.no_grad() + def encode_text(self, texts: List[str]): + text_inputs = self.tokenizer(texts, padding=True, return_tensors="pt") + + text_outputs = self.model.text_model(input_ids=text_inputs) + + pooler_output = text_outputs[1] # pooled_output + text_features = self.model.text_projection(pooler_output) + + text_embeds = CLIPEmbedOutput( + last_hidden_state=text_outputs.last_hidden_state, + pooler_output=pooler_output, + embeds=text_features + ) + + return text_embeds + + def forward(self, + images: Iterable[Optional[ImageType]], + texts: List[str]): + + visual_embeds = self.encode_image(images) + text_embeds = self.encode_text(texts) + + return visual_embeds, text_embeds + + + + + + + + + + diff --git a/ThirdParty/michelangelo/models/conditional_encoders/encoder_factory.py b/ThirdParty/michelangelo/models/conditional_encoders/encoder_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..9778a023c5691fd0ccba6f420196934926f89562 --- /dev/null +++ b/ThirdParty/michelangelo/models/conditional_encoders/encoder_factory.py @@ -0,0 +1,562 @@ +# -*- coding: utf-8 -*- +import os + +import torch +import torch.nn as nn +from torchvision import transforms +from transformers import CLIPModel, CLIPTokenizer +from collections import OrderedDict + +from ThirdParty.michelangelo.data.transforms import RandomResize + + +class AbstractEncoder(nn.Module): + embedding_dim: int + + def __init__(self): + super().__init__() + + def encode(self, *args, **kwargs): + raise NotImplementedError + + +class ClassEmbedder(nn.Module): + def __init__(self, embed_dim, n_classes=1000, key="class"): + super().__init__() + self.key = key + self.embedding = nn.Embedding(n_classes, embed_dim) + + def forward(self, batch, key=None): + if key is None: + key = self.key + # this is for use in crossattn + c = batch[key][:, None] + c = self.embedding(c) + return c + + +class FrozenCLIPTextEmbedder(AbstractEncoder): + """Uses the CLIP transformer encoder for text (from Hugging Face)""" + + def __init__( + self, + version="openai/clip-vit-large-patch14", + tokenizer_version=None, + device="cuda", + max_length=77, + zero_embedding_radio: float = 0.1, + ): + super().__init__() + self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_version or version) + + self.device = device + self.max_length = max_length + self.zero_embedding_radio = zero_embedding_radio + + self.clip_dict = OrderedDict() + self.clip_name = os.path.split(version)[-1] + + transformer = CLIPModel.from_pretrained(version).text_model + + for param in transformer.parameters(): + param.requires_grad = False + self.clip_dict[self.clip_name] = transformer + + self._move_flag = False + + @property + def clip(self): + return self.clip_dict[self.clip_name] + + def move(self): + if self._move_flag: + return + + self.clip_dict[self.clip_name] = self.clip_dict[self.clip_name].to(self.device) + self._move_flag = True + + def unconditional_embedding(self, batch_size): + empty_text = [""] * 
batch_size + empty_z = self.forward(empty_text) + return empty_z + + def forward(self, text): + self.move() + + batch_encoding = self.tokenizer( + text, + truncation=True, + max_length=self.max_length, + return_length=True, + return_overflowing_tokens=False, + padding="max_length", + return_tensors="pt", + ) + + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.clip(input_ids=tokens) + + z = outputs.last_hidden_state + return z + + def encode(self, text): + batch_size = len(text) + batch_mask = torch.rand((batch_size,)) + for i in range(batch_size): + if batch_mask[i] < self.zero_embedding_radio: + text[i] = "" + + return self(text) + +class FrozenAlignedCLIPTextEmbedder(AbstractEncoder): + """Uses the CLIP transformer encoder for text (from Hugging Face)""" + + def __init__( + self, + version="openai/clip-vit-large-patch14", + tokenizer_version=None, + device="cuda", + max_length=77, + zero_embedding_radio: float = 0.1, + ): + super().__init__() + self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_version or version) + + self.device = device + self.max_length = max_length + self.zero_embedding_radio = zero_embedding_radio + + self.clip_dict = OrderedDict() + self.clip_name = os.path.split(version)[-1] + + transformer = CLIPModel.from_pretrained(version).text_model + + for param in transformer.parameters(): + param.requires_grad = False + self.clip_dict[self.clip_name] = transformer + + self._move_flag = False + + @property + def clip(self): + return self.clip_dict[self.clip_name] + + def move(self): + if self._move_flag: + return + + self.clip_dict[self.clip_name] = self.clip_dict[self.clip_name].to(self.device) + self._move_flag = True + + def unconditional_embedding(self, batch_size): + empty_text = [""] * batch_size + empty_z = self.forward(empty_text) + return empty_z + + def forward(self, text): + self.move() + + batch_encoding = self.tokenizer( + text, + truncation=True, + max_length=self.max_length, + return_length=True, + return_overflowing_tokens=False, + padding="max_length", + return_tensors="pt", + ) + + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.clip(input_ids=tokens) + + z = outputs.last_hidden_state + return z + + def encode(self, text): + batch_size = len(text) + batch_mask = torch.rand((batch_size,)) + for i in range(batch_size): + if batch_mask[i] < self.zero_embedding_radio: + text[i] = "" + + return self(text) + + +class FrozenCLIPImageEmbedder(AbstractEncoder): + """Uses the CLIP transformer encoder for text (from Hugging Face)""" + + def __init__( + self, + version="openai/clip-vit-large-patch14", + device="cuda", + zero_embedding_radio=0.1, + normalize_embedding=True, + num_projection_vector=0, + linear_mapping_bias=True, + reverse_visual_projection=False, + ): + super().__init__() + + self.device = device + + self.clip_dict = OrderedDict() + self.clip_name = os.path.split(version)[-1] + + clip_model = CLIPModel.from_pretrained(version) + clip_model.text_model = None + clip_model.text_projection = None + clip_model = clip_model.eval() + for param in self.parameters(): + param.requires_grad = False + self.clip_dict[self.clip_name] = clip_model + + self.transform = transforms.Compose( + [ + transforms.Resize(224, transforms.InterpolationMode.BICUBIC, antialias=True), + transforms.CenterCrop(224), # crop a (224, 224) square + transforms.Normalize( + mean=[0.48145466, 0.4578275, 0.40821073], + std=[0.26862954, 0.26130258, 0.27577711], + ), + ] + ) + self.zero_embedding_radio = zero_embedding_radio + + 
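+        # zero_embedding_radio controls the random zeroing of image embeddings in
+        # encode()/forward(); the zeroed entries act as the "no condition" input
+        # needed for classifier-free guidance.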
self.num_projection_vector = num_projection_vector + self.reverse_visual_projection = reverse_visual_projection + self.normalize_embedding = normalize_embedding + + embedding_dim = ( + clip_model.visual_projection.in_features + if reverse_visual_projection + else clip_model.visual_projection.out_features + ) + self.embedding_dim = embedding_dim + if self.num_projection_vector > 0: + self.projection = nn.Linear( + embedding_dim, + clip_model.visual_projection.out_features * num_projection_vector, + bias=linear_mapping_bias, + ) + nn.init.normal_(self.projection.weight, std=embedding_dim ** -0.5) + + self._move_flag = False + + @property + def clip(self): + return self.clip_dict[self.clip_name] + + def unconditional_embedding(self, batch_size): + zero = torch.zeros( + batch_size, + 1, + self.embedding_dim, + device=self.device, + dtype=self.clip.visual_projection.weight.dtype, + ) + if self.num_projection_vector > 0: + zero = self.projection(zero).view(batch_size, self.num_projection_vector, -1) + return zero + + def forward(self, image, value_range=(-1, 1), zero_embedding_radio=0): + if value_range is not None: + low, high = value_range + image = (image - low) / (high - low) + + image = image.to(self.device, dtype=self.clip.visual_projection.weight.dtype) + + if self.reverse_visual_projection: + z = self.clip.vision_model(self.transform(image))[1] + else: + z = self.clip.get_image_features(self.transform(image)) + + if self.normalize_embedding: + z = z / z.norm(dim=-1, keepdim=True) + if z.ndim == 2: + z = z.unsqueeze(dim=-2) + + if zero_embedding_radio > 0: + mask = torch.rand((len(image), 1, 1), device=z.device, dtype=z.dtype) < zero_embedding_radio + z = z * mask.to(z) + + if self.num_projection_vector > 0: + z = self.projection(z).view(len(image), self.num_projection_vector, -1) + + return z + + def move(self): + if self._move_flag: + return + + self.clip_dict[self.clip_name] = self.clip_dict[self.clip_name].to(self.device) + self._move_flag = True + + def encode(self, image): + self.move() + return self(image, zero_embedding_radio=self.zero_embedding_radio) + + +class FrozenCLIPImageGridEmbedder(AbstractEncoder): + + def __init__( + self, + version="openai/clip-vit-large-patch14", + device="cuda", + zero_embedding_radio=0.1, + ): + super().__init__() + + self.device = device + + self.clip_dict = OrderedDict() + self.clip_name = os.path.split(version)[-1] + + clip_model: CLIPModel = CLIPModel.from_pretrained(version) + clip_model.text_model = None + clip_model.text_projection = None + clip_model = clip_model.eval() + for param in self.parameters(): + param.requires_grad = False + self.clip_dict[self.clip_name] = clip_model + + self.transform = transforms.Compose( + [ + transforms.Resize(224, transforms.InterpolationMode.BILINEAR, antialias=True), + transforms.CenterCrop(224), # crop a (224, 224) square + transforms.Normalize( + mean=[0.48145466, 0.4578275, 0.40821073], + std=[0.26862954, 0.26130258, 0.27577711], + ), + ] + ) + self.zero_embedding_radio = zero_embedding_radio + self.embedding_dim = clip_model.vision_embed_dim + + self._move_flag = False + + @property + def clip(self): + return self.clip_dict[self.clip_name] + + def move(self): + if self._move_flag: + return + + self.clip_dict[self.clip_name] = self.clip_dict[self.clip_name].to(self.device) + self._move_flag = True + + def unconditional_embedding(self, batch_size): + zero = torch.zeros( + batch_size, + self.clip.vision_model.embeddings.num_positions, + self.embedding_dim, + device=self.device, + 
dtype=self.clip.visual_projection.weight.dtype, + ) + return zero + + def forward(self, image, value_range=(-1, 1), zero_embedding_radio=0): + self.move() + + if value_range is not None: + low, high = value_range + image = (image - low) / (high - low) + + image = image.to(self.device, dtype=self.clip.visual_projection.weight.dtype) + + z = self.clip.vision_model(self.transform(image)).last_hidden_state + + if zero_embedding_radio > 0: + mask = torch.rand((len(image), 1, 1), device=z.device, dtype=z.dtype) >= zero_embedding_radio + z = z * mask.to(z) + + return z + + def encode(self, image): + return self(image, zero_embedding_radio=self.zero_embedding_radio) + + +class MoECLIPImageEncoder(nn.Module): + def __init__( + self, + versions, + hidden_state_dim, + num_projection_vector=8, + zero_embedding_radio=0.1, + device="cuda", + precision="fp16", + normalize=False, + clip_max=0, + transform_type="base", + argument_p=0.2, + ): + super().__init__() + + self.device = torch.device(device) + self.hidden_state_dim = hidden_state_dim + self.zero_embedding_radio = zero_embedding_radio + self.num_projection_vector = num_projection_vector + self.dtype = dict(fp16=torch.float16, fp32=torch.float32, bf16=torch.bfloat16)[precision] + self.normalize = normalize + self.clip_max = clip_max + + if transform_type == "base": + self.transform = transforms.Compose( + [ + transforms.Resize(224, transforms.InterpolationMode.BICUBIC, antialias=True), + transforms.CenterCrop(224), # crop a (224, 224) square + transforms.Normalize( + mean=[0.48145466, 0.4578275, 0.40821073], + std=[0.26862954, 0.26130258, 0.27577711], + ), + ] + ) + elif transform_type == "crop_blur_resize": + self.transform = transforms.Compose( + [ + transforms.Resize(224, transforms.InterpolationMode.BICUBIC, antialias=True), + transforms.CenterCrop(224), # crop a (224, 224) square + transforms.RandomApply( + transforms=[ + transforms.RandomResizedCrop( + size=224, + scale=(0.8, 1.0), + ratio=(0.99, 1.01), + interpolation=transforms.InterpolationMode.BICUBIC, + ), + ], + p=argument_p, + ), + transforms.RandomApply( + transforms=[ + transforms.GaussianBlur(kernel_size=9, sigma=(0.1, 5)), + ], + p=argument_p, + ), + transforms.RandomApply( + transforms=[ + RandomResize(size=224, resize_radio=(0.2, 1)), + ], + p=argument_p, + ), + transforms.Normalize( + mean=[0.48145466, 0.4578275, 0.40821073], + std=[0.26862954, 0.26130258, 0.27577711], + ), + ] + ) + else: + raise ValueError(f"invalid {transform_type=}") + + if isinstance(versions, str): + versions = (versions,) + + # 如果直接把clips定位为当前类的子module,1. 会在保存ckp时存无用的多个权重。 2. 
pl会调用to,导致layer_norm的权重也被转换成fp16 + clips = OrderedDict() + + for v in versions: + # 因为clips不是子module,直接指定device="cuda"会错误地导致clip模型权重都被放到cuda:0上。 + clips[v], _ = clip.load(name=v, device="cpu", jit=False, download_root=None) + delattr(clips[v], "transformer") + clips[v].eval() + clips[v].requires_grad_(False) + + self.clips_hidden_dim = sum(clips[v].ln_final.weight.size(0) for v in clips) + + if self.num_projection_vector == 0: + self.projection = nn.Identity() + else: + self.projection = nn.Linear(self.clips_hidden_dim, hidden_state_dim * self.num_projection_vector, bias=True) + self.projection.to(dtype=self.dtype) + nn.init.normal_(self.projection.weight, std=self.clips_hidden_dim ** -0.5) + + self.clips = clips + + self._move_flag = False + + def move(self): + if self._move_flag: + return + + def convert_weights(model: nn.Module): + """Convert applicable model parameters to fp16""" + + def _convert_weights_to_fp16(l): + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): + l.weight.data = l.weight.data.type(self.dtype) + if l.bias is not None: + l.bias.data = l.bias.data.type(self.dtype) + + if isinstance(l, nn.MultiheadAttention): + for attr in [ + *[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], + "in_proj_bias", + "bias_k", + "bias_v", + ]: + tensor = getattr(l, attr) + if tensor is not None: + tensor.data = tensor.data.type(self.dtype) + + for name in ["text_projection", "proj"]: + if hasattr(l, name): + attr = getattr(l, name) + if attr is not None: + attr.data = attr.data.type(self.dtype) + + model.apply(_convert_weights_to_fp16) + + for k in self.clips: + self.clips[k].to(self.device) + convert_weights(self.clips[k]) # fp32 -> self.dtype + self._move_flag = True + + def unconditional_embedding(self, batch_size=None): + zero = torch.zeros( + batch_size, + self.clips_hidden_dim, + device=self.device, + dtype=self.dtype, + ) + if self.num_projection_vector > 0: + zero = self.projection(zero).view(batch_size, self.num_projection_vector, -1) + return zero + + def convert_embedding(self, z): + if self.num_projection_vector > 0: + z = self.projection(z.type(self.projection.weight.dtype)).view(len(z), self.num_projection_vector, -1) + return z + + def forward(self, image, value_range=(-1, 1), zero_embedding_radio=0): + if value_range is not None: + low, high = value_range + image = (image - low) / (high - low) + + image = self.transform(image) + + with torch.no_grad(): + embs = [] + for v in self.clips: + x = self.clips[v].encode_image(image) + if self.normalize: + x = x / x.norm(p=2, dim=-1, keepdim=True) * (x.size(-1) ** 0.5) + # clip_max only works with normalization + if self.clip_max > 0: + x = x.clamp(-self.clip_max, self.clip_max) + embs.append(x) + + z = torch.cat(embs, dim=-1) + if self.normalize: + z /= z.size(-1) ** 0.5 + + if zero_embedding_radio > 0: + mask = torch.rand((len(image), 1, 1), device=z.device, dtype=z.dtype) >= zero_embedding_radio + z = z + mask.to(z) + + if self.num_projection_vector > 0: + z = self.projection(z).view(len(image), self.num_projection_vector, -1) + return z + + def encode(self, image): + self.move() + return self(image, zero_embedding_radio=self.zero_embedding_radio) diff --git a/ThirdParty/michelangelo/models/modules/__init__.py b/ThirdParty/michelangelo/models/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0729b49eadf964584d3524d9c0f6adec3f04a6a9 --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +from .checkpoint import checkpoint 
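All of the conditional encoders in encoder_factory.py share the contract that the diffuser and sampler above rely on: encode() randomly drops a fraction (zero_embedding_radio) of the conditions during training (by zeroing the embedding or substituting the empty prompt), and unconditional_embedding() returns the matching "empty" condition that the sampler stacks in front of the real one. A minimal, self-contained sketch of that contract and of the guidance combination used at sampling time (tensor shapes and function names here are illustrative, not the repo's API):

import torch

def encode_with_dropout(cond: torch.Tensor, drop_prob: float = 0.1) -> torch.Tensor:
    # Training-time condition dropout: zero each sample's embedding with probability drop_prob.
    keep = (torch.rand(cond.shape[0], 1, 1) >= drop_prob).to(cond.dtype)
    return cond * keep

def apply_guidance(noise_uncond: torch.Tensor, noise_cond: torch.Tensor, scale: float) -> torch.Tensor:
    # Sampling-time classifier-free guidance, matching the combination in ddim_sample above.
    return noise_uncond + scale * (noise_cond - noise_uncond)

if __name__ == "__main__":
    cond = torch.randn(4, 77, 768)                      # [bs, tokens, dim]
    dropped = encode_with_dropout(cond)                 # some rows become all-zero
    print(dropped.abs().sum(dim=(1, 2)))
    print(apply_guidance(torch.zeros(2, 3), torch.ones(2, 3), scale=3.0))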
diff --git a/ThirdParty/michelangelo/models/modules/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/models/modules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8401beeac1e0073c07db86ced05807ec828a7299 Binary files /dev/null and b/ThirdParty/michelangelo/models/modules/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/models/modules/__pycache__/checkpoint.cpython-310.pyc b/ThirdParty/michelangelo/models/modules/__pycache__/checkpoint.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dfdd6dab3ba70344dc314ea472245ec8a7156cff Binary files /dev/null and b/ThirdParty/michelangelo/models/modules/__pycache__/checkpoint.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/models/modules/__pycache__/distributions.cpython-310.pyc b/ThirdParty/michelangelo/models/modules/__pycache__/distributions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12f7ca07ea53f5bf87df18c68f5d96be21d396af Binary files /dev/null and b/ThirdParty/michelangelo/models/modules/__pycache__/distributions.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/models/modules/__pycache__/embedder.cpython-310.pyc b/ThirdParty/michelangelo/models/modules/__pycache__/embedder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..baf0a0c4cbe29852f9ccc0248be3ba0d46cf7040 Binary files /dev/null and b/ThirdParty/michelangelo/models/modules/__pycache__/embedder.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/models/modules/__pycache__/transformer_blocks.cpython-310.pyc b/ThirdParty/michelangelo/models/modules/__pycache__/transformer_blocks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0a14aa11109b22d2d8641ce585e9a57d01499f83 Binary files /dev/null and b/ThirdParty/michelangelo/models/modules/__pycache__/transformer_blocks.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/models/modules/checkpoint.py b/ThirdParty/michelangelo/models/modules/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..4fef818bc15de279a06f9175aeadf85924ff18c0 --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/checkpoint.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" +Adapted from: https://github.com/openai/guided-diffusion/blob/22e0df8183507e13a7813f8d38d51b072ca1e67c/guided_diffusion/nn.py#L124 +""" + +import torch +from typing import Callable, Iterable, Sequence, Union + + +def checkpoint( + func: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor]]], + inputs: Sequence[torch.Tensor], + params: Iterable[torch.Tensor], + flag: bool, + use_deepspeed: bool = False +): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. 
+ :param use_deepspeed: if True, use deepspeed + """ + if flag: + if use_deepspeed: + import deepspeed + return deepspeed.checkpointing.checkpoint(func, *inputs) + + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + @torch.cuda.amp.custom_fwd + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + @torch.cuda.amp.custom_bwd + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads diff --git a/ThirdParty/michelangelo/models/modules/diffusion_transformer.py b/ThirdParty/michelangelo/models/modules/diffusion_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..f8fdd3eb5e3863b3d958b2acca26ad7594ae83b5 --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/diffusion_transformer.py @@ -0,0 +1,218 @@ +# -*- coding: utf-8 -*- + +import math +import torch +import torch.nn as nn +from typing import Optional + +from ThirdParty.michelangelo.models.modules.checkpoint import checkpoint +from ThirdParty.michelangelo.models.modules.transformer_blocks import ( + init_linear, + MLP, + MultiheadCrossAttention, + MultiheadAttention, + ResidualAttentionBlock +) + + +class AdaLayerNorm(nn.Module): + def __init__(self, + device: torch.device, + dtype: torch.dtype, + width: int): + + super().__init__() + + self.silu = nn.SiLU(inplace=True) + self.linear = nn.Linear(width, width * 2, device=device, dtype=dtype) + self.layernorm = nn.LayerNorm(width, elementwise_affine=False, device=device, dtype=dtype) + + def forward(self, x, timestep): + emb = self.linear(timestep) + scale, shift = torch.chunk(emb, 2, dim=2) + x = self.layernorm(x) * (1 + scale) + shift + return x + + +class DitBlock(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + context_dim: int, + qkv_bias: bool = False, + init_scale: float = 1.0, + use_checkpoint: bool = False + ): + super().__init__() + + self.use_checkpoint = use_checkpoint + + self.attn = MultiheadAttention( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias + ) + self.ln_1 = AdaLayerNorm(device, dtype, width) + + if context_dim is not None: + self.ln_2 = AdaLayerNorm(device, dtype, width) + self.cross_attn = MultiheadCrossAttention( + device=device, + dtype=dtype, + width=width, + heads=heads, + data_width=context_dim, + init_scale=init_scale, + qkv_bias=qkv_bias + ) + + self.mlp = MLP(device=device, dtype=dtype, width=width, init_scale=init_scale) + self.ln_3 = AdaLayerNorm(device, dtype, width) + + def forward(self, x: torch.Tensor, t: 
torch.Tensor, context: Optional[torch.Tensor] = None): + return checkpoint(self._forward, (x, t, context), self.parameters(), self.use_checkpoint) + + def _forward(self, x: torch.Tensor, t: torch.Tensor, context: Optional[torch.Tensor] = None): + x = x + self.attn(self.ln_1(x, t)) + if context is not None: + x = x + self.cross_attn(self.ln_2(x, t), context) + x = x + self.mlp(self.ln_3(x, t)) + return x + + +class DiT(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_ctx: int, + width: int, + layers: int, + heads: int, + context_dim: int, + init_scale: float = 0.25, + qkv_bias: bool = False, + use_checkpoint: bool = False + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.layers = layers + + self.resblocks = nn.ModuleList( + [ + DitBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + context_dim=context_dim, + qkv_bias=qkv_bias, + init_scale=init_scale, + use_checkpoint=use_checkpoint + ) + for _ in range(layers) + ] + ) + + def forward(self, x: torch.Tensor, t: torch.Tensor, context: Optional[torch.Tensor] = None): + for block in self.resblocks: + x = block(x, t, context) + return x + + +class UNetDiffusionTransformer(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_ctx: int, + width: int, + layers: int, + heads: int, + init_scale: float = 0.25, + qkv_bias: bool = False, + skip_ln: bool = False, + use_checkpoint: bool = False + ): + super().__init__() + + self.n_ctx = n_ctx + self.width = width + self.layers = layers + + self.encoder = nn.ModuleList() + for _ in range(layers): + resblock = ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + use_checkpoint=use_checkpoint + ) + self.encoder.append(resblock) + + self.middle_block = ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + use_checkpoint=use_checkpoint + ) + + self.decoder = nn.ModuleList() + for _ in range(layers): + resblock = ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + use_checkpoint=use_checkpoint + ) + linear = nn.Linear(width * 2, width, device=device, dtype=dtype) + init_linear(linear, init_scale) + + layer_norm = nn.LayerNorm(width, device=device, dtype=dtype) if skip_ln else None + + self.decoder.append(nn.ModuleList([resblock, linear, layer_norm])) + + def forward(self, x: torch.Tensor): + + enc_outputs = [] + for block in self.encoder: + x = block(x) + enc_outputs.append(x) + + x = self.middle_block(x) + + for i, (resblock, linear, layer_norm) in enumerate(self.decoder): + x = torch.cat([enc_outputs.pop(), x], dim=-1) + x = linear(x) + + if layer_norm is not None: + x = layer_norm(x) + + x = resblock(x) + + return x diff --git a/ThirdParty/michelangelo/models/modules/distributions.py b/ThirdParty/michelangelo/models/modules/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..b0e00dae5548283a282b79f6132b5f8b2194849e --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/distributions.py @@ -0,0 +1,102 @@ +import torch +import numpy as np +from typing import Union, List + + +class AbstractDistribution(object): + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + 
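+# The concrete distributions below implement this interface: DiracDistribution is the
+# deterministic point-mass case, while DiagonalGaussianDistribution splits its
+# parameters into mean/logvar along `feat_dim`, samples via the reparameterization
+# trick, and in kl() evaluates the closed-form 0.5 * (mu^2 + sigma^2 - 1 - log sigma^2)
+# per element against a standard normal (averaged over `dims` rather than summed).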
+class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters: Union[torch.Tensor, List[torch.Tensor]], deterministic=False, feat_dim=1): + self.feat_dim = feat_dim + self.parameters = parameters + + if isinstance(parameters, list): + self.mean = parameters[0] + self.logvar = parameters[1] + else: + self.mean, self.logvar = torch.chunk(parameters, 2, dim=feat_dim) + + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean) + + def sample(self): + x = self.mean + self.std * torch.randn_like(self.mean) + # print('mean:', self.mean.shape, self.mean.mean(), self.mean.std()) + # print('std:', self.std.shape, self.std.mean(), self.std.std()) + return x + + def kl(self, other=None, dims=(1, 2, 3)): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.mean(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=dims) + else: + return 0.5 * torch.mean( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=dims) + + def nll(self, sample, dims=(1, 2, 3)): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). + logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/ThirdParty/michelangelo/models/modules/embedder.py b/ThirdParty/michelangelo/models/modules/embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..be6fa71d168461fdf1dd9db83c21ce404e95ecde --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/embedder.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- + +import numpy as np +import torch +import torch.nn as nn +import math + +VALID_EMBED_TYPES = ["identity", "fourier", "hashgrid", "sphere_harmonic", "triplane_fourier"] + + +def components_from_spherical_harmonics( + directions, levels=5 +): + """ + Returns value for each component of spherical harmonics. + + Args: + levels: Number of spherical harmonic levels to compute. 
+ directions: Spherical harmonic coefficients + """ + num_components = levels**2 + components = torch.zeros((*directions.shape[:-1], num_components), device=directions.device) + + assert 1 <= levels <= 5, f"SH levels must be in [1,4], got {levels}" + assert directions.shape[-1] == 3, f"Direction input should have three dimensions. Got {directions.shape[-1]}" + + x = directions[..., 0] + y = directions[..., 1] + z = directions[..., 2] + + xx = x**2 + yy = y**2 + zz = z**2 + + # l0 + components[..., 0] = 0.28209479177387814 + + # l1 + if levels > 1: + components[..., 1] = 0.4886025119029199 * y + components[..., 2] = 0.4886025119029199 * z + components[..., 3] = 0.4886025119029199 * x + + # l2 + if levels > 2: + components[..., 4] = 1.0925484305920792 * x * y + components[..., 5] = 1.0925484305920792 * y * z + components[..., 6] = 0.9461746957575601 * zz - 0.31539156525251999 + components[..., 7] = 1.0925484305920792 * x * z + components[..., 8] = 0.5462742152960396 * (xx - yy) + + # l3 + if levels > 3: + components[..., 9] = 0.5900435899266435 * y * (3 * xx - yy) + components[..., 10] = 2.890611442640554 * x * y * z + components[..., 11] = 0.4570457994644658 * y * (5 * zz - 1) + components[..., 12] = 0.3731763325901154 * z * (5 * zz - 3) + components[..., 13] = 0.4570457994644658 * x * (5 * zz - 1) + components[..., 14] = 1.445305721320277 * z * (xx - yy) + components[..., 15] = 0.5900435899266435 * x * (xx - 3 * yy) + + # l4 + if levels > 4: + components[..., 16] = 2.5033429417967046 * x * y * (xx - yy) + components[..., 17] = 1.7701307697799304 * y * z * (3 * xx - yy) + components[..., 18] = 0.9461746957575601 * x * y * (7 * zz - 1) + components[..., 19] = 0.6690465435572892 * y * z * (7 * zz - 3) + components[..., 20] = 0.10578554691520431 * (35 * zz * zz - 30 * zz + 3) + components[..., 21] = 0.6690465435572892 * x * z * (7 * zz - 3) + components[..., 22] = 0.47308734787878004 * (xx - yy) * (7 * zz - 1) + components[..., 23] = 1.7701307697799304 * x * z * (xx - 3 * yy) + components[..., 24] = 0.6258357354491761 * (xx * (xx - 3 * yy) - yy * (3 * xx - yy)) + + return components + + +class FourierEmbedder(nn.Module): + """The sin/cosine positional embedding. Given an input tensor `x` of shape [n_batch, ..., c_dim], it converts + each feature dimension of `x[..., i]` into: + [ + sin(x[..., i]), + sin(f_1*x[..., i]), + sin(f_2*x[..., i]), + ... + sin(f_N * x[..., i]), + cos(x[..., i]), + cos(f_1*x[..., i]), + cos(f_2*x[..., i]), + ... + cos(f_N * x[..., i]), + x[..., i] # only present if include_input is True. + ], here f_i is the frequency. + + Denote the space is [0 / num_freqs, 1 / num_freqs, 2 / num_freqs, 3 / num_freqs, ..., (num_freqs - 1) / num_freqs]. + If logspace is True, then the frequency f_i is [2^(0 / num_freqs), ..., 2^(i / num_freqs), ...]; + Otherwise, the frequencies are linearly spaced between [1.0, 2^(num_freqs - 1)]. + + Args: + num_freqs (int): the number of frequencies, default is 6; + logspace (bool): If logspace is True, then the frequency f_i is [..., 2^(i / num_freqs), ...], + otherwise, the frequencies are linearly spaced between [1.0, 2^(num_freqs - 1)]; + input_dim (int): the input dimension, default is 3; + include_input (bool): include the input tensor or not, default is True. 
+ + Attributes: + frequencies (torch.Tensor): If logspace is True, then the frequency f_i is [..., 2^(i / num_freqs), ...], + otherwise, the frequencies are linearly spaced between [1.0, 2^(num_freqs - 1); + + out_dim (int): the embedding size, if include_input is True, it is input_dim * (num_freqs * 2 + 1), + otherwise, it is input_dim * num_freqs * 2. + + """ + + def __init__(self, + num_freqs: int = 6, + logspace: bool = True, + input_dim: int = 3, + include_input: bool = True, + include_pi: bool = True) -> None: + + """The initialization""" + + super().__init__() + + if logspace: + frequencies = 2.0 ** torch.arange( + num_freqs, + dtype=torch.float32 + ) + else: + frequencies = torch.linspace( + 1.0, + 2.0 ** (num_freqs - 1), + num_freqs, + dtype=torch.float32 + ) + + if include_pi: + frequencies *= torch.pi + + self.register_buffer("frequencies", frequencies, persistent=False) + self.include_input = include_input + self.num_freqs = num_freqs + + self.out_dim = self.get_dims(input_dim) + + def get_dims(self, input_dim): + temp = 1 if self.include_input or self.num_freqs == 0 else 0 + out_dim = input_dim * (self.num_freqs * 2 + temp) + + return out_dim + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """ Forward process. + + Args: + x: tensor of shape [..., dim] + + Returns: + embedding: an embedding of `x` of shape [..., dim * (num_freqs * 2 + temp)] + where temp is 1 if include_input is True and 0 otherwise. + """ + + if self.num_freqs > 0: + embed = (x[..., None].contiguous() * self.frequencies).view(*x.shape[:-1], -1) + if self.include_input: + return torch.cat((x, embed.sin(), embed.cos()), dim=-1) + else: + return torch.cat((embed.sin(), embed.cos()), dim=-1) + else: + return x + + +class LearnedFourierEmbedder(nn.Module): + """ following @crowsonkb "s lead with learned sinusoidal pos emb """ + """ https://github.com/crowsonkb/v-diffusion-jax/blob/master/diffusion/models/danbooru_128.py#L8 """ + + def __init__(self, in_channels, dim): + super().__init__() + assert (dim % 2) == 0 + half_dim = dim // 2 + per_channel_dim = half_dim // in_channels + self.weights = nn.Parameter(torch.randn(per_channel_dim)) + + def forward(self, x): + """ + + Args: + x (torch.FloatTensor): [..., c] + + Returns: + x (torch.FloatTensor): [..., d] + """ + + # [b, t, c, 1] * [1, d] = [b, t, c, d] -> [b, t, c * d] + freqs = (x[..., None] * self.weights[None] * 2 * np.pi).view(*x.shape[:-1], -1) + fouriered = torch.cat((x, freqs.sin(), freqs.cos()), dim=-1) + return fouriered + + +class TriplaneLearnedFourierEmbedder(nn.Module): + def __init__(self, in_channels, dim): + super().__init__() + + self.yz_plane_embedder = LearnedFourierEmbedder(in_channels, dim) + self.xz_plane_embedder = LearnedFourierEmbedder(in_channels, dim) + self.xy_plane_embedder = LearnedFourierEmbedder(in_channels, dim) + + self.out_dim = in_channels + dim + + def forward(self, x): + + yz_embed = self.yz_plane_embedder(x) + xz_embed = self.xz_plane_embedder(x) + xy_embed = self.xy_plane_embedder(x) + + embed = yz_embed + xz_embed + xy_embed + + return embed + + +def sequential_pos_embed(num_len, embed_dim): + assert embed_dim % 2 == 0 + + pos = torch.arange(num_len, dtype=torch.float32) + omega = torch.arange(embed_dim // 2, dtype=torch.float32) + omega /= embed_dim / 2. + omega = 1. 
/ 10000 ** omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = torch.einsum("m,d->md", pos, omega) # (M, D/2), outer product + + emb_sin = torch.sin(out) # (M, D/2) + emb_cos = torch.cos(out) # (M, D/2) + + embeddings = torch.cat([emb_sin, emb_cos], dim=1) # (M, D) + + return embeddings + + +def timestep_embedding(timesteps, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].to(timesteps.dtype) * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + + +def get_embedder(embed_type="fourier", num_freqs=-1, input_dim=3, degree=4, + num_levels=16, level_dim=2, per_level_scale=2, base_resolution=16, + log2_hashmap_size=19, desired_resolution=None): + if embed_type == "identity" or (embed_type == "fourier" and num_freqs == -1): + return nn.Identity(), input_dim + + elif embed_type == "fourier": + embedder_obj = FourierEmbedder(num_freqs=num_freqs, input_dim=input_dim, + logspace=True, include_input=True) + return embedder_obj, embedder_obj.out_dim + + elif embed_type == "hashgrid": + raise NotImplementedError + + elif embed_type == "sphere_harmonic": + raise NotImplementedError + + else: + raise ValueError(f"{embed_type} is not valid. Currently only supprts {VALID_EMBED_TYPES}") diff --git a/ThirdParty/michelangelo/models/modules/transformer_blocks.py b/ThirdParty/michelangelo/models/modules/transformer_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..d9e90a9a0da99b98851c95bc895e6aa49b011f70 --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/transformer_blocks.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- + +import math +import torch +import torch.nn as nn +import torch.nn.functional as F +from typing import Optional + +from ThirdParty.michelangelo.models.modules.checkpoint import checkpoint + + +def init_linear(l, stddev): + nn.init.normal_(l.weight, std=stddev) + if l.bias is not None: + nn.init.constant_(l.bias, 0.0) + + +class MultiheadAttention(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + init_scale: float, + qkv_bias: bool, + flash: bool = False + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.heads = heads + self.c_qkv = nn.Linear(width, width * 3, bias=qkv_bias, device=device, dtype=dtype) + self.c_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.attention = QKVMultiheadAttention(device=device, dtype=dtype, heads=heads, n_ctx=n_ctx, flash=flash) + init_linear(self.c_qkv, init_scale) + init_linear(self.c_proj, init_scale) + + def forward(self, x, mask=None): + x = self.c_qkv(x) + if mask is None: + x = checkpoint(self.attention, (x,), (), True) + else: + x = self.attention(x, mask) + x = self.c_proj(x) + return x + + +class QKVMultiheadAttention(nn.Module): + def __init__(self, *, device: torch.device, dtype: torch.dtype, heads: int, n_ctx: int, flash: bool = False): + super().__init__() + self.device = device + 
self.dtype = dtype + self.heads = heads + self.n_ctx = n_ctx + self.flash = flash + + def forward(self, qkv, mask=None): + bs, n_ctx, width = qkv.shape + attn_ch = width // self.heads // 3 + scale = 1 / math.sqrt(math.sqrt(attn_ch)) + qkv = qkv.view(bs, n_ctx, self.heads, -1) + q, k, v = torch.split(qkv, attn_ch, dim=-1) + + if self.flash: + out = F.scaled_dot_product_attention(q, k, v) + else: + weight = torch.einsum( + "bthc,bshc->bhts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + if mask is not None: + mask = mask.unsqueeze(1).unsqueeze(-2) + # value_to_fill = torch.tensor(-1e9, dtype=torch.float16) + weight.masked_fill_(mask == 0, -1e8) + wdtype = weight.dtype + weight = torch.softmax(weight.float(), dim=-1).type(wdtype) + out = torch.einsum("bhts,bshc->bthc", weight, v).reshape(bs, n_ctx, -1) + + return out + + +class ResidualAttentionBlock(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + init_scale: float = 1.0, + qkv_bias: bool = True, + flash: bool = False, + use_checkpoint: bool = False + ): + super().__init__() + + self.use_checkpoint = use_checkpoint + + self.attn = MultiheadAttention( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash + ) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.mlp = MLP(device=device, dtype=dtype, width=width, init_scale=init_scale) + self.ln_2 = nn.LayerNorm(width, device=device, dtype=dtype) + + def _forward(self, x: torch.Tensor,mask=None): + x = x + self.attn(self.ln_1(x),mask) + x = x + self.mlp(self.ln_2(x)) + return x + + def forward(self, x: torch.Tensor, mask=None): + if mask is None: + return checkpoint(self._forward, (x,), self.parameters(), self.use_checkpoint) + else: + return self._forward(x, mask) + + +class MultiheadCrossAttention(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + width: int, + heads: int, + init_scale: float, + qkv_bias: bool = True, + flash: bool = False, + n_data: Optional[int] = None, + data_width: Optional[int] = None, + ): + super().__init__() + self.n_data = n_data + self.width = width + self.heads = heads + self.data_width = width if data_width is None else data_width + self.c_q = nn.Linear(width, width, bias=qkv_bias, device=device, dtype=dtype) + self.c_kv = nn.Linear(self.data_width, width * 2, bias=qkv_bias, device=device, dtype=dtype) + self.c_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.attention = QKVMultiheadCrossAttention( + device=device, dtype=dtype, heads=heads, n_data=n_data, flash=flash + ) + init_linear(self.c_q, init_scale) + init_linear(self.c_kv, init_scale) + init_linear(self.c_proj, init_scale) + + def forward(self, x, data, mask=None): + x = self.c_q(x) + data = self.c_kv(data) + if mask is None: + x = checkpoint(self.attention, (x, data), (), True) + else: + x = self.attention(x, data, mask) + x = self.c_proj(x) + return x + + +class QKVMultiheadCrossAttention(nn.Module): + def __init__(self, *, device: torch.device, dtype: torch.dtype, heads: int, + flash: bool = False, n_data: Optional[int] = None): + + super().__init__() + self.device = device + self.dtype = dtype + self.heads = heads + self.n_data = n_data + self.flash = flash + + def forward(self, q, kv, mask=None): + _, n_ctx, _ = q.shape + bs, n_data, width = kv.shape + attn_ch = width // self.heads // 2 + scale = 1 / math.sqrt(math.sqrt(attn_ch)) + q = 
q.view(bs, n_ctx, self.heads, -1) + kv = kv.view(bs, n_data, self.heads, -1) + k, v = torch.split(kv, attn_ch, dim=-1) + + if self.flash: + out = F.scaled_dot_product_attention(q, k, v,mask=mask) + else: + weight = torch.einsum( + "bthc,bshc->bhts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + if mask is not None: + mask = mask.unsqueeze(1).unsqueeze(-2) + # value_to_fill = torch.tensor(-1e9, dtype=torch.float16) + weight.masked_fill_(mask == 0, -1e8) + wdtype = weight.dtype + weight = torch.softmax(weight.float(), dim=-1).type(wdtype) + out = torch.einsum("bhts,bshc->bthc", weight, v).reshape(bs, n_ctx, -1) + + return out + + +class ResidualCrossAttentionBlock(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_data: Optional[int] = None, + width: int, + heads: int, + data_width: Optional[int] = None, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False + ): + super().__init__() + + if data_width is None: + data_width = width + + self.attn = MultiheadCrossAttention( + device=device, + dtype=dtype, + n_data=n_data, + width=width, + heads=heads, + data_width=data_width, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.ln_2 = nn.LayerNorm(data_width, device=device, dtype=dtype) + self.mlp = MLP(device=device, dtype=dtype, width=width, init_scale=init_scale) + self.ln_3 = nn.LayerNorm(width, device=device, dtype=dtype) + + def forward(self, x: torch.Tensor, data: torch.Tensor, mask=None): + x = x + self.attn(self.ln_1(x), self.ln_2(data), mask) + x = x + self.mlp(self.ln_3(x)) + return x + + +class MLP(nn.Module): + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + width: int, + init_scale: float): + super().__init__() + self.width = width + self.c_fc = nn.Linear(width, width * 4, device=device, dtype=dtype) + self.c_proj = nn.Linear(width * 4, width, device=device, dtype=dtype) + self.gelu = nn.GELU() + init_linear(self.c_fc, init_scale) + init_linear(self.c_proj, init_scale) + + def forward(self, x): + return self.c_proj(self.gelu(self.c_fc(x))) + + +class Transformer(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_ctx: int, + width: int, + layers: int, + heads: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_checkpoint: bool = False + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.layers = layers + self.resblocks = nn.ModuleList( + [ + ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=use_checkpoint + ) + for _ in range(layers) + ] + ) + + def forward(self, x: torch.Tensor, mask=None): + for block in self.resblocks: + x = block(x,mask) + return x + diff --git a/ThirdParty/michelangelo/models/modules/transformer_vit.py b/ThirdParty/michelangelo/models/modules/transformer_vit.py new file mode 100644 index 0000000000000000000000000000000000000000..a289b6cbbb34ec3df91f290871e24d34edc09292 --- /dev/null +++ b/ThirdParty/michelangelo/models/modules/transformer_vit.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- + +import math +import torch +import torch.nn as nn +from typing import Optional +import warnings + +from ThirdParty.michelangelo.models.modules.checkpoint import checkpoint + + +def _trunc_normal_(tensor, 
mean, std, a, b): + # Cut & paste from PyTorch official master until it's in a few official releases - RW + # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf + def norm_cdf(x): + # Computes standard normal cumulative distribution function + return (1. + math.erf(x / math.sqrt(2.))) / 2. + + if (mean < a - 2 * std) or (mean > b + 2 * std): + warnings.warn("mean is more than 2 std from [a, b] in nn.init.trunc_normal_. " + "The distribution of values may be incorrect.", + stacklevel=2) + + # Values are generated by using a truncated uniform distribution and + # then using the inverse CDF for the normal distribution. + # Get upper and lower cdf values + l = norm_cdf((a - mean) / std) + u = norm_cdf((b - mean) / std) + + # Uniformly fill tensor with values from [l, u], then translate to + # [2l-1, 2u-1]. + tensor.uniform_(2 * l - 1, 2 * u - 1) + + # Use inverse cdf transform for normal distribution to get truncated + # standard normal + tensor.erfinv_() + + # Transform to proper mean, std + tensor.mul_(std * math.sqrt(2.)) + tensor.add_(mean) + + # Clamp to ensure it's in the proper range + tensor.clamp_(min=a, max=b) + return tensor + + +def trunc_normal_(tensor, mean=0., std=1., a=-2., b=2.): + # type: (Tensor | nn.Parameter, float, float, float, float) -> Tensor + r"""Fills the input Tensor with values drawn from a truncated + normal distribution. The values are effectively drawn from the + normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)` + with values outside :math:`[a, b]` redrawn until they are within + the bounds. The method used for generating the random values works + best when :math:`a \leq \text{mean} \leq b`. + NOTE: this impl is similar to the PyTorch trunc_normal_, the bounds [a, b] are + applied while sampling the normal with mean/std applied, therefore a, b args + should be adjusted to match the range of mean, std args. 
+ Args: + tensor: an n-dimensional `torch.Tensor` + mean: the mean of the normal distribution + std: the standard deviation of the normal distribution + a: the minimum cutoff value + b: the maximum cutoff value + Examples: + >>> w = torch.empty(3, 5) + >>> nn.init.trunc_normal_(w) + """ + with torch.no_grad(): + return _trunc_normal_(tensor, mean, std, a, b) + + +def init_weights(m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + +class MultiheadAttention(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + qkv_bias: bool + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.heads = heads + self.c_qkv = nn.Linear(width, width * 3, bias=qkv_bias, device=device, dtype=dtype) + self.c_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.attention = QKVMultiheadAttention(device=device, dtype=dtype, heads=heads, n_ctx=n_ctx) + + def forward(self, x): + x = self.c_qkv(x) + x = checkpoint(self.attention, (x,), (), True) + x = self.c_proj(x) + return x + + +class QKVMultiheadAttention(nn.Module): + def __init__(self, *, device: torch.device, dtype: torch.dtype, heads: int, n_ctx: int): + super().__init__() + self.device = device + self.dtype = dtype + self.heads = heads + self.n_ctx = n_ctx + + def forward(self, qkv): + bs, n_ctx, width = qkv.shape + attn_ch = width // self.heads // 3 + scale = 1 / math.sqrt(attn_ch) + qkv = qkv.view(bs, n_ctx, self.heads, -1) + q, k, v = torch.split(qkv, attn_ch, dim=-1) + weight = torch.einsum("bthc,bshc->bhts", q, k) * scale + wdtype = weight.dtype + weight = torch.softmax(weight.float(), dim=-1).type(wdtype) + return torch.einsum("bhts,bshc->bthc", weight, v).reshape(bs, n_ctx, -1) + + +class ResidualAttentionBlock(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + n_ctx: int, + width: int, + heads: int, + qkv_bias: bool = True, + use_checkpoint: bool = False + ): + super().__init__() + + self.use_checkpoint = use_checkpoint + + self.attn = MultiheadAttention( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + qkv_bias=qkv_bias + ) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.mlp = MLP(device=device, dtype=dtype, width=width) + self.ln_2 = nn.LayerNorm(width, device=device, dtype=dtype) + + def _forward(self, x: torch.Tensor): + x = x + self.attn(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + + def forward(self, x: torch.Tensor): + return checkpoint(self._forward, (x,), self.parameters(), self.use_checkpoint) + + +class MultiheadCrossAttention(nn.Module): + def __init__( + self, + *, + device: torch.device, + dtype: torch.dtype, + width: int, + heads: int, + qkv_bias: bool = True, + n_data: Optional[int] = None, + data_width: Optional[int] = None, + ): + super().__init__() + self.n_data = n_data + self.width = width + self.heads = heads + self.data_width = width if data_width is None else data_width + self.c_q = nn.Linear(width, width, bias=qkv_bias, device=device, dtype=dtype) + self.c_kv = nn.Linear(self.data_width, width * 2, bias=qkv_bias, device=device, dtype=dtype) + self.c_proj = nn.Linear(width, width, device=device, dtype=dtype) + self.attention = QKVMultiheadCrossAttention( + device=device, dtype=dtype, heads=heads, n_data=n_data + ) 
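# --- Editorial note (not part of the upstream Michelangelo diff) ---------------
# forward() below performs Perceiver-style cross-attention: `x` carries the query
# tokens and `data` carries the key/value tokens. A minimal usage sketch with
# made-up sizes (width=256, heads=4, 64 latent queries, 2048 data tokens):
#   >>> attn = MultiheadCrossAttention(device=None, dtype=None, width=256, heads=4)
#   >>> x = torch.randn(2, 64, 256)       # [bs, n_ctx, width], e.g. learned latent queries
#   >>> data = torch.randn(2, 2048, 256)  # [bs, n_data, data_width]
#   >>> attn(x, data).shape
#   torch.Size([2, 64, 256])
# Internally c_q projects x to [bs, n_ctx, width] and c_kv projects data to
# [bs, n_data, 2 * width], which QKVMultiheadCrossAttention splits into k and v.
# -------------------------------------------------------------------------------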
+ + def forward(self, x, data): + x = self.c_q(x) + data = self.c_kv(data) + x = checkpoint(self.attention, (x, data), (), True) + x = self.c_proj(x) + return x + + +class QKVMultiheadCrossAttention(nn.Module): + def __init__(self, *, device: torch.device, dtype: torch.dtype, heads: int, n_data: Optional[int] = None): + super().__init__() + self.device = device + self.dtype = dtype + self.heads = heads + self.n_data = n_data + + def forward(self, q, kv): + _, n_ctx, _ = q.shape + bs, n_data, width = kv.shape + attn_ch = width // self.heads // 2 + scale = 1 / math.sqrt(attn_ch) + q = q.view(bs, n_ctx, self.heads, -1) + kv = kv.view(bs, n_data, self.heads, -1) + k, v = torch.split(kv, attn_ch, dim=-1) + weight = torch.einsum("bthc,bshc->bhts", q, k) * scale + wdtype = weight.dtype + weight = torch.softmax(weight.float(), dim=-1).type(wdtype) + return torch.einsum("bhts,bshc->bthc", weight, v).reshape(bs, n_ctx, -1) + + +class ResidualCrossAttentionBlock(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_data: Optional[int] = None, + width: int, + heads: int, + data_width: Optional[int] = None, + qkv_bias: bool = True + ): + super().__init__() + + if data_width is None: + data_width = width + + self.attn = MultiheadCrossAttention( + device=device, + dtype=dtype, + n_data=n_data, + width=width, + heads=heads, + data_width=data_width, + qkv_bias=qkv_bias + ) + self.ln_1 = nn.LayerNorm(width, device=device, dtype=dtype) + self.ln_2 = nn.LayerNorm(data_width, device=device, dtype=dtype) + self.mlp = MLP(device=device, dtype=dtype, width=width) + self.ln_3 = nn.LayerNorm(width, device=device, dtype=dtype) + + def forward(self, x: torch.Tensor, data: torch.Tensor): + x = x + self.attn(self.ln_1(x), self.ln_2(data)) + x = x + self.mlp(self.ln_3(x)) + return x + + +class MLP(nn.Module): + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + width: int): + super().__init__() + self.width = width + self.c_fc = nn.Linear(width, width * 4, device=device, dtype=dtype) + self.c_proj = nn.Linear(width * 4, width, device=device, dtype=dtype) + self.gelu = nn.GELU() + + def forward(self, x): + return self.c_proj(self.gelu(self.c_fc(x))) + + +class Transformer(nn.Module): + def __init__( + self, + *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + n_ctx: int, + width: int, + layers: int, + heads: int, + qkv_bias: bool = True, + use_checkpoint: bool = False + ): + super().__init__() + self.n_ctx = n_ctx + self.width = width + self.layers = layers + self.resblocks = nn.ModuleList( + [ + ResidualAttentionBlock( + device=device, + dtype=dtype, + n_ctx=n_ctx, + width=width, + heads=heads, + qkv_bias=qkv_bias, + use_checkpoint=use_checkpoint + ) + for _ in range(layers) + ] + ) + + self.apply(init_weights) + + def forward(self, x: torch.Tensor): + for block in self.resblocks: + x = block(x) + return x diff --git a/ThirdParty/michelangelo/models/tsal/__init__.py b/ThirdParty/michelangelo/models/tsal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/models/tsal/asl_pl_module.py b/ThirdParty/michelangelo/models/tsal/asl_pl_module.py new file mode 100644 index 0000000000000000000000000000000000000000..034f3d4a2a7ccd5bf3c91de17b8aed02ad92e2e3 --- /dev/null +++ 
b/ThirdParty/michelangelo/models/tsal/asl_pl_module.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- + +from typing import List, Tuple, Dict, Optional +from omegaconf import DictConfig + +import torch +import torch.nn.functional as F +from torch.optim import lr_scheduler +import pytorch_lightning as pl +from typing import Union +from functools import partial + +from ThirdParty.michelangelo.utils import instantiate_from_config + +from .inference_utils import extract_geometry +from .tsal_base import ( + AlignedShapeAsLatentModule, + ShapeAsLatentModule, + Latent2MeshOutput, + AlignedMeshOutput +) + + +class AlignedShapeAsLatentPLModule(pl.LightningModule): + + def __init__(self, *, + shape_module_cfg, + aligned_module_cfg, + loss_cfg, + optimizer_cfg: Optional[DictConfig] = None, + ckpt_path: Optional[str] = None, + ignore_keys: Union[Tuple[str], List[str]] = ()): + + super().__init__() + + shape_model: ShapeAsLatentModule = instantiate_from_config( + shape_module_cfg, device=None, dtype=None + ) + self.model: AlignedShapeAsLatentModule = instantiate_from_config( + aligned_module_cfg, shape_model=shape_model + ) + + self.loss = instantiate_from_config(loss_cfg) + + self.optimizer_cfg = optimizer_cfg + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + self.save_hyperparameters() + + def set_shape_model_only(self): + self.model.set_shape_model_only() + + @property + def latent_shape(self): + return self.model.shape_model.latent_shape + + @property + def zero_rank(self): + if self._trainer: + zero_rank = self.trainer.local_rank == 0 + else: + zero_rank = True + + return zero_rank + + def init_from_ckpt(self, path, ignore_keys=()): + state_dict = torch.load(path, map_location="cpu")["state_dict"] + + keys = list(state_dict.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del state_dict[k] + + missing, unexpected = self.load_state_dict(state_dict, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + print(f"Unexpected Keys: {unexpected}") + + def configure_optimizers(self) -> Tuple[List, List]: + lr = self.learning_rate + + trainable_parameters = list(self.model.parameters()) + + if self.optimizer_cfg is None: + optimizers = [torch.optim.AdamW(trainable_parameters, lr=lr, betas=(0.9, 0.99), weight_decay=1e-3)] + schedulers = [] + else: + optimizer = instantiate_from_config(self.optimizer_cfg.optimizer, params=trainable_parameters) + scheduler_func = instantiate_from_config( + self.optimizer_cfg.scheduler, + max_decay_steps=self.trainer.max_steps, + lr_max=lr + ) + scheduler = { + "scheduler": lr_scheduler.LambdaLR(optimizer, lr_lambda=scheduler_func.schedule), + "interval": "step", + "frequency": 1 + } + optimizers = [optimizer] + schedulers = [scheduler] + + return optimizers, schedulers + + def forward(self, + surface: torch.FloatTensor, + image: torch.FloatTensor, + text: torch.FloatTensor, + volume_queries: torch.FloatTensor): + + """ + + Args: + surface (torch.FloatTensor): + image (torch.FloatTensor): + text (torch.FloatTensor): + volume_queries (torch.FloatTensor): + + Returns: + + """ + + embed_outputs, shape_z = self.model(surface, image, text) + + shape_zq, posterior = self.model.shape_model.encode_kl_embed(shape_z) + latents = self.model.shape_model.decode(shape_zq) + logits = self.model.shape_model.query_geometry(volume_queries, latents) + + return 
embed_outputs, logits, posterior + + def encode(self, surface: torch.FloatTensor, sample_posterior=True): + + pc = surface[..., 0:3] + feats = surface[..., 3:6] + + shape_embed, shape_zq, posterior = self.model.shape_model.encode( + pc=pc, feats=feats, sample_posterior=sample_posterior + ) + + return shape_zq + + def decode(self, + z_q, + bounds: Union[Tuple[float], List[float], float] = 1.1, + octree_depth: int = 7, + num_chunks: int = 10000) -> List[Latent2MeshOutput]: + + latents = self.model.shape_model.decode(z_q) # latents: [bs, num_latents, dim] + outputs = self.latent2mesh(latents, bounds=bounds, octree_depth=octree_depth, num_chunks=num_chunks) + + return outputs + + def training_step(self, batch: Dict[str, torch.FloatTensor], + batch_idx: int, optimizer_idx: int = 0) -> torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface (torch.FloatTensor): [bs, n_surface, (3 + input_dim)] + - image (torch.FloatTensor): [bs, 3, 224, 224] + - text (torch.FloatTensor): [bs, num_templates, 77] + - geo_points (torch.FloatTensor): [bs, n_pts, (3 + 1)] + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + surface = batch["surface"] + image = batch["image"] + text = batch["text"] + + volume_queries = batch["geo_points"][..., 0:3] + shape_labels = batch["geo_points"][..., -1] + + embed_outputs, shape_logits, posteriors = self(surface, image, text, volume_queries) + + aeloss, log_dict_ae = self.loss( + **embed_outputs, + posteriors=posteriors, + shape_logits=shape_logits, + shape_labels=shape_labels, + split="train" + ) + + self.log_dict(log_dict_ae, prog_bar=True, logger=True, batch_size=shape_logits.shape[0], + sync_dist=False, rank_zero_only=True) + + return aeloss + + def validation_step(self, batch: Dict[str, torch.FloatTensor], batch_idx: int) -> torch.FloatTensor: + + surface = batch["surface"] + image = batch["image"] + text = batch["text"] + + volume_queries = batch["geo_points"][..., 0:3] + shape_labels = batch["geo_points"][..., -1] + + embed_outputs, shape_logits, posteriors = self(surface, image, text, volume_queries) + + aeloss, log_dict_ae = self.loss( + **embed_outputs, + posteriors=posteriors, + shape_logits=shape_logits, + shape_labels=shape_labels, + split="val" + ) + self.log_dict(log_dict_ae, prog_bar=True, logger=True, batch_size=shape_logits.shape[0], + sync_dist=False, rank_zero_only=True) + + return aeloss + + def visual_alignment(self, + surface: torch.FloatTensor, + image: torch.FloatTensor, + text: torch.FloatTensor, + description: Optional[List[str]] = None, + bounds: Union[Tuple[float], List[float]] = (-1.25, -1.25, -1.25, 1.25, 1.25, 1.25), + octree_depth: int = 7, + num_chunks: int = 10000) -> List[AlignedMeshOutput]: + + """ + + Args: + surface: + image: + text: + description: + bounds: + octree_depth: + num_chunks: + + Returns: + mesh_outputs (List[AlignedMeshOutput]): the mesh outputs list. 
+ + """ + + outputs = [] + + device = surface.device + bs = surface.shape[0] + + embed_outputs, shape_z = self.model(surface, image, text) + + # calculate the similarity + image_embed = embed_outputs["image_embed"] + text_embed = embed_outputs["text_embed"] + shape_embed = embed_outputs["shape_embed"] + + # normalized features + shape_embed = F.normalize(shape_embed, dim=-1, p=2) + text_embed = F.normalize(text_embed, dim=-1, p=2) + image_embed = F.normalize(image_embed, dim=-1, p=2) + + # B x B + shape_text_similarity = (100.0 * shape_embed @ text_embed.T).softmax(dim=-1) + + # B x B + shape_image_similarity = (100.0 * shape_embed @ image_embed.T).softmax(dim=-1) + + # shape reconstruction + shape_zq, posterior = self.model.shape_model.encode_kl_embed(shape_z) + latents = self.model.shape_model.decode(shape_zq) + geometric_func = partial(self.model.shape_model.query_geometry, latents=latents) + + # 2. decode geometry + mesh_v_f, has_surface = extract_geometry( + geometric_func=geometric_func, + device=device, + batch_size=bs, + bounds=bounds, + octree_depth=octree_depth, + num_chunks=num_chunks, + disable=not self.zero_rank + ) + + # 3. decode texture + for i, ((mesh_v, mesh_f), is_surface) in enumerate(zip(mesh_v_f, has_surface)): + if not is_surface: + outputs.append(None) + continue + + out = AlignedMeshOutput() + out.mesh_v = mesh_v + out.mesh_f = mesh_f + out.surface = surface[i].cpu().numpy() + out.image = image[i].cpu().numpy() + if description is not None: + out.text = description[i] + out.shape_text_similarity = shape_text_similarity[i, i] + out.shape_image_similarity = shape_image_similarity[i, i] + + outputs.append(out) + + return outputs + + def latent2mesh(self, + latents: torch.FloatTensor, + bounds: Union[Tuple[float], List[float], float] = 1.1, + octree_depth: int = 7, + num_chunks: int = 10000) -> List[Latent2MeshOutput]: + + """ + + Args: + latents: [bs, num_latents, dim] + bounds: + octree_depth: + num_chunks: + + Returns: + mesh_outputs (List[MeshOutput]): the mesh outputs list. + + """ + + outputs = [] + + geometric_func = partial(self.model.shape_model.query_geometry, latents=latents) + + # 2. decode geometry + device = latents.device + mesh_v_f, has_surface = extract_geometry( + geometric_func=geometric_func, + device=device, + batch_size=len(latents), + bounds=bounds, + octree_depth=octree_depth, + num_chunks=num_chunks, + disable=not self.zero_rank + ) + + # 3. 
decode texture + for i, ((mesh_v, mesh_f), is_surface) in enumerate(zip(mesh_v_f, has_surface)): + if not is_surface: + outputs.append(None) + continue + + out = Latent2MeshOutput() + out.mesh_v = mesh_v + out.mesh_f = mesh_f + + outputs.append(out) + + return outputs + diff --git a/ThirdParty/michelangelo/models/tsal/clip_asl_module.py b/ThirdParty/michelangelo/models/tsal/clip_asl_module.py new file mode 100644 index 0000000000000000000000000000000000000000..82e989e83a0e5839a4bf4874eeb8df41af900682 --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/clip_asl_module.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- + +import torch +from torch import nn +from einops import rearrange +from transformers import CLIPModel + +from ThirdParty.michelangelo.models.tsal.tsal_base import AlignedShapeAsLatentModule + + +class CLIPAlignedShapeAsLatentModule(AlignedShapeAsLatentModule): + + def __init__(self, *, + shape_model, + clip_model_version: str = "openai/clip-vit-large-patch14"): + + super().__init__() + + self.clip_model: CLIPModel = CLIPModel.from_pretrained(clip_model_version) + for params in self.clip_model.parameters(): + params.requires_grad = False + + self.shape_model = shape_model + self.shape_projection = nn.Parameter(torch.empty(self.shape_model.width, self.clip_model.projection_dim)) + nn.init.normal_(self.shape_projection, std=self.clip_model.projection_dim ** -0.5) + + def set_shape_model_only(self): + self.clip_model = None + + def encode_shape_embed(self, surface, return_latents: bool = False): + """ + + Args: + surface (torch.FloatTensor): [bs, n, 3 + c] + return_latents (bool): + + Returns: + x (torch.FloatTensor): [bs, projection_dim] + shape_latents (torch.FloatTensor): [bs, m, d] + """ + + pc = surface[..., 0:3] + feats = surface[..., 3:] + + shape_embed, shape_latents = self.shape_model.encode_latents(pc, feats) + x = shape_embed @ self.shape_projection + + if return_latents: + return x, shape_latents + else: + return x + + def encode_image_embed(self, image): + """ + + Args: + image (torch.FloatTensor): [bs, 3, h, w] + + Returns: + x (torch.FloatTensor): [bs, projection_dim] + """ + + x = self.clip_model.get_image_features(image) + + return x + + def encode_text_embed(self, text): + x = self.clip_model.get_text_features(text) + return x + + def forward(self, surface, image, text): + """ + + Args: + surface (torch.FloatTensor): + image (torch.FloatTensor): [bs, 3, 224, 224] + text (torch.LongTensor): [bs, num_templates, 77] + + Returns: + embed_outputs (dict): the embedding outputs, and it contains: + - image_embed (torch.FloatTensor): + - text_embed (torch.FloatTensor): + - shape_embed (torch.FloatTensor): + - logit_scale (float): + """ + + # # text embedding + # text_embed_all = [] + # for i in range(text.shape[0]): + # text_for_one_sample = text[i] + # text_embed = self.encode_text_embed(text_for_one_sample) + # text_embed = text_embed / text_embed.norm(dim=-1, keepdim=True) + # text_embed = text_embed.mean(dim=0) + # text_embed = text_embed / text_embed.norm(dim=-1, keepdim=True) + # text_embed_all.append(text_embed) + # text_embed_all = torch.stack(text_embed_all) + + b = text.shape[0] + text_tokens = rearrange(text, "b t l -> (b t) l") + text_embed = self.encode_text_embed(text_tokens) + text_embed = rearrange(text_embed, "(b t) d -> b t d", b=b) + text_embed = text_embed.mean(dim=1) + text_embed = text_embed / text_embed.norm(dim=-1, keepdim=True) + + # image embedding + image_embed = self.encode_image_embed(image) + + # shape embedding + shape_embed, shape_latents 
= self.encode_shape_embed(surface, return_latents=True) + + embed_outputs = { + "image_embed": image_embed, + "text_embed": text_embed, + "shape_embed": shape_embed, + "logit_scale": self.clip_model.logit_scale.exp() + } + + return embed_outputs, shape_latents diff --git a/ThirdParty/michelangelo/models/tsal/inference_utils.py b/ThirdParty/michelangelo/models/tsal/inference_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8c4a9df780c6a325175240e92f40e7431eced0dd --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/inference_utils.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +import torch +from tqdm import tqdm +from einops import repeat +import numpy as np +from typing import Callable, Tuple, List, Union, Optional +from skimage import measure + +from ThirdParty.michelangelo.graphics.primitives import generate_dense_grid_points + + +@torch.no_grad() +def extract_geometry(geometric_func: Callable, + device: torch.device, + batch_size: int = 1, + bounds: Union[Tuple[float], List[float], float] = (-1.25, -1.25, -1.25, 1.25, 1.25, 1.25), + octree_depth: int = 7, + num_chunks: int = 10000, + disable: bool = True): + """ + + Args: + geometric_func: + device: + bounds: + octree_depth: + batch_size: + num_chunks: + disable: + + Returns: + + """ + + if isinstance(bounds, float): + bounds = [-bounds, -bounds, -bounds, bounds, bounds, bounds] + + bbox_min = np.array(bounds[0:3]) + bbox_max = np.array(bounds[3:6]) + bbox_size = bbox_max - bbox_min + + xyz_samples, grid_size, length = generate_dense_grid_points( + bbox_min=bbox_min, + bbox_max=bbox_max, + octree_depth=octree_depth, + indexing="ij" + ) + xyz_samples = torch.FloatTensor(xyz_samples) + + batch_logits = [] + for start in tqdm(range(0, xyz_samples.shape[0], num_chunks), + desc="Implicit Function:", disable=disable, leave=False): + queries = xyz_samples[start: start + num_chunks, :].to(device) + batch_queries = repeat(queries, "p c -> b p c", b=batch_size) + + logits = geometric_func(batch_queries) + batch_logits.append(logits.cpu()) + + grid_logits = torch.cat(batch_logits, dim=1).view((batch_size, grid_size[0], grid_size[1], grid_size[2])).numpy() + + mesh_v_f = [] + has_surface = np.zeros((batch_size,), dtype=np.bool_) + for i in range(batch_size): + try: + vertices, faces, normals, _ = measure.marching_cubes(grid_logits[i], 0, method="lewiner") + vertices = vertices / grid_size * bbox_size + bbox_min + # vertices[:, [0, 1]] = vertices[:, [1, 0]] + mesh_v_f.append((vertices.astype(np.float32), np.ascontiguousarray(faces))) + has_surface[i] = True + + except ValueError: + mesh_v_f.append((None, None)) + has_surface[i] = False + + except RuntimeError: + mesh_v_f.append((None, None)) + has_surface[i] = False + + return mesh_v_f, has_surface diff --git a/ThirdParty/michelangelo/models/tsal/loss.py b/ThirdParty/michelangelo/models/tsal/loss.py new file mode 100644 index 0000000000000000000000000000000000000000..de49fe8e427ea709028da14f62b2170d57a4102e --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/loss.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +import torch +import torch.nn as nn +import torch.nn.functional as F + +from typing import Optional, Tuple, Dict + +from ThirdParty.michelangelo.models.modules.distributions import DiagonalGaussianDistribution +from ThirdParty.michelangelo.utils.eval import compute_psnr +from ThirdParty.michelangelo.utils import misc + + +class KLNearFar(nn.Module): + def __init__(self, + near_weight: float = 0.1, + kl_weight: float = 1.0, + num_near_samples: Optional[int] = 
None): + + super().__init__() + + self.near_weight = near_weight + self.kl_weight = kl_weight + self.num_near_samples = num_near_samples + self.geo_criterion = nn.BCEWithLogitsLoss() + + def forward(self, + posteriors: Optional[DiagonalGaussianDistribution], + logits: torch.FloatTensor, + labels: torch.FloatTensor, + split: Optional[str] = "train", **kwargs) -> Tuple[torch.FloatTensor, Dict[str, float]]: + + """ + + Args: + posteriors (DiagonalGaussianDistribution or torch.distributions.Normal): + logits (torch.FloatTensor): [B, 2*N], logits[:, 0:N] is the volume points; logits[:, N:2N] is the near points; + labels (torch.FloatTensor): [B, 2*N], labels[:, 0:N] is the volume points; labels[:, N:2N] is the near points; + split (str): + **kwargs: + + Returns: + loss (torch.Tensor): (,) + log (dict): + + """ + + if self.num_near_samples is None: + num_vol = logits.shape[1] // 2 + else: + num_vol = logits.shape[1] - self.num_near_samples + + vol_logits = logits[:, 0:num_vol] + vol_labels = labels[:, 0:num_vol] + + near_logits = logits[:, num_vol:] + near_labels = labels[:, num_vol:] + + # occupancy loss + # vol_bce = self.geo_criterion(vol_logits, vol_labels) + # near_bce = self.geo_criterion(near_logits, near_labels) + vol_bce = self.geo_criterion(vol_logits.float(), vol_labels.float()) + near_bce = self.geo_criterion(near_logits.float(), near_labels.float()) + + if posteriors is None: + kl_loss = torch.tensor(0.0, dtype=vol_logits.dtype, device=vol_logits.device) + else: + kl_loss = posteriors.kl(dims=(1, 2)) + kl_loss = torch.mean(kl_loss) + + loss = vol_bce + near_bce * self.near_weight + kl_loss * self.kl_weight + + with torch.no_grad(): + preds = logits >= 0 + accuracy = (preds == labels).float() + accuracy = accuracy.mean() + pos_ratio = torch.mean(labels) + + log = { + "{}/total_loss".format(split): loss.clone().detach(), + "{}/near".format(split): near_bce.detach(), + "{}/far".format(split): vol_bce.detach(), + "{}/kl".format(split): kl_loss.detach(), + "{}/accuracy".format(split): accuracy, + "{}/pos_ratio".format(split): pos_ratio + } + + if posteriors is not None: + log[f"{split}/mean"] = posteriors.mean.mean().detach() + log[f"{split}/std_mean"] = posteriors.std.mean().detach() + log[f"{split}/std_max"] = posteriors.std.max().detach() + + return loss, log + + +class KLNearFarColor(nn.Module): + def __init__(self, + near_weight: float = 0.1, + kl_weight: float = 1.0, + color_weight: float = 1.0, + color_criterion: str = "mse", + num_near_samples: Optional[int] = None): + + super().__init__() + + self.color_weight = color_weight + self.near_weight = near_weight + self.kl_weight = kl_weight + self.num_near_samples = num_near_samples + + if color_criterion == "mse": + self.color_criterion = nn.MSELoss() + + elif color_criterion == "l1": + self.color_criterion = nn.L1Loss() + + else: + raise ValueError(f"{color_criterion} must be [`mse`, `l1`].") + + self.geo_criterion = nn.BCEWithLogitsLoss() + + def forward(self, + posteriors: Optional[DiagonalGaussianDistribution], + logits: torch.FloatTensor, + labels: torch.FloatTensor, + pred_colors: torch.FloatTensor, + gt_colors: torch.FloatTensor, + split: Optional[str] = "train", **kwargs) -> Tuple[torch.FloatTensor, Dict[str, float]]: + + """ + + Args: + posteriors (DiagonalGaussianDistribution or torch.distributions.Normal): + logits (torch.FloatTensor): [B, 2*N], logits[:, 0:N] is the volume points; logits[:, N:2N] is the near points; + labels (torch.FloatTensor): [B, 2*N], labels[:, 0:N] is the volume points; labels[:, N:2N] is the near 
points; + pred_colors (torch.FloatTensor): [B, M, 3] + gt_colors (torch.FloatTensor): [B, M, 3] + split (str): + **kwargs: + + Returns: + loss (torch.Tensor): (,) + log (dict): + + """ + + if self.num_near_samples is None: + num_vol = logits.shape[1] // 2 + else: + num_vol = logits.shape[1] - self.num_near_samples + + vol_logits = logits[:, 0:num_vol] + vol_labels = labels[:, 0:num_vol] + + near_logits = logits[:, num_vol:] + near_labels = labels[:, num_vol:] + + # occupancy loss + # vol_bce = self.geo_criterion(vol_logits, vol_labels) + # near_bce = self.geo_criterion(near_logits, near_labels) + vol_bce = self.geo_criterion(vol_logits.float(), vol_labels.float()) + near_bce = self.geo_criterion(near_logits.float(), near_labels.float()) + + # surface color loss + color = self.color_criterion(pred_colors, gt_colors) + + if posteriors is None: + kl_loss = torch.tensor(0.0, dtype=pred_colors.dtype, device=pred_colors.device) + else: + kl_loss = posteriors.kl(dims=(1, 2)) + kl_loss = torch.mean(kl_loss) + + loss = vol_bce + near_bce * self.near_weight + color * self.color_weight + kl_loss * self.kl_weight + + with torch.no_grad(): + preds = logits >= 0 + accuracy = (preds == labels).float() + accuracy = accuracy.mean() + psnr = compute_psnr(pred_colors, gt_colors) + + log = { + "{}/total_loss".format(split): loss.clone().detach(), + "{}/near".format(split): near_bce.detach(), + "{}/far".format(split): vol_bce.detach(), + "{}/color".format(split): color.detach(), + "{}/kl".format(split): kl_loss.detach(), + "{}/psnr".format(split): psnr.detach(), + "{}/accuracy".format(split): accuracy + } + + return loss, log + + +class ContrastKLNearFar(nn.Module): + def __init__(self, + contrast_weight: float = 1.0, + near_weight: float = 0.1, + kl_weight: float = 1.0, + num_near_samples: Optional[int] = None): + + super().__init__() + + self.labels = None + self.last_local_batch_size = None + + self.contrast_weight = contrast_weight + self.near_weight = near_weight + self.kl_weight = kl_weight + self.num_near_samples = num_near_samples + self.geo_criterion = nn.BCEWithLogitsLoss() + + def forward(self, + shape_embed: torch.FloatTensor, + text_embed: torch.FloatTensor, + image_embed: torch.FloatTensor, + logit_scale: torch.FloatTensor, + posteriors: Optional[DiagonalGaussianDistribution], + shape_logits: torch.FloatTensor, + shape_labels: torch.FloatTensor, + split: Optional[str] = "train", **kwargs): + + local_batch_size = shape_embed.size(0) + + if local_batch_size != self.last_local_batch_size: + self.labels = local_batch_size * misc.get_rank() + torch.arange( + local_batch_size, device=shape_embed.device + ).long() + self.last_local_batch_size = local_batch_size + + # normalized features + shape_embed = F.normalize(shape_embed, dim=-1, p=2) + text_embed = F.normalize(text_embed, dim=-1, p=2) + image_embed = F.normalize(image_embed, dim=-1, p=2) + + # gather features from all GPUs + shape_embed_all, text_embed_all, image_embed_all = misc.all_gather_batch( + [shape_embed, text_embed, image_embed] + ) + + # cosine similarity as logits + logits_per_shape_text = logit_scale * shape_embed @ text_embed_all.t() + logits_per_text_shape = logit_scale * text_embed @ shape_embed_all.t() + logits_per_shape_image = logit_scale * shape_embed @ image_embed_all.t() + logits_per_image_shape = logit_scale * image_embed @ shape_embed_all.t() + contrast_loss = (F.cross_entropy(logits_per_shape_text, self.labels) + + F.cross_entropy(logits_per_text_shape, self.labels)) / 2 + \ + (F.cross_entropy(logits_per_shape_image, 
self.labels) + + F.cross_entropy(logits_per_image_shape, self.labels)) / 2 + + # shape reconstruction + if self.num_near_samples is None: + num_vol = shape_logits.shape[1] // 2 + else: + num_vol = shape_logits.shape[1] - self.num_near_samples + + vol_logits = shape_logits[:, 0:num_vol] + vol_labels = shape_labels[:, 0:num_vol] + + near_logits = shape_logits[:, num_vol:] + near_labels = shape_labels[:, num_vol:] + + # occupancy loss + vol_bce = self.geo_criterion(vol_logits.float(), vol_labels.float()) + near_bce = self.geo_criterion(near_logits.float(), near_labels.float()) + + if posteriors is None: + kl_loss = torch.tensor(0.0, dtype=vol_logits.dtype, device=vol_logits.device) + else: + kl_loss = posteriors.kl(dims=(1, 2)) + kl_loss = torch.mean(kl_loss) + + loss = vol_bce + near_bce * self.near_weight + kl_loss * self.kl_weight + contrast_loss * self.contrast_weight + + # compute accuracy + with torch.no_grad(): + pred = torch.argmax(logits_per_shape_text, dim=-1) + correct = pred.eq(self.labels).sum() + shape_text_acc = 100 * correct / local_batch_size + + pred = torch.argmax(logits_per_shape_image, dim=-1) + correct = pred.eq(self.labels).sum() + shape_image_acc = 100 * correct / local_batch_size + + preds = shape_logits >= 0 + accuracy = (preds == shape_labels).float() + accuracy = accuracy.mean() + + log = { + "{}/contrast".format(split): contrast_loss.clone().detach(), + "{}/near".format(split): near_bce.detach(), + "{}/far".format(split): vol_bce.detach(), + "{}/kl".format(split): kl_loss.detach(), + "{}/shape_text_acc".format(split): shape_text_acc, + "{}/shape_image_acc".format(split): shape_image_acc, + "{}/total_loss".format(split): loss.clone().detach(), + "{}/accuracy".format(split): accuracy, + } + + if posteriors is not None: + log[f"{split}/mean"] = posteriors.mean.mean().detach() + log[f"{split}/std_mean"] = posteriors.std.mean().detach() + log[f"{split}/std_max"] = posteriors.std.max().detach() + + return loss, log diff --git a/ThirdParty/michelangelo/models/tsal/sal_perceiver.py b/ThirdParty/michelangelo/models/tsal/sal_perceiver.py new file mode 100644 index 0000000000000000000000000000000000000000..4bccb092169a965762759f3c5a5b406f42d83f2c --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/sal_perceiver.py @@ -0,0 +1,423 @@ +# -*- coding: utf-8 -*- + +import torch +import torch.nn as nn +from typing import Optional +from einops import repeat +import math + +from ThirdParty.michelangelo.models.modules import checkpoint +from ThirdParty.michelangelo.models.modules.embedder import FourierEmbedder +from ThirdParty.michelangelo.models.modules.distributions import DiagonalGaussianDistribution +from ThirdParty.michelangelo.models.modules.transformer_blocks import ( + ResidualCrossAttentionBlock, + Transformer +) + +from .tsal_base import ShapeAsLatentModule + + +class CrossAttentionEncoder(nn.Module): + + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + num_latents: int, + fourier_embedder: FourierEmbedder, + point_feats: int, + width: int, + heads: int, + layers: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_ln_post: bool = False, + use_checkpoint: bool = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.num_latents = num_latents + + self.query = nn.Parameter(torch.randn((num_latents, width), device=device, dtype=dtype) * 0.02) + + self.fourier_embedder = fourier_embedder + self.input_proj = nn.Linear(self.fourier_embedder.out_dim + point_feats, width, 
device=device, dtype=dtype) + self.cross_attn = ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + ) + + self.self_attn = Transformer( + device=device, + dtype=dtype, + n_ctx=num_latents, + width=width, + layers=layers, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=False + ) + + if use_ln_post: + self.ln_post = nn.LayerNorm(width, dtype=dtype, device=device) + else: + self.ln_post = None + + def _forward(self, pc, feats): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + + Returns: + + """ + + bs = pc.shape[0] + + data = self.fourier_embedder(pc) + if feats is not None: + data = torch.cat([data, feats], dim=-1) + data = self.input_proj(data) + + query = repeat(self.query, "m c -> b m c", b=bs) + latents = self.cross_attn(query, data) + latents = self.self_attn(latents) + + if self.ln_post is not None: + latents = self.ln_post(latents) + + return latents, pc + + def forward(self, pc: torch.FloatTensor, feats: Optional[torch.FloatTensor] = None): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + + Returns: + dict + """ + + return checkpoint(self._forward, (pc, feats), self.parameters(), self.use_checkpoint) + + +class CrossAttentionDecoder(nn.Module): + + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + num_latents: int, + out_channels: int, + fourier_embedder: FourierEmbedder, + width: int, + heads: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_checkpoint: bool = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + self.fourier_embedder = fourier_embedder + + self.query_proj = nn.Linear(self.fourier_embedder.out_dim, width, device=device, dtype=dtype) + + self.cross_attn_decoder = ResidualCrossAttentionBlock( + device=device, + dtype=dtype, + n_data=num_latents, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash + ) + + self.ln_post = nn.LayerNorm(width, device=device, dtype=dtype) + self.output_proj = nn.Linear(width, out_channels, device=device, dtype=dtype) + + def _forward(self, queries: torch.FloatTensor, latents: torch.FloatTensor): + queries = self.query_proj(self.fourier_embedder(queries)) + x = self.cross_attn_decoder(queries, latents) + x = self.ln_post(x) + x = self.output_proj(x) + return x + + def forward(self, queries: torch.FloatTensor, latents: torch.FloatTensor): + return checkpoint(self._forward, (queries, latents), self.parameters(), self.use_checkpoint) + + +class ShapeAsLatentPerceiver(ShapeAsLatentModule): + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + num_latents: int, + point_feats: int = 0, + embed_dim: int = 0, + num_freqs: int = 8, + include_pi: bool = True, + width: int, + heads: int, + num_encoder_layers: int, + num_decoder_layers: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_ln_post: bool = False, + use_checkpoint: bool = False): + + super().__init__() + + self.use_checkpoint = use_checkpoint + + self.num_latents = num_latents + self.fourier_embedder = FourierEmbedder(num_freqs=num_freqs, include_pi=include_pi) + + init_scale = init_scale * math.sqrt(1.0 / width) + self.encoder = CrossAttentionEncoder( + device=device, + dtype=dtype, + fourier_embedder=self.fourier_embedder, + 
num_latents=num_latents, + point_feats=point_feats, + width=width, + heads=heads, + layers=num_encoder_layers, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_ln_post=use_ln_post, + use_checkpoint=use_checkpoint + ) + + self.embed_dim = embed_dim + if embed_dim > 0: + # VAE embed + self.pre_kl = nn.Linear(width, embed_dim * 2, device=device, dtype=dtype) + self.post_kl = nn.Linear(embed_dim, width, device=device, dtype=dtype) + self.latent_shape = (num_latents, embed_dim) + else: + self.latent_shape = (num_latents, width) + + self.transformer = Transformer( + device=device, + dtype=dtype, + n_ctx=num_latents, + width=width, + layers=num_decoder_layers, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=use_checkpoint + ) + + # geometry decoder + self.geo_decoder = CrossAttentionDecoder( + device=device, + dtype=dtype, + fourier_embedder=self.fourier_embedder, + out_channels=1, + num_latents=num_latents, + width=width, + heads=heads, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_checkpoint=use_checkpoint + ) + + def encode(self, + pc: torch.FloatTensor, + feats: Optional[torch.FloatTensor] = None, + sample_posterior: bool = True): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + sample_posterior (bool): + + Returns: + latents (torch.FloatTensor) + center_pos (torch.FloatTensor or None): + posterior (DiagonalGaussianDistribution or None): + """ + + latents, center_pos = self.encoder(pc, feats) + + posterior = None + if self.embed_dim > 0: + moments = self.pre_kl(latents) + posterior = DiagonalGaussianDistribution(moments, feat_dim=-1) + + if sample_posterior: + latents = posterior.sample() + else: + latents = posterior.mode() + + return latents, center_pos, posterior + + def decode(self, latents: torch.FloatTensor): + latents = self.post_kl(latents) + return self.transformer(latents) + + def query_geometry(self, queries: torch.FloatTensor, latents: torch.FloatTensor): + logits = self.geo_decoder(queries, latents).squeeze(-1) + return logits + + def forward(self, + pc: torch.FloatTensor, + feats: torch.FloatTensor, + volume_queries: torch.FloatTensor, + sample_posterior: bool = True): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + volume_queries (torch.FloatTensor): [B, P, 3] + sample_posterior (bool): + + Returns: + logits (torch.FloatTensor): [B, P] + center_pos (torch.FloatTensor): [B, M, 3] + posterior (DiagonalGaussianDistribution or None). 
+ + """ + + latents, center_pos, posterior = self.encode(pc, feats, sample_posterior=sample_posterior) + + latents = self.decode(latents) + logits = self.query_geometry(volume_queries, latents) + + return logits, center_pos, posterior + + +class AlignedShapeLatentPerceiver(ShapeAsLatentPerceiver): + + def __init__(self, *, + device: Optional[torch.device], + dtype: Optional[torch.dtype], + num_latents: int, + point_feats: int = 0, + embed_dim: int = 0, + num_freqs: int = 8, + include_pi: bool = True, + width: int, + heads: int, + num_encoder_layers: int, + num_decoder_layers: int, + init_scale: float = 0.25, + qkv_bias: bool = True, + flash: bool = False, + use_ln_post: bool = False, + use_checkpoint: bool = False): + + super().__init__( + device=device, + dtype=dtype, + num_latents=1 + num_latents, + point_feats=point_feats, + embed_dim=embed_dim, + num_freqs=num_freqs, + include_pi=include_pi, + width=width, + heads=heads, + num_encoder_layers=num_encoder_layers, + num_decoder_layers=num_decoder_layers, + init_scale=init_scale, + qkv_bias=qkv_bias, + flash=flash, + use_ln_post=use_ln_post, + use_checkpoint=use_checkpoint + ) + + self.width = width + + def encode(self, + pc: torch.FloatTensor, + feats: Optional[torch.FloatTensor] = None, + sample_posterior: bool = True): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, c] + sample_posterior (bool): + + Returns: + shape_embed (torch.FloatTensor) + kl_embed (torch.FloatTensor): + posterior (DiagonalGaussianDistribution or None): + """ + + shape_embed, latents = self.encode_latents(pc, feats) + kl_embed, posterior = self.encode_kl_embed(latents, sample_posterior) + + return shape_embed, kl_embed, posterior + + def encode_latents(self, + pc: torch.FloatTensor, + feats: Optional[torch.FloatTensor] = None): + + x, _ = self.encoder(pc, feats) + + shape_embed = x[:, 0] + latents = x[:, 1:] + + return shape_embed, latents + + def encode_kl_embed(self, latents: torch.FloatTensor, sample_posterior: bool = True): + posterior = None + if self.embed_dim > 0: + moments = self.pre_kl(latents) + posterior = DiagonalGaussianDistribution(moments, feat_dim=-1) + + if sample_posterior: + kl_embed = posterior.sample() + else: + kl_embed = posterior.mode() + else: + kl_embed = latents + + return kl_embed, posterior + + def forward(self, + pc: torch.FloatTensor, + feats: torch.FloatTensor, + volume_queries: torch.FloatTensor, + sample_posterior: bool = True): + """ + + Args: + pc (torch.FloatTensor): [B, N, 3] + feats (torch.FloatTensor or None): [B, N, C] + volume_queries (torch.FloatTensor): [B, P, 3] + sample_posterior (bool): + + Returns: + shape_embed (torch.FloatTensor): [B, projection_dim] + logits (torch.FloatTensor): [B, M] + posterior (DiagonalGaussianDistribution or None). 
+ + """ + + shape_embed, kl_embed, posterior = self.encode(pc, feats, sample_posterior=sample_posterior) + + latents = self.decode(kl_embed) + logits = self.query_geometry(volume_queries, latents) + + return shape_embed, logits, posterior diff --git a/ThirdParty/michelangelo/models/tsal/sal_pl_module.py b/ThirdParty/michelangelo/models/tsal/sal_pl_module.py new file mode 100644 index 0000000000000000000000000000000000000000..fe5a73b5f392aedd13af4103a4543af685609e18 --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/sal_pl_module.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- + +from typing import List, Tuple, Dict, Optional +from omegaconf import DictConfig + +import torch +from torch.optim import lr_scheduler +import pytorch_lightning as pl +from typing import Union +from functools import partial + +from ThirdParty.michelangelo.utils import instantiate_from_config + +from .inference_utils import extract_geometry +from .tsal_base import ( + ShapeAsLatentModule, + Latent2MeshOutput, + Point2MeshOutput +) + + +class ShapeAsLatentPLModule(pl.LightningModule): + + def __init__(self, *, + module_cfg, + loss_cfg, + optimizer_cfg: Optional[DictConfig] = None, + ckpt_path: Optional[str] = None, + ignore_keys: Union[Tuple[str], List[str]] = ()): + + super().__init__() + + self.sal: ShapeAsLatentModule = instantiate_from_config(module_cfg, device=None, dtype=None) + + self.loss = instantiate_from_config(loss_cfg) + + self.optimizer_cfg = optimizer_cfg + + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + + self.save_hyperparameters() + + @property + def latent_shape(self): + return self.sal.latent_shape + + @property + def zero_rank(self): + if self._trainer: + zero_rank = self.trainer.local_rank == 0 + else: + zero_rank = True + + return zero_rank + + def init_from_ckpt(self, path, ignore_keys=()): + state_dict = torch.load(path, map_location="cpu")["state_dict"] + + keys = list(state_dict.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del state_dict[k] + + missing, unexpected = self.load_state_dict(state_dict, strict=False) + print(f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys") + if len(missing) > 0: + print(f"Missing Keys: {missing}") + print(f"Unexpected Keys: {unexpected}") + + def configure_optimizers(self) -> Tuple[List, List]: + lr = self.learning_rate + + # optimizers = [torch.optim.AdamW(self.sal.parameters(), lr=lr, betas=(0.9, 0.99), weight_decay=1e-4)] + # optimizers = [torch.optim.AdamW(self.sal.parameters(), lr=lr, betas=(0.9, 0.99), weight_decay=1e-3)] + + if self.optimizer_cfg is None: + optimizers = [torch.optim.AdamW(self.sal.parameters(), lr=lr, betas=(0.9, 0.99), weight_decay=1e-3)] + schedulers = [] + else: + optimizer = instantiate_from_config(self.optimizer_cfg.optimizer, params=self.sal.parameters()) + scheduler_func = instantiate_from_config( + self.optimizer_cfg.scheduler, + max_decay_steps=self.trainer.max_steps, + lr_max=lr + ) + scheduler = { + "scheduler": lr_scheduler.LambdaLR(optimizer, lr_lambda=scheduler_func.schedule), + "interval": "step", + "frequency": 1 + } + optimizers = [optimizer] + schedulers = [scheduler] + + return optimizers, schedulers + + def forward(self, + pc: torch.FloatTensor, + feats: torch.FloatTensor, + volume_queries: torch.FloatTensor): + + logits, center_pos, posterior = self.sal(pc, feats, volume_queries) + + return posterior, logits + + def encode(self, surface: 
torch.FloatTensor, sample_posterior=True): + + pc = surface[..., 0:3] + feats = surface[..., 3:6] + + latents, center_pos, posterior = self.sal.encode( + pc=pc, feats=feats, sample_posterior=sample_posterior + ) + + return latents + + def decode(self, + z_q, + bounds: Union[Tuple[float], List[float], float] = 1.1, + octree_depth: int = 7, + num_chunks: int = 10000) -> List[Latent2MeshOutput]: + + latents = self.sal.decode(z_q) # latents: [bs, num_latents, dim] + outputs = self.latent2mesh(latents, bounds=bounds, octree_depth=octree_depth, num_chunks=num_chunks) + + return outputs + + def training_step(self, batch: Dict[str, torch.FloatTensor], + batch_idx: int, optimizer_idx: int = 0) -> torch.FloatTensor: + """ + + Args: + batch (dict): the batch sample, and it contains: + - surface (torch.FloatTensor): [bs, n_surface, (3 + input_dim)] + - geo_points (torch.FloatTensor): [bs, n_pts, (3 + 1)] + + batch_idx (int): + + optimizer_idx (int): + + Returns: + loss (torch.FloatTensor): + + """ + + pc = batch["surface"][..., 0:3] + feats = batch["surface"][..., 3:] + + volume_queries = batch["geo_points"][..., 0:3] + volume_labels = batch["geo_points"][..., -1] + + posterior, logits = self( + pc=pc, feats=feats, volume_queries=volume_queries + ) + aeloss, log_dict_ae = self.loss(posterior, logits, volume_labels, split="train") + + self.log_dict(log_dict_ae, prog_bar=True, logger=True, batch_size=logits.shape[0], + sync_dist=False, rank_zero_only=True) + + return aeloss + + def validation_step(self, batch: Dict[str, torch.FloatTensor], batch_idx: int) -> torch.FloatTensor: + + pc = batch["surface"][..., 0:3] + feats = batch["surface"][..., 3:] + + volume_queries = batch["geo_points"][..., 0:3] + volume_labels = batch["geo_points"][..., -1] + + posterior, logits = self( + pc=pc, feats=feats, volume_queries=volume_queries, + ) + aeloss, log_dict_ae = self.loss(posterior, logits, volume_labels, split="val") + + self.log_dict(log_dict_ae, prog_bar=True, logger=True, batch_size=logits.shape[0], + sync_dist=False, rank_zero_only=True) + + return aeloss + + def point2mesh(self, + pc: torch.FloatTensor, + feats: torch.FloatTensor, + bounds: Union[Tuple[float], List[float]] = (-1.25, -1.25, -1.25, 1.25, 1.25, 1.25), + octree_depth: int = 7, + num_chunks: int = 10000) -> List[Point2MeshOutput]: + + """ + + Args: + pc: + feats: + bounds: + octree_depth: + num_chunks: + + Returns: + mesh_outputs (List[MeshOutput]): the mesh outputs list. + + """ + + outputs = [] + + device = pc.device + bs = pc.shape[0] + + # 1. point encoder + latents transformer + latents, center_pos, posterior = self.sal.encode(pc, feats) + latents = self.sal.decode(latents) # latents: [bs, num_latents, dim] + + geometric_func = partial(self.sal.query_geometry, latents=latents) + + # 2. decode geometry + mesh_v_f, has_surface = extract_geometry( + geometric_func=geometric_func, + device=device, + batch_size=bs, + bounds=bounds, + octree_depth=octree_depth, + num_chunks=num_chunks, + disable=not self.zero_rank + ) + + # 3. 
decode texture + for i, ((mesh_v, mesh_f), is_surface) in enumerate(zip(mesh_v_f, has_surface)): + if not is_surface: + outputs.append(None) + continue + + out = Point2MeshOutput() + out.mesh_v = mesh_v + out.mesh_f = mesh_f + out.pc = torch.cat([pc[i], feats[i]], dim=-1).cpu().numpy() + + if center_pos is not None: + out.center = center_pos[i].cpu().numpy() + + outputs.append(out) + + return outputs + + def latent2mesh(self, + latents: torch.FloatTensor, + bounds: Union[Tuple[float], List[float], float] = 1.1, + octree_depth: int = 7, + num_chunks: int = 10000) -> List[Latent2MeshOutput]: + + """ + + Args: + latents: [bs, num_latents, dim] + bounds: + octree_depth: + num_chunks: + + Returns: + mesh_outputs (List[MeshOutput]): the mesh outputs list. + + """ + + outputs = [] + + geometric_func = partial(self.sal.query_geometry, latents=latents) + + # 2. decode geometry + device = latents.device + mesh_v_f, has_surface = extract_geometry( + geometric_func=geometric_func, + device=device, + batch_size=len(latents), + bounds=bounds, + octree_depth=octree_depth, + num_chunks=num_chunks, + disable=not self.zero_rank + ) + + # 3. decode texture + for i, ((mesh_v, mesh_f), is_surface) in enumerate(zip(mesh_v_f, has_surface)): + if not is_surface: + outputs.append(None) + continue + + out = Latent2MeshOutput() + out.mesh_v = mesh_v + out.mesh_f = mesh_f + + outputs.append(out) + + return outputs diff --git a/ThirdParty/michelangelo/models/tsal/tsal_base.py b/ThirdParty/michelangelo/models/tsal/tsal_base.py new file mode 100644 index 0000000000000000000000000000000000000000..233a8afbdd0eb24024a6f915e770a286361cf0fe --- /dev/null +++ b/ThirdParty/michelangelo/models/tsal/tsal_base.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- + +import torch.nn as nn +from typing import Tuple, List, Optional +import pytorch_lightning as pl + + +class Point2MeshOutput(object): + def __init__(self): + self.mesh_v = None + self.mesh_f = None + self.center = None + self.pc = None + + +class Latent2MeshOutput(object): + + def __init__(self): + self.mesh_v = None + self.mesh_f = None + + +class AlignedMeshOutput(object): + + def __init__(self): + self.mesh_v = None + self.mesh_f = None + self.surface = None + self.image = None + self.text: Optional[str] = None + self.shape_text_similarity: Optional[float] = None + self.shape_image_similarity: Optional[float] = None + + +class ShapeAsLatentPLModule(pl.LightningModule): + latent_shape: Tuple[int] + + def encode(self, surface, *args, **kwargs): + raise NotImplementedError + + def decode(self, z_q, *args, **kwargs): + raise NotImplementedError + + def latent2mesh(self, latents, *args, **kwargs) -> List[Latent2MeshOutput]: + raise NotImplementedError + + def point2mesh(self, *args, **kwargs) -> List[Point2MeshOutput]: + raise NotImplementedError + + +class ShapeAsLatentModule(nn.Module): + latent_shape: Tuple[int, int] + + def __init__(self, *args, **kwargs): + super().__init__() + + def encode(self, *args, **kwargs): + raise NotImplementedError + + def decode(self, *args, **kwargs): + raise NotImplementedError + + def query_geometry(self, *args, **kwargs): + raise NotImplementedError + + +class AlignedShapeAsLatentPLModule(pl.LightningModule): + latent_shape: Tuple[int] + + def set_shape_model_only(self): + raise NotImplementedError + + def encode(self, surface, *args, **kwargs): + raise NotImplementedError + + def decode(self, z_q, *args, **kwargs): + raise NotImplementedError + + def latent2mesh(self, latents, *args, **kwargs) -> List[Latent2MeshOutput]: + raise 
NotImplementedError + + def point2mesh(self, *args, **kwargs) -> List[Point2MeshOutput]: + raise NotImplementedError + + +class AlignedShapeAsLatentModule(nn.Module): + shape_model: ShapeAsLatentModule + latent_shape: Tuple[int, int] + + def __init__(self, *args, **kwargs): + super().__init__() + + def set_shape_model_only(self): + raise NotImplementedError + + def encode_image_embed(self, *args, **kwargs): + raise NotImplementedError + + def encode_text_embed(self, *args, **kwargs): + raise NotImplementedError + + def encode_shape_embed(self, *args, **kwargs): + raise NotImplementedError + + +class TexturedShapeAsLatentModule(nn.Module): + + def __init__(self, *args, **kwargs): + super().__init__() + + def encode(self, *args, **kwargs): + raise NotImplementedError + + def decode(self, *args, **kwargs): + raise NotImplementedError + + def query_geometry(self, *args, **kwargs): + raise NotImplementedError + + def query_color(self, *args, **kwargs): + raise NotImplementedError diff --git a/ThirdParty/michelangelo/utils/__init__.py b/ThirdParty/michelangelo/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..76d2dd39781034eaa33293a2243ebee3b3c982c6 --- /dev/null +++ b/ThirdParty/michelangelo/utils/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- + +from .misc import get_config_from_file +from .misc import instantiate_from_config diff --git a/ThirdParty/michelangelo/utils/__pycache__/__init__.cpython-310.pyc b/ThirdParty/michelangelo/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f0edcc929d68ef48fe9ad37cf50a11b1b9b3023 Binary files /dev/null and b/ThirdParty/michelangelo/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/utils/__pycache__/misc.cpython-310.pyc b/ThirdParty/michelangelo/utils/__pycache__/misc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e0ef9d98f7ed78d44837099da12050772f8b26f Binary files /dev/null and b/ThirdParty/michelangelo/utils/__pycache__/misc.cpython-310.pyc differ diff --git a/ThirdParty/michelangelo/utils/eval.py b/ThirdParty/michelangelo/utils/eval.py new file mode 100644 index 0000000000000000000000000000000000000000..954b9ae2643c8adb6c9af6141ede2b38a329db22 --- /dev/null +++ b/ThirdParty/michelangelo/utils/eval.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- + +import torch + + +def compute_psnr(x, y, data_range: float = 2, eps: float = 1e-7): + + mse = torch.mean((x - y) ** 2) + psnr = 10 * torch.log10(data_range / (mse + eps)) + + return psnr + diff --git a/ThirdParty/michelangelo/utils/io.py b/ThirdParty/michelangelo/utils/io.py new file mode 100644 index 0000000000000000000000000000000000000000..e651e5a8750ab485b5fbd59a70b38e339b6ed79b --- /dev/null +++ b/ThirdParty/michelangelo/utils/io.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +import os +import io +import tarfile +import json +import numpy as np +import numpy.lib.format + + +def mkdir(path): + os.makedirs(path, exist_ok=True) + return path + + +def npy_loads(data): + stream = io.BytesIO(data) + return np.lib.format.read_array(stream) + + +def npz_loads(data): + return np.load(io.BytesIO(data)) + + +def json_loads(data): + return json.loads(data) + + +def load_json(filepath): + with open(filepath, "r") as f: + data = json.load(f) + return data + + +def write_json(filepath, data): + with open(filepath, "w") as f: + json.dump(data, f, indent=2) + + +def extract_tar(tar_path, tar_cache_folder): + + with tarfile.open(tar_path, "r") 
as tar: + tar.extractall(path=tar_cache_folder) + + tar_uids = sorted(os.listdir(tar_cache_folder)) + print(f"extract tar: {tar_path} to {tar_cache_folder}") + return tar_uids diff --git a/ThirdParty/michelangelo/utils/misc.py b/ThirdParty/michelangelo/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..bbef357bc7c63d3c7f33d048aec68dda2b0e3992 --- /dev/null +++ b/ThirdParty/michelangelo/utils/misc.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- + +import importlib +from omegaconf import OmegaConf, DictConfig, ListConfig + +import torch +import torch.distributed as dist +from typing import Union + + +def get_config_from_file(config_file: str) -> Union[DictConfig, ListConfig]: + config_file = OmegaConf.load(config_file) + + if 'base_config' in config_file.keys(): + if config_file['base_config'] == "default_base": + base_config = OmegaConf.create() + # base_config = get_default_config() + elif config_file['base_config'].endswith(".yaml"): + base_config = get_config_from_file(config_file['base_config']) + else: + raise ValueError(f"{config_file} must be `.yaml` file or it contains `base_config` key.") + + config_file = {key: value for key, value in config_file if key != "base_config"} + + return OmegaConf.merge(base_config, config_file) + + return config_file + + +def get_obj_from_str(string, reload=False): + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + + +def get_obj_from_config(config): + if "target" not in config: + raise KeyError("Expected key `target` to instantiate.") + + return get_obj_from_str(config["target"]) + + +def instantiate_from_config(config, **kwargs): + if "target" not in config: + raise KeyError("Expected key `target` to instantiate.") + + cls = get_obj_from_str(config["target"]) + + params = config.get("params", dict()) + # params.update(kwargs) + # instance = cls(**params) + kwargs.update(params) + instance = cls(**kwargs) + + return instance + + +def is_dist_avail_and_initialized(): + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def all_gather_batch(tensors): + """ + Performs all_gather operation on the provided tensors. 
+ """ + # Queue the gathered tensors + world_size = get_world_size() + # There is no need for reduction in the single-proc case + if world_size == 1: + return tensors + tensor_list = [] + output_tensor = [] + for tensor in tensors: + tensor_all = [torch.ones_like(tensor) for _ in range(world_size)] + dist.all_gather( + tensor_all, + tensor, + async_op=False # performance opt + ) + + tensor_list.append(tensor_all) + + for tensor_all in tensor_list: + output_tensor.append(torch.cat(tensor_all, dim=0)) + return output_tensor diff --git a/ThirdParty/michelangelo/utils/visualizers/__init__.py b/ThirdParty/michelangelo/utils/visualizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..40a96afc6ff09d58a702b76e3f7dd412fe975e26 --- /dev/null +++ b/ThirdParty/michelangelo/utils/visualizers/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/ThirdParty/michelangelo/utils/visualizers/color_util.py b/ThirdParty/michelangelo/utils/visualizers/color_util.py new file mode 100644 index 0000000000000000000000000000000000000000..7983243fd37f5fee47bc51475dc58c460a067830 --- /dev/null +++ b/ThirdParty/michelangelo/utils/visualizers/color_util.py @@ -0,0 +1,43 @@ +import numpy as np +import matplotlib.pyplot as plt + + +# Helper functions +def get_colors(inp, colormap="viridis", normalize=True, vmin=None, vmax=None): + colormap = plt.cm.get_cmap(colormap) + if normalize: + vmin = np.min(inp) + vmax = np.max(inp) + + norm = plt.Normalize(vmin, vmax) + return colormap(norm(inp))[:, :3] + + +def gen_checkers(n_checkers_x, n_checkers_y, width=256, height=256): + # tex dims need to be power of two. + array = np.ones((width, height, 3), dtype='float32') + + # width in texels of each checker + checker_w = width / n_checkers_x + checker_h = height / n_checkers_y + + for y in range(height): + for x in range(width): + color_key = int(x / checker_w) + int(y / checker_h) + if color_key % 2 == 0: + array[x, y, :] = [1., 0.874, 0.0] + else: + array[x, y, :] = [0., 0., 0.] + return array + + +def gen_circle(width=256, height=256): + xx, yy = np.mgrid[:width, :height] + circle = (xx - width / 2 + 0.5) ** 2 + (yy - height / 2 + 0.5) ** 2 + array = np.ones((width, height, 4), dtype='float32') + array[:, :, 0] = (circle <= width) + array[:, :, 1] = (circle <= width) + array[:, :, 2] = (circle <= width) + array[:, :, 3] = circle <= width + return array + diff --git a/ThirdParty/michelangelo/utils/visualizers/html_util.py b/ThirdParty/michelangelo/utils/visualizers/html_util.py new file mode 100644 index 0000000000000000000000000000000000000000..f90fe6cfefe6108655b48c36d60db537589993d5 --- /dev/null +++ b/ThirdParty/michelangelo/utils/visualizers/html_util.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +import io +import base64 +import numpy as np +from PIL import Image + + +def to_html_frame(content): + + html_frame = f""" + + + {content} + + + """ + + return html_frame + + +def to_single_row_table(caption: str, content: str): + + table_html = f""" + + + + + +
<table border="1">
    <caption>{caption}</caption>
    <tr>
        <td>{content}</td>
    </tr>
</table>
+ """ + + return table_html + + +def to_image_embed_tag(image: np.ndarray): + + # Convert np.ndarray to bytes + img = Image.fromarray(image) + raw_bytes = io.BytesIO() + img.save(raw_bytes, "PNG") + + # Encode bytes to base64 + image_base64 = base64.b64encode(raw_bytes.getvalue()).decode("utf-8") + + image_tag = f""" + Embedded Image + """ + + return image_tag diff --git a/ThirdParty/michelangelo/utils/visualizers/pythreejs_viewer.py b/ThirdParty/michelangelo/utils/visualizers/pythreejs_viewer.py new file mode 100644 index 0000000000000000000000000000000000000000..b3ce0f88f26fcd5e007fde2cec4816901a74ad33 --- /dev/null +++ b/ThirdParty/michelangelo/utils/visualizers/pythreejs_viewer.py @@ -0,0 +1,534 @@ +import numpy as np +from ipywidgets import embed +import pythreejs as p3s +import uuid + +from .color_util import get_colors, gen_circle, gen_checkers + + +EMBED_URL = "https://cdn.jsdelivr.net/npm/@jupyter-widgets/html-manager@1.0.1/dist/embed-amd.js" + + +class PyThreeJSViewer(object): + + def __init__(self, settings, render_mode="WEBSITE"): + self.render_mode = render_mode + self.__update_settings(settings) + self._light = p3s.DirectionalLight(color='white', position=[0, 0, 1], intensity=0.6) + self._light2 = p3s.AmbientLight(intensity=0.5) + self._cam = p3s.PerspectiveCamera(position=[0, 0, 1], lookAt=[0, 0, 0], fov=self.__s["fov"], + aspect=self.__s["width"] / self.__s["height"], children=[self._light]) + self._orbit = p3s.OrbitControls(controlling=self._cam) + self._scene = p3s.Scene(children=[self._cam, self._light2], background=self.__s["background"]) # "#4c4c80" + self._renderer = p3s.Renderer(camera=self._cam, scene=self._scene, controls=[self._orbit], + width=self.__s["width"], height=self.__s["height"], + antialias=self.__s["antialias"]) + + self.__objects = {} + self.__cnt = 0 + + def jupyter_mode(self): + self.render_mode = "JUPYTER" + + def offline(self): + self.render_mode = "OFFLINE" + + def website(self): + self.render_mode = "WEBSITE" + + def __get_shading(self, shading): + shad = {"flat": True, "wireframe": False, "wire_width": 0.03, "wire_color": "black", + "side": 'DoubleSide', "colormap": "viridis", "normalize": [None, None], + "bbox": False, "roughness": 0.5, "metalness": 0.25, "reflectivity": 1.0, + "line_width": 1.0, "line_color": "black", + "point_color": "red", "point_size": 0.01, "point_shape": "circle", + "text_color": "red" + } + for k in shading: + shad[k] = shading[k] + return shad + + def __update_settings(self, settings={}): + sett = {"width": 600, "height": 600, "antialias": True, "scale": 1.5, "background": "#ffffff", + "fov": 30} + for k in settings: + sett[k] = settings[k] + self.__s = sett + + def __add_object(self, obj, parent=None): + if not parent: # Object is added to global scene and objects dict + self.__objects[self.__cnt] = obj + self.__cnt += 1 + self._scene.add(obj["mesh"]) + else: # Object is added to parent object and NOT to objects dict + parent.add(obj["mesh"]) + + self.__update_view() + + if self.render_mode == "JUPYTER": + return self.__cnt - 1 + elif self.render_mode == "WEBSITE": + return self + + def __add_line_geometry(self, lines, shading, obj=None): + lines = lines.astype("float32", copy=False) + mi = np.min(lines, axis=0) + ma = np.max(lines, axis=0) + + geometry = p3s.LineSegmentsGeometry(positions=lines.reshape((-1, 2, 3))) + material = p3s.LineMaterial(linewidth=shading["line_width"], color=shading["line_color"]) + # , vertexColors='VertexColors'), + lines = p3s.LineSegments2(geometry=geometry, material=material) # 
type='LinePieces') + line_obj = {"geometry": geometry, "mesh": lines, "material": material, + "max": ma, "min": mi, "type": "Lines", "wireframe": None} + + if obj: + return self.__add_object(line_obj, obj), line_obj + else: + return self.__add_object(line_obj) + + def __update_view(self): + if len(self.__objects) == 0: + return + ma = np.zeros((len(self.__objects), 3)) + mi = np.zeros((len(self.__objects), 3)) + for r, obj in enumerate(self.__objects): + ma[r] = self.__objects[obj]["max"] + mi[r] = self.__objects[obj]["min"] + ma = np.max(ma, axis=0) + mi = np.min(mi, axis=0) + diag = np.linalg.norm(ma - mi) + mean = ((ma - mi) / 2 + mi).tolist() + scale = self.__s["scale"] * (diag) + self._orbit.target = mean + self._cam.lookAt(mean) + self._cam.position = [mean[0], mean[1], mean[2] + scale] + self._light.position = [mean[0], mean[1], mean[2] + scale] + + self._orbit.exec_three_obj_method('update') + self._cam.exec_three_obj_method('updateProjectionMatrix') + + def __get_bbox(self, v): + m = np.min(v, axis=0) + M = np.max(v, axis=0) + + # Corners of the bounding box + v_box = np.array([[m[0], m[1], m[2]], [M[0], m[1], m[2]], [M[0], M[1], m[2]], [m[0], M[1], m[2]], + [m[0], m[1], M[2]], [M[0], m[1], M[2]], [M[0], M[1], M[2]], [m[0], M[1], M[2]]]) + + f_box = np.array([[0, 1], [1, 2], [2, 3], [3, 0], [4, 5], [5, 6], [6, 7], [7, 4], + [0, 4], [1, 5], [2, 6], [7, 3]], dtype=np.uint32) + return v_box, f_box + + def __get_colors(self, v, f, c, sh): + coloring = "VertexColors" + if type(c) == np.ndarray and c.size == 3: # Single color + colors = np.ones_like(v) + colors[:, 0] = c[0] + colors[:, 1] = c[1] + colors[:, 2] = c[2] + # print("Single colors") + elif type(c) == np.ndarray and len(c.shape) == 2 and c.shape[1] == 3: # Color values for + if c.shape[0] == f.shape[0]: # faces + colors = np.hstack([c, c, c]).reshape((-1, 3)) + coloring = "FaceColors" + # print("Face color values") + elif c.shape[0] == v.shape[0]: # vertices + colors = c + # print("Vertex color values") + else: # Wrong size, fallback + print("Invalid color array given! Supported are numpy arrays.", type(c)) + colors = np.ones_like(v) + colors[:, 0] = 1.0 + colors[:, 1] = 0.874 + colors[:, 2] = 0.0 + elif type(c) == np.ndarray and c.size == f.shape[0]: # Function values for faces + normalize = sh["normalize"][0] != None and sh["normalize"][1] != None + cc = get_colors(c, sh["colormap"], normalize=normalize, + vmin=sh["normalize"][0], vmax=sh["normalize"][1]) + # print(cc.shape) + colors = np.hstack([cc, cc, cc]).reshape((-1, 3)) + coloring = "FaceColors" + # print("Face function values") + elif type(c) == np.ndarray and c.size == v.shape[0]: # Function values for vertices + normalize = sh["normalize"][0] != None and sh["normalize"][1] != None + colors = get_colors(c, sh["colormap"], normalize=normalize, + vmin=sh["normalize"][0], vmax=sh["normalize"][1]) + # print("Vertex function values") + + else: + colors = np.ones_like(v) + colors[:, 0] = 1.0 + colors[:, 1] = 0.874 + colors[:, 2] = 0.0 + + # No color + if c is not None: + print("Invalid color array given! 
Supported are numpy arrays.", type(c)) + + return colors, coloring + + def __get_point_colors(self, v, c, sh): + v_color = True + if c is None: # No color given, use global color + # conv = mpl.colors.ColorConverter() + colors = sh["point_color"] # np.array(conv.to_rgb(sh["point_color"])) + v_color = False + elif isinstance(c, str): # No color given, use global color + # conv = mpl.colors.ColorConverter() + colors = c # np.array(conv.to_rgb(c)) + v_color = False + elif type(c) == np.ndarray and len(c.shape) == 2 and c.shape[0] == v.shape[0] and c.shape[1] == 3: + # Point color + colors = c.astype("float32", copy=False) + + elif isinstance(c, np.ndarray) and len(c.shape) == 2 and c.shape[0] == v.shape[0] and c.shape[1] != 3: + # Function values for vertices, but the colors are features + c_norm = np.linalg.norm(c, ord=2, axis=-1) + normalize = sh["normalize"][0] != None and sh["normalize"][1] != None + colors = get_colors(c_norm, sh["colormap"], normalize=normalize, + vmin=sh["normalize"][0], vmax=sh["normalize"][1]) + colors = colors.astype("float32", copy=False) + + elif type(c) == np.ndarray and c.size == v.shape[0]: # Function color + normalize = sh["normalize"][0] != None and sh["normalize"][1] != None + colors = get_colors(c, sh["colormap"], normalize=normalize, + vmin=sh["normalize"][0], vmax=sh["normalize"][1]) + colors = colors.astype("float32", copy=False) + # print("Vertex function values") + + else: + print("Invalid color array given! Supported are numpy arrays.", type(c)) + colors = sh["point_color"] + v_color = False + + return colors, v_color + + def add_mesh(self, v, f, c=None, uv=None, n=None, shading={}, texture_data=None, **kwargs): + shading.update(kwargs) + sh = self.__get_shading(shading) + mesh_obj = {} + + # it is a tet + if v.shape[1] == 3 and f.shape[1] == 4: + f_tmp = np.ndarray([f.shape[0] * 4, 3], dtype=f.dtype) + for i in range(f.shape[0]): + f_tmp[i * 4 + 0] = np.array([f[i][1], f[i][0], f[i][2]]) + f_tmp[i * 4 + 1] = np.array([f[i][0], f[i][1], f[i][3]]) + f_tmp[i * 4 + 2] = np.array([f[i][1], f[i][2], f[i][3]]) + f_tmp[i * 4 + 3] = np.array([f[i][2], f[i][0], f[i][3]]) + f = f_tmp + + if v.shape[1] == 2: + v = np.append(v, np.zeros([v.shape[0], 1]), 1) + + # Type adjustment vertices + v = v.astype("float32", copy=False) + + # Color setup + colors, coloring = self.__get_colors(v, f, c, sh) + + # Type adjustment faces and colors + c = colors.astype("float32", copy=False) + + # Material and geometry setup + ba_dict = {"color": p3s.BufferAttribute(c)} + if coloring == "FaceColors": + verts = np.zeros((f.shape[0] * 3, 3), dtype="float32") + for ii in range(f.shape[0]): + # print(ii*3, f[ii]) + verts[ii * 3] = v[f[ii, 0]] + verts[ii * 3 + 1] = v[f[ii, 1]] + verts[ii * 3 + 2] = v[f[ii, 2]] + v = verts + else: + f = f.astype("uint32", copy=False).ravel() + ba_dict["index"] = p3s.BufferAttribute(f, normalized=False) + + ba_dict["position"] = p3s.BufferAttribute(v, normalized=False) + + if uv is not None: + uv = (uv - np.min(uv)) / (np.max(uv) - np.min(uv)) + if texture_data is None: + texture_data = gen_checkers(20, 20) + tex = p3s.DataTexture(data=texture_data, format="RGBFormat", type="FloatType") + material = p3s.MeshStandardMaterial(map=tex, reflectivity=sh["reflectivity"], side=sh["side"], + roughness=sh["roughness"], metalness=sh["metalness"], + flatShading=sh["flat"], + polygonOffset=True, polygonOffsetFactor=1, polygonOffsetUnits=5) + ba_dict["uv"] = p3s.BufferAttribute(uv.astype("float32", copy=False)) + else: + material = 
p3s.MeshStandardMaterial(vertexColors=coloring, reflectivity=sh["reflectivity"], + side=sh["side"], roughness=sh["roughness"], metalness=sh["metalness"], + flatShading=sh["flat"], + polygonOffset=True, polygonOffsetFactor=1, polygonOffsetUnits=5) + + if type(n) != type(None) and coloring == "VertexColors": # TODO: properly handle normals for FaceColors as well + ba_dict["normal"] = p3s.BufferAttribute(n.astype("float32", copy=False), normalized=True) + + geometry = p3s.BufferGeometry(attributes=ba_dict) + + if coloring == "VertexColors" and type(n) == type(None): + geometry.exec_three_obj_method('computeVertexNormals') + elif coloring == "FaceColors" and type(n) == type(None): + geometry.exec_three_obj_method('computeFaceNormals') + + # Mesh setup + mesh = p3s.Mesh(geometry=geometry, material=material) + + # Wireframe setup + mesh_obj["wireframe"] = None + if sh["wireframe"]: + wf_geometry = p3s.WireframeGeometry(mesh.geometry) # WireframeGeometry + wf_material = p3s.LineBasicMaterial(color=sh["wire_color"], linewidth=sh["wire_width"]) + wireframe = p3s.LineSegments(wf_geometry, wf_material) + mesh.add(wireframe) + mesh_obj["wireframe"] = wireframe + + # Bounding box setup + if sh["bbox"]: + v_box, f_box = self.__get_bbox(v) + _, bbox = self.add_edges(v_box, f_box, sh, mesh) + mesh_obj["bbox"] = [bbox, v_box, f_box] + + # Object setup + mesh_obj["max"] = np.max(v, axis=0) + mesh_obj["min"] = np.min(v, axis=0) + mesh_obj["geometry"] = geometry + mesh_obj["mesh"] = mesh + mesh_obj["material"] = material + mesh_obj["type"] = "Mesh" + mesh_obj["shading"] = sh + mesh_obj["coloring"] = coloring + mesh_obj["arrays"] = [v, f, c] # TODO replays with proper storage or remove if not needed + + return self.__add_object(mesh_obj) + + def add_lines(self, beginning, ending, shading={}, obj=None, **kwargs): + shading.update(kwargs) + if len(beginning.shape) == 1: + if len(beginning) == 2: + beginning = np.array([[beginning[0], beginning[1], 0]]) + else: + if beginning.shape[1] == 2: + beginning = np.append( + beginning, np.zeros([beginning.shape[0], 1]), 1) + if len(ending.shape) == 1: + if len(ending) == 2: + ending = np.array([[ending[0], ending[1], 0]]) + else: + if ending.shape[1] == 2: + ending = np.append( + ending, np.zeros([ending.shape[0], 1]), 1) + + sh = self.__get_shading(shading) + lines = np.hstack([beginning, ending]) + lines = lines.reshape((-1, 3)) + return self.__add_line_geometry(lines, sh, obj) + + def add_edges(self, vertices, edges, shading={}, obj=None, **kwargs): + shading.update(kwargs) + if vertices.shape[1] == 2: + vertices = np.append( + vertices, np.zeros([vertices.shape[0], 1]), 1) + sh = self.__get_shading(shading) + lines = np.zeros((edges.size, 3)) + cnt = 0 + for e in edges: + lines[cnt, :] = vertices[e[0]] + lines[cnt + 1, :] = vertices[e[1]] + cnt += 2 + return self.__add_line_geometry(lines, sh, obj) + + def add_points(self, points, c=None, shading={}, obj=None, **kwargs): + shading.update(kwargs) + if len(points.shape) == 1: + if len(points) == 2: + points = np.array([[points[0], points[1], 0]]) + else: + if points.shape[1] == 2: + points = np.append( + points, np.zeros([points.shape[0], 1]), 1) + sh = self.__get_shading(shading) + points = points.astype("float32", copy=False) + mi = np.min(points, axis=0) + ma = np.max(points, axis=0) + + g_attributes = {"position": p3s.BufferAttribute(points, normalized=False)} + m_attributes = {"size": sh["point_size"]} + + if sh["point_shape"] == "circle": # Plot circles + tex = p3s.DataTexture(data=gen_circle(16, 16), 
format="RGBAFormat", type="FloatType") + m_attributes["map"] = tex + m_attributes["alphaTest"] = 0.5 + m_attributes["transparency"] = True + else: # Plot squares + pass + + colors, v_colors = self.__get_point_colors(points, c, sh) + if v_colors: # Colors per point + m_attributes["vertexColors"] = 'VertexColors' + g_attributes["color"] = p3s.BufferAttribute(colors, normalized=False) + + else: # Colors for all points + m_attributes["color"] = colors + + material = p3s.PointsMaterial(**m_attributes) + geometry = p3s.BufferGeometry(attributes=g_attributes) + points = p3s.Points(geometry=geometry, material=material) + point_obj = {"geometry": geometry, "mesh": points, "material": material, + "max": ma, "min": mi, "type": "Points", "wireframe": None} + + if obj: + return self.__add_object(point_obj, obj), point_obj + else: + return self.__add_object(point_obj) + + def remove_object(self, obj_id): + if obj_id not in self.__objects: + print("Invalid object id. Valid ids are: ", list(self.__objects.keys())) + return + self._scene.remove(self.__objects[obj_id]["mesh"]) + del self.__objects[obj_id] + self.__update_view() + + def reset(self): + for obj_id in list(self.__objects.keys()).copy(): + self._scene.remove(self.__objects[obj_id]["mesh"]) + del self.__objects[obj_id] + self.__update_view() + + def update_object(self, oid=0, vertices=None, colors=None, faces=None): + obj = self.__objects[oid] + if type(vertices) != type(None): + if obj["coloring"] == "FaceColors": + f = obj["arrays"][1] + verts = np.zeros((f.shape[0] * 3, 3), dtype="float32") + for ii in range(f.shape[0]): + # print(ii*3, f[ii]) + verts[ii * 3] = vertices[f[ii, 0]] + verts[ii * 3 + 1] = vertices[f[ii, 1]] + verts[ii * 3 + 2] = vertices[f[ii, 2]] + v = verts + + else: + v = vertices.astype("float32", copy=False) + obj["geometry"].attributes["position"].array = v + # self.wireframe.attributes["position"].array = v # Wireframe updates? + obj["geometry"].attributes["position"].needsUpdate = True + # obj["geometry"].exec_three_obj_method('computeVertexNormals') + if type(colors) != type(None): + colors, coloring = self.__get_colors(obj["arrays"][0], obj["arrays"][1], colors, obj["shading"]) + colors = colors.astype("float32", copy=False) + obj["geometry"].attributes["color"].array = colors + obj["geometry"].attributes["color"].needsUpdate = True + if type(faces) != type(None): + if obj["coloring"] == "FaceColors": + print("Face updates are currently only possible in vertex color mode.") + return + f = faces.astype("uint32", copy=False).ravel() + print(obj["geometry"].attributes) + obj["geometry"].attributes["index"].array = f + # self.wireframe.attributes["position"].array = v # Wireframe updates? 
+ obj["geometry"].attributes["index"].needsUpdate = True + # obj["geometry"].exec_three_obj_method('computeVertexNormals') + # self.mesh.geometry.verticesNeedUpdate = True + # self.mesh.geometry.elementsNeedUpdate = True + # self.update() + if self.render_mode == "WEBSITE": + return self + + # def update(self): + # self.mesh.exec_three_obj_method('update') + # self.orbit.exec_three_obj_method('update') + # self.cam.exec_three_obj_method('updateProjectionMatrix') + # self.scene.exec_three_obj_method('update') + + def add_text(self, text, shading={}, **kwargs): + shading.update(kwargs) + sh = self.__get_shading(shading) + tt = p3s.TextTexture(string=text, color=sh["text_color"]) + sm = p3s.SpriteMaterial(map=tt) + text = p3s.Sprite(material=sm, scaleToTexture=True) + self._scene.add(text) + + # def add_widget(self, widget, callback): + # self.widgets.append(widget) + # widget.observe(callback, names='value') + + # def add_dropdown(self, options, default, desc, cb): + # widget = widgets.Dropdown(options=options, value=default, description=desc) + # self.__widgets.append(widget) + # widget.observe(cb, names="value") + # display(widget) + + # def add_button(self, text, cb): + # button = widgets.Button(description=text) + # self.__widgets.append(button) + # button.on_click(cb) + # display(button) + + def to_html(self, imports=True, html_frame=True): + # Bake positions (fixes centering bug in offline rendering) + if len(self.__objects) == 0: + return + ma = np.zeros((len(self.__objects), 3)) + mi = np.zeros((len(self.__objects), 3)) + for r, obj in enumerate(self.__objects): + ma[r] = self.__objects[obj]["max"] + mi[r] = self.__objects[obj]["min"] + ma = np.max(ma, axis=0) + mi = np.min(mi, axis=0) + diag = np.linalg.norm(ma - mi) + mean = (ma - mi) / 2 + mi + for r, obj in enumerate(self.__objects): + v = self.__objects[obj]["geometry"].attributes["position"].array + v -= mean + v += np.array([0.0, .9, 0.0]) #! to move the obj to the center of window + + scale = self.__s["scale"] * (diag) + self._orbit.target = [0.0, 0.0, 0.0] + self._cam.lookAt([0.0, 0.0, 0.0]) + # self._cam.position = [0.0, 0.0, scale] + self._cam.position = [0.0, 0.5, scale * 1.3] #! show four complete meshes in the window + self._light.position = [0.0, 0.0, scale] + + state = embed.dependency_state(self._renderer) + + # Somehow these entries are missing when the state is exported in python. + # Exporting from the GUI works, so we are inserting the missing entries. + for k in state: + if state[k]["model_name"] == "OrbitControlsModel": + state[k]["state"]["maxAzimuthAngle"] = "inf" + state[k]["state"]["maxDistance"] = "inf" + state[k]["state"]["maxZoom"] = "inf" + state[k]["state"]["minAzimuthAngle"] = "-inf" + + tpl = embed.load_requirejs_template + if not imports: + embed.load_requirejs_template = "" + + s = embed.embed_snippet(self._renderer, state=state, embed_url=EMBED_URL) + # s = embed.embed_snippet(self.__w, state=state) + embed.load_requirejs_template = tpl + + if html_frame: + s = "\n\n" + s + "\n\n" + + # Revert changes + for r, obj in enumerate(self.__objects): + v = self.__objects[obj]["geometry"].attributes["position"].array + v += mean + self.__update_view() + + return s + + def save(self, filename=""): + if filename == "": + uid = str(uuid.uuid4()) + ".html" + else: + filename = filename.replace(".html", "") + uid = filename + '.html' + with open(uid, "w") as f: + f.write(self.to_html()) + print("Plot saved to file %s." 
% uid) diff --git a/Train.py b/Train.py new file mode 100644 index 0000000000000000000000000000000000000000..863ca5df5fe73989465936f50fb6fcd80561cacb --- /dev/null +++ b/Train.py @@ -0,0 +1,69 @@ +import os +import shutil +import argparse +import torch +import torch.multiprocessing as mp +from Anymate.utils.train_utils import train_model +import yaml +from Anymate.dataset import AnymateDataset + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='PyG DGCNN') + parser.add_argument('--config', type=str, default='joints', help='load decoder') + parser.add_argument('--split', action='store_true', help='use split dataset') + args = parser.parse_args() + + world_size = torch.cuda.device_count() + print('world_size', world_size) + + #load config file + config_folder = './Anymate/configs' + assert os.path.exists(os.path.join(config_folder, args.config+'.yaml')), f"Config file {os.path.join(config_folder, args.config+'.yaml')} not found" + with open(os.path.join(config_folder, args.config+'.yaml')) as f: + config = yaml.load(f, Loader=yaml.FullLoader) + + for key, value in config['args'].items(): + setattr(args, key, value) + setattr(args, 'decoder', config['model']['decoder']) + args.logdir = os.path.join(args.logdir, args.mode + '-' + config['model']['encoder']+ '-' + config['model']['decoder']) + args.checkpoint = os.path.join(args.checkpoint, args.mode + '-' + config['model']['encoder']+ '-' + config['model']['decoder']) + print(args) + + # create checkpoint dir and log dir + if not os.path.isdir(args.checkpoint): + print("Create new checkpoint folder " + args.checkpoint) + os.makedirs(args.checkpoint, exist_ok=True) + if not args.resume: + if os.path.isdir(args.logdir): + shutil.rmtree(args.logdir) + os.makedirs(args.logdir, exist_ok=True) + else: + os.makedirs(args.logdir, exist_ok=True) + global train_dataset + + if not args.split: + # create a shared memory dataset dictionary + train_dataset = AnymateDataset(name=args.trainset, root=args.root) + train_dataset.data_list = [data for data in train_dataset.data_list if data['vox'].shape[0] != 0] + print('train_dataset', len(train_dataset.data_list)) + import multiprocessing + manager = multiprocessing.Manager() + shared_dict = manager.dict() + shared_dict['train_dataset'] = train_dataset + else: + shared_dict = None + + # Try different ports until we find a free one + port = 12355 + while port < 65535: # Max port number + try: + mp.spawn(train_model, args=(world_size, config, args, shared_dict, port), nprocs=world_size) + break + except Exception as e: + if "address already in use" in str(e).lower(): + print(f"Port {port} is already in use, trying next port") + port += 1 + else: + print(f"Error starting training on port {port}: {e}") + raise e + print(f"Successfully started training on port {port}") \ No newline at end of file diff --git a/app.py b/app.py index 0da0319a5b670dce5025888fde58916b96f19869..60b2ac4d8cdd841a38192269a206dcfa9d4835c7 100644 --- a/app.py +++ b/app.py @@ -1,64 +1,230 @@ import gradio as gr -from huggingface_hub import InferenceClient - -""" -For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference -""" -client = InferenceClient("HuggingFaceH4/zephyr-7b-beta") - - -def respond( - message, - history: list[tuple[str, str]], - system_message, - max_tokens, - temperature, - top_p, -): - messages = [{"role": "system", "content": system_message}] - - for val in history: - if val[0]: - 
messages.append({"role": "user", "content": val[0]}) - if val[1]: - messages.append({"role": "assistant", "content": val[1]}) - - messages.append({"role": "user", "content": message}) - - response = "" - - for message in client.chat_completion( - messages, - max_tokens=max_tokens, - stream=True, - temperature=temperature, - top_p=top_p, - ): - token = message.choices[0].delta.content - - response += token - yield response - - -""" -For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface -""" -demo = gr.ChatInterface( - respond, - additional_inputs=[ - gr.Textbox(value="You are a friendly Chatbot.", label="System message"), - gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"), - gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"), - gr.Slider( - minimum=0.1, - maximum=1.0, - value=0.95, - step=0.05, - label="Top-p (nucleus sampling)", - ), - ], -) +import os +from Anymate.args import ui_args +from Anymate.utils.ui_utils import process_input, vis_joint, vis_connectivity, vis_skinning, prepare_blender_file +from Anymate.utils.ui_utils import get_model, get_result_joint, get_result_connectivity, get_result_skinning, get_all_models, get_all_results +with gr.Blocks() as demo: + gr.Markdown(""" + # Anymate: Auto-rigging 3D Objects + """) + + pc = gr.State(value=None) + normalized_mesh_file = gr.State(value=None) + + result_joint = gr.State(value=None) + result_connectivity = gr.State(value=None) + result_skinning = gr.State(value=None) + + model_joint = gr.State(value=None) + model_connectivity = gr.State(value=None) + model_skinning = gr.State(value=None) + + with gr.Row(): + with gr.Column(): + # Input section + gr.Markdown("### Input") + mesh_input = gr.Model3D(label="Input 3D Mesh", clear_color=[0.0, 0.0, 0.0, 0.0]) + + # Sample 3D objects section + gr.Markdown("### Sample Objects") + sample_objects_dir = './samples' + sample_objects = [os.path.join(sample_objects_dir, f) for f in os.listdir(sample_objects_dir) + if f.endswith('.obj') and os.path.isfile(os.path.join(sample_objects_dir, f))] + sample_objects.sort() + + sample_dropdown = gr.Dropdown( + label="Select Sample Object", + choices=sample_objects, + interactive=True + ) + + load_sample_btn = gr.Button("Load Sample") + + with gr.Column(): + # Output section + gr.Markdown("### Output (wireframe display mode)") + mesh_output = gr.Model3D(label="Output 3D Mesh", clear_color=[0.0, 0.0, 0.0, 0.0], display_mode="wireframe") + + with gr.Column(): + # Output section + gr.Markdown("### (solid display mode & blender file)") + mesh_output2 = gr.Model3D(label="Output 3D Mesh", clear_color=[0.0, 0.0, 0.0, 0.0], display_mode="solid") + + blender_file = gr.File(label="Output Blender File", scale=1) + + # Checkpoint paths + joint_models_dir = 'Anymate/checkpoints/joint' + joint_models = [os.path.join(joint_models_dir, f) for f in os.listdir(joint_models_dir) + if os.path.isfile(os.path.join(joint_models_dir, f))] + with gr.Row(): + joint_checkpoint = gr.Dropdown( + label="Joint Checkpoint", + choices=joint_models, + value=ui_args.checkpoint_joint, + interactive=True + ) + joint_status = gr.Checkbox(label="Joint Model Status", value=False, interactive=False, scale=0.3) + # with gr.Column(): + # with gr.Row(): + # load_joint_btn = gr.Button("Load", scale=0.3) + + # process_joint_btn = gr.Button("Process", scale=0.3) + + conn_models_dir = 'Anymate/checkpoints/conn' + conn_models = [os.path.join(conn_models_dir, f) for f in 
os.listdir(conn_models_dir) + if os.path.isfile(os.path.join(conn_models_dir, f))] + with gr.Row(): + conn_checkpoint = gr.Dropdown( + label="Connection Checkpoint", + choices=conn_models, + value=ui_args.checkpoint_conn, + interactive=True + ) + conn_status = gr.Checkbox(label="Connectivity Model Status", value=False, interactive=False, scale=0.3) + # with gr.Column(): + # with gr.Row(): + # load_conn_btn = gr.Button("Load", scale=0.3) + + # process_conn_btn = gr.Button("Process", scale=0.3) + + skin_models_dir = 'Anymate/checkpoints/skin' + skin_models = [os.path.join(skin_models_dir, f) for f in os.listdir(skin_models_dir) + if os.path.isfile(os.path.join(skin_models_dir, f))] + with gr.Row(): + skin_checkpoint = gr.Dropdown( + label="Skin Checkpoint", + choices=skin_models, + value=ui_args.checkpoint_skin, + interactive=True + ) + skin_status = gr.Checkbox(label="Skinning Model Status", value=False, interactive=False, scale=0.3) + # with gr.Column(): + # with gr.Row(): + # load_skin_btn = gr.Button("Load", scale=0.3) + + # process_skin_btn = gr.Button("Process", scale=0.3) + + with gr.Row(): + load_all_btn = gr.Button("Load all models", scale=1) + process_all_btn = gr.Button("Run all models", scale=1) + # download_btn = gr.DownloadButton("Blender File Not Ready", scale=0.3) + # blender_file = gr.File(label="Blender File", scale=1) + + eps = gr.Number(label="Epsilon", value=0.03, interactive=True) + min_samples = gr.Number(label="Min Samples", value=1, interactive=True) + + mesh_input.change( + process_input, + inputs=mesh_input, + outputs=[normalized_mesh_file, mesh_output, mesh_output2, blender_file, pc, result_joint, result_connectivity, result_skinning] + ) + + load_sample_btn.click( + fn=lambda sample_path: sample_path if sample_path else None, + inputs=[sample_dropdown], + outputs=[mesh_input] + ).then( + process_input, + inputs=mesh_input, + outputs=[normalized_mesh_file, mesh_output, mesh_output2, blender_file, pc, result_joint, result_connectivity, result_skinning] + ) + + normalized_mesh_file.change( + lambda x: x, + inputs=normalized_mesh_file, + outputs=mesh_input + ) + + result_joint.change( + vis_joint, + inputs=[normalized_mesh_file, result_joint], + outputs=[mesh_output, mesh_output2] + ) + + result_connectivity.change( + vis_connectivity, + inputs=[normalized_mesh_file, result_joint, result_connectivity], + outputs=[mesh_output, mesh_output2] + ) + + result_skinning.change( + vis_skinning, + inputs=[normalized_mesh_file, result_joint, result_connectivity, result_skinning], + outputs=[mesh_output, mesh_output2] + ) + + result_skinning.change( + prepare_blender_file, + inputs=[normalized_mesh_file], + outputs=blender_file + ) + + joint_checkpoint.change( + get_model, + inputs=joint_checkpoint, + outputs=[model_joint, joint_status] + ) + + conn_checkpoint.change( + get_model, + inputs=conn_checkpoint, + outputs=[model_connectivity, conn_status] + ) + + skin_checkpoint.change( + get_model, + inputs=skin_checkpoint, + outputs=[model_skinning, skin_status] + ) + + load_all_btn.click( + get_all_models, + inputs=[joint_checkpoint, conn_checkpoint, skin_checkpoint], + outputs=[model_joint, model_connectivity, model_skinning, joint_status, conn_status, skin_status] + ) + + process_all_btn.click( + get_all_results, + inputs=[normalized_mesh_file, model_joint, model_connectivity, model_skinning, pc, eps, min_samples], + outputs=[result_joint, result_connectivity, result_skinning] + ) + + # load_joint_btn.click( + # fn=get_model, + # inputs=joint_checkpoint, + # 
outputs=[model_joint, joint_status] + # ) + + # load_conn_btn.click( + # fn=get_model, + # inputs=conn_checkpoint, + # outputs=[model_connectivity, conn_status] + # ) + + # load_skin_btn.click( + # fn=get_model, + # inputs=skin_checkpoint, + # outputs=[model_skinning, skin_status] + # ) + + # process_joint_btn.click( + # fn=get_result_joint, + # inputs=[normalized_mesh_file, model_joint, pc, eps, min_samples], + # outputs=result_joint + # ) + + # process_conn_btn.click( + # fn=get_result_connectivity, + # inputs=[normalized_mesh_file, model_connectivity, pc, result_joint], + # outputs=result_connectivity + # ) + + # process_skin_btn.click( + # fn=get_result_skinning, + # inputs=[normalized_mesh_file, model_skinning, pc, result_joint, result_connectivity], + # outputs=result_skinning + # ) if __name__ == "__main__": demo.launch() diff --git a/environment.yaml b/environment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e4e799c926e3ca6c6e3332ed4b2a62e2f97102b1 --- /dev/null +++ b/environment.yaml @@ -0,0 +1,225 @@ +name: anymate +channels: + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=5.1=1_gnu + - bzip2=1.0.8=h5eee18b_6 + - ca-certificates=2024.9.24=h06a4308_0 + - ld_impl_linux-64=2.40=h12ee557_0 + - libffi=3.4.4=h6a678d5_1 + - libgcc-ng=11.2.0=h1234567_1 + - libgomp=11.2.0=h1234567_1 + - libstdcxx-ng=11.2.0=h1234567_1 + - libuuid=1.41.5=h5eee18b_0 + - ncurses=6.4=h6a678d5_0 + - openssl=3.0.15=h5eee18b_0 + - pip=24.2=py310h06a4308_0 + - python=3.10.15=he870216_1 + - readline=8.2=h5eee18b_0 + - setuptools=75.1.0=py310h06a4308_0 + - sqlite=3.45.3=h5eee18b_0 + - tk=8.6.14=h39e8969_0 + - wheel=0.44.0=py310h06a4308_0 + - xz=5.4.6=h5eee18b_1 + - zlib=1.2.13=h5eee18b_1 + - pip: + - absl-py==2.1.0 + - accelerate==1.0.1 + - addict==2.4.0 + - aiofiles==23.2.1 + - aiohappyeyeballs==2.4.6 + - aiohttp==3.11.13 + - aiosignal==1.3.2 + - annotated-types==0.7.0 + - antlr4-python3-runtime==4.9.3 + - anyio==4.6.2.post1 + - apted==1.0.3 + - asttokens==2.4.1 + - async-timeout==5.0.1 + - attrs==24.2.0 + - blinker==1.8.2 + - bpy==4.0.0 + - certifi==2024.8.30 + - charset-normalizer==3.4.0 + - click==8.1.7 + - comm==0.2.2 + - configargparse==1.7 + - contourpy==1.3.0 + - cycler==0.12.1 + - cython==3.0.12 + - dash==2.18.1 + - dash-core-components==2.0.0 + - dash-html-components==2.0.0 + - dash-table==5.0.0 + - debugpy==1.8.7 + - decorator==5.1.1 + - deepspeed==0.15.3 + - diffusers==0.18.2 + - distro==1.9.0 + - docker-pycreds==0.4.0 + - easydict==1.13 + - einops==0.8.0 + - exceptiongroup==1.2.2 + - executing==2.1.0 + - fastapi==0.115.3 + - fastjsonschema==2.20.0 + - ffmpy==0.4.0 + - filelock==3.16.1 + - flask==3.0.3 + - fonttools==4.54.1 + - frozenlist==1.5.0 + - fsspec==2024.10.0 + - ftfy==6.0.1 + - gitdb==4.0.11 + - gitpython==3.1.43 + - gradio==5.3.0 + - gradio-client==1.4.2 + - grpcio==1.67.0 + - h11==0.14.0 + - h5py==3.12.1 + - hjson==3.1.0 + - httpcore==1.0.6 + - httpx==0.27.2 + - huggingface-hub==0.25.1 + - idna==3.10 + - imageio==2.36.0 + - importlib-metadata==8.5.0 + - ipykernel==6.29.5 + - ipython==8.28.0 + - ipywidgets==8.1.5 + - itsdangerous==2.2.0 + - jedi==0.19.1 + - jinja2==3.1.4 + - jiter==0.6.1 + - joblib==1.4.2 + - jsonschema==4.23.0 + - jsonschema-specifications==2024.10.1 + - jupyter-client==8.6.3 + - jupyter-core==5.7.2 + - jupyterlab-widgets==3.0.13 + - kiwisolver==1.4.7 + - latex2mathml==3.77.0 + - lightning-utilities==0.12.0 + - markdown==3.7 + - markdown-it-py==3.0.0 + - markdown2==2.5.1 + - markupsafe==2.1.5 + - matplotlib==3.9.2 + - 
matplotlib-inline==0.1.7 + - mdurl==0.1.2 + - mpmath==1.3.0 + - msgpack==1.1.0 + - multidict==6.1.0 + - nbformat==5.5.0 + - nest-asyncio==1.6.0 + - networkx==3.4.2 + - ninja==1.11.1.1 + - nltk==3.9.1 + - numpy==1.23.1 + - nvidia-cublas-cu12==12.4.5.8 + - nvidia-cuda-cupti-cu12==12.4.127 + - nvidia-cuda-nvrtc-cu12==12.4.127 + - nvidia-cuda-runtime-cu12==12.4.127 + - nvidia-cudnn-cu12==9.1.0.70 + - nvidia-cufft-cu12==11.2.1.3 + - nvidia-curand-cu12==10.3.5.147 + - nvidia-cusolver-cu12==11.6.1.9 + - nvidia-cusparse-cu12==12.3.1.170 + - nvidia-nccl-cu12==2.21.5 + - nvidia-nvjitlink-cu12==12.4.127 + - nvidia-nvtx-cu12==12.4.127 + - omegaconf==2.3.0 + - open3d==0.16.0 + - openai==1.52.2 + - opencv-python==4.7.0.72 + - orjson==3.10.10 + - packaging==24.1 + - pandas==2.2.3 + - parso==0.8.4 + - peft==0.13.2 + - pexpect==4.9.0 + - pillow==10.4.0 + - platformdirs==4.3.6 + - plotly==5.24.1 + - plyfile==1.1 + - point-cloud-utils==0.31.0 + - prompt-toolkit==3.0.48 + - propcache==0.3.0 + - protobuf==5.28.3 + - psutil==6.1.0 + - ptyprocess==0.7.0 + - pure-eval==0.2.3 + - py-cpuinfo==9.0.0 + - py-rouge==1.1 + - pydantic==2.9.2 + - pydantic-core==2.23.4 + - pydub==0.25.1 + - pygments==2.18.0 + - pyparsing==3.2.0 + - pyquaternion==0.9.9 + - python-dateutil==2.9.0.post0 + - python-multipart==0.0.12 + - pytorch-lightning==2.5.0.post0 + - pytz==2024.2 + - pywavelets==1.7.0 + - pyyaml==6.0.2 + - pyzmq==26.2.0 + - referencing==0.35.1 + - regex==2024.9.11 + - requests==2.32.3 + - retrying==1.3.4 + - rich==13.9.3 + - rouge==1.0.1 + - rpds-py==0.20.0 + - ruff==0.7.1 + - safehttpx==0.1.1 + - safetensors==0.4.5 + - scikit-image==0.19.3 + - scikit-learn==1.5.2 + - scipy==1.10.1 + - semantic-version==2.10.0 + - sentencepiece==0.2.0 + - sentry-sdk==2.17.0 + - setproctitle==1.3.3 + - shellingham==1.5.4 + - shortuuid==1.0.13 + - six==1.16.0 + - smmap==5.0.1 + - sniffio==1.3.1 + - stack-data==0.6.3 + - starlette==0.41.0 + - svgwrite==1.4.3 + - sympy==1.13.1 + - tenacity==9.0.0 + - tensorboard==2.18.0 + - tensorboard-data-server==0.7.2 + - termcolor==2.5.0 + - threadpoolctl==3.5.0 + - tifffile==2024.9.20 + - timm==0.4.12 + - tokenizers==0.12.1 + - tomlkit==0.12.0 + - torch==2.5.0 + - torchmetrics==1.6.1 + - torchvision==0.20.0 + - tornado==6.4.1 + - tqdm==4.66.5 + - traitlets==5.14.3 + - transformers==4.28.0 + - trimesh==3.18.3 + - triton==3.1.0 + - typer==0.12.5 + - typing-extensions==4.12.2 + - tzdata==2024.2 + - urllib3==2.2.3 + - uvicorn==0.32.0 + - wandb==0.18.5 + - wavedrom==2.0.3.post3 + - wcwidth==0.2.13 + - websockets==12.0 + - werkzeug==3.0.4 + - widgetsnbextension==4.0.13 + - yarl==1.18.3 + - zipp==3.20.2 + - zstandard==0.23.0 diff --git a/requirements.txt b/requirements.txt index cfc5b09a68217c6eba8d711a8c995c765049d339..7eac03f861636d147cb6a03c3ceaa256d4c65a99 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,199 @@ -huggingface_hub==0.25.2 \ No newline at end of file +huggingface_hub==0.25.2 +absl-py==2.1.0 +accelerate==1.0.1 +addict==2.4.0 +aiofiles==23.2.1 +aiohappyeyeballs==2.4.6 +aiohttp==3.11.13 +aiosignal==1.3.2 +annotated-types==0.7.0 +antlr4-python3-runtime==4.9.3 +anyio==4.6.2.post1 +apted==1.0.3 +asttokens==2.4.1 +async-timeout==5.0.1 +attrs==24.2.0 +blinker==1.8.2 +bpy==4.0.0 +certifi==2024.8.30 +charset-normalizer==3.4.0 +click==8.1.7 +comm==0.2.2 +ConfigArgParse==1.7 +contourpy==1.3.0 +cycler==0.12.1 +Cython==3.0.12 +dash==2.18.1 +dash-core-components==2.0.0 +dash-html-components==2.0.0 +dash-table==5.0.0 +debugpy==1.8.7 +decorator==5.1.1 +deepspeed==0.15.3 +diffusers==0.18.2 
+distro==1.9.0 +docker-pycreds==0.4.0 +easydict==1.13 +einops==0.8.0 +exceptiongroup==1.2.2 +executing==2.1.0 +fastapi==0.115.3 +fastjsonschema==2.20.0 +ffmpy==0.4.0 +filelock==3.16.1 +Flask==3.0.3 +fonttools==4.54.1 +frozenlist==1.5.0 +fsspec==2024.10.0 +ftfy==6.0.1 +gitdb==4.0.11 +GitPython==3.1.43 +gradio==5.3.0 +gradio_client==1.4.2 +grpcio==1.67.0 +h11==0.14.0 +h5py==3.12.1 +hjson==3.1.0 +httpcore==1.0.6 +httpx==0.27.2 +idna==3.10 +imageio==2.36.0 +importlib_metadata==8.5.0 +ipykernel==6.29.5 +ipython==8.28.0 +ipywidgets==8.1.5 +itsdangerous==2.2.0 +jedi==0.19.1 +Jinja2==3.1.4 +jiter==0.6.1 +joblib==1.4.2 +jsonschema==4.23.0 +jsonschema-specifications==2024.10.1 +jupyter_client==8.6.3 +jupyter_core==5.7.2 +jupyterlab_widgets==3.0.13 +kiwisolver==1.4.7 +latex2mathml==3.77.0 +lightning-utilities==0.12.0 +Markdown==3.7 +markdown-it-py==3.0.0 +markdown2==2.5.1 +MarkupSafe==2.1.5 +matplotlib==3.9.2 +matplotlib-inline==0.1.7 +mdurl==0.1.2 +mpmath==1.3.0 +msgpack==1.1.0 +multidict==6.1.0 +nbformat==5.5.0 +nest-asyncio==1.6.0 +networkx==3.4.2 +ninja==1.11.1.1 +nltk==3.9.1 +numpy==1.23.1 +nvidia-cublas-cu12==12.4.5.8 +nvidia-cuda-cupti-cu12==12.4.127 +nvidia-cuda-nvrtc-cu12==12.4.127 +nvidia-cuda-runtime-cu12==12.4.127 +nvidia-cudnn-cu12==9.1.0.70 +nvidia-cufft-cu12==11.2.1.3 +nvidia-curand-cu12==10.3.5.147 +nvidia-cusolver-cu12==11.6.1.9 +nvidia-cusparse-cu12==12.3.1.170 +nvidia-nccl-cu12==2.21.5 +nvidia-nvjitlink-cu12==12.4.127 +nvidia-nvtx-cu12==12.4.127 +omegaconf==2.3.0 +open3d==0.16.0 +openai==1.52.2 +opencv-python==4.7.0.72 +orjson==3.10.10 +packaging==24.1 +pandas==2.2.3 +parso==0.8.4 +peft==0.13.2 +pexpect==4.9.0 +pillow==10.4.0 +platformdirs==4.3.6 +plotly==5.24.1 +plyfile==1.1 +point-cloud-utils==0.31.0 +prompt_toolkit==3.0.48 +propcache==0.3.0 +protobuf==5.28.3 +psutil==6.1.0 +ptyprocess==0.7.0 +pure_eval==0.2.3 +py-cpuinfo==9.0.0 +py-rouge==1.1 +pydantic==2.9.2 +pydantic_core==2.23.4 +pydub==0.25.1 +Pygments==2.18.0 +pyparsing==3.2.0 +pyquaternion==0.9.9 +python-dateutil==2.9.0.post0 +python-multipart==0.0.12 +pytorch-lightning==2.5.0.post0 +pytz==2024.2 +PyWavelets==1.7.0 +PyYAML==6.0.2 +pyzmq==26.2.0 +referencing==0.35.1 +regex==2024.9.11 +requests==2.32.3 +retrying==1.3.4 +rich==13.9.3 +rouge==1.0.1 +rpds-py==0.20.0 +ruff==0.7.1 +safehttpx==0.1.1 +safetensors==0.4.5 +scikit-image==0.19.3 +scikit-learn==1.5.2 +scipy==1.10.1 +semantic-version==2.10.0 +sentencepiece==0.2.0 +sentry-sdk==2.17.0 +setproctitle==1.3.3 +shellingham==1.5.4 +shortuuid==1.0.13 +six==1.16.0 +smmap==5.0.1 +sniffio==1.3.1 +stack-data==0.6.3 +starlette==0.41.0 +svgwrite==1.4.3 +sympy==1.13.1 +tenacity==9.0.0 +tensorboard==2.18.0 +tensorboard-data-server==0.7.2 +termcolor==2.5.0 +threadpoolctl==3.5.0 +tifffile==2024.9.20 +timm==0.4.12 +tokenizers==0.12.1 +tomlkit==0.12.0 +torch==2.5.0 +torchmetrics==1.6.1 +torchvision==0.20.0 +tornado==6.4.1 +tqdm==4.66.5 +traitlets==5.14.3 +transformers==4.28.0 +trimesh==3.18.3 +triton==3.1.0 +typer==0.12.5 +typing_extensions==4.12.2 +tzdata==2024.2 +urllib3==2.2.3 +uvicorn==0.32.0 +wandb==0.18.5 +wavedrom==2.0.3.post3 +wcwidth==0.2.13 +websockets==12.0 +Werkzeug==3.0.4 +widgetsnbextension==4.0.13 +yarl==1.18.3 +zipp==3.20.2 +zstandard==0.23.0 diff --git a/samples/sample1.obj b/samples/sample1.obj new file mode 100644 index 0000000000000000000000000000000000000000..4f65a3cd20b7ce2838397c909d64f19085e9ee89 --- /dev/null +++ b/samples/sample1.obj @@ -0,0 +1,35270 @@ +# Blender 4.0.0 +# www.blender.org +o Wolf3D_Teeth +v -0.015678 0.891563 0.038689 +v -0.015390 0.892217 0.045058 
+v -0.014257 0.884128 0.037050 +v -0.013850 0.883809 0.042406 +v -0.013686 0.895775 0.040631 +v -0.013568 0.902773 0.042017 +v -0.013541 0.887696 0.049722 +v -0.013541 0.892119 0.051017 +v -0.013424 0.898361 0.046847 +v -0.012953 0.895463 0.046742 +v -0.012779 0.902243 0.047136 +v -0.012582 0.883346 0.047647 +v -0.011811 0.897330 0.052555 +v -0.011511 0.894074 0.051996 +v -0.011409 0.901255 0.052610 +v -0.011393 0.892962 0.028532 +v -0.011393 0.892962 0.028532 +v -0.011170 0.892123 0.040225 +v -0.011168 0.890123 0.041118 +v -0.011168 0.890123 0.041118 +v -0.010390 0.891752 0.055842 +v -0.009955 0.887314 0.053984 +v -0.009257 0.883013 0.051771 +v -0.008683 0.896073 0.056875 +v -0.008469 0.893120 0.056033 +v -0.008345 0.900520 0.057104 +v -0.007994 0.889792 0.048797 +v -0.007994 0.889792 0.048797 +v -0.007554 0.891574 0.048399 +v -0.004833 0.887448 0.056811 +v -0.004833 0.891392 0.058144 +v -0.004607 0.882779 0.054308 +v -0.004215 0.892148 0.058515 +v -0.004215 0.895277 0.059324 +v -0.004215 0.900171 0.059003 +v -0.000000 0.893004 0.050755 +v -0.000000 0.890578 0.051905 +v -0.000000 0.890578 0.051905 +v -0.000000 0.895245 0.039712 +v -0.000000 0.889364 0.040807 +v -0.000000 0.895878 0.028386 +v -0.000000 0.895467 0.023261 +v -0.000000 0.890415 0.029250 +v -0.000000 0.891797 0.060153 +v -0.000000 0.882746 0.055791 +v -0.000000 0.895159 0.060747 +v -0.000000 0.887271 0.058857 +v -0.000000 0.899929 0.060246 +v -0.000000 0.891124 0.059603 +v 0.004215 0.892148 0.058515 +v 0.004215 0.895277 0.059324 +v 0.004215 0.900171 0.059003 +v 0.004607 0.882779 0.054308 +v 0.004833 0.887448 0.056811 +v 0.004833 0.891392 0.058144 +v 0.007554 0.891574 0.048399 +v 0.007994 0.889792 0.048797 +v 0.007994 0.889792 0.048797 +v 0.008345 0.900520 0.057104 +v 0.008469 0.893120 0.056033 +v 0.008683 0.896073 0.056875 +v 0.009257 0.883013 0.051771 +v 0.009955 0.887314 0.053984 +v 0.010390 0.891752 0.055842 +v 0.011168 0.890123 0.041118 +v 0.011168 0.890123 0.041118 +v 0.011170 0.892123 0.040225 +v 0.011393 0.892962 0.028532 +v 0.011393 0.892962 0.028532 +v 0.011409 0.901255 0.052610 +v 0.011511 0.894074 0.051996 +v 0.011811 0.897330 0.052555 +v 0.012582 0.883346 0.047647 +v 0.012779 0.902243 0.047136 +v 0.012953 0.895463 0.046742 +v 0.013424 0.898361 0.046847 +v 0.013541 0.887696 0.049722 +v 0.013541 0.892119 0.051017 +v 0.013568 0.902773 0.042017 +v 0.013686 0.895775 0.040631 +v 0.013850 0.883809 0.042406 +v 0.014257 0.884128 0.037050 +v 0.015390 0.892217 0.045058 +v 0.015678 0.891563 0.038689 +v 0.006339 0.929619 0.040754 +v 0.007013 0.934599 0.040754 +v 0.007623 0.924761 0.040754 +v 0.007748 0.929707 0.046633 +v 0.008349 0.934141 0.046633 +v 0.008892 0.925380 0.046632 +v 0.009541 0.938943 0.040753 +v 0.009918 0.930753 0.049757 +v 0.010031 0.928936 0.049757 +v 0.010162 0.932557 0.049757 +v 0.010496 0.927176 0.049757 +v 0.010600 0.938009 0.046632 +v 0.010668 0.920763 0.040752 +v 0.010753 0.934278 0.049757 +v 0.011295 0.925541 0.049757 +v 0.011499 0.931439 0.050667 +v 0.011604 0.921820 0.046632 +v 0.011669 0.935852 0.049757 +v 0.011694 0.928415 0.050667 +v 0.012398 0.924093 0.049757 +v 0.012476 0.934307 0.050667 +v 0.012646 0.931252 0.051121 +v 0.012826 0.928682 0.051121 +v 0.012873 0.937216 0.049757 +v 0.013031 0.925696 0.050666 +v 0.013465 0.933696 0.051120 +v 0.013538 0.941988 0.040752 +v 0.013762 0.922888 0.049756 +v 0.013974 0.926375 0.051120 +v 0.014160 0.940721 0.046631 +v 0.014322 0.938319 0.049756 +v 0.014477 0.936584 0.050666 +v 0.015012 0.918235 0.040751 +v 0.015156 0.935640 0.051120 +v 0.015307 0.923695 
+[OBJ mesh vertex data: several thousand `v x y z` coordinate lines of the bundled mesh asset, omitted]
0.974508 -0.020606 +v -0.033040 0.930565 0.045278 +v -0.033034 0.906177 -0.009330 +v -0.032976 0.905735 0.049760 +v -0.032892 0.895391 0.043426 +v -0.032817 0.959026 0.041814 +v -0.032704 0.931463 0.046066 +v -0.032671 0.924375 0.046519 +v -0.032655 0.891603 0.008899 +v -0.032571 0.929677 0.045427 +v -0.032355 0.930093 0.045836 +v -0.032344 0.928964 0.045425 +v -0.032296 0.935041 0.047913 +v -0.032283 0.892539 0.041996 +v -0.032163 0.932365 0.047098 +v -0.032105 0.938978 0.049492 +v -0.032056 0.929603 0.046008 +v -0.032038 0.919642 0.049204 +v -0.032001 0.930618 0.046516 +v -0.031983 0.884246 0.032356 +v -0.031978 0.928034 0.045759 +v -0.031900 0.952877 0.045884 +v -0.031879 0.980048 -0.010682 +v -0.031832 0.937158 -0.033638 +v -0.031829 0.908402 0.051437 +v -0.031799 0.927450 0.046127 +v -0.031721 0.929035 0.046112 +v -0.031667 0.929922 0.046775 +v -0.031530 0.882406 0.023269 +v -0.031524 0.922880 0.047972 +v -0.031504 0.928658 0.046220 +v -0.031503 0.931235 0.047390 +v -0.031389 0.929374 0.046895 +v -0.031381 0.948696 0.048468 +v -0.031379 0.933295 0.048291 +v -0.031359 0.928330 0.046425 +v -0.031295 0.930399 0.047540 +v -0.031273 0.982365 -0.000219 +v -0.031207 0.888889 0.040289 +v -0.031199 0.925923 0.047236 +v -0.031190 0.884187 0.017023 +v -0.031183 0.946247 0.049987 +v -0.031153 0.826989 -0.002857 +v -0.031132 0.929084 0.046851 +v -0.031028 0.929795 0.047494 +v -0.030981 0.928927 0.046863 +v -0.030944 0.837000 -0.030903 +v -0.030928 0.931993 0.048387 +v -0.030924 0.915825 0.051373 +v -0.030924 0.936294 0.049522 +v -0.030915 0.943718 0.051065 +v -0.030886 0.892840 0.002866 +v -0.030792 0.931183 0.048460 +v -0.030739 0.929479 0.047282 +v -0.030659 0.926966 0.047751 +v -0.030632 0.929421 0.047206 +v -0.030493 0.829824 -0.005401 +v -0.030480 0.930435 0.048139 +v -0.030461 0.929804 0.047652 +v -0.030401 0.929494 0.047071 +v -0.030398 0.967284 0.039200 +v -0.030352 0.881515 0.030596 +v -0.030346 0.982835 0.010340 +v -0.030247 0.934237 0.049665 +v -0.030230 0.927777 0.048207 +v -0.030165 0.929704 0.044866 +v -0.030060 0.961092 0.044382 +v -0.029974 0.940233 0.051479 +v -0.029900 0.924393 0.048662 +v -0.029882 0.931842 0.049624 +v -0.029875 0.932898 0.049584 +v -0.029846 0.928289 0.048379 +v -0.029729 0.921654 0.049543 +v -0.029719 0.884678 0.038552 +v -0.029673 0.911080 0.053013 +v -0.029664 0.931214 0.049368 +v -0.029663 0.844455 -0.017127 +v -0.029609 0.839858 -0.009344 +v -0.029482 0.928573 0.048320 +v -0.029352 0.930832 0.048956 +v -0.029234 0.925534 0.049303 +v -0.029207 0.954682 0.048419 +v -0.029130 0.930682 0.048509 +v -0.029109 0.937311 0.051139 +v -0.029061 0.922061 -0.030717 +v -0.028977 0.899211 0.050499 +v -0.028778 0.880861 0.019846 +v -0.028730 0.926498 0.049698 +v -0.028700 0.895608 0.048541 +v -0.028663 0.950215 0.050886 +v -0.028642 0.935145 0.051279 +v -0.028576 0.902833 0.052500 +v -0.028543 0.918361 0.051611 +v -0.028517 0.933014 0.051099 +v -0.028442 0.932233 0.051012 +v -0.028441 0.881775 0.037345 +v -0.028380 0.927008 0.049831 +v -0.028368 0.933843 0.051078 +v -0.028354 0.947547 0.052321 +v -0.028268 0.944831 0.053445 +v -0.028257 0.878935 0.028315 +v -0.028234 0.981674 0.019844 +v -0.028202 0.892940 0.047334 +v -0.028162 0.931844 0.050724 +v -0.028123 0.927348 0.049697 +v -0.028089 0.883746 0.012783 +v -0.028053 0.894424 -0.007656 +v -0.028024 0.836964 -0.005097 +v -0.027991 0.923263 0.050023 +v -0.027919 0.931738 0.050428 +v -0.027918 0.932205 0.045022 +v -0.027879 0.926667 0.046751 +v -0.027739 0.931975 0.049828 +v -0.027598 0.884649 0.005310 +v -0.027448 
0.890300 0.046333 +v -0.027391 0.940996 0.053253 +v -0.027361 0.905442 0.053892 +v -0.027296 0.924407 0.050735 +v -0.026858 0.879034 0.035972 +v -0.026710 0.925379 0.051149 +v -0.026705 0.938072 0.052700 +v -0.026675 0.979454 0.028395 +v -0.026502 0.926011 0.051139 +v -0.026454 0.936074 0.052835 +v -0.026417 0.934066 0.052692 +v -0.026391 0.933357 0.052493 +v -0.026375 0.920481 0.051505 +v -0.026351 0.934714 0.052672 +v -0.026351 0.853507 -0.006627 +v -0.026332 0.926395 0.050838 +v -0.026198 0.933037 0.052144 +v -0.026194 0.886791 0.045225 +v -0.026049 0.932805 0.051751 +v -0.026013 0.932945 0.051029 +v -0.025995 0.884942 -0.005413 +v -0.025827 0.914712 0.054072 +v -0.025803 0.970157 0.042290 +v -0.025692 0.857347 -0.015913 +v -0.025591 0.823954 0.004392 +v -0.025394 0.963668 0.047793 +v -0.025216 0.907872 0.055149 +v -0.025176 0.830931 -0.036075 +v -0.025167 0.826618 0.003807 +v -0.024965 0.976279 0.036318 +v -0.024874 0.922279 0.051730 +v -0.024870 0.877627 0.007630 +v -0.024868 0.876600 -0.003588 +v -0.024826 0.847677 -0.028588 +v -0.024749 0.907269 -0.024142 +v -0.024708 0.876954 0.025080 +v -0.024694 0.846078 -0.002512 +v -0.024685 0.956606 0.051800 +v -0.024683 0.876241 0.034061 +v -0.024365 0.882566 0.044179 +v -0.024227 0.896891 0.052851 +v -0.024157 0.923459 0.052195 +v -0.024149 0.868524 -0.001413 +v -0.024106 0.951483 0.054017 +v -0.024093 0.917810 0.053191 +v -0.023971 0.900218 0.054249 +v -0.023803 0.893718 0.051579 +v -0.023783 0.948390 0.055351 +v -0.023748 0.868299 -0.015570 +v -0.023723 0.924525 0.052510 +v -0.023677 0.878295 0.015369 +v -0.023609 0.925210 0.052420 +v -0.023596 0.925532 0.052016 +v -0.023564 0.945501 0.056382 +v -0.023339 0.859605 0.001934 +v -0.023303 0.891488 0.050814 +v -0.022931 0.832822 0.002763 +v -0.022931 0.879490 0.043539 +v -0.022887 0.902532 0.055253 +v -0.022700 0.941437 0.055470 +v -0.022692 0.839013 0.001678 +v -0.022644 0.919992 0.052745 +v -0.022540 0.889167 0.050236 +v -0.022476 0.910203 0.056007 +v -0.022388 0.870420 0.008357 +v -0.022351 0.934198 0.053983 +v -0.022337 0.933929 0.053624 +v -0.022323 0.933750 0.053175 +v -0.022320 0.934666 0.054243 +v -0.022288 0.934104 0.052238 +v -0.022243 0.935335 0.054408 +v -0.022065 0.925088 0.048609 +v -0.022042 0.936693 0.054513 +v -0.022034 0.895512 -0.019545 +v -0.022010 0.876751 -0.015777 +v -0.021999 0.987682 -0.013768 +v -0.021968 0.938643 0.054405 +v -0.021790 0.989572 -0.002303 +v -0.021729 0.835931 -0.034765 +v -0.021724 0.930833 0.044656 +v -0.021652 0.851147 0.004406 +v -0.021512 0.921806 0.052805 +v -0.021490 0.876481 0.042688 +v -0.021434 0.988906 0.009809 +v -0.021414 0.885667 -0.017070 +v -0.021202 0.885667 0.049518 +v -0.021187 0.873874 0.031398 +v -0.021139 0.904665 0.056101 +v -0.021118 0.915858 0.054655 +v -0.020964 0.972570 0.044918 +v -0.020936 0.935789 0.049172 +v -0.020920 0.923069 0.053155 +v -0.020886 0.895568 0.054057 +v -0.020676 0.898522 0.055058 +v -0.020671 0.982515 -0.025460 +v -0.020609 0.892941 0.053110 +v -0.020585 0.918252 0.053671 +v -0.020554 0.924185 0.053301 +v -0.020425 0.965705 0.050795 +v -0.020388 0.925117 0.052845 +v -0.020386 0.924834 0.053188 +v -0.020375 0.987511 0.020729 +v -0.020111 0.862122 0.008975 +v -0.020078 0.891105 0.052578 +v -0.019864 0.900627 0.055775 +v -0.019859 0.873577 0.041229 +v -0.019726 0.920290 0.053184 +v -0.019692 0.871640 0.014453 +v -0.019652 0.957983 0.054836 +v -0.019605 0.859589 -0.027347 +v -0.019276 0.888988 0.052164 +v -0.019263 0.841714 0.006374 +v -0.019243 0.822044 0.007191 +v -0.019161 0.881499 0.049181 +v -0.019122 
0.912156 0.056180 +v -0.018931 0.952164 0.056971 +v -0.018923 0.894810 0.054636 +v -0.018836 0.892639 0.053736 +v -0.018801 0.906778 0.056781 +v -0.018753 0.922034 0.053230 +v -0.018598 0.824897 0.007987 +v -0.018500 0.897366 0.055606 +v -0.018465 0.948660 0.058123 +v -0.018293 0.891215 0.053363 +v -0.018282 0.902682 0.056468 +v -0.018274 0.923241 0.053431 +v -0.018195 0.933928 0.052819 +v -0.018190 0.945282 0.058839 +v -0.018088 0.933676 0.053725 +v -0.018083 0.985367 0.030609 +v -0.018026 0.853518 0.009747 +v -0.017996 0.933823 0.054157 +v -0.017955 0.924277 0.053458 +v -0.017895 0.934057 0.054473 +v -0.017883 0.832734 0.007596 +v -0.017803 0.885636 0.051970 +v -0.017785 0.934476 0.054767 +v -0.017764 0.924889 0.053330 +v -0.017716 0.935086 0.054916 +v -0.017702 0.899247 0.056259 +v -0.017685 0.925174 0.053044 +v -0.017631 0.940930 0.057171 +v -0.017536 0.892449 0.054099 +v -0.017520 0.894194 0.054979 +v -0.017477 0.878471 0.049316 +v -0.017467 0.917340 0.054620 +v -0.017464 0.936354 0.055134 +v -0.017439 0.889236 0.053058 +v -0.017432 0.919237 0.053897 +v -0.017317 0.938209 0.055192 +v -0.017119 0.871141 0.039401 +v -0.017104 0.974134 -0.036723 +v -0.016973 0.920999 0.053366 +v -0.016944 0.891331 0.053806 +v -0.016835 0.896326 0.056115 +v -0.016695 0.863358 0.013436 +v -0.016691 0.960277 -0.044393 +v -0.016613 0.966748 0.052441 +v -0.016543 0.924848 0.049029 +v -0.016362 0.922585 0.053204 +v -0.016207 0.892325 0.054396 +v -0.016198 0.904490 0.057085 +v -0.016144 0.901189 0.056981 +v -0.016135 0.908495 0.057187 +v -0.016098 0.893403 0.055236 +v -0.016047 0.889693 0.053798 +v -0.016003 0.874927 0.049111 +v -0.015981 0.923717 0.053248 +v -0.015949 0.913951 0.056205 +v -0.015943 0.886163 0.053341 +v -0.015927 0.932627 0.052391 +v -0.015906 0.958556 0.056547 +v -0.015905 0.869054 -0.026593 +v -0.015866 0.897911 0.056888 +v -0.015816 0.981096 0.039989 +v -0.015798 0.932469 0.053354 +v -0.015746 0.924707 0.053130 +v -0.015743 0.881682 0.052404 +v -0.015682 0.891517 0.054234 +v -0.015631 0.845892 -0.032436 +v -0.015629 0.932625 0.053767 +v -0.015567 0.925230 0.052898 +v -0.015544 0.873080 0.023400 +v -0.015454 0.925514 0.052597 +v -0.015446 0.928837 0.045199 +v -0.015437 0.871683 0.047345 +v -0.015434 0.932902 0.054166 +v -0.015322 0.932686 0.048860 +v -0.015306 0.843534 0.010146 +v -0.015220 0.933354 0.054491 +v -0.015210 0.891965 0.054478 +v -0.015116 0.895725 0.039023 +v -0.015112 0.952279 0.058673 +v -0.015057 0.974483 0.047317 +v -0.015025 0.942495 -0.046449 +v -0.015016 0.934083 0.054679 +v -0.015014 0.894931 0.056773 +v -0.014940 0.891901 0.054453 +v -0.014884 0.892540 0.055207 +v -0.014792 0.891531 0.054512 +v -0.014693 0.920593 0.053847 +v -0.014682 0.890383 0.054601 +v -0.014657 0.855195 0.012969 +v -0.014633 0.935274 0.055105 +v -0.014607 0.891924 0.054437 +v -0.014588 0.891650 0.054543 +v -0.014565 0.922088 0.053231 +v -0.014533 0.892449 0.055244 +v -0.014512 0.948402 0.059644 +v -0.014486 0.886997 0.054494 +v -0.014423 0.918994 0.054526 +v -0.014348 0.891777 0.054563 +v -0.014328 0.899628 0.057794 +v -0.014291 0.923356 0.052866 +v -0.014271 0.937114 0.055301 +v -0.014253 0.930939 0.052529 +v -0.014219 0.902772 0.057666 +v -0.014195 0.931040 0.051849 +v -0.014188 0.891949 0.054500 +v -0.014150 0.892320 0.055265 +v -0.014110 0.931091 0.052965 +v -0.014066 0.924364 0.052772 +v -0.014019 0.906034 0.057603 +v -0.014018 0.944519 0.060004 +v -0.013915 0.925226 0.052622 +v -0.013871 0.891935 0.054710 +v -0.013848 0.879161 0.053133 +v -0.013842 0.896068 0.057921 +v -0.013816 0.939973 
0.057887 +v -0.013816 0.925691 0.052404 +v -0.013767 0.926056 0.051946 +v -0.013758 0.931339 0.053637 +v -0.013738 0.910373 0.057202 +v -0.013708 0.882564 0.054329 +v -0.013672 0.871303 0.029165 +v -0.013637 0.890947 0.055167 +v -0.013577 0.857948 -0.029706 +v -0.013491 0.869369 0.045563 +v -0.013484 0.891982 0.054635 +v -0.013467 0.931629 0.053936 +v -0.013466 0.925417 0.048438 +v -0.013415 0.893512 0.057390 +v -0.013365 0.892007 0.054738 +v -0.013357 0.892150 0.055427 +v -0.013329 0.891126 0.055298 +v -0.013301 0.828433 -0.039074 +v -0.013201 0.915658 0.056216 +v -0.013142 0.888352 0.055892 +v -0.013130 0.932394 0.054230 +v -0.013106 0.894291 0.048778 +v -0.013106 0.894291 0.048778 +v -0.013094 0.924951 -0.042969 +v -0.013050 0.868665 0.020260 +v -0.013044 0.929426 0.051122 +v -0.013043 0.891281 0.055423 +v -0.013040 0.929225 0.051694 +v -0.012988 0.893739 0.023208 +v -0.012988 0.893739 0.023208 +v -0.012891 0.893132 0.057559 +v -0.012738 0.967455 0.053635 +v -0.012706 0.929316 0.052358 +v -0.012698 0.924223 0.052633 +v -0.012685 0.926194 0.051902 +v -0.012673 0.891991 0.055397 +v -0.012638 0.925700 0.052201 +v -0.012632 0.924967 0.052426 +v -0.012619 0.892006 0.054659 +v -0.012616 0.926816 0.050885 +v -0.012608 0.933765 0.054605 +v -0.012597 0.892019 0.054710 +v -0.012596 0.984888 -0.026412 +v -0.012593 0.833458 0.010485 +v -0.012569 0.891519 0.055570 +v -0.012557 0.923233 0.053106 +v -0.012547 0.870623 0.050623 +v -0.012447 0.901127 0.058668 +v -0.012439 0.929565 0.052922 +v -0.012425 0.892834 0.057665 +v -0.012360 0.897212 0.059163 +v -0.012338 0.928178 0.046304 +v -0.012297 0.873314 0.052505 +v -0.012289 0.904088 0.058303 +v -0.012283 0.928131 0.050712 +v -0.012262 0.894421 0.059100 +v -0.012226 0.820079 0.010503 +v -0.012167 0.889490 0.056924 +v -0.012167 0.823645 0.010355 +v -0.012148 0.922112 0.053807 +v -0.012143 0.883763 0.055893 +v -0.012119 0.891731 0.055638 +v -0.012110 0.907757 0.057955 +v -0.012108 0.891430 0.050773 +v -0.012108 0.891430 0.050773 +v -0.012102 0.877371 0.053921 +v -0.012098 0.876363 -0.026599 +v -0.012074 0.927775 0.050904 +v -0.012051 0.929887 0.053278 +v -0.012008 0.926873 0.051210 +v -0.011978 0.891828 0.055190 +v -0.011977 0.958867 0.057812 +v -0.011952 0.891839 0.054061 +v -0.011952 0.891839 0.054061 +v -0.011952 0.891839 0.054061 +v -0.011918 0.891970 0.054122 +v -0.011918 0.891970 0.054122 +v -0.011869 0.926582 0.051738 +v -0.011847 0.927929 0.051627 +v -0.011846 0.889839 0.057142 +v -0.011829 0.926212 0.052036 +v -0.011769 0.893886 0.059336 +v -0.011751 0.925739 0.052236 +v -0.011745 0.935584 0.055457 +v -0.011737 0.907913 0.058360 +v -0.011706 0.844096 0.012328 +v -0.011704 0.930653 0.053585 +v -0.011650 0.891933 0.055578 +v -0.011646 0.920680 0.054643 +v -0.011646 0.892654 0.051567 +v -0.011639 0.908570 0.059539 +v -0.011634 0.927251 0.051417 +v -0.011617 0.928193 0.052258 +v -0.011559 0.892459 0.057777 +v -0.011557 0.891686 0.051797 +v -0.011557 0.891686 0.051797 +v -0.011548 0.890162 0.057272 +v -0.011503 0.925222 0.052573 +v -0.011499 0.927534 0.051745 +v -0.011496 0.880431 0.055402 +v -0.011470 0.927107 0.051902 +v -0.011431 0.869036 0.037297 +v -0.011396 0.912144 0.057478 +v -0.011393 0.891706 0.054454 +v -0.011393 0.891706 0.054454 +v -0.011346 0.906461 0.060145 +v -0.011322 0.927651 0.052214 +v -0.011318 0.926932 0.052179 +v -0.011312 0.952296 0.059874 +v -0.011296 0.893465 0.059421 +v -0.011236 0.908914 -0.034588 +v -0.011211 0.928552 0.052701 +v -0.011198 0.890502 0.051782 +v -0.011195 0.926698 0.052288 +v -0.011157 0.907830 0.061804 +v 
-0.011114 0.868490 0.048791 +v -0.011101 0.862243 0.018122 +v -0.011090 0.891793 0.052817 +v -0.011090 0.891793 0.052817 +v -0.011056 0.891333 0.052535 +v -0.011055 0.932015 0.053992 +v -0.011045 0.891973 0.052912 +v -0.011030 0.905231 0.058893 +v -0.011001 0.890706 0.057485 +v -0.010991 0.895249 0.060604 +v -0.010976 0.909707 0.060941 +v -0.010950 0.892034 0.054972 +v -0.010950 0.892034 0.054972 +v -0.010937 0.910202 0.059383 +v -0.010925 0.927666 0.052534 +v -0.010907 0.905546 0.059171 +v -0.010881 0.886155 -0.027079 +v -0.010847 0.993504 -0.001936 +v -0.010844 0.924503 0.053082 +v -0.010839 0.910519 0.058238 +v -0.010835 0.926466 0.052612 +v -0.010829 0.929050 0.052968 +v -0.010797 0.892042 0.057578 +v -0.010791 0.891824 0.053422 +v -0.010758 0.910312 0.058536 +v -0.010756 0.885593 0.057814 +v -0.010751 0.948146 0.060610 +v -0.010738 0.892082 0.053021 +v -0.010720 0.991189 -0.014339 +v -0.010638 0.917246 0.056664 +v -0.010613 0.885862 0.042346 +v -0.010581 0.894643 0.060886 +v -0.010580 0.992792 0.009868 +v -0.010567 0.891323 0.057629 +v -0.010561 0.927793 0.052757 +v -0.010549 0.898378 0.060289 +v -0.010494 0.869906 0.052337 +v -0.010458 0.902708 0.059344 +v -0.010446 0.896462 -0.029153 +v -0.010383 0.892857 0.059458 +v -0.010332 0.875802 0.054976 +v -0.010303 0.891132 0.053470 +v -0.010278 0.938214 0.057835 +v -0.010271 0.891683 0.057065 +v -0.010228 0.891850 0.054073 +v -0.010191 0.906750 0.062069 +v -0.010188 0.872266 0.054075 +v -0.010153 0.894109 0.060976 +v -0.010122 0.894938 0.055086 +v -0.010100 0.891922 0.057461 +v -0.010054 0.943916 0.060746 +v -0.010036 0.905396 0.060676 +v -0.010017 0.930065 0.053344 +v -0.010006 0.923816 0.053891 +v -0.009993 0.991519 0.021241 +v -0.009919 0.911365 0.060309 +v -0.009916 0.833011 -0.037901 +v -0.009854 0.882031 0.057163 +v -0.009848 0.887205 0.059610 +v -0.009833 0.933570 0.055027 +v -0.009824 0.926206 0.053059 +v -0.009798 0.909140 0.063935 +v -0.009747 0.891411 0.056177 +v -0.009747 0.891411 0.056177 +v -0.009747 0.854779 0.016329 +v -0.009722 0.887170 0.051032 +v -0.009612 0.904207 0.059574 +v -0.009605 0.928148 0.053080 +v -0.009596 0.896171 0.061837 +v -0.009581 0.892207 0.056755 +v -0.009581 0.892207 0.056755 +v -0.009568 0.891879 0.055259 +v -0.009562 0.887722 0.060110 +v -0.009504 0.892182 0.059206 +v -0.009494 0.904757 0.059785 +v -0.009437 0.913752 0.058264 +v -0.009418 0.868000 0.050504 +v -0.009378 0.910818 0.062720 +v -0.009365 0.911919 0.059378 +v -0.009355 0.895482 0.062165 +v -0.009316 0.893173 0.060915 +v -0.009285 0.912243 0.059080 +v -0.009257 0.888256 0.060384 +v -0.009200 0.890655 0.054887 +v -0.009183 0.883770 0.013369 +v -0.009183 0.883770 0.013369 +v -0.009150 0.885889 0.026124 +v -0.009124 0.867594 0.043373 +v -0.009123 0.922654 0.055113 +v -0.009116 0.908033 0.064192 +v -0.009103 0.891844 0.055674 +v -0.009056 0.988126 0.031822 +v -0.009033 0.891636 0.058619 +v -0.009011 0.894794 0.062207 +v -0.009010 0.879244 0.056540 +v -0.008693 0.874570 0.055886 +v -0.008668 0.834625 0.012048 +v -0.008650 0.911994 0.061664 +v -0.008650 0.893201 0.056620 +v -0.008640 0.892060 0.056572 +v -0.008599 0.889361 0.060591 +v -0.008597 0.891329 0.057381 +v -0.008597 0.891329 0.057381 +v -0.008490 0.931270 0.054508 +v -0.008471 0.899531 0.061238 +v -0.008460 0.919000 0.057501 +v -0.008421 0.967938 0.054548 +v -0.008378 0.888547 0.054692 +v -0.008370 0.884120 0.059344 +v -0.008301 0.925998 0.054071 +v -0.008286 0.892255 0.060493 +v -0.008268 0.844330 0.014065 +v -0.008217 0.906062 0.062427 +v -0.008170 0.893528 0.062071 +v -0.008065 
0.902874 0.060634 +v -0.008017 0.959024 0.058680 +v -0.007987 0.877281 0.056682 +v -0.007975 0.935329 0.056953 +v -0.007956 0.897076 0.062834 +v -0.007925 0.890579 0.060445 +v -0.007905 0.928716 0.054102 +v -0.007897 0.896401 0.063182 +v -0.007866 0.904929 0.061101 +v -0.007798 0.903967 0.060560 +v -0.007738 0.891548 0.059855 +v -0.007680 0.912680 0.060607 +v -0.007668 0.866975 0.046611 +v -0.007662 0.895562 0.063213 +v -0.007656 0.881380 0.058220 +v -0.007651 0.906607 0.063914 +v -0.007622 0.952336 0.060575 +v -0.007561 0.915172 0.059308 +v -0.007543 0.869150 0.054052 +v -0.007490 0.913148 0.060252 +v -0.007448 0.891757 0.059965 +v -0.007442 0.839168 0.013379 +v -0.007439 0.885711 0.061494 +v -0.007434 0.892241 0.057822 +v -0.007373 0.871250 0.055643 +v -0.007337 0.912185 0.063082 +v -0.007318 0.903319 0.041464 +v -0.007300 0.909884 0.065715 +v -0.007262 0.906413 0.062301 +v -0.007249 0.947982 0.061137 +v -0.007246 0.886269 0.062145 +v -0.007224 0.844188 -0.035118 +v -0.007220 0.911362 0.064470 +v -0.007191 0.891241 0.058534 +v -0.007191 0.891241 0.058534 +v -0.007160 0.900753 0.054190 +v -0.007152 0.908501 0.065885 +v -0.007043 0.886892 0.062462 +v -0.007025 0.905850 0.061189 +v -0.006998 0.892196 0.058876 +v -0.006998 0.892196 0.058876 +v -0.006995 0.818938 0.010318 +v -0.006972 0.821856 0.010933 +v -0.006942 0.893958 0.063075 +v -0.006941 0.875865 0.057088 +v -0.006940 0.892354 0.061578 +v -0.006916 0.982857 0.041785 +v -0.006909 0.906577 0.063552 +v -0.006891 0.867537 0.052127 +v -0.006822 0.891806 0.057684 +v -0.006815 0.943757 0.061096 +v -0.006753 0.866718 0.048361 +v -0.006617 0.976073 0.049067 +v -0.006552 0.873347 0.056932 +v -0.006523 0.891572 0.060772 +v -0.006522 0.932548 0.056141 +v -0.006500 0.906923 0.065422 +v -0.006463 0.888427 0.062611 +v -0.006435 0.939749 0.060159 +v -0.006409 0.883521 0.060270 +v -0.006361 0.858577 -0.032407 +v -0.006326 0.899671 0.062373 +v -0.006306 0.893927 0.058917 +v -0.006265 0.890032 0.057177 +v -0.006213 0.897093 0.063945 +v -0.006203 0.897617 0.063637 +v -0.006195 0.902071 0.061519 +v -0.006167 0.892365 0.058854 +v -0.006058 0.907050 0.062152 +v -0.006030 0.896231 0.064011 +v -0.006001 0.891116 0.059385 +v -0.006001 0.891116 0.059385 +v -0.005984 0.903377 0.061185 +v -0.005962 0.906688 0.064856 +v -0.005953 0.906813 0.063037 +v -0.005907 0.925752 0.056396 +v -0.005844 0.896877 0.058575 +v -0.005739 0.874423 0.057559 +v -0.005736 0.929649 0.055748 +v -0.005728 0.890030 0.062100 +v -0.005716 0.866590 0.049705 +v -0.005693 0.906964 0.061509 +v -0.005657 0.921152 0.059119 +v -0.005600 0.892424 0.062548 +v -0.005542 0.904552 0.061242 +v -0.005529 0.885026 0.062362 +v -0.005467 0.910363 0.067103 +v -0.005459 0.911920 0.066044 +v -0.005445 0.894366 0.063918 +v -0.005412 0.912970 0.064162 +v -0.005374 0.885605 0.063055 +v -0.005322 0.906640 0.063925 +v -0.005269 0.891467 0.061351 +v -0.005243 0.886282 0.063350 +v -0.005215 0.908810 0.067246 +v -0.005198 0.916991 0.061406 +v -0.005137 0.936508 0.059034 +v -0.005127 0.914309 0.063038 +v -0.005101 0.892457 0.059492 +v -0.005021 0.891573 0.061703 +v -0.005000 0.891851 0.058587 +v -0.004949 0.892181 0.059960 +v -0.004949 0.892181 0.059960 +v -0.004942 0.905614 0.061501 +v -0.004918 0.869095 0.027229 +v -0.004896 0.888002 0.063413 +v -0.004876 0.906936 0.063081 +v -0.004852 0.891175 0.060083 +v -0.004852 0.891175 0.060083 +v -0.004829 0.906574 0.065616 +v -0.004784 0.869163 0.030148 +v -0.004631 0.906821 0.061742 +v -0.004547 0.906659 0.064743 +v -0.004509 0.906903 0.066647 +v -0.004496 0.892561 0.063100 
+v -0.004467 0.865815 0.023969 +v -0.004314 0.968195 0.054998 +v -0.004308 0.889905 0.062738 +v -0.004268 0.933526 0.057868 +v -0.004249 0.880145 0.058882 +v -0.004129 0.868101 0.036380 +v -0.004126 0.878578 0.058515 +v -0.004092 0.959167 0.059161 +v -0.004069 0.891645 0.062215 +v -0.004051 0.906475 0.064582 +v -0.004021 0.860351 0.021303 +v -0.004007 0.906082 0.065308 +v -0.003960 0.891479 0.061850 +v -0.003943 0.952380 0.060975 +v -0.003885 0.892582 0.059980 +v -0.003884 0.891876 0.059078 +v -0.003881 0.881554 0.059644 +v -0.003845 0.906434 0.062465 +v -0.003818 0.892253 0.060423 +v -0.003818 0.892253 0.060423 +v -0.003805 0.901464 0.062573 +v -0.003803 0.899655 0.063336 +v -0.003791 0.877012 0.058400 +v -0.003763 0.891275 0.060600 +v -0.003763 0.891275 0.060600 +v -0.003758 0.912583 0.067520 +v -0.003750 0.947981 0.061426 +v -0.003710 0.906315 0.063535 +v -0.003707 0.853295 0.019016 +v -0.003691 0.910695 0.068335 +v -0.003651 0.930726 0.057239 +v -0.003602 0.943742 0.061343 +v -0.003587 0.905752 0.065728 +v -0.003553 0.908807 0.068252 +v -0.003516 0.902695 0.062333 +v -0.003488 0.913941 0.066206 +v -0.003472 0.927259 0.057786 +v -0.003467 0.870572 0.056963 +v -0.003466 0.868556 0.055417 +v -0.003439 0.922133 0.060191 +v -0.003364 0.897584 0.065023 +v -0.003354 0.866878 0.042010 +v -0.003349 0.896665 0.065090 +v -0.003324 0.905412 0.062276 +v -0.003316 0.874871 0.058332 +v -0.003309 0.898045 0.064593 +v -0.003306 0.894275 0.060036 +v -0.003306 0.915567 0.064649 +v -0.003302 0.883294 0.061153 +v -0.003273 0.939843 0.060866 +v -0.003270 0.918194 0.062664 +v -0.003257 0.872784 0.057920 +v -0.003228 0.818103 0.010218 +v -0.003216 0.842750 0.015694 +v -0.003157 0.867097 0.053303 +v -0.003138 0.835846 0.013932 +v -0.003102 0.828259 0.011594 +v -0.003070 0.889956 0.058255 +v -0.003057 0.894727 0.064797 +v -0.003046 0.906862 0.067360 +v -0.003017 0.905733 0.063825 +v -0.002904 0.866355 0.045343 +v -0.002845 0.821072 0.010665 +v -0.002844 0.903963 0.062547 +v -0.002843 0.884575 0.062879 +v -0.002839 0.936839 0.060061 +v -0.002741 0.866298 0.050098 +v -0.002737 0.866225 0.047406 +v -0.002735 0.904910 0.064163 +v -0.002707 0.885103 0.063569 +v -0.002630 0.885812 0.063923 +v -0.002583 0.905508 0.066055 +v -0.002461 0.892683 0.063696 +v -0.002443 0.887638 0.064029 +v -0.002419 0.933873 0.058874 +v -0.002398 0.912653 0.067992 +v -0.002278 0.910553 0.069028 +v -0.002256 0.889666 0.063222 +v -0.002248 0.914258 0.066509 +v -0.002227 0.908671 0.068773 +v -0.002173 0.931151 0.058187 +v -0.002160 0.891703 0.062766 +v -0.002078 0.915865 0.064980 +v -0.002036 0.901179 0.062704 +v -0.002031 0.904467 0.064532 +v -0.002026 0.899514 0.063377 +v -0.002020 0.927635 0.058471 +v -0.002009 0.891874 0.059475 +v -0.002001 0.891389 0.062161 +v -0.001994 0.892720 0.060288 +v -0.001985 0.892406 0.060872 +v -0.001985 0.892406 0.060872 +v -0.001949 0.918414 0.063024 +v -0.001941 0.922367 0.060619 +v -0.001933 0.902431 0.062618 +v -0.001921 0.891347 0.061004 +v -0.001921 0.891347 0.061004 +v -0.001770 0.906637 0.067506 +v -0.001766 0.903679 0.063159 +v -0.001536 0.898163 0.064374 +v -0.001496 0.905412 0.066157 +v -0.001262 0.904364 0.064654 +v -0.001155 0.899418 0.063266 +v -0.001135 0.901153 0.062673 +v -0.001125 0.902413 0.062639 +v -0.001080 0.903655 0.063377 +v -0.001034 0.898267 0.064055 +v -0.000265 0.877068 0.010982 +v -0.000265 0.877068 0.010982 +v -0.000210 0.878828 0.022034 +v -0.000198 0.889176 0.009685 +v -0.000146 0.900192 0.022664 +v -0.000125 0.843966 -0.036014 +v -0.000124 0.858838 -0.032739 +v -0.000124 
0.858838 -0.032739 +v -0.000105 0.865351 0.024483 +v -0.000086 0.868599 0.027734 +v -0.000083 0.859914 0.021942 +v -0.000072 0.852811 0.019668 +v -0.000071 0.882474 0.042469 +v -0.000060 0.876250 -0.030026 +v -0.000060 0.876250 -0.030026 +v -0.000059 0.868856 0.030371 +v -0.000037 0.868015 0.036297 +v -0.000034 0.886098 -0.029773 +v -0.000034 0.886098 -0.029773 +v -0.000029 0.866811 0.041850 +v -0.000025 0.866294 0.045194 +v -0.000022 0.866152 0.047306 +v -0.000021 0.866197 0.050207 +v -0.000019 0.867000 0.053619 +v -0.000018 0.868384 0.055827 +v -0.000017 0.870375 0.057384 +v -0.000014 0.896929 -0.031698 +v -0.000014 0.896929 -0.031698 +v -0.000013 0.872664 0.058369 +v -0.000013 0.820798 0.010494 +v -0.000011 0.897791 0.064826 +v -0.000010 0.842239 0.016055 +v -0.000010 0.897414 0.065408 +v -0.000010 0.898084 0.064218 +v -0.000010 0.908389 0.068995 +v -0.000009 0.827655 0.011577 +v -0.000009 0.899365 0.063221 +v -0.000009 0.875054 0.058741 +v -0.000008 0.835052 0.014083 +v -0.000007 0.896742 0.065869 +v -0.000007 0.910445 0.069416 +v -0.000006 0.892820 0.064063 +v -0.000006 0.912693 0.068288 +v -0.000006 0.906467 0.067475 +v -0.000005 0.894532 0.065303 +v -0.000005 0.827066 -0.040647 +v -0.000005 0.827066 -0.040647 +v -0.000005 0.896032 0.065844 +v -0.000005 0.905380 0.066106 +v -0.000005 0.817459 0.010033 +v -0.000005 0.832616 -0.038875 +v -0.000005 0.832616 -0.038875 +v -0.000004 0.901235 0.062665 +v -0.000004 0.904321 0.064736 +v -0.000003 0.903632 0.063477 +v -0.000003 0.877480 0.058959 +v -0.000003 0.902427 0.062612 +v -0.000003 0.939918 0.061128 +v -0.000002 0.943731 0.061526 +v -0.000002 0.948035 0.061552 +v -0.000002 0.952384 0.061126 +v -0.000001 0.959253 0.059360 +v 0.000001 0.879080 0.059286 +v 0.000003 0.968312 0.055125 +v 0.000003 0.976632 0.049427 +v 0.000004 0.880520 0.059728 +v 0.000005 0.962324 -0.046671 +v 0.000005 0.931321 0.058406 +v 0.000005 0.983460 0.041879 +v 0.000005 0.976107 -0.038313 +v 0.000005 0.976107 -0.038313 +v 0.000005 0.989030 0.032189 +v 0.000006 0.944293 -0.049635 +v 0.000006 0.944293 -0.049635 +v 0.000006 0.986133 -0.027173 +v 0.000006 0.986133 -0.027173 +v 0.000006 0.992802 0.021299 +v 0.000006 0.994155 0.009878 +v 0.000006 0.994835 -0.002210 +v 0.000006 0.994835 -0.002210 +v 0.000006 0.992226 -0.014983 +v 0.000006 0.992226 -0.014983 +v 0.000006 0.891689 0.062953 +v 0.000006 0.909842 -0.036979 +v 0.000006 0.909842 -0.036979 +v 0.000007 0.881763 0.060244 +v 0.000010 0.926150 -0.046220 +v 0.000010 0.926150 -0.046220 +v 0.000010 0.887542 0.064140 +v 0.000011 0.914328 0.066748 +v 0.000011 0.883285 0.061358 +v 0.000012 0.891370 0.062255 +v 0.000013 0.885721 0.064027 +v 0.000015 0.889565 0.063375 +v 0.000015 0.884459 0.063018 +v 0.000015 0.884940 0.063755 +v 0.000017 0.933993 0.059132 +v 0.000021 0.936936 0.060349 +v 0.000036 0.904278 0.042222 +v 0.000036 0.884158 0.053928 +v 0.000046 0.927781 0.058728 +v 0.000054 0.915903 0.065229 +v 0.000074 0.887169 0.057211 +v 0.000106 0.901827 0.055421 +v 0.000133 0.891362 0.061089 +v 0.000133 0.891362 0.061089 +v 0.000134 0.897575 0.059559 +v 0.000138 0.918615 0.063232 +v 0.000144 0.894373 0.060313 +v 0.000144 0.922479 0.060835 +v 0.000148 0.892776 0.060414 +v 0.000193 0.892354 0.061171 +v 0.000193 0.892354 0.061171 +v 0.000264 0.891950 0.059698 +v 0.000280 0.889908 0.058685 +v 0.001010 0.898268 0.064056 +v 0.001051 0.903655 0.063378 +v 0.001096 0.902413 0.062639 +v 0.001107 0.901154 0.062675 +v 0.001129 0.899419 0.063267 +v 0.001232 0.904364 0.064655 +v 0.001465 0.905411 0.066158 +v 0.001509 0.898162 0.064375 +v 
0.001738 0.903678 0.063160 +v 0.001739 0.906636 0.067507 +v 0.001820 0.891361 0.061024 +v 0.001853 0.892730 0.060307 +v 0.001904 0.902431 0.062620 +v 0.001961 0.892415 0.060835 +v 0.001961 0.892415 0.060835 +v 0.001993 0.891913 0.059436 +v 0.001994 0.899514 0.063377 +v 0.002001 0.904467 0.064533 +v 0.002003 0.901178 0.062705 +v 0.002004 0.891393 0.062159 +v 0.002080 0.927600 0.058470 +v 0.002140 0.891698 0.062773 +v 0.002165 0.931148 0.058190 +v 0.002177 0.915881 0.064950 +v 0.002181 0.922339 0.060610 +v 0.002191 0.918427 0.062994 +v 0.002195 0.908669 0.068777 +v 0.002248 0.910553 0.069031 +v 0.002260 0.914278 0.066489 +v 0.002263 0.889661 0.063215 +v 0.002370 0.912660 0.067992 +v 0.002414 0.933870 0.058874 +v 0.002442 0.887638 0.064026 +v 0.002442 0.892689 0.063705 +v 0.002552 0.905507 0.066057 +v 0.002625 0.885814 0.063924 +v 0.002693 0.866221 0.047410 +v 0.002700 0.866298 0.050101 +v 0.002701 0.885104 0.063572 +v 0.002706 0.904906 0.064166 +v 0.002818 0.903957 0.062551 +v 0.002833 0.936837 0.060061 +v 0.002837 0.884573 0.062882 +v 0.002837 0.821069 0.010665 +v 0.002856 0.866351 0.045348 +v 0.002991 0.905725 0.063828 +v 0.003014 0.906861 0.067363 +v 0.003029 0.894740 0.064809 +v 0.003058 0.889989 0.058218 +v 0.003091 0.828254 0.011596 +v 0.003117 0.867100 0.053300 +v 0.003122 0.835839 0.013936 +v 0.003137 0.894296 0.060060 +v 0.003194 0.842741 0.015695 +v 0.003230 0.872778 0.057927 +v 0.003268 0.939840 0.060865 +v 0.003276 0.898046 0.064599 +v 0.003294 0.874865 0.058340 +v 0.003298 0.883289 0.061158 +v 0.003298 0.866869 0.042018 +v 0.003301 0.905402 0.062280 +v 0.003314 0.896669 0.065098 +v 0.003325 0.915677 0.064492 +v 0.003330 0.897584 0.065031 +v 0.003407 0.918231 0.062568 +v 0.003431 0.870571 0.056965 +v 0.003431 0.868554 0.055421 +v 0.003448 0.927064 0.057814 +v 0.003487 0.913982 0.066158 +v 0.003488 0.902691 0.062339 +v 0.003520 0.908806 0.068254 +v 0.003552 0.922015 0.060162 +v 0.003557 0.905750 0.065732 +v 0.003572 0.853295 0.019024 +v 0.003598 0.943739 0.061341 +v 0.003645 0.930722 0.057241 +v 0.003660 0.910695 0.068338 +v 0.003681 0.906307 0.063539 +v 0.003685 0.891315 0.060633 +v 0.003685 0.891315 0.060633 +v 0.003726 0.912599 0.067517 +v 0.003758 0.892602 0.060022 +v 0.003760 0.947989 0.061420 +v 0.003770 0.899655 0.063344 +v 0.003774 0.901464 0.062580 +v 0.003775 0.877007 0.058410 +v 0.003818 0.906425 0.062469 +v 0.003819 0.818212 0.010262 +v 0.003839 0.892284 0.060429 +v 0.003839 0.892284 0.060429 +v 0.003873 0.860345 0.021317 +v 0.003875 0.881547 0.059652 +v 0.003896 0.891893 0.059057 +v 0.003932 0.952371 0.060979 +v 0.003965 0.891469 0.061856 +v 0.003977 0.906080 0.065311 +v 0.004019 0.906471 0.064586 +v 0.004059 0.868084 0.036391 +v 0.004063 0.891675 0.062246 +v 0.004090 0.959164 0.059162 +v 0.004113 0.878574 0.058526 +v 0.004239 0.880140 0.058893 +v 0.004267 0.933549 0.057863 +v 0.004318 0.968193 0.054997 +v 0.004320 0.889892 0.062736 +v 0.004322 0.865802 0.023979 +v 0.004477 0.892573 0.063122 +v 0.004477 0.906902 0.066651 +v 0.004515 0.906657 0.064748 +v 0.004602 0.906818 0.061747 +v 0.004687 0.869154 0.030157 +v 0.004770 0.891208 0.060122 +v 0.004770 0.891208 0.060122 +v 0.004796 0.869077 0.027239 +v 0.004798 0.906574 0.065622 +v 0.004848 0.906930 0.063085 +v 0.004908 0.887988 0.063420 +v 0.004914 0.905616 0.061507 +v 0.004970 0.892488 0.059543 +v 0.004991 0.892243 0.059982 +v 0.004991 0.892243 0.059982 +v 0.005005 0.891595 0.061737 +v 0.005026 0.891879 0.058583 +v 0.005029 0.914486 0.062930 +v 0.005099 0.917099 0.061276 +v 0.005131 0.936505 0.059034 +v 0.005184 
0.908807 0.067250 +v 0.005242 0.886287 0.063359 +v 0.005278 0.891450 0.061364 +v 0.005288 0.906639 0.063930 +v 0.005359 0.913044 0.064141 +v 0.005371 0.885609 0.063066 +v 0.005422 0.894381 0.063943 +v 0.005430 0.911916 0.066048 +v 0.005435 0.910360 0.067109 +v 0.005512 0.920992 0.059108 +v 0.005517 0.904551 0.061250 +v 0.005528 0.885029 0.062373 +v 0.005584 0.892434 0.062573 +v 0.005636 0.896913 0.058606 +v 0.005674 0.866588 0.049709 +v 0.005704 0.906996 0.061495 +v 0.005716 0.874413 0.057571 +v 0.005725 0.890008 0.062101 +v 0.005731 0.929652 0.055750 +v 0.005847 0.925634 0.056475 +v 0.005900 0.906799 0.063043 +v 0.005907 0.891156 0.059438 +v 0.005929 0.906690 0.064864 +v 0.005959 0.903376 0.061196 +v 0.005998 0.896241 0.064032 +v 0.006023 0.892408 0.058919 +v 0.006050 0.907066 0.062147 +v 0.006126 0.893972 0.058970 +v 0.006165 0.858588 -0.032426 +v 0.006170 0.902069 0.061530 +v 0.006171 0.897624 0.063657 +v 0.006182 0.897100 0.063966 +v 0.006273 0.890041 0.057176 +v 0.006299 0.899672 0.062387 +v 0.006407 0.883515 0.060284 +v 0.006429 0.939746 0.060159 +v 0.006469 0.906922 0.065428 +v 0.006471 0.888406 0.062619 +v 0.006519 0.891573 0.060801 +v 0.006519 0.932551 0.056140 +v 0.006524 0.873339 0.056943 +v 0.006624 0.976070 0.049067 +v 0.006710 0.866715 0.048366 +v 0.006810 0.943753 0.061096 +v 0.006836 0.906554 0.063560 +v 0.006849 0.867537 0.052128 +v 0.006855 0.891827 0.057708 +v 0.006912 0.900767 0.054209 +v 0.006921 0.875857 0.057100 +v 0.006922 0.893970 0.063104 +v 0.006925 0.982855 0.041784 +v 0.006929 0.892357 0.061605 +v 0.006961 0.821851 0.010933 +v 0.006999 0.905855 0.061195 +v 0.007021 0.903314 0.041470 +v 0.007043 0.892243 0.058922 +v 0.007044 0.886888 0.062473 +v 0.007091 0.891290 0.058599 +v 0.007091 0.891290 0.058599 +v 0.007100 0.818956 0.010328 +v 0.007123 0.908499 0.065889 +v 0.007196 0.911357 0.064471 +v 0.007224 0.844194 -0.035124 +v 0.007246 0.947978 0.061137 +v 0.007247 0.886272 0.062157 +v 0.007273 0.909881 0.065719 +v 0.007274 0.892293 0.057899 +v 0.007309 0.906447 0.062285 +v 0.007310 0.912178 0.063084 +v 0.007337 0.871249 0.055648 +v 0.007421 0.839153 0.013383 +v 0.007441 0.885711 0.061509 +v 0.007462 0.891736 0.059994 +v 0.007464 0.913159 0.060260 +v 0.007486 0.915244 0.059316 +v 0.007509 0.869143 0.054060 +v 0.007590 0.906590 0.063921 +v 0.007624 0.866969 0.046619 +v 0.007632 0.952337 0.060568 +v 0.007633 0.895575 0.063242 +v 0.007650 0.881371 0.058238 +v 0.007661 0.912677 0.060613 +v 0.007730 0.891557 0.059891 +v 0.007775 0.903965 0.060571 +v 0.007843 0.904928 0.061113 +v 0.007871 0.896411 0.063210 +v 0.007905 0.928702 0.054106 +v 0.007929 0.897085 0.062857 +v 0.007947 0.890559 0.060473 +v 0.007969 0.935323 0.056955 +v 0.007973 0.877274 0.056697 +v 0.008023 0.959022 0.058677 +v 0.008042 0.902873 0.060647 +v 0.008156 0.893536 0.062102 +v 0.008232 0.906073 0.062425 +v 0.008244 0.844311 0.014071 +v 0.008280 0.892260 0.060527 +v 0.008300 0.925986 0.054076 +v 0.008366 0.888545 0.054638 +v 0.008371 0.884114 0.059363 +v 0.008382 0.918990 0.057528 +v 0.008425 0.967936 0.054550 +v 0.008439 0.893261 0.056693 +v 0.008449 0.899533 0.061256 +v 0.008464 0.892111 0.056655 +v 0.008486 0.891389 0.057453 +v 0.008486 0.891389 0.057453 +v 0.008488 0.931258 0.054511 +v 0.008597 0.889349 0.060610 +v 0.008625 0.911991 0.061666 +v 0.008642 0.885839 0.026149 +v 0.008645 0.834611 0.012054 +v 0.008669 0.874564 0.055896 +v 0.008682 0.883709 0.013397 +v 0.008682 0.883709 0.013397 +v 0.008996 0.894801 0.062241 +v 0.009000 0.879237 0.056558 +v 0.009025 0.891650 0.058659 +v 0.009067 0.988124 
0.031822 +v 0.009075 0.867582 0.043386 +v 0.009086 0.908029 0.064200 +v 0.009120 0.922647 0.055121 +v 0.009138 0.891832 0.055675 +v 0.009231 0.890641 0.054862 +v 0.009256 0.888251 0.060406 +v 0.009265 0.912238 0.059085 +v 0.009308 0.893178 0.060949 +v 0.009339 0.895487 0.062198 +v 0.009345 0.911913 0.059383 +v 0.009353 0.910813 0.062726 +v 0.009371 0.891944 0.055347 +v 0.009376 0.867998 0.050508 +v 0.009419 0.913745 0.058269 +v 0.009472 0.904754 0.059798 +v 0.009502 0.892175 0.059244 +v 0.009562 0.887719 0.060135 +v 0.009579 0.896171 0.061869 +v 0.009590 0.904203 0.059588 +v 0.009607 0.928129 0.053085 +v 0.009620 0.892202 0.056790 +v 0.009620 0.892202 0.056790 +v 0.009633 0.891421 0.056250 +v 0.009633 0.891421 0.056250 +v 0.009649 0.887131 0.050949 +v 0.009673 0.854746 0.016348 +v 0.009768 0.909134 0.063943 +v 0.009824 0.926190 0.053065 +v 0.009828 0.933562 0.055031 +v 0.009850 0.882023 0.057186 +v 0.009851 0.887200 0.059637 +v 0.009883 0.894993 0.055138 +v 0.009897 0.911359 0.060314 +v 0.009929 0.833007 -0.037895 +v 0.010003 0.923805 0.053897 +v 0.010004 0.991517 0.021241 +v 0.010014 0.905392 0.060690 +v 0.010017 0.930051 0.053349 +v 0.010049 0.943912 0.060746 +v 0.010119 0.891896 0.057509 +v 0.010143 0.894113 0.061012 +v 0.010157 0.872262 0.054080 +v 0.010162 0.906744 0.062082 +v 0.010258 0.891859 0.054071 +v 0.010272 0.938209 0.057837 +v 0.010275 0.891657 0.057113 +v 0.010311 0.875795 0.054989 +v 0.010368 0.891110 0.053431 +v 0.010381 0.892859 0.059497 +v 0.010403 0.896336 -0.029079 +v 0.010437 0.902705 0.059362 +v 0.010457 0.869902 0.052344 +v 0.010494 0.892125 0.053098 +v 0.010535 0.898378 0.060315 +v 0.010564 0.927771 0.052762 +v 0.010572 0.894645 0.060922 +v 0.010584 0.891295 0.057673 +v 0.010592 0.992790 0.009869 +v 0.010603 0.891851 0.053495 +v 0.010622 0.917240 0.056668 +v 0.010733 0.991186 -0.014338 +v 0.010739 0.910304 0.058546 +v 0.010749 0.948140 0.060609 +v 0.010760 0.885584 0.057841 +v 0.010803 0.892025 0.057622 +v 0.010822 0.910511 0.058246 +v 0.010831 0.886061 -0.027059 +v 0.010832 0.929034 0.052975 +v 0.010834 0.926448 0.052619 +v 0.010843 0.924489 0.053089 +v 0.010859 0.993502 -0.001936 +v 0.010888 0.905539 0.059184 +v 0.010918 0.910194 0.059392 +v 0.010929 0.927643 0.052542 +v 0.010956 0.909696 0.060949 +v 0.010977 0.891960 0.052832 +v 0.010977 0.891960 0.052832 +v 0.010981 0.895252 0.060639 +v 0.010982 0.891317 0.052540 +v 0.010992 0.891850 0.052977 +v 0.011002 0.892045 0.055001 +v 0.011009 0.905225 0.058907 +v 0.011018 0.890685 0.057527 +v 0.011038 0.862190 0.018149 +v 0.011053 0.932007 0.053999 +v 0.011073 0.868485 0.048797 +v 0.011129 0.907821 0.061817 +v 0.011171 0.890466 0.051735 +v 0.011195 0.926677 0.052295 +v 0.011216 0.928536 0.052709 +v 0.011229 0.908866 -0.034577 +v 0.011294 0.893466 0.059459 +v 0.011297 0.891703 0.054520 +v 0.011297 0.891703 0.054520 +v 0.011320 0.926905 0.052188 +v 0.011323 0.906455 0.060159 +v 0.011325 0.952293 0.059866 +v 0.011327 0.927626 0.052221 +v 0.011375 0.892687 0.051634 +v 0.011376 0.869007 0.037319 +v 0.011378 0.912136 0.057484 +v 0.011380 0.891660 0.051839 +v 0.011380 0.891660 0.051839 +v 0.011473 0.927070 0.051920 +v 0.011491 0.880424 0.055426 +v 0.011499 0.927500 0.051767 +v 0.011503 0.925208 0.052579 +v 0.011534 0.886767 0.040274 +v 0.011563 0.890150 0.057313 +v 0.011565 0.892451 0.057820 +v 0.011617 0.908562 0.059551 +v 0.011625 0.928178 0.052265 +v 0.011642 0.920671 0.054649 +v 0.011677 0.844078 0.012341 +v 0.011682 0.891967 0.055642 +v 0.011698 0.930647 0.053599 +v 0.011713 0.907907 0.058373 +v 0.011737 0.935581 0.055461 
+v 0.011748 0.925723 0.052244 +v 0.011767 0.893886 0.059376 +v 0.011810 0.927895 0.051679 +v 0.011823 0.926185 0.052044 +v 0.011833 0.927115 0.051307 +v 0.011857 0.926566 0.051766 +v 0.011861 0.889829 0.057181 +v 0.011896 0.891405 0.050799 +v 0.011896 0.891405 0.050799 +v 0.011906 0.891887 0.054115 +v 0.011906 0.891887 0.054115 +v 0.011906 0.891887 0.054115 +v 0.011920 0.891931 0.054163 +v 0.011920 0.891931 0.054163 +v 0.011982 0.958862 0.057806 +v 0.011986 0.926736 0.046747 +v 0.011998 0.891792 0.055243 +v 0.012035 0.876338 -0.026621 +v 0.012051 0.929878 0.053293 +v 0.012088 0.907750 0.057967 +v 0.012089 0.877363 0.053940 +v 0.012144 0.922103 0.053816 +v 0.012147 0.883754 0.055921 +v 0.012154 0.891726 0.055702 +v 0.012158 0.823640 0.010356 +v 0.012182 0.889481 0.056964 +v 0.012213 0.820073 0.010506 +v 0.012260 0.894421 0.059138 +v 0.012265 0.873310 0.052512 +v 0.012270 0.904082 0.058320 +v 0.012342 0.927469 0.051705 +v 0.012353 0.897211 0.059195 +v 0.012430 0.892831 0.057708 +v 0.012432 0.901125 0.058691 +v 0.012440 0.929558 0.052942 +v 0.012506 0.870622 0.050625 +v 0.012553 0.893674 0.023230 +v 0.012553 0.893674 0.023230 +v 0.012556 0.923221 0.053114 +v 0.012561 0.833444 0.010495 +v 0.012595 0.891489 0.055629 +v 0.012601 0.933764 0.054612 +v 0.012608 0.984884 -0.026411 +v 0.012624 0.924959 0.052431 +v 0.012628 0.925661 0.052204 +v 0.012642 0.892007 0.054778 +v 0.012643 0.891973 0.054706 +v 0.012651 0.926178 0.051933 +v 0.012694 0.891973 0.055453 +v 0.012699 0.924212 0.052642 +v 0.012706 0.929320 0.052383 +v 0.012741 0.967450 0.053636 +v 0.012823 0.894284 0.048799 +v 0.012823 0.894284 0.048799 +v 0.012899 0.893131 0.057601 +v 0.012914 0.929537 0.048265 +v 0.013000 0.868607 0.020289 +v 0.013043 0.929400 0.051133 +v 0.013043 0.929229 0.051712 +v 0.013062 0.891268 0.055477 +v 0.013098 0.924943 -0.042971 +v 0.013114 0.932403 0.054253 +v 0.013155 0.888340 0.055928 +v 0.013185 0.915651 0.056219 +v 0.013289 0.828434 -0.039057 +v 0.013352 0.891115 0.055347 +v 0.013381 0.892134 0.055478 +v 0.013394 0.892014 0.054801 +v 0.013423 0.893508 0.057430 +v 0.013450 0.869359 0.045574 +v 0.013451 0.931642 0.053968 +v 0.013475 0.925399 0.048448 +v 0.013522 0.891962 0.054680 +v 0.013541 0.857982 -0.029750 +v 0.013617 0.871257 0.029191 +v 0.013660 0.890939 0.055211 +v 0.013711 0.882555 0.054357 +v 0.013722 0.910366 0.057210 +v 0.013742 0.931354 0.053672 +v 0.013763 0.926015 0.051964 +v 0.013807 0.925645 0.052425 +v 0.013810 0.939969 0.057890 +v 0.013840 0.879153 0.053157 +v 0.013841 0.896066 0.057957 +v 0.013897 0.891932 0.054767 +v 0.013914 0.925184 0.052635 +v 0.014001 0.906028 0.057618 +v 0.014013 0.944513 0.060005 +v 0.014054 0.924360 0.052786 +v 0.014091 0.931098 0.052998 +v 0.014171 0.892300 0.055312 +v 0.014183 0.931032 0.051874 +v 0.014205 0.902768 0.057689 +v 0.014221 0.891949 0.054548 +v 0.014237 0.930938 0.052555 +v 0.014260 0.937114 0.055306 +v 0.014290 0.923342 0.052875 +v 0.014321 0.899625 0.057823 +v 0.014373 0.891790 0.054624 +v 0.014415 0.918985 0.054532 +v 0.014503 0.886984 0.054525 +v 0.014509 0.948396 0.059643 +v 0.014563 0.892424 0.055285 +v 0.014564 0.922076 0.053239 +v 0.014620 0.935285 0.055114 +v 0.014624 0.855162 0.013004 +v 0.014624 0.891654 0.054591 +v 0.014643 0.891923 0.054486 +v 0.014688 0.920583 0.053858 +v 0.014704 0.890368 0.054640 +v 0.014748 0.895678 0.039036 +v 0.014748 0.895678 0.039036 +v 0.014826 0.891558 0.054559 +v 0.014913 0.892515 0.055250 +v 0.014980 0.891898 0.054496 +v 0.014995 0.934098 0.054696 +v 0.015021 0.894927 0.056812 +v 0.015029 0.942492 -0.046449 +v 
0.015062 0.974477 0.047316 +v 0.015110 0.952273 0.058670 +v 0.015141 0.928811 0.045228 +v 0.015195 0.933370 0.054514 +v 0.015245 0.891963 0.054522 +v 0.015271 0.843525 0.010163 +v 0.015318 0.932684 0.048876 +v 0.015403 0.932919 0.054196 +v 0.015403 0.871676 0.047354 +v 0.015437 0.925493 0.052631 +v 0.015492 0.873038 0.023427 +v 0.015549 0.925216 0.052931 +v 0.015603 0.932641 0.053798 +v 0.015630 0.845919 -0.032464 +v 0.015715 0.891499 0.054271 +v 0.015735 0.924699 0.053154 +v 0.015745 0.881673 0.052431 +v 0.015777 0.932484 0.053378 +v 0.015824 0.981092 0.039988 +v 0.015866 0.897906 0.056921 +v 0.015867 0.869088 -0.026641 +v 0.015906 0.958548 0.056544 +v 0.015913 0.932636 0.052409 +v 0.015932 0.913944 0.056209 +v 0.015962 0.886150 0.053370 +v 0.015972 0.923716 0.053272 +v 0.015974 0.874923 0.049120 +v 0.016073 0.889674 0.053829 +v 0.016118 0.893390 0.055275 +v 0.016119 0.908490 0.057198 +v 0.016137 0.901185 0.057009 +v 0.016184 0.904485 0.057105 +v 0.016246 0.892294 0.054432 +v 0.016361 0.922571 0.053214 +v 0.016550 0.924835 0.049041 +v 0.016616 0.966741 0.052439 +v 0.016679 0.863303 0.013490 +v 0.016698 0.960275 -0.044392 +v 0.016840 0.896321 0.056151 +v 0.016970 0.920988 0.053376 +v 0.016978 0.891307 0.053835 +v 0.017078 0.871119 0.039420 +v 0.017113 0.974131 -0.036722 +v 0.017303 0.938212 0.055199 +v 0.017426 0.919227 0.053907 +v 0.017446 0.936370 0.055145 +v 0.017455 0.917332 0.054629 +v 0.017459 0.878466 0.049333 +v 0.017466 0.889217 0.053087 +v 0.017535 0.894182 0.055015 +v 0.017565 0.892418 0.054134 +v 0.017623 0.940926 0.057175 +v 0.017680 0.925162 0.053068 +v 0.017690 0.935108 0.054934 +v 0.017700 0.899241 0.056291 +v 0.017757 0.934495 0.054786 +v 0.017763 0.924873 0.053349 +v 0.017816 0.885620 0.052000 +v 0.017854 0.832727 0.007601 +v 0.017864 0.934084 0.054497 +v 0.017965 0.924259 0.053473 +v 0.017966 0.933855 0.054182 +v 0.017988 0.853509 0.009782 +v 0.018062 0.933708 0.053747 +v 0.018093 0.985363 0.030609 +v 0.018175 0.933951 0.052839 +v 0.018186 0.945277 0.058841 +v 0.018275 0.902676 0.056492 +v 0.018276 0.923234 0.053454 +v 0.018325 0.891193 0.053392 +v 0.018462 0.948653 0.058122 +v 0.018503 0.897359 0.055640 +v 0.018584 0.824895 0.007983 +v 0.018753 0.922023 0.053240 +v 0.018790 0.906773 0.056794 +v 0.018857 0.892606 0.053769 +v 0.018928 0.952158 0.056974 +v 0.018933 0.894799 0.054671 +v 0.019105 0.912149 0.056188 +v 0.019153 0.881491 0.049204 +v 0.019220 0.841714 0.006387 +v 0.019232 0.822040 0.007194 +v 0.019293 0.888971 0.052192 +v 0.019595 0.859660 -0.027399 +v 0.019652 0.957977 0.054839 +v 0.019676 0.871571 0.014494 +v 0.019723 0.920281 0.053195 +v 0.019824 0.873564 0.041244 +v 0.019862 0.900620 0.055805 +v 0.020078 0.862117 0.009023 +v 0.020093 0.891086 0.052607 +v 0.020386 0.987507 0.020729 +v 0.020418 0.925107 0.052851 +v 0.020423 0.924815 0.053190 +v 0.020427 0.965697 0.050794 +v 0.020579 0.918244 0.053681 +v 0.020597 0.924158 0.053296 +v 0.020621 0.892915 0.053140 +v 0.020677 0.898514 0.055089 +v 0.020681 0.982510 -0.025460 +v 0.020891 0.895557 0.054089 +v 0.020926 0.935802 0.049196 +v 0.020933 0.923061 0.053171 +v 0.020969 0.972565 0.044920 +v 0.021091 0.915846 0.054668 +v 0.021133 0.904659 0.056118 +v 0.021148 0.873840 0.031422 +v 0.021205 0.885657 0.049545 +v 0.021380 0.885602 -0.017087 +v 0.021446 0.988902 0.009810 +v 0.021455 0.876477 0.042697 +v 0.021511 0.921796 0.052817 +v 0.021586 0.851159 0.004435 +v 0.021717 0.835936 -0.034771 +v 0.021728 0.930838 0.044676 +v 0.021802 0.989568 -0.002303 +v 0.021955 0.938645 0.054416 +v 0.021965 0.876740 -0.015795 +v 
0.022002 0.895432 -0.019523 +v 0.022010 0.987678 -0.013767 +v 0.022025 0.936702 0.054530 +v 0.022073 0.925083 0.048624 +v 0.022226 0.935348 0.054429 +v 0.022273 0.934119 0.052264 +v 0.022303 0.934681 0.054268 +v 0.022305 0.933768 0.053199 +v 0.022319 0.933945 0.053651 +v 0.022333 0.934213 0.054009 +v 0.022372 0.870434 0.008391 +v 0.022463 0.910195 0.056021 +v 0.022547 0.889153 0.050260 +v 0.022640 0.919983 0.052757 +v 0.022642 0.839016 0.001688 +v 0.022693 0.941431 0.055478 +v 0.022881 0.902524 0.055275 +v 0.022902 0.879487 0.043547 +v 0.022906 0.832829 0.002758 +v 0.023264 0.859642 0.001981 +v 0.023303 0.891470 0.050839 +v 0.023560 0.945493 0.056385 +v 0.023623 0.925523 0.052018 +v 0.023639 0.925198 0.052420 +v 0.023669 0.878273 0.015396 +v 0.023685 0.868344 -0.015581 +v 0.023754 0.924512 0.052505 +v 0.023779 0.948382 0.055353 +v 0.023800 0.893705 0.051603 +v 0.023964 0.900210 0.054271 +v 0.024087 0.917801 0.053204 +v 0.024090 0.951483 0.054026 +v 0.024093 0.868573 -0.001379 +v 0.024172 0.923455 0.052199 +v 0.024221 0.896881 0.052876 +v 0.024342 0.882559 0.044192 +v 0.024609 0.846096 -0.002501 +v 0.024647 0.876224 0.034080 +v 0.024669 0.876926 0.025102 +v 0.024678 0.956601 0.051805 +v 0.024743 0.907227 -0.024155 +v 0.024830 0.876639 -0.003567 +v 0.024831 0.847734 -0.028619 +v 0.024846 0.877650 0.007673 +v 0.024872 0.922271 0.051744 +v 0.024971 0.976272 0.036318 +v 0.025154 0.826630 0.003803 +v 0.025167 0.830932 -0.036073 +v 0.025203 0.907863 0.055162 +v 0.025396 0.963659 0.047793 +v 0.025581 0.823955 0.004391 +v 0.025623 0.857395 -0.015926 +v 0.025803 0.914695 0.054088 +v 0.025807 0.970150 0.042292 +v 0.025979 0.884981 -0.005393 +v 0.026000 0.932950 0.051067 +v 0.026030 0.932819 0.051788 +v 0.026177 0.886783 0.045243 +v 0.026179 0.933049 0.052182 +v 0.026264 0.853542 -0.006612 +v 0.026323 0.926379 0.050855 +v 0.026338 0.934719 0.052709 +v 0.026373 0.920474 0.051518 +v 0.026373 0.933368 0.052533 +v 0.026403 0.934075 0.052730 +v 0.026441 0.936078 0.052862 +v 0.026491 0.925988 0.051158 +v 0.026683 0.979447 0.028396 +v 0.026696 0.938070 0.052718 +v 0.026702 0.925360 0.051156 +v 0.026823 0.879026 0.035982 +v 0.027299 0.924415 0.050734 +v 0.027349 0.905431 0.053908 +v 0.027387 0.940989 0.053264 +v 0.027432 0.890289 0.046350 +v 0.027595 0.884697 0.005341 +v 0.027728 0.931985 0.049874 +v 0.027888 0.926667 0.046778 +v 0.027899 0.931752 0.050478 +v 0.027920 0.932213 0.045058 +v 0.027968 0.836990 -0.005100 +v 0.027989 0.923257 0.050041 +v 0.028054 0.894464 -0.007632 +v 0.028070 0.883743 0.012793 +v 0.028105 0.927336 0.049710 +v 0.028141 0.931860 0.050774 +v 0.028184 0.892926 0.047352 +v 0.028225 0.878916 0.028331 +v 0.028243 0.981666 0.019845 +v 0.028265 0.944821 0.053449 +v 0.028350 0.933853 0.051118 +v 0.028350 0.947538 0.052325 +v 0.028356 0.926986 0.049848 +v 0.028409 0.881770 0.037352 +v 0.028422 0.932249 0.051062 +v 0.028498 0.933028 0.051144 +v 0.028539 0.918353 0.051627 +v 0.028564 0.902822 0.052518 +v 0.028632 0.935149 0.051311 +v 0.028648 0.950217 0.050895 +v 0.028682 0.895598 0.048558 +v 0.028696 0.926472 0.049717 +v 0.028746 0.880839 0.019860 +v 0.028961 0.899200 0.050515 +v 0.029062 0.922047 -0.030721 +v 0.029103 0.937308 0.051160 +v 0.029112 0.930696 0.048560 +v 0.029200 0.954678 0.048426 +v 0.029228 0.925530 0.049322 +v 0.029331 0.930845 0.049010 +v 0.029462 0.928567 0.048350 +v 0.029534 0.839885 -0.009343 +v 0.029574 0.844515 -0.017143 +v 0.029645 0.931228 0.049422 +v 0.029664 0.911067 0.053025 +v 0.029688 0.884673 0.038559 +v 0.029725 0.921647 0.049558 +v 0.029826 0.928274 0.048408 
+v 0.029864 0.932910 0.049629 +v 0.029867 0.931855 0.049676 +v 0.029898 0.924386 0.048680 +v 0.029971 0.940225 0.051492 +v 0.030061 0.961079 0.044383 +v 0.030165 0.929715 0.044905 +v 0.030210 0.927759 0.048236 +v 0.030239 0.934242 0.049697 +v 0.030319 0.881508 0.030604 +v 0.030355 0.982828 0.010340 +v 0.030389 0.929507 0.047104 +v 0.030400 0.967275 0.039201 +v 0.030443 0.929817 0.047688 +v 0.030462 0.930448 0.048179 +v 0.030467 0.829814 -0.005422 +v 0.030619 0.929432 0.047237 +v 0.030651 0.926959 0.047773 +v 0.030725 0.929491 0.047315 +v 0.030781 0.931193 0.048500 +v 0.030896 0.892906 0.002894 +v 0.030912 0.943708 0.051071 +v 0.030916 0.915813 0.051385 +v 0.030919 0.931998 0.048425 +v 0.030919 0.936289 0.049543 +v 0.030944 0.837014 -0.030912 +v 0.030972 0.928930 0.046893 +v 0.031017 0.929805 0.047527 +v 0.031123 0.929088 0.046881 +v 0.031134 0.827006 -0.002879 +v 0.031161 0.884175 0.017030 +v 0.031179 0.888881 0.040299 +v 0.031181 0.946239 0.049992 +v 0.031197 0.925913 0.047256 +v 0.031282 0.982358 -0.000218 +v 0.031288 0.930407 0.047573 +v 0.031359 0.928328 0.046446 +v 0.031369 0.948701 0.048478 +v 0.031372 0.933293 0.048321 +v 0.031382 0.929378 0.046924 +v 0.031495 0.931238 0.047421 +v 0.031501 0.882392 0.023278 +v 0.031505 0.928655 0.046241 +v 0.031518 0.922867 0.047989 +v 0.031663 0.929924 0.046801 +v 0.031721 0.929033 0.046134 +v 0.031799 0.927442 0.046147 +v 0.031817 0.908388 0.051450 +v 0.031834 0.937150 -0.033638 +v 0.031887 0.980041 -0.010681 +v 0.031894 0.952874 0.045891 +v 0.031952 0.884241 0.032361 +v 0.031979 0.928028 0.045779 +v 0.031998 0.930618 0.046540 +v 0.032031 0.919631 0.049219 +v 0.032054 0.929601 0.046030 +v 0.032104 0.938970 0.049505 +v 0.032157 0.932362 0.047123 +v 0.032257 0.892530 0.042006 +v 0.032293 0.935032 0.047933 +v 0.032344 0.928958 0.045445 +v 0.032352 0.930090 0.045857 +v 0.032570 0.929671 0.045447 +v 0.032650 0.891633 0.008919 +v 0.032666 0.924364 0.046536 +v 0.032701 0.931456 0.046086 +v 0.032818 0.959013 0.041816 +v 0.032870 0.895380 0.043437 +v 0.032962 0.905721 0.049773 +v 0.033036 0.906185 -0.009333 +v 0.033038 0.930556 0.045296 +v 0.033076 0.974501 -0.020605 +v 0.033177 0.925783 0.045405 +v 0.033177 0.942131 0.048749 +v 0.033209 0.887231 0.033878 +v 0.033232 0.929692 0.044745 +v 0.033248 0.884483 0.025369 +v 0.033260 0.898259 0.045015 +v 0.033283 0.933731 0.046523 +v 0.033308 0.885015 0.020290 +v 0.033311 0.926901 0.044774 +v 0.033315 0.928366 0.044459 +v 0.033381 0.944417 0.047514 +v 0.033412 0.912893 0.049996 +v 0.033420 0.902065 0.047374 +v 0.033591 0.937426 0.047450 +v 0.033606 0.951900 -0.032828 +v 0.033609 0.946818 0.045869 +v 0.033709 0.921040 0.047268 +v 0.033863 0.832594 -0.010099 +v 0.033905 0.932559 0.045282 +v 0.034127 0.965143 -0.028121 +v 0.034224 0.889654 0.013504 +v 0.034242 0.950779 0.043105 +v 0.034285 0.917369 0.048328 +v 0.034388 0.931345 0.044230 +v 0.034389 0.972561 0.025777 +v 0.034530 0.891471 0.036151 +v 0.034613 0.935795 0.045597 +v 0.034656 0.930011 0.043457 +v 0.034699 0.940251 0.046379 +v 0.034715 0.922616 0.045544 +v 0.034800 0.887162 0.026790 +v 0.034817 0.886614 0.022048 +v 0.034875 0.835203 -0.019055 +v 0.034877 0.963371 0.034954 +v 0.034898 0.910475 0.048472 +v 0.034968 0.927998 0.042992 +v 0.035043 0.942382 0.045035 +v 0.035133 0.901842 0.001369 +v 0.035144 0.924228 0.044213 +v 0.035203 0.925860 0.043323 +v 0.035289 0.833596 -0.031509 +v 0.035355 0.934219 0.043905 +v 0.035406 0.944695 0.043303 +v 0.035442 0.888974 0.017210 +v 0.035457 0.895185 0.038252 +v 0.035655 0.888377 0.019323 +v 0.035806 0.967655 0.029015 
+v 0.035809 0.908058 0.046726 +v 0.035849 0.938284 0.044204 +v 0.035891 0.932570 0.042496 +v 0.035926 0.918923 0.045992 +v 0.036001 0.898079 0.039902 +v 0.036112 0.955550 0.037677 +v 0.036122 0.948547 0.040423 +v 0.036132 0.890393 0.028667 +v 0.036183 0.914907 0.046682 +v 0.036203 0.972486 0.019998 +v 0.036318 0.940183 0.042658 +v 0.036324 0.900997 0.041700 +v 0.036354 0.904677 0.044221 +v 0.036372 0.930715 0.041500 +v 0.036416 0.829394 -0.012358 +v 0.036489 0.889361 0.023065 +v 0.036655 0.900501 0.006184 +v 0.036664 0.936161 0.042100 +v 0.036814 0.942249 0.040757 +v 0.036854 0.920841 0.043897 +v 0.036907 0.927861 0.040901 +v 0.036925 0.895652 0.011214 +v 0.037244 0.922897 0.042299 +v 0.037287 0.925022 0.041245 +v 0.037290 0.912902 0.045093 +v 0.037339 0.937716 0.040346 +v 0.037351 0.891572 0.019330 +v 0.037360 0.933874 0.040362 +v 0.037381 0.894953 0.031492 +v 0.037623 0.893535 0.015964 +v 0.037637 0.916810 0.044234 +v 0.037725 0.945796 0.037492 +v 0.037942 0.892982 0.024711 +v 0.037944 0.931360 0.039154 +v 0.038011 0.939462 0.038226 +v 0.038049 0.972703 0.011969 +v 0.038090 0.919743 -0.014602 +v 0.038120 0.910815 0.043301 +v 0.038230 0.934934 0.038427 +v 0.038231 0.898776 0.033990 +v 0.038521 0.919052 0.042033 +v 0.038631 0.915109 0.042604 +v 0.038656 0.927700 0.038546 +v 0.038668 0.901641 0.035936 +v 0.038828 0.907854 0.040715 +v 0.038836 0.831267 -0.021774 +v 0.038907 0.904453 0.038008 +v 0.038912 0.921523 0.040314 +v 0.039000 0.931912 0.037076 +v 0.039006 0.924087 0.039110 +v 0.039028 0.895891 0.020667 +v 0.039105 0.936246 0.036029 +v 0.039116 0.942665 0.034689 +v 0.039206 0.898134 0.027805 +v 0.039407 0.913291 0.040788 +v 0.039432 0.911069 -0.002269 +v 0.039432 0.911069 -0.002269 +v 0.039453 0.963632 0.023133 +v 0.039515 0.917648 0.040347 +v 0.039572 0.950250 0.031685 +v 0.039622 0.899053 0.016705 +v 0.039696 0.908145 0.002815 +v 0.039696 0.908145 0.002815 +v 0.039792 0.971485 0.002695 +v 0.039808 0.927578 0.036445 +v 0.039833 0.902090 0.030623 +v 0.039870 0.905133 0.009357 +v 0.039976 0.920436 0.038492 +v 0.040032 0.932705 0.034383 +v 0.040114 0.906916 0.005779 +v 0.040114 0.906916 0.005779 +v 0.040127 0.923356 0.037131 +v 0.040132 0.910650 0.038091 +v 0.040172 0.904909 0.032844 +v 0.040201 0.957093 0.027343 +v 0.040280 0.922122 0.002932 +v 0.040289 0.907571 0.035163 +v 0.040323 0.938941 0.031783 +v 0.040333 0.916133 0.038448 +v 0.040351 0.916968 -0.006085 +v 0.040351 0.916968 -0.006085 +v 0.040383 0.931236 -0.016984 +v 0.040427 0.901829 0.024030 +v 0.040766 0.968580 -0.006389 +v 0.040794 0.964445 -0.013439 +v 0.040880 0.905968 0.027202 +v 0.040897 0.927428 0.033783 +v 0.040910 0.919210 0.036365 +v 0.041070 0.913789 0.035557 +v 0.041144 0.922507 0.034744 +v 0.041157 0.908584 0.029780 +v 0.041189 0.905875 0.020281 +v 0.041328 0.911019 0.032397 +v 0.041434 0.934510 0.028870 +v 0.041675 0.924719 -0.008086 +v 0.041675 0.924719 -0.008086 +v 0.041684 0.910369 0.023600 +v 0.041691 0.910542 0.015704 +v 0.041750 0.917256 0.033095 +v 0.041931 0.942445 -0.016867 +v 0.041950 0.943929 0.024639 +v 0.041962 0.960892 0.015250 +v 0.041998 0.912682 0.026625 +v 0.042021 0.916196 0.004003 +v 0.042038 0.912510 0.010056 +v 0.042129 0.914232 0.011968 +v 0.042150 0.914774 0.029500 +v 0.042152 0.907975 0.007323 +v 0.042213 0.916148 0.017859 +v 0.042230 0.927043 0.029128 +v 0.042236 0.921037 0.030958 +v 0.042443 0.951895 0.020576 +v 0.042480 0.952691 -0.015324 +v 0.042622 0.918050 0.012062 +v 0.042670 0.917341 0.023501 +v 0.042693 0.922765 0.015819 +v 0.042749 0.932720 0.021748 +v 0.042753 0.919021 0.026788 
+v 0.042881 0.925650 0.023993 +v 0.042885 0.924254 0.020478 +v 0.042965 0.906866 0.002055 +v 0.042965 0.906866 0.002055 +v 0.042970 0.925340 -0.001392 +v 0.043046 0.921999 0.011052 +v 0.043078 0.930801 0.017950 +v 0.043112 0.930911 -0.005834 +v 0.043112 0.930911 -0.005834 +v 0.043278 0.928306 0.013527 +v 0.043321 0.938188 0.016959 +v 0.043336 0.914442 0.006956 +v 0.043369 0.925833 0.009377 +v 0.043440 0.918640 0.009072 +v 0.043753 0.933469 0.011029 +v 0.043755 0.957822 0.006394 +v 0.043802 0.946188 0.013255 +v 0.044007 0.930573 0.007344 +v 0.044007 0.930573 0.007344 +v 0.044041 0.910084 0.006630 +v 0.044063 0.921571 0.008494 +v 0.044129 0.928046 0.005163 +v 0.044149 0.929662 0.002551 +v 0.044239 0.952242 -0.002402 +v 0.044250 0.917430 0.001293 +v 0.044377 0.921125 0.006468 +v 0.044456 0.935709 -0.002363 +v 0.044456 0.935709 -0.002363 +v 0.044627 0.939641 0.007234 +v 0.044759 0.935515 0.001336 +v 0.044759 0.935515 0.001336 +v 0.044842 0.935239 0.004085 +v 0.044842 0.935239 0.004085 +v 0.044930 0.906936 0.002491 +v 0.045608 0.934609 0.004110 +v 0.045608 0.934609 0.004110 +v 0.045816 0.909715 -0.002737 +v 0.045816 0.909715 -0.002737 +v 0.046185 0.911233 0.003706 +v 0.046326 0.918573 -0.002120 +v 0.046503 0.933145 0.003868 +v 0.047766 0.909776 -0.002514 +v 0.047844 0.931854 -0.001494 +v 0.048076 0.922525 -0.004055 +v 0.048126 0.913616 -0.000525 +v 0.048196 0.936550 -0.001445 +v 0.048196 0.936550 -0.001445 +v 0.048346 0.926727 -0.003432 +v 0.048494 0.914304 -0.007314 +v 0.048494 0.914304 -0.007314 +v 0.049088 0.920549 -0.009946 +v 0.049088 0.920549 -0.009946 +v 0.049119 0.916232 -0.004017 +v 0.049490 0.935155 -0.000732 +v 0.049686 0.927663 -0.010399 +v 0.049686 0.927663 -0.010399 +v 0.049846 0.934447 -0.006781 +v 0.049846 0.934447 -0.006781 +v 0.050060 0.931623 -0.004642 +v 0.050141 0.914636 -0.006539 +v 0.050629 0.920967 -0.006836 +v 0.050649 0.926574 -0.007375 +v 0.050889 0.920497 -0.009292 +v 0.051472 0.933239 -0.005704 +v 0.051651 0.926781 -0.009443 +v -0.119017 0.114442 -0.047530 +v -0.118527 0.072579 -0.047136 +v -0.116695 0.071273 -0.024835 +v -0.115790 0.170910 -0.043737 +v -0.115727 0.114980 -0.027638 +v -0.115034 0.070491 -0.065249 +v -0.114423 0.114493 -0.066233 +v -0.112747 0.171283 -0.026957 +v -0.111500 0.068245 -0.045575 +v -0.110998 0.238178 -0.038462 +v -0.110440 0.071648 -0.001928 +v -0.109906 0.173404 -0.063550 +v -0.108997 0.067980 -0.026001 +v -0.108300 0.066664 -0.062011 +v -0.107444 0.238114 -0.023602 +v -0.107395 0.114855 -0.004984 +v -0.104915 0.067722 -0.004859 +v -0.104643 0.236882 -0.059444 +v -0.104218 0.070103 -0.078919 +v -0.103442 0.482462 0.000494 +v -0.103358 0.433643 -0.003575 +v -0.103091 0.116570 -0.078262 +v -0.103072 0.079066 -0.046922 +v -0.102719 0.290144 -0.030705 +v -0.102313 0.171155 -0.005592 +v -0.101717 0.308054 -0.026311 +v -0.101596 0.430925 0.013097 +v -0.101483 0.387531 -0.009868 +v -0.101353 0.481206 0.018241 +v -0.101020 0.299156 -0.027751 +v -0.100513 0.323035 -0.023104 +v -0.100437 0.287619 -0.015282 +v -0.100347 0.527985 0.002670 +v -0.100160 0.066716 -0.073675 +v -0.100111 0.314860 -0.024252 +v -0.099926 0.291916 -0.014403 +v -0.099798 0.076980 -0.059917 +v -0.099646 0.383106 0.005761 +v -0.099611 0.304554 -0.012107 +v -0.099600 0.344275 -0.018265 +v -0.099413 0.072039 0.012099 +v -0.099222 0.319538 -0.008611 +v -0.099213 0.078029 -0.027853 +v -0.099077 0.312332 -0.010350 +v -0.098220 0.339800 -0.003590 +v -0.098142 0.290888 -0.049353 +v -0.098093 0.527105 0.022019 +v -0.097585 0.236749 -0.005201 +v -0.097028 0.114297 0.006793 +v 
-0.096772 0.303053 -0.043958 +v -0.096536 0.067603 0.006586 +v -0.096424 0.484151 -0.020224 +v -0.096251 0.311784 -0.045576 +v -0.094211 0.320506 -0.042085 +v -0.093879 0.328444 -0.041456 +v -0.093714 0.525126 -0.019553 +v -0.092302 0.346021 -0.037606 +v -0.092193 0.565432 0.004623 +v -0.091594 0.283254 0.002032 +v -0.091408 0.562989 0.023614 +v -0.090723 0.300217 0.005796 +v -0.090343 0.313328 0.010323 +v -0.090250 0.426836 0.035459 +v -0.090134 0.083726 -0.044998 +v -0.089980 0.389468 -0.034560 +v -0.089956 0.070294 -0.083250 +v -0.089942 0.079599 -0.060737 +v -0.089813 0.066990 -0.077100 +v -0.089721 0.478851 0.043074 +v -0.089599 0.377566 0.025500 +v -0.089214 0.433708 -0.033007 +v -0.088982 0.558755 -0.016181 +v -0.088908 0.333060 0.014917 +v -0.088443 0.079459 -0.027295 +v -0.087918 0.114724 -0.083306 +v -0.087438 0.588774 0.014335 +v -0.087092 0.071309 0.015484 +v -0.086676 0.067004 0.008338 +v -0.086270 0.587602 0.004343 +v -0.086010 0.526425 0.046265 +v -0.085733 0.114799 0.010392 +v -0.084745 0.586242 0.027536 +v -0.084108 0.168931 -0.076095 +v -0.083255 0.482054 -0.036574 +v -0.082620 0.171127 0.004408 +v -0.082316 0.585639 -0.011168 +v -0.080737 0.234350 -0.070226 +v -0.079840 0.556752 0.046522 +v -0.079646 0.066825 -0.073324 +v -0.078945 0.235076 0.004101 +v -0.078693 0.518719 -0.037944 +v -0.078631 0.076864 -0.056757 +v -0.078472 0.611547 0.022310 +v -0.078072 0.076113 -0.026235 +v -0.077473 0.078871 -0.042189 +v -0.077423 0.610626 0.007150 +v -0.076712 0.291432 -0.059976 +v -0.076704 0.069524 -0.078415 +v -0.076067 0.066163 0.002461 +v -0.075902 0.302835 -0.056435 +v -0.075888 0.311022 -0.055470 +v -0.074866 0.608886 0.033529 +v -0.074810 0.319539 -0.052031 +v -0.074447 0.277647 0.009664 +v -0.074210 0.113680 -0.079190 +v -0.074046 0.069382 0.009142 +v -0.073770 0.543539 -0.040061 +v -0.073608 0.610100 -0.006009 +v -0.073256 0.326656 -0.051532 +v -0.073217 0.581171 0.046122 +v -0.073143 0.563079 -0.037724 +v -0.072423 0.113721 0.004065 +v -0.071846 0.296830 0.016684 +v -0.071670 0.585706 -0.030092 +v -0.071520 0.344658 -0.047573 +v -0.071421 0.309465 0.021479 +v -0.071168 0.066380 -0.065286 +v -0.069322 0.629788 0.019387 +v -0.069256 0.329256 0.025157 +v -0.069233 0.628170 0.026967 +v -0.069116 0.065617 -0.008785 +v -0.068413 0.389389 -0.046190 +v -0.067992 0.631258 0.010043 +v -0.067623 0.424073 0.046574 +v -0.067519 0.372730 0.036061 +v -0.067166 0.069576 -0.070100 +v -0.066645 0.066053 -0.051789 +v -0.066562 0.635322 0.004316 +v -0.066052 0.477514 0.054753 +v -0.065688 0.625395 0.037812 +v -0.065617 0.608835 -0.021766 +v -0.065502 0.433205 -0.045728 +v -0.065189 0.065802 -0.024876 +v -0.065171 0.066960 -0.039228 +v -0.065019 0.604892 0.047081 +v -0.064912 0.641813 0.005508 +v -0.064815 0.635492 -0.001123 +v -0.064566 0.112500 -0.005668 +v -0.064477 0.069112 -0.002943 +v -0.064313 0.640781 0.021630 +v -0.064217 0.638739 0.021464 +v -0.064217 0.638739 0.021464 +v -0.064156 0.112866 -0.071316 +v -0.064005 0.632455 -0.001041 +v -0.063958 0.552170 -0.045942 +v -0.063774 0.634111 0.004848 +v -0.063408 0.642808 0.011129 +v -0.063216 0.639005 0.030416 +v -0.063036 0.640859 0.010938 +v -0.063036 0.640859 0.010938 +v -0.062989 0.636789 0.029802 +v -0.062989 0.636789 0.029802 +v -0.062948 0.646010 0.019794 +v -0.062937 0.519168 0.057961 +v -0.062885 0.641962 -0.000436 +v -0.062479 0.649252 0.006793 +v -0.062143 0.634285 -0.000611 +v -0.061962 0.644099 0.030264 +v -0.061598 0.477997 -0.045774 +v -0.061576 0.647826 0.009152 +v -0.061565 0.169078 -0.009886 +v -0.061537 0.640441 
0.006221 +v -0.061374 0.650394 0.020048 +v -0.061153 0.649656 0.000627 +v -0.060549 0.644289 0.001964 +v -0.060514 0.648501 0.030428 +v -0.060468 0.649659 0.019552 +v -0.060197 0.642446 0.001329 +v -0.060197 0.642446 0.001329 +v -0.060146 0.166915 -0.066750 +v -0.059959 0.651985 0.009444 +v -0.059933 0.068855 -0.054609 +v -0.059829 0.655657 0.008053 +v -0.059766 0.644736 0.019663 +v -0.059516 0.640537 0.000227 +v -0.059414 0.647855 0.007304 +v -0.059306 0.647215 0.031497 +v -0.059276 0.634538 0.040379 +v -0.059276 0.634538 0.040379 +v -0.059259 0.653670 0.020088 +v -0.059114 0.636738 0.041381 +v -0.058973 0.514807 -0.045261 +v -0.058715 0.642908 0.029432 +v -0.058690 0.069626 -0.021130 +v -0.058552 0.641908 0.040030 +v -0.058544 0.646290 0.009673 +v -0.058452 0.548224 0.057252 +v -0.058438 0.651999 0.030171 +v -0.058193 0.070392 -0.039333 +v -0.058133 0.655883 0.002017 +v -0.058075 0.233515 -0.008453 +v -0.057998 0.655103 0.009780 +v -0.057917 0.648219 0.001200 +v -0.057814 0.649023 -0.001255 +v -0.057571 0.652370 0.004708 +v -0.057540 0.621875 0.048932 +v -0.057473 0.232217 -0.062011 +v -0.057413 0.111855 -0.023534 +v -0.057376 0.646473 0.040117 +v -0.057184 0.660546 0.008744 +v -0.057014 0.657766 0.020935 +v -0.056800 0.654055 0.008602 +v -0.056783 0.632101 -0.013380 +v -0.056720 0.644342 0.019557 +v -0.056592 0.111727 -0.055245 +v -0.056507 0.563699 -0.050178 +v -0.056395 0.655179 0.031962 +v -0.056341 0.653144 -0.000904 +v -0.056318 0.658549 0.020745 +v -0.055837 0.653356 0.019933 +v -0.055766 0.656113 0.031873 +v -0.055673 0.642668 0.028745 +v -0.055628 0.645760 0.010134 +v -0.055492 0.660737 0.003283 +v -0.055465 0.536267 -0.046674 +v -0.055446 0.644483 0.042100 +v -0.055438 0.640982 0.038573 +v -0.055413 0.650225 0.039575 +v -0.055314 0.288451 -0.052200 +v -0.055099 0.647431 -0.000122 +v -0.055048 0.654294 0.002573 +v -0.054967 0.651872 0.029293 +v -0.054849 0.659734 0.006805 +v -0.054760 0.111034 -0.040151 +v -0.054726 0.654661 0.010349 +v -0.054704 0.575092 0.056788 +v -0.054650 0.656268 -0.000278 +v -0.054517 0.659139 0.009223 +v -0.054479 0.626043 0.053426 +v -0.054429 0.308610 -0.046830 +v -0.054395 0.659015 0.020273 +v -0.054286 0.316360 -0.045259 +v -0.054276 0.660520 0.007003 +v -0.054128 0.645219 -0.009457 +v -0.054064 0.642985 -0.009526 +v -0.054064 0.642985 -0.009526 +v -0.053937 0.298610 -0.050418 +v -0.053910 0.278367 -0.003870 +v -0.053770 0.632602 0.053642 +v -0.053687 0.656710 0.031278 +v -0.053287 0.325389 -0.044037 +v -0.053161 0.652605 0.019846 +v -0.052972 0.652692 0.041834 +v -0.052943 0.639927 0.048527 +v -0.052873 0.584107 -0.047588 +v -0.052849 0.284342 -0.002936 +v -0.052779 0.659284 0.003752 +v -0.052678 0.625572 0.050940 +v -0.052515 0.640886 0.037314 +v -0.052468 0.632275 0.049622 +v -0.052468 0.632275 0.049622 +v -0.052457 0.646807 0.000878 +v -0.052453 0.640478 0.053636 +v -0.052429 0.653776 0.041568 +v -0.052323 0.661004 0.007500 +v -0.052275 0.651234 0.028620 +v -0.052266 0.652893 -0.006602 +v -0.052173 0.653811 0.010843 +v -0.052117 0.650265 0.037989 +v -0.052072 0.649703 -0.010736 +v -0.052031 0.644512 0.048491 +v -0.051737 0.343396 -0.040869 +v -0.051690 0.632142 0.050771 +v -0.051677 0.655741 0.001008 +v -0.051437 0.635137 0.050802 +v -0.050819 0.653859 -0.010308 +v -0.050633 0.166053 -0.039444 +v -0.050494 0.647570 0.052979 +v -0.050447 0.654664 0.040282 +v -0.050413 0.639915 0.050749 +v -0.050386 0.648417 0.047804 +v -0.050282 0.598843 0.056817 +v -0.050228 0.300625 -0.000661 +v -0.050153 0.660888 -0.004407 +v -0.050086 0.639165 0.046476 +v 
-0.049793 0.648158 -0.009075 +v -0.049653 0.625151 0.056630 +v -0.049617 0.422694 0.040078 +v -0.049519 0.661751 -0.003956 +v -0.049511 0.649745 0.036738 +v -0.049441 0.657071 -0.009444 +v -0.049355 0.654815 0.002066 +v -0.049333 0.642583 0.050200 +v -0.048690 0.631684 0.057182 +v -0.048475 0.646808 0.050125 +v -0.048460 0.652879 0.052308 +v -0.048121 0.657558 0.019918 +v -0.048074 0.607927 -0.039775 +v -0.047882 0.314087 0.002538 +v -0.047855 0.624713 0.054077 +v -0.047622 0.661850 -0.002851 +v -0.047591 0.650802 0.049569 +v -0.047532 0.647463 -0.007578 +v -0.047386 0.639189 0.044697 +v -0.047330 0.648670 0.045546 +v -0.047269 0.644966 -0.017599 +v -0.047264 0.639468 0.057116 +v -0.047134 0.655066 0.028807 +v -0.047069 0.652069 0.049292 +v -0.047016 0.643009 -0.017576 +v -0.047016 0.643009 -0.017576 +v -0.046930 0.545062 -0.052273 +v -0.046872 0.656515 -0.007489 +v -0.046789 0.233127 -0.036135 +v -0.046693 0.652164 0.049796 +v -0.046575 0.618341 0.056505 +v -0.046473 0.631252 0.054248 +v -0.046160 0.659559 0.009239 +v -0.045873 0.653005 -0.014591 +v -0.045632 0.646464 0.056617 +v -0.045383 0.638232 0.055094 +v -0.045370 0.332887 0.006464 +v -0.045340 0.638905 0.054331 +v -0.045160 0.653277 0.047690 +v -0.044911 0.648260 0.043762 +v -0.044881 0.655528 -0.005911 +v -0.044854 0.650121 -0.018678 +v -0.044797 0.286927 -0.029132 +v -0.044752 0.653232 0.036824 +v -0.044691 0.642803 0.055012 +v -0.044227 0.661417 -0.012654 +v -0.044209 0.472090 0.048360 +v -0.044188 0.386919 -0.039004 +v -0.044145 0.475661 -0.043619 +v -0.044107 0.651968 0.055600 +v -0.044004 0.630845 0.055462 +v -0.044004 0.630845 0.055462 +v -0.043757 0.654168 -0.018121 +v -0.043638 0.645686 0.053819 +v -0.043461 0.662138 -0.011981 +v -0.043398 0.646815 0.054344 +v -0.043380 0.633453 0.056152 +v -0.043040 0.648519 -0.016574 +v -0.042980 0.305239 -0.025299 +v -0.042923 0.637633 0.052604 +v -0.042631 0.657490 -0.017109 +v -0.042499 0.660543 0.000627 +v -0.042363 0.651322 0.053129 +v -0.042156 0.296023 -0.026426 +v -0.042030 0.661989 -0.010691 +v -0.041951 0.641445 0.055691 +v -0.041502 0.630120 -0.029157 +v -0.041221 0.647754 -0.014642 +v -0.040979 0.322762 -0.022030 +v -0.040778 0.650283 0.055185 +v -0.040766 0.647229 0.051572 +v -0.040709 0.511691 0.052990 +v -0.040594 0.559729 -0.056703 +v -0.040560 0.656924 -0.014584 +v -0.040539 0.637748 0.050398 +v -0.040318 0.651484 0.054693 +v -0.040114 0.652085 0.042637 +v -0.040101 0.509829 -0.042571 +v -0.039459 0.374183 0.015445 +v -0.038978 0.655889 -0.012567 +v -0.038890 0.652455 0.052731 +v -0.038869 0.537239 0.055886 +v -0.038630 0.646909 0.049338 +v -0.038140 0.340097 -0.018747 +v -0.037586 0.644850 -0.025392 +v -0.037545 0.660852 -0.006422 +v -0.037429 0.566304 0.058809 +v -0.037327 0.642963 -0.025342 +v -0.037327 0.642963 -0.025342 +v -0.036582 0.637032 0.059708 +v -0.036466 0.650179 -0.024809 +v -0.036296 0.593457 0.059839 +v -0.036285 0.581916 -0.054269 +v -0.036051 0.641433 0.059570 +v -0.036048 0.652500 -0.022585 +v -0.035856 0.429664 -0.038033 +v -0.035751 0.615709 0.059495 +v -0.035727 0.529030 -0.043863 +v -0.035554 0.654185 -0.024084 +v -0.035160 0.648620 -0.022398 +v -0.035106 0.645533 0.059093 +v -0.034662 0.657555 -0.023001 +v -0.034573 0.636412 0.056837 +v -0.034262 0.661390 -0.020995 +v -0.034243 0.651470 0.047005 +v -0.033809 0.647800 -0.020121 +v -0.033445 0.661755 -0.020044 +v -0.033116 0.657047 -0.020039 +v -0.033000 0.646023 0.056010 +v -0.032655 0.420658 0.025070 +v -0.032586 0.636593 0.054373 +v -0.032197 0.606492 -0.046292 +v -0.032021 0.661623 -0.018382 +v 
-0.031970 0.655998 -0.017698 +v -0.031221 0.645774 0.053437 +v -0.030288 0.635967 -0.033331 +v -0.030120 0.642397 -0.032649 +v -0.029806 0.473382 -0.036875 +v -0.029579 0.382606 -0.010543 +v -0.029348 0.635179 -0.030504 +v -0.029292 0.649745 -0.031270 +v -0.029009 0.641532 -0.029185 +v -0.028766 0.628813 -0.035834 +v -0.028750 0.660621 -0.013563 +v -0.028369 0.656407 -0.029404 +v -0.028320 0.639943 0.061088 +v -0.028188 0.648789 -0.028155 +v -0.028047 0.631278 0.061327 +v -0.027718 0.648831 0.060938 +v -0.027347 0.650176 -0.029116 +v -0.027341 0.655222 -0.026229 +v -0.027325 0.650359 0.060282 +v -0.027321 0.636157 0.062618 +v -0.027090 0.661519 -0.027241 +v -0.026919 0.640460 0.062474 +v -0.026817 0.628965 0.060915 +v -0.026817 0.628965 0.060915 +v -0.026646 0.654038 -0.028127 +v -0.026493 0.648568 -0.026492 +v -0.026295 0.644444 0.062334 +v -0.026277 0.651449 0.057395 +v -0.026231 0.660540 -0.024434 +v -0.025988 0.657399 -0.027003 +v -0.025781 0.635523 0.059599 +v -0.025601 0.428156 -0.024351 +v -0.025598 0.647729 -0.023986 +v -0.025049 0.613664 0.061573 +v -0.025048 0.635668 -0.035434 +v -0.024955 0.656998 -0.023781 +v -0.024742 0.469919 0.034027 +v -0.024686 0.645111 0.058995 +v -0.024503 0.505587 -0.035055 +v -0.024221 0.635743 0.056982 +v -0.024213 0.655984 -0.021257 +v -0.024139 0.642132 -0.034681 +v -0.024055 0.634873 -0.032635 +v -0.023920 0.650889 0.051858 +v -0.023392 0.649718 -0.033522 +v -0.023309 0.644917 0.056187 +v -0.023206 0.588461 0.061623 +v -0.022940 0.641197 -0.031270 +v -0.022599 0.556741 0.058513 +v -0.022507 0.656476 -0.031353 +v -0.022255 0.648741 -0.030250 +v -0.021690 0.661541 -0.028746 +v -0.021514 0.536950 -0.052729 +v -0.021441 0.503465 0.040057 +v -0.021429 0.655278 -0.028120 +v -0.020818 0.426542 -0.004970 +v -0.020773 0.660483 -0.025902 +v -0.020743 0.644282 -0.032573 +v -0.020424 0.527152 0.046329 +v -0.020336 0.642244 -0.032305 +v -0.020336 0.642244 -0.032305 +v -0.019995 0.652207 -0.029709 +v -0.019827 0.558117 -0.059923 +v -0.019733 0.522489 -0.034468 +v -0.019104 0.661218 -0.027621 +v -0.018853 0.661825 -0.026746 +v -0.018356 0.661941 -0.024895 +v -0.018251 0.581182 -0.057529 +v -0.018014 0.650043 -0.031805 +v -0.017995 0.635602 0.064352 +v -0.017738 0.639854 0.064207 +v -0.017662 0.471746 -0.019304 +v -0.017551 0.653835 -0.030582 +v -0.017533 0.648432 -0.029084 +v -0.017336 0.643736 0.064260 +v -0.017117 0.657219 -0.029386 +v -0.017028 0.647596 -0.026449 +v -0.016961 0.634942 0.061199 +v -0.016536 0.660828 -0.018811 +v -0.016531 0.656892 -0.026091 +v -0.016476 0.469411 0.016607 +v -0.016269 0.644493 0.060797 +v -0.016121 0.655933 -0.023487 +v -0.015893 0.605623 -0.049155 +v -0.015875 0.635174 0.058466 +v -0.015571 0.628250 -0.038579 +v -0.015329 0.644334 0.057842 +v -0.013737 0.471104 -0.001656 +v -0.013272 0.639113 0.063623 +v -0.013084 0.634286 0.065702 +v -0.013039 0.644230 0.065860 +v -0.013031 0.647840 0.063499 +v -0.012942 0.630460 0.063536 +v -0.012830 0.649398 0.062846 +v -0.012625 0.502987 -0.016937 +v -0.012435 0.634389 0.063633 +v -0.012387 0.644166 0.063782 +v -0.012368 0.650804 0.059825 +v -0.012210 0.626986 0.063085 +v -0.012210 0.626986 0.063085 +v -0.011855 0.502254 0.020732 +v -0.011618 0.634830 0.067097 +v -0.011579 0.643618 0.067239 +v -0.011373 0.634629 0.061918 +v -0.011321 0.643928 0.062059 +v -0.011297 0.650386 0.054141 +v -0.011203 0.611790 0.063388 +v -0.011007 0.646987 0.066170 +v -0.010983 0.631405 0.065887 +v -0.010568 0.582696 0.062228 +v -0.010486 0.646751 0.064146 +v -0.010470 0.631674 0.063939 +v -0.009939 0.645913 
0.067568 +v -0.009931 0.632408 0.067344 +v -0.009870 0.528045 -0.036608 +v -0.009686 0.632241 0.062279 +v -0.009679 0.646173 0.062473 +v -0.009525 0.550821 0.057602 +v -0.009493 0.635839 0.067193 +v -0.009452 0.642524 0.067306 +v -0.009223 0.501611 0.000864 +v -0.009145 0.519608 -0.016984 +v -0.009104 0.635724 0.062070 +v -0.009061 0.642738 0.062185 +v -0.008940 0.635306 0.065123 +v -0.008913 0.649934 -0.033225 +v -0.008790 0.639523 0.065023 +v -0.008707 0.648276 -0.030463 +v -0.008687 0.653673 -0.031852 +v -0.008593 0.643283 0.065155 +v -0.008572 0.525893 0.044673 +v -0.008485 0.636184 0.064656 +v -0.008485 0.647448 -0.027776 +v -0.008476 0.657046 -0.030641 +v -0.008458 0.642193 0.064758 +v -0.008393 0.634621 0.061972 +v -0.008220 0.519781 0.024161 +v -0.008215 0.656822 -0.027306 +v -0.008060 0.644139 0.061645 +v -0.008039 0.655909 -0.024683 +v -0.007897 0.643875 0.067579 +v -0.007892 0.634406 0.067424 +v -0.007835 0.634864 0.059227 +v -0.007610 0.634314 0.062400 +v -0.007606 0.644081 0.062553 +v -0.007583 0.643999 0.058645 +v -0.007118 0.643375 0.065045 +v -0.007090 0.634948 0.064907 +v -0.005057 0.518361 0.002531 +v -0.004342 0.523128 -0.014477 +v -0.003807 0.520733 0.024629 +v -0.002358 0.519953 0.003275 +v -0.000064 0.631437 0.065948 +v -0.000005 0.656996 -0.030994 +v -0.000005 0.649849 -0.033637 +v -0.000001 0.656800 -0.027663 +v -0.000001 0.655906 -0.025042 +v -0.000001 0.653609 -0.032219 +v -0.000001 0.634797 0.059607 +v -0.000000 0.648199 -0.030872 +v -0.000000 0.634569 0.062471 +v -0.000000 0.521254 0.003657 +v -0.000000 0.521426 0.024857 +v -0.000000 0.526960 0.045172 +v -0.000000 0.527229 -0.013699 +v -0.000000 0.533404 -0.034378 +v -0.000000 0.544143 -0.050432 +v -0.000000 0.548280 0.056357 +v -0.000000 0.559325 -0.056628 +v -0.000000 0.578404 0.061423 +v -0.000000 0.579856 -0.056833 +v -0.000000 0.604472 -0.047431 +v -0.000000 0.610478 0.063651 +v -0.000000 0.626476 0.062834 +v -0.000000 0.626476 0.062834 +v -0.000000 0.626947 -0.038257 +v -0.000000 0.630423 0.064061 +v -0.000000 0.638674 0.064142 +v -0.000000 0.640715 -0.030721 +v -0.000000 0.640715 -0.030721 +v -0.000000 0.643175 -0.034193 +v -0.000000 0.647130 0.064186 +v -0.000000 0.648666 0.063689 +v -0.000000 0.650430 0.055363 +v -0.000000 0.650500 0.061133 +v -0.000000 0.651937 -0.031864 +v -0.000000 0.660458 -0.020156 +v -0.000000 0.660546 -0.029428 +v -0.000000 0.661453 -0.028431 +v -0.000000 0.661645 -0.026220 +v -0.000000 0.647378 -0.028175 +v -0.000000 0.643895 0.058877 +v 0.000001 0.644032 0.061898 +v 0.000001 0.639449 0.065413 +v 0.000006 0.643223 0.065433 +v 0.000007 0.635224 0.065672 +v 0.000138 0.632428 0.067283 +v 0.000145 0.631694 0.064155 +v 0.000403 0.632392 0.062758 +v 0.000532 0.647039 0.066135 +v 0.000611 0.634452 0.067326 +v 0.000693 0.645986 0.067441 +v 0.000724 0.646794 0.064331 +v 0.000911 0.634467 0.062886 +v 0.000920 0.635051 0.065122 +v 0.000950 0.646088 0.062930 +v 0.001097 0.643949 0.067429 +v 0.001369 0.643969 0.063009 +v 0.001402 0.643349 0.065220 +v 0.002359 0.519953 0.003275 +v 0.003807 0.520733 0.024629 +v 0.004343 0.523128 -0.014477 +v 0.005058 0.518361 0.002531 +v 0.007582 0.643999 0.058649 +v 0.007833 0.634864 0.059227 +v 0.008037 0.655909 -0.024682 +v 0.008061 0.644139 0.061649 +v 0.008212 0.656822 -0.027305 +v 0.008220 0.519781 0.024161 +v 0.008392 0.634621 0.061971 +v 0.008473 0.657047 -0.030640 +v 0.008485 0.647448 -0.027776 +v 0.008573 0.525893 0.044672 +v 0.008596 0.643272 0.065158 +v 0.008600 0.635253 0.065004 +v 0.008634 0.643279 0.064980 +v 0.008686 0.653674 -0.031852 +v 
0.008706 0.648275 -0.030463 +v 0.008792 0.639522 0.065024 +v 0.008828 0.634728 0.066962 +v 0.008893 0.643856 0.066950 +v 0.008912 0.649934 -0.033224 +v 0.008940 0.635311 0.065124 +v 0.009146 0.519608 -0.016983 +v 0.009168 0.643822 0.063087 +v 0.009179 0.634731 0.063116 +v 0.009224 0.501610 0.000865 +v 0.009525 0.550821 0.057602 +v 0.009870 0.528044 -0.036607 +v 0.009930 0.633095 0.067096 +v 0.009996 0.645517 0.067058 +v 0.010048 0.637131 0.064691 +v 0.010067 0.641477 0.064635 +v 0.010236 0.633091 0.062869 +v 0.010276 0.645570 0.062877 +v 0.010551 0.636947 0.066596 +v 0.010568 0.582696 0.062228 +v 0.010572 0.641670 0.066493 +v 0.010601 0.631990 0.065912 +v 0.010653 0.646652 0.065870 +v 0.010731 0.632014 0.063843 +v 0.010758 0.646638 0.063851 +v 0.010836 0.641834 0.062896 +v 0.010863 0.636815 0.062933 +v 0.011203 0.611790 0.063388 +v 0.011297 0.650386 0.054141 +v 0.011855 0.502253 0.020731 +v 0.012175 0.642244 0.066644 +v 0.012185 0.636420 0.066788 +v 0.012210 0.626986 0.063085 +v 0.012210 0.626986 0.063085 +v 0.012368 0.650804 0.059825 +v 0.012534 0.636214 0.062814 +v 0.012536 0.642492 0.062747 +v 0.012625 0.502986 -0.016937 +v 0.012830 0.649398 0.062846 +v 0.012942 0.630460 0.063536 +v 0.013031 0.647840 0.063499 +v 0.013272 0.639113 0.063623 +v 0.013337 0.635971 0.065766 +v 0.013366 0.642735 0.065646 +v 0.013545 0.635827 0.063867 +v 0.013581 0.642904 0.063761 +v 0.013737 0.471105 -0.001656 +v 0.015328 0.644334 0.057849 +v 0.015571 0.628250 -0.038579 +v 0.015872 0.635172 0.058463 +v 0.015893 0.605623 -0.049155 +v 0.016118 0.655933 -0.023486 +v 0.016270 0.644493 0.060803 +v 0.016476 0.469411 0.016607 +v 0.016527 0.656892 -0.026090 +v 0.016536 0.660828 -0.018811 +v 0.016960 0.634941 0.061198 +v 0.017028 0.647594 -0.026450 +v 0.017113 0.657220 -0.029385 +v 0.017338 0.643730 0.064267 +v 0.017532 0.648431 -0.029084 +v 0.017548 0.653836 -0.030581 +v 0.017662 0.471746 -0.019303 +v 0.017740 0.639853 0.064209 +v 0.017996 0.635602 0.064352 +v 0.018013 0.650044 -0.031804 +v 0.018251 0.581182 -0.057529 +v 0.018356 0.661941 -0.024895 +v 0.018853 0.661825 -0.026746 +v 0.019104 0.661218 -0.027621 +v 0.019733 0.522489 -0.034467 +v 0.019827 0.558117 -0.059922 +v 0.019994 0.652207 -0.029709 +v 0.020336 0.642244 -0.032305 +v 0.020336 0.642244 -0.032305 +v 0.020424 0.527152 0.046329 +v 0.020743 0.644282 -0.032573 +v 0.020773 0.660483 -0.025902 +v 0.020818 0.426542 -0.004970 +v 0.021429 0.655278 -0.028120 +v 0.021441 0.503465 0.040056 +v 0.021514 0.536950 -0.052728 +v 0.021690 0.661541 -0.028746 +v 0.022255 0.648741 -0.030250 +v 0.022507 0.656476 -0.031353 +v 0.022599 0.556741 0.058511 +v 0.022940 0.641197 -0.031270 +v 0.023206 0.588461 0.061623 +v 0.023307 0.644917 0.056196 +v 0.023392 0.649718 -0.033522 +v 0.023920 0.650889 0.051858 +v 0.024055 0.634873 -0.032635 +v 0.024139 0.642132 -0.034681 +v 0.024211 0.655984 -0.021256 +v 0.024218 0.635741 0.056980 +v 0.024503 0.505588 -0.035054 +v 0.024686 0.645111 0.059003 +v 0.024742 0.469919 0.034026 +v 0.024951 0.656999 -0.023781 +v 0.025048 0.635668 -0.035434 +v 0.025049 0.613664 0.061573 +v 0.025597 0.647727 -0.023986 +v 0.025601 0.428156 -0.024350 +v 0.025779 0.635521 0.059597 +v 0.025983 0.657401 -0.027003 +v 0.026231 0.660540 -0.024434 +v 0.026277 0.651449 0.057395 +v 0.026296 0.644443 0.062341 +v 0.026492 0.648568 -0.026492 +v 0.026644 0.654039 -0.028126 +v 0.026817 0.628965 0.060915 +v 0.026817 0.628965 0.060915 +v 0.026920 0.640457 0.062477 +v 0.027090 0.661519 -0.027241 +v 0.027322 0.636155 0.062617 +v 0.027325 0.650359 0.060282 +v 0.027341 0.655222 
-0.026229 +v 0.027345 0.650175 -0.029115 +v 0.027718 0.648831 0.060938 +v 0.028047 0.631278 0.061327 +v 0.028188 0.648789 -0.028155 +v 0.028320 0.639943 0.061088 +v 0.028369 0.656407 -0.029404 +v 0.028750 0.660621 -0.013563 +v 0.028766 0.628813 -0.035834 +v 0.029009 0.641532 -0.029185 +v 0.029292 0.649745 -0.031270 +v 0.029348 0.635179 -0.030504 +v 0.029579 0.382605 -0.010543 +v 0.029806 0.473382 -0.036874 +v 0.030120 0.642397 -0.032649 +v 0.030288 0.635967 -0.033331 +v 0.031219 0.645775 0.053446 +v 0.031967 0.655999 -0.017697 +v 0.032021 0.661623 -0.018382 +v 0.032197 0.606491 -0.046292 +v 0.032583 0.636589 0.054370 +v 0.032656 0.420659 0.025070 +v 0.032999 0.646025 0.056020 +v 0.033113 0.657048 -0.020039 +v 0.033445 0.661755 -0.020044 +v 0.033809 0.647797 -0.020122 +v 0.034243 0.651470 0.047005 +v 0.034262 0.661390 -0.020995 +v 0.034571 0.636409 0.056835 +v 0.034657 0.657557 -0.023002 +v 0.035106 0.645533 0.059101 +v 0.035159 0.648617 -0.022398 +v 0.035553 0.654186 -0.024083 +v 0.035728 0.529029 -0.043862 +v 0.035750 0.615709 0.059495 +v 0.035856 0.429664 -0.038033 +v 0.036048 0.652500 -0.022585 +v 0.036052 0.641431 0.059572 +v 0.036285 0.581915 -0.054268 +v 0.036296 0.593457 0.059839 +v 0.036464 0.650177 -0.024809 +v 0.036581 0.637028 0.059706 +v 0.037327 0.642963 -0.025342 +v 0.037327 0.642963 -0.025342 +v 0.037429 0.566304 0.058808 +v 0.037545 0.660852 -0.006422 +v 0.037586 0.644850 -0.025392 +v 0.038140 0.340097 -0.018747 +v 0.038627 0.646910 0.049346 +v 0.038869 0.537238 0.055886 +v 0.038890 0.652455 0.052731 +v 0.038975 0.655891 -0.012566 +v 0.039459 0.374182 0.015444 +v 0.040101 0.509829 -0.042570 +v 0.040114 0.652085 0.042637 +v 0.040318 0.651484 0.054693 +v 0.040535 0.637743 0.050396 +v 0.040557 0.656926 -0.014584 +v 0.040594 0.559729 -0.056702 +v 0.040709 0.511690 0.052989 +v 0.040764 0.647231 0.051581 +v 0.040778 0.650283 0.055185 +v 0.040980 0.322762 -0.022030 +v 0.041221 0.647748 -0.014643 +v 0.041502 0.630120 -0.029157 +v 0.041951 0.641445 0.055691 +v 0.042029 0.661989 -0.010691 +v 0.042156 0.296022 -0.026426 +v 0.042363 0.651322 0.053129 +v 0.042499 0.660543 0.000627 +v 0.042628 0.657492 -0.017111 +v 0.042921 0.637629 0.052602 +v 0.042980 0.305239 -0.025299 +v 0.043039 0.648515 -0.016575 +v 0.043380 0.633453 0.056152 +v 0.043398 0.646815 0.054351 +v 0.043461 0.662138 -0.011981 +v 0.043638 0.645686 0.053819 +v 0.043755 0.654168 -0.018121 +v 0.044004 0.630845 0.055462 +v 0.044004 0.630845 0.055462 +v 0.044107 0.651968 0.055600 +v 0.044145 0.475661 -0.043619 +v 0.044189 0.386919 -0.039004 +v 0.044209 0.472090 0.048359 +v 0.044227 0.661417 -0.012654 +v 0.044692 0.642800 0.055014 +v 0.044752 0.653232 0.036824 +v 0.044797 0.286926 -0.029132 +v 0.044853 0.650118 -0.018679 +v 0.044878 0.655531 -0.005910 +v 0.044907 0.648262 0.043769 +v 0.045160 0.653277 0.047690 +v 0.045340 0.638905 0.054331 +v 0.045370 0.332887 0.006464 +v 0.045381 0.638228 0.055093 +v 0.045632 0.646464 0.056617 +v 0.045873 0.653005 -0.014591 +v 0.046160 0.659559 0.009239 +v 0.046473 0.631252 0.054248 +v 0.046575 0.618341 0.056505 +v 0.046693 0.652164 0.049796 +v 0.046789 0.233126 -0.036135 +v 0.046868 0.656518 -0.007489 +v 0.046930 0.545062 -0.052272 +v 0.047016 0.643009 -0.017576 +v 0.047016 0.643009 -0.017576 +v 0.047069 0.652069 0.049292 +v 0.047134 0.655066 0.028807 +v 0.047264 0.639468 0.057116 +v 0.047269 0.644966 -0.017599 +v 0.047327 0.648672 0.045553 +v 0.047381 0.639184 0.044694 +v 0.047532 0.647455 -0.007580 +v 0.047591 0.650802 0.049569 +v 0.047622 0.661850 -0.002851 +v 0.047855 0.624713 0.054077 +v 
0.047882 0.314087 0.002538 +v 0.048074 0.607927 -0.039775 +v 0.048121 0.657558 0.019918 +v 0.048460 0.652879 0.052308 +v 0.048475 0.646808 0.050125 +v 0.048690 0.631684 0.057182 +v 0.049333 0.642583 0.050200 +v 0.049350 0.654820 0.002069 +v 0.049437 0.657073 -0.009446 +v 0.049505 0.649748 0.036743 +v 0.049519 0.661751 -0.003956 +v 0.049617 0.422694 0.040077 +v 0.049652 0.625151 0.056630 +v 0.049792 0.648151 -0.009076 +v 0.050083 0.639160 0.046474 +v 0.050153 0.660888 -0.004407 +v 0.050228 0.300625 -0.000662 +v 0.050282 0.598843 0.056816 +v 0.050386 0.648418 0.047810 +v 0.050413 0.639915 0.050749 +v 0.050447 0.654664 0.040282 +v 0.050494 0.647570 0.052979 +v 0.050634 0.166052 -0.039444 +v 0.050818 0.653858 -0.010308 +v 0.051437 0.635137 0.050802 +v 0.051671 0.655746 0.001009 +v 0.051690 0.632142 0.050771 +v 0.051737 0.343397 -0.040868 +v 0.052031 0.644508 0.048493 +v 0.052072 0.649698 -0.010736 +v 0.052112 0.650268 0.037994 +v 0.052166 0.653817 0.010846 +v 0.052266 0.652893 -0.006602 +v 0.052268 0.651238 0.028624 +v 0.052323 0.661004 0.007500 +v 0.052429 0.653776 0.041568 +v 0.052453 0.640478 0.053636 +v 0.052456 0.646797 0.000876 +v 0.052468 0.632275 0.049622 +v 0.052468 0.632275 0.049622 +v 0.052510 0.640879 0.037313 +v 0.052678 0.625572 0.050940 +v 0.052779 0.659284 0.003752 +v 0.052849 0.284342 -0.002936 +v 0.052873 0.584107 -0.047588 +v 0.052942 0.639922 0.048527 +v 0.052972 0.652692 0.041834 +v 0.053154 0.652609 0.019849 +v 0.053287 0.325389 -0.044036 +v 0.053687 0.656710 0.031278 +v 0.053770 0.632602 0.053642 +v 0.053910 0.278366 -0.003870 +v 0.053937 0.298610 -0.050417 +v 0.054064 0.642985 -0.009526 +v 0.054064 0.642985 -0.009526 +v 0.054128 0.645219 -0.009457 +v 0.054276 0.660520 0.007003 +v 0.054287 0.316360 -0.045259 +v 0.054395 0.659015 0.020273 +v 0.054429 0.308609 -0.046828 +v 0.054479 0.626043 0.053426 +v 0.054517 0.659139 0.009223 +v 0.054646 0.656271 -0.000278 +v 0.054704 0.575093 0.056787 +v 0.054720 0.654666 0.010352 +v 0.054761 0.111034 -0.040151 +v 0.054849 0.659734 0.006805 +v 0.054962 0.651876 0.029297 +v 0.055048 0.654294 0.002573 +v 0.055099 0.647422 -0.000123 +v 0.055314 0.288451 -0.052199 +v 0.055412 0.650227 0.039580 +v 0.055435 0.640975 0.038572 +v 0.055446 0.644483 0.042100 +v 0.055465 0.536266 -0.046673 +v 0.055492 0.660737 0.003283 +v 0.055627 0.645749 0.010133 +v 0.055669 0.642660 0.028744 +v 0.055766 0.656113 0.031873 +v 0.055831 0.653361 0.019936 +v 0.056318 0.658549 0.020745 +v 0.056340 0.653142 -0.000904 +v 0.056395 0.655179 0.031962 +v 0.056507 0.563699 -0.050176 +v 0.056592 0.111726 -0.055244 +v 0.056718 0.644333 0.019555 +v 0.056783 0.632101 -0.013380 +v 0.056800 0.654055 0.008602 +v 0.057014 0.657766 0.020935 +v 0.057184 0.660546 0.008744 +v 0.057376 0.646471 0.040119 +v 0.057414 0.111854 -0.023534 +v 0.057473 0.232216 -0.062010 +v 0.057540 0.621875 0.048932 +v 0.057571 0.652370 0.004708 +v 0.057813 0.649016 -0.001256 +v 0.057917 0.648219 0.001200 +v 0.057994 0.655107 0.009782 +v 0.058075 0.233515 -0.008453 +v 0.058133 0.655883 0.002017 +v 0.058193 0.070391 -0.039333 +v 0.058435 0.652001 0.030175 +v 0.058452 0.548224 0.057251 +v 0.058543 0.646280 0.009672 +v 0.058551 0.641902 0.040030 +v 0.058690 0.069626 -0.021130 +v 0.058713 0.642900 0.029431 +v 0.058973 0.514806 -0.045260 +v 0.059114 0.636738 0.041381 +v 0.059256 0.653673 0.020091 +v 0.059276 0.634538 0.040379 +v 0.059276 0.634538 0.040379 +v 0.059306 0.647215 0.031497 +v 0.059414 0.647855 0.007304 +v 0.059516 0.640537 0.000227 +v 0.059765 0.644727 0.019662 +v 0.059829 0.655657 0.008053 +v 
0.059933 0.068854 -0.054609 +v 0.059958 0.651983 0.009444 +v 0.060147 0.166914 -0.066749 +v 0.060197 0.642446 0.001329 +v 0.060197 0.642446 0.001329 +v 0.060468 0.649659 0.019552 +v 0.060513 0.648499 0.030429 +v 0.060549 0.644289 0.001964 +v 0.061153 0.649656 0.000627 +v 0.061374 0.650392 0.020049 +v 0.061537 0.640441 0.006221 +v 0.061566 0.169078 -0.009886 +v 0.061576 0.647818 0.009151 +v 0.061598 0.477997 -0.045773 +v 0.061962 0.644092 0.030264 +v 0.062143 0.634285 -0.000611 +v 0.062479 0.649252 0.006793 +v 0.062885 0.641962 -0.000436 +v 0.062937 0.519168 0.057960 +v 0.062948 0.646003 0.019794 +v 0.062989 0.636789 0.029802 +v 0.062989 0.636789 0.029802 +v 0.063036 0.640859 0.010938 +v 0.063036 0.640859 0.010938 +v 0.063216 0.639005 0.030416 +v 0.063408 0.642808 0.011129 +v 0.063774 0.634111 0.004848 +v 0.063958 0.552169 -0.045941 +v 0.064005 0.632454 -0.001041 +v 0.064156 0.112865 -0.071315 +v 0.064217 0.638739 0.021464 +v 0.064217 0.638739 0.021464 +v 0.064313 0.640781 0.021630 +v 0.064478 0.069112 -0.002944 +v 0.064567 0.112500 -0.005668 +v 0.064815 0.635492 -0.001123 +v 0.064912 0.641813 0.005508 +v 0.065019 0.604892 0.047081 +v 0.065171 0.066960 -0.039228 +v 0.065189 0.065802 -0.024876 +v 0.065502 0.433205 -0.045727 +v 0.065617 0.608834 -0.021765 +v 0.065688 0.625395 0.037812 +v 0.066052 0.477514 0.054752 +v 0.066562 0.635322 0.004316 +v 0.066645 0.066053 -0.051789 +v 0.067167 0.069576 -0.070099 +v 0.067519 0.372730 0.036060 +v 0.067623 0.424073 0.046573 +v 0.067992 0.631258 0.010043 +v 0.068413 0.389389 -0.046189 +v 0.069116 0.065617 -0.008786 +v 0.069233 0.628170 0.026967 +v 0.069256 0.329256 0.025156 +v 0.069322 0.629788 0.019387 +v 0.071169 0.066379 -0.065284 +v 0.071421 0.309465 0.021478 +v 0.071520 0.344657 -0.047573 +v 0.071670 0.585705 -0.030092 +v 0.071846 0.296829 0.016683 +v 0.072423 0.113721 0.004064 +v 0.073142 0.563079 -0.037723 +v 0.073217 0.581171 0.046122 +v 0.073256 0.326655 -0.051531 +v 0.073608 0.610100 -0.006008 +v 0.073769 0.543539 -0.040060 +v 0.074046 0.069381 0.009141 +v 0.074210 0.113679 -0.079188 +v 0.074447 0.277647 0.009664 +v 0.074810 0.319539 -0.052030 +v 0.074865 0.608886 0.033529 +v 0.075888 0.311021 -0.055469 +v 0.075903 0.302835 -0.056434 +v 0.076067 0.066163 0.002460 +v 0.076705 0.069523 -0.078414 +v 0.076712 0.291432 -0.059975 +v 0.077423 0.610625 0.007150 +v 0.077473 0.078870 -0.042188 +v 0.078072 0.076112 -0.026235 +v 0.078472 0.611547 0.022310 +v 0.078632 0.076863 -0.056756 +v 0.078693 0.518718 -0.037943 +v 0.078945 0.235076 0.004100 +v 0.079647 0.066824 -0.073323 +v 0.079840 0.556751 0.046520 +v 0.080737 0.234349 -0.070224 +v 0.082316 0.585639 -0.011168 +v 0.082620 0.171127 0.004408 +v 0.083255 0.482054 -0.036573 +v 0.084107 0.168930 -0.076093 +v 0.084745 0.586242 0.027536 +v 0.085733 0.114799 0.010392 +v 0.086009 0.526425 0.046265 +v 0.086269 0.587602 0.004343 +v 0.086676 0.067003 0.008337 +v 0.087092 0.071309 0.015483 +v 0.087437 0.588773 0.014335 +v 0.087919 0.114723 -0.083305 +v 0.088443 0.079458 -0.027295 +v 0.088908 0.333060 0.014917 +v 0.088981 0.558755 -0.016180 +v 0.089214 0.433707 -0.033006 +v 0.089599 0.377566 0.025499 +v 0.089720 0.478850 0.043073 +v 0.089813 0.066990 -0.077099 +v 0.089942 0.079599 -0.060736 +v 0.089956 0.070294 -0.083249 +v 0.089980 0.389467 -0.034559 +v 0.090134 0.083726 -0.044998 +v 0.090250 0.426836 0.035458 +v 0.090343 0.313328 0.010323 +v 0.090723 0.300217 0.005796 +v 0.091408 0.562989 0.023613 +v 0.091594 0.283254 0.002031 +v 0.092192 0.565431 0.004623 +v 0.092302 0.346021 -0.037606 +v 0.093714 0.525125 
-0.019552 +v 0.093879 0.328443 -0.041456 +v 0.094211 0.320505 -0.042085 +v 0.096251 0.311783 -0.045575 +v 0.096424 0.484151 -0.020224 +v 0.096536 0.067602 0.006585 +v 0.096772 0.303053 -0.043957 +v 0.097028 0.114296 0.006793 +v 0.097585 0.236748 -0.005201 +v 0.098093 0.527105 0.022019 +v 0.098142 0.290888 -0.049353 +v 0.098220 0.339800 -0.003590 +v 0.099077 0.312332 -0.010350 +v 0.099213 0.078028 -0.027853 +v 0.099222 0.319537 -0.008611 +v 0.099413 0.072038 0.012098 +v 0.099599 0.344275 -0.018264 +v 0.099611 0.304553 -0.012107 +v 0.099646 0.383106 0.005760 +v 0.099798 0.076979 -0.059916 +v 0.099926 0.291915 -0.014403 +v 0.100110 0.314859 -0.024251 +v 0.100161 0.066715 -0.073674 +v 0.100347 0.527985 0.002671 +v 0.100437 0.287618 -0.015282 +v 0.100513 0.323035 -0.023104 +v 0.101020 0.299156 -0.027751 +v 0.101352 0.481206 0.018241 +v 0.101483 0.387531 -0.009867 +v 0.101596 0.430925 0.013096 +v 0.101717 0.308054 -0.026311 +v 0.102313 0.171155 -0.005592 +v 0.102719 0.290144 -0.030704 +v 0.103072 0.079066 -0.046921 +v 0.103092 0.116569 -0.078261 +v 0.103358 0.433643 -0.003575 +v 0.103442 0.482462 0.000494 +v 0.104218 0.070102 -0.078918 +v 0.104643 0.236882 -0.059442 +v 0.104915 0.067721 -0.004859 +v 0.107396 0.114855 -0.004984 +v 0.107444 0.238114 -0.023602 +v 0.108300 0.066664 -0.062010 +v 0.108996 0.067979 -0.026001 +v 0.109906 0.173404 -0.063549 +v 0.110440 0.071647 -0.001928 +v 0.110998 0.238177 -0.038462 +v 0.111500 0.068244 -0.045575 +v 0.112747 0.171282 -0.026957 +v 0.114423 0.114493 -0.066232 +v 0.115034 0.070490 -0.065248 +v 0.115727 0.114979 -0.027638 +v 0.115789 0.170910 -0.043737 +v 0.116695 0.071272 -0.024835 +v 0.118527 0.072579 -0.047135 +v 0.119017 0.114441 -0.047530 +v -0.115785 0.009832 -0.002756 +v -0.115766 0.003877 0.005447 +v -0.115732 0.014753 0.001403 +v -0.115712 0.004221 0.000639 +v -0.115593 0.018613 -0.004557 +v -0.115206 0.005265 -0.003927 +v -0.115182 0.003056 0.000294 +v -0.115175 0.002509 0.005083 +v -0.115131 0.024685 -0.009262 +v -0.115075 0.022453 -0.000770 +v -0.115046 0.026730 -0.005490 +v -0.114955 0.004306 -0.004090 +v -0.114930 0.003633 0.011601 +v -0.114898 0.014376 -0.008316 +v -0.114898 0.029335 -0.011578 +v -0.114882 0.031172 -0.008509 +v -0.114751 0.018834 0.005203 +v -0.114663 0.036309 -0.014342 +v -0.114574 0.006641 -0.007226 +v -0.114532 0.010848 0.011557 +v -0.114520 0.037429 -0.011166 +v -0.114331 0.002170 0.011299 +v -0.114268 0.005934 -0.007779 +v -0.114044 0.047132 -0.018672 +v -0.113789 0.021351 -0.013458 +v -0.113715 0.017326 0.011403 +v -0.113626 0.003674 0.017477 +v -0.113505 0.008669 -0.010557 +v -0.113409 0.026013 -0.015796 +v -0.113130 0.007885 -0.010997 +v -0.113024 0.002143 0.017088 +v -0.113018 0.010552 0.017934 +v -0.112961 0.058124 -0.024019 +v -0.112914 0.049506 -0.014589 +v -0.112821 0.033944 -0.019871 +v -0.112660 0.035183 -0.002402 +v -0.112647 0.031623 0.000387 +v -0.112609 0.028639 0.003980 +v -0.112539 0.010746 -0.012980 +v -0.112147 0.026641 0.008218 +v -0.112124 0.010302 -0.013646 +v -0.112103 0.067541 -0.032904 +v -0.112083 0.040623 -0.004938 +v -0.112033 0.053803 -0.031574 +v -0.111995 0.043053 -0.025499 +v -0.111981 0.016719 0.018225 +v -0.111707 0.072124 -0.039272 +v -0.111655 0.003724 0.023631 +v -0.111623 0.061882 -0.039102 +v -0.111536 0.065484 -0.042908 +v -0.111449 0.013479 -0.015451 +v -0.111291 0.010605 0.023397 +v -0.111192 0.050676 -0.012036 +v -0.111189 0.060385 -0.018950 +v -0.111171 0.075843 -0.044590 +v -0.111149 0.069629 -0.049041 +v -0.111147 0.025910 0.012462 +v -0.111066 0.002319 0.023384 +v -0.110994 
0.013016 -0.016108 +v -0.110249 0.017603 -0.018377 +v -0.110229 0.072340 -0.055143 +v -0.110082 0.079139 -0.051004 +v -0.109999 0.017733 0.023194 +v -0.109862 0.067705 -0.022657 +v -0.109788 0.077439 -0.036750 +v -0.109727 0.017151 -0.018939 +v -0.109530 0.061574 -0.015937 +v -0.109457 0.025801 0.017190 +v -0.109391 0.076206 -0.032871 +v -0.109372 0.080291 -0.040689 +v -0.109284 0.072813 -0.027361 +v -0.109280 0.074542 -0.029847 +v -0.109253 0.070805 -0.024581 +v -0.109247 0.024260 -0.021521 +v -0.108855 0.023907 -0.022125 +v -0.108686 0.075179 -0.060224 +v -0.108619 0.047523 -0.012813 +v -0.108435 0.003927 0.031306 +v -0.108431 0.027106 -0.000628 +v -0.108396 0.030601 -0.024638 +v -0.108348 0.024348 0.002628 +v -0.108347 0.030295 -0.002976 +v -0.108309 0.012552 0.030484 +v -0.108308 0.082266 -0.056377 +v -0.108074 0.084634 -0.047649 +v -0.108013 0.056124 -0.043558 +v -0.107963 0.022376 0.006643 +v -0.107945 0.035197 -0.005454 +v -0.107889 0.030048 -0.025350 +v -0.107819 0.002503 0.031159 +v -0.107809 0.040182 -0.030118 +v -0.107756 0.049251 -0.036568 +v -0.107748 0.061998 -0.050777 +v -0.107684 0.058934 -0.046560 +v -0.107671 0.057464 -0.016255 +v -0.107639 0.069790 -0.019409 +v -0.107447 0.026111 0.021251 +v -0.107405 0.019020 0.028141 +v -0.107370 0.048770 -0.037213 +v -0.107356 0.084967 -0.042059 +v -0.107327 0.055392 -0.044095 +v -0.107206 0.021745 0.010454 +v -0.107184 0.065634 -0.058547 +v -0.107113 0.039502 -0.030767 +v -0.107070 0.083251 -0.038021 +v -0.106915 0.058055 -0.046871 +v -0.106882 0.060598 -0.050967 +v -0.106715 0.073681 -0.021700 +v -0.106573 0.082402 -0.035405 +v -0.106408 0.087765 -0.044520 +v -0.106296 0.088301 -0.052785 +v -0.106270 0.077696 -0.064719 +v -0.106238 0.081099 -0.031998 +v -0.106127 0.060011 -0.051056 +v -0.106072 0.076709 -0.024213 +v -0.106048 0.068197 -0.062747 +v -0.106040 0.085737 -0.038719 +v -0.106023 0.064783 -0.019319 +v -0.105886 0.085238 -0.061012 +v -0.105877 0.079246 -0.027726 +v -0.105860 0.021548 0.014621 +v -0.105338 0.077692 -0.036836 +v -0.105230 0.068092 -0.021165 +v -0.104906 0.076507 -0.033451 +v -0.104857 0.026732 0.024687 +v -0.104707 0.061217 -0.053499 +v -0.104699 0.014818 0.037389 +v -0.104654 0.092211 -0.047705 +v -0.104617 0.089801 -0.039923 +v -0.104576 0.071187 -0.023578 +v -0.104546 0.075347 -0.030331 +v -0.104518 0.080001 -0.037558 +v -0.104429 0.091064 -0.056997 +v -0.104387 0.004022 0.039797 +v -0.104336 0.073697 -0.026723 +v -0.104238 0.021906 0.018254 +v -0.104133 0.060508 -0.053139 +v -0.103852 0.070076 -0.066732 +v -0.103659 0.002704 0.039674 +v -0.103306 0.021045 0.033504 +v -0.103050 0.083915 -0.038597 +v -0.102987 0.079926 -0.068405 +v -0.102906 0.096091 -0.051170 +v -0.102815 0.087715 -0.064904 +v -0.102667 0.095414 -0.042289 +v -0.102302 0.022455 0.021586 +v -0.102073 0.093808 -0.060304 +v -0.101725 0.060314 -0.057369 +v -0.101626 0.013373 0.044667 +v -0.101564 0.071743 -0.069571 +v -0.101249 0.059627 -0.056651 +v -0.101247 0.027643 0.028193 +v -0.101242 0.088995 -0.040780 +v -0.101177 0.099909 -0.045260 +v -0.100877 0.055177 -0.058504 +v -0.100874 0.099417 -0.054915 +v -0.100834 0.062610 -0.065664 +v -0.100607 0.055163 -0.057603 +v -0.100571 0.055203 -0.063876 +v -0.099914 0.004169 0.048091 +v -0.099891 0.092995 -0.043261 +v -0.099856 0.018375 0.042980 +v -0.099634 0.103903 -0.048766 +v -0.099556 0.013207 0.048790 +v -0.099520 0.049888 -0.059555 +v -0.099375 0.003075 0.047736 +v -0.099332 0.023141 0.024592 +v -0.099027 0.049890 -0.058461 +v -0.098782 0.089610 -0.068326 +v -0.098589 0.065124 -0.069675 +v 
-0.098553 0.081587 -0.071481 +v -0.098514 0.096610 -0.046308 +v -0.098505 0.041322 -0.060163 +v -0.098458 0.096423 -0.063952 +v -0.098444 0.023069 0.037645 +v -0.098370 0.055244 -0.068280 +v -0.098261 0.049888 -0.067207 +v -0.098117 0.102351 -0.058732 +v -0.097859 0.041332 -0.058603 +v -0.097665 0.041291 -0.066800 +v -0.097338 0.073028 -0.072145 +v -0.097308 0.019169 -0.059718 +v -0.097214 0.107891 -0.053345 +v -0.097069 0.028621 0.031138 +v -0.096684 0.005648 -0.059817 +v -0.096534 0.019167 -0.066825 +v -0.096421 0.100176 -0.050370 +v -0.096245 0.005647 -0.065595 +v -0.096198 0.000912 -0.059689 +v -0.096195 0.000866 -0.059685 +v -0.096185 0.019172 -0.058370 +v -0.096168 0.008502 -0.013712 +v -0.096102 0.002531 -0.005099 +v -0.096041 0.006337 -0.011381 +v -0.096038 0.004124 -0.008227 +v -0.095932 0.000900 -0.065348 +v -0.095929 0.000854 -0.065343 +v -0.095922 0.011671 0.056072 +v -0.095921 0.024192 0.027135 +v -0.095867 0.016131 -0.019543 +v -0.095860 0.011974 -0.016671 +v -0.095752 0.022619 -0.023363 +v -0.095530 0.005658 -0.058463 +v -0.095496 0.027749 -0.026087 +v -0.095446 0.066191 -0.071831 +v -0.095142 0.001228 -0.001072 +v -0.095135 0.000898 -0.058591 +v -0.095132 0.000852 -0.058595 +v -0.095098 0.004583 0.056449 +v -0.094642 0.005655 -0.067615 +v -0.094511 0.003620 0.056318 +v -0.094506 0.037239 -0.031168 +v -0.094431 0.019168 -0.068778 +v -0.094346 0.055273 -0.071105 +v -0.094342 0.041311 -0.069764 +v -0.094324 0.049889 -0.070483 +v -0.094259 0.000901 -0.067583 +v -0.094254 0.000855 -0.067582 +v -0.094186 0.110368 -0.056816 +v -0.094114 0.104600 -0.061830 +v -0.094061 0.090990 -0.070536 +v -0.093959 0.082817 -0.073157 +v -0.093849 0.098020 -0.066808 +v -0.093734 0.102646 -0.053507 +v -0.093588 0.021627 0.046490 +v -0.093515 0.073805 -0.073471 +v -0.093491 0.000387 0.003379 +v -0.093400 0.046877 -0.037454 +v -0.093348 0.024884 0.039702 +v -0.092502 0.017165 0.056081 +v -0.092316 0.053981 -0.043840 +v -0.092288 0.029488 0.033270 +v -0.092105 0.005657 -0.068482 +v -0.092004 0.024875 0.029297 +v -0.091993 0.000857 -0.068347 +v -0.091990 0.000810 -0.068314 +v -0.091836 0.019171 -0.069387 +v -0.091825 0.000073 0.007336 +v -0.091781 0.010521 0.063161 +v -0.091725 0.057027 -0.046472 +v -0.091400 0.058892 -0.055080 +v -0.091333 0.041331 -0.070511 +v -0.091325 0.058766 -0.050619 +v -0.091031 0.049888 -0.071243 +v -0.091003 0.059280 -0.052091 +v -0.090943 0.055277 -0.071748 +v -0.090630 0.005305 0.064197 +v -0.090278 0.074198 -0.073915 +v -0.090250 0.000001 0.012341 +v -0.090214 0.004470 0.064028 +v -0.090112 0.066589 -0.072938 +v -0.090108 0.091259 -0.071279 +v -0.090105 0.083178 -0.073705 +v -0.089866 0.014735 0.062807 +v -0.089751 0.104123 -0.054864 +v -0.089728 0.098529 -0.067578 +v -0.089675 0.111909 -0.058278 +v -0.089612 0.105362 -0.063112 +v -0.089261 0.000808 -0.067532 +v -0.089224 0.000855 -0.067576 +v -0.089070 0.005641 -0.067772 +v -0.088943 0.019174 -0.068702 +v -0.088558 0.009240 0.068481 +v -0.088280 0.000860 -0.058741 +v -0.088276 0.000906 -0.058740 +v -0.088209 0.000102 0.019369 +v -0.087867 0.000851 -0.065583 +v -0.087862 0.000896 -0.065581 +v -0.087789 0.011706 0.068527 +v -0.087764 0.005649 -0.058701 +v -0.087671 0.000833 -0.059646 +v -0.087663 0.000880 -0.059647 +v -0.087457 0.041326 -0.069575 +v -0.087388 0.005636 -0.065489 +v -0.087352 0.025503 0.030163 +v -0.087220 0.049890 -0.070390 +v -0.087218 0.005653 -0.059605 +v -0.087024 0.019183 -0.058742 +v -0.087023 0.006201 0.070054 +v -0.086851 0.019177 -0.066225 +v -0.086772 0.009066 0.070918 +v -0.086716 0.000326 
0.027014 +v -0.086663 0.019186 -0.059644 +v -0.086632 0.005349 0.069991 +v -0.086599 0.055252 -0.070682 +v -0.086406 0.026749 0.040972 +v -0.086399 0.000620 0.035981 +v -0.086145 0.030170 0.034302 +v -0.086134 0.019130 0.057716 +v -0.086104 0.090921 -0.070733 +v -0.086039 0.083070 -0.073154 +v -0.086016 0.074037 -0.073414 +v -0.085920 0.103415 -0.053701 +v -0.085896 0.001045 0.044279 +v -0.085875 0.023358 0.047538 +v -0.085703 0.006553 0.071703 +v -0.085647 0.098055 -0.066846 +v -0.085631 0.105246 -0.061998 +v -0.085617 0.015977 0.064306 +v -0.085467 0.005784 0.071418 +v -0.085449 0.001803 0.055018 +v -0.085434 0.111319 -0.056988 +v -0.085219 0.012955 0.069299 +v -0.085209 0.065901 -0.071662 +v -0.085144 0.002922 0.063793 +v -0.084870 0.009332 0.072185 +v -0.084850 0.041253 -0.058535 +v -0.084655 0.004688 0.069824 +v -0.084651 0.041340 -0.066777 +v -0.084571 0.006009 0.071765 +v -0.084551 0.006608 0.072038 +v -0.084363 0.041275 -0.059578 +v -0.083808 0.005853 0.071402 +v -0.083716 0.049900 -0.066758 +v -0.083469 0.006492 0.071642 +v -0.083466 0.022922 -0.021098 +v -0.083455 0.049905 -0.058386 +v -0.083382 0.101719 -0.051446 +v -0.083215 0.055202 -0.067553 +v -0.083193 0.018704 -0.018837 +v -0.083182 0.028706 -0.023791 +v -0.083153 0.049905 -0.059365 +v -0.082916 0.025786 0.029386 +v -0.082886 0.009321 0.071178 +v -0.082878 0.073373 -0.071977 +v -0.082531 0.023552 -0.020158 +v -0.082492 0.011952 0.069097 +v -0.082396 0.055107 -0.058009 +v -0.082395 0.109430 -0.054211 +v -0.082391 0.055109 -0.059068 +v -0.082341 0.029116 -0.022710 +v -0.082285 0.014529 -0.016142 +v -0.082146 0.038826 -0.028934 +v -0.082105 0.055168 -0.064247 +v -0.081984 0.005470 0.069351 +v -0.081981 0.019362 -0.017744 +v -0.081725 0.064833 -0.068950 +v -0.081636 0.103267 -0.059273 +v -0.081613 0.015054 0.064034 +v -0.081576 0.006349 0.069065 +v -0.081447 0.089882 -0.068303 +v -0.081427 0.096571 -0.064326 +v -0.081405 0.082050 -0.071041 +v -0.081341 0.039613 -0.027932 +v -0.081315 0.098808 -0.047969 +v -0.081282 0.009480 0.068873 +v -0.081222 0.015602 -0.015345 +v -0.081216 0.011995 -0.014077 +v -0.080927 0.030445 0.033527 +v -0.080589 0.059218 -0.056268 +v -0.080283 0.013035 -0.013366 +v -0.080102 0.106289 -0.050489 +v -0.080088 0.059986 -0.057046 +v -0.079995 0.095849 -0.044907 +v -0.079974 0.048162 -0.035064 +v -0.079924 0.062772 -0.064908 +v -0.079474 0.009641 -0.011508 +v -0.079362 0.025945 0.039450 +v -0.079328 0.025924 0.027521 +v -0.079271 0.048816 -0.034366 +v -0.078928 0.017540 0.056236 +v -0.078900 0.011385 0.063810 +v -0.078874 0.010782 -0.011209 +v -0.078847 0.071782 -0.068575 +v -0.078838 0.100599 -0.055208 +v -0.078788 0.005281 0.063845 +v -0.078758 0.006083 0.063744 +v -0.078625 0.103018 -0.047130 +v -0.078492 0.091851 -0.041456 +v -0.078211 0.008120 -0.009658 +v -0.077969 0.060538 -0.052009 +v -0.077628 0.093882 -0.059999 +v -0.077485 0.009111 -0.009248 +v -0.077330 0.022445 0.045316 +v -0.077153 0.061505 -0.052521 +v -0.077102 0.055608 -0.042156 +v -0.077081 0.088055 -0.038959 +v -0.076992 0.097656 -0.051433 +v -0.076984 0.007091 -0.007994 +v -0.076968 0.087464 -0.063903 +v -0.076955 0.098466 -0.043079 +v -0.076943 0.080167 -0.067094 +v -0.076690 0.056106 -0.041577 +v -0.076415 0.060226 -0.049641 +v -0.076380 0.030600 0.031431 +v -0.076312 0.008017 -0.007613 +v -0.076037 0.031316 -0.017995 +v -0.076025 0.058633 -0.046426 +v -0.075961 0.025584 -0.015160 +v -0.075807 0.069320 -0.063584 +v -0.075724 0.026011 0.024241 +v -0.075653 0.060953 -0.049806 +v -0.075597 0.083671 -0.036976 +v -0.075511 0.094570 
-0.048157 +v -0.075477 0.012593 0.056339 +v -0.075445 0.059303 -0.045913 +v -0.075428 0.094175 -0.040411 +v -0.075296 0.091456 -0.055522 +v -0.075114 0.005607 -0.005321 +v -0.075112 0.005688 0.055811 +v -0.075111 0.082230 -0.036066 +v -0.075060 0.081624 -0.034243 +v -0.075021 0.080251 -0.030108 +v -0.075000 0.004892 0.055660 +v -0.074909 0.078734 -0.026450 +v -0.074755 0.066968 -0.058984 +v -0.074727 0.042963 -0.023359 +v -0.074697 0.061857 -0.048422 +v -0.074697 0.076997 -0.023516 +v -0.074678 0.036630 -0.001650 +v -0.074529 0.006709 -0.005149 +v -0.074426 0.074471 -0.021250 +v -0.074364 0.064977 -0.054988 +v -0.074287 0.025222 0.036560 +v -0.074171 0.033032 0.000703 +v -0.074131 0.071022 -0.019073 +v -0.074123 0.091244 -0.045174 +v -0.074078 0.084875 -0.059193 +v -0.073824 0.077571 -0.062277 +v -0.073802 0.089603 -0.038078 +v -0.073611 0.088819 -0.051190 +v -0.073605 0.029946 0.003660 +v -0.073592 0.018158 -0.009359 +v -0.073453 0.025995 0.020888 +v -0.073399 0.086258 -0.031327 +v -0.073356 0.088292 -0.037253 +v -0.073354 0.059832 -0.013483 +v -0.073235 0.087697 -0.036130 +v -0.073155 0.084559 -0.027032 +v -0.072947 0.082667 -0.023880 +v -0.072909 0.004509 -0.002206 +v -0.072881 0.027815 0.007837 +v -0.072793 0.087784 -0.042694 +v -0.072593 0.049328 -0.009333 +v -0.072593 0.080137 -0.021605 +v -0.072533 0.053184 -0.028600 +v -0.072479 0.086666 -0.046579 +v -0.072369 0.026688 0.012350 +v -0.072349 0.005701 -0.002210 +v -0.072309 0.026250 0.016052 +v -0.072176 0.076193 -0.019019 +v -0.072123 0.082236 -0.054264 +v -0.072068 0.075099 -0.057289 +v -0.072037 0.015041 0.046973 +v -0.071815 0.081098 -0.028523 +v -0.071791 0.082292 -0.032001 +v -0.071699 0.079688 -0.026257 +v -0.071616 0.030608 0.028031 +v -0.071592 0.082971 -0.036215 +v -0.071583 0.061708 -0.036889 +v -0.071565 0.020362 0.040143 +v -0.071558 0.083900 -0.040546 +v -0.071535 0.077556 -0.023808 +v -0.071317 0.065176 -0.041808 +v -0.071242 0.072403 -0.052455 +v -0.071211 0.074225 -0.021433 +v -0.071085 0.063710 -0.012699 +v -0.071073 0.068476 -0.046392 +v -0.071017 0.034613 -0.012875 +v -0.070900 0.079519 -0.048986 +v -0.070667 0.029255 -0.009947 +v -0.070490 0.046764 -0.018194 +v -0.070369 0.003977 0.001479 +v -0.070302 0.014209 -0.003854 +v -0.070237 0.076315 -0.043089 +v -0.070116 0.004700 0.043308 +v -0.069981 0.005543 0.043370 +v -0.069967 0.073191 -0.037929 +v -0.069945 0.052504 -0.008349 +v -0.069944 0.062327 -0.016226 +v -0.069890 0.005120 0.001372 +v -0.069832 0.057487 -0.023310 +v -0.069807 0.068756 -0.032205 +v -0.069790 0.015913 0.040972 +v -0.069477 0.024210 0.031682 +v -0.069050 0.042373 -0.000954 +v -0.068869 0.050525 -0.012608 +v -0.068396 0.038269 -0.008458 +v -0.068284 0.030651 0.024118 +v -0.068179 0.038306 0.001707 +v -0.068067 0.021671 -0.002872 +v -0.067946 0.012592 0.037786 +v -0.067928 0.003905 0.005633 +v -0.067912 0.032289 -0.005345 +v -0.067859 0.018284 0.034232 +v -0.067583 0.004892 0.005620 +v -0.067478 0.035268 0.004719 +v -0.066717 0.032726 0.009204 +v -0.066582 0.027178 -0.000340 +v -0.066534 0.023678 0.026661 +v -0.066470 0.030916 0.018566 +v -0.066322 0.031467 0.014148 +v -0.066247 0.011175 0.005136 +v -0.065952 0.017617 0.028813 +v -0.065840 0.004611 0.030169 +v -0.065789 0.003964 0.010997 +v -0.065734 0.005427 0.030180 +v -0.065672 0.011766 0.029871 +v -0.065547 0.017536 0.005206 +v -0.065373 0.005161 0.011020 +v -0.065353 0.024459 0.006040 +v -0.064935 0.023081 0.019395 +v -0.064757 0.023220 0.013140 +v -0.064666 0.010889 0.011346 +v -0.064597 0.017143 0.020708 +v -0.064557 0.004411 0.020773 +v 
-0.064491 0.017046 0.012576 +v -0.064424 0.005411 0.020784 +v -0.064361 0.011425 0.021096 +v 0.064361 0.011424 0.021095 +v 0.064424 0.005410 0.020783 +v 0.064492 0.017046 0.012575 +v 0.064557 0.004410 0.020772 +v 0.064597 0.017142 0.020707 +v 0.064667 0.010888 0.011345 +v 0.064758 0.023219 0.013138 +v 0.064936 0.023081 0.019394 +v 0.065353 0.024459 0.006040 +v 0.065374 0.005160 0.011019 +v 0.065547 0.017535 0.005205 +v 0.065672 0.011766 0.029869 +v 0.065734 0.005427 0.030179 +v 0.065790 0.003964 0.010996 +v 0.065841 0.004611 0.030168 +v 0.065953 0.017617 0.028812 +v 0.066247 0.011175 0.005135 +v 0.066322 0.031466 0.014146 +v 0.066471 0.030915 0.018565 +v 0.066535 0.023677 0.026660 +v 0.066582 0.027178 -0.000340 +v 0.066718 0.032726 0.009203 +v 0.067479 0.035268 0.004718 +v 0.067584 0.004891 0.005619 +v 0.067859 0.018284 0.034231 +v 0.067912 0.032288 -0.005345 +v 0.067928 0.003905 0.005632 +v 0.067947 0.012592 0.037784 +v 0.068067 0.021670 -0.002873 +v 0.068179 0.038306 0.001706 +v 0.068284 0.030650 0.024117 +v 0.068396 0.038268 -0.008458 +v 0.068869 0.050525 -0.012609 +v 0.069051 0.042372 -0.000955 +v 0.069478 0.024210 0.031681 +v 0.069791 0.015913 0.040970 +v 0.069807 0.068755 -0.032205 +v 0.069833 0.057486 -0.023310 +v 0.069891 0.005120 0.001371 +v 0.069944 0.062327 -0.016226 +v 0.069946 0.052504 -0.008349 +v 0.069967 0.073190 -0.037929 +v 0.069981 0.005542 0.043369 +v 0.070116 0.004699 0.043306 +v 0.070238 0.076314 -0.043089 +v 0.070302 0.014209 -0.003854 +v 0.070369 0.003977 0.001478 +v 0.070490 0.046764 -0.018194 +v 0.070668 0.029254 -0.009947 +v 0.070900 0.079519 -0.048985 +v 0.071017 0.034612 -0.012875 +v 0.071074 0.068475 -0.046391 +v 0.071085 0.063710 -0.012699 +v 0.071212 0.074225 -0.021433 +v 0.071243 0.072403 -0.052454 +v 0.071318 0.065176 -0.041807 +v 0.071536 0.077556 -0.023808 +v 0.071559 0.083899 -0.040545 +v 0.071566 0.020362 0.040141 +v 0.071583 0.061707 -0.036888 +v 0.071593 0.082971 -0.036215 +v 0.071617 0.030608 0.028030 +v 0.071699 0.079687 -0.026257 +v 0.071791 0.082291 -0.032001 +v 0.071815 0.081098 -0.028522 +v 0.072038 0.015041 0.046972 +v 0.072069 0.075099 -0.057288 +v 0.072123 0.082235 -0.054263 +v 0.072177 0.076192 -0.019019 +v 0.072310 0.026250 0.016051 +v 0.072349 0.005701 -0.002211 +v 0.072370 0.026687 0.012349 +v 0.072480 0.086665 -0.046579 +v 0.072533 0.053184 -0.028600 +v 0.072593 0.080136 -0.021605 +v 0.072594 0.049328 -0.009334 +v 0.072794 0.087783 -0.042693 +v 0.072881 0.027814 0.007837 +v 0.072910 0.004509 -0.002207 +v 0.072947 0.082667 -0.023880 +v 0.073155 0.084559 -0.027032 +v 0.073236 0.087697 -0.036130 +v 0.073355 0.059832 -0.013483 +v 0.073356 0.088291 -0.037253 +v 0.073399 0.086257 -0.031327 +v 0.073454 0.025995 0.020887 +v 0.073592 0.018158 -0.009360 +v 0.073606 0.029945 0.003659 +v 0.073611 0.088819 -0.051189 +v 0.073803 0.089603 -0.038078 +v 0.073824 0.077571 -0.062276 +v 0.074078 0.084874 -0.059193 +v 0.074123 0.091243 -0.045173 +v 0.074131 0.071021 -0.019073 +v 0.074171 0.033032 0.000702 +v 0.074287 0.025222 0.036559 +v 0.074364 0.064977 -0.054987 +v 0.074426 0.074470 -0.021250 +v 0.074530 0.006708 -0.005149 +v 0.074679 0.036629 -0.001651 +v 0.074697 0.076997 -0.023516 +v 0.074698 0.061857 -0.048421 +v 0.074728 0.042963 -0.023359 +v 0.074755 0.066967 -0.058984 +v 0.074909 0.078734 -0.026450 +v 0.075000 0.004892 0.055657 +v 0.075021 0.080251 -0.030108 +v 0.075061 0.081624 -0.034242 +v 0.075111 0.082230 -0.036066 +v 0.075112 0.005687 0.055810 +v 0.075114 0.005606 -0.005321 +v 0.075296 0.091456 -0.055521 +v 0.075429 0.094174 -0.040410 +v 
0.075445 0.059303 -0.045912 +v 0.075478 0.012593 0.056336 +v 0.075511 0.094570 -0.048156 +v 0.075597 0.083670 -0.036976 +v 0.075654 0.060953 -0.049805 +v 0.075725 0.026011 0.024240 +v 0.075807 0.069320 -0.063583 +v 0.075961 0.025584 -0.015160 +v 0.076025 0.058633 -0.046426 +v 0.076038 0.031316 -0.017995 +v 0.076312 0.008017 -0.007614 +v 0.076381 0.030600 0.031430 +v 0.076415 0.060225 -0.049641 +v 0.076690 0.056106 -0.041577 +v 0.076944 0.080166 -0.067093 +v 0.076955 0.098465 -0.043078 +v 0.076968 0.087463 -0.063902 +v 0.076985 0.007091 -0.007994 +v 0.076992 0.097655 -0.051431 +v 0.077081 0.088054 -0.038958 +v 0.077103 0.055608 -0.042156 +v 0.077153 0.061504 -0.052520 +v 0.077330 0.022445 0.045314 +v 0.077485 0.009111 -0.009249 +v 0.077628 0.093882 -0.059999 +v 0.077969 0.060538 -0.052008 +v 0.078211 0.008120 -0.009658 +v 0.078492 0.091850 -0.041456 +v 0.078625 0.103018 -0.047130 +v 0.078759 0.006082 0.063741 +v 0.078789 0.005280 0.063843 +v 0.078838 0.100598 -0.055208 +v 0.078847 0.071781 -0.068575 +v 0.078874 0.010782 -0.011210 +v 0.078901 0.011385 0.063808 +v 0.078929 0.017540 0.056234 +v 0.079271 0.048816 -0.034366 +v 0.079329 0.025923 0.027520 +v 0.079362 0.025945 0.039448 +v 0.079474 0.009641 -0.011508 +v 0.079924 0.062772 -0.064907 +v 0.079974 0.048162 -0.035064 +v 0.079994 0.095848 -0.044906 +v 0.080089 0.059985 -0.057045 +v 0.080103 0.106289 -0.050488 +v 0.080283 0.013035 -0.013366 +v 0.080590 0.059218 -0.056267 +v 0.080928 0.030445 0.033526 +v 0.081216 0.011994 -0.014077 +v 0.081222 0.015602 -0.015345 +v 0.081283 0.009480 0.068871 +v 0.081315 0.098808 -0.047969 +v 0.081342 0.039612 -0.027932 +v 0.081406 0.082050 -0.071040 +v 0.081427 0.096570 -0.064325 +v 0.081447 0.089881 -0.068302 +v 0.081576 0.006348 0.069063 +v 0.081614 0.015054 0.064032 +v 0.081636 0.103266 -0.059272 +v 0.081726 0.064833 -0.068948 +v 0.081981 0.019361 -0.017744 +v 0.081984 0.005470 0.069348 +v 0.082106 0.055167 -0.064246 +v 0.082146 0.038826 -0.028933 +v 0.082285 0.014529 -0.016142 +v 0.082341 0.029116 -0.022710 +v 0.082391 0.055108 -0.059067 +v 0.082396 0.109429 -0.054210 +v 0.082396 0.055107 -0.058008 +v 0.082493 0.011952 0.069095 +v 0.082532 0.023552 -0.020158 +v 0.082879 0.073372 -0.071976 +v 0.082887 0.009321 0.071175 +v 0.082917 0.025785 0.029385 +v 0.083153 0.049904 -0.059364 +v 0.083183 0.028706 -0.023791 +v 0.083194 0.018704 -0.018837 +v 0.083215 0.055201 -0.067553 +v 0.083383 0.101719 -0.051446 +v 0.083455 0.049904 -0.058385 +v 0.083466 0.022922 -0.021098 +v 0.083470 0.006492 0.071639 +v 0.083716 0.049899 -0.066758 +v 0.083809 0.005853 0.071400 +v 0.084364 0.041275 -0.059578 +v 0.084551 0.006607 0.072035 +v 0.084571 0.006008 0.071763 +v 0.084651 0.041340 -0.066776 +v 0.084656 0.004687 0.069821 +v 0.084850 0.041253 -0.058534 +v 0.084870 0.009331 0.072183 +v 0.085145 0.002922 0.063791 +v 0.085209 0.065900 -0.071660 +v 0.085219 0.012955 0.069297 +v 0.085434 0.111318 -0.056987 +v 0.085450 0.001803 0.055016 +v 0.085468 0.005783 0.071416 +v 0.085617 0.015977 0.064303 +v 0.085631 0.105245 -0.061997 +v 0.085647 0.098054 -0.066845 +v 0.085702 0.006553 0.071701 +v 0.085875 0.023358 0.047536 +v 0.085896 0.001044 0.044277 +v 0.085920 0.103414 -0.053700 +v 0.086016 0.074036 -0.073413 +v 0.086040 0.083070 -0.073153 +v 0.086104 0.090920 -0.070732 +v 0.086135 0.019129 0.057713 +v 0.086145 0.030170 0.034301 +v 0.086399 0.000620 0.035980 +v 0.086406 0.026748 0.040970 +v 0.086600 0.055252 -0.070681 +v 0.086632 0.005349 0.069989 +v 0.086663 0.019186 -0.059643 +v 0.086716 0.000326 0.027014 +v 0.086773 0.009066 0.070915 
+v 0.086851 0.019176 -0.066224 +v 0.087024 0.006201 0.070051 +v 0.087025 0.019183 -0.058742 +v 0.087218 0.005652 -0.059604 +v 0.087220 0.049889 -0.070389 +v 0.087352 0.025503 0.030161 +v 0.087389 0.005635 -0.065487 +v 0.087457 0.041326 -0.069574 +v 0.087663 0.000879 -0.059645 +v 0.087671 0.000833 -0.059645 +v 0.087764 0.005649 -0.058700 +v 0.087790 0.011705 0.068525 +v 0.087862 0.000896 -0.065580 +v 0.087868 0.000850 -0.065582 +v 0.088208 0.000101 0.019368 +v 0.088276 0.000905 -0.058739 +v 0.088281 0.000860 -0.058740 +v 0.088558 0.009239 0.068478 +v 0.088943 0.019173 -0.068701 +v 0.089070 0.005641 -0.067772 +v 0.089225 0.000855 -0.067576 +v 0.089261 0.000808 -0.067531 +v 0.089612 0.105362 -0.063112 +v 0.089675 0.111908 -0.058277 +v 0.089728 0.098529 -0.067578 +v 0.089751 0.104123 -0.054863 +v 0.089866 0.014735 0.062805 +v 0.090105 0.083178 -0.073704 +v 0.090109 0.091259 -0.071278 +v 0.090112 0.066589 -0.072937 +v 0.090215 0.004470 0.064025 +v 0.090251 0.000000 0.012340 +v 0.090279 0.074198 -0.073914 +v 0.090629 0.005305 0.064195 +v 0.090943 0.055277 -0.071747 +v 0.091004 0.059280 -0.052090 +v 0.091032 0.049887 -0.071242 +v 0.091325 0.058765 -0.050618 +v 0.091333 0.041331 -0.070510 +v 0.091401 0.058891 -0.055078 +v 0.091725 0.057027 -0.046472 +v 0.091782 0.010521 0.063159 +v 0.091826 0.000072 0.007335 +v 0.091836 0.019170 -0.069386 +v 0.091991 0.000809 -0.068312 +v 0.091993 0.000857 -0.068346 +v 0.092005 0.024875 0.029296 +v 0.092105 0.005657 -0.068481 +v 0.092288 0.029488 0.033269 +v 0.092316 0.053981 -0.043840 +v 0.092502 0.017165 0.056078 +v 0.093349 0.024884 0.039701 +v 0.093400 0.046877 -0.037453 +v 0.093491 0.000386 0.003378 +v 0.093515 0.073804 -0.073470 +v 0.093588 0.021627 0.046488 +v 0.093734 0.102646 -0.053506 +v 0.093849 0.098020 -0.066806 +v 0.093959 0.082816 -0.073156 +v 0.094061 0.090989 -0.070535 +v 0.094114 0.104599 -0.061829 +v 0.094186 0.110367 -0.056815 +v 0.094254 0.000854 -0.067581 +v 0.094259 0.000900 -0.067582 +v 0.094324 0.049888 -0.070482 +v 0.094342 0.041310 -0.069763 +v 0.094346 0.055273 -0.071104 +v 0.094431 0.019168 -0.068778 +v 0.094506 0.037239 -0.031167 +v 0.094511 0.003619 0.056316 +v 0.094642 0.005654 -0.067614 +v 0.095098 0.004582 0.056446 +v 0.095132 0.000852 -0.058594 +v 0.095136 0.000898 -0.058591 +v 0.095143 0.001228 -0.001073 +v 0.095446 0.066191 -0.071830 +v 0.095496 0.027749 -0.026087 +v 0.095530 0.005658 -0.058462 +v 0.095752 0.022619 -0.023363 +v 0.095860 0.011974 -0.016672 +v 0.095867 0.016131 -0.019543 +v 0.095921 0.024192 0.027134 +v 0.095922 0.011670 0.056070 +v 0.095929 0.000853 -0.065343 +v 0.095932 0.000900 -0.065348 +v 0.096038 0.004124 -0.008228 +v 0.096041 0.006337 -0.011381 +v 0.096102 0.002531 -0.005099 +v 0.096167 0.008502 -0.013712 +v 0.096185 0.019172 -0.058370 +v 0.096195 0.000865 -0.059684 +v 0.096199 0.000912 -0.059688 +v 0.096245 0.005646 -0.065594 +v 0.096421 0.100176 -0.050369 +v 0.096534 0.019166 -0.066824 +v 0.096684 0.005648 -0.059816 +v 0.097070 0.028621 0.031137 +v 0.097214 0.107890 -0.053345 +v 0.097309 0.019168 -0.059718 +v 0.097338 0.073028 -0.072144 +v 0.097665 0.041291 -0.066798 +v 0.097859 0.041332 -0.058602 +v 0.098118 0.102351 -0.058732 +v 0.098261 0.049887 -0.067206 +v 0.098370 0.055243 -0.068279 +v 0.098444 0.023069 0.037644 +v 0.098459 0.096422 -0.063951 +v 0.098505 0.041321 -0.060162 +v 0.098515 0.096609 -0.046307 +v 0.098553 0.081587 -0.071480 +v 0.098589 0.065123 -0.069674 +v 0.098783 0.089610 -0.068325 +v 0.099027 0.049890 -0.058461 +v 0.099332 0.023141 0.024590 +v 0.099375 0.003074 0.047734 +v 0.099520 
0.049888 -0.059554 +v 0.099557 0.013207 0.048788 +v 0.099634 0.103902 -0.048766 +v 0.099857 0.018375 0.042978 +v 0.099891 0.092994 -0.043260 +v 0.099914 0.004169 0.048090 +v 0.100571 0.055202 -0.063875 +v 0.100607 0.055163 -0.057602 +v 0.100834 0.062610 -0.065663 +v 0.100874 0.099416 -0.054915 +v 0.100877 0.055176 -0.058503 +v 0.101177 0.099908 -0.045259 +v 0.101242 0.088994 -0.040779 +v 0.101247 0.027643 0.028192 +v 0.101249 0.059626 -0.056650 +v 0.101565 0.071742 -0.069570 +v 0.101626 0.013373 0.044665 +v 0.101726 0.060313 -0.057368 +v 0.102073 0.093807 -0.060303 +v 0.102302 0.022455 0.021585 +v 0.102667 0.095413 -0.042289 +v 0.102815 0.087714 -0.064903 +v 0.102906 0.096090 -0.051170 +v 0.102987 0.079925 -0.068404 +v 0.103049 0.083915 -0.038597 +v 0.103306 0.021045 0.033503 +v 0.103659 0.002704 0.039673 +v 0.103852 0.070075 -0.066732 +v 0.104133 0.060507 -0.053139 +v 0.104238 0.021906 0.018253 +v 0.104336 0.073696 -0.026723 +v 0.104388 0.004022 0.039796 +v 0.104429 0.091063 -0.056996 +v 0.104518 0.080000 -0.037557 +v 0.104546 0.075347 -0.030331 +v 0.104576 0.071186 -0.023578 +v 0.104617 0.089801 -0.039923 +v 0.104654 0.092210 -0.047704 +v 0.104699 0.014818 0.037388 +v 0.104707 0.061217 -0.053498 +v 0.104857 0.026732 0.024686 +v 0.104907 0.076506 -0.033451 +v 0.105230 0.068091 -0.021165 +v 0.105338 0.077691 -0.036835 +v 0.105860 0.021548 0.014620 +v 0.105878 0.079246 -0.027726 +v 0.105886 0.085237 -0.061011 +v 0.106023 0.064783 -0.019319 +v 0.106040 0.085737 -0.038719 +v 0.106048 0.068196 -0.062746 +v 0.106072 0.076709 -0.024213 +v 0.106127 0.060010 -0.051056 +v 0.106238 0.081098 -0.031998 +v 0.106270 0.077696 -0.064719 +v 0.106296 0.088301 -0.052785 +v 0.106409 0.087764 -0.044519 +v 0.106573 0.082402 -0.035404 +v 0.106716 0.073680 -0.021700 +v 0.106882 0.060597 -0.050967 +v 0.106915 0.058055 -0.046871 +v 0.107069 0.083251 -0.038021 +v 0.107113 0.039502 -0.030767 +v 0.107184 0.065633 -0.058546 +v 0.107206 0.021744 0.010453 +v 0.107327 0.055392 -0.044095 +v 0.107357 0.084967 -0.042059 +v 0.107370 0.048769 -0.037213 +v 0.107405 0.019019 0.028140 +v 0.107447 0.026110 0.021250 +v 0.107639 0.069789 -0.019409 +v 0.107671 0.057464 -0.016256 +v 0.107684 0.058933 -0.046559 +v 0.107748 0.061997 -0.050776 +v 0.107756 0.049251 -0.036568 +v 0.107809 0.040182 -0.030118 +v 0.107818 0.002502 0.031157 +v 0.107889 0.030047 -0.025350 +v 0.107945 0.035197 -0.005455 +v 0.107963 0.022375 0.006642 +v 0.108014 0.056124 -0.043557 +v 0.108074 0.084633 -0.047649 +v 0.108308 0.082265 -0.056377 +v 0.108309 0.012552 0.030482 +v 0.108347 0.030295 -0.002976 +v 0.108348 0.024347 0.002627 +v 0.108397 0.030601 -0.024638 +v 0.108431 0.027106 -0.000628 +v 0.108435 0.003926 0.031305 +v 0.108619 0.047523 -0.012813 +v 0.108686 0.075178 -0.060224 +v 0.108856 0.023907 -0.022125 +v 0.109247 0.024260 -0.021522 +v 0.109253 0.070804 -0.024581 +v 0.109280 0.074541 -0.029847 +v 0.109284 0.072812 -0.027361 +v 0.109372 0.080290 -0.040689 +v 0.109391 0.076206 -0.032871 +v 0.109457 0.025801 0.017189 +v 0.109530 0.061574 -0.015937 +v 0.109727 0.017151 -0.018939 +v 0.109788 0.077439 -0.036749 +v 0.109862 0.067704 -0.022657 +v 0.109999 0.017733 0.023193 +v 0.110082 0.079139 -0.051002 +v 0.110229 0.072339 -0.055142 +v 0.110249 0.017603 -0.018377 +v 0.110994 0.013016 -0.016108 +v 0.111066 0.002318 0.023383 +v 0.111147 0.025910 0.012461 +v 0.111149 0.069629 -0.049041 +v 0.111171 0.075843 -0.044589 +v 0.111189 0.060385 -0.018950 +v 0.111192 0.050675 -0.012036 +v 0.111291 0.010604 0.023396 +v 0.111449 0.013479 -0.015451 +v 0.111536 0.065483 
-0.042908 +v 0.111623 0.061881 -0.039102 +v 0.111655 0.003725 0.023630 +v 0.111707 0.072123 -0.039272 +v 0.111981 0.016719 0.018224 +v 0.111995 0.043053 -0.025499 +v 0.112033 0.053803 -0.031574 +v 0.112083 0.040622 -0.004938 +v 0.112103 0.067541 -0.032904 +v 0.112124 0.010301 -0.013646 +v 0.112147 0.026641 0.008218 +v 0.112540 0.010746 -0.012980 +v 0.112609 0.028638 0.003979 +v 0.112648 0.031622 0.000386 +v 0.112660 0.035183 -0.002403 +v 0.112821 0.033944 -0.019871 +v 0.112914 0.049506 -0.014589 +v 0.112962 0.058124 -0.024019 +v 0.113018 0.010552 0.017934 +v 0.113024 0.002143 0.017087 +v 0.113130 0.007884 -0.010997 +v 0.113409 0.026013 -0.015796 +v 0.113505 0.008669 -0.010557 +v 0.113626 0.003673 0.017476 +v 0.113715 0.017326 0.011402 +v 0.113790 0.021350 -0.013458 +v 0.114044 0.047131 -0.018672 +v 0.114268 0.005934 -0.007780 +v 0.114331 0.002169 0.011299 +v 0.114521 0.037429 -0.011166 +v 0.114532 0.010848 0.011557 +v 0.114574 0.006640 -0.007226 +v 0.114663 0.036308 -0.014342 +v 0.114751 0.018834 0.005203 +v 0.114882 0.031171 -0.008510 +v 0.114898 0.029335 -0.011578 +v 0.114899 0.014376 -0.008316 +v 0.114930 0.003633 0.011601 +v 0.114955 0.004306 -0.004090 +v 0.115047 0.026729 -0.005490 +v 0.115075 0.022453 -0.000770 +v 0.115131 0.024684 -0.009262 +v 0.115175 0.002508 0.005082 +v 0.115182 0.003056 0.000294 +v 0.115206 0.005265 -0.003927 +v 0.115593 0.018613 -0.004557 +v 0.115712 0.004220 0.000639 +v 0.115732 0.014752 0.001403 +v 0.115766 0.003876 0.005446 +v 0.115785 0.009832 -0.002757 +v -0.265006 0.523297 0.056262 +v -0.264562 0.525208 0.055358 +v -0.264151 0.527416 0.054157 +v -0.264072 0.511089 0.061978 +v -0.263838 0.509732 0.062616 +v -0.263471 0.521384 0.053151 +v -0.263207 0.525130 0.059506 +v -0.263189 0.523397 0.052099 +v -0.263185 0.508533 0.063275 +v -0.263023 0.526936 0.058494 +v -0.262612 0.525319 0.051042 +v -0.262582 0.529192 0.057636 +v -0.262498 0.521791 0.040989 +v -0.262398 0.519436 0.041800 +v -0.262345 0.513084 0.065240 +v -0.262235 0.511729 0.065870 +v -0.262196 0.523763 0.040365 +v -0.262143 0.509606 0.058451 +v -0.261943 0.505874 0.064727 +v -0.261940 0.510459 0.066610 +v -0.261919 0.508333 0.059126 +v -0.261633 0.507042 0.059791 +v -0.261409 0.538807 0.048138 +v -0.261285 0.509347 0.045385 +v -0.261223 0.522761 0.044858 +v -0.261158 0.520509 0.045724 +v -0.261070 0.507502 0.046126 +v -0.261065 0.507655 0.067983 +v -0.260959 0.529929 0.069442 +v -0.260949 0.524702 0.044324 +v -0.260839 0.504393 0.061402 +v -0.260827 0.532961 0.067586 +v -0.260656 0.506117 0.046700 +v -0.260490 0.535169 0.066148 +v -0.260386 0.522663 0.073987 +v -0.260246 0.510677 0.049002 +v -0.260161 0.521023 0.074816 +v -0.260024 0.508881 0.049625 +v -0.259981 0.501044 0.067441 +v -0.259942 0.547991 0.045239 +v -0.259917 0.520702 0.037395 +v -0.259870 0.545803 0.038961 +v -0.259851 0.519383 0.075971 +v -0.259833 0.531553 0.072513 +v -0.259815 0.518370 0.038063 +v -0.259682 0.528001 0.065846 +v -0.259680 0.507504 0.050281 +v -0.259677 0.534563 0.070520 +v -0.259671 0.540547 0.052691 +v -0.259634 0.535204 0.036903 +v -0.259567 0.530950 0.064155 +v -0.259499 0.503556 0.047559 +v -0.259476 0.522599 0.036672 +v -0.259421 0.544191 0.034573 +v -0.259381 0.521327 0.070299 +v -0.259381 0.500064 0.064179 +v -0.259276 0.536934 0.069133 +v -0.259254 0.519439 0.071597 +v -0.259243 0.509119 0.057875 +v -0.259158 0.533314 0.062618 +v -0.259136 0.503035 0.070265 +v -0.259098 0.535977 0.041037 +v -0.259054 0.522614 0.050803 +v -0.259051 0.524018 0.076912 +v -0.259047 0.518133 0.072237 +v -0.258996 0.504881 
0.051044 +v -0.258967 0.536875 0.044078 +v -0.258931 0.517279 0.077136 +v -0.258914 0.522617 0.077633 +v -0.258882 0.508583 0.041645 +v -0.258783 0.545742 0.059910 +v -0.258781 0.507016 0.042245 +v -0.258750 0.498274 0.069014 +v -0.258604 0.551154 0.050946 +v -0.258601 0.513494 0.066663 +v -0.258528 0.515823 0.073474 +v -0.258493 0.527946 0.060686 +v -0.258479 0.505570 0.042715 +v -0.258444 0.520751 0.078826 +v -0.258226 0.497956 0.065966 +v -0.257930 0.542475 0.056640 +v -0.257878 0.504852 0.060095 +v -0.257772 0.503023 0.043750 +v -0.257630 0.509054 0.069146 +v -0.257625 0.523164 0.047232 +v -0.257615 0.500470 0.071183 +v -0.257599 0.518714 0.079863 +v -0.257596 0.499657 0.049135 +v -0.257461 0.500982 0.051949 +v -0.257108 0.499983 0.063060 +v -0.256894 0.525890 0.027087 +v -0.256887 0.524132 0.027371 +v -0.256869 0.554211 0.056264 +v -0.256864 0.497910 0.065716 +v -0.256846 0.512727 0.079436 +v -0.256814 0.514988 0.028942 +v -0.256802 0.522230 0.027720 +v -0.256778 0.510096 0.051205 +v -0.256768 0.535379 0.032796 +v -0.256745 0.497912 0.069212 +v -0.256732 0.511649 0.076411 +v -0.256644 0.499708 0.045824 +v -0.256637 0.542864 0.029232 +v -0.256611 0.503837 0.071039 +v -0.256569 0.523255 0.031916 +v -0.256548 0.513432 0.029300 +v -0.256546 0.516061 0.032815 +v -0.256514 0.525145 0.031648 +v -0.256461 0.514489 0.032975 +v -0.256399 0.526829 0.031477 +v -0.256353 0.513502 0.033209 +v -0.256352 0.559244 0.037594 +v -0.256326 0.519034 0.069536 +v -0.256265 0.512497 0.029682 +v -0.256118 0.529269 0.062736 +v -0.256100 0.500789 0.071276 +v -0.256085 0.496254 0.050510 +v -0.256082 0.497776 0.052417 +v -0.256050 0.548053 0.063287 +v -0.256030 0.505963 0.052808 +v -0.256017 0.520014 0.036885 +v -0.255969 0.562329 0.044766 +v -0.255900 0.514312 0.081787 +v -0.255884 0.497019 0.047797 +v -0.255850 0.511776 0.033491 +v -0.255669 0.515192 0.071381 +v -0.255639 0.533808 0.044887 +v -0.255636 0.510870 0.030403 +v -0.255635 0.508290 0.081635 +v -0.255534 0.534694 0.026111 +v -0.255478 0.507947 0.079004 +v -0.255423 0.509404 0.058518 +v -0.255402 0.535339 0.072288 +v -0.255400 0.511987 0.057356 +v -0.255335 0.533241 0.044624 +v -0.255263 0.535253 0.030324 +v -0.255113 0.522315 0.052199 +v -0.255098 0.501510 0.053755 +v -0.255097 0.524741 0.078524 +v -0.255089 0.507518 0.041336 +v -0.255041 0.509223 0.034022 +v -0.254971 0.515370 0.064654 +v -0.254909 0.498720 0.066517 +v -0.254880 0.513036 0.065794 +v -0.254769 0.538588 0.055766 +v -0.254738 0.500591 0.070463 +v -0.254734 0.510477 0.074304 +v -0.254717 0.508410 0.031444 +v -0.254665 0.499086 0.068356 +v -0.254643 0.515153 0.034703 +v -0.254613 0.524688 0.033294 +v -0.254596 0.523326 0.024770 +v -0.254572 0.514525 0.026517 +v -0.254564 0.525088 0.024322 +v -0.254558 0.521502 0.025192 +v -0.254551 0.538886 0.056068 +v -0.254499 0.525820 0.059676 +v -0.254474 0.510204 0.083306 +v -0.254473 0.555747 0.030240 +v -0.254452 0.513050 0.027017 +v -0.254445 0.541888 0.025069 +v -0.254435 0.520670 0.080813 +v -0.254425 0.503421 0.042780 +v -0.254392 0.528369 0.058746 +v -0.254317 0.524982 0.051476 +v -0.254218 0.512081 0.027424 +v -0.254208 0.512435 0.035367 +v -0.254164 0.505928 0.060429 +v -0.254120 0.497983 0.053048 +v -0.253959 0.512501 0.049787 +v -0.253884 0.501077 0.063612 +v -0.253801 0.499325 0.044844 +v -0.253790 0.506399 0.034585 +v -0.253721 0.510423 0.028280 +v -0.253720 0.521305 0.046889 +v -0.253659 0.509842 0.035696 +v -0.253655 0.505029 0.032548 +v -0.253653 0.495918 0.050691 +v -0.253609 0.507571 0.078055 +v -0.253580 0.534103 0.023428 +v 
-0.253524 0.509657 0.067467 +v -0.253427 0.496567 0.047404 +v -0.253367 0.564664 0.050538 +v -0.253299 0.523872 0.046022 +v -0.253238 0.521345 0.069005 +v -0.253207 0.569053 0.032718 +v -0.253182 0.515460 0.082977 +v -0.253182 0.519084 0.070072 +v -0.253124 0.511542 0.062443 +v -0.253109 0.510342 0.050337 +v -0.253101 0.503952 0.069982 +v -0.253038 0.513975 0.061477 +v -0.253008 0.524298 0.056446 +v -0.252876 0.507085 0.035601 +v -0.252774 0.572106 0.039117 +v -0.252739 0.518822 0.039423 +v -0.252724 0.556111 0.059748 +v -0.252721 0.532377 0.033311 +v -0.252651 0.532175 0.032997 +v -0.252647 0.526586 0.055176 +v -0.252551 0.521501 0.038563 +v -0.252546 0.507546 0.029512 +v -0.252517 0.508159 0.081663 +v -0.252447 0.516294 0.071428 +v -0.252441 0.528361 0.064602 +v -0.252410 0.540512 0.022915 +v -0.252292 0.525422 0.076091 +v -0.252275 0.505326 0.030821 +v -0.252247 0.546794 0.065047 +v -0.252236 0.506646 0.051670 +v -0.252234 0.510246 0.083109 +v -0.252233 0.498982 0.052809 +v -0.252220 0.553352 0.025333 +v -0.252187 0.510211 0.042332 +v -0.252111 0.502534 0.066782 +v -0.252107 0.522762 0.024308 +v -0.252061 0.522722 0.033481 +v -0.252034 0.508395 0.077484 +v -0.251948 0.530635 0.063382 +v -0.251864 0.532851 0.071690 +v -0.251834 0.508201 0.043067 +v -0.251788 0.524791 0.033281 +v -0.251726 0.535214 0.070021 +v -0.251718 0.511504 0.073998 +v -0.251715 0.502316 0.052820 +v -0.251709 0.580123 0.026886 +v -0.251701 0.508116 0.063984 +v -0.251670 0.512870 0.026591 +v -0.251605 0.523491 0.077540 +v -0.251482 0.565894 0.026577 +v -0.251403 0.520007 0.043183 +v -0.251401 0.504476 0.033624 +v -0.251397 0.497421 0.048239 +v -0.251375 0.516666 0.034535 +v -0.251276 0.514676 0.035128 +v -0.251224 0.582930 0.033414 +v -0.251134 0.522570 0.042390 +v -0.251123 0.497693 0.050575 +v -0.251027 0.510305 0.027674 +v -0.251017 0.512891 0.035679 +v -0.250987 0.511399 0.046183 +v -0.250978 0.504959 0.044281 +v -0.250710 0.520974 0.078895 +v -0.250669 0.509850 0.036093 +v -0.250654 0.507631 0.029111 +v -0.250631 0.523293 0.072165 +v -0.250610 0.500550 0.046269 +v -0.250551 0.509365 0.046800 +v -0.250549 0.504955 0.031435 +v -0.250503 0.507146 0.035649 +v -0.250453 0.509461 0.079829 +v -0.250392 0.511499 0.082072 +v -0.250261 0.532639 0.066440 +v -0.250204 0.573956 0.044996 +v -0.250180 0.530308 0.068295 +v -0.250129 0.533342 0.022865 +v -0.250103 0.578217 0.027104 +v -0.250086 0.533315 0.047271 +v -0.250026 0.520811 0.073199 +v -0.250007 0.536767 0.055024 +v -0.249952 0.581201 0.034085 +v -0.249598 0.515804 0.081269 +v -0.249540 0.532301 0.043876 +v -0.249535 0.505935 0.048091 +v -0.249501 0.521635 0.025731 +v -0.249484 0.501121 0.049686 +v -0.249465 0.523691 0.025090 +v -0.249462 0.576655 0.020355 +v -0.249287 0.534968 0.051801 +v -0.249113 0.531388 0.035841 +v -0.249110 0.531869 0.033359 +v -0.249088 0.506212 0.034272 +v -0.249082 0.518565 0.074703 +v -0.248975 0.505931 0.031665 +v -0.248685 0.563418 0.021542 +v -0.248632 0.514591 0.027618 +v -0.248516 0.538073 0.057679 +v -0.248491 0.513407 0.077429 +v -0.248472 0.512697 0.028358 +v -0.248451 0.521696 0.030415 +v -0.248394 0.544038 0.064351 +v -0.248390 0.575461 0.021937 +v -0.248374 0.538908 0.022292 +v -0.248356 0.531493 0.039968 +v -0.248188 0.508079 0.030291 +v -0.248187 0.511038 0.029070 +v -0.248173 0.514315 0.032555 +v -0.248025 0.550640 0.020964 +v -0.247987 0.524091 0.030091 +v -0.247962 0.552519 0.061734 +v -0.247945 0.584445 0.039215 +v -0.247935 0.516199 0.031931 +v -0.247692 0.610015 0.011343 +v -0.247639 0.512368 0.033189 +v -0.247547 0.509242 
0.034196 +v -0.247342 0.582724 0.039252 +v -0.247197 0.540672 0.060928 +v -0.247047 0.593301 0.020807 +v -0.246978 0.532852 0.024443 +v -0.246519 0.566660 0.056915 +v -0.246507 0.596054 0.027032 +v -0.246249 0.573628 0.015890 +v -0.246224 0.589845 0.014314 +v -0.246132 0.575526 0.049919 +v -0.245901 0.531648 0.029355 +v -0.245706 0.602935 0.001672 +v -0.245433 0.572668 0.017312 +v -0.244768 0.614365 0.021043 +v -0.244642 0.606961 0.013952 +v -0.244381 0.560324 0.018003 +v -0.244378 0.537525 0.024356 +v -0.244313 0.549388 0.059960 +v -0.244243 0.598126 0.032878 +v -0.244196 0.541661 0.049424 +v -0.244132 0.585561 0.043365 +v -0.243945 0.543920 0.052875 +v -0.243923 0.539555 0.043739 +v -0.243826 0.593118 0.021305 +v -0.243794 0.546847 0.057588 +v -0.243552 0.601088 0.005435 +v -0.243398 0.583425 0.043263 +v -0.243393 0.595323 0.026289 +v -0.243383 0.538358 0.038702 +v -0.243368 0.548465 0.020129 +v -0.243166 0.590352 0.016106 +v -0.243067 0.564963 0.061378 +v -0.243055 0.536894 0.028279 +v -0.243008 0.537242 0.033959 +v -0.242617 0.585970 0.009452 +v -0.242527 0.583503 0.026393 +v -0.241889 0.610012 0.022047 +v -0.241580 0.596981 0.030969 +v -0.241568 0.585676 0.031640 +v -0.241170 0.562054 0.057738 +v -0.241083 0.560515 0.065276 +v -0.240856 0.577852 0.053857 +v -0.240818 0.599280 0.037623 +v -0.240816 0.569998 0.012324 +v -0.240777 0.569929 0.014546 +v -0.240278 0.587249 0.012214 +v -0.240201 0.586140 0.046597 +v -0.239818 0.572751 0.061173 +v -0.239766 0.583918 0.045678 +v -0.239733 0.561591 0.059878 +v -0.239533 0.555433 0.069816 +v -0.239257 0.553883 0.071515 +v -0.239218 0.595869 -0.004244 +v -0.239132 0.558667 0.017597 +v -0.239086 0.552129 0.072778 +v -0.239037 0.558143 0.062632 +v -0.238954 0.586932 0.036037 +v -0.238953 0.569322 0.064710 +v -0.238886 0.547328 0.022878 +v -0.238847 0.554281 0.043165 +v -0.238839 0.597905 0.034768 +v -0.238798 0.551716 0.068087 +v -0.238579 0.594966 0.000915 +v -0.238523 0.547761 0.071055 +v -0.238517 0.549795 0.074453 +v -0.238017 0.557374 0.049149 +v -0.237758 0.551011 0.036505 +v -0.237406 0.565604 0.068585 +v -0.237297 0.543085 0.075319 +v -0.237103 0.544850 0.078136 +v -0.236975 0.548015 0.028986 +v -0.236853 0.599223 0.041073 +v -0.236302 0.577166 0.018667 +v -0.236287 0.560914 0.054457 +v -0.236248 0.559846 0.057910 +v -0.236224 0.583827 0.047949 +v -0.236221 0.559860 0.073186 +v -0.236193 0.582252 0.007336 +v -0.236127 0.615537 0.032256 +v -0.236017 0.556496 0.061236 +v -0.235904 0.585759 0.049411 +v -0.235824 0.551903 0.065000 +v -0.235819 0.550068 0.065974 +v -0.235690 0.557954 0.074543 +v -0.235664 0.597860 0.037529 +v -0.235610 0.546516 0.068162 +v -0.235512 0.541630 0.080563 +v -0.235368 0.579813 0.055442 +v -0.235333 0.586593 0.038555 +v -0.235332 0.610759 0.030390 +v -0.235278 0.556136 0.075420 +v -0.235178 0.540499 0.078702 +v -0.235139 0.568028 0.015085 +v -0.235136 0.584273 0.010520 +v -0.235072 0.542182 0.072579 +v -0.234600 0.567724 0.011890 +v -0.234565 0.553096 0.076898 +v -0.234545 0.540229 0.076287 +v -0.234412 0.557691 0.020247 +v -0.233965 0.581257 0.029179 +v -0.233911 0.617490 0.007558 +v -0.233900 0.575594 0.061680 +v -0.233757 0.630643 -0.000088 +v -0.233378 0.547160 0.080404 +v -0.233272 0.572155 0.064328 +v -0.233011 0.623919 -0.009617 +v -0.232980 0.612473 0.000282 +v -0.232763 0.626946 0.002449 +v -0.232634 0.589875 -0.000796 +v -0.232422 0.567074 0.067972 +v -0.232400 0.542834 0.082632 +v -0.232394 0.620440 -0.007065 +v -0.232284 0.558489 0.024604 +v -0.232156 0.598153 0.042953 +v -0.232044 0.563853 0.038662 +v 
-0.231943 0.560692 0.032277 +v -0.231904 0.597003 0.039035 +v -0.231559 0.620096 0.014473 +v -0.231418 0.540619 0.080371 +v -0.231390 0.567788 0.017440 +v -0.231297 0.560794 0.072695 +v -0.231285 0.589398 -0.005450 +v -0.231260 0.582533 0.048365 +v -0.231190 0.575242 0.019973 +v -0.231084 0.584973 0.038727 +v -0.231057 0.559305 0.073895 +v -0.230846 0.558760 0.060681 +v -0.230794 0.578680 0.054167 +v -0.230748 0.584321 0.049512 +v -0.230727 0.553527 0.065429 +v -0.230610 0.634746 0.009791 +v -0.230576 0.557733 0.075105 +v -0.230497 0.563711 0.056705 +v -0.230362 0.566299 0.043769 +v -0.230217 0.551288 0.066346 +v -0.230137 0.554977 0.076928 +v -0.230069 0.541009 0.076231 +v -0.230068 0.574352 0.058932 +v -0.230021 0.566340 0.054172 +v -0.229858 0.567248 0.014757 +v -0.229841 0.547181 0.068485 +v -0.229835 0.542587 0.072801 +v -0.229831 0.581968 0.010869 +v -0.229764 0.630048 0.010683 +v -0.229603 0.579314 0.007821 +v -0.229457 0.570206 0.061944 +v -0.229426 0.549158 0.079983 +v -0.229212 0.545031 0.081702 +v -0.229169 0.565197 0.065527 +v -0.229051 0.580804 0.046707 +v -0.228752 0.569124 0.021429 +v -0.228740 0.557470 0.071530 +v -0.228733 0.607244 -0.003580 +v -0.228602 0.543216 0.080563 +v -0.228398 0.562139 0.062170 +v -0.228185 0.556220 0.067489 +v -0.228051 0.608440 0.034082 +v -0.228031 0.567622 0.058637 +v -0.227974 0.575896 0.051884 +v -0.227866 0.571252 0.055977 +v -0.227853 0.595171 0.038523 +v -0.227852 0.554344 0.069049 +v -0.227698 0.575827 0.023533 +v -0.227696 0.542978 0.077779 +v -0.227641 0.570355 0.049236 +v -0.227637 0.553306 0.074662 +v -0.227617 0.571716 0.028055 +v -0.227538 0.578250 0.043402 +v -0.227312 0.547572 0.078432 +v -0.227193 0.573485 0.032763 +v -0.227107 0.612285 -0.013141 +v -0.227097 0.595864 0.042313 +v -0.227000 0.550969 0.071595 +v -0.226999 0.612353 0.037482 +v -0.226993 0.581658 0.046712 +v -0.226945 0.576209 0.039215 +v -0.226663 0.546024 0.075358 +v -0.226188 0.569202 0.020852 +v -0.225957 0.620733 0.021601 +v -0.225868 0.581678 0.013436 +v -0.225825 0.577394 0.028569 +v -0.225680 0.613189 -0.017121 +v -0.225412 0.579238 0.043146 +v -0.225385 0.579436 0.033373 +v -0.224789 0.571557 0.027370 +v -0.224657 0.578953 0.011024 +v -0.224497 0.576347 0.037948 +v -0.224310 0.574021 0.033106 +v -0.224286 0.592864 0.037082 +v -0.223654 0.602895 -0.005041 +v -0.223610 0.630694 0.018475 +v -0.223125 0.582403 0.017312 +v -0.222870 0.635630 0.020121 +v -0.222841 0.585204 0.002385 +v -0.222641 0.592983 0.040513 +v -0.221580 0.584473 -0.002302 +v -0.221520 0.590188 0.034654 +v -0.221334 0.643359 -0.007955 +v -0.221191 0.579917 0.015820 +v -0.221047 0.604500 0.034642 +v -0.221047 0.583732 0.021995 +v -0.220230 0.637213 -0.017857 +v -0.219974 0.585448 0.026689 +v -0.219896 0.587441 0.031048 +v -0.219737 0.618752 0.024756 +v -0.219564 0.609699 0.010222 +v -0.219186 0.589641 0.037482 +v -0.218886 0.646533 0.002385 +v -0.218595 0.581577 0.021668 +v -0.218299 0.607288 0.038372 +v -0.217377 0.605068 -0.013713 +v -0.217255 0.583721 0.027532 +v -0.217158 0.586210 0.032977 +v -0.216359 0.605853 -0.017351 +v -0.216165 0.584511 0.008180 +v -0.215287 0.598904 -0.002324 +v -0.214725 0.628076 0.024274 +v -0.214549 0.598806 0.032242 +v -0.213754 0.615388 0.025233 +v -0.212886 0.627721 -0.024406 +v -0.212798 0.632951 0.026097 +v -0.212734 0.583588 0.004944 +v -0.212163 0.587454 0.016921 +v -0.211929 0.647013 0.013250 +v -0.210907 0.592151 0.025536 +v -0.210084 0.655695 -0.015468 +v -0.209638 0.599946 0.034748 +v -0.209583 0.598311 0.002627 +v -0.208892 0.650017 -0.026176 +v 
-0.208237 0.601280 -0.009870 +v -0.208202 0.610522 0.023184 +v -0.208103 0.657942 -0.004513 +v -0.207278 0.586165 0.015315 +v -0.206258 0.623338 0.026027 +v -0.206162 0.600821 0.010096 +v -0.205670 0.591831 0.026066 +v -0.205498 0.600869 -0.012590 +v -0.205089 0.604833 0.017456 +v -0.203871 0.620153 -0.024950 +v -0.202854 0.643610 0.019339 +v -0.202593 0.627012 0.027876 +v -0.201584 0.641148 -0.032828 +v -0.201429 0.657417 0.006518 +v -0.201226 0.600398 -0.003587 +v -0.198596 0.616195 0.023334 +v -0.198191 0.599722 -0.006282 +v -0.196973 0.670774 -0.024554 +v -0.195855 0.665608 -0.036543 +v -0.195827 0.602783 0.006057 +v -0.195410 0.671862 -0.012732 +v -0.194587 0.607926 0.015140 +v -0.194106 0.618028 0.024518 +v -0.193561 0.615266 -0.019561 +v -0.193552 0.637206 0.020787 +v -0.193419 0.653332 0.012507 +v -0.192071 0.633344 -0.032620 +v -0.191353 0.602746 0.004214 +v -0.189628 0.609321 0.015736 +v -0.188754 0.669693 -0.001732 +v -0.188624 0.657108 -0.044009 +v -0.186345 0.629535 0.017428 +v -0.186278 0.613631 -0.012240 +v -0.185014 0.646859 0.014145 +v -0.184556 0.685217 -0.033211 +v -0.183742 0.680501 -0.045730 +v -0.182117 0.628551 -0.026430 +v -0.182099 0.664780 0.003996 +v -0.181617 0.685808 -0.021937 +v -0.181135 0.621843 0.009942 +v -0.180303 0.615674 -0.000826 +v -0.178483 0.640356 0.011470 +v -0.177967 0.649008 -0.042175 +v -0.177840 0.692250 -0.037752 +v -0.177311 0.699771 -0.026264 +v -0.176926 0.691427 -0.024691 +v -0.176760 0.688684 -0.048476 +v -0.175888 0.672547 -0.053328 +v -0.175135 0.698572 -0.040297 +v -0.175071 0.684367 -0.011146 +v -0.174835 0.626513 -0.017999 +v -0.174797 0.658479 0.006211 +v -0.172569 0.633376 0.004800 +v -0.171836 0.687581 -0.012905 +v -0.170586 0.695375 -0.009587 +v -0.170358 0.701717 -0.041546 +v -0.170127 0.628044 -0.005599 +v -0.170060 0.697340 -0.049014 +v -0.169924 0.702262 -0.028125 +v -0.168709 0.653325 0.004827 +v -0.168447 0.681013 -0.056495 +v -0.168320 0.644322 -0.034879 +v -0.167036 0.681464 -0.004647 +v -0.166422 0.699497 -0.014229 +v -0.165304 0.711794 -0.027414 +v -0.164714 0.664802 -0.049995 +v -0.164221 0.689350 -0.002383 +v -0.164040 0.711359 -0.040431 +v -0.161947 0.647034 -0.001146 +v -0.161194 0.694121 -0.004404 +v -0.161019 0.641836 -0.025010 +v -0.160882 0.691035 -0.056432 +v -0.160551 0.707383 -0.049664 +v -0.160178 0.708533 -0.013035 +v -0.159278 0.675501 -0.000711 +v -0.158355 0.722003 -0.027140 +v -0.158142 0.673000 -0.053632 +v -0.158134 0.643027 -0.011343 +v -0.157280 0.721905 -0.040436 +v -0.155866 0.660453 -0.041889 +v -0.155414 0.687381 -0.000879 +v -0.154215 0.703949 -0.005661 +v -0.153954 0.717760 -0.013191 +v -0.153255 0.717797 -0.050347 +v -0.153202 0.671393 -0.002368 +v -0.153063 0.701232 -0.055626 +v -0.150408 0.683773 -0.054054 +v -0.150270 0.669436 -0.044587 +v -0.148487 0.657159 -0.031276 +v -0.148025 0.712556 -0.005462 +v -0.146819 0.711300 -0.055728 +v -0.146428 0.681028 -0.005793 +v -0.146291 0.698394 -0.001910 +v -0.145575 0.665722 -0.008265 +v -0.145390 0.741120 -0.039314 +v -0.145207 0.741683 -0.025778 +v -0.143223 0.659819 -0.017649 +v -0.143220 0.665900 -0.033855 +v -0.143007 0.694493 -0.054284 +v -0.142042 0.678811 -0.046437 +v -0.142024 0.735882 -0.050628 +v -0.141795 0.683709 -0.002529 +v -0.141202 0.737614 -0.013140 +v -0.140225 0.706406 -0.001888 +v -0.140150 0.663884 -0.019489 +v -0.137815 0.676502 -0.013536 +v -0.135417 0.675539 -0.036637 +v -0.135378 0.705266 -0.054637 +v -0.135041 0.731378 -0.005055 +v -0.134998 0.693885 -0.003925 +v -0.134828 0.728218 -0.056425 +v -0.134673 0.689052 
-0.047571 +v -0.133175 0.674533 -0.023428 +v -0.132366 0.759139 -0.038650 +v -0.132331 0.759637 -0.025002 +v -0.130896 0.697916 -0.004453 +v -0.130024 0.680105 -0.010702 +v -0.129709 0.432415 0.010699 +v -0.129642 0.433137 -0.001000 +v -0.129425 0.755170 -0.012228 +v -0.129371 0.753613 -0.050133 +v -0.127589 0.698619 -0.048067 +v -0.127301 0.430394 0.025820 +v -0.127273 0.686250 -0.038917 +v -0.127111 0.723710 -0.002128 +v -0.126399 0.432028 -0.018453 +v -0.125233 0.720978 -0.054992 +v -0.124077 0.747452 -0.003691 +v -0.124028 0.457149 0.019813 +v -0.123510 0.427585 0.036533 +v -0.123018 0.692307 -0.013055 +v -0.122950 0.680867 -0.023174 +v -0.122920 0.744802 -0.056511 +v -0.122865 0.457335 0.000218 +v -0.121487 0.429752 -0.000697 +v -0.121339 0.430126 -0.033748 +v -0.121197 0.429022 0.010880 +v -0.120991 0.694104 -0.038969 +v -0.119898 0.429774 -0.016498 +v -0.119384 0.426371 0.024236 +v -0.119191 0.691197 -0.029747 +v -0.119122 0.482398 0.022631 +v -0.118618 0.715337 -0.005387 +v -0.117957 0.482609 0.001555 +v -0.117774 0.456200 -0.022925 +v -0.117237 0.714245 -0.048552 +v -0.116773 0.737887 -0.002008 +v -0.115978 0.423704 0.048674 +v -0.115547 0.451561 0.043579 +v -0.115436 0.423350 0.034765 +v -0.115398 0.427765 -0.029958 +v -0.114987 0.508656 0.002825 +v -0.114974 0.453304 -0.000877 +v -0.114592 0.508944 0.024899 +v -0.114570 0.735771 -0.055130 +v -0.114493 0.786332 -0.023623 +v -0.114093 0.786017 -0.038107 +v -0.113189 0.534964 0.003899 +v -0.112765 0.781115 -0.010245 +v -0.112581 0.425469 -0.048263 +v -0.112384 0.481688 -0.020808 +v -0.112202 0.534378 0.025202 +v -0.111954 0.450032 0.019851 +v -0.111440 0.708175 -0.014057 +v -0.111384 0.779549 -0.049833 +v -0.111089 0.708937 -0.039611 +v -0.110840 0.452292 -0.023671 +v -0.110510 0.478973 0.043884 +v -0.110506 0.482934 0.000580 +v -0.110123 0.561290 0.004831 +v -0.109855 0.507848 -0.019403 +v -0.109545 0.419450 0.044998 +v -0.109238 0.728464 -0.005970 +v -0.108753 0.560673 0.026904 +v -0.108495 0.452351 -0.045330 +v -0.108198 0.770988 -0.000778 +v -0.108177 0.534365 -0.018383 +v -0.107982 0.481835 -0.020970 +v -0.107849 0.707196 -0.028368 +v -0.107672 0.728033 -0.048499 +v -0.107488 0.419408 0.057069 +v -0.106413 0.423912 -0.041775 +v -0.105977 0.505680 0.044746 +v -0.105805 0.768667 -0.057061 +v -0.104973 0.587402 0.005742 +v -0.104886 0.560890 -0.017278 +v -0.104072 0.481108 0.020039 +v -0.103634 0.511124 0.001976 +v -0.103500 0.586747 0.027970 +v -0.103403 0.478909 -0.042056 +v -0.103203 0.420910 -0.056600 +v -0.102755 0.720629 -0.014659 +v -0.102448 0.444286 0.036645 +v -0.102301 0.721939 -0.039466 +v -0.102174 0.511142 -0.018890 +v -0.102057 0.415545 0.052890 +v -0.101977 0.532136 0.045751 +v -0.101925 0.758128 -0.000328 +v -0.100873 0.448534 -0.039227 +v -0.100553 0.809108 -0.022228 +v -0.100213 0.505800 -0.040447 +v -0.100079 0.587225 -0.015774 +v -0.099716 0.719202 -0.027832 +v -0.099572 0.809033 -0.038077 +v -0.099387 0.803093 -0.007938 +v -0.099150 0.756805 -0.055951 +v -0.098435 0.532837 -0.039458 +v -0.097773 0.613165 0.006695 +v -0.097300 0.558437 0.046724 +v -0.096874 0.478744 -0.038252 +v -0.096846 0.801257 -0.050004 +v -0.096821 0.418487 -0.050955 +v -0.096768 0.509231 0.020008 +v -0.096167 0.414487 0.064514 +v -0.095858 0.746967 -0.006146 +v -0.095728 0.612251 0.027554 +v -0.095301 0.445113 0.061946 +v -0.095195 0.559750 -0.037501 +v -0.095065 0.790712 0.002572 +v -0.094057 0.537496 0.003271 +v -0.093724 0.747416 -0.048948 +v -0.093413 0.613246 -0.013836 +v -0.093075 0.539316 -0.015742 +v -0.092869 0.411280 0.059165 
+v -0.092652 0.818864 -0.021639 +v -0.092198 0.584706 0.047240 +v -0.091950 0.415459 -0.062895 +v -0.091924 0.475625 0.034968 +v -0.091779 0.788248 -0.057940 +v -0.091558 0.508730 -0.036620 +v -0.091529 0.446003 -0.060144 +v -0.091274 0.819867 -0.037371 +v -0.091066 0.474172 0.061424 +v -0.091025 0.811290 -0.005947 +v -0.090805 0.737613 -0.015487 +v -0.090512 0.586388 -0.035491 +v -0.089768 0.774576 0.002575 +v -0.089396 0.740015 -0.039690 +v -0.088712 0.533913 0.019908 +v -0.088129 0.735745 -0.027131 +v -0.088095 0.474502 -0.057961 +v -0.087684 0.798603 0.003524 +v -0.087566 0.811127 -0.050435 +v -0.087535 0.501602 0.060813 +v -0.087455 0.414249 -0.055731 +v -0.086559 0.637593 0.007387 +v -0.086420 0.773808 -0.057254 +v -0.085883 0.504524 0.033958 +v -0.085565 0.826694 -0.020892 +v -0.085525 0.610154 0.046065 +v -0.085340 0.636799 0.025866 +v -0.085050 0.502517 -0.056333 +v -0.084999 0.761762 -0.005622 +v -0.084980 0.443530 -0.050369 +v -0.084802 0.817598 -0.004287 +v -0.084535 0.781467 0.005424 +v -0.084272 0.528601 0.061220 +v -0.084154 0.538261 -0.033346 +v -0.084137 0.827431 -0.036485 +v -0.083847 0.798087 -0.057906 +v -0.083519 0.530314 -0.055985 +v -0.083474 0.612203 -0.031837 +v -0.082997 0.436976 0.051085 +v -0.082751 0.804879 0.005509 +v -0.082090 0.762855 -0.049848 +v -0.081844 0.637382 -0.010489 +v -0.081554 0.750601 -0.016128 +v -0.081080 0.555614 0.061957 +v -0.080883 0.557815 -0.054658 +v -0.080631 0.765857 -0.001910 +v -0.080383 0.786963 0.006645 +v -0.080244 0.474160 -0.048749 +v -0.080091 0.828325 -0.019796 +v -0.079965 0.818184 -0.050061 +v -0.079720 0.529344 0.032967 +v -0.079651 0.783241 -0.059541 +v -0.079049 0.820117 -0.004125 +v -0.078738 0.748578 -0.026627 +v -0.078574 0.829138 -0.035026 +v -0.078543 0.754209 -0.040312 +v -0.078479 0.561685 -0.001555 +v -0.078479 0.561685 -0.001555 +v -0.078320 0.841782 -0.023023 +v -0.078019 0.754483 -0.015191 +v -0.077899 0.407001 0.073356 +v -0.077860 0.835620 -0.014735 +v -0.077595 0.805458 -0.057250 +v -0.077364 0.469811 0.047605 +v -0.077353 0.769314 0.000452 +v -0.077275 0.809470 0.004760 +v -0.077247 0.836165 -0.024072 +v -0.077113 0.634563 0.043549 +v -0.077081 0.583017 0.062284 +v -0.077011 0.825891 0.000948 +v -0.076859 0.584787 -0.051681 +v -0.076568 0.767781 -0.053151 +v -0.075904 0.758261 -0.040672 +v -0.075887 0.752080 -0.025959 +v -0.075779 0.813978 0.009291 +v -0.075750 0.408431 -0.068458 +v -0.075550 0.820569 -0.048696 +v -0.075279 0.504486 -0.047866 +v -0.075126 0.408386 -0.060159 +v -0.075012 0.835728 -0.036545 +v -0.074835 0.756694 -0.013958 +v -0.074785 0.792448 0.007516 +v -0.074522 0.403357 0.065607 +v -0.074454 0.790607 -0.060365 +v -0.073965 0.660254 0.024297 +v -0.073944 0.794841 0.011770 +v -0.073479 0.754533 -0.025446 +v -0.073155 0.760541 -0.041494 +v -0.073030 0.499446 0.046318 +v -0.072930 0.659537 0.007602 +v -0.072806 0.635673 -0.025665 +v -0.072636 0.562171 -0.028127 +v -0.072636 0.562171 -0.028127 +v -0.072617 0.772068 -0.054888 +v -0.072613 0.806871 -0.057200 +v -0.072591 0.773129 0.001084 +v -0.072498 0.792762 0.046969 +v -0.072495 0.825014 -0.051693 +v -0.072191 0.436325 0.073785 +v -0.072084 0.758840 -0.011907 +v -0.071965 0.796459 0.051723 +v -0.071913 0.438982 -0.067258 +v -0.071824 0.557950 0.021079 +v -0.071824 0.557950 0.021079 +v -0.071283 0.769001 0.008451 +v -0.070892 0.842292 -0.005262 +v -0.070825 0.608474 0.061436 +v -0.070576 0.752680 -0.010140 +v -0.070318 0.809535 -0.061219 +v -0.070313 0.757226 -0.025112 +v -0.070279 0.838720 -0.014577 +v -0.070228 0.794652 -0.061437 +v 
-0.070191 0.837566 -0.007853 +v -0.070111 0.610597 -0.046085 +v -0.070052 0.777122 0.019859 +v -0.069994 0.526766 0.044931 +v -0.069887 0.827688 0.001788 +v -0.069585 0.844895 -0.040498 +v -0.069385 0.796449 0.016823 +v -0.069276 0.659316 0.041396 +v -0.069232 0.814586 0.011994 +v -0.069172 0.534887 -0.045465 +v -0.069172 0.760496 0.022573 +v -0.069063 0.795423 -0.063624 +v -0.068596 0.469342 -0.066043 +v -0.068502 0.750794 -0.023872 +v -0.068463 0.838906 -0.040352 +v -0.068384 0.838068 -0.035972 +v -0.067935 0.761488 -0.042691 +v -0.067842 0.467569 0.073948 +v -0.067745 0.658654 -0.006858 +v -0.067717 0.777130 -0.058067 +v -0.067503 0.753275 0.011290 +v -0.067449 0.554550 0.040764 +v -0.067449 0.554550 0.040764 +v -0.067449 0.554550 0.040764 +v -0.067277 0.742083 -0.006245 +v -0.067104 0.804616 0.043847 +v -0.066938 0.765675 0.035978 +v -0.066735 0.436890 -0.057665 +v -0.066548 0.808735 0.046571 +v -0.066506 0.797181 0.051535 +v -0.066096 0.400797 0.075940 +v -0.065689 0.755865 -0.040591 +v -0.065586 0.498738 -0.065029 +v -0.065546 0.740213 -0.019665 +v -0.065534 0.432017 0.062165 +v -0.065289 0.782371 0.030829 +v -0.065283 0.404044 -0.070764 +v -0.065021 0.848577 -0.021493 +v -0.064973 0.749763 0.023806 +v -0.064483 0.495441 0.074346 +v -0.064225 0.827759 -0.050787 +v -0.064108 0.632776 0.059822 +v -0.064053 0.729090 -0.003019 +v -0.064001 0.680637 0.021965 +v -0.063911 0.750863 0.037802 +v -0.063648 0.773974 0.060821 +v -0.063643 0.527277 -0.065035 +v -0.063514 0.776912 0.068418 +v -0.063440 0.743639 0.017680 +v -0.063439 0.834782 0.006125 +v -0.062895 0.841870 -0.011905 +v -0.062737 0.403022 -0.063035 +v -0.062727 0.839657 0.006553 +v -0.062532 0.728552 -0.015151 +v -0.062187 0.829288 0.004680 +v -0.062177 0.468678 -0.055415 +v -0.062175 0.680598 0.039193 +v -0.062096 0.798730 0.023471 +v -0.061819 0.526375 0.074873 +v -0.061755 0.814852 0.036208 +v -0.061751 0.555205 -0.064468 +v -0.061701 0.815331 0.015348 +v -0.061516 0.742956 -0.034119 +v -0.061419 0.811778 -0.060956 +v -0.061408 0.776735 0.063410 +v -0.061331 0.580156 0.042154 +v -0.061301 0.464135 0.060396 +v -0.061278 0.770773 -0.059299 +v -0.061056 0.553028 0.074448 +v -0.060937 0.713780 0.001005 +v -0.060915 0.633610 -0.037172 +v -0.060691 0.432166 0.076667 +v -0.060638 0.655274 -0.018379 +v -0.060578 0.841445 -0.035164 +v -0.060422 0.812575 0.039694 +v -0.060277 0.795885 -0.065820 +v -0.060176 0.818009 0.039372 +v -0.059859 0.852024 -0.013748 +v -0.059769 0.770533 0.049607 +v -0.059641 0.680447 0.005482 +v -0.059489 0.799922 0.058673 +v -0.059358 0.399618 0.069238 +v -0.059104 0.581048 0.074545 +v -0.058996 0.714101 -0.011198 +v -0.058958 0.582592 -0.062934 +v -0.058822 0.820359 0.030194 +v -0.058677 0.718952 0.018437 +v -0.058604 0.843611 -0.005759 +v -0.058481 0.658450 0.056893 +v -0.058303 0.698300 0.003704 +v -0.058043 0.739717 0.034046 +v -0.058014 0.699758 0.020079 +v -0.057766 0.758100 0.068997 +v -0.057736 0.499526 -0.055483 +v -0.057630 0.752565 0.052937 +v -0.057617 0.729119 -0.028133 +v -0.057329 0.824591 0.023386 +v -0.057281 0.824482 0.032188 +v -0.057253 0.495963 0.059640 +v -0.056999 0.680106 -0.017923 +v -0.056940 0.785680 0.041166 +v -0.056877 0.758363 0.068802 +v -0.056874 0.759519 0.076265 +v -0.056731 0.852869 -0.032813 +v -0.056718 0.681171 -0.006211 +v -0.056608 0.674050 -0.020508 +v -0.056473 0.829137 0.025371 +v -0.056289 0.462924 0.077441 +v -0.056185 0.607195 0.073940 +v -0.056160 0.847994 0.000997 +v -0.056012 0.698850 -0.007806 +v -0.055954 0.700262 0.036965 +v -0.055798 0.829718 0.016396 +v 
-0.055565 0.559635 -0.047182 +v -0.055565 0.559635 -0.047182 +v -0.055447 0.771718 0.046891 +v -0.055308 0.780707 0.071327 +v -0.055206 0.680705 0.054980 +v -0.055189 0.763160 0.053168 +v -0.055163 0.524096 0.059680 +v -0.055082 0.749222 -0.047920 +v -0.055052 0.833810 -0.008226 +v -0.054967 0.825666 0.000775 +v -0.054944 0.687170 -0.019812 +v -0.054865 0.613679 0.042988 +v -0.054840 0.852956 -0.033664 +v -0.054787 0.814055 0.047196 +v -0.054708 0.780099 0.038697 +v -0.054673 0.753695 0.053555 +v -0.054588 0.834842 0.018116 +v -0.054553 0.814489 0.009482 +v -0.054274 0.801019 0.031023 +v -0.054202 0.715198 -0.022954 +v -0.054149 0.608679 -0.056915 +v -0.054097 0.827024 0.027878 +v -0.054074 0.790579 0.028665 +v -0.054067 0.428856 0.069525 +v -0.054023 0.802121 0.018777 +v -0.053794 0.618530 0.046037 +v -0.053443 0.720024 0.035480 +v -0.053380 0.673591 -0.016503 +v -0.053353 0.680740 -0.014266 +v -0.053351 0.553910 0.059216 +v -0.053253 0.493539 0.078574 +v -0.053009 0.816858 0.020365 +v -0.053003 0.680092 -0.028379 +v -0.052781 0.530517 -0.054120 +v -0.052691 0.830905 0.009686 +v -0.052601 0.740759 0.072720 +v -0.052511 0.762005 0.078735 +v -0.052395 0.632398 0.073984 +v -0.052383 0.634080 0.041340 +v -0.052316 0.743652 0.051401 +v -0.052253 0.653751 -0.027152 +v -0.052025 0.845338 -0.008643 +v -0.052012 0.701198 -0.018838 +v -0.051895 0.670827 -0.026339 +v -0.051876 0.398206 -0.072939 +v -0.051767 0.701155 0.053427 +v -0.051602 0.791802 0.052560 +v -0.051566 0.742341 0.080613 +v -0.051463 0.431351 -0.070737 +v -0.051449 0.844532 -0.056104 +v -0.051197 0.807520 0.040527 +v -0.051189 0.832856 -0.049632 +v -0.051186 0.740996 0.052985 +v -0.051171 0.688057 -0.016079 +v -0.051105 0.840529 0.013555 +v -0.050987 0.635445 0.046762 +v -0.050864 0.838626 -0.052340 +v -0.050752 0.580304 0.058207 +v -0.050655 0.689708 -0.026069 +v -0.050531 0.461066 0.069178 +v -0.050525 0.656898 0.038396 +v -0.050464 0.731489 -0.039779 +v -0.050350 0.721446 0.074028 +v -0.050163 0.524922 0.080218 +v -0.049982 0.659645 0.070350 +v -0.049755 0.827676 0.032263 +v -0.049372 0.397062 -0.064898 +v -0.049307 0.669560 -0.023437 +v -0.049109 0.848586 -0.027025 +v -0.049028 0.720909 0.052293 +v -0.048948 0.838561 0.019590 +v -0.048902 0.552284 0.081007 +v -0.048889 0.723037 0.081739 +v -0.048767 0.659649 0.074127 +v -0.048666 0.756137 0.066282 +v -0.048386 0.662985 0.047891 +v -0.048344 0.774680 0.060939 +v -0.048295 0.464797 -0.069801 +v -0.047976 0.703249 0.074618 +v -0.047955 0.822756 0.026719 +v -0.047921 0.813413 -0.061122 +v -0.047891 0.853606 -0.019692 +v -0.047850 0.683879 0.048578 +v -0.047847 0.682587 0.069895 +v -0.047807 0.849157 -0.005826 +v -0.047768 0.788936 0.051726 +v -0.047637 0.682845 0.074928 +v -0.047601 0.733964 0.046194 +v -0.047572 0.716330 -0.033385 +v -0.047528 0.742772 0.067406 +v -0.047485 0.430798 -0.061744 +v -0.047465 0.631473 -0.046437 +v -0.047414 0.690465 -0.023317 +v -0.047289 0.581082 0.080923 +v -0.047271 0.702896 0.047802 +v -0.047193 0.702838 0.069630 +v -0.046860 0.493385 0.069360 +v -0.046692 0.612171 0.055483 +v -0.046658 0.718420 0.046405 +v -0.046550 0.796568 -0.067186 +v -0.046391 0.804269 0.039750 +v -0.046359 0.677729 0.035272 +v -0.046275 0.702465 -0.028112 +v -0.046257 0.836105 0.012824 +v -0.046207 0.721550 0.068441 +v -0.046114 0.767057 0.041837 +v -0.045570 0.772612 -0.063349 +v -0.045424 0.495600 -0.069316 +v -0.045197 0.838130 -0.001337 +v -0.045117 0.607245 0.079756 +v -0.044991 0.847736 -0.005584 +v -0.044856 0.704352 0.082267 +v -0.044800 0.820040 0.025510 +v -0.044520 
0.751911 0.046547 +v -0.044128 0.854411 -0.047014 +v -0.044061 0.833147 0.013449 +v -0.044039 0.760957 0.025585 +v -0.043909 0.852377 -0.020412 +v -0.043809 0.782147 0.035449 +v -0.043417 0.829407 0.005732 +v -0.043404 0.680072 -0.034532 +v -0.043399 0.669753 -0.034606 +v -0.043335 0.631293 0.078311 +v -0.043303 0.683596 0.081548 +v -0.043173 0.463485 -0.059671 +v -0.043121 0.634370 0.051882 +v -0.042990 0.523358 0.070926 +v -0.042980 0.524702 -0.069614 +v -0.042951 0.751141 -0.055726 +v -0.042941 0.688998 -0.035298 +v -0.042926 0.659125 0.080315 +v -0.042869 0.852881 0.003289 +v -0.042715 0.763185 0.062331 +v -0.042653 0.858449 -0.008504 +v -0.042644 0.813795 0.045970 +v -0.042623 0.773816 0.056654 +v -0.042606 0.782519 0.070364 +v -0.042543 0.651744 -0.036466 +v -0.042513 0.784227 0.046238 +v -0.042502 0.801291 0.058969 +v -0.042314 0.753914 0.062571 +v -0.042117 0.744143 0.080074 +v -0.041988 0.700169 0.034419 +v -0.041967 0.793137 0.037567 +v -0.041863 0.805666 0.026570 +v -0.041828 0.818932 0.014634 +v -0.041639 0.748962 0.031344 +v -0.041569 0.763589 0.077399 +v -0.041470 0.668831 -0.031016 +v -0.041376 0.616565 0.051244 +v -0.041314 0.839947 0.019924 +v -0.041252 0.850569 -0.039515 +v -0.041237 0.552790 -0.069510 +v -0.041189 0.857141 -0.031848 +v -0.041092 0.744226 0.060430 +v -0.041076 0.853037 -0.013703 +v -0.041040 0.828417 0.032637 +v -0.040551 0.776052 0.020764 +v -0.040402 0.737133 0.049386 +v -0.040218 0.552689 0.071343 +v -0.039967 0.732796 -0.048282 +v -0.039938 0.657917 0.051166 +v -0.039746 0.853827 -0.048041 +v -0.039712 0.719779 0.034328 +v -0.039692 0.689291 -0.032655 +v -0.039443 0.634241 0.052678 +v -0.039276 0.494404 -0.060422 +v -0.039231 0.847785 -0.004692 +v -0.039147 0.735205 0.034443 +v -0.038821 0.580392 -0.068757 +v -0.038740 0.683340 0.057425 +v -0.038573 0.735655 0.055106 +v -0.038462 0.703525 0.057109 +v -0.038379 0.661468 0.055451 +v -0.038146 0.716843 -0.041781 +v -0.038030 0.720648 0.051202 +v -0.037931 0.856192 -0.026657 +v -0.037923 0.719797 0.055697 +v -0.037770 0.680017 0.052125 +v -0.037759 0.702507 -0.036456 +v -0.037520 0.581055 0.070802 +v -0.037331 0.701605 0.052333 +v -0.037250 0.834073 -0.008132 +v -0.036940 0.862084 -0.022950 +v -0.036688 0.856798 -0.033325 +v -0.036504 0.824363 0.004213 +v -0.036496 0.841146 0.001977 +v -0.036488 0.794850 0.026929 +v -0.036140 0.606745 -0.063681 +v -0.035535 0.525544 -0.060812 +v -0.034745 0.827078 0.030203 +v -0.034645 0.555755 -0.056833 +v -0.034645 0.555755 -0.056833 +v -0.034108 0.843398 -0.018860 +v -0.034095 0.813320 0.042841 +v -0.033727 0.841443 0.017921 +v -0.033298 0.609685 0.068958 +v -0.033097 0.798673 0.055724 +v -0.032835 0.831864 0.009219 +v -0.032824 0.856963 -0.011381 +v -0.032706 0.389513 -0.075301 +v -0.032367 0.782196 0.067185 +v -0.032332 0.629777 -0.053221 +v -0.032154 0.745763 0.076221 +v -0.032104 0.811339 0.016512 +v -0.031992 0.763940 0.074363 +v -0.031873 0.831655 -0.054157 +v -0.031817 0.813334 -0.062856 +v -0.031805 0.850845 -0.003522 +v -0.031751 0.789217 0.018024 +v -0.031659 0.725397 0.075092 +v -0.031380 0.796568 -0.068272 +v -0.031303 0.855826 0.005135 +v -0.031276 0.389127 -0.066551 +v -0.031260 0.855228 -0.036398 +v -0.030981 0.706018 0.073910 +v -0.030887 0.860767 -0.020623 +v -0.030571 0.847981 -0.026280 +v -0.030381 0.678580 -0.042212 +v -0.030340 0.633652 0.066818 +v -0.030319 0.773981 -0.064737 +v -0.030295 0.819662 0.025291 +v -0.030266 0.864007 -0.036268 +v -0.030156 0.687357 -0.042975 +v -0.030060 0.685470 0.072114 +v -0.029946 0.834548 0.013182 +v -0.029693 
0.864228 -0.004529 +v -0.029595 0.669088 -0.042457 +v -0.029478 0.650025 -0.042945 +v -0.029420 0.661091 0.068972 +v -0.029417 0.860279 -0.042092 +v -0.029411 0.843332 0.003241 +v -0.029283 0.668398 -0.038104 +v -0.029254 0.806383 0.037885 +v -0.029126 0.849318 -0.047231 +v -0.028956 0.760710 0.067478 +v -0.028894 0.752853 -0.058830 +v -0.028877 0.743328 0.068972 +v -0.028791 0.804157 0.010007 +v -0.028675 0.661200 0.064060 +v -0.028523 0.790099 0.052148 +v -0.028382 0.845394 -0.001015 +v -0.028316 0.778563 0.061379 +v -0.028216 0.684530 0.065279 +v -0.028150 0.725380 0.068154 +v -0.028082 0.423126 -0.073910 +v -0.028046 0.785434 0.049834 +v -0.027898 0.815969 -0.000773 +v -0.027884 0.688392 -0.039648 +v -0.027882 0.703400 0.066943 +v -0.027878 0.794763 0.041167 +v -0.027731 0.845054 -0.064227 +v -0.027485 0.806323 0.031096 +v -0.027367 0.774502 0.059857 +v -0.027211 0.422276 -0.064272 +v -0.027093 0.754957 0.065267 +v -0.026860 0.733808 -0.052184 +v -0.026776 0.763900 0.065258 +v -0.026692 0.746000 0.063890 +v -0.026663 0.824643 -0.011380 +v -0.026420 0.838944 -0.061464 +v -0.026353 0.704082 0.063093 +v -0.026339 0.682920 0.062534 +v -0.026143 0.614423 0.054956 +v -0.026119 0.862536 -0.028663 +v -0.026017 0.721463 0.061914 +v -0.025956 0.702225 -0.041587 +v -0.025922 0.633168 0.055475 +v -0.025872 0.660532 0.059436 +v -0.025782 0.717077 -0.046153 +v -0.025741 0.738752 0.060551 +v -0.025639 0.840617 -0.005364 +v -0.025584 0.860101 -0.042033 +v -0.025485 0.848647 -0.005124 +v -0.025219 0.830953 -0.021173 +v -0.025165 0.844932 -0.011953 +v -0.025108 0.853389 -0.011599 +v -0.024999 0.460833 -0.072921 +v -0.024615 0.835604 0.000391 +v -0.024587 0.868255 -0.015326 +v -0.024570 0.843256 0.000485 +v -0.024532 0.822612 0.018926 +v -0.024152 0.835730 0.009779 +v -0.023469 0.854530 -0.012003 +v -0.023208 0.458796 -0.062912 +v -0.022959 0.856101 -0.017868 +v -0.022906 0.847437 -0.018118 +v -0.022195 0.852301 -0.042044 +v -0.022125 0.855524 -0.054040 +v -0.021729 0.855748 -0.056161 +v -0.021294 0.836007 0.006756 +v -0.021250 0.835273 -0.028206 +v -0.021056 0.493253 -0.072354 +v -0.021024 0.829234 0.006592 +v -0.020106 0.860004 -0.017229 +v -0.020062 0.490109 -0.063244 +v -0.019555 0.857179 -0.022941 +v -0.019493 0.848587 -0.023082 +v -0.019260 0.871198 -0.024774 +v -0.018122 0.578518 -0.071375 +v -0.018111 0.384013 -0.068644 +v -0.018099 0.862743 -0.034500 +v -0.017752 0.829786 0.015801 +v -0.017726 0.383822 -0.075959 +v -0.017691 0.605414 -0.066938 +v -0.017687 0.864983 -0.044037 +v -0.017287 0.851567 -0.033318 +v -0.017198 0.553696 -0.060785 +v -0.017198 0.553696 -0.060785 +v -0.016948 0.522902 -0.072601 +v -0.016747 0.520228 -0.065628 +v -0.016056 0.629007 -0.057005 +v -0.016046 0.550160 -0.072326 +v -0.015878 0.796642 -0.068458 +v -0.015742 0.813004 -0.064032 +v -0.015734 0.861942 -0.046445 +v -0.015481 0.686578 -0.046262 +v -0.015442 0.830568 -0.055627 +v -0.015178 0.774726 -0.064752 +v -0.014935 0.649127 -0.046944 +v -0.014909 0.668203 -0.045896 +v -0.014891 0.830479 0.013371 +v -0.014832 0.864183 -0.024099 +v -0.014758 0.678217 -0.045903 +v -0.014663 0.667644 -0.041891 +v -0.014648 0.857477 -0.027396 +v -0.014630 0.838558 -0.032274 +v -0.014565 0.823956 0.012944 +v -0.014548 0.849316 -0.027504 +v -0.014506 0.753893 -0.059581 +v -0.014221 0.847601 -0.049401 +v -0.013962 0.417753 -0.067657 +v -0.013800 0.785606 0.050620 +v -0.013789 0.795496 0.042050 +v -0.013701 0.687648 -0.041258 +v -0.013475 0.805937 0.033310 +v -0.013461 0.734573 -0.053080 +v -0.013328 0.774930 0.059711 +v -0.013279 0.682746 
[… several thousand Wavefront OBJ records omitted: vertex positions (`v x y z`) followed by vertex normals (`vn x y z`) for the mesh asset added in this diff …]
0.0485 +vn 0.3261 0.9441 -0.0483 +vn -0.1879 -0.9809 0.0501 +vn -0.1878 -0.9809 0.0502 +vn 0.2788 -0.9591 0.0490 +vn -1.0000 0.0026 -0.0001 +vn -0.9973 -0.0734 0.0037 +vn -0.0651 0.0160 0.9977 +vn 0.9815 -0.0368 -0.1881 +vn 0.9935 -0.0325 0.1089 +vn 0.9520 -0.2927 0.0895 +vn -0.9991 0.0167 -0.0383 +vn -0.9960 0.0005 -0.0899 +vn -0.9959 0.0003 -0.0901 +vn -0.9928 -0.0375 -0.1136 +vn -0.9922 -0.0549 -0.1118 +vn -0.1964 0.3620 -0.9113 +vn -0.1965 0.3620 -0.9112 +vn -0.1851 -0.9807 0.0633 +vn 0.9821 0.1251 0.1411 +vn -0.9815 -0.0368 -0.1881 +vn 0.1391 0.9890 -0.0506 +vn 0.1390 0.9890 -0.0506 +vn -0.2303 -0.9724 0.0380 +vn 0.9853 0.1668 -0.0368 +vn 0.9877 0.0569 0.1459 +vn 0.8765 0.0524 -0.4785 +vn 0.9465 -0.1638 -0.2779 +vn 0.9398 -0.2599 0.2217 +vn 0.8926 0.0003 -0.4508 +vn 0.8912 -0.1697 -0.4206 +vn 0.8250 -0.5255 0.2079 +vn 0.7363 -0.5787 0.3505 +vn 0.9046 -0.3733 -0.2057 +vn 0.7285 -0.2788 0.6258 +vn 0.7431 0.2912 0.6025 +vn 0.9919 0.0463 0.1183 +vn 0.9567 -0.2119 0.1998 +vn 0.8901 -0.1081 -0.4428 +vn 0.8623 0.4342 0.2606 +vn 0.7272 -0.4501 0.5182 +vn 0.6160 -0.6973 -0.3666 +vn -0.1959 -0.1101 -0.9744 +vn -0.0902 -0.7204 -0.6877 +vn 0.4223 -0.5800 -0.6966 +vn 0.4559 -0.0292 -0.8895 +vn -0.2554 0.3209 -0.9120 +vn 0.4442 0.2248 -0.8673 +vn -0.2258 0.5246 -0.8209 +vn 0.4614 0.2585 -0.8487 +vn 0.5863 0.0782 -0.8063 +vn -0.2158 0.4786 -0.8511 +vn 0.7320 -0.0880 -0.6756 +vn 0.7162 -0.2066 -0.6666 +vn -0.2027 -0.3151 -0.9272 +vn -0.1996 0.1106 -0.9736 +vn 0.2263 -0.5607 -0.7965 +vn -0.2830 -0.5786 -0.7649 +vn -0.1204 -0.8461 -0.5192 +vn -0.4280 -0.7683 -0.4759 +vn -0.4554 -0.1615 0.8755 +vn -0.4243 -0.3661 0.8282 +vn -0.5482 -0.6188 0.5627 +vn -0.9164 -0.2268 0.3298 +vn -0.9714 0.0106 0.2373 +vn -0.6134 -0.2461 0.7505 +vn -0.5413 -0.2805 0.7927 +vn -0.9152 0.1500 0.3741 +vn -0.9062 0.1224 0.4048 +vn -0.4090 -0.3069 0.8594 +vn -0.9745 0.0012 0.2245 +vn -0.9706 -0.1903 0.1476 +vn -0.4408 -0.0314 0.8971 +vn -0.3885 -0.2213 0.8945 +vn -0.9105 -0.3826 0.1567 +vn -0.6354 -0.0364 0.7713 +vn -0.7358 -0.6678 0.1124 +vn -0.5389 -0.3718 0.7559 +vn -0.5916 -0.7871 -0.1746 +vn -0.7336 -0.5756 -0.3612 +vn -0.8388 -0.2134 -0.5009 +vn -0.8932 0.2369 -0.3821 +vn -0.8682 0.4433 -0.2230 +vn -0.8572 0.4641 -0.2233 +vn -0.9131 0.1757 -0.3680 +vn -0.8887 -0.2412 -0.3900 +vn -0.4972 -0.8452 -0.1959 +vn -0.3711 -0.9020 0.2206 +vn 0.5472 0.0227 0.8367 +vn 0.9838 0.0791 0.1606 +vn 0.3298 -0.4249 0.8430 +vn 0.3096 -0.6486 0.6954 +vn 0.3602 -0.6000 0.7143 +vn 0.4165 -0.2597 0.8713 +vn 0.3536 0.2623 0.8979 +vn 0.3324 0.5100 0.7933 +vn 0.3411 0.1698 0.9246 +vn -0.6210 -0.0983 0.7776 +vn -0.9964 -0.0111 0.0840 +vn -0.9939 0.1048 -0.0355 +vn -0.9974 -0.0438 0.0579 +vn -0.9760 -0.1415 0.1658 +vn -0.4698 -0.4893 0.7348 +vn -0.4666 -0.7079 0.5302 +vn -0.9536 -0.2988 0.0359 +vn -0.5183 -0.6772 0.5222 +vn -0.8750 -0.4792 0.0691 +vn -0.8691 -0.4490 0.2076 +vn -0.6158 -0.4756 0.6282 +vn -0.9330 -0.2076 0.2941 +vn -0.5927 -0.1179 0.7967 +vn -0.4301 0.2416 0.8699 +vn -0.9316 0.1394 0.3357 +vn -0.4423 0.4173 0.7939 +vn -0.8910 0.3659 0.2686 +vn -0.7272 -0.4326 0.5330 +vn -0.4949 0.2094 0.8434 +vn 0.4060 -0.0875 0.9097 +vn 0.2470 -0.1581 0.9560 +vn 0.8937 -0.1980 0.4027 +vn 0.3652 -0.5521 0.7496 +vn 0.9338 -0.2381 0.2670 +vn 0.4016 -0.4842 0.7773 +vn 0.3335 -0.6432 0.6892 +vn 0.9323 -0.1381 0.3342 +vn 0.8712 -0.0214 0.4904 +vn 0.1727 -0.5531 0.8150 +vn 0.0184 -0.3702 0.9288 +vn 0.8220 0.0087 0.5695 +vn 0.8526 0.0481 0.5204 +vn 0.0785 -0.0409 0.9961 +vn 0.9030 -0.0588 0.4255 +vn 0.2538 0.1518 0.9553 +vn 0.2505 0.1962 0.9480 +vn 0.8586 
-0.2592 0.4423 +vn 0.6597 -0.5560 0.5056 +vn 0.1002 -0.3290 0.9390 +vn 0.2339 -0.6484 -0.7245 +vn 0.5263 -0.1031 -0.8440 +vn -0.2303 -0.0303 -0.9727 +vn -0.2473 -0.5004 -0.8297 +vn 0.6399 0.1562 -0.7524 +vn -0.1301 0.3205 -0.9383 +vn 0.5875 0.4655 -0.6620 +vn -0.0952 0.5710 -0.8154 +vn 0.5334 0.5164 -0.6700 +vn -0.0315 0.4490 -0.8930 +vn 0.6299 0.3180 -0.7086 +vn 0.1298 0.1449 -0.9809 +vn 0.6863 -0.0333 -0.7266 +vn 0.1641 -0.1464 -0.9755 +vn 0.5894 -0.3815 -0.7121 +vn -0.0328 -0.3937 -0.9187 +vn 0.4758 -0.5705 -0.6694 +vn -0.2076 -0.4248 -0.8812 +vn 0.2379 -0.8977 -0.3708 +vn -0.1546 -0.9139 -0.3753 +vn 0.4906 -0.8650 0.1055 +vn 0.8248 -0.5188 -0.2247 +vn 0.9156 -0.2793 -0.2892 +vn 0.9694 0.0295 -0.2436 +vn 0.9514 0.2500 -0.1798 +vn -0.0673 -0.7102 0.7007 +vn 0.9209 0.3598 -0.1498 +vn 0.9590 0.2204 -0.1784 +vn 0.9751 -0.0155 -0.2212 +vn 0.9453 -0.1813 -0.2713 +vn 0.6107 -0.7808 -0.1318 +vn -0.8576 -0.1974 -0.4749 +vn -0.8356 0.0142 -0.5492 +vn -0.7845 0.2380 -0.5727 +vn -0.7504 0.3227 -0.5769 +vn -0.7315 0.0895 -0.6760 +vn -0.6716 -0.2251 -0.7059 +vn -0.6739 -0.2795 -0.6839 +vn -0.8190 -0.1862 -0.5427 +vn -0.8436 -0.0227 -0.5365 +vn -0.7302 -0.5923 -0.3404 +vn 0.3616 -0.5384 -0.7612 +vn -0.7791 -0.6090 0.1489 +vn 0.5643 -0.7224 -0.3997 +vn -0.4217 -0.8428 0.3345 +vn 0.9039 0.2192 0.3673 +vn 0.8666 0.3643 0.3411 +vn 0.8184 -0.0873 0.5679 +vn 0.0483 0.0137 0.9987 +vn -0.1872 0.1904 0.9637 +vn -0.0124 -0.0539 0.9985 +vn 0.9823 -0.0321 0.1846 +vn 0.9813 0.0784 0.1758 +vn 0.9469 -0.2891 0.1409 +vn 0.6831 -0.7277 0.0615 +vn 0.1794 -0.9837 0.0088 +vn -0.1568 -0.9808 0.1156 +vn -0.2284 -0.8517 0.4716 +vn -0.2399 -0.6403 0.7297 +vn 0.1437 -0.5382 0.8305 +vn 0.8898 0.0005 0.4564 +vn 0.6787 -0.7214 0.1376 +vn -0.2012 -0.5117 -0.8353 +vn -0.8272 -0.5574 -0.0706 +vn 0.0222 -0.8610 0.5081 +vn -0.8117 0.5127 0.2799 +vn -0.8168 0.3344 0.4702 +vn -0.8336 0.5230 0.1776 +vn -0.1940 0.1718 0.9658 +vn 0.1925 0.3109 0.9307 +vn 0.0492 0.1895 0.9806 +vn 0.1526 -0.2792 0.9480 +vn -0.7378 0.0349 0.6741 +vn -0.7862 -0.6151 -0.0587 +vn -0.9875 -0.0528 0.1484 +vn -0.9652 -0.1099 0.2374 +vn -0.5848 -0.6086 0.5363 +vn -0.9348 -0.1234 0.3329 +vn 0.1279 -0.6889 0.7135 +vn 0.5351 -0.8170 -0.2151 +vn 0.1754 -0.9315 0.3186 +vn -0.9714 0.0105 0.2371 +vn 0.0245 -0.6902 0.7232 +vn 0.0469 -0.8890 0.4554 +vn 0.2754 -0.8852 0.3749 +vn 0.2475 -0.8976 0.3648 +vn -0.1095 -0.7516 0.6504 +vn -0.7428 -0.0382 -0.6684 +vn 0.8279 0.1073 -0.5506 +vn -0.7524 0.2382 -0.6141 +vn 0.8382 0.3766 -0.3944 +vn 0.7733 0.5121 -0.3738 +vn -0.7614 0.4550 -0.4618 +vn -0.4263 -0.5573 0.7125 +vn -0.4773 -0.7191 0.5051 +vn -0.7408 -0.2045 0.6399 +vn -0.8355 0.0649 0.5456 +vn 0.7656 -0.4954 0.4104 +vn 0.7676 -0.3416 0.5424 +vn 0.4550 -0.5732 0.6815 +vn 0.3579 -0.7825 0.5095 +vn 0.4450 -0.7982 0.4060 +vn -0.2754 -0.5287 0.8029 +vn -0.8005 0.1404 0.5826 +vn 0.4817 -0.7451 0.4613 +vn 0.3893 -0.7736 0.4999 +vn -0.2087 -0.5500 0.8086 +vn -0.0156 -0.4928 0.8700 +vn -0.1269 -0.5796 0.8049 +vn 0.2915 -0.8776 0.3805 +vn 0.2179 -0.9071 0.3601 +vn 0.2945 -0.8612 0.4142 +vn 0.1518 -0.9049 0.3977 +vn 0.1196 -0.4360 0.8920 +vn 0.2146 -0.8691 0.4456 +vn 0.1342 -0.8571 0.4973 +vn -0.1107 -0.8223 0.5582 +vn -0.2015 -0.8169 0.5404 +vn -0.0890 -0.4392 0.8940 +vn -0.7274 0.2159 0.6514 +vn -0.3668 0.4257 0.8272 +vn -0.5719 0.3305 0.7508 +vn -0.2257 0.3076 0.9244 +vn -0.1798 0.0829 0.9802 +vn 0.8630 -0.3004 0.4061 +vn 0.1597 -0.8168 0.5543 +vn 0.8412 -0.2959 0.4526 +vn 0.1588 -0.8900 0.4275 +vn 0.7609 -0.2074 0.6148 +vn 0.1038 -0.8823 0.4590 +vn -0.2187 -0.7173 0.6615 +vn -0.0362 
-0.7917 0.6098 +vn 0.6046 -0.1541 0.7815 +vn 0.3915 -0.1145 0.9130 +vn -0.2815 -0.7835 0.5539 +vn 0.2234 -0.1649 0.9607 +vn 0.0939 -0.2633 0.9601 +vn -0.2473 -0.8288 0.5019 +vn -0.0119 -0.4006 0.9162 +vn -0.1268 -0.8336 0.5376 +vn -0.0747 -0.4426 0.8936 +vn -0.1136 -0.7784 0.6174 +vn 0.1239 -0.7911 0.5990 +vn 0.0573 -0.4555 0.8884 +vn -0.1107 -0.8282 0.5494 +vn 0.1997 -0.8235 0.5311 +vn -0.0284 -0.8641 0.5025 +vn -0.2593 -0.8801 0.3978 +vn -0.4005 -0.8582 0.3210 +vn -0.5089 -0.8111 0.2884 +vn -0.4280 -0.8699 0.2451 +vn -0.2912 -0.9382 0.1869 +vn -0.2400 -0.9231 0.3006 +vn -0.1770 -0.8301 0.5288 +vn -0.5546 -0.8179 0.1534 +vn -0.1573 -0.8484 0.5054 +vn 0.0013 -0.8784 0.4780 +vn -0.4300 -0.9027 -0.0132 +vn -0.8011 -0.5982 -0.0207 +vn -0.7284 -0.6201 -0.2914 +vn -0.8884 -0.1722 -0.4256 +vn -0.9571 -0.1852 -0.2229 +vn -0.8349 -0.1586 -0.5271 +vn -0.9439 -0.0144 -0.3299 +vn -0.2930 0.1160 -0.9490 +vn -0.7869 0.4524 -0.4197 +vn -0.9126 0.0927 -0.3982 +vn -0.6874 -0.2193 -0.6924 +vn -0.0846 0.4734 -0.8768 +vn -0.4592 0.8283 -0.3210 +vn 0.2531 0.7949 -0.5514 +vn -0.1186 0.9856 -0.1202 +vn 0.0896 0.9827 0.1618 +vn 0.4489 0.8933 -0.0231 +vn 0.3448 0.7927 0.5028 +vn 0.6267 0.7169 0.3055 +vn 0.8008 0.3633 0.4761 +vn 0.5205 0.4990 0.6929 +vn 0.6603 -0.1770 0.7298 +vn 0.4724 -0.0240 0.8810 +vn 0.4778 -0.3903 0.7870 +vn 0.3430 -0.3095 0.8869 +vn 0.2605 -0.5987 0.7574 +vn 0.1872 -0.5132 0.8376 +vn 0.1859 -0.7629 0.6192 +vn 0.0426 -0.7268 0.6855 +vn 0.1337 0.0240 0.9907 +vn -0.0968 -0.0036 0.9953 +vn 0.3094 0.1298 0.9420 +vn 0.4273 0.2800 0.8597 +vn 0.5148 0.3474 0.7838 +vn 0.6129 0.3520 0.7074 +vn 0.7470 0.3395 0.5716 +vn 0.8621 0.3251 0.3887 +vn 0.9201 0.2368 0.3119 +vn 0.9750 0.0783 0.2082 +vn -0.3866 -0.9139 0.1239 +vn -0.4789 -0.0830 -0.8739 +vn -0.1884 0.4058 -0.8943 +vn 0.1942 0.2857 -0.9384 +vn -0.1807 -0.1704 -0.9687 +vn -0.0921 -0.5243 -0.8465 +vn -0.3507 -0.4700 -0.8100 +vn -0.4607 -0.1477 -0.8752 +vn -0.2372 -0.2150 -0.9474 +vn 0.1042 -0.8326 -0.5440 +vn -0.1290 -0.8800 -0.4571 +vn 0.2005 -0.9059 -0.3730 +vn 0.0267 -0.9693 -0.2444 +vn 0.2302 -0.9726 -0.0313 +vn 0.3938 -0.9017 -0.1785 +vn 0.5394 -0.8397 -0.0630 +vn 0.3992 -0.8992 0.1793 +vn 0.6204 -0.6752 0.3990 +vn 0.7276 -0.6785 0.1014 +vn 0.8826 -0.4057 0.2376 +vn 0.7517 -0.4201 0.5084 +vn 0.9880 -0.0617 0.1415 +vn 0.9009 -0.0326 0.4328 +vn 0.9596 0.2768 0.0507 +vn 0.8795 0.3883 0.2751 +vn 0.8698 0.4537 -0.1939 +vn 0.8114 0.5827 0.0462 +vn 0.6365 0.5406 -0.5501 +vn 0.5373 0.7518 -0.3822 +vn 0.4837 0.5092 -0.7119 +vn 0.3573 0.7124 -0.6040 +vn 0.4608 0.5540 -0.6933 +vn 0.2359 0.7023 -0.6717 +vn 0.5880 0.4404 -0.6785 +vn 0.4816 0.1480 -0.8638 +vn 0.2107 -0.2034 -0.9562 +vn 0.1299 -0.2349 -0.9633 +vn 0.2196 -0.5332 -0.8170 +vn 0.3452 -0.7340 -0.5849 +vn 0.4216 -0.7473 -0.5136 +vn 0.5948 -0.7247 -0.3479 +vn 0.6421 -0.7127 -0.2825 +vn 0.7440 -0.6410 -0.1887 +vn 0.9405 -0.3303 -0.0801 +vn 0.9867 -0.1124 -0.1176 +vn 0.9729 0.0862 -0.2146 +vn 0.8627 0.2647 -0.4309 +vn 0.6804 0.2956 -0.6706 +vn 0.5741 0.3442 -0.7429 +vn 0.7350 -0.0267 -0.6775 +vn 0.5503 -0.2974 -0.7802 +vn 0.7060 -0.3237 -0.6299 +vn 0.6922 -0.0151 -0.7216 +vn 0.8414 -0.1715 -0.5125 +vn 0.6026 -0.3152 -0.7331 +vn 0.8816 -0.1850 -0.4343 +vn 0.7037 -0.2077 -0.6794 +vn 0.8387 -0.1712 -0.5170 +vn 0.5969 -0.0939 -0.7968 +vn 0.6727 -0.4705 -0.5710 +vn 0.5134 -0.1850 -0.8380 +vn 0.6714 -0.4698 -0.5731 +vn 0.5751 -0.1396 -0.8061 +vn 0.7527 -0.3850 -0.5341 +vn 0.7511 -0.0674 -0.6567 +vn 0.6802 -0.4372 -0.5884 +vn 0.8543 -0.1202 -0.5057 +vn 0.6511 -0.5197 -0.5531 +vn 0.8871 -0.2251 -0.4029 +vn 
0.6317 -0.4106 -0.6575 +vn 0.8662 -0.2257 -0.4458 +vn 0.5770 -0.1889 -0.7946 +vn 0.8065 -0.1597 -0.5693 +vn 0.8651 -0.2588 -0.4296 +vn 0.7349 0.0913 -0.6721 +vn 0.8906 -0.1938 -0.4114 +vn 0.7586 0.2300 -0.6096 +vn 0.8132 -0.2423 -0.5291 +vn 0.6909 -0.0220 -0.7226 +vn 0.7949 -0.2753 -0.5407 +vn 0.5662 -0.2135 -0.7961 +vn 0.6132 -0.1918 -0.7663 +vn 0.8368 -0.2394 -0.4924 +vn 0.9198 -0.2636 -0.2906 +vn 0.4936 -0.1493 -0.8568 +vn 0.8949 -0.2845 -0.3439 +vn -0.7316 -0.3665 -0.5749 +vn -0.8130 -0.3833 -0.4383 +vn -0.8176 -0.3932 -0.4206 +vn -0.7816 -0.4241 -0.4574 +vn -0.7551 -0.3720 -0.5398 +vn -0.8123 -0.4055 -0.4191 +vn -0.5651 -0.3310 -0.7557 +vn -0.7186 -0.3240 -0.6154 +vn -0.4702 -0.2828 -0.8360 +vn -0.6044 -0.1884 -0.7741 +vn -0.8082 -0.2008 -0.5536 +vn -0.8261 -0.2886 -0.4840 +vn -0.6716 -0.1141 -0.7321 +vn -0.5616 -0.1156 -0.8193 +vn -0.5814 -0.2140 -0.7849 +vn -0.7157 -0.2626 -0.6472 +vn -0.7489 -0.3553 -0.5593 +vn -0.7538 -0.3699 -0.5431 +vn -0.6427 -0.0912 -0.7607 +vn -0.6178 -0.2604 -0.7419 +vn -0.6317 -0.3836 -0.6737 +vn -0.7921 -0.3861 -0.4727 +vn -0.8123 -0.2457 -0.5289 +vn -0.8100 -0.2941 -0.5074 +vn -0.8027 -0.0123 -0.5962 +vn -0.7474 0.0038 -0.6644 +vn -0.8061 0.0098 -0.5917 +vn -0.8967 0.0040 -0.4427 +vn -0.8024 -0.1384 -0.5805 +vn -0.5862 -0.2210 -0.7794 +vn -0.5079 -0.1432 -0.8495 +vn -0.5294 -0.3168 -0.7870 +vn -0.5421 -0.4666 -0.6989 +vn -0.7032 -0.3710 -0.6066 +vn -0.6754 -0.3295 -0.6597 +vn -0.5671 -0.2418 -0.7874 +vn -0.6820 -0.2542 -0.6857 +vn -0.6311 -0.7726 -0.0699 +vn -0.7155 -0.5894 -0.3751 +vn -0.4655 -0.5934 -0.6566 +vn -0.3312 -0.8300 -0.4488 +vn -0.8446 -0.4925 -0.2099 +vn -0.7277 -0.6830 0.0624 +vn -0.8789 -0.4258 -0.2149 +vn -0.8667 -0.4988 0.0028 +vn -0.9760 -0.2168 0.0202 +vn -0.9289 -0.2777 -0.2449 +vn -0.9899 0.0865 -0.1125 +vn -0.9292 0.0395 -0.3674 +vn -0.8053 0.1996 -0.5582 +vn -0.8331 0.4023 -0.3797 +vn -0.7121 0.4581 -0.5320 +vn -0.7430 0.2685 -0.6130 +vn -0.7789 0.4620 -0.4240 +vn -0.8436 0.3072 -0.4403 +vn -0.7698 0.1597 -0.6180 +vn -0.6051 0.4122 -0.6812 +vn -0.0729 0.1416 -0.9872 +vn -0.3647 -0.0472 -0.9299 +vn 0.0654 0.1145 -0.9913 +vn -0.2139 -0.0061 -0.9768 +vn 0.0119 -0.2204 -0.9753 +vn -0.2450 -0.2800 -0.9282 +vn -0.0299 -0.6131 -0.7895 +vn -0.2632 -0.5602 -0.7854 +vn -0.4357 -0.6238 -0.6488 +vn -0.1633 -0.7874 -0.5944 +vn -0.1981 -0.8376 -0.5092 +vn -0.4569 -0.6309 -0.6271 +vn 0.0118 -0.9254 -0.3789 +vn 0.0782 -0.8527 -0.5165 +vn 0.2832 -0.8662 -0.4117 +vn 0.1612 -0.9528 -0.2573 +vn 0.0162 -0.9998 -0.0127 +vn -0.1558 -0.9582 -0.2398 +vn -0.3258 -0.8353 0.4429 +vn -0.4961 -0.8446 0.2012 +vn -0.3688 -0.7814 0.5035 +vn -0.5592 -0.7705 0.3060 +vn -0.5933 -0.5506 0.5873 +vn -0.7790 -0.5474 0.3058 +vn -0.9330 -0.1297 0.3356 +vn -0.7884 -0.0554 0.6127 +vn -0.9640 0.1833 0.1927 +vn -0.8307 0.2806 0.4809 +vn -0.6841 0.7187 0.1244 +vn -0.8072 0.5745 -0.1354 +vn -0.5538 0.8247 -0.1148 +vn -0.6703 0.6576 -0.3438 +vn -0.3984 0.8807 -0.2561 +vn -0.6383 0.6677 -0.3830 +vn 0.0921 0.8369 -0.5396 +vn -0.2650 0.6829 -0.6807 +vn 0.4650 0.5173 -0.7185 +vn 0.2472 0.3570 -0.9008 +vn 0.2950 0.2445 -0.9237 +vn 0.4920 0.3711 -0.7876 +vn 0.5500 -0.0557 -0.8333 +vn 0.2607 -0.1358 -0.9558 +vn 0.4863 -0.6133 -0.6224 +vn 0.2166 -0.6338 -0.7425 +vn -0.5522 0.8337 -0.0046 +vn -0.2392 0.9421 0.2349 +vn -0.3442 0.8023 0.4877 +vn -0.6129 0.7331 0.2949 +vn -0.4744 0.4059 0.7811 +vn -0.7584 0.3268 0.5639 +vn -0.4931 0.0459 0.8688 +vn -0.6058 -0.0742 0.7921 +vn -0.2796 -0.5278 0.8021 +vn -0.2791 -0.5861 0.7607 +vn -0.1145 -0.7591 0.6409 +vn -0.2836 -0.7727 0.5679 +vn -0.0981 
-0.7270 0.6796 +vn -0.3210 -0.8071 0.4956 +vn 0.1649 -0.9292 0.3308 +vn -0.1278 -0.9652 0.2282 +vn 0.3423 -0.9395 0.0107 +vn 0.1142 -0.9844 -0.1341 +vn 0.6057 -0.7777 -0.1681 +vn 0.5136 -0.7664 -0.3857 +vn 0.8625 -0.4065 -0.3014 +vn 0.8466 -0.3431 -0.4069 +vn 0.9100 0.1167 -0.3978 +vn 0.8740 0.0849 -0.4785 +vn 0.7585 0.4566 -0.4651 +vn 0.6722 0.3025 -0.6758 +vn 0.5918 0.6537 -0.4716 +vn 0.3149 0.4885 -0.8138 +vn 0.3275 0.8855 -0.3297 +vn -0.0969 0.6838 -0.7232 +vn 0.0126 0.9993 -0.0343 +vn -0.3121 0.8758 -0.3682 +vn -0.0146 -0.9637 0.2666 +vn 0.1643 -0.8199 0.5485 +vn 0.3259 -0.5699 0.7543 +vn 0.4489 -0.3411 0.8259 +vn 0.5662 -0.0102 0.8242 +vn 0.6213 0.4886 0.6125 +vn 0.5133 0.7568 0.4047 +vn 0.2017 0.9791 0.0249 +vn -0.0336 0.9751 -0.2190 +vn -0.3047 0.8771 -0.3714 +vn -0.6881 0.4819 -0.5424 +vn -0.8222 0.0710 -0.5647 +vn -0.8414 -0.0282 -0.5397 +vn -0.8692 -0.3005 -0.3927 +vn -0.6510 -0.7448 -0.1465 +vn -0.3478 -0.9373 -0.0220 +vn -0.3750 0.9213 0.1025 +vn -0.4964 0.7948 0.3492 +vn -0.6412 0.3597 0.6779 +vn -0.6360 -0.0026 0.7717 +vn -0.4417 -0.5426 0.7145 +vn -0.2369 -0.7735 0.5879 +vn -0.1930 -0.7816 0.5931 +vn 0.1480 -0.9694 0.1958 +vn 0.2843 -0.9536 -0.0989 +vn 0.4588 -0.8374 -0.2969 +vn 0.7094 -0.5283 -0.4665 +vn 0.7975 0.0437 -0.6017 +vn 0.6559 0.4463 -0.6088 +vn 0.5753 0.6091 -0.5459 +vn 0.3118 0.8809 -0.3562 +vn -0.1177 0.9888 -0.0923 +vn -0.4854 0.4624 -0.7420 +vn -0.0592 0.8320 -0.5517 +vn 0.1621 0.8940 -0.4176 +vn 0.3820 0.9077 -0.1737 +vn 0.6846 0.6837 0.2525 +vn 0.7545 0.4559 0.4721 +vn 0.7322 -0.0079 0.6810 +vn 0.5756 -0.3869 0.7204 +vn 0.4473 -0.6254 0.6394 +vn 0.2671 -0.8789 0.3952 +vn 0.1078 -0.9889 0.1018 +vn -0.1449 -0.9796 -0.1392 +vn -0.4033 -0.8606 -0.3111 +vn -0.6490 -0.4078 -0.6423 +vn -0.6698 -0.0760 -0.7386 +vn -0.6743 0.0028 -0.7385 +vn 0.1177 -0.2937 -0.9486 +vn -0.1096 -0.2400 -0.9646 +vn -0.1656 -0.2051 -0.9646 +vn -0.1061 -0.4166 -0.9029 +vn -0.1359 -0.1626 -0.9773 +vn -0.2285 -0.4563 -0.8600 +vn -0.1183 -0.0260 -0.9926 +vn -0.0837 0.0760 -0.9936 +vn 0.0320 0.1008 -0.9944 +vn -0.0250 0.1685 -0.9854 +vn 0.1290 0.2018 -0.9709 +vn -0.1170 0.1641 -0.9795 +vn 0.1964 0.2898 -0.9367 +vn 0.0156 0.2660 -0.9639 +vn 0.0225 0.3559 -0.9343 +vn 0.1828 0.3066 -0.9341 +vn -0.1757 0.2289 -0.9575 +vn -0.1545 0.3464 -0.9253 +vn -0.1120 0.4435 -0.8893 +vn -0.0091 0.4356 -0.9001 +vn 0.1623 0.3779 -0.9115 +vn 0.1783 0.5587 -0.8100 +vn -0.0044 0.5526 -0.8334 +vn 0.0208 0.7646 -0.6441 +vn 0.2106 0.7426 -0.6357 +vn -0.1434 0.7680 -0.6242 +vn -0.1218 0.5573 -0.8214 +vn 0.0441 0.9191 -0.3916 +vn 0.2437 0.9016 -0.3574 +vn -0.1402 0.9088 -0.3930 +vn -0.1212 0.9781 -0.1692 +vn 0.0808 0.9851 -0.1520 +vn 0.2592 0.9559 -0.1379 +vn 0.0162 -0.4514 -0.8922 +vn 0.4245 -0.4130 -0.8057 +vn -0.3211 -0.3904 -0.8628 +vn 0.1111 -0.5193 -0.8473 +vn -0.4286 -0.4410 -0.7885 +vn 0.3871 -0.5020 -0.7734 +vn -0.3391 -0.5902 -0.7326 +vn 0.0706 -0.6186 -0.7825 +vn 0.2137 -0.7221 -0.6579 +vn -0.2399 -0.9661 -0.0956 +vn 0.0715 -0.9947 0.0737 +vn 0.1365 -0.9835 0.1188 +vn -0.0294 -0.7902 0.6122 +vn 0.0627 -0.7605 0.6463 +vn 0.0878 -0.9316 0.3528 +vn -0.2149 -0.9515 -0.2202 +vn -0.4761 -0.6306 -0.6130 +vn -0.5665 -0.3157 -0.7612 +vn -0.4436 -0.3407 -0.8289 +vn 0.5559 -0.3759 -0.7414 +vn 0.3889 -0.5979 -0.7009 +vn 0.0820 -0.8403 -0.5359 +vn -0.1162 -0.9884 0.0977 +vn 0.0966 -0.7245 0.6825 +vn -0.2503 -0.8477 0.4678 +vn -0.1090 -0.4332 -0.8947 +vn 0.4604 -0.0910 -0.8830 +vn 0.1770 -0.1657 -0.9702 +vn 0.1771 -0.0528 -0.9828 +vn 0.2969 0.0300 -0.9544 +vn -0.0519 0.0617 -0.9967 +vn 0.3787 0.2284 -0.8969 +vn -0.2819 
0.2507 -0.9261 +vn -0.3511 0.2534 -0.9014 +vn -0.3082 0.3976 -0.8642 +vn -0.3149 0.5577 -0.7680 +vn -0.3231 0.7513 -0.5754 +vn -0.3257 0.8813 -0.3424 +vn 0.3851 0.2298 -0.8938 +vn 0.4122 0.3258 -0.8508 +vn 0.6534 0.2306 -0.7210 +vn 0.6229 0.1151 -0.7738 +vn 0.6679 0.3270 -0.6685 +vn 0.4383 0.4749 -0.7631 +vn 0.4468 0.6702 -0.5926 +vn 0.7149 0.5235 -0.4635 +vn 0.4497 0.8387 -0.3072 +vn 0.6901 0.6827 -0.2404 +vn 0.3816 0.9050 -0.1880 +vn 0.6179 0.7387 -0.2694 +vn -0.2669 0.9416 -0.2052 +vn -0.8833 0.2342 -0.4061 +vn -0.6748 0.2686 -0.6874 +vn -0.9376 0.2759 -0.2117 +vn -0.9778 0.0796 0.1939 +vn -0.6779 -0.2972 0.6724 +vn -0.4044 -0.5118 0.7579 +vn -0.3443 -0.7171 0.6060 +vn -0.5775 0.3219 -0.7502 +vn -0.5193 0.1629 -0.8389 +vn -0.7277 0.0729 -0.6820 +vn -0.7972 0.2299 -0.5582 +vn -0.8781 -0.0282 -0.4776 +vn -0.9201 0.1218 -0.3723 +vn -0.9281 0.0188 -0.3720 +vn -0.9437 0.0338 -0.3292 +vn -0.9453 -0.1463 0.2915 +vn -0.9211 -0.3841 -0.0639 +vn -0.6047 0.4600 -0.6502 +vn -0.8382 0.3165 -0.4441 +vn -0.8324 0.4617 -0.3065 +vn -0.6142 0.6495 -0.4482 +vn -0.9306 0.2182 -0.2939 +vn -0.9238 0.3011 -0.2365 +vn -0.9581 0.2380 -0.1595 +vn -0.9530 0.1652 -0.2541 +vn -0.9802 -0.1742 -0.0946 +vn -0.9955 -0.0075 0.0942 +vn -0.7608 0.5976 -0.2530 +vn -0.5551 0.7840 -0.2779 +vn -0.9061 0.3805 -0.1851 +vn -0.9662 0.2525 -0.0530 +vn -0.9923 0.0118 0.1231 +vn -0.7112 0.6666 -0.2232 +vn -0.4537 0.8518 -0.2619 +vn -0.9034 0.4281 -0.0234 +vn -0.9406 0.3026 0.1539 +vn -0.9584 0.1308 0.2538 +vn 0.8110 0.3055 -0.4990 +vn 0.8558 0.2943 -0.4255 +vn 0.8006 0.0154 -0.5989 +vn 0.8250 0.0869 -0.5584 +vn 0.9086 0.0701 -0.4117 +vn 0.8597 0.0886 -0.5031 +vn 0.8713 0.3525 -0.3414 +vn 0.8434 0.1688 -0.5101 +vn 0.9437 0.1842 -0.2746 +vn 0.9191 0.0504 -0.3908 +vn 0.8415 0.4998 -0.2051 +vn 0.9362 0.3168 -0.1522 +vn 0.8390 0.4961 -0.2237 +vn 0.9397 0.3310 -0.0862 +vn 0.9803 0.1720 0.0974 +vn 0.9922 0.1119 -0.0551 +vn 0.9845 0.0046 -0.1753 +vn 0.9598 -0.0911 -0.2654 +vn 0.9920 0.0441 -0.1181 +vn 0.9828 0.0008 -0.1848 +vn 0.9805 -0.0217 -0.1953 +vn 0.8343 -0.4006 0.3788 +vn 0.5704 -0.5136 0.6410 +vn 0.4590 -0.5784 0.6743 +vn 0.2636 -0.7593 0.5950 +vn 0.7711 -0.3765 0.5134 +vn 0.7907 -0.2855 0.5416 +vn 0.9373 -0.3471 0.0324 +vn 0.8227 -0.5571 0.1133 +vn 0.9860 -0.1320 0.1015 +vn 0.9569 -0.0551 0.2852 +vn 0.5179 -0.4323 0.7382 +vn 0.4116 -0.5599 0.7191 +vn 0.1901 -0.6792 0.7089 +vn -0.1796 -0.6057 0.7751 +vn -0.3743 -0.4879 0.7886 +vn -0.5584 -0.3523 0.7510 +vn 0.5985 0.2407 -0.7641 +vn 0.4113 0.2628 -0.8728 +vn -0.3798 0.2331 -0.8952 +vn -0.4691 0.2666 -0.8419 +vn 0.9628 0.0013 0.2701 +vn 0.9347 0.2536 0.2491 +vn 0.9500 0.0932 0.2981 +vn 0.9034 0.3372 0.2650 +vn 0.8963 0.3995 0.1927 +vn 0.8296 0.4774 0.2897 +vn 0.8920 0.1911 0.4097 +vn 0.8869 0.3406 0.3121 +vn 0.8175 0.5487 0.1750 +vn 0.8401 0.5368 0.0773 +vn 0.7660 0.5363 0.3545 +vn 0.7810 0.5653 0.2655 +vn 0.5597 0.8204 -0.1170 +vn 0.5347 0.8142 0.2261 +vn 0.5578 0.7848 0.2701 +vn 0.2733 0.9582 0.0847 +vn 0.2604 0.9372 0.2321 +vn 0.2954 0.9377 -0.1829 +vn 0.1879 0.9768 -0.1030 +vn 0.0676 0.9955 0.0670 +vn 0.0433 0.9764 0.2115 +vn 0.0595 0.9980 0.0193 +vn -0.0092 0.9941 0.1079 +vn -0.0137 0.9886 0.1497 +vn -0.0514 0.9984 0.0237 +vn 0.0263 0.9929 0.1158 +vn 0.0320 0.9882 0.1499 +vn -0.1219 0.9901 -0.0703 +vn -0.0388 0.9905 0.1316 +vn -0.0598 0.9759 0.2099 +vn -0.3978 0.9141 -0.0791 +vn -0.3457 0.9101 0.2286 +vn -0.3157 0.9036 0.2896 +vn -0.6868 0.7216 0.0873 +vn -0.6547 0.6746 0.3410 +vn -0.6641 0.6494 0.3706 +vn -0.8245 0.4922 0.2792 +vn -0.7728 0.5100 0.3778 +vn -0.7483 0.5158 0.4172 
+vn -0.8655 0.3633 0.3448 +vn -0.8323 0.3787 0.4048 +vn -0.7872 0.4239 0.4478 +vn -0.8947 0.2276 0.3844 +vn -0.8795 0.2782 0.3862 +vn -0.8473 0.3305 0.4158 +vn 0.8497 0.2742 0.4504 +vn 0.8116 0.3145 0.4924 +vn 0.7507 0.3896 0.5335 +vn 0.6726 0.3863 0.6312 +vn 0.7238 0.2810 0.6302 +vn 0.7561 0.2989 0.5822 +vn 0.7201 0.6090 0.3325 +vn 0.6790 0.6517 0.3380 +vn 0.6073 0.6013 0.5192 +vn 0.6466 0.6077 0.4611 +vn 0.5688 0.7343 0.3704 +vn 0.5880 0.7302 0.3478 +vn 0.5717 0.7498 0.3332 +vn 0.2843 0.9469 0.1504 +vn 0.6182 0.6615 0.4246 +vn 0.6619 0.6506 0.3724 +vn 0.3494 0.8922 0.2862 +vn 0.3592 0.8405 0.4056 +vn -0.7550 0.4104 0.5115 +vn -0.4717 0.5181 0.7135 +vn -0.6090 0.5067 0.6102 +vn -0.7147 0.4881 0.5010 +vn -0.5347 0.4021 0.7432 +vn -0.6162 0.4993 0.6091 +vn -0.6771 0.5370 0.5032 +vn -0.6280 0.5188 0.5800 +vn -0.6364 0.5802 0.5083 +vn -0.6033 0.6278 0.4918 +vn -0.5774 0.6410 0.5057 +vn -0.5875 0.6077 0.5343 +vn -0.3284 0.8896 0.3175 +vn -0.3065 0.8427 0.4427 +vn -0.3434 0.8420 0.4161 +vn -0.0917 0.9777 0.1888 +vn 0.0298 0.9212 0.3879 +vn -0.0408 0.9432 0.3298 +vn 0.0293 0.9851 0.1692 +vn 0.0342 0.9325 0.3595 +vn 0.0606 0.9456 0.3197 +vn -0.0162 0.9879 0.1542 +vn -0.1265 0.9190 0.3733 +vn -0.0888 0.9569 0.2766 +vn 0.0504 0.9923 0.1132 +vn 0.0133 0.9747 0.2229 +vn 0.0252 0.9140 0.4049 +vn -0.4283 0.3707 0.8241 +vn -0.5244 0.2774 0.8050 +vn -0.5925 0.3754 0.7128 +vn -0.4542 0.6253 0.6346 +vn -0.2231 0.8638 0.4518 +vn -0.0064 0.9043 0.4269 +vn 0.0299 0.9063 0.4215 +vn -0.0473 0.8704 0.4901 +vn 0.1262 0.8272 0.5475 +vn 0.4299 0.7119 0.5553 +vn 0.6231 0.5645 0.5414 +vn 0.5769 0.5792 0.5759 +vn 0.4989 0.6125 0.6132 +vn 0.5705 0.3951 0.7200 +vn 0.5579 0.3224 0.7648 +vn -0.2618 0.2812 0.9232 +vn 0.1442 0.3775 0.9147 +vn -0.0514 0.3524 0.9344 +vn -0.2608 0.5653 0.7826 +vn 0.0303 0.6994 0.7141 +vn 0.0646 0.7052 0.7061 +vn 0.5078 0.2758 0.8161 +vn 0.5218 0.4488 0.7255 +vn 0.2965 0.5951 0.7469 +vn 0.3906 0.2989 0.8707 +vn 0.5018 -0.4838 0.7170 +vn 0.1119 -0.9696 0.2174 +vn 0.1028 -0.7783 0.6194 +vn -0.5647 0.0865 -0.8207 +vn 0.6670 0.2911 -0.6858 +vn 0.6691 -0.6985 -0.2538 +vn -0.3607 -0.9068 -0.2180 +vn 0.9654 0.0384 0.2579 +vn 0.9934 -0.0161 0.1135 +vn 0.9763 -0.0210 0.2155 +vn 0.8773 0.1093 0.4673 +vn 0.8935 0.0808 0.4416 +vn 0.9122 0.0865 0.4004 +vn 0.8634 0.1786 0.4718 +vn 0.8160 0.3581 0.4538 +vn 0.8025 0.4315 0.4121 +vn 0.6373 0.6030 0.4798 +vn 0.7391 0.4527 0.4988 +vn 0.6533 0.4754 0.5892 +vn 0.5528 0.5590 0.6180 +vn 0.7409 0.3908 0.5462 +vn 0.7521 0.3374 0.5661 +vn 0.4413 0.4486 0.7772 +vn 0.4392 0.5049 0.7431 +vn 0.7822 0.3257 0.5311 +vn 0.6700 0.2860 0.6850 +vn 0.5953 0.3925 0.7011 +vn 0.7492 0.3956 0.5312 +vn 0.3246 0.2125 0.9217 +vn 0.2571 0.1864 0.9482 +vn 0.2712 0.2506 0.9293 +vn 0.3006 0.2888 0.9090 +vn 0.3613 0.1959 0.9116 +vn 0.3010 0.1402 0.9433 +vn 0.3810 0.0636 0.9224 +vn 0.4782 0.0967 0.8729 +vn 0.6084 -0.0398 0.7926 +vn 0.4871 -0.0476 0.8720 +vn 0.5853 -0.1349 0.7995 +vn 0.6983 -0.1324 0.7035 +vn 0.6891 -0.2139 0.6924 +vn 0.7135 -0.2453 0.6563 +vn 0.7922 -0.2322 0.5643 +vn 0.7742 -0.2086 0.5976 +vn 0.7176 -0.2993 0.6288 +vn 0.7113 -0.3512 0.6089 +vn 0.7986 -0.3326 0.5016 +vn 0.8002 -0.2712 0.5349 +vn 0.2192 -0.6690 0.7102 +vn 0.1388 -0.6470 0.7498 +vn 0.0595 -0.6362 0.7692 +vn 0.1617 -0.6364 0.7542 +vn 0.0635 -0.5522 0.8313 +vn 0.1800 -0.4846 0.8560 +vn 0.2597 -0.4018 0.8781 +vn 0.1664 -0.5248 0.8348 +vn 0.4677 -0.4897 0.7358 +vn 0.5565 -0.3342 0.7607 +vn 0.4086 -0.3294 0.8512 +vn 0.3309 -0.4647 0.8213 +vn 0.5275 -0.5122 0.6777 +vn 0.6307 -0.3640 0.6853 +vn 0.6716 -0.3979 0.6250 +vn 
0.5642 -0.5212 0.6403 +vn 0.5841 -0.5147 0.6276 +vn 0.6863 -0.4165 0.5962 +vn 0.5972 0.2417 0.7648 +vn 0.5046 0.1854 0.8432 +vn 0.5216 0.2377 0.8194 +vn 0.6090 0.2825 0.7411 +vn 0.3948 0.1026 0.9130 +vn 0.2668 0.0245 0.9634 +vn 0.3005 0.1882 0.9350 +vn 0.4062 0.2101 0.8893 +vn 0.1307 -0.0055 0.9914 +vn 0.2128 0.1791 0.9605 +vn 0.0647 0.0310 0.9974 +vn 0.0739 0.1312 0.9886 +vn 0.2675 0.2823 0.9213 +vn 0.1985 0.2008 0.9593 +vn 0.5284 0.1555 0.8346 +vn 0.7763 0.2610 0.5738 +vn 0.6511 0.4141 0.6361 +vn 0.3809 0.2920 0.8773 +vn 0.3800 -0.4630 0.8008 +vn 0.5207 -0.4709 0.7121 +vn 0.7657 -0.0571 0.6406 +vn 0.5328 -0.1598 0.8310 +vn 0.5054 0.2260 0.8328 +vn 0.5298 0.1730 0.8303 +vn 0.5056 0.2443 0.8275 +vn 0.5105 0.2601 0.8196 +vn 0.5238 0.1580 0.8371 +vn 0.5566 0.0600 0.8286 +vn 0.4946 -0.3490 0.7960 +vn 0.4959 -0.1976 0.8456 +vn 0.5325 0.0161 0.8463 +vn 0.5628 -0.1083 0.8195 +vn 0.5585 0.0644 0.8270 +vn 0.6684 -0.0142 0.7436 +vn 0.6148 -0.0547 0.7868 +vn 0.4748 -0.0272 0.8797 +vn 0.6397 -0.1760 0.7482 +vn 0.6025 -0.1815 0.7772 +vn 0.5123 -0.3665 0.7767 +vn 0.4973 -0.3315 0.8017 +vn 0.1012 -0.5616 0.8212 +vn 0.2201 -0.1525 0.9635 +vn 0.1723 -0.2786 0.9448 +vn 0.1957 -0.4907 0.8490 +vn -0.0000 0.4981 0.8671 +vn -0.0000 0.5578 0.8300 +vn 0.6227 -0.0950 0.7767 +vn 0.6572 -0.0125 0.7536 +vn 0.6601 -0.2306 0.7149 +vn 0.6002 -0.3158 0.7349 +vn 0.1168 0.2816 0.9524 +vn 0.1632 0.3204 0.9331 +vn 0.0830 0.3117 0.9466 +vn 0.0488 0.2585 0.9648 +vn 0.5060 0.2968 0.8098 +vn 0.4756 0.2961 0.8283 +vn 0.5742 0.2869 0.7668 +vn 0.6010 0.2820 0.7478 +vn 0.6662 0.2673 0.6962 +vn 0.6864 0.2603 0.6791 +vn 0.7691 0.2230 0.5989 +vn 0.7842 0.2089 0.5843 +vn 0.8359 0.1764 0.5198 +vn 0.8316 0.1684 0.5292 +vn 0.8749 0.0472 0.4820 +vn 0.9172 0.0789 0.3905 +vn 0.9212 0.0393 0.3871 +vn 0.9037 -0.0020 0.4281 +vn -0.0748 -0.8331 -0.5481 +vn 0.0009 -0.8230 -0.5681 +vn 0.0006 -0.2265 -0.9740 +vn -0.1200 -0.2338 -0.9649 +vn -0.3918 -0.1287 -0.9110 +vn -0.5621 -0.0980 -0.8212 +vn -0.3387 -0.8165 -0.4675 +vn -0.3183 -0.7955 -0.5156 +vn -0.3524 -0.8445 -0.4033 +vn -0.6096 -0.1420 -0.7798 +vn -0.3670 -0.8802 -0.3010 +vn -0.7249 -0.2890 -0.6253 +vn -0.3024 -0.9322 -0.1990 +vn -0.7167 -0.4638 -0.5208 +vn -0.2480 -0.9614 -0.1193 +vn -0.4913 -0.8460 -0.2071 +vn -0.3069 -0.1849 -0.9336 +vn -0.2136 -0.8042 -0.5547 +vn -0.2082 -0.9775 0.0348 +vn -0.6978 -0.7054 -0.1247 +vn 0.0002 -0.2568 -0.9665 +vn -0.1842 -0.2418 -0.9527 +vn -0.5465 -0.1255 -0.8280 +vn -0.6949 -0.0699 -0.7157 +vn -0.8232 0.0163 -0.5675 +vn -0.4598 -0.5304 -0.7122 +vn -0.0000 -0.5702 -0.8215 +vn -0.7722 -0.3693 -0.5171 +vn -0.8674 -0.0186 -0.4973 +vn -0.5295 -0.7901 -0.3088 +vn -0.9422 -0.2658 -0.2041 +vn -0.0003 -0.9252 -0.3794 +vn -0.8930 0.3357 0.2997 +vn -0.0004 -0.8659 0.5003 +vn -0.5336 -0.8433 0.0636 +vn -0.0004 -0.9995 -0.0320 +vn -0.8174 -0.4230 0.3910 +vn -0.9059 -0.4092 0.1091 +vn -0.0001 -0.5122 0.8589 +vn -0.7303 -0.0226 0.6827 +vn -0.0037 0.9990 -0.0438 +vn -0.2049 0.7188 -0.6643 +vn -0.0968 0.7446 -0.6605 +vn 0.0227 0.9997 -0.0131 +vn 0.0112 0.9957 -0.0917 +vn -0.3361 0.7597 -0.5567 +vn -0.2347 0.7151 -0.6584 +vn 0.0010 0.9970 -0.0771 +vn 0.0317 0.9945 -0.0995 +vn -0.3846 0.7295 -0.5656 +vn -0.0003 0.7369 -0.6760 +vn 0.0010 0.9999 0.0163 +vn 0.0799 0.9939 -0.0758 +vn -0.3357 0.8448 -0.4167 +vn 0.0763 0.9964 -0.0362 +vn -0.3320 0.8838 -0.3298 +vn -0.2163 -0.9759 0.0293 +vn -0.7006 -0.7025 -0.1252 +vn -0.0015 0.5026 -0.8645 +vn -0.2558 0.5657 -0.7839 +vn -0.0015 0.6980 -0.7161 +vn -0.7380 0.4317 -0.5186 +vn -0.6909 0.4022 -0.6007 +vn -0.6286 0.3737 -0.6821 +vn 
-0.3367 0.5450 -0.7678 +vn -0.8669 0.4970 0.0392 +vn -0.3860 0.6425 -0.6620 +vn -0.6039 0.7629 -0.2310 +vn -0.8931 0.3356 0.2997 +vn -0.9422 -0.2657 -0.2041 +vn -0.0318 0.9413 -0.3360 +vn -0.7313 0.6809 -0.0404 +vn -0.0143 0.9952 -0.0965 +vn -0.7296 0.6836 -0.0182 +vn -0.0062 0.9966 -0.0823 +vn 0.0010 0.6048 0.7964 +vn -0.8845 0.0323 -0.4654 +vn -0.8845 0.0322 -0.4654 +vn 0.0754 -0.8326 -0.5487 +vn 0.1199 -0.2333 -0.9650 +vn 0.3920 -0.1274 -0.9111 +vn 0.3181 -0.7960 -0.5150 +vn 0.3384 -0.8164 -0.4679 +vn 0.5616 -0.0964 -0.8218 +vn 0.3527 -0.8442 -0.4037 +vn 0.6091 -0.1399 -0.7806 +vn 0.3676 -0.8796 -0.3019 +vn 0.7243 -0.2858 -0.6275 +vn 0.3034 -0.9313 -0.2015 +vn 0.7166 -0.4599 -0.5243 +vn 0.2505 -0.9604 -0.1221 +vn 0.4965 -0.8419 -0.2115 +vn 0.2143 -0.8044 -0.5541 +vn 0.3076 -0.1837 -0.9336 +vn 0.2111 -0.9771 0.0256 +vn 0.7301 -0.6672 -0.1477 +vn 0.1833 -0.2418 -0.9529 +vn 0.5459 -0.1242 -0.8286 +vn 0.6937 -0.0684 -0.7170 +vn 0.8219 0.0185 -0.5693 +vn 0.4577 -0.5306 -0.7135 +vn 0.7713 -0.3690 -0.5186 +vn 0.8665 -0.0178 -0.4988 +vn 0.5274 -0.7911 -0.3099 +vn 0.9448 -0.2626 -0.1958 +vn 0.8931 0.3377 0.2972 +vn 0.5319 -0.8445 0.0627 +vn 0.9236 -0.3671 0.1103 +vn 0.8353 -0.3972 0.3802 +vn 0.7307 -0.0238 0.6823 +vn 0.0040 0.9990 -0.0448 +vn -0.0225 0.9996 -0.0185 +vn 0.0913 0.7432 -0.6628 +vn 0.2060 0.7179 -0.6650 +vn -0.0104 0.9957 -0.0922 +vn -0.0004 0.9969 -0.0782 +vn 0.2357 0.7144 -0.6589 +vn 0.3367 0.7590 -0.5573 +vn -0.0315 0.9945 -0.0998 +vn 0.3846 0.7285 -0.5669 +vn -0.0794 0.9939 -0.0764 +vn 0.3356 0.8443 -0.4178 +vn -0.0688 0.9968 -0.0408 +vn 0.3302 0.8823 -0.3355 +vn 0.2129 -0.9767 0.0260 +vn 0.7297 -0.6678 -0.1467 +vn 0.2531 0.5661 -0.7845 +vn 0.6911 0.3994 -0.6024 +vn 0.7340 0.4352 -0.5214 +vn 0.6288 0.3707 -0.6835 +vn 0.3358 0.5459 -0.7676 +vn 0.8667 0.4975 0.0369 +vn 0.3845 0.6424 -0.6629 +vn 0.6216 0.7414 -0.2530 +vn 0.7075 0.7047 -0.0536 +vn 0.9236 -0.3672 0.1103 +vn 0.7387 0.6738 -0.0171 +vn 0.8352 -0.3972 0.3803 +vn 0.8824 0.0369 -0.4691 +vn 0.8824 0.0370 -0.4691 +vn -0.4673 -0.7059 0.5323 +vn -0.5999 -0.6509 0.4652 +vn -0.5746 -0.8001 0.1723 +vn -0.4169 -0.8584 0.2989 +vn -0.6931 -0.6796 0.2405 +vn -0.4341 -0.8608 0.2656 +vn -0.1770 -0.8416 0.5103 +vn -0.1939 -0.9354 0.2957 +vn -0.4104 -0.7070 0.5759 +vn 0.0001 0.3075 0.9516 +vn -0.0830 0.3117 0.9465 +vn -0.0487 0.2585 0.9648 +vn -0.0000 0.2474 0.9689 +vn 0.0001 0.4637 0.8860 +vn -0.0999 0.4462 0.8893 +vn -0.7078 0.4383 0.5540 +vn -0.8365 0.3436 0.4269 +vn -0.8148 0.4100 0.4099 +vn -0.6851 0.4513 0.5718 +vn -0.1465 -0.0345 0.9886 +vn 0.0397 -0.1467 0.9884 +vn -0.0916 -0.4913 0.8662 +vn -0.2928 -0.3211 0.9006 +vn -0.7234 0.4015 0.5616 +vn -0.8272 0.3118 0.4675 +vn -0.2017 0.6509 0.7319 +vn -0.3964 0.6205 0.6767 +vn -0.2905 0.4551 0.8417 +vn -0.2225 0.4564 0.8615 +vn -0.1865 0.8201 -0.5410 +vn -0.5147 0.7020 -0.4922 +vn -0.4821 0.8378 -0.2564 +vn -0.2077 0.9334 -0.2928 +vn -0.4014 0.5387 -0.7408 +vn -0.1823 -0.3103 0.9330 +vn -0.2226 -0.4860 0.8451 +vn -0.3278 -0.5764 0.7485 +vn -0.2630 -0.4422 0.8575 +vn -0.4413 0.4486 0.7772 +vn -0.4020 0.1551 0.9024 +vn -0.7038 0.0560 0.7082 +vn -0.7521 0.3374 0.5661 +vn -0.0720 0.2125 0.9745 +vn -0.1785 0.2256 0.9577 +vn -0.1801 -0.1209 0.9762 +vn -0.1129 -0.0561 0.9920 +vn -0.2268 0.8889 0.3981 +vn -0.4627 0.7956 0.3910 +vn -0.4277 0.7245 0.5405 +vn -0.2055 0.7936 0.5727 +vn 0.1491 0.1384 0.9791 +vn 0.1110 -0.0131 0.9937 +vn -0.0450 0.0726 0.9963 +vn 0.0449 0.1869 0.9814 +vn -0.0003 0.2819 0.9594 +vn -0.0001 0.1602 0.9871 +vn -0.7944 0.3409 0.5027 +vn -0.6651 0.3375 0.6661 +vn -0.6514 
0.0052 0.7587 +vn -0.3351 -0.0116 0.9421 +vn -0.3466 0.3431 0.8730 +vn -0.5816 -0.4006 0.7080 +vn -0.2955 -0.4378 0.8491 +vn -0.7092 -0.3568 0.6081 +vn -0.6341 -0.3494 0.6898 +vn -0.5059 -0.1092 0.8556 +vn -0.7410 0.3908 0.5461 +vn -0.8092 0.3406 0.4787 +vn -0.5492 -0.4341 0.7141 +vn -0.7988 0.0012 0.6016 +vn -0.4598 -0.3470 0.8174 +vn -0.8123 0.3195 0.4879 +vn -0.1631 0.3205 0.9331 +vn -0.2626 0.3234 0.9091 +vn -0.2211 0.2984 0.9285 +vn -0.1166 0.2816 0.9524 +vn -0.0164 0.2017 0.9793 +vn -0.4957 -0.5049 0.7067 +vn -0.2526 -0.6987 0.6693 +vn -0.5170 -0.5277 0.6739 +vn -0.5859 -0.4777 0.6546 +vn 0.1641 0.3981 0.9026 +vn 0.1144 0.6764 0.7276 +vn 0.1396 0.5652 0.8131 +vn 0.1829 0.2712 0.9450 +vn 0.0618 0.4843 0.8727 +vn 0.0845 0.1331 0.9875 +vn -0.1107 0.4389 0.8917 +vn -0.1336 0.0229 0.9908 +vn -0.2545 0.4069 0.8773 +vn -0.3303 -0.0105 0.9438 +vn -0.3946 0.3625 0.8443 +vn -0.5086 0.0062 0.8610 +vn -0.5470 0.3197 0.7737 +vn -0.6681 0.0590 0.7417 +vn -0.6469 0.2628 0.7159 +vn -0.7524 0.0757 0.6544 +vn -0.6933 0.0616 0.7180 +vn -0.7808 0.0293 0.6240 +vn -0.8060 -0.1752 0.5653 +vn -0.8172 -0.2332 0.5270 +vn -0.6862 -0.4166 0.5963 +vn -0.6716 -0.3979 0.6250 +vn -0.9146 0.1768 0.3637 +vn -0.9122 0.0867 0.4005 +vn -0.8635 0.1786 0.4717 +vn -0.8160 0.3581 0.4537 +vn -0.5377 -0.5334 0.6529 +vn -0.5511 -0.5297 0.6448 +vn -0.6503 -0.4034 0.6437 +vn -0.6203 -0.4265 0.6583 +vn -0.6645 -0.3964 0.6335 +vn -0.8024 0.4315 0.4122 +vn -0.7491 0.5482 0.3720 +vn -0.8100 0.5208 0.2696 +vn -0.9046 0.3055 0.2974 +vn -0.5509 -0.5150 0.6567 +vn -0.5840 -0.5149 0.6276 +vn -0.5929 -0.4917 0.6377 +vn 0.9975 0.0245 0.0665 +vn 0.0918 -0.8558 0.5091 +vn 0.0575 0.4918 0.8688 +vn 0.0020 0.7727 0.6348 +vn -0.2022 -0.9057 0.3726 +vn -0.2359 -0.7555 0.6112 +vn -0.2162 -0.8515 0.4777 +vn -0.2983 -0.7840 0.5444 +vn -0.3820 -0.6773 0.6287 +vn -0.2783 -0.7605 0.5867 +vn -0.3573 -0.6746 0.6460 +vn -0.4599 -0.5177 0.7214 +vn -0.3567 -0.6018 0.7146 +vn -0.4611 -0.3910 0.7966 +vn -0.3483 -0.5403 0.7660 +vn -0.4438 -0.2886 0.8484 +vn -0.3296 -0.4681 0.8199 +vn -0.4090 -0.2047 0.8893 +vn -0.2713 -0.3834 0.8828 +vn -0.3128 -0.1245 0.9416 +vn -0.1877 -0.3411 0.9211 +vn -0.0004 -0.3285 0.9445 +vn -0.0002 -0.0432 0.9991 +vn -0.1848 -0.0653 0.9806 +vn -0.1751 -0.6209 0.7641 +vn -0.0000 -0.6228 0.7824 +vn -0.2468 -0.6820 0.6885 +vn -0.2252 -0.6363 0.7379 +vn -0.2682 -0.7234 0.6362 +vn -0.2637 -0.7671 0.5849 +vn -0.2510 -0.8190 0.5160 +vn -0.1818 -0.9014 0.3929 +vn -0.0995 -0.9541 0.2826 +vn -0.4747 -0.5211 0.7093 +vn -0.3361 -0.6602 0.6717 +vn -0.5545 -0.3133 0.7709 +vn -0.5625 -0.1336 0.8159 +vn -0.5422 0.0158 0.8401 +vn -0.4950 0.1079 0.8622 +vn -0.3560 0.1717 0.9186 +vn -0.0002 0.2540 0.9672 +vn -0.1302 0.2550 0.9581 +vn -0.0876 -0.9506 0.2978 +vn 0.0003 -0.9675 0.2530 +vn -0.0870 -0.9550 0.2835 +vn -0.0564 -0.9551 0.2910 +vn -0.0855 -0.9618 0.2601 +vn -0.0650 -0.9699 0.2345 +vn -0.0096 -0.9902 0.1391 +vn 0.0228 -0.9980 0.0594 +vn -0.0659 -0.9550 0.2891 +vn 0.3385 -0.8164 -0.4680 +vn 0.3526 -0.8442 -0.4036 +vn 0.3676 -0.8797 -0.3018 +vn 0.3035 -0.9313 -0.2014 +vn 0.2142 -0.8044 -0.5541 +vn 0.0955 -0.9954 -0.0042 +vn 0.2006 -0.9793 0.0253 +vn -0.1406 -0.0333 0.9895 +vn -0.1150 0.2104 0.9708 +vn -0.2597 0.2264 0.9388 +vn -0.3022 0.0074 0.9532 +vn -0.4531 0.2692 0.8498 +vn -0.5046 0.1087 0.8565 +vn -0.6175 0.1934 0.7624 +vn -0.5729 0.2980 0.7635 +vn -0.6277 0.3375 0.7015 +vn -0.6479 0.2559 0.7175 +vn -0.5565 0.4427 0.7031 +vn -0.5517 0.2908 0.7817 +vn -0.3413 0.4544 0.8228 +vn -0.3050 0.1429 0.9416 +vn -0.1058 0.4208 0.9009 +vn -0.1990 0.4438 
0.8737 +vn -0.3804 0.4716 0.7955 +vn -0.5463 0.5751 0.6090 +vn -0.4968 0.5157 0.6980 +vn 0.0001 -0.4961 0.8683 +vn -0.0001 -0.0524 0.9986 +vn -0.1513 -0.4633 0.8732 +vn -0.3164 -0.3831 0.8678 +vn -0.5450 -0.2014 0.8139 +vn -0.6623 -0.0198 0.7490 +vn -0.6786 0.1258 0.7236 +vn -0.5651 0.1807 0.8050 +vn -0.2721 -0.0008 0.9623 +vn -0.0000 0.1993 0.9799 +vn -0.0000 0.4003 0.9164 +vn -0.2267 0.8512 0.4733 +vn -0.2745 0.8995 0.3400 +vn -0.4615 0.7590 0.4592 +vn -0.2750 0.8502 0.4489 +vn -0.2206 0.9713 0.0890 +vn -0.1281 0.9906 0.0488 +vn 0.0041 0.9990 -0.0448 +vn -0.1251 0.7975 0.5902 +vn -0.1033 0.7427 0.6616 +vn -0.0314 0.9945 -0.0997 +vn -0.2768 0.9318 0.2346 +vn 0.0010 0.9999 0.0164 +vn -0.0011 0.7093 0.7049 +vn -0.0793 0.9939 -0.0764 +vn -0.0672 0.9969 -0.0410 +vn -0.0637 -0.7562 0.6512 +vn -0.0746 -0.6730 0.7359 +vn -0.0615 -0.8525 0.5191 +vn 0.0677 -0.9785 0.1946 +vn -0.1388 -0.6470 0.7498 +vn -0.0595 -0.6362 0.7692 +vn -0.0001 -0.6244 0.7811 +vn -0.0912 -0.6136 0.7843 +vn -0.0215 -0.5815 0.8133 +vn -0.0635 -0.5522 0.8313 +vn -0.1801 -0.4845 0.8560 +vn -0.1533 -0.5420 0.8263 +vn -0.3167 -0.5164 0.7957 +vn -0.3309 -0.4647 0.8213 +vn -0.4677 -0.4898 0.7358 +vn -0.4547 -0.5287 0.7168 +vn -0.5275 -0.5123 0.6777 +vn -0.5092 -0.5335 0.6754 +vn -0.5641 -0.5214 0.6403 +vn -0.6636 0.0643 0.7453 +vn -0.5972 0.2417 0.7648 +vn -0.5046 0.1853 0.8432 +vn -0.5414 -0.0376 0.8399 +vn -0.3902 -0.1308 0.9114 +vn -0.3948 0.1025 0.9130 +vn -0.2668 0.0243 0.9634 +vn -0.2119 -0.1798 0.9606 +vn -0.1307 -0.0056 0.9914 +vn -0.0287 -0.1490 0.9884 +vn 0.0610 -0.0557 0.9966 +vn -0.0647 0.0309 0.9974 +vn -0.0737 0.1309 0.9887 +vn 0.0692 0.0743 0.9948 +vn -0.4209 0.1506 0.8945 +vn -0.2120 0.2512 0.9444 +vn -0.3808 0.2919 0.8774 +vn -0.5282 0.1554 0.8348 +vn -0.4832 -0.1534 0.8620 +vn -0.5327 -0.1599 0.8310 +vn -0.3799 -0.4630 0.8008 +vn -0.3542 -0.4571 0.8158 +vn -0.6575 0.1365 0.7410 +vn -0.6721 0.2609 0.6929 +vn -0.7405 0.1580 0.6533 +vn -0.6810 0.0267 0.7318 +vn -0.6001 -0.3158 0.7349 +vn -0.6226 -0.0950 0.7767 +vn -0.6187 -0.1809 0.7645 +vn -0.5708 -0.3703 0.7329 +vn -0.7188 0.1344 0.6821 +vn -0.6464 0.2652 0.7154 +vn -0.5974 -0.4491 0.6644 +vn -0.5554 -0.4754 0.6823 +vn -0.2418 -0.5944 0.7670 +vn -0.2165 -0.5618 0.7984 +vn 0.3628 -0.3085 0.8793 +vn 0.6460 -0.7620 -0.0462 +vn 0.8604 -0.0981 0.5001 +vn 0.5300 0.8384 0.1269 +vn 0.1451 0.7660 0.6263 +vn 0.4524 -0.8511 -0.2662 +vn 0.1021 -0.9791 0.1758 +vn 0.2309 -0.9537 0.1929 +vn 0.5283 -0.8242 -0.2040 +vn 0.5418 -0.8405 0.0025 +vn 0.5780 -0.8095 -0.1028 +vn -0.2898 -0.8907 -0.3503 +vn -0.0595 -0.9842 0.1666 +vn -0.4367 -0.8523 -0.2880 +vn -0.2772 -0.9376 0.2100 +vn -0.4511 -0.8434 0.2919 +vn -0.6985 -0.7050 -0.1226 +vn -0.5428 -0.7822 0.3058 +vn -0.7622 -0.6405 -0.0939 +vn -0.7986 0.0557 0.5992 +vn -0.6658 -0.2120 0.7154 +vn -0.7797 0.0320 0.6253 +vn -0.6590 0.3042 0.6879 +vn -0.6468 0.3071 0.6981 +vn -0.6585 0.2036 0.7245 +vn -0.9866 -0.1082 0.1218 +vn -0.9934 -0.0410 0.1073 +vn -0.9959 -0.0646 0.0630 +vn -0.9887 -0.1245 0.0836 +vn -0.9740 -0.0397 0.2232 +vn -0.9723 0.0317 0.2317 +vn -0.9553 0.0201 0.2949 +vn -0.9484 0.0746 0.3083 +vn -0.9259 0.0755 0.3702 +vn -0.9169 0.1003 0.3863 +vn -0.8681 0.1340 0.4780 +vn -0.8605 0.1111 0.4972 +vn -0.7053 0.2148 0.6756 +vn -0.7108 0.0533 0.7014 +vn -0.7848 0.0986 0.6118 +vn -0.7897 0.1866 0.5845 +vn -0.6571 -0.0125 0.7537 +vn -0.8781 -0.4785 0.0082 +vn -0.6913 -0.5431 0.4766 +vn -0.3146 0.6982 0.6431 +vn -0.6727 0.2903 0.6806 +vn -0.7709 0.0873 0.6310 +vn -0.4386 0.6387 0.6322 +vn -0.7518 -0.4861 0.4454 +vn -0.1739 0.4845 
0.8573 +vn -0.3417 -0.5846 0.7358 +vn -0.3023 -0.6665 0.6815 +vn -0.5320 -0.7765 0.3376 +vn -0.6150 -0.7181 0.3257 +vn -0.4993 -0.2947 0.8148 +vn -0.3389 0.8768 0.3411 +vn -0.2217 0.9684 -0.1141 +vn -0.9738 0.2240 -0.0385 +vn -0.6864 -0.4090 0.6013 +vn -0.8935 0.0808 0.4417 +vn -0.9246 0.0228 0.3803 +vn -0.6091 -0.7630 0.2165 +vn -0.4051 -0.9143 -0.0039 +vn -0.5510 -0.8341 0.0278 +vn -0.6065 -0.7950 -0.0080 +vn -0.0000 -0.4404 0.8978 +vn -0.0000 -0.8090 0.5878 +vn -0.3839 -0.8361 0.3920 +vn -0.4546 -0.4768 0.7523 +vn -0.6473 -0.4673 0.6023 +vn -0.8036 -0.2614 0.5347 +vn -0.8997 -0.0365 0.4350 +vn -0.8877 -0.0976 0.4499 +vn -0.9175 0.0837 0.3889 +vn -0.8918 0.1649 0.4213 +vn -0.8781 0.1282 0.4609 +vn -0.9057 0.0774 0.4169 +vn -0.8774 0.1093 0.4672 +vn -0.8820 0.0509 0.4685 +vn -0.2770 -0.7982 0.5349 +vn -0.4154 -0.7990 0.4347 +vn -0.2500 -0.9405 0.2300 +vn -0.1830 -0.9239 0.3361 +vn -0.1289 -0.8110 0.5707 +vn -0.0858 -0.9306 0.3558 +vn -0.0998 -0.9807 0.1683 +vn -0.2887 -0.9464 0.1446 +vn -0.1401 -0.9865 0.0851 +vn -0.3210 -0.9443 0.0729 +vn -0.1601 -0.9867 0.0267 +vn -0.2289 -0.9734 0.0084 +vn -0.4672 -0.3187 0.8247 +vn -0.6241 -0.3025 0.7205 +vn -0.5816 -0.2758 0.7653 +vn -0.4368 -0.3446 0.8310 +vn 0.0001 -0.6404 0.7680 +vn -0.1266 -0.6141 0.7790 +vn -0.2786 -0.5442 0.7914 +vn -0.5124 -0.3663 0.7767 +vn -0.6400 -0.1756 0.7481 +vn -0.6686 -0.0137 0.7435 +vn -0.5586 0.0649 0.8269 +vn -0.2200 -0.1517 0.9636 +vn -0.1001 -0.5609 0.8218 +vn -0.3525 -0.5203 0.7778 +vn -0.4948 -0.3491 0.7958 +vn -0.5630 -0.1084 0.8193 +vn -0.5567 0.0600 0.8286 +vn -0.5301 0.1728 0.8302 +vn -0.5057 0.2258 0.8326 +vn -0.4073 0.2340 0.8828 +vn -0.0002 0.4003 0.9164 +vn -0.1842 0.2470 0.9513 +vn 0.0633 -0.9386 0.3392 +vn 0.1356 -0.9317 0.3369 +vn 0.1635 -0.9165 0.3651 +vn 0.1469 -0.8976 0.4156 +vn -0.0448 -0.9337 0.3553 +vn -0.2455 -0.9082 0.3390 +vn -0.4289 -0.8406 0.3307 +vn -0.0538 0.9047 0.4227 +vn -0.2069 0.9729 -0.1030 +vn -0.1004 0.9929 -0.0646 +vn 0.0529 0.8753 0.4806 +vn -0.0642 0.9968 -0.0472 +vn 0.1465 0.9892 -0.0057 +vn 0.1993 0.9798 -0.0168 +vn 0.2999 0.9499 0.0881 +vn -0.4939 -0.8145 0.3044 +vn -0.6886 -0.1634 0.7065 +vn -0.6777 -0.4814 0.5558 +vn -0.5426 -0.4968 0.6773 +vn -0.5648 -0.2159 0.7965 +vn -0.0009 -0.9998 0.0185 +vn 0.0001 -0.9968 0.0796 +vn -0.1838 -0.5656 0.8039 +vn 0.0003 -0.8187 0.5742 +vn 0.0001 -0.5734 0.8193 +vn -0.7754 -0.6013 0.1927 +vn -0.7659 -0.3869 0.5136 +vn -0.2186 0.4732 -0.8534 +vn -0.5172 0.2740 -0.8108 +vn -0.0001 0.3251 -0.9457 +vn -0.0001 0.4697 -0.8828 +vn -0.8751 0.1610 -0.4564 +vn -0.6320 0.4455 -0.6341 +vn -0.8926 0.3541 -0.2792 +vn -0.9956 -0.0180 -0.0921 +vn -0.9882 0.1168 0.0986 +vn -0.3108 0.5134 -0.7999 +vn -0.9551 -0.2294 0.1875 +vn -0.9063 -0.1516 0.3944 +vn -0.7320 -0.3192 0.6019 +vn -0.7143 -0.5600 0.4198 +vn -0.5272 -0.8276 0.1929 +vn -0.3705 -0.5372 0.7577 +vn -0.0004 -0.9820 0.1890 +vn 0.0002 -0.9314 0.3641 +vn -0.3948 -0.2979 0.8691 +vn -0.1925 -0.3467 0.9180 +vn -0.0000 -0.3565 0.9343 +vn -0.2477 -0.0329 0.9683 +vn -0.2583 0.1531 0.9539 +vn -0.0001 0.1353 0.9908 +vn -0.0001 -0.0403 0.9992 +vn -0.6105 0.0100 0.7920 +vn -0.5258 -0.0390 0.8497 +vn -0.4424 -0.0079 0.8968 +vn -0.4328 0.1317 0.8918 +vn -0.3882 0.1311 0.9122 +vn -0.2671 -0.1753 0.9476 +vn -0.3969 -0.3307 0.8562 +vn -0.5148 -0.3026 0.8021 +vn -0.5263 -0.2018 0.8260 +vn -0.0004 -0.4347 0.9006 +vn -0.2343 -0.3935 0.8890 +vn -0.0130 -0.1847 0.9827 +vn -0.0003 -0.2537 0.9673 +vn -0.3627 -0.3921 0.8454 +vn -0.0003 -0.4546 0.8907 +vn -0.6870 -0.2597 0.6787 +vn -0.7382 -0.3923 0.5488 +vn -0.7504 
-0.4771 0.4574 +vn -0.9658 -0.2560 -0.0415 +vn -0.9138 -0.3567 -0.1942 +vn -0.9179 -0.3256 -0.2270 +vn -0.8462 -0.4595 0.2697 +vn -0.7860 -0.4636 0.4089 +vn -0.7916 -0.3889 0.4714 +vn -0.8693 -0.3590 0.3396 +vn -0.6424 -0.2891 0.7098 +vn -0.7136 -0.3696 0.5951 +vn -0.9489 -0.3149 0.0189 +vn -0.9454 -0.3052 -0.1144 +vn -0.8694 -0.4716 -0.1476 +vn -0.8997 -0.4357 -0.0255 +vn -0.9272 -0.3591 0.1064 +vn -0.8741 -0.4839 0.0429 +vn -0.8857 -0.4220 0.1936 +vn -0.8364 -0.5351 0.1187 +vn -0.8053 -0.5634 0.1848 +vn -0.7648 -0.5616 0.3158 +vn -0.3534 -0.9352 0.0235 +vn -0.5330 -0.8397 0.1040 +vn -0.0000 -0.3804 0.9248 +vn -0.1735 -0.3580 0.9175 +vn -0.5148 -0.1038 0.8510 +vn -0.5983 -0.1398 0.7890 +vn -0.6229 -0.0536 0.7805 +vn -0.4861 -0.2249 0.8445 +vn -0.3301 -0.2843 0.9001 +vn -0.4975 -0.3312 0.8018 +vn -0.2863 -0.4772 0.8308 +vn -0.8375 -0.4911 -0.2398 +vn -0.4178 -0.3453 -0.8404 +vn -0.0000 -0.3506 -0.9365 +vn -0.0000 -0.2211 -0.9752 +vn -0.4720 -0.2240 -0.8527 +vn -0.6977 -0.3370 -0.6322 +vn -0.7522 -0.2312 -0.6170 +vn -0.8501 -0.3337 -0.4074 +vn -0.9016 -0.2559 -0.3488 +vn -0.9069 -0.3953 -0.1461 +vn -0.9423 -0.2999 -0.1489 +vn -0.9083 -0.3574 -0.2172 +vn -0.0001 0.0628 0.9980 +vn -0.1611 0.0635 0.9849 +vn -0.1681 -0.1645 0.9719 +vn -0.0011 -0.1490 0.9888 +vn -0.0001 -0.4053 0.9142 +vn -0.2109 0.1680 0.9630 +vn -0.3066 0.0463 0.9507 +vn -0.3769 -0.1394 0.9157 +vn -0.5435 -0.1347 0.8285 +vn -0.5709 -0.1777 0.8015 +vn -0.5587 -0.1592 0.8139 +vn -0.0016 -0.0267 0.9996 +vn 0.0679 -0.0320 0.9972 +vn 0.0959 0.0956 0.9908 +vn -0.0034 0.1000 0.9950 +vn -0.0000 -0.9982 0.0599 +vn -0.0000 -0.3270 0.9450 +vn -0.0001 -0.5706 0.8213 +vn -0.1329 -0.5489 0.8253 +vn -0.0000 0.1612 0.9869 +vn -0.7174 -0.2992 0.6291 +vn -0.6054 -0.3149 0.7310 +vn -0.6150 -0.2589 0.7448 +vn -0.7133 -0.2452 0.6565 +vn -0.6891 -0.2143 0.6923 +vn -0.6032 -0.2075 0.7701 +vn -0.5555 -0.1666 0.8147 +vn -0.6486 -0.1846 0.7384 +vn -0.4738 -0.1180 0.8727 +vn -0.5852 -0.1349 0.7996 +vn -0.3565 -0.0537 0.9328 +vn -0.2544 -0.0062 0.9671 +vn -0.3807 0.0635 0.9225 +vn -0.4869 -0.0478 0.8721 +vn -0.1850 0.0285 0.9823 +vn -0.3009 0.1400 0.9433 +vn -0.7112 -0.3511 0.6091 +vn -0.6123 -0.3250 0.7207 +vn -0.4993 0.2431 0.8316 +vn -0.5849 0.1073 0.8040 +vn -0.7611 0.1677 0.6266 +vn -0.6701 0.2859 0.6850 +vn -0.9278 -0.0252 -0.3722 +vn -0.9779 -0.0723 -0.1962 +vn -0.9926 -0.0120 -0.1207 +vn -0.9471 0.1208 -0.2972 +vn -0.8556 0.4187 0.3044 +vn -0.8788 0.2597 0.4003 +vn -0.8390 0.1641 0.5188 +vn -0.8539 0.4728 0.2177 +vn -0.3605 0.4979 0.7887 +vn -0.3892 0.5336 0.7509 +vn -0.0000 0.5625 0.8268 +vn -0.0001 0.5069 0.8620 +vn -0.0002 -0.1776 0.9841 +vn -0.0002 -0.5398 0.8418 +vn -0.9978 0.0104 0.0661 +vn -0.9316 0.3501 0.0973 +vn -0.6839 0.0615 0.7269 +vn -0.4653 -0.0228 0.8848 +vn -0.9131 -0.3261 0.2446 +vn -0.5477 0.0022 0.8367 +vn -0.7216 -0.4216 0.5492 +vn -0.5796 0.1546 0.8001 +vn -0.5665 0.2062 0.7978 +vn -0.6068 -0.2967 0.7374 +vn -0.5829 -0.0720 0.8094 +vn -0.5203 0.1893 0.8327 +vn -0.4038 0.1038 0.9089 +vn -0.0000 0.3478 -0.9376 +vn -0.4240 0.2653 -0.8659 +vn -0.0000 0.6340 -0.7734 +vn -0.7399 0.1081 -0.6639 +vn -0.7169 -0.1098 -0.6885 +vn -0.8926 -0.1489 -0.4255 +vn -0.4246 -0.0348 -0.9047 +vn -0.0014 -1.0000 0.0045 +vn -0.1843 -0.9829 -0.0007 +vn -0.7383 0.3417 -0.5815 +vn -0.4229 0.4218 0.8020 +vn -0.9567 -0.1432 -0.2535 +vn -0.9992 0.0223 -0.0327 +vn -0.9943 0.1031 -0.0259 +vn -0.9954 -0.0079 0.0952 +vn -0.9952 -0.0003 0.0983 +vn -0.8875 -0.3272 -0.3245 +vn -0.8406 -0.2984 -0.4520 +vn -0.8739 -0.3847 -0.2972 +vn -0.7844 -0.6006 -0.1553 +vn 
-0.8623 -0.4389 -0.2528 +vn -0.9621 -0.0914 0.2568 +vn -0.9844 -0.0314 0.1731 +vn -0.9953 -0.0485 0.0842 +vn -0.9859 -0.0635 0.1547 +vn -0.5689 0.7484 0.3410 +vn -0.6192 0.5984 -0.5084 +vn -0.9525 0.2754 -0.1297 +vn -0.9240 0.0417 0.3802 +vn -0.7028 0.2314 0.6727 +vn -0.8788 -0.2507 0.4060 +vn 0.0287 0.8627 0.5049 +vn 0.0395 0.7996 0.5993 +vn 0.0631 0.6080 0.7914 +vn -0.9536 -0.2183 0.2071 +vn -0.9840 -0.1748 0.0334 +vn -0.9753 -0.2000 0.0934 +vn -0.9858 -0.1222 0.1150 +vn -0.9891 -0.0414 0.1416 +vn -0.9209 -0.1393 -0.3641 +vn -0.9749 0.0039 0.2225 +vn -0.9726 0.0330 0.2301 +vn -0.7357 0.3455 0.5825 +vn -0.9642 -0.2602 0.0512 +vn -0.6293 0.2813 0.7245 +vn 0.3630 0.4739 -0.8023 +vn 0.3537 0.5692 -0.7422 +vn 0.3077 0.8752 -0.3732 +vn 0.3206 0.8135 -0.4852 +vn 0.1161 0.9159 0.3842 +vn -0.7684 -0.4897 0.4120 +vn -0.0261 -0.7160 -0.6976 +vn -0.1775 -0.9566 -0.2310 +vn -0.7125 -0.6933 0.1082 +vn -0.7982 -0.5927 -0.1075 +vn 0.2982 -0.7006 -0.6482 +vn 0.2841 -0.7023 -0.6528 +vn 0.2128 -0.5827 -0.7844 +vn 0.2160 -0.5912 -0.7771 +vn 0.4657 -0.6673 -0.5813 +vn -0.8540 0.0053 0.5202 +vn -0.9565 -0.1455 0.2528 +vn -0.9586 -0.0104 0.2847 +vn -0.8158 0.0729 0.5737 +vn -0.7025 0.1550 0.6946 +vn -0.7900 -0.1005 0.6048 +vn -0.7620 -0.2084 0.6131 +vn 0.7618 -0.2084 0.6133 +vn 0.8119 -0.4179 0.4077 +vn 0.8525 -0.4535 0.2600 +vn 0.7922 -0.1489 0.5918 +vn -0.8470 -0.1224 0.5173 +vn -0.9043 -0.0097 0.4269 +vn -0.2056 0.0354 0.9780 +vn 0.4731 -0.2854 0.8335 +vn 0.6760 0.2379 0.6975 +vn -0.0406 0.4093 0.9115 +vn -0.5248 0.1997 -0.8275 +vn -0.2741 -0.2891 -0.9172 +vn -0.8577 -0.2595 -0.4439 +vn -0.9272 0.0250 -0.3738 +vn -0.5541 -0.2403 0.7970 +vn -0.4655 -0.6875 0.5574 +vn -0.5077 -0.3318 0.7951 +vn -0.4266 -0.6851 0.5904 +vn -0.0160 -0.7739 0.6331 +vn -0.0805 -0.6151 0.7843 +vn -0.3060 0.1593 0.9386 +vn -0.4620 0.2174 0.8598 +vn -0.5108 0.2601 0.8194 +vn -0.0923 -0.5563 -0.8259 +vn -0.8029 -0.4830 -0.3494 +vn -0.1672 -0.9858 0.0150 +vn -0.0233 -0.9884 -0.1501 +vn -0.3070 -0.9328 -0.1886 +vn -0.4409 -0.8976 -0.0060 +vn -0.8525 -0.4535 0.2598 +vn -0.8120 -0.4179 0.4074 +vn -0.8352 -0.2047 0.5104 +vn -0.0001 -0.8695 0.4940 +vn -0.8118 0.4835 0.3275 +vn -0.8219 0.3728 0.4306 +vn -0.8155 0.5039 0.2847 +vn -0.9250 0.3762 0.0528 +vn -0.8480 0.4885 0.2056 +vn -0.9950 0.0951 0.0300 +vn -0.9488 -0.2959 0.1102 +vn -0.6950 -0.6582 0.2895 +vn -0.8252 0.0827 0.5588 +vn -0.7924 0.0296 0.6093 +vn -0.7397 -0.3121 0.5962 +vn -0.6130 -0.6144 0.4968 +vn 0.3511 0.2983 -0.8876 +vn 0.3319 0.0310 -0.9428 +vn 0.2774 -0.2547 -0.9264 +vn 0.2523 -0.3789 -0.8904 +vn 0.2453 -0.3972 -0.8844 +vn -0.8371 0.2473 0.4879 +vn -0.0000 0.5612 0.8277 +vn -0.4224 0.5139 0.7467 +vn -0.3659 0.0639 0.9285 +vn -0.2225 0.0411 0.9741 +vn -0.2876 0.0012 0.9578 +vn -0.1815 0.0409 0.9825 +vn -0.5141 -0.2336 0.8253 +vn -0.8027 -0.3013 0.5147 +vn -0.9212 -0.0499 0.3860 +vn -0.2118 0.5812 -0.7857 +vn -0.1260 0.5083 -0.8519 +vn -0.1666 0.6220 -0.7651 +vn -0.0668 0.5126 -0.8560 +vn -0.9889 0.0254 0.1464 +vn -0.9763 -0.0209 0.2155 +vn -0.9537 0.0115 0.3005 +vn -0.9659 0.0780 0.2468 +vn 0.0001 0.6574 0.7536 +vn -0.5368 0.4038 0.7408 +vn -0.5529 0.5590 0.6179 +vn -0.6330 0.3663 0.6819 +vn -0.6533 0.4756 0.5891 +vn -0.7184 0.3298 0.6125 +vn -0.7391 0.4528 0.4988 +vn -0.7960 0.2694 0.5421 +vn -0.7466 0.0896 0.6592 +vn -0.0343 0.5483 -0.8356 +vn -0.0000 0.4670 -0.8842 +vn -0.0000 0.4712 -0.8820 +vn -0.1438 0.6697 -0.7286 +vn -0.3872 0.8553 -0.3443 +vn -0.4274 0.7838 -0.4506 +vn -0.5323 0.8465 0.0047 +vn -0.6331 0.7739 0.0182 +vn -0.6074 0.6898 0.3941 +vn -0.6390 0.6213 
0.4535 +vn -0.4227 0.6616 0.6194 +vn -0.5730 0.4770 0.6664 +vn -0.3920 0.5416 0.7437 +vn -0.5339 0.4302 0.7279 +vn -0.3455 0.2537 0.9035 +vn -0.4149 0.1985 0.8879 +vn -0.1374 0.1672 0.9763 +vn -0.2465 0.0641 0.9670 +vn 0.0795 0.1320 0.9881 +vn -0.0060 -0.0018 1.0000 +vn -0.6859 -0.7257 -0.0530 +vn -0.6416 -0.7565 -0.1268 +vn -0.8370 -0.5210 0.1672 +vn -0.7739 -0.6225 0.1164 +vn -0.7300 -0.5449 0.4125 +vn -0.7432 -0.3122 0.5918 +vn 0.1657 -0.9691 0.1826 +vn 0.2955 -0.9553 -0.0006 +vn 0.2867 -0.8606 0.4209 +vn 0.3524 -0.8936 0.2779 +vn -0.2003 -0.9797 0.0121 +vn -0.5207 -0.4710 0.7121 +vn -0.3273 -0.6335 0.7011 +vn -0.1410 -0.2816 0.9491 +vn -0.2633 -0.2092 0.9418 +vn -0.0000 -0.1590 0.9873 +vn -0.0000 -0.2235 0.9747 +vn -0.0231 0.2002 0.9795 +vn 0.0388 0.3491 0.9363 +vn 0.1469 0.2124 0.9661 +vn -0.2449 -0.3996 0.8834 +vn -0.4531 -0.1498 0.8788 +vn -0.0670 -0.4252 0.9026 +vn -0.1897 0.2546 0.9483 +vn -0.4222 0.3949 0.8160 +vn -0.6510 0.4140 0.6362 +vn -0.1735 0.3813 0.9080 +vn -0.9275 0.2866 0.2399 +vn -0.4339 -0.7959 0.4222 +vn -0.7532 -0.5263 0.3946 +vn -0.9428 -0.0870 0.3219 +vn -0.2571 0.1861 0.9483 +vn -0.2711 0.2503 0.9294 +vn -0.3612 0.1958 0.9117 +vn -0.7493 0.3955 0.5312 +vn -0.7823 0.3256 0.5310 +vn -0.7461 -0.1794 0.6412 +vn -0.6981 -0.1324 0.7036 +vn -0.7741 -0.2085 0.5977 +vn -0.8001 -0.2710 0.5351 +vn -0.7921 -0.2321 0.5645 +vn -0.4780 0.0966 0.8730 +vn -0.6083 -0.0401 0.7927 +vn -0.6893 -0.7045 0.1691 +vn -0.6966 -0.6552 0.2923 +vn -0.1904 0.0455 0.9807 +vn -0.0924 0.0045 0.9957 +vn 0.0728 -0.9870 0.1434 +vn -0.4291 -0.8520 0.3000 +vn -0.0001 -0.0130 0.9999 +vn -0.0001 0.3368 0.9416 +vn -0.0001 -0.4463 0.8949 +vn -0.0001 -0.7721 0.6355 +vn -0.0001 -0.9206 0.3906 +vn -0.0000 -0.9518 0.3068 +vn -0.8437 0.3888 0.3701 +vn -0.5353 -0.6657 0.5198 +vn -0.7505 -0.3419 0.5656 +vn -0.4253 0.1320 0.8954 +vn -0.3540 0.2028 0.9130 +vn -0.5014 -0.1484 0.8524 +vn -0.8633 0.0787 0.4985 +vn -0.1611 -0.7119 0.6836 +vn -0.0608 -0.7496 0.6591 +vn -0.2798 -0.7039 0.6529 +vn -0.3106 -0.7189 0.6219 +vn -0.3693 -0.6571 0.6571 +vn -0.0340 -0.7906 0.6114 +vn -0.1360 -0.8072 0.5744 +vn -0.2589 -0.7557 0.6015 +vn -0.4728 0.1779 0.8630 +vn -0.2674 0.2821 0.9214 +vn 0.1603 0.0450 0.9860 +vn -0.0001 -0.0270 0.9996 +vn -0.5076 0.2616 0.8209 +vn -0.4393 0.5048 0.7431 +vn -0.3245 0.2123 0.9218 +vn -0.4502 -0.2867 0.8456 +vn -0.4583 -0.2922 0.8394 +vn -0.5074 -0.2218 0.8327 +vn -0.4918 -0.1367 0.8599 +vn -0.4165 -0.0599 0.9071 +vn -0.2981 0.0036 0.9545 +vn -0.2174 0.0321 0.9756 +vn -0.3005 0.2886 0.9091 +vn -0.4347 0.3368 0.8352 +vn -0.5954 0.3924 0.7011 +vn -0.1985 0.2007 0.9593 +vn -0.2128 0.1791 0.9605 +vn -0.3005 0.1882 0.9350 +vn -0.7202 0.2680 0.6399 +vn -0.7266 0.2406 0.6436 +vn -0.7657 -0.0572 0.6407 +vn -0.2177 -0.4173 0.8823 +vn -0.1616 -0.6364 0.7542 +vn -0.2192 -0.6690 0.7102 +vn -0.3965 -0.1616 0.9037 +vn -0.2598 -0.4018 0.8781 +vn -0.1663 -0.5248 0.8348 +vn -0.2987 -0.2953 0.9075 +vn -0.5280 -0.0757 0.8459 +vn -0.6749 -0.0693 0.7347 +vn -0.5565 -0.3342 0.7607 +vn -0.4087 -0.3294 0.8511 +vn -0.7600 -0.1124 0.6401 +vn -0.6307 -0.3640 0.6854 +vn -0.7762 0.2609 0.5739 +vn -0.6997 0.1928 0.6879 +vn -0.6091 0.2825 0.7411 +vn -0.6165 0.1477 0.7734 +vn -0.5216 0.2377 0.8194 +vn -0.4060 0.2104 0.8893 +vn -0.4944 0.1637 0.8537 +vn -0.6772 -0.3678 0.6373 +vn -0.7766 -0.2439 0.5809 +vn -0.6600 -0.2306 0.7150 +vn -0.7364 -0.1300 0.6639 +vn 0.0973 -0.4891 0.8668 +vn 0.0661 -0.4178 0.9061 +vn -0.6373 0.6030 0.4798 +vn -0.6512 0.6523 0.3880 +vn -0.4714 0.8486 0.2399 +vn -0.6567 0.7066 0.2635 +vn -0.6794 0.7196 
0.1438 +vn -0.4644 0.8765 0.1268 +vn -0.7046 0.7096 -0.0037 +vn -0.4677 0.8835 -0.0266 +vn -0.7171 0.6707 -0.1897 +vn -0.7231 0.5454 -0.4239 +vn -0.4118 -0.2676 -0.8711 +vn -0.6888 -0.2879 -0.6654 +vn -0.3022 0.2444 0.9214 +vn -0.2570 -0.0853 0.9626 +vn -0.4756 0.2962 0.8283 +vn -0.3405 0.3026 0.8902 +vn -0.3750 0.3154 0.8717 +vn -0.5062 0.2967 0.8098 +vn -0.3867 0.0294 0.9217 +vn -0.4392 0.2764 0.8548 +vn -0.0050 -0.4950 0.8689 +vn -0.9591 -0.1280 0.2524 +vn -0.9669 -0.1050 0.2324 +vn -0.9774 -0.1441 0.1549 +vn -0.9696 -0.1350 0.2042 +vn -0.9550 -0.0502 0.2923 +vn -0.9325 0.0152 0.3609 +vn -0.8837 0.0970 0.4578 +vn -0.9470 -0.0938 0.3072 +vn -0.9836 -0.1410 0.1125 +vn -0.8107 0.1858 0.5552 +vn -0.9792 -0.1247 0.1599 +vn -0.9878 -0.1507 0.0384 +vn -0.9826 -0.1488 0.1116 +vn -0.9792 -0.1984 -0.0421 +vn -0.9755 -0.2089 0.0691 +vn -0.1760 -0.4158 0.8923 +vn -0.3926 -0.3256 0.8601 +vn -0.5789 -0.3198 0.7501 +vn -0.6143 -0.3952 0.6830 +vn -0.7040 -0.0597 0.7077 +vn -0.5447 -0.1736 0.8205 +vn -0.3592 -0.2199 0.9070 +vn -0.1542 -0.2074 0.9660 +vn 0.0608 -0.1139 0.9916 +vn -0.4180 0.2093 0.8840 +vn -0.6215 -0.3446 0.7035 +vn -0.6371 -0.3923 0.6635 +vn -0.8514 0.4978 0.1653 +vn -0.9515 0.2428 0.1887 +vn -0.8582 -0.1126 0.5008 +vn -0.8117 -0.0316 0.5832 +vn -0.9187 0.0436 0.3925 +vn -0.9212 0.0393 0.3871 +vn -0.9037 -0.0020 0.4282 +vn -0.9188 0.0226 0.3940 +vn -0.9283 0.0290 0.3707 +vn -0.9583 -0.1513 -0.2423 +vn -0.9606 -0.2774 -0.0199 +vn -0.3772 -0.0325 0.9256 +vn -0.5321 0.0931 0.8415 +vn -0.8749 0.0472 0.4820 +vn -0.9478 0.0568 0.3137 +vn -0.7198 -0.6914 0.0616 +vn -0.8816 -0.4720 0.0049 +vn -0.8148 -0.1124 0.5687 +vn -0.7026 -0.2507 0.6660 +vn -0.6997 -0.1195 0.7044 +vn -0.6295 -0.2011 0.7505 +vn -0.5721 -0.1139 0.8123 +vn -0.7307 0.6823 -0.0235 +vn -0.5118 0.7177 -0.4722 +vn -0.6575 0.2436 0.7130 +vn -0.7007 0.3063 0.6444 +vn -0.7703 0.5128 0.3791 +vn -0.0000 0.4729 -0.8811 +vn -0.5978 0.6184 -0.5101 +vn -0.8022 0.5817 -0.1345 +vn -0.8303 0.1220 0.5438 +vn -0.8192 -0.0368 0.5724 +vn -0.9464 0.0835 0.3120 +vn -0.9195 0.3193 0.2294 +vn -0.7583 0.0800 0.6470 +vn -0.6970 -0.0343 0.7163 +vn -0.0000 -0.1182 0.9930 +vn -0.0647 -0.1638 0.9844 +vn 0.0001 0.8205 -0.5716 +vn 0.0001 0.9474 -0.3201 +vn -0.9183 0.3148 -0.2399 +vn -0.9129 0.3976 -0.0922 +vn -0.0000 0.0211 -0.9998 +vn -0.0000 -0.2496 -0.9684 +vn -0.0001 -0.0041 -1.0000 +vn -0.5326 -0.0177 -0.8462 +vn -0.8192 -0.0574 -0.5706 +vn -0.7985 -0.3326 0.5018 +vn -0.8074 -0.5830 -0.0902 +vn -0.7553 -0.6554 0.0039 +vn -0.0230 0.0362 0.9991 +vn -0.2464 0.9424 0.2263 +vn -0.0000 0.9157 0.4020 +vn -0.0000 0.9734 0.2293 +vn -0.0000 0.9939 0.1100 +vn -0.2526 0.9606 0.1159 +vn -0.2375 0.9707 -0.0360 +vn -0.0000 0.9985 -0.0540 +vn -0.6733 -0.7378 -0.0483 +vn -0.8909 0.4534 0.0285 +vn -0.9814 0.1765 0.0750 +vn -0.7797 -0.6026 -0.1699 +vn -0.9554 -0.2538 0.1510 +vn -0.9154 -0.3144 0.2512 +vn -0.0000 0.8232 0.5677 +vn -0.5239 0.1579 0.8370 +vn -0.5326 0.0160 0.8462 +vn -0.8813 0.0355 0.4713 +vn -0.8168 0.1313 0.5617 +vn -0.4842 0.1299 0.8652 +vn -0.4750 0.2245 0.8509 +vn -0.3650 0.1786 0.9137 +vn -0.4685 0.2826 0.8370 +vn -0.3989 0.1223 0.9088 +vn -0.6467 0.3184 0.6931 +vn -0.7616 0.2879 0.5806 +vn -0.7411 0.1888 0.6443 +vn -0.6086 0.1946 0.7692 +vn -0.4927 0.1298 0.8604 +vn -0.5456 0.3082 0.7793 +vn -0.0000 0.1962 0.9806 +vn 0.0002 0.0549 0.9985 +vn -0.8287 0.1330 0.5436 +vn -0.8405 0.2363 0.4876 +vn -0.6126 0.0584 0.7882 +vn -0.7232 -0.0563 0.6884 +vn -0.7911 -0.0656 0.6081 +vn -0.7142 0.0320 0.6992 +vn -0.7890 0.0724 0.6101 +vn -0.8527 -0.0248 0.5219 +vn 
-0.8316 0.1684 0.5292 +vn -0.8770 -0.2923 0.3813 +vn -0.9247 -0.2468 0.2899 +vn -0.9645 -0.1874 0.1862 +vn -0.9243 -0.0398 0.3797 +vn -0.4055 -0.1177 0.9065 +vn -0.3793 -0.2112 0.9008 +vn -0.5486 -0.0953 0.8306 +vn -0.1957 -0.4908 0.8490 +vn -0.3134 -0.3819 0.8695 +vn -0.2115 -0.3252 0.9217 +vn -0.1722 -0.2785 0.9449 +vn -0.4186 -0.2844 0.8625 +vn -0.3094 -0.3256 0.8935 +vn -0.4910 -0.0377 0.8703 +vn -0.4960 -0.1976 0.8455 +vn -0.5058 0.2441 0.8274 +vn -0.6149 -0.0542 0.7867 +vn -0.6027 -0.1811 0.7771 +vn -0.4749 -0.0268 0.8796 +vn -0.4818 -0.1402 0.8650 +vn -0.4523 -0.2188 0.8646 +vn -0.4624 0.0029 0.8867 +vn -0.0000 0.1861 0.9825 +vn -0.6661 0.2674 0.6962 +vn -0.7691 0.2231 0.5989 +vn -0.5742 0.2869 0.7668 +vn -0.8359 0.1765 0.5197 +vn -0.8969 -0.0878 0.4335 +vn -0.8432 -0.1248 0.5230 +vn -0.8798 -0.1520 0.4503 +vn -0.9269 -0.1245 0.3541 +vn -0.9044 -0.1634 0.3941 +vn -0.9451 -0.1392 0.2956 +vn -0.9564 -0.1312 0.2611 +vn -0.9185 -0.1653 0.3592 +vn -0.9640 -0.1377 0.2273 +vn -0.9249 -0.1878 0.3305 +vn -0.7908 -0.1303 0.5980 +vn -0.8294 -0.1649 0.5337 +vn -0.8538 -0.1863 0.4861 +vn -0.8686 -0.2022 0.4523 +vn -0.8759 -0.2321 0.4231 +vn -0.4016 -0.3792 0.8336 +vn -0.6010 0.2820 0.7478 +vn -0.6863 0.2603 0.6792 +vn -0.7841 0.2089 0.5844 +vn -0.9654 0.0384 0.2580 +vn -0.9172 0.0789 0.3906 +vn -0.9934 -0.0160 0.1135 +vn 0.4671 -0.7060 0.5323 +vn 0.4168 -0.8585 0.2989 +vn 0.5745 -0.8001 0.1724 +vn 0.5999 -0.6509 0.4652 +vn 0.4339 -0.8609 0.2657 +vn 0.6928 -0.6798 0.2407 +vn 0.1768 -0.8416 0.5104 +vn 0.4103 -0.7070 0.5760 +vn 0.1940 -0.9354 0.2957 +vn 0.0998 0.4463 0.8893 +vn 0.7078 0.4383 0.5541 +vn 0.6851 0.4513 0.5718 +vn 0.8147 0.4100 0.4100 +vn 0.8364 0.3436 0.4270 +vn 0.1462 -0.0345 0.9887 +vn 0.2926 -0.3211 0.9007 +vn 0.0913 -0.4913 0.8662 +vn -0.0399 -0.1467 0.9884 +vn 0.7233 0.4015 0.5618 +vn 0.8271 0.3118 0.4676 +vn 0.2018 0.6509 0.7318 +vn 0.2225 0.4564 0.8615 +vn 0.2907 0.4551 0.8417 +vn 0.3964 0.6205 0.6767 +vn -0.1494 0.1383 0.9791 +vn -0.0452 0.1869 0.9813 +vn 0.0448 0.0726 0.9964 +vn -0.1112 -0.0131 0.9937 +vn 0.6650 0.3377 0.6662 +vn 0.7943 0.3409 0.5029 +vn 0.6513 0.0052 0.7588 +vn 0.3464 0.3432 0.8731 +vn 0.3349 -0.0117 0.9422 +vn 0.5815 -0.4006 0.7081 +vn 0.2953 -0.4377 0.8492 +vn 0.7091 -0.3568 0.6082 +vn 0.6340 -0.3494 0.6899 +vn 0.5057 -0.1092 0.8558 +vn 0.8092 0.3407 0.4787 +vn 0.5492 -0.4341 0.7141 +vn 0.4595 -0.3469 0.8176 +vn 0.7038 0.0561 0.7082 +vn 0.7989 0.0013 0.6015 +vn 0.3278 -0.5763 0.7486 +vn 0.2631 -0.4422 0.8574 +vn 0.1865 0.8201 -0.5410 +vn 0.2075 0.9334 -0.2927 +vn 0.4820 0.8378 -0.2564 +vn 0.5148 0.7020 -0.4922 +vn 0.4014 0.5387 -0.7408 +vn 0.1823 -0.3103 0.9330 +vn 0.2226 -0.4861 0.8451 +vn 0.4020 0.1551 0.9024 +vn 0.0720 0.2125 0.9745 +vn 0.1129 -0.0561 0.9920 +vn 0.1802 -0.1209 0.9762 +vn 0.1785 0.2256 0.9577 +vn 0.2269 0.8889 0.3981 +vn 0.2055 0.7936 0.5727 +vn 0.4277 0.7246 0.5405 +vn 0.4627 0.7956 0.3910 +vn 0.8123 0.3196 0.4879 +vn 0.2212 0.2986 0.9284 +vn 0.2630 0.3234 0.9090 +vn 0.0164 0.2016 0.9793 +vn 0.5379 -0.5332 0.6530 +vn 0.6203 -0.4264 0.6583 +vn 0.6503 -0.4034 0.6438 +vn 0.5512 -0.5295 0.6448 +vn 0.5862 -0.4778 0.6543 +vn 0.6644 -0.3964 0.6336 +vn 0.5171 -0.5277 0.6739 +vn 0.9046 0.3054 0.2974 +vn 0.8100 0.5208 0.2696 +vn 0.7491 0.5481 0.3720 +vn 0.5509 -0.5149 0.6568 +vn 0.5930 -0.4916 0.6377 +vn -0.9975 0.0244 0.0662 +vn 0.6940 0.0603 0.7175 +vn 0.2530 -0.6987 0.6692 +vn -0.0916 -0.8558 0.5092 +vn -0.0583 0.4919 0.8687 +vn -0.1640 0.3982 0.9025 +vn -0.1058 0.5744 0.8117 +vn -0.0966 0.4259 0.8996 +vn 0.2357 -0.7555 0.6113 +vn 0.2020 
-0.9057 0.3726 +vn 0.2159 -0.8518 0.4773 +vn 0.2782 -0.7604 0.5868 +vn 0.3815 -0.6776 0.6287 +vn 0.2983 -0.7839 0.5445 +vn 0.4596 -0.5176 0.7217 +vn 0.3563 -0.6750 0.6461 +vn 0.4608 -0.3910 0.7967 +vn 0.3565 -0.6018 0.7147 +vn 0.4434 -0.2886 0.8486 +vn 0.3480 -0.5402 0.7662 +vn 0.4086 -0.2049 0.8894 +vn 0.3292 -0.4681 0.8201 +vn 0.3125 -0.1245 0.9417 +vn 0.2709 -0.3834 0.8829 +vn 0.1873 -0.3410 0.9212 +vn 0.1845 -0.0653 0.9807 +vn 0.1749 -0.6207 0.7643 +vn 0.2467 -0.6819 0.6886 +vn 0.2248 -0.6361 0.7381 +vn 0.2679 -0.7234 0.6363 +vn 0.2635 -0.7670 0.5851 +vn 0.2506 -0.8190 0.5162 +vn 0.1812 -0.9015 0.3931 +vn 0.0993 -0.9542 0.2822 +vn 0.3361 -0.6601 0.6718 +vn 0.4745 -0.5212 0.7094 +vn 0.5544 -0.3133 0.7711 +vn 0.5622 -0.1335 0.8161 +vn 0.5419 0.0158 0.8403 +vn 0.4946 0.1078 0.8624 +vn 0.3556 0.1716 0.9187 +vn 0.1301 0.2551 0.9581 +vn 0.0877 -0.9505 0.2982 +vn 0.0559 -0.9555 0.2895 +vn 0.0861 -0.9554 0.2825 +vn 0.0845 -0.9622 0.2589 +vn 0.0636 -0.9705 0.2324 +vn 0.0078 -0.9906 0.1366 +vn -0.0246 -0.9980 0.0581 +vn 0.0654 -0.9554 0.2880 +vn -0.3525 -0.8444 -0.4034 +vn -0.3667 -0.8803 -0.3010 +vn -0.3022 -0.9322 -0.1990 +vn -0.2479 -0.9614 -0.1193 +vn -0.2113 -0.9769 0.0307 +vn -0.1467 -0.9870 -0.0660 +vn 0.1400 -0.0334 0.9896 +vn 0.3019 0.0072 0.9533 +vn 0.2593 0.2262 0.9389 +vn 0.1147 0.2103 0.9709 +vn 0.5043 0.1084 0.8567 +vn 0.4529 0.2689 0.8500 +vn 0.6172 0.1931 0.7627 +vn 0.6478 0.2557 0.7176 +vn 0.6278 0.3369 0.7017 +vn 0.5729 0.2975 0.7637 +vn 0.5517 0.2910 0.7816 +vn 0.5566 0.4420 0.7034 +vn 0.3053 0.1424 0.9416 +vn 0.3405 0.4565 0.8220 +vn 0.1055 0.4206 0.9011 +vn 0.1987 0.4435 0.8740 +vn 0.3802 0.4713 0.7958 +vn 0.5466 0.5742 0.6095 +vn 0.4966 0.5154 0.6984 +vn 0.1510 -0.4633 0.8732 +vn 0.3161 -0.3831 0.8679 +vn 0.5447 -0.2017 0.8140 +vn 0.6620 -0.0200 0.7492 +vn 0.6784 0.1253 0.7239 +vn 0.5655 0.1801 0.8048 +vn 0.2719 -0.0020 0.9623 +vn 0.2749 0.8988 0.3414 +vn 0.2271 0.8503 0.4747 +vn 0.4633 0.7562 0.4621 +vn 0.2772 0.8467 0.4541 +vn 0.1293 0.9903 0.0500 +vn 0.2209 0.9712 0.0890 +vn 0.0226 0.9997 -0.0131 +vn 0.1035 0.7398 0.6648 +vn 0.1246 0.7971 0.5908 +vn 0.0009 0.9970 -0.0771 +vn 0.0113 0.9957 -0.0917 +vn 0.2772 0.9315 0.2353 +vn 0.0315 0.9945 -0.0995 +vn 0.0798 0.9939 -0.0758 +vn 0.0675 0.9971 -0.0362 +vn 0.0760 -0.6731 0.7356 +vn 0.0638 -0.7555 0.6521 +vn 0.0603 -0.8518 0.5204 +vn -0.0678 -0.9780 0.1973 +vn 0.4958 -0.5048 0.7067 +vn -0.1830 0.2713 0.9449 +vn -0.1397 0.5651 0.8131 +vn -0.0845 0.1331 0.9875 +vn -0.0618 0.4844 0.8727 +vn 0.1335 0.0229 0.9908 +vn 0.1106 0.4390 0.8917 +vn 0.3302 -0.0106 0.9438 +vn 0.2545 0.4070 0.8773 +vn 0.5086 0.0062 0.8610 +vn 0.3946 0.3626 0.8443 +vn 0.6680 0.0589 0.7418 +vn 0.5470 0.3197 0.7737 +vn 0.7524 0.0755 0.6544 +vn 0.6468 0.2627 0.7160 +vn 0.7809 0.0292 0.6240 +vn 0.8060 -0.1752 0.5653 +vn 0.8173 -0.2332 0.5270 +vn 0.9147 0.1766 0.3636 +vn 0.8036 -0.2614 0.5346 +vn 0.8877 -0.0977 0.4500 +vn 0.8997 -0.0365 0.4350 +vn 0.9175 0.0838 0.3889 +vn 0.9057 0.0774 0.4168 +vn 0.8782 0.1281 0.4609 +vn 0.8919 0.1648 0.4212 +vn 0.8820 0.0509 0.4685 +vn 0.3072 -0.9329 -0.1882 +vn 0.6417 -0.7565 -0.1263 +vn 0.2007 -0.9796 0.0121 +vn 0.0236 -0.9884 -0.1499 +vn 0.1740 0.4846 0.8572 +vn 0.3417 -0.5846 0.7358 +vn 0.3637 -0.4847 0.7955 +vn 0.5022 -0.7036 0.5027 +vn 0.3023 -0.6664 0.6815 +vn 0.4993 -0.2948 0.8147 +vn 0.9884 0.1356 -0.0690 +vn 0.7823 -0.6070 -0.1399 +vn 0.5521 -0.6534 0.5180 +vn 0.6812 0.0267 0.7316 +vn 0.6660 -0.2120 0.7152 +vn 0.7987 0.0557 0.5992 +vn 0.7405 0.1580 0.6532 +vn 0.7188 0.1343 0.6821 +vn 0.6464 0.2652 0.7154 +vn 0.6722 
0.2608 0.6929 +vn 0.7797 0.0320 0.6253 +vn 0.6468 0.3071 0.6981 +vn 0.6590 0.3041 0.6879 +vn 0.6582 0.2037 0.7247 +vn 0.6575 0.1365 0.7410 +vn 0.9866 -0.1082 0.1218 +vn 0.9886 -0.1251 0.0836 +vn 0.9959 -0.0647 0.0629 +vn 0.9934 -0.0411 0.1071 +vn 0.9740 -0.0397 0.2231 +vn 0.9723 0.0316 0.2316 +vn 0.9553 0.0201 0.2949 +vn 0.9484 0.0746 0.3083 +vn 0.9259 0.0755 0.3702 +vn 0.9170 0.1002 0.3861 +vn 0.8681 0.1340 0.4779 +vn 0.8605 0.1111 0.4972 +vn 0.7054 0.2148 0.6755 +vn 0.7898 0.1866 0.5843 +vn 0.7850 0.0985 0.6116 +vn 0.7109 0.0533 0.7013 +vn 0.6865 -0.4089 0.6013 +vn 0.9246 0.0228 0.3802 +vn 0.2654 0.9376 0.2246 +vn 0.7538 0.2255 0.6172 +vn 0.3800 0.9146 -0.1383 +vn 0.1263 -0.6142 0.7790 +vn 0.2784 -0.5443 0.7913 +vn 0.3527 -0.5200 0.7779 +vn 0.4069 0.2342 0.8829 +vn 0.1839 0.2471 0.9514 +vn 0.0351 0.9046 0.4247 +vn 0.2069 0.9729 -0.1029 +vn 0.1004 0.9928 -0.0646 +vn -0.0524 0.8738 0.4835 +vn 0.0642 0.9968 -0.0472 +vn -0.1423 0.9898 -0.0075 +vn -0.1994 0.9798 -0.0169 +vn -0.1451 0.7660 0.6263 +vn -0.2999 0.9499 0.0880 +vn -0.5302 0.8383 0.1268 +vn -0.1021 -0.9791 0.1760 +vn -0.2309 -0.9536 0.1932 +vn -0.1356 -0.9316 0.3373 +vn -0.0632 -0.9384 0.3396 +vn -0.5416 -0.8406 0.0026 +vn -0.1632 -0.9164 0.3655 +vn -0.6459 -0.7620 -0.0462 +vn -0.1466 -0.8975 0.4159 +vn 0.0447 -0.9336 0.3556 +vn 0.0596 -0.9842 0.1666 +vn 0.2454 -0.9082 0.3390 +vn 0.2771 -0.9376 0.2099 +vn 0.4289 -0.8406 0.3307 +vn 0.4512 -0.8433 0.2919 +vn -0.3629 -0.3084 0.8793 +vn -0.8604 -0.0982 0.5000 +vn -0.4524 -0.8512 -0.2661 +vn -0.5282 -0.8242 -0.2041 +vn -0.5778 -0.8097 -0.1028 +vn 0.2899 -0.8907 -0.3503 +vn 0.4367 -0.8523 -0.2879 +vn 0.6423 -0.7452 -0.1793 +vn 0.0912 -0.6136 0.7843 +vn 0.0001 -0.6244 0.7811 +vn 0.0214 -0.5815 0.8133 +vn 0.1533 -0.5419 0.8263 +vn 0.3166 -0.5163 0.7957 +vn 0.4546 -0.5286 0.7169 +vn 0.5093 -0.5333 0.6754 +vn 0.6636 0.0643 0.7453 +vn 0.5414 -0.0377 0.8399 +vn 0.3902 -0.1308 0.9114 +vn 0.2119 -0.1797 0.9606 +vn 0.0287 -0.1489 0.9884 +vn -0.0610 -0.0555 0.9966 +vn -0.0691 0.0746 0.9948 +vn 0.4210 0.1507 0.8945 +vn 0.2121 0.2514 0.9444 +vn 0.4833 -0.1534 0.8619 +vn 0.3543 -0.4571 0.8158 +vn 0.5710 -0.3703 0.7327 +vn 0.6188 -0.1809 0.7645 +vn 0.5556 -0.4753 0.6822 +vn 0.5974 -0.4491 0.6644 +vn 0.2418 -0.5943 0.7670 +vn 0.2166 -0.5618 0.7984 +vn 0.6091 -0.7630 0.2165 +vn 0.6065 -0.7951 -0.0080 +vn 0.5509 -0.8341 0.0277 +vn 0.4051 -0.9143 -0.0039 +vn 0.4546 -0.4768 0.7523 +vn 0.3840 -0.8360 0.3920 +vn 0.6473 -0.4672 0.6023 +vn 0.2768 -0.7986 0.5344 +vn 0.1831 -0.9238 0.3363 +vn 0.2500 -0.9405 0.2302 +vn 0.4154 -0.7992 0.4344 +vn 0.1289 -0.8112 0.5704 +vn 0.0861 -0.9305 0.3560 +vn 0.0999 -0.9807 0.1682 +vn 0.1404 -0.9864 0.0851 +vn 0.2887 -0.9464 0.1446 +vn 0.1603 -0.9867 0.0268 +vn 0.3210 -0.9443 0.0730 +vn 0.2290 -0.9734 0.0084 +vn 0.4672 -0.3186 0.8247 +vn 0.4366 -0.3445 0.8311 +vn 0.5815 -0.2757 0.7654 +vn 0.6241 -0.3024 0.7205 +vn 0.1835 -0.5659 0.8038 +vn 0.6886 -0.1637 0.7064 +vn 0.5651 -0.2158 0.7963 +vn 0.5430 -0.4971 0.6768 +vn 0.6768 -0.4821 0.5564 +vn 0.3704 -0.5376 0.7575 +vn 0.7658 -0.3870 0.5136 +vn 0.7755 -0.6013 0.1927 +vn 0.2184 0.4731 -0.8535 +vn 0.5172 0.2740 -0.8108 +vn 0.8752 0.1610 -0.4561 +vn 0.8926 0.3541 -0.2790 +vn 0.6321 0.4455 -0.6340 +vn 0.9956 -0.0180 -0.0918 +vn 0.9882 0.1167 0.0988 +vn 0.3107 0.5133 -0.8000 +vn 0.9063 -0.1516 0.3945 +vn 0.9551 -0.2294 0.1876 +vn 0.7142 -0.5600 0.4199 +vn 0.7320 -0.3193 0.6018 +vn 0.4938 -0.8145 0.3045 +vn 0.5270 -0.8277 0.1929 +vn 0.3948 -0.2978 0.8692 +vn 0.1924 -0.3467 0.9180 +vn 0.2673 -0.1754 0.9475 +vn 0.5265 -0.2016 0.8259 +vn 
0.5147 -0.3024 0.8022 +vn 0.3968 -0.3305 0.8563 +vn 0.0128 -0.1850 0.9827 +vn 0.2338 -0.3935 0.8891 +vn 0.3623 -0.3920 0.8456 +vn 0.6870 -0.2599 0.6785 +vn 0.7505 -0.4771 0.4574 +vn 0.7382 -0.3925 0.5486 +vn 0.9658 -0.2561 -0.0415 +vn 0.9179 -0.3257 -0.2269 +vn 0.9139 -0.3567 -0.1940 +vn 0.2477 -0.0331 0.9683 +vn 0.2621 0.1530 0.9528 +vn 0.6105 0.0098 0.7919 +vn 0.5259 -0.0389 0.8497 +vn 0.4327 0.1316 0.8919 +vn 0.4423 -0.0081 0.8968 +vn 0.3882 0.1312 0.9122 +vn 0.8375 -0.4911 -0.2397 +vn 0.9454 -0.3053 -0.1143 +vn 0.8694 -0.4715 -0.1475 +vn 0.4177 -0.3453 -0.8404 +vn 0.4719 -0.2239 -0.8527 +vn -0.0001 -0.2211 -0.9752 +vn 0.6977 -0.3370 -0.6322 +vn 0.7523 -0.2311 -0.6170 +vn 0.8502 -0.3337 -0.4072 +vn 0.9016 -0.2559 -0.3487 +vn 0.8462 -0.4596 0.2697 +vn 0.8693 -0.3592 0.3395 +vn 0.7916 -0.3889 0.4713 +vn 0.7860 -0.4638 0.4087 +vn 0.6424 -0.2894 0.7097 +vn 0.7136 -0.3696 0.5952 +vn 0.9489 -0.3151 0.0189 +vn 0.8998 -0.4356 -0.0254 +vn 0.9272 -0.3592 0.1063 +vn 0.8741 -0.4839 0.0430 +vn 0.8856 -0.4221 0.1935 +vn 0.8365 -0.5350 0.1188 +vn 0.8053 -0.5633 0.1849 +vn 0.7648 -0.5616 0.3158 +vn 0.3532 -0.9352 0.0235 +vn 0.5329 -0.8398 0.1039 +vn 0.1734 -0.3582 0.9174 +vn 0.5147 -0.1041 0.8510 +vn 0.6228 -0.0539 0.7805 +vn 0.5983 -0.1400 0.7890 +vn 0.4860 -0.2252 0.8445 +vn 0.3300 -0.2845 0.9001 +vn 0.2861 -0.4774 0.8308 +vn 0.9423 -0.2999 -0.1488 +vn 0.9084 -0.3574 -0.2171 +vn 0.9069 -0.3953 -0.1460 +vn 0.2109 0.1680 0.9630 +vn 0.1611 0.0636 0.9849 +vn 0.3067 0.0466 0.9507 +vn 0.3768 -0.1395 0.9157 +vn 0.1680 -0.1646 0.9719 +vn 0.5440 -0.1347 0.8282 +vn 0.5710 -0.1781 0.8014 +vn 0.5586 -0.1596 0.8140 +vn -0.0971 0.0948 0.9907 +vn -0.0678 -0.0317 0.9972 +vn 0.6153 -0.2589 0.7446 +vn 0.6057 -0.3150 0.7307 +vn 0.6487 -0.1847 0.7383 +vn 0.5557 -0.1666 0.8145 +vn 0.6035 -0.2075 0.7699 +vn 0.4740 -0.1180 0.8726 +vn 0.3567 -0.0536 0.9327 +vn 0.2545 -0.0061 0.9670 +vn 0.1850 0.0285 0.9823 +vn 0.6125 -0.3252 0.7205 +vn 0.4993 0.2433 0.8316 +vn 0.7611 0.1678 0.6266 +vn 0.5848 0.1074 0.8040 +vn 0.1327 -0.5489 0.8253 +vn 0.9278 -0.0252 -0.3722 +vn 0.9471 0.1208 -0.2972 +vn 0.9927 -0.0118 -0.1204 +vn 0.9778 -0.0726 -0.1965 +vn 0.8554 0.4188 0.3047 +vn 0.8538 0.4728 0.2177 +vn 0.8390 0.1643 0.5188 +vn 0.8787 0.2597 0.4005 +vn 0.3886 0.5337 0.7511 +vn 0.3605 0.4980 0.7887 +vn 0.9977 0.0104 0.0664 +vn 0.4652 -0.0228 0.8849 +vn 0.6828 0.0613 0.7281 +vn 0.9316 0.3501 0.0974 +vn 0.9131 -0.3261 0.2448 +vn 0.5475 0.0023 0.8368 +vn 0.7215 -0.4215 0.5494 +vn 0.5794 0.1547 0.8002 +vn 0.5663 0.2063 0.7979 +vn 0.6068 -0.2966 0.7374 +vn 0.4035 0.1038 0.9091 +vn 0.5201 0.1894 0.8328 +vn 0.5827 -0.0719 0.8095 +vn 0.7399 0.1081 -0.6639 +vn 0.7383 0.3417 -0.5815 +vn 0.4239 0.2653 -0.8660 +vn 0.8927 -0.1489 -0.4254 +vn 0.7169 -0.1097 -0.6885 +vn 0.4246 -0.0348 -0.9047 +vn 0.1844 -0.9829 -0.0007 +vn 0.4228 0.4217 0.8021 +vn 0.9567 -0.1432 -0.2534 +vn 0.9943 0.1032 -0.0258 +vn 0.9992 0.0224 -0.0326 +vn 0.9951 -0.0004 0.0984 +vn 0.9954 -0.0080 0.0953 +vn 0.9859 -0.0635 0.1547 +vn 0.9621 -0.0915 0.2568 +vn 0.8875 -0.3273 -0.3243 +vn 0.8739 -0.3847 -0.2971 +vn 0.8406 -0.2984 -0.4520 +vn 0.7843 -0.6006 -0.1553 +vn 0.8623 -0.4389 -0.2527 +vn 0.9953 -0.0481 0.0845 +vn 0.9844 -0.0314 0.1732 +vn 0.5690 0.7482 0.3411 +vn 0.9239 0.0416 0.3804 +vn 0.9526 0.2752 -0.1294 +vn 0.6196 0.5982 -0.5082 +vn 0.7027 0.2314 0.6728 +vn 0.8788 -0.2507 0.4061 +vn -0.0632 0.6080 0.7914 +vn -0.0395 0.7995 0.5993 +vn -0.0285 0.8626 0.5051 +vn 0.9536 -0.2183 0.2072 +vn 0.9753 -0.2000 0.0933 +vn 0.9841 -0.1747 0.0334 +vn 0.9858 -0.1222 0.1150 +vn 0.9890 -0.0415 
0.1417 +vn 0.9209 -0.1393 -0.3640 +vn 0.9738 -0.0079 0.2274 +vn 0.9844 -0.0433 0.1705 +vn 0.8094 0.3948 0.4347 +vn 0.9642 -0.2603 0.0514 +vn 0.6577 0.3706 0.6558 +vn 0.6544 0.2523 0.7128 +vn -0.3629 0.4739 -0.8023 +vn -0.3208 0.8135 -0.4851 +vn -0.3079 0.8752 -0.3731 +vn -0.3538 0.5691 -0.7423 +vn -0.1162 0.9160 0.3839 +vn 0.5250 0.1996 -0.8274 +vn 0.9272 0.0249 -0.3736 +vn 0.8576 -0.2596 -0.4439 +vn 0.2741 -0.2891 -0.9172 +vn 0.7684 -0.4897 0.4121 +vn 0.0261 -0.7160 -0.6976 +vn 0.7982 -0.5928 -0.1075 +vn 0.7125 -0.6933 0.1083 +vn 0.1775 -0.9566 -0.2311 +vn -0.2983 -0.7005 -0.6483 +vn -0.2160 -0.5911 -0.7771 +vn -0.2127 -0.5826 -0.7844 +vn -0.2842 -0.7022 -0.6528 +vn -0.4657 -0.6672 -0.5814 +vn 0.8694 -0.0270 0.4933 +vn 0.9565 -0.1455 0.2529 +vn 0.9585 -0.0104 0.2848 +vn 0.8611 0.0609 0.5047 +vn 0.6925 0.1125 0.7126 +vn 0.0009 0.6048 0.7964 +vn 0.8469 -0.1224 0.5175 +vn 0.8927 -0.0158 0.4503 +vn 0.4653 -0.6874 0.5577 +vn 0.5539 -0.2402 0.7972 +vn 0.8352 -0.2046 0.5105 +vn 0.8218 0.3729 0.4307 +vn 0.8117 0.4835 0.3276 +vn 0.8155 0.5038 0.2848 +vn 0.8480 0.4884 0.2057 +vn 0.9250 0.3762 0.0530 +vn 0.9950 0.0951 0.0302 +vn 0.9488 -0.2959 0.1103 +vn 0.6949 -0.6582 0.2897 +vn 0.7923 0.0297 0.6094 +vn 0.8250 0.0827 0.5590 +vn 0.7396 -0.3121 0.5964 +vn 0.6128 -0.6144 0.4970 +vn 0.4409 -0.8975 -0.0058 +vn 0.1671 -0.9858 0.0151 +vn 0.0923 -0.5563 -0.8259 +vn 0.8028 -0.4831 -0.3494 +vn 0.5075 -0.3319 0.7952 +vn 0.0803 -0.6150 0.7844 +vn 0.0160 -0.7739 0.6330 +vn 0.4266 -0.6852 0.5904 +vn 0.3057 0.1594 0.9387 +vn 0.4618 0.2175 0.8599 +vn -0.3509 0.2983 -0.8876 +vn -0.3318 0.0311 -0.9428 +vn -0.2772 -0.2547 -0.9264 +vn -0.2451 -0.3971 -0.8844 +vn -0.2522 -0.3788 -0.8904 +vn 0.8371 0.2473 0.4880 +vn 0.4222 0.5139 0.7468 +vn 0.3658 0.0640 0.9285 +vn 0.2876 0.0012 0.9577 +vn 0.2224 0.0412 0.9741 +vn 0.1816 0.0411 0.9825 +vn 0.5142 -0.2340 0.8251 +vn 0.8025 -0.3013 0.5150 +vn 0.8909 0.0014 0.4542 +vn 0.2149 0.5814 -0.7847 +vn 0.1260 0.5081 -0.8520 +vn 0.1666 0.6219 -0.7652 +vn 0.0640 0.5120 -0.8566 +vn 0.9889 0.0253 0.1464 +vn 0.9659 0.0778 0.2469 +vn 0.9537 0.0114 0.3005 +vn 0.5367 0.4038 0.7409 +vn 0.6331 0.3660 0.6820 +vn 0.7184 0.3297 0.6125 +vn 0.7960 0.2693 0.5421 +vn 0.7426 -0.6550 0.1397 +vn 0.0343 0.5484 -0.8355 +vn -0.0000 0.4671 -0.8842 +vn 0.1438 0.6697 -0.7286 +vn 0.4283 0.7829 -0.4512 +vn 0.3898 0.8533 -0.3464 +vn 0.5391 0.8422 0.0055 +vn 0.6360 0.7715 0.0181 +vn 0.6142 0.6836 0.3942 +vn 0.6415 0.6192 0.4529 +vn 0.5740 0.4764 0.6661 +vn 0.4242 0.6606 0.6194 +vn 0.3920 0.5416 0.7436 +vn 0.5339 0.4302 0.7279 +vn 0.4150 0.1984 0.8879 +vn 0.3455 0.2539 0.9034 +vn 0.2469 0.0640 0.9669 +vn 0.1385 0.1675 0.9761 +vn 0.0066 -0.0041 1.0000 +vn -0.0785 0.1337 0.9879 +vn 0.6858 -0.7259 -0.0527 +vn 0.8368 -0.5212 0.1675 +vn 0.7738 -0.6226 0.1167 +vn 0.7431 -0.3122 0.5919 +vn 0.7300 -0.5449 0.4125 +vn -0.1659 -0.9691 0.1825 +vn -0.2956 -0.9553 -0.0008 +vn -0.3525 -0.8937 0.2775 +vn -0.2870 -0.8606 0.4207 +vn 0.0908 0.0056 0.9959 +vn 0.2565 -0.0334 0.9660 +vn -0.0723 -0.9867 0.1459 +vn 0.3929 -0.8536 0.3419 +vn 0.8437 0.3887 0.3703 +vn 0.9275 0.2866 0.2402 +vn 0.5356 -0.6660 0.5192 +vn 0.7503 -0.3420 0.5657 +vn 0.4339 -0.7958 0.4224 +vn 0.7531 -0.5263 0.3947 +vn 0.4252 0.1321 0.8954 +vn 0.3538 0.2027 0.9131 +vn 0.5014 -0.1488 0.8523 +vn 0.8632 0.0787 0.4987 +vn 0.9427 -0.0871 0.3221 +vn 0.1612 -0.7115 0.6840 +vn 0.0609 -0.7493 0.6595 +vn 0.2800 -0.7036 0.6531 +vn 0.3109 -0.7188 0.6219 +vn 0.3693 -0.6570 0.6572 +vn 0.0341 -0.7905 0.6115 +vn 0.1360 -0.8071 0.5745 +vn 0.2589 -0.7557 0.6016 +vn 0.4730 0.1780 
0.8629 +vn 0.1897 0.2548 0.9482 +vn 0.4222 0.3950 0.8159 +vn -0.1601 0.0451 0.9861 +vn -0.1470 0.2125 0.9660 +vn -0.0387 0.3493 0.9362 +vn 0.0232 0.2004 0.9794 +vn 0.5074 0.2617 0.8210 +vn 0.6968 -0.6551 0.2923 +vn 0.4588 -0.2924 0.8390 +vn 0.4505 -0.2870 0.8454 +vn 0.4930 -0.1365 0.8592 +vn 0.5080 -0.2218 0.8323 +vn 0.4169 -0.0597 0.9070 +vn 0.2983 0.0037 0.9545 +vn 0.2174 0.0323 0.9755 +vn 0.4348 0.3370 0.8351 +vn 0.7200 0.2681 0.6401 +vn 0.7266 0.2404 0.6436 +vn 0.2177 -0.4174 0.8823 +vn 0.3965 -0.1616 0.9037 +vn 0.2986 -0.2953 0.9075 +vn 0.5280 -0.0757 0.8459 +vn 0.6748 -0.0692 0.7347 +vn 0.7601 -0.1123 0.6401 +vn 0.6993 0.1924 0.6884 +vn 0.6165 0.1476 0.7734 +vn 0.4945 0.1636 0.8536 +vn 0.6774 -0.3676 0.6372 +vn 0.7766 -0.2439 0.5808 +vn 0.3274 -0.6334 0.7011 +vn 0.7365 -0.1300 0.6638 +vn 0.1407 -0.2816 0.9492 +vn 0.2634 -0.2092 0.9417 +vn 0.4531 -0.1498 0.8788 +vn 0.2450 -0.3996 0.8833 +vn 0.0670 -0.4252 0.9026 +vn 0.1737 0.3815 0.9079 +vn 0.7462 -0.1796 0.6410 +vn 0.6894 -0.7044 0.1690 +vn 0.6512 0.6522 0.3880 +vn 0.6567 0.7066 0.2636 +vn 0.4715 0.8486 0.2399 +vn 0.6794 0.7196 0.1438 +vn 0.4643 0.8765 0.1268 +vn 0.7046 0.7096 -0.0037 +vn 0.4679 0.8834 -0.0265 +vn 0.7171 0.6707 -0.1897 +vn 0.7231 0.5454 -0.4239 +vn 0.6881 -0.2879 -0.6660 +vn 0.4117 -0.2675 -0.8712 +vn 0.2571 -0.0853 0.9626 +vn 0.3022 0.2444 0.9214 +vn 0.3750 0.3155 0.8717 +vn 0.3405 0.3026 0.8902 +vn 0.3867 0.0294 0.9217 +vn 0.4392 0.2763 0.8548 +vn -0.0973 -0.4891 0.8668 +vn 0.0050 -0.4951 0.8688 +vn -0.0660 -0.4178 0.9061 +vn 0.9592 -0.1282 0.2522 +vn 0.9695 -0.1351 0.2044 +vn 0.9774 -0.1442 0.1548 +vn 0.9670 -0.1052 0.2322 +vn 0.9325 0.0152 0.3608 +vn 0.9550 -0.0503 0.2922 +vn 0.8837 0.0970 0.4579 +vn 0.9470 -0.0939 0.3071 +vn 0.9836 -0.1410 0.1125 +vn 0.8108 0.1857 0.5551 +vn 0.9792 -0.1248 0.1598 +vn 0.9825 -0.1489 0.1115 +vn 0.9878 -0.1507 0.0384 +vn 0.9755 -0.2090 0.0690 +vn 0.9792 -0.1984 -0.0421 +vn 0.1758 -0.4159 0.8922 +vn 0.5789 -0.3197 0.7501 +vn 0.3925 -0.3256 0.8602 +vn 0.6144 -0.3951 0.6829 +vn 0.5447 -0.1737 0.8205 +vn 0.7040 -0.0598 0.7077 +vn 0.1541 -0.2073 0.9661 +vn 0.3591 -0.2199 0.9070 +vn -0.0608 -0.1138 0.9916 +vn 0.4181 0.2094 0.8839 +vn 0.6217 -0.3446 0.7034 +vn 0.6371 -0.3923 0.6634 +vn 0.9516 0.2428 0.1886 +vn 0.8514 0.4977 0.1653 +vn 0.8118 -0.0315 0.5832 +vn 0.8583 -0.1123 0.5007 +vn 0.9283 0.0290 0.3706 +vn 0.9478 0.0567 0.3137 +vn 0.7199 -0.6914 0.0616 +vn 0.8815 -0.4721 0.0049 +vn 0.9606 -0.2774 -0.0197 +vn 0.7027 -0.2506 0.6659 +vn 0.8146 -0.1126 0.5691 +vn 0.6297 -0.2008 0.7505 +vn 0.6998 -0.1194 0.7043 +vn 0.5722 -0.1137 0.8122 +vn 0.3773 -0.0326 0.9255 +vn 0.5119 0.7177 -0.4721 +vn 0.7307 0.6823 -0.0234 +vn 0.5321 0.0932 0.8415 +vn 0.6575 0.2438 0.7130 +vn 0.7005 0.3066 0.6444 +vn 0.7702 0.5128 0.3792 +vn -0.0000 0.4730 -0.8811 +vn 0.9185 0.0434 0.3931 +vn 0.9189 0.0227 0.3939 +vn 0.9584 -0.1513 -0.2421 +vn 0.0647 -0.1637 0.9844 +vn -0.0000 0.8205 -0.5716 +vn 0.9131 0.3972 -0.0918 +vn 0.9183 0.3148 -0.2399 +vn -0.0000 0.0210 -0.9998 +vn 0.5325 -0.0176 -0.8462 +vn 0.8193 -0.0571 -0.5705 +vn 0.8023 0.5816 -0.1344 +vn 0.5979 0.6184 -0.5100 +vn 0.8303 0.1220 0.5438 +vn 0.9194 0.3193 0.2295 +vn 0.9463 0.0835 0.3122 +vn 0.8192 -0.0366 0.5723 +vn 0.7583 0.0802 0.6470 +vn 0.6970 -0.0341 0.7163 +vn 0.2464 0.9424 0.2263 +vn 0.2525 0.9606 0.1159 +vn 0.2374 0.9707 -0.0360 +vn 0.6733 -0.7378 -0.0483 +vn 0.8075 -0.5829 -0.0901 +vn 0.7554 -0.6553 0.0039 +vn 0.0230 0.0362 0.9991 +vn 0.8909 0.4534 0.0285 +vn 0.9814 0.1765 0.0751 +vn 0.7797 -0.6026 -0.1699 +vn 0.9554 -0.2539 0.1510 +vn 0.9154 
-0.3146 0.2511 +vn 0.8169 0.1313 0.5616 +vn 0.8813 0.0355 0.4712 +vn 0.4841 0.1300 0.8653 +vn 0.4749 0.2246 0.8509 +vn 0.4684 0.2828 0.8371 +vn 0.3649 0.1787 0.9137 +vn 0.3988 0.1223 0.9088 +vn 0.6467 0.3184 0.6931 +vn 0.6086 0.1945 0.7693 +vn 0.7411 0.1888 0.6443 +vn 0.7615 0.2878 0.5808 +vn 0.5455 0.3082 0.7794 +vn 0.4919 0.1299 0.8609 +vn 0.8287 0.1330 0.5436 +vn 0.8406 0.2363 0.4875 +vn 0.7233 -0.0561 0.6882 +vn 0.6127 0.0584 0.7882 +vn 0.7912 -0.0655 0.6081 +vn 0.8527 -0.0246 0.5218 +vn 0.7890 0.0724 0.6101 +vn 0.7142 0.0320 0.6992 +vn 0.8771 -0.2923 0.3812 +vn 0.9247 -0.2468 0.2898 +vn 0.9645 -0.1875 0.1861 +vn 0.9243 -0.0399 0.3796 +vn 0.4055 -0.1181 0.9064 +vn 0.5486 -0.0957 0.8306 +vn 0.3794 -0.2116 0.9007 +vn 0.2114 -0.3252 0.9217 +vn 0.3134 -0.3818 0.8695 +vn 0.4188 -0.2844 0.8624 +vn 0.3095 -0.3257 0.8934 +vn 0.4910 -0.0375 0.8704 +vn 0.4820 -0.1401 0.8649 +vn 0.4624 0.0030 0.8867 +vn 0.4525 -0.2186 0.8646 +vn 0.8433 -0.1249 0.5228 +vn 0.8970 -0.0879 0.4333 +vn 0.8799 -0.1521 0.4502 +vn 0.9269 -0.1245 0.3541 +vn 0.9045 -0.1635 0.3939 +vn 0.9185 -0.1653 0.3591 +vn 0.9564 -0.1313 0.2610 +vn 0.9452 -0.1392 0.2954 +vn 0.9249 -0.1879 0.3304 +vn 0.9640 -0.1378 0.2272 +vn 0.7909 -0.1303 0.5979 +vn 0.8295 -0.1651 0.5335 +vn 0.8688 -0.2023 0.4520 +vn 0.8539 -0.1864 0.4859 +vn 0.8759 -0.2322 0.4230 +vn 0.4017 -0.3791 0.8336 +vn 0.4679 -0.8611 0.1988 +vn 0.6985 0.2709 0.6624 +vn 0.5315 0.2017 0.8227 +vn 0.3630 -0.8862 0.2880 +vn 0.6326 -0.7742 -0.0214 +vn 0.8825 0.3962 0.2536 +vn 0.8206 0.3375 0.4612 +vn 0.5639 -0.8214 0.0852 +vn 0.6659 -0.6971 -0.2658 +vn 0.8828 0.4442 -0.1529 +vn 0.8983 0.4366 0.0490 +vn 0.6592 -0.7369 -0.1496 +vn 0.5582 -0.6842 -0.4694 +vn 0.7629 0.3608 -0.5365 +vn 0.8420 0.4090 -0.3519 +vn 0.6311 -0.6826 -0.3685 +vn 0.3638 -0.6912 -0.6244 +vn 0.4780 0.3115 -0.8213 +vn 0.6364 0.3280 -0.6981 +vn 0.4758 -0.6843 -0.5526 +vn 0.1685 -0.6825 -0.7112 +vn 0.1852 0.3325 -0.9248 +vn 0.3171 0.3150 -0.8946 +vn 0.2602 -0.6879 -0.6775 +vn -0.0006 -0.6711 -0.7414 +vn -0.0000 0.3494 -0.9370 +vn 0.0827 0.3451 -0.9349 +vn 0.0819 -0.6736 -0.7346 +vn 0.0008 -0.9336 -0.3583 +vn 0.0560 -0.9975 0.0433 +vn 0.0260 -0.9982 0.0547 +vn -0.0035 -0.9348 -0.3551 +vn 0.1033 -0.8754 0.4723 +vn 0.0646 -0.8658 0.4961 +vn -0.0009 -0.8696 0.4937 +vn -0.0000 -0.9979 0.0654 +vn -0.0000 -0.9419 -0.3359 +vn -0.2622 -0.8897 0.3737 +vn -0.3716 0.1230 0.9202 +vn -0.5311 0.2025 0.8227 +vn -0.3512 -0.8882 0.2963 +vn -0.6982 0.2714 0.6624 +vn -0.4662 -0.8615 0.2011 +vn -0.8203 0.3380 0.4614 +vn -0.5590 -0.8237 0.0950 +vn -0.8823 0.3965 0.2537 +vn -0.6319 -0.7748 -0.0193 +vn -0.8982 0.4367 0.0494 +vn -0.6589 -0.7402 -0.1342 +vn -0.8827 0.4444 -0.1528 +vn -0.6662 -0.6983 -0.2618 +vn -0.8419 0.4092 -0.3518 +vn -0.6320 -0.6836 -0.3652 +vn -0.7627 0.3609 -0.5367 +vn -0.5649 -0.6856 -0.4591 +vn -0.6364 0.3282 -0.6981 +vn -0.4767 -0.6848 -0.5512 +vn -0.4780 0.3116 -0.8213 +vn -0.3716 -0.6916 -0.6194 +vn -0.3171 0.3151 -0.8945 +vn -0.2628 -0.6881 -0.6763 +vn -0.1852 0.3324 -0.9248 +vn -0.1704 -0.6827 -0.7106 +vn -0.0829 0.3455 -0.9347 +vn -0.0827 -0.6737 -0.7343 +vn 0.1394 0.0033 0.9902 +vn 0.0684 -0.0233 0.9974 +vn -0.0001 0.0046 1.0000 +vn -0.2312 0.9421 -0.2429 +vn -0.1597 0.9509 -0.2651 +vn -0.0663 0.9930 0.0978 +vn -0.0875 0.9912 0.0993 +vn -0.0989 0.9559 -0.2766 +vn -0.0454 0.9941 0.0988 +vn 0.0191 0.8887 0.4580 +vn 0.0531 0.8970 0.4388 +vn 0.0746 0.9128 0.4015 +vn -0.3923 0.9101 -0.1336 +vn -0.3111 0.9289 -0.2011 +vn -0.1054 0.9887 0.1068 +vn -0.1247 0.9851 0.1181 +vn 0.1054 0.9260 0.3624 +vn 0.1279 0.9426 0.3085 
+vn -0.0320 -0.9314 -0.3627 +vn -0.0633 -0.9355 -0.3477 +vn 0.1438 -0.9886 -0.0446 +vn 0.1142 -0.9934 -0.0121 +vn -0.1035 -0.9416 -0.3204 +vn 0.1772 -0.9811 -0.0775 +vn 0.2836 -0.8868 0.3649 +vn -0.4969 0.8669 0.0396 +vn -0.4565 0.8883 -0.0511 +vn -0.1452 0.9813 0.1260 +vn -0.1638 0.9774 0.1340 +vn 0.1351 0.9592 0.2485 +vn 0.1342 0.9724 0.1909 +vn -0.1338 -0.9515 -0.2769 +vn 0.2109 -0.9715 -0.1086 +vn -0.1451 -0.9642 -0.2219 +vn 0.2390 -0.9614 -0.1367 +vn -0.5071 0.8353 0.2123 +vn -0.5120 0.8496 0.1262 +vn -0.1809 0.9726 0.1460 +vn -0.1910 0.9678 0.1637 +vn 0.1149 0.9838 0.1374 +vn 0.0970 0.9909 0.0930 +vn -0.1358 -0.9774 -0.1620 +vn 0.2622 -0.9507 -0.1656 +vn -0.1152 -0.9879 -0.1041 +vn 0.2815 -0.9394 -0.1956 +vn -0.4466 0.8119 0.3760 +vn -0.4854 0.8229 0.2954 +vn -0.1895 0.9647 0.1828 +vn -0.1798 0.9632 0.1998 +vn 0.0895 0.9945 0.0544 +vn 0.0782 0.9969 0.0018 +vn -0.0926 -0.9943 -0.0534 +vn 0.2852 -0.9319 -0.2241 +vn -0.0627 -0.9979 -0.0158 +vn 0.2725 -0.9265 -0.2595 +vn -0.2991 0.8021 0.5169 +vn -0.3855 0.8033 0.4540 +vn -0.1624 0.9627 0.2165 +vn -0.1312 0.9652 0.2263 +vn 0.0584 0.9973 -0.0445 +vn 0.0362 0.9954 -0.0886 +vn -0.0244 -0.9997 -0.0006 +vn 0.2482 -0.9200 -0.3033 +vn 0.0046 -1.0000 -0.0054 +vn 0.2051 -0.9155 -0.3460 +vn -0.1233 0.8164 0.5641 +vn -0.2050 0.8082 0.5521 +vn -0.0918 0.9704 0.2234 +vn -0.0557 0.9758 0.2115 +vn 0.0189 0.9915 -0.1286 +vn 0.0071 0.9876 -0.1569 +vn 0.0169 -0.9998 -0.0126 +vn 0.1517 -0.9139 -0.3764 +vn 0.0194 -0.9997 -0.0141 +vn 0.1018 -0.9138 -0.3932 +vn -0.0562 0.8218 0.5670 +vn -0.0256 0.9800 0.1974 +vn -0.0001 0.9815 0.1917 +vn 0.0027 0.9829 -0.1839 +vn -0.0004 0.9810 -0.1941 +vn 0.0138 -0.9998 -0.0110 +vn 0.0524 -0.9141 -0.4021 +vn -0.0000 -1.0000 -0.0085 +vn -0.0000 -0.9142 -0.4052 +vn 0.0319 -0.9314 -0.3626 +vn 0.0106 -0.9306 -0.3660 +vn -0.0853 -0.9962 0.0162 +vn -0.1143 -0.9934 -0.0122 +vn -0.0009 -0.9335 -0.3585 +vn -0.0560 -0.9975 0.0431 +vn -0.1078 -0.8751 0.4717 +vn -0.2026 -0.8798 0.4300 +vn 0.1035 -0.9415 -0.3206 +vn 0.0633 -0.9355 -0.3477 +vn -0.1438 -0.9886 -0.0446 +vn -0.1772 -0.9811 -0.0776 +vn 0.2311 0.9421 -0.2429 +vn 0.3109 0.9289 -0.2013 +vn 0.1055 0.9887 0.1064 +vn 0.0873 0.9913 0.0985 +vn 0.3923 0.9101 -0.1339 +vn 0.1250 0.9851 0.1177 +vn -0.1211 0.9411 0.3156 +vn -0.1051 0.9262 0.3620 +vn -0.0743 0.9130 0.4011 +vn 0.1450 -0.9642 -0.2220 +vn 0.1339 -0.9515 -0.2771 +vn -0.2108 -0.9715 -0.1088 +vn -0.2388 -0.9614 -0.1369 +vn 0.4578 0.8877 -0.0482 +vn 0.1455 0.9813 0.1258 +vn 0.4967 0.8670 0.0395 +vn 0.1639 0.9773 0.1339 +vn -0.1324 0.9714 0.1970 +vn -0.1360 0.9599 0.2452 +vn 0.1151 -0.9879 -0.1041 +vn 0.1357 -0.9775 -0.1614 +vn -0.2620 -0.9508 -0.1655 +vn -0.2811 -0.9395 -0.1956 +vn 0.5117 0.8499 0.1261 +vn 0.1811 0.9726 0.1459 +vn 0.5068 0.8355 0.2124 +vn 0.1912 0.9678 0.1639 +vn -0.0992 0.9895 0.1054 +vn -0.1142 0.9839 0.1374 +vn 0.0626 -0.9979 -0.0155 +vn 0.0920 -0.9944 -0.0522 +vn -0.2848 -0.9321 -0.2238 +vn -0.2723 -0.9267 -0.2591 +vn 0.4850 0.8231 0.2954 +vn 0.1897 0.9646 0.1831 +vn 0.4463 0.8120 0.3761 +vn 0.1798 0.9632 0.2000 +vn -0.0866 0.9961 0.0141 +vn -0.0878 0.9948 0.0523 +vn -0.0046 -1.0000 -0.0050 +vn 0.0246 -0.9997 -0.0005 +vn -0.2481 -0.9202 -0.3029 +vn -0.2052 -0.9156 -0.3458 +vn 0.3851 0.8035 0.4540 +vn 0.1623 0.9627 0.2165 +vn 0.2989 0.8023 0.5167 +vn 0.1311 0.9652 0.2261 +vn -0.0472 0.9955 -0.0822 +vn -0.0584 0.9973 -0.0442 +vn -0.0194 -0.9997 -0.0139 +vn -0.0142 -0.9998 -0.0132 +vn -0.1517 -0.9140 -0.3762 +vn -0.1018 -0.9139 -0.3929 +vn 0.2049 0.8084 0.5518 +vn 0.0917 0.9705 0.2232 +vn 0.1232 0.8165 0.5640 
+vn 0.0557 0.9758 0.2112 +vn -0.0160 0.9877 -0.1553 +vn -0.0191 0.9915 -0.1287 +vn -0.0096 -0.9999 -0.0113 +vn -0.0524 -0.9141 -0.4020 +vn 0.0561 0.8219 0.5669 +vn 0.0255 0.9800 0.1974 +vn -0.0028 0.9829 -0.1841 +vn 0.3718 0.1221 0.9202 +vn 0.2418 0.0502 0.9690 +vn 0.1794 -0.8822 0.4354 +vn -0.1394 0.0039 0.9902 +vn -0.2417 0.0511 0.9690 +vn -0.0207 0.8893 0.4568 +vn -0.0684 -0.0230 0.9974 +vn -0.0040 0.8838 0.4678 +vn 0.0990 0.9559 -0.2766 +vn 0.0454 0.9942 0.0979 +vn 0.0224 0.9951 0.0965 +vn 0.0467 0.9574 -0.2849 +vn -0.0006 0.8893 0.4574 +vn 0.0002 0.9956 0.0940 +vn -0.0000 0.9577 -0.2877 +vn -0.0108 -0.9306 -0.3660 +vn 0.0852 -0.9962 0.0162 +vn 0.1597 0.9509 -0.2652 +vn 0.0662 0.9930 0.0974 +vn -0.0530 0.8973 0.4382 +vn -0.0561 -0.8664 0.4961 +vn 0.0070 0.8831 0.4691 +vn -0.0260 -0.9982 0.0544 +vn 0.0033 -0.9348 -0.3552 +vn -0.0222 0.9950 0.0975 +vn -0.0465 0.9574 -0.2849 +vn -0.1607 -0.9478 -0.2755 +vn 0.0249 -0.9525 -0.3036 +vn 0.0240 -0.9700 0.2421 +vn -0.1893 -0.9582 0.2147 +vn 0.0536 0.5304 0.8461 +vn -0.0825 -0.3758 0.9230 +vn 0.0257 -0.4070 0.9131 +vn 0.0120 0.5242 0.8515 +vn -0.9631 0.1178 0.2421 +vn -0.8991 0.1225 -0.4203 +vn -0.9028 -0.1006 -0.4182 +vn -0.9631 -0.1172 0.2424 +vn 0.1481 0.2852 0.9470 +vn 0.0772 0.4235 0.9026 +vn 0.0327 -0.5327 0.8457 +vn -0.0469 -0.5592 0.8277 +vn 0.2846 0.8850 0.3685 +vn 0.2136 0.9111 -0.3526 +vn 0.0119 0.9535 -0.3010 +vn -0.0117 0.9547 0.2972 +vn 0.2856 0.0287 0.9579 +vn -0.5725 -0.1048 0.8132 +vn -0.5820 0.1354 0.8019 +vn 0.2821 -0.0071 0.9594 +vn 0.9452 0.1540 -0.2878 +vn 0.8747 0.1440 0.4628 +vn 0.8843 -0.1469 0.4432 +vn 0.9465 -0.1696 -0.2745 +vn 0.4799 -0.1168 0.8695 +vn -0.3753 0.0084 0.9269 +vn -0.3767 -0.0330 0.9258 +vn 0.4818 0.0781 0.8728 +vn 0.1115 0.9859 0.1245 +vn -0.0132 0.9999 0.0040 +vn 0.0184 0.6217 -0.7830 +vn 0.1534 0.5762 -0.8028 +vn 0.2241 -0.9070 -0.3566 +vn 0.0414 -0.4077 -0.9122 +vn 0.0800 -0.2654 -0.9608 +vn 0.2754 -0.8930 0.3559 +vn -0.0760 0.5549 0.8284 +vn 0.1218 -0.3019 0.9455 +vn 0.3298 -0.0659 -0.9418 +vn 0.3097 0.0481 -0.9496 +vn 0.0608 -0.5404 0.8392 +vn -0.0767 0.3403 0.9372 +vn 0.9605 -0.1906 0.2027 +vn 0.9514 0.2057 0.2293 +vn 0.6284 0.1091 -0.7702 +vn 0.6289 -0.0911 -0.7721 +vn -0.5630 -0.0925 -0.8213 +vn -0.5688 0.0798 -0.8186 +vn -0.9843 0.1766 -0.0034 +vn -0.9811 -0.1915 0.0259 +vn -0.1596 0.9504 -0.2669 +vn 0.0281 0.4160 -0.9089 +vn -0.0198 0.5093 -0.8604 +vn -0.4306 0.0922 -0.8978 +vn -0.4348 -0.0602 -0.8985 +vn -0.1894 0.9490 0.2522 +vn 0.1566 -0.5537 -0.8178 +vn 0.0162 -0.5985 -0.8010 +vn 0.0040 -1.0000 -0.0055 +vn 0.3912 -0.9108 0.1323 +vn -0.2297 -0.9586 -0.1683 +vn -0.0198 -0.5039 -0.8635 +vn -0.1661 0.6103 -0.7745 +vn -0.1754 0.9843 -0.0180 +vn 0.0628 0.2770 -0.9588 +vn -0.1658 -0.5910 -0.7895 +vn 0.1674 -0.1306 0.9772 +vn 0.3383 -0.1336 0.9315 +vn 0.6904 -0.1894 0.6982 +vn 0.3803 -0.3512 0.8556 +vn -0.6841 0.1400 0.7158 +vn -0.7149 0.0548 0.6971 +vn -0.1755 -0.0325 0.9839 +vn -0.2228 0.0302 0.9744 +vn -0.9258 0.1853 0.3295 +vn -0.9454 0.1020 0.3096 +vn -0.8804 0.2221 0.4191 +vn -0.9550 0.2832 0.0880 +vn -0.9732 0.2127 -0.0871 +vn -0.8273 0.2730 0.4910 +vn -0.9180 0.3743 0.1313 +vn -0.7883 0.3375 0.5144 +vn -0.8498 0.4307 0.3038 +vn -0.6459 0.1772 0.7426 +vn -0.6359 0.2126 0.7419 +vn -0.6294 0.2567 0.7335 +vn -0.7663 0.3877 0.5123 +vn -0.6560 0.1610 0.7374 +vn -0.3614 0.0700 0.9298 +vn -0.4000 0.1084 0.9101 +vn -0.2974 0.0477 0.9536 +vn -0.9868 0.1267 -0.1012 +vn 0.4825 -0.7823 0.3939 +vn 0.2040 -0.9703 0.1301 +vn 0.1006 -0.9739 0.2034 +vn -0.0096 -0.1292 -0.9916 +vn 0.4410 -0.1666 -0.8819 +vn 
-0.1855 -0.9497 0.2522 +vn -0.0410 -0.9483 0.3146 +vn 0.9392 -0.2095 -0.2721 +vn -0.3184 -0.9448 0.0776 +vn -0.1629 -0.9845 0.0650 +vn 0.7877 -0.1974 -0.5835 +vn -0.1454 -0.0061 -0.9894 +vn 0.2684 -0.0441 -0.9623 +vn 0.2624 -0.0277 -0.9646 +vn -0.2295 0.0119 -0.9732 +vn -0.9499 0.0058 0.3125 +vn -0.9955 0.0412 -0.0854 +vn -0.9752 -0.0042 -0.2211 +vn -0.9498 -0.0566 0.3077 +vn -0.8948 0.4034 -0.1913 +vn -0.8342 0.4061 -0.3732 +vn -0.8647 0.3170 -0.3897 +vn -0.9211 0.3257 -0.2132 +vn -0.8922 -0.1536 -0.4248 +vn -0.5332 -0.1299 -0.8360 +vn -0.2430 -0.0199 -0.9698 +vn -0.4967 0.1415 -0.8563 +vn -0.5123 0.3041 -0.8032 +vn -0.2409 0.2489 -0.9381 +vn -0.4353 -0.8732 -0.2193 +vn -0.1633 -0.9797 -0.1162 +vn -0.0857 -0.9830 -0.1623 +vn -0.0598 -0.9605 -0.2719 +vn -0.7168 -0.0458 0.6958 +vn -0.1492 -0.1154 0.9820 +vn -0.1858 -0.1722 0.9674 +vn -0.7066 -0.1189 0.6975 +vn 0.8239 -0.2323 0.5169 +vn 0.4887 -0.2193 0.8445 +vn 0.7956 -0.2411 0.5558 +vn 0.7031 -0.2000 0.6824 +vn 0.4184 -0.1728 0.8917 +vn 0.9294 -0.1903 0.3162 +vn 0.9335 -0.1972 0.2995 +vn 0.8176 -0.4508 0.3582 +vn 0.0070 -0.6628 0.7488 +vn 0.1421 -0.9677 0.2083 +vn -0.0000 -0.9815 0.1915 +vn -0.0000 -0.6931 0.7208 +vn 0.0033 -0.2131 0.9770 +vn -0.0098 -0.2762 0.9610 +vn -0.0000 -0.2899 0.9571 +vn 0.9272 -0.3734 -0.0287 +vn 0.4859 -0.8702 -0.0816 +vn 0.9838 -0.1774 -0.0265 +vn -0.0300 -0.0942 0.9951 +vn -0.1073 0.0060 0.9942 +vn -0.0000 -0.0029 1.0000 +vn -0.0000 0.0608 0.9981 +vn -0.1089 0.0743 0.9913 +vn -0.0923 -0.0589 0.9940 +vn -0.1517 -0.0132 0.9883 +vn 0.9859 -0.1495 -0.0746 +vn 0.9737 -0.1993 0.1105 +vn 0.6625 -0.1016 -0.7421 +vn 0.9025 -0.1467 -0.4049 +vn 0.9022 -0.1161 -0.4154 +vn 0.6892 -0.0693 -0.7212 +vn -0.0361 0.1251 -0.9915 +vn -0.0151 0.0516 -0.9986 +vn -0.5224 0.0884 -0.8481 +vn 0.9779 -0.1951 -0.0749 +vn -0.7070 -0.1594 0.6890 +vn 0.1006 -0.2517 0.9626 +vn -0.9511 -0.0881 0.2960 +vn -0.9549 -0.0697 0.2885 +vn -0.7205 -0.1474 0.6776 +vn -0.9744 0.0030 -0.2249 +vn -0.9823 0.0334 -0.1845 +vn -0.9084 0.0480 -0.4153 +vn 0.8034 -0.0419 -0.5940 +vn 0.9523 -0.0511 -0.3008 +vn -0.7427 -0.1337 0.6561 +vn -0.9612 -0.0296 0.2741 +vn -0.7095 -0.0490 0.7030 +vn -0.9358 0.0772 0.3438 +vn -0.9337 0.0652 0.3520 +vn -0.7090 0.0206 0.7049 +vn 0.1703 -0.2722 0.9470 +vn 0.1694 -0.2500 0.9533 +vn 0.0928 -0.0579 0.9940 +vn 0.1203 -0.1840 0.9755 +vn -0.9876 0.0617 -0.1447 +vn -0.9909 0.1136 -0.0728 +vn -0.9843 0.1599 -0.0742 +vn -0.2445 -0.2913 -0.9249 +vn -0.1871 -0.1823 -0.9653 +vn -0.6315 0.0564 -0.7733 +vn -0.5663 -0.0196 -0.8240 +vn -0.7026 0.4215 -0.5734 +vn -0.7302 0.3225 -0.6023 +vn 0.9856 -0.1127 -0.1257 +vn 0.9774 -0.1533 -0.1452 +vn 0.8039 -0.2304 0.5483 +vn 0.8274 -0.2285 0.5130 +vn -0.0000 -0.1030 0.9947 +vn -0.0241 -0.0888 0.9958 +vn -0.5699 0.0353 -0.8210 +vn -0.7165 0.0216 -0.6972 +vn -0.8842 0.0416 -0.4653 +vn 0.9975 -0.0708 0.0075 +vn 0.9986 -0.0441 0.0293 +vn 0.9631 -0.0273 0.2676 +vn 0.7963 -0.0854 0.5988 +vn 0.9990 -0.0442 -0.0039 +vn 0.9970 -0.0601 -0.0477 +vn 0.8078 -0.1442 0.5715 +vn 0.8101 -0.0255 0.5857 +vn 0.8804 0.0105 0.4742 +vn 0.0981 0.0475 0.9940 +vn 0.6054 0.0672 0.7931 +vn 0.8373 -0.3861 -0.3871 +vn 0.5427 -0.4536 -0.7069 +vn 0.1510 -0.3752 -0.9146 +vn -0.6080 0.0643 -0.7913 +vn 0.4873 -0.0638 -0.8709 +vn -0.2163 0.1655 -0.9622 +vn 0.0002 0.0701 -0.9975 +vn -0.0000 -0.6416 -0.7671 +vn -0.1800 -0.4745 -0.8617 +vn -0.0915 0.0237 -0.9955 +vn -0.7380 0.0496 -0.6730 +vn -0.1177 -0.0046 -0.9930 +vn 0.6127 -0.0742 0.7868 +vn 0.1170 -0.0344 0.9925 +vn -0.7087 0.2290 -0.6673 +vn -0.0512 0.2078 -0.9768 +vn -0.0443 0.1318 
-0.9903 +vn -0.7116 0.1514 -0.6861 +vn -0.7299 0.1048 -0.6754 +vn -0.0768 0.0869 -0.9933 +vn -0.0514 0.1740 -0.9834 +vn -0.7314 0.1660 -0.6614 +vn 0.6604 -0.0276 -0.7504 +vn 0.6165 -0.0851 -0.7827 +vn -0.0055 -0.1147 -0.9934 +vn -0.0000 -0.1527 -0.9883 +vn -0.0000 -0.5596 -0.8287 +vn 0.0804 -0.5291 -0.8448 +vn -0.0557 -0.0153 0.9983 +vn -0.1331 -0.0021 0.9911 +vn -0.0000 -0.8469 -0.5318 +vn 0.3749 -0.7773 -0.5052 +vn -0.8790 0.4396 -0.1847 +vn -0.8235 0.4318 -0.3680 +vn 0.5637 -0.7758 -0.2836 +vn -0.0000 -0.9462 -0.3235 +vn -0.0314 0.2158 -0.9759 +vn -0.0000 0.1990 -0.9800 +vn -0.0000 0.3667 -0.9303 +vn -0.0291 0.3731 -0.9273 +vn -0.0000 -0.9920 -0.1265 +vn 0.8901 -0.1488 -0.4308 +vn -0.4176 0.4066 -0.8126 +vn -0.5043 0.4479 -0.7383 +vn -0.6414 0.3636 -0.6756 +vn -0.0130 0.4218 -0.9066 +vn -0.0000 0.4242 -0.9056 +vn -0.2407 0.3908 -0.8885 +vn -0.2641 0.4322 -0.8622 +vn -0.5114 0.4220 -0.7485 +vn -0.8656 0.1970 -0.4603 +vn 0.7488 0.0273 -0.6623 +vn -0.0000 -0.0227 0.9997 +vn 0.0192 0.9908 0.1338 +vn -0.0000 0.9858 0.1682 +vn -0.0000 0.9992 -0.0411 +vn 0.0368 0.9977 -0.0565 +vn -0.3814 0.1425 0.9134 +vn -0.2101 0.0415 0.9768 +vn -0.1933 0.0421 0.9802 +vn 0.4420 0.0317 -0.8965 +vn 0.1185 0.1085 0.9870 +vn -0.6963 0.4401 -0.5669 +vn -0.8386 0.4083 -0.3606 +vn -0.7718 0.3885 -0.5033 +vn 0.7146 0.0497 -0.6978 +vn -0.7512 0.1171 -0.6497 +vn -0.8791 0.1166 -0.4621 +vn -0.9953 0.0758 -0.0606 +vn -0.9980 0.0394 -0.0488 +vn -0.9633 0.0498 0.2639 +vn -0.9433 0.0580 0.3269 +vn -0.9866 -0.1584 -0.0392 +vn -0.9693 -0.1155 0.2170 +vn -0.4999 -0.8035 0.3231 +vn -0.0464 -0.8979 0.4378 +vn -0.7507 0.0670 0.6572 +vn -0.8868 0.0770 0.4557 +vn 0.3146 -0.9490 0.0198 +vn 0.2264 -0.9717 -0.0676 +vn 0.5596 -0.8285 0.0189 +vn 0.4060 -0.8828 -0.2363 +vn -0.0279 -0.9996 0.0068 +vn -0.5312 -0.8472 0.0071 +vn -0.0000 0.7262 0.6875 +vn -0.0404 0.7642 0.6437 +vn 0.9642 -0.1550 0.2151 +vn 0.9794 -0.2019 -0.0035 +vn -0.2209 -0.9706 -0.0960 +vn 0.8618 -0.1156 0.4939 +vn -0.0932 -0.9954 -0.0238 +vn -0.3633 -0.9315 -0.0194 +vn 0.6941 0.0860 -0.7147 +vn 0.6893 0.0435 -0.7231 +vn -0.5477 0.1223 0.8277 +vn 0.0202 -0.9939 -0.1084 +vn 0.0488 -0.9987 -0.0173 +vn -0.8898 -0.0586 0.4527 +vn -0.5854 -0.0210 0.8105 +vn -0.2472 -0.0307 0.9685 +vn -0.0983 -0.0601 0.9933 +vn -0.2593 0.0403 0.9650 +vn -0.4231 0.0298 0.9056 +vn -0.4403 0.1056 0.8916 +vn -0.6706 0.1479 0.7270 +vn -0.6695 0.2140 0.7113 +vn -0.9348 0.3341 -0.1205 +vn -0.9469 0.3072 0.0952 +vn -0.9145 0.4045 0.0108 +vn -0.8017 0.3100 -0.5111 +vn -0.8871 0.3528 -0.2977 +vn -0.8819 0.4053 -0.2407 +vn -0.8011 0.3677 -0.4723 +vn -0.4696 0.2841 -0.8359 +vn -0.6628 0.2931 -0.6891 +vn -0.6621 0.3339 -0.6710 +vn -0.4714 0.3134 -0.8244 +vn -0.2222 0.2913 -0.9305 +vn -0.0000 0.2770 -0.9609 +vn -0.2674 0.1070 0.9576 +vn -0.4594 0.1540 0.8748 +vn -0.6814 0.2283 0.6954 +vn -0.8919 0.3557 0.2793 +vn -0.8973 0.3506 0.2681 +vn -0.9158 0.4012 0.0181 +vn -0.8878 0.4113 -0.2065 +vn -0.8075 0.3912 -0.4415 +vn -0.6621 0.3707 -0.6513 +vn -0.4717 0.3436 -0.8120 +vn -0.2242 0.3248 -0.9188 +vn -0.0000 0.3199 -0.9475 +vn 0.0375 0.9954 0.0878 +vn 0.0779 0.9944 -0.0721 +vn 0.0513 0.9963 0.0695 +vn 0.1241 0.9900 -0.0666 +vn 0.0829 0.9925 0.0898 +vn 0.2013 0.9792 -0.0247 +vn 0.1221 0.9734 0.1938 +vn 0.1604 0.9683 0.1914 +vn 0.2837 0.9449 0.1631 +vn 0.2353 0.9510 0.2007 +vn 0.0889 0.9891 0.1177 +vn 0.0831 0.9841 0.1567 +vn 0.1831 0.9660 0.1828 +vn 0.1529 0.9769 0.1491 +vn 0.0715 0.9910 0.1130 +vn 0.1084 0.9889 0.1017 +vn 0.1347 0.9828 0.1263 +vn 0.0883 0.9853 0.1460 +vn 0.0322 0.9923 0.1192 +vn 0.0426 0.9838 
0.1740 +vn -0.0000 0.9934 0.1149 +vn -0.0000 0.9834 0.1813 +vn -0.1089 0.8032 0.5857 +vn -0.2285 0.8042 0.5486 +vn -0.3737 0.7914 0.4838 +vn -0.4148 0.8174 0.3997 +vn -0.8195 0.2895 0.4947 +vn -0.4167 0.8645 0.2811 +vn -0.4300 0.8928 0.1344 +vn -0.4163 0.9092 -0.0013 +vn -0.3104 0.9456 -0.0979 +vn -0.1329 0.9838 -0.1204 +vn -0.0678 0.9892 -0.1297 +vn -0.0360 0.9733 -0.2266 +vn -0.0001 0.9553 -0.2956 +vn -0.8299 0.2266 0.5098 +vn -0.8139 0.2926 0.5019 +vn 0.1356 0.9806 0.1416 +vn 0.2700 0.9607 0.0643 +vn -0.9094 0.3000 0.2880 +vn -0.4855 -0.1239 -0.8654 +vn -0.6917 -0.0750 -0.7183 +vn -0.8233 -0.0252 -0.5671 +vn -0.9084 -0.0510 -0.4149 +vn -0.8891 0.4577 0.0018 +vn -0.8690 0.4901 0.0683 +vn -0.8802 0.4557 -0.1328 +vn -0.8265 0.4762 0.3004 +vn -0.0000 -0.0747 0.9972 +vn -0.0729 0.0267 0.9970 +vn -0.0000 0.0359 0.9994 +vn -0.0000 -0.2220 0.9750 +vn -0.0848 -0.2116 0.9737 +vn -0.2461 -0.2229 0.9433 +vn -0.4314 -0.1905 0.8818 +vn -0.7347 0.2131 -0.6440 +vn -0.6971 -0.0890 0.7114 +vn -0.8733 -0.0994 0.4770 +vn -0.9657 -0.1159 0.2325 +vn -0.1332 0.4291 -0.8934 +vn -0.0000 0.4568 -0.8896 +vn -0.9719 -0.1099 -0.2082 +vn -0.9948 -0.1016 -0.0097 +vn 0.6421 0.2098 -0.7374 +vn -0.0452 0.2430 -0.9690 +vn -0.0408 0.2360 -0.9709 +vn 0.6771 0.0780 -0.7317 +vn 0.9947 -0.0512 -0.0891 +vn 0.8043 -0.2337 0.5464 +vn 0.1463 -0.3179 0.9368 +vn -0.7090 -0.1702 0.6844 +vn -0.9865 0.1136 -0.1182 +vn -0.9923 0.0925 -0.0819 +vn -0.9458 -0.0359 0.3227 +vn -0.9589 -0.0410 0.2809 +vn -0.7255 0.2491 -0.6415 +vn -0.7406 0.1937 -0.6434 +vn -0.7390 0.1861 -0.6475 +vn -0.0830 0.2313 -0.9693 +vn 0.6781 0.1339 -0.7227 +vn 0.9950 -0.0811 -0.0591 +vn 0.7972 -0.2394 0.5543 +vn -0.9524 -0.0138 0.3044 +vn -0.9929 0.0813 -0.0865 +vn -0.1674 -0.1306 0.9772 +vn -0.3803 -0.3512 0.8556 +vn -0.6904 -0.1894 0.6982 +vn -0.3383 -0.1336 0.9315 +vn 0.6841 0.1400 0.7158 +vn 0.2228 0.0302 0.9744 +vn 0.1755 -0.0325 0.9839 +vn 0.7149 0.0548 0.6971 +vn 0.9258 0.1853 0.3295 +vn 0.9454 0.1020 0.3096 +vn 0.8804 0.2221 0.4191 +vn 0.9732 0.2127 -0.0871 +vn 0.9550 0.2832 0.0880 +vn 0.9180 0.3743 0.1313 +vn 0.8273 0.2730 0.4910 +vn 0.8498 0.4307 0.3038 +vn 0.7883 0.3375 0.5144 +vn 0.6359 0.2126 0.7419 +vn 0.6459 0.1772 0.7426 +vn 0.6294 0.2567 0.7335 +vn 0.7663 0.3877 0.5123 +vn 0.6560 0.1610 0.7374 +vn 0.4000 0.1084 0.9101 +vn 0.3614 0.0700 0.9298 +vn 0.2974 0.0477 0.9536 +vn 0.9868 0.1267 -0.1012 +vn -0.4820 -0.7825 0.3943 +vn -0.1112 -0.9739 0.1978 +vn -0.2042 -0.9703 0.1295 +vn 0.0139 -0.1293 -0.9915 +vn 0.0533 -0.9481 0.3134 +vn 0.1737 -0.9503 0.2585 +vn -0.4582 -0.1656 -0.8733 +vn -0.9376 -0.2098 -0.2771 +vn -0.7776 -0.1963 -0.5974 +vn 0.1642 -0.9845 0.0622 +vn 0.3185 -0.9448 0.0764 +vn 0.1454 -0.0061 -0.9894 +vn 0.2295 0.0119 -0.9732 +vn -0.2624 -0.0277 -0.9646 +vn -0.2684 -0.0441 -0.9623 +vn 0.9499 0.0058 0.3125 +vn 0.9498 -0.0566 0.3077 +vn 0.9752 -0.0042 -0.2211 +vn 0.9955 0.0412 -0.0853 +vn 0.8948 0.4033 -0.1914 +vn 0.9211 0.3257 -0.2132 +vn 0.8647 0.3170 -0.3897 +vn 0.8342 0.4061 -0.3732 +vn 0.5557 -0.1290 -0.8213 +vn 0.8833 -0.1553 -0.4423 +vn 0.2430 -0.0199 -0.9698 +vn 0.2409 0.2488 -0.9381 +vn 0.5123 0.3040 -0.8032 +vn 0.4967 0.1415 -0.8563 +vn 0.4348 -0.8735 -0.2190 +vn 0.0608 -0.9604 -0.2720 +vn 0.0898 -0.9827 -0.1617 +vn 0.1603 -0.9799 -0.1189 +vn 0.1492 -0.1154 0.9820 +vn 0.7168 -0.0458 0.6957 +vn 0.1858 -0.1734 0.9672 +vn 0.7066 -0.1189 0.6975 +vn -0.8239 -0.2323 0.5169 +vn -0.7956 -0.2412 0.5558 +vn -0.4886 -0.2193 0.8445 +vn -0.7031 -0.2000 0.6824 +vn -0.4184 -0.1728 0.8917 +vn -0.9294 -0.1903 0.3162 +vn -0.9335 -0.1972 0.2995 +vn 
-0.8176 -0.4508 0.3581 +vn -0.0070 -0.6628 0.7488 +vn -0.1421 -0.9677 0.2083 +vn -0.0033 -0.2131 0.9770 +vn 0.0099 -0.2762 0.9610 +vn -0.9272 -0.3735 -0.0287 +vn -0.4859 -0.8702 -0.0816 +vn -0.9838 -0.1774 -0.0265 +vn 0.0300 -0.0942 0.9951 +vn 0.1073 0.0061 0.9942 +vn 0.1089 0.0743 0.9913 +vn 0.0923 -0.0589 0.9940 +vn 0.1517 -0.0132 0.9883 +vn -0.9859 -0.1495 -0.0746 +vn -0.9737 -0.1993 0.1105 +vn -0.6625 -0.1016 -0.7421 +vn -0.6892 -0.0693 -0.7213 +vn -0.9022 -0.1161 -0.4154 +vn -0.9025 -0.1467 -0.4049 +vn 0.0361 0.1251 -0.9915 +vn 0.5225 0.0885 -0.8481 +vn 0.0152 0.0517 -0.9985 +vn -0.9779 -0.1951 -0.0749 +vn -0.1006 -0.2517 0.9626 +vn 0.7070 -0.1594 0.6890 +vn 0.9511 -0.0881 0.2960 +vn 0.7205 -0.1474 0.6777 +vn 0.9549 -0.0697 0.2885 +vn 0.9823 0.0334 -0.1845 +vn 0.9744 0.0030 -0.2249 +vn 0.9085 0.0466 -0.4152 +vn -0.8031 -0.0440 -0.5943 +vn -0.9521 -0.0518 -0.3013 +vn 0.7427 -0.1338 0.6561 +vn 0.9612 -0.0296 0.2741 +vn 0.7095 -0.0490 0.7030 +vn 0.7090 0.0206 0.7049 +vn 0.9337 0.0652 0.3520 +vn 0.9358 0.0773 0.3438 +vn -0.1694 -0.2500 0.9533 +vn -0.1703 -0.2723 0.9470 +vn -0.0928 -0.0579 0.9940 +vn -0.1203 -0.1840 0.9755 +vn 0.9876 0.0617 -0.1447 +vn 0.9909 0.1136 -0.0728 +vn 0.9843 0.1599 -0.0742 +vn 0.2445 -0.2913 -0.9249 +vn 0.5663 -0.0196 -0.8240 +vn 0.6315 0.0564 -0.7733 +vn 0.1871 -0.1823 -0.9653 +vn 0.7302 0.3225 -0.6023 +vn 0.7026 0.4214 -0.5734 +vn -0.9856 -0.1127 -0.1257 +vn -0.8274 -0.2285 0.5130 +vn -0.8039 -0.2304 0.5483 +vn -0.9774 -0.1533 -0.1452 +vn 0.0241 -0.0889 0.9957 +vn 0.5699 0.0353 -0.8210 +vn 0.8842 0.0416 -0.4653 +vn 0.7165 0.0216 -0.6972 +vn -0.9975 -0.0706 0.0076 +vn -0.9632 -0.0272 0.2676 +vn -0.9986 -0.0448 0.0290 +vn -0.7964 -0.0847 0.5988 +vn -0.8078 -0.1442 0.5715 +vn -0.9971 -0.0601 -0.0477 +vn -0.9990 -0.0442 -0.0039 +vn -0.8101 -0.0254 0.5857 +vn -0.8804 0.0101 0.4741 +vn -0.6054 0.0672 0.7931 +vn -0.0982 0.0475 0.9940 +vn -0.8373 -0.3861 -0.3871 +vn -0.5427 -0.4536 -0.7069 +vn -0.1510 -0.3752 -0.9146 +vn 0.6080 0.0643 -0.7913 +vn -0.4873 -0.0638 -0.8709 +vn 0.2164 0.1655 -0.9622 +vn 0.1800 -0.4745 -0.8617 +vn 0.0915 0.0237 -0.9955 +vn 0.1177 -0.0046 -0.9930 +vn 0.7380 0.0496 -0.6730 +vn -0.1159 -0.0342 0.9927 +vn -0.6009 -0.0733 0.7960 +vn 0.7087 0.2290 -0.6673 +vn 0.7116 0.1514 -0.6861 +vn 0.0443 0.1318 -0.9903 +vn 0.0512 0.2078 -0.9768 +vn 0.7299 0.1048 -0.6754 +vn 0.7314 0.1660 -0.6614 +vn 0.0514 0.1740 -0.9834 +vn 0.0768 0.0869 -0.9933 +vn -0.6604 -0.0276 -0.7504 +vn -0.6165 -0.0851 -0.7827 +vn 0.0055 -0.1147 -0.9934 +vn -0.0804 -0.5291 -0.8447 +vn 0.1331 -0.0021 0.9911 +vn 0.0557 -0.0153 0.9983 +vn -0.3749 -0.7773 -0.5052 +vn 0.8790 0.4395 -0.1847 +vn 0.8235 0.4318 -0.3680 +vn -0.5637 -0.7758 -0.2836 +vn 0.0314 0.2158 -0.9759 +vn 0.0291 0.3731 -0.9273 +vn -0.8901 -0.1488 -0.4308 +vn 0.4175 0.4070 -0.8124 +vn 0.6414 0.3636 -0.6756 +vn 0.5043 0.4479 -0.7383 +vn 0.0130 0.4218 -0.9066 +vn 0.2407 0.3907 -0.8885 +vn 0.2641 0.4322 -0.8622 +vn 0.5114 0.4220 -0.7486 +vn 0.8656 0.1970 -0.4603 +vn -0.7486 0.0261 -0.6625 +vn -0.0188 0.9908 0.1338 +vn -0.0369 0.9977 -0.0565 +vn 0.3814 0.1424 0.9134 +vn 0.1933 0.0421 0.9802 +vn 0.2101 0.0415 0.9768 +vn -0.4409 0.0296 -0.8971 +vn -0.1185 0.1093 0.9869 +vn 0.6964 0.4401 -0.5669 +vn 0.7717 0.3887 -0.5033 +vn 0.8386 0.4083 -0.3606 +vn -0.7145 0.0497 -0.6978 +vn 0.7510 0.1180 -0.6496 +vn 0.8791 0.1166 -0.4621 +vn 0.9954 0.0747 -0.0608 +vn 0.9433 0.0572 0.3269 +vn 0.9632 0.0501 0.2639 +vn 0.9980 0.0394 -0.0486 +vn 0.9697 -0.1154 0.2155 +vn 0.9870 -0.1585 -0.0267 +vn 0.0472 -0.8983 0.4369 +vn 0.4988 -0.8040 0.3236 +vn 
0.7507 0.0669 0.6572 +vn 0.8868 0.0774 0.4557 +vn -0.2274 -0.9717 -0.0632 +vn -0.3144 -0.9491 0.0172 +vn -0.5568 -0.8304 0.0193 +vn -0.4074 -0.8816 -0.2382 +vn 0.5313 -0.8472 0.0056 +vn 0.0288 -0.9996 0.0075 +vn 0.0404 0.7642 0.6437 +vn -0.9794 -0.2018 -0.0111 +vn -0.9647 -0.1549 0.2128 +vn 0.2238 -0.9719 -0.0733 +vn 0.0916 -0.9953 -0.0318 +vn -0.8797 -0.1165 0.4611 +vn 0.3636 -0.9315 0.0023 +vn -0.6941 0.0860 -0.7147 +vn -0.6893 0.0435 -0.7231 +vn 0.5476 0.1223 0.8277 +vn -0.0512 -0.9987 -0.0063 +vn -0.0078 -0.9932 -0.1158 +vn 0.8789 -0.0579 0.4735 +vn 0.5844 -0.0209 0.8112 +vn 0.2472 -0.0308 0.9685 +vn 0.2593 0.0403 0.9650 +vn 0.0983 -0.0601 0.9933 +vn 0.4231 0.0298 0.9056 +vn 0.4403 0.1056 0.8916 +vn 0.6706 0.1479 0.7270 +vn 0.6695 0.2140 0.7113 +vn 0.9348 0.3341 -0.1205 +vn 0.9145 0.4045 0.0108 +vn 0.9469 0.3072 0.0952 +vn 0.8017 0.3100 -0.5111 +vn 0.8011 0.3677 -0.4723 +vn 0.8819 0.4053 -0.2407 +vn 0.8871 0.3528 -0.2977 +vn 0.4696 0.2841 -0.8359 +vn 0.4714 0.3134 -0.8244 +vn 0.6621 0.3339 -0.6709 +vn 0.6628 0.2931 -0.6891 +vn 0.2222 0.2913 -0.9305 +vn 0.2674 0.1070 0.9576 +vn 0.4594 0.1540 0.8748 +vn 0.6814 0.2283 0.6954 +vn 0.9158 0.4012 0.0181 +vn 0.8974 0.3506 0.2680 +vn 0.8919 0.3557 0.2794 +vn 0.8075 0.3912 -0.4415 +vn 0.8878 0.4113 -0.2065 +vn 0.4717 0.3436 -0.8120 +vn 0.6622 0.3707 -0.6512 +vn 0.2242 0.3248 -0.9188 +vn -0.0375 0.9954 0.0878 +vn -0.0778 0.9944 -0.0721 +vn -0.0513 0.9963 0.0695 +vn -0.1241 0.9900 -0.0666 +vn -0.0821 0.9926 0.0890 +vn -0.2013 0.9792 -0.0247 +vn -0.1221 0.9734 0.1938 +vn -0.2353 0.9510 0.2007 +vn -0.2837 0.9449 0.1631 +vn -0.1604 0.9683 0.1913 +vn -0.0888 0.9891 0.1177 +vn -0.1529 0.9769 0.1491 +vn -0.1831 0.9660 0.1828 +vn -0.0836 0.9844 0.1551 +vn -0.0715 0.9910 0.1130 +vn -0.0883 0.9854 0.1458 +vn -0.1347 0.9828 0.1263 +vn -0.1084 0.9889 0.1016 +vn -0.0323 0.9923 0.1192 +vn -0.0426 0.9838 0.1740 +vn 0.1089 0.8032 0.5857 +vn 0.2285 0.8042 0.5487 +vn 0.3737 0.7914 0.4837 +vn 0.8195 0.2895 0.4946 +vn 0.4149 0.8174 0.3997 +vn 0.4167 0.8645 0.2811 +vn 0.4300 0.8928 0.1344 +vn 0.4164 0.9092 -0.0013 +vn 0.3105 0.9455 -0.0979 +vn 0.1329 0.9838 -0.1204 +vn 0.0679 0.9892 -0.1297 +vn 0.0360 0.9733 -0.2266 +vn 0.8299 0.2266 0.5098 +vn 0.8139 0.2926 0.5019 +vn -0.1356 0.9806 0.1416 +vn -0.2701 0.9607 0.0645 +vn 0.9091 0.2997 0.2892 +vn 0.4855 -0.1240 -0.8654 +vn 0.6917 -0.0750 -0.7183 +vn 0.8233 -0.0252 -0.5671 +vn 0.9084 -0.0510 -0.4149 +vn 0.8891 0.4577 0.0017 +vn 0.8802 0.4557 -0.1328 +vn 0.8690 0.4901 0.0683 +vn 0.8265 0.4762 0.3004 +vn 0.0729 0.0267 0.9970 +vn 0.0848 -0.2116 0.9737 +vn 0.2461 -0.2229 0.9433 +vn 0.4314 -0.1905 0.8818 +vn 0.7347 0.2131 -0.6440 +vn 0.6970 -0.0891 0.7115 +vn 0.8733 -0.0994 0.4770 +vn 0.9657 -0.1159 0.2325 +vn 0.1332 0.4291 -0.8934 +vn 0.9719 -0.1099 -0.2082 +vn 0.9948 -0.1016 -0.0097 +vn -0.6421 0.2098 -0.7374 +vn -0.6771 0.0780 -0.7317 +vn 0.0408 0.2360 -0.9709 +vn 0.0452 0.2430 -0.9690 +vn -0.9947 -0.0512 -0.0891 +vn -0.8043 -0.2337 0.5464 +vn -0.1463 -0.3179 0.9368 +vn 0.7089 -0.1702 0.6844 +vn 0.9865 0.1136 -0.1182 +vn 0.9589 -0.0410 0.2809 +vn 0.9458 -0.0359 0.3227 +vn 0.9923 0.0925 -0.0819 +vn 0.7255 0.2491 -0.6415 +vn 0.7406 0.1937 -0.6434 +vn 0.7389 0.1860 -0.6476 +vn 0.0830 0.2313 -0.9693 +vn -0.9950 -0.0811 -0.0591 +vn -0.6781 0.1339 -0.7227 +vn -0.7972 -0.2394 0.5543 +vn 0.9524 -0.0138 0.3044 +vn 0.9929 0.0813 -0.0865 +vn -0.7011 0.3440 -0.6245 +vn -0.5300 0.7292 -0.4328 +vn 0.2296 0.7211 -0.6537 +vn 0.1921 0.3459 -0.9184 +vn -0.7225 0.2378 -0.6492 +vn -0.1342 -0.1936 0.9719 +vn -0.1860 -0.2724 0.9440 +vn -0.7267 
0.1506 -0.6702 +vn -0.1802 -0.1514 0.9719 +vn -0.1536 -0.5282 0.8351 +vn -0.7033 -0.2344 -0.6711 +vn 0.6260 -0.5754 0.5263 +vn 0.7210 -0.2120 0.6597 +vn 0.0367 -0.2321 -0.9720 +vn 0.1759 0.1125 -0.9780 +vn 0.7262 -0.2218 0.6508 +vn 0.7058 -0.3195 0.6323 +vn -0.1624 0.1148 0.9800 +vn 0.6585 0.0514 0.7508 +vn 0.1339 0.2095 -0.9686 +vn -0.8475 0.3183 0.4248 +vn -0.6348 0.6957 0.3362 +vn 0.2269 0.2841 -0.9316 +vn 0.1221 -0.1172 -0.9856 +vn -0.8542 0.2039 0.4783 +vn -0.1162 0.0618 0.9913 +vn -0.1147 0.1487 0.9822 +vn -0.8805 0.1158 0.4596 +vn -0.1156 -0.0143 0.9932 +vn -0.8436 -0.2672 0.4657 +vn -0.1906 -0.3822 0.9042 +vn 0.8768 -0.1428 -0.4591 +vn 0.7290 -0.5051 -0.4620 +vn 0.8596 -0.1823 -0.4774 +vn 0.8671 -0.2527 -0.4292 +vn 0.1118 -0.0685 -0.9914 +vn 0.1074 -0.0298 -0.9938 +vn 0.0414 -0.3662 -0.9296 +vn 0.8948 0.1377 -0.4247 +vn -0.0032 0.5446 0.8387 +vn -0.6263 0.4696 -0.6223 +vn -0.3857 0.7724 -0.5047 +vn 0.9924 0.0348 -0.1183 +vn 0.9000 -0.3628 -0.2414 +vn -0.7162 0.3498 -0.6039 +vn -0.9173 0.2853 0.2779 +vn -0.8924 0.3800 0.2434 +vn -0.6622 0.2558 -0.7043 +vn -0.9484 0.2410 0.2063 +vn -0.7646 -0.0192 -0.6442 +vn -0.9869 -0.1162 0.1117 +vn 0.6411 -0.3213 0.6969 +vn 0.5154 -0.6477 0.5611 +vn 0.7146 -0.3491 0.6062 +vn 0.6616 -0.3962 0.6366 +vn 0.9267 -0.2551 -0.2761 +vn 0.9438 -0.2570 -0.2080 +vn 0.7314 -0.6383 -0.2401 +vn 0.7639 -0.0157 0.6451 +vn -0.6055 0.7502 0.2656 +vn 0.1624 0.1148 0.9800 +vn 0.1860 -0.2724 0.9440 +vn 0.7012 0.3440 -0.6245 +vn 0.5302 0.7290 -0.4329 +vn -0.6585 0.0514 0.7508 +vn -0.2297 0.7211 -0.6537 +vn -0.7059 -0.3186 0.6326 +vn -0.7262 -0.2218 0.6507 +vn 0.1342 -0.1936 0.9719 +vn -0.7210 -0.2120 0.6597 +vn 0.1802 -0.1514 0.9719 +vn -0.6259 -0.5755 0.5264 +vn 0.1536 -0.5281 0.8352 +vn 0.7035 -0.2341 -0.6711 +vn 0.7267 0.1506 -0.6703 +vn -0.0357 -0.2330 -0.9718 +vn -0.1760 0.1125 -0.9779 +vn -0.1337 0.2075 -0.9691 +vn 0.7221 0.2404 -0.6487 +vn -0.1917 0.3433 -0.9194 +vn 0.6337 0.6965 0.3366 +vn 0.0032 0.5446 0.8387 +vn 0.1147 0.1487 0.9822 +vn 0.8475 0.3183 0.4248 +vn -0.8948 0.1377 -0.4247 +vn -0.8671 -0.2527 -0.4293 +vn -0.2269 0.2841 -0.9316 +vn -0.1247 -0.1091 -0.9862 +vn 0.8534 0.2076 0.4781 +vn -0.1117 -0.0685 -0.9914 +vn -0.8594 -0.1832 -0.4774 +vn 0.1162 0.0618 0.9913 +vn 0.1159 -0.0158 0.9931 +vn 0.8804 0.1167 0.4596 +vn -0.8760 -0.1471 -0.4593 +vn -0.7304 -0.5040 -0.4610 +vn 0.1905 -0.3822 0.9042 +vn -0.1074 -0.0303 -0.9938 +vn -0.0444 -0.3694 -0.9282 +vn 0.8439 -0.2668 0.4654 +vn 0.3855 0.7746 -0.5014 +vn 0.6055 0.7502 0.2656 +vn 0.8924 0.3800 0.2434 +vn 0.6263 0.4696 -0.6223 +vn -0.7639 -0.0158 0.6451 +vn -0.6616 -0.3961 0.6367 +vn -0.9924 0.0348 -0.1182 +vn -0.9008 -0.3612 -0.2409 +vn 0.7170 0.3472 -0.6044 +vn -0.9267 -0.2551 -0.2760 +vn -0.7129 -0.3539 0.6054 +vn 0.9173 0.2853 0.2779 +vn 0.9485 0.2405 0.2061 +vn 0.6620 0.2569 -0.7041 +vn -0.6396 -0.3263 0.6960 +vn -0.5144 -0.6448 0.5654 +vn 0.9869 -0.1161 0.1117 +vn -0.9433 -0.2584 -0.2082 +vn -0.7310 -0.6390 -0.2394 +vn 0.7627 -0.0169 -0.6465 +vn 0.3655 0.3771 -0.8510 +vn 0.0140 0.3788 -0.9254 +vn 0.0204 0.5150 -0.8569 +vn 0.4414 0.4956 -0.7481 +vn 0.2954 0.1661 -0.9408 +vn 0.0068 0.1586 -0.9873 +vn 0.2350 -0.0428 -0.9711 +vn -0.0055 -0.0190 -0.9998 +vn 0.0495 0.5898 -0.8061 +vn 0.0888 0.9836 0.1568 +vn 0.2997 0.9435 0.1413 +vn 0.5318 0.5370 -0.6548 +vn -0.4779 0.5282 -0.7019 +vn -0.2193 0.9678 0.1232 +vn -0.3585 0.3601 -0.8613 +vn -0.4172 0.4729 -0.7761 +vn -0.3083 0.1588 -0.9380 +vn -0.2605 -0.0480 -0.9643 +vn 0.4558 0.8605 0.2277 +vn 0.7848 0.4651 -0.4096 +vn 0.6898 0.4581 -0.5606 +vn 0.6017 0.3676 
-0.7091 +vn 0.5133 0.1611 -0.8430 +vn 0.4146 -0.0677 -0.9075 +vn -0.5100 -0.0865 -0.8558 +vn -0.5936 0.1398 -0.7925 +vn -0.6458 0.3256 -0.6906 +vn -0.7056 0.4073 -0.5798 +vn -0.7661 0.4336 -0.4744 +vn -0.4772 0.8649 0.1553 +vn -0.6479 0.5202 0.5564 +vn -0.5979 0.5545 0.5789 +vn -0.9357 0.3528 -0.0052 +vn -0.9374 0.3469 0.0314 +vn -0.9729 0.2306 0.0159 +vn -0.6960 0.6163 0.3684 +vn -0.5756 0.6336 0.5170 +vn -0.9340 0.3531 -0.0546 +vn -0.5815 0.6952 0.4225 +vn -0.9261 0.3533 -0.1322 +vn -0.5842 0.7600 0.2849 +vn -0.8864 0.3732 -0.2739 +vn -0.9734 0.2183 0.0689 +vn -0.0379 0.9505 0.3083 +vn -0.9747 0.2069 0.0848 +vn 0.3477 0.8810 0.3209 +vn 0.3991 0.8124 0.4251 +vn -0.9797 0.1855 0.0759 +vn -0.9756 0.2015 0.0877 +vn 0.2119 0.7434 0.6344 +vn -0.9805 0.1742 0.0914 +vn -0.0646 0.6079 0.7914 +vn -0.9886 0.1177 0.0942 +vn -0.2553 0.4756 0.8418 +vn -0.9947 0.0836 0.0598 +vn -0.4268 0.4010 0.8106 +vn -0.9982 0.0103 -0.0596 +vn -0.6583 0.3865 0.6459 +vn -0.9851 -0.0192 -0.1709 +vn -0.7336 0.4120 0.5405 +vn -0.9641 -0.0113 -0.2653 +vn -0.9040 0.3735 0.2082 +vn -0.8633 0.4732 0.1752 +vn -0.9700 0.0254 -0.2416 +vn -0.7904 0.5953 0.1446 +vn -0.9872 0.0704 -0.1428 +vn -0.9944 0.1013 -0.0308 +vn -0.7370 0.6658 0.1165 +vn -0.9813 0.1491 0.1215 +vn -0.5923 0.7913 0.1517 +vn -0.9230 -0.1908 -0.3341 +vn -0.9870 -0.0925 -0.1318 +vn -0.7587 -0.3723 -0.5345 +vn -0.8411 -0.3917 -0.3729 +vn -0.6895 -0.4292 -0.5834 +vn -0.7427 -0.5047 -0.4400 +vn -0.8520 0.3582 -0.3817 +vn -0.9162 0.3315 -0.2253 +vn -0.9430 0.3075 -0.1274 +vn -0.9633 0.2625 -0.0557 +vn 0.5791 0.4384 0.6873 +vn 0.4119 0.7197 0.5589 +vn 0.9229 0.3677 0.1141 +vn 0.8924 0.4188 0.1681 +vn 0.9070 0.3749 0.1916 +vn 0.1179 0.9379 0.3263 +vn 0.9068 0.4099 0.0983 +vn 0.9267 0.3757 0.0039 +vn 0.6091 0.4743 0.6357 +vn 0.5767 0.7430 0.3396 +vn 0.8814 0.4253 -0.2057 +vn 0.6536 0.6266 0.4245 +vn 0.9102 0.4091 -0.0652 +vn 0.6489 0.5405 0.5355 +vn 0.9130 0.4068 0.0301 +vn 0.9156 0.3932 -0.0846 +vn 0.8911 0.4081 -0.1987 +vn 0.8265 0.4297 -0.3636 +vn -0.7197 -0.1519 -0.6775 +vn -0.7826 0.0847 -0.6167 +vn -0.8196 0.2858 -0.4966 +vn 0.7051 0.1428 -0.6946 +vn 0.6265 -0.1290 -0.7687 +vn 0.7686 0.3490 -0.5361 +vn 0.8516 0.0936 -0.5158 +vn 0.7819 -0.2600 -0.5665 +vn 0.8782 0.3187 -0.3567 +vn -0.8399 -0.2861 -0.4613 +vn -0.9038 0.0115 -0.4279 +vn -0.9168 0.2393 -0.3196 +vn -0.8598 -0.4037 -0.3127 +vn -0.9586 -0.0651 -0.2773 +vn -0.9640 0.1845 -0.1915 +vn 0.9291 0.0231 -0.3691 +vn 0.8222 -0.3672 -0.4349 +vn 0.9362 0.2812 -0.2108 +vn 0.9676 -0.0782 -0.2399 +vn 0.8133 -0.4548 -0.3630 +vn 0.9670 0.2354 -0.0970 +vn -0.8441 -0.4658 -0.2655 +vn -0.9708 -0.1593 -0.1796 +vn -0.9891 0.1119 -0.0958 +vn 0.8563 -0.4163 -0.3057 +vn 0.9721 -0.1513 -0.1792 +vn 0.9842 0.1752 -0.0242 +vn -0.8681 -0.4264 -0.2540 +vn -0.9631 -0.2263 -0.1455 +vn -0.9978 0.0512 -0.0410 +vn -0.8448 -0.4782 -0.2402 +vn -0.9449 -0.2882 -0.1555 +vn -0.9997 0.0245 -0.0052 +vn 0.9696 -0.1630 -0.1825 +vn 0.8819 -0.3390 -0.3275 +vn 0.9894 0.1404 0.0369 +vn -0.0443 -0.5227 -0.8514 +vn -0.2663 -0.5146 -0.8151 +vn -0.2419 -0.6271 -0.7404 +vn -0.0485 -0.6309 -0.7744 +vn 0.1718 -0.5105 -0.8425 +vn 0.1472 -0.6179 -0.7724 +vn 0.8108 -0.2860 -0.5108 +vn 0.8467 -0.3254 -0.4210 +vn 0.9156 -0.1675 -0.3656 +vn 0.9478 -0.1664 -0.2720 +vn 0.9965 0.0344 -0.0762 +vn 0.9965 0.0839 0.0011 +vn 0.9439 0.2195 0.2469 +vn 0.9241 0.2713 0.2691 +vn 0.3482 0.4414 0.8270 +vn 0.3421 0.3912 0.8544 +vn 0.0614 0.8562 0.5131 +vn 0.9030 0.3647 0.2273 +vn 0.8998 0.3785 0.2169 +vn 0.0764 0.9205 0.3832 +vn 0.1754 0.7206 0.6708 +vn 0.8815 0.3874 0.2701 +vn 
0.2674 0.5819 0.7681 +vn 0.8980 0.3520 0.2639 +vn 0.3337 0.4607 0.8224 +vn 0.9145 0.3118 0.2579 +vn 0.0956 -0.6871 -0.7202 +vn 0.8653 -0.3255 -0.3812 +vn 0.9638 -0.1590 -0.2139 +vn 0.9876 0.1439 0.0625 +vn 0.1184 -0.8140 -0.5687 +vn -0.0343 -0.8258 -0.5629 +vn -0.0461 -0.6926 -0.7198 +vn -0.1996 -0.7240 -0.6603 +vn -0.1490 -0.8536 -0.4992 +vn -0.7936 -0.5242 -0.3088 +vn -0.9015 -0.3597 -0.2406 +vn -0.9996 -0.0050 -0.0264 +vn -0.7466 0.4765 0.4643 +vn -0.5693 0.6554 0.4963 +vn -0.0939 0.9938 0.0592 +vn -0.1968 0.9726 0.1237 +vn -0.3793 0.9103 0.1661 +vn -0.9158 0.2594 0.3066 +vn -0.3076 0.8145 0.4919 +vn -0.0128 0.9996 -0.0234 +vn 0.0477 0.8716 0.4879 +vn 0.0779 0.9937 -0.0807 +vn 0.3824 0.7957 0.4697 +vn 0.1556 0.9840 -0.0870 +vn 0.6053 0.6504 0.4590 +vn 0.2384 0.9704 -0.0390 +vn 0.8418 0.3497 0.4111 +vn 0.8970 0.2583 0.3588 +vn 0.5229 0.8399 0.1455 +vn 0.4653 0.8830 0.0624 +vn 0.3731 0.9278 0.0016 +vn 0.7516 0.4858 0.4461 +vn 0.6248 0.7419 0.2434 +vn 0.6041 0.7849 0.1380 +vn 0.9324 0.2104 0.2939 +vn 0.9565 0.1784 0.2310 +vn 0.9809 0.1528 0.1202 +vn 0.9857 0.1304 0.1069 +vn 0.5619 0.5588 0.6099 +vn 0.5516 0.6154 0.5631 +vn 0.5914 0.7105 0.3814 +vn 0.9725 0.1614 0.1680 +vn 0.3696 0.5302 0.7631 +vn 0.9698 0.1645 0.1803 +vn 0.9853 0.1179 0.1234 +vn 0.5482 0.3864 0.7418 +vn 0.9959 0.0547 0.0716 +vn 0.9978 0.0301 -0.0583 +vn 0.9915 -0.0057 -0.1302 +vn 0.9839 -0.0309 -0.1762 +vn 0.9886 -0.0063 -0.1502 +vn 0.8914 -0.1586 -0.4245 +vn 0.9933 -0.0288 -0.1116 +vn 0.9572 -0.0650 -0.2820 +vn 0.9187 -0.1080 -0.3799 +vn 0.8893 -0.1367 -0.4364 +vn 0.8558 -0.1490 -0.4954 +vn 0.8211 -0.1752 -0.5433 +vn 0.8022 -0.2339 -0.5493 +vn 0.9586 -0.0916 -0.2695 +vn 0.9428 -0.0843 -0.3224 +vn 0.9961 -0.0800 -0.0372 +vn 0.9740 -0.1399 -0.1784 +vn 0.8796 -0.1240 -0.4593 +vn 0.8971 -0.0616 -0.4374 +vn 0.1844 -0.4569 -0.8702 +vn 0.1603 -0.4466 -0.8802 +vn 0.1303 -0.4913 -0.8612 +vn 0.1582 -0.9668 -0.2005 +vn 0.1529 -0.9227 -0.3540 +vn 0.1417 -0.8531 -0.5021 +vn 0.1314 -0.7706 -0.6236 +vn 0.1187 -0.6727 -0.7303 +vn 0.1146 -0.5883 -0.8005 +vn -0.0322 -0.4691 -0.8825 +vn -0.0149 -0.4649 -0.8853 +vn -0.0193 -0.5095 -0.8603 +vn -0.0271 -0.5943 -0.8038 +vn -0.0255 -0.6787 -0.7340 +vn -0.0182 -0.7685 -0.6396 +vn -0.0049 -0.8495 -0.5276 +vn -0.0026 -0.9154 -0.4025 +vn -0.0047 -0.9600 -0.2800 +vn 0.9571 0.1045 0.2701 +vn 0.9787 0.0866 0.1861 +vn 0.9642 -0.0269 0.2638 +vn 0.9853 -0.0099 0.1704 +vn 0.9966 -0.0220 0.0800 +vn 0.9304 0.1180 0.3470 +vn 0.9358 -0.0270 0.3515 +vn 0.1628 -0.9838 -0.0749 +vn 0.1577 -0.9875 -0.0000 +vn 0.1406 -0.9890 0.0449 +vn 0.1338 -0.9889 0.0648 +vn -0.0448 -0.9988 0.0185 +vn -0.0287 -0.9995 -0.0106 +vn -0.0143 -0.9978 -0.0642 +vn -0.0048 -0.9876 -0.1567 +vn -0.2206 -0.4581 -0.8611 +vn -0.2577 -0.4555 -0.8521 +vn -0.1990 -0.4974 -0.8444 +vn -0.1895 -0.5637 -0.8039 +vn -0.1615 -0.9856 -0.0499 +vn -0.1808 -0.9835 -0.0043 +vn -0.1547 -0.9799 -0.1262 +vn -0.1420 -0.9588 -0.2462 +vn -0.1776 -0.6459 -0.7424 +vn -0.1646 -0.7269 -0.6667 +vn -0.1501 -0.8095 -0.5676 +vn -0.1360 -0.8763 -0.4622 +vn -0.1313 -0.9207 -0.3676 +vn -0.6595 -0.3666 -0.6563 +vn -0.6487 -0.3265 -0.6874 +vn -0.6583 -0.3088 -0.6865 +vn -0.6461 -0.3194 -0.6932 +vn -0.6476 -0.3104 -0.6959 +vn -0.6537 -0.2765 -0.7044 +vn -0.6703 -0.3046 -0.6767 +vn -0.6903 -0.3099 -0.6538 +vn -0.7673 -0.3100 -0.5615 +vn -0.8000 -0.2804 -0.5304 +vn -0.8190 -0.2301 -0.5256 +vn -0.9762 -0.1400 -0.1657 +vn -0.8875 -0.2088 -0.4108 +vn -0.5417 0.3289 0.7735 +vn -0.6157 0.3524 0.7048 +vn 0.9344 0.2971 0.1967 +vn 0.9488 0.2930 0.1177 +vn -0.7171 -0.3337 -0.6119 +vn 
-0.8609 -0.2240 -0.4568 +vn -0.8324 -0.2011 -0.5165 +vn -0.7062 -0.2912 -0.6454 +vn -0.6141 0.2145 0.7596 +vn -0.5616 0.2162 0.7987 +vn -0.9614 0.2738 -0.0264 +vn -0.9585 0.2831 0.0331 +vn -0.9427 0.2915 0.1625 +vn -0.8683 0.2853 0.4057 +vn -0.7395 0.2561 0.6226 +vn -0.2194 0.2641 0.9392 +vn -0.1123 0.2352 0.9654 +vn -0.8473 0.3335 0.4134 +vn -0.6116 0.3207 0.7232 +vn -0.4214 0.3013 0.8554 +vn -0.6827 0.3225 0.6557 +vn -0.3540 0.3840 0.8528 +vn -0.6311 0.3371 0.6986 +vn 0.0223 0.4001 0.9162 +vn 0.3396 0.3786 0.8610 +vn 0.5778 0.3445 0.7399 +vn 0.6801 0.3294 0.6549 +vn 0.6918 0.3271 0.6437 +vn 0.6104 0.3260 0.7219 +vn 0.4471 0.3084 0.8396 +vn 0.4191 0.2987 0.8574 +vn 0.8118 0.3376 0.4764 +vn 0.9482 0.2997 0.1052 +vn 0.7279 0.2764 0.6275 +vn 0.8100 0.2927 0.5081 +vn 0.8934 0.3008 0.3336 +vn 0.6591 0.2643 0.7041 +vn 0.6276 0.3879 0.6750 +vn 0.5219 0.3875 0.7599 +vn 0.5382 0.4740 0.6969 +vn -0.9613 -0.0711 -0.2662 +vn -0.9974 0.0069 -0.0712 +vn -0.9902 -0.0606 -0.1257 +vn -0.8986 -0.1777 -0.4011 +vn 0.5990 0.5981 0.5324 +vn -0.8751 -0.1563 -0.4580 +vn -0.8093 -0.2360 -0.5378 +vn -0.7221 -0.2682 -0.6377 +vn 0.6306 0.6999 0.3354 +vn 0.6375 0.7540 0.1580 +vn 0.6005 0.7975 -0.0577 +vn 0.5396 0.7981 -0.2679 +vn 0.0234 0.9018 0.4316 +vn 0.5125 0.7397 0.4361 +vn -0.4847 0.7597 0.4336 +vn 0.7399 0.4998 0.4504 +vn 0.8650 0.2609 0.4287 +vn 0.9013 0.1545 0.4047 +vn -0.9945 0.0609 0.0852 +vn -0.9599 0.1484 0.2378 +vn -0.8695 0.3304 0.3671 +vn -0.7105 0.5590 0.4275 +vn -0.9985 -0.0028 0.0539 +vn -0.9971 -0.0678 0.0349 +vn -0.2066 -0.9780 0.0293 +vn -0.9740 0.0292 0.2248 +vn -0.9720 -0.0390 0.2316 +vn -0.2353 -0.9701 0.0595 +vn -0.0687 -0.9960 0.0564 +vn -0.0558 -0.9977 0.0375 +vn 0.1492 -0.9853 0.0831 +vn 0.1384 -0.9878 0.0711 +vn 0.9099 -0.0127 0.4146 +vn 0.8918 0.0247 0.4517 +vn 0.0177 -0.0533 0.9984 +vn -0.6965 -0.0903 0.7119 +vn -0.4595 -0.6560 0.5987 +vn -0.0686 -0.8087 0.5843 +vn 0.6635 -0.1350 0.7359 +vn 0.3612 -0.6750 0.6433 +vn 0.8235 -0.1732 0.5402 +vn 0.3871 -0.8378 0.3851 +vn -0.4457 -0.8311 0.3325 +vn -0.8677 -0.0536 0.4942 +vn -0.0616 -0.9526 0.2978 +vn 0.2137 -0.9637 0.1598 +vn 0.3061 -0.9179 0.2526 +vn -0.0700 -0.9788 0.1927 +vn -0.0794 -0.9896 0.1201 +vn 0.1695 -0.9788 0.1147 +vn -0.0823 -0.9932 0.0824 +vn 0.8657 -0.1183 0.4864 +vn 0.8489 -0.1537 0.5058 +vn 0.8779 -0.0851 0.4712 +vn -0.3107 -0.9421 0.1259 +vn -0.2719 -0.9586 0.0844 +vn -0.3779 -0.9028 0.2051 +vn -0.9026 -0.0043 0.4305 +vn -0.9217 0.0171 0.3876 +vn -0.9367 0.0118 0.3498 +vn -0.9397 0.0877 0.3307 +vn -0.8608 0.2430 0.4473 +vn -0.8685 0.2808 0.4086 +vn 0.8453 0.2337 0.4804 +vn 0.8453 0.1371 0.5164 +vn 0.5536 0.7092 0.4365 +vn 0.6130 0.6132 0.4983 +vn 0.0231 0.9067 0.4211 +vn 0.0659 0.8795 0.4714 +vn -0.5322 0.7149 0.4535 +vn -0.5002 0.7585 0.4176 +vn 0.0859 0.4329 0.8973 +vn -0.6068 0.2640 0.7498 +vn 0.6473 0.2076 0.7334 +vn -0.5252 0.6350 0.5665 +vn 0.1129 0.7641 0.6352 +vn 0.6264 0.5279 0.5736 +vn 0.8363 0.0800 0.5425 +vn -0.8499 0.1769 0.4964 +vn 0.8390 0.2594 0.4783 +vn -0.8652 0.2915 0.4079 +vn 0.8544 0.2172 0.4720 +vn -0.8606 0.2928 0.4167 +vn 0.0854 -0.9100 -0.4057 +vn 0.6668 -0.6022 -0.4390 +vn 0.1129 -0.9809 -0.1586 +vn 0.5883 -0.6436 -0.4895 +vn 0.8605 -0.4189 -0.2901 +vn 0.3632 -0.8062 0.4671 +vn 0.9741 -0.2208 0.0490 +vn 0.0634 -0.1483 0.9869 +vn 0.9880 -0.1485 -0.0433 +vn 0.0907 0.0520 0.9945 +vn 0.1011 -0.0256 0.9945 +vn 0.9980 -0.0461 0.0423 +vn 0.9883 -0.0484 0.1449 +vn 0.0988 0.0180 0.9949 +vn 0.0957 -0.0385 0.9947 +vn 0.9775 -0.0713 0.1986 +vn -0.1028 -0.0074 0.9947 +vn -0.1266 -0.0493 0.9907 +vn -0.9927 
-0.0741 0.0956 +vn -0.9894 -0.0728 0.1253 +vn -0.0876 0.0115 0.9961 +vn -0.0673 -0.0281 0.9973 +vn -0.9953 -0.0809 0.0535 +vn -0.9903 -0.1368 -0.0258 +vn -0.0810 -0.4426 0.8930 +vn -0.9545 -0.2971 -0.0251 +vn -0.0198 -0.9318 -0.3625 +vn -0.0043 -0.8233 0.5676 +vn -0.0100 -0.9973 -0.0730 +vn -0.1430 -0.9156 -0.3758 +vn -0.6979 -0.6194 -0.3595 +vn -0.1655 -0.9853 -0.0415 +vn -0.6803 -0.6467 -0.3448 +vn -0.2909 -0.8692 0.3999 +vn -0.8357 -0.5021 -0.2224 +vn -0.8535 -0.3693 -0.3677 +vn -0.4376 -0.1714 -0.8827 +vn -0.7092 -0.2393 -0.6632 +vn -0.0266 -0.1276 -0.9915 +vn 0.3578 -0.1368 -0.9237 +vn 0.6405 -0.2064 -0.7397 +vn 0.8503 -0.2721 -0.4505 +vn 0.9411 -0.1666 -0.2942 +vn 0.7610 -0.1554 -0.6299 +vn -0.0285 -0.1022 -0.9944 +vn 0.4088 -0.1176 -0.9050 +vn -0.4617 -0.1392 -0.8761 +vn -0.8021 -0.2035 -0.5614 +vn -0.9455 -0.2578 -0.1990 +vn -0.0217 -0.0484 -0.9986 +vn -0.5222 -0.0603 -0.8507 +vn -0.4994 -0.1039 -0.8601 +vn -0.0263 -0.0928 -0.9953 +vn 0.5065 -0.0355 -0.8615 +vn 0.4749 -0.0995 -0.8744 +vn 0.8487 -0.1339 -0.5116 +vn 0.8948 -0.0391 -0.4448 +vn -0.8909 -0.1479 -0.4294 +vn -0.9301 -0.0670 -0.3612 +vn -0.0097 -0.0582 -0.9983 +vn -0.5227 -0.0689 -0.8497 +vn 0.5092 -0.0532 -0.8590 +vn 0.9070 -0.0497 -0.4182 +vn -0.9294 -0.0745 -0.3614 +vn -0.5485 -0.0702 -0.8332 +vn 0.0066 -0.0636 -0.9980 +vn 0.5394 -0.0720 -0.8389 +vn -0.9401 -0.0755 -0.3325 +vn 0.9245 -0.0744 -0.3739 +vn 0.0252 -0.9949 0.0978 +vn -0.0071 -0.9916 0.1290 +vn 0.1036 -0.0348 0.9940 +vn -0.1295 -0.0258 0.9912 +vn -0.9833 -0.0972 0.1538 +vn -0.0059 -0.9996 -0.0287 +vn -0.0200 -0.9980 0.0596 +vn 0.9651 -0.0589 0.2550 +vn 0.0219 -0.9986 0.0488 +vn 0.9294 -0.0725 -0.3620 +vn -0.0472 -0.9984 0.0323 +vn 0.5944 -0.0975 -0.7983 +vn 0.0122 -0.9968 -0.0788 +vn 0.0300 -0.1471 -0.9887 +vn 0.1027 -0.9912 0.0829 +vn -0.5396 -0.1788 -0.8227 +vn -0.9457 -0.1447 -0.2910 +vn 0.1104 -0.9935 0.0277 +vn -0.5898 0.4042 0.6991 +vn -0.6784 0.5247 0.5142 +vn -0.7207 0.6166 0.3168 +vn -0.7226 0.6868 0.0793 +vn -0.6829 0.7171 -0.1392 +vn -0.6306 0.7222 -0.2842 +vn -0.5622 0.7153 -0.4150 +vn -0.4727 0.6928 -0.5446 +vn -0.3599 0.6829 -0.6358 +vn -0.2732 0.6717 -0.6886 +vn -0.1487 0.6608 -0.7357 +vn 0.0374 0.6713 -0.7403 +vn 0.2287 0.7011 -0.6754 +vn 0.3714 0.7299 -0.5739 +vn 0.4490 0.7668 -0.4587 +vn -0.3655 0.3771 -0.8510 +vn -0.4414 0.4956 -0.7481 +vn -0.0204 0.5150 -0.8569 +vn -0.0140 0.3788 -0.9254 +vn -0.0069 0.1586 -0.9873 +vn -0.2954 0.1661 -0.9408 +vn 0.0055 -0.0190 -0.9998 +vn -0.2350 -0.0428 -0.9711 +vn -0.0495 0.5898 -0.8060 +vn -0.5318 0.5371 -0.6548 +vn -0.2997 0.9435 0.1413 +vn -0.0889 0.9836 0.1568 +vn 0.4779 0.5282 -0.7019 +vn 0.2193 0.9678 0.1232 +vn 0.3585 0.3601 -0.8613 +vn 0.4172 0.4729 -0.7761 +vn 0.3083 0.1588 -0.9379 +vn 0.2605 -0.0480 -0.9643 +vn -0.4558 0.8605 0.2277 +vn -0.7848 0.4651 -0.4096 +vn -0.6898 0.4581 -0.5606 +vn -0.6017 0.3676 -0.7091 +vn -0.5133 0.1611 -0.8430 +vn -0.4146 -0.0677 -0.9075 +vn 0.5101 -0.0865 -0.8558 +vn 0.5936 0.1398 -0.7925 +vn 0.6458 0.3256 -0.6906 +vn 0.7056 0.4073 -0.5798 +vn 0.7661 0.4336 -0.4744 +vn 0.4772 0.8650 0.1553 +vn 0.6479 0.5202 0.5564 +vn 0.9373 0.3470 0.0315 +vn 0.9357 0.3528 -0.0052 +vn 0.5979 0.5545 0.5788 +vn 0.9729 0.2306 0.0159 +vn 0.6960 0.6163 0.3684 +vn 0.5756 0.6335 0.5170 +vn 0.9339 0.3532 -0.0546 +vn 0.5815 0.6952 0.4225 +vn 0.9261 0.3533 -0.1323 +vn 0.8864 0.3732 -0.2739 +vn 0.5841 0.7600 0.2849 +vn 0.9734 0.2183 0.0689 +vn 0.0379 0.9505 0.3083 +vn 0.9747 0.2069 0.0848 +vn -0.3477 0.8810 0.3209 +vn -0.3991 0.8123 0.4254 +vn 0.9797 0.1855 0.0759 +vn 0.9756 0.2015 0.0877 +vn 
-0.2117 0.7426 0.6354 +vn 0.0652 0.6071 0.7919 +vn 0.9805 0.1742 0.0914 +vn 0.2566 0.4769 0.8407 +vn 0.9886 0.1177 0.0941 +vn 0.4267 0.4084 0.8069 +vn 0.9947 0.0838 0.0597 +vn 0.6480 0.4351 0.6251 +vn 0.9981 0.0109 -0.0601 +vn 0.6897 0.5785 0.4355 +vn 0.9851 -0.0194 -0.1710 +vn 0.9169 0.2916 0.2726 +vn 0.9641 -0.0110 -0.2655 +vn 0.8656 0.4643 0.1873 +vn 0.9700 0.0253 -0.2416 +vn 0.7900 0.5689 0.2287 +vn 0.9872 0.0704 -0.1429 +vn 0.7327 0.6581 0.1734 +vn 0.9944 0.1014 -0.0309 +vn 0.5860 0.7916 0.1730 +vn 0.9813 0.1492 0.1216 +vn 0.9230 -0.1908 -0.3341 +vn 0.9870 -0.0923 -0.1319 +vn 0.7587 -0.3723 -0.5345 +vn 0.8412 -0.3918 -0.3727 +vn 0.6895 -0.4290 -0.5835 +vn 0.7427 -0.5048 -0.4399 +vn 0.8520 0.3582 -0.3817 +vn 0.9162 0.3315 -0.2253 +vn 0.9430 0.3075 -0.1274 +vn 0.9633 0.2625 -0.0557 +vn -0.5791 0.4384 0.6873 +vn -0.8924 0.4188 0.1681 +vn -0.9229 0.3677 0.1141 +vn -0.4119 0.7197 0.5588 +vn -0.9071 0.3749 0.1916 +vn -0.1179 0.9379 0.3262 +vn -0.9068 0.4099 0.0983 +vn -0.9267 0.3757 0.0039 +vn -0.6091 0.4743 0.6357 +vn -0.5767 0.7430 0.3395 +vn -0.8814 0.4253 -0.2057 +vn -0.9102 0.4091 -0.0652 +vn -0.6536 0.6266 0.4245 +vn -0.6489 0.5405 0.5355 +vn -0.9130 0.4068 0.0301 +vn -0.9155 0.3932 -0.0847 +vn -0.8911 0.4081 -0.1987 +vn -0.8265 0.4297 -0.3636 +vn 0.7826 0.0848 -0.6167 +vn 0.7197 -0.1519 -0.6775 +vn 0.8196 0.2858 -0.4965 +vn -0.7051 0.1428 -0.6946 +vn -0.6265 -0.1290 -0.7687 +vn -0.7686 0.3490 -0.5361 +vn -0.7819 -0.2601 -0.5665 +vn -0.8516 0.0935 -0.5158 +vn -0.8782 0.3187 -0.3567 +vn 0.9038 0.0115 -0.4278 +vn 0.8398 -0.2861 -0.4613 +vn 0.9168 0.2393 -0.3196 +vn 0.8597 -0.4038 -0.3129 +vn 0.9585 -0.0651 -0.2774 +vn 0.9640 0.1846 -0.1914 +vn -0.9291 0.0231 -0.3691 +vn -0.8221 -0.3673 -0.4350 +vn -0.9362 0.2812 -0.2107 +vn -0.9676 -0.0782 -0.2400 +vn -0.8133 -0.4554 -0.3621 +vn -0.9671 0.2353 -0.0970 +vn 0.8441 -0.4657 -0.2658 +vn 0.9708 -0.1593 -0.1796 +vn 0.9891 0.1119 -0.0958 +vn -0.8556 -0.4137 -0.3113 +vn -0.9717 -0.1504 -0.1819 +vn -0.9842 0.1752 -0.0242 +vn 0.9630 -0.2262 -0.1468 +vn 0.8690 -0.4268 -0.2503 +vn 0.9978 0.0512 -0.0410 +vn 0.8461 -0.4800 -0.2318 +vn 0.9449 -0.2882 -0.1555 +vn 0.9997 0.0245 -0.0053 +vn -0.8799 -0.3324 -0.3396 +vn -0.9695 -0.1628 -0.1830 +vn -0.9894 0.1404 0.0369 +vn 0.0443 -0.5227 -0.8513 +vn 0.0485 -0.6307 -0.7745 +vn 0.2411 -0.6295 -0.7386 +vn 0.2666 -0.5122 -0.8165 +vn -0.1718 -0.5109 -0.8423 +vn -0.1472 -0.6178 -0.7724 +vn -0.8108 -0.2859 -0.5108 +vn -0.8467 -0.3254 -0.4210 +vn -0.9156 -0.1675 -0.3656 +vn -0.9478 -0.1664 -0.2720 +vn -0.9965 0.0344 -0.0762 +vn -0.9965 0.0839 0.0011 +vn -0.9439 0.2189 0.2472 +vn -0.9241 0.2715 0.2691 +vn -0.3506 0.4320 0.8310 +vn -0.3418 0.3947 0.8529 +vn -0.0614 0.8562 0.5130 +vn -0.0764 0.9205 0.3832 +vn -0.8999 0.3785 0.2168 +vn -0.9030 0.3647 0.2273 +vn -0.1754 0.7210 0.6704 +vn -0.8815 0.3873 0.2700 +vn -0.2674 0.5819 0.7681 +vn -0.8980 0.3520 0.2639 +vn -0.3337 0.4607 0.8224 +vn -0.9145 0.3118 0.2579 +vn -0.8653 -0.3255 -0.3811 +vn -0.0957 -0.6769 -0.7298 +vn -0.9876 0.1439 0.0625 +vn -0.9638 -0.1590 -0.2138 +vn -0.1149 -0.7960 -0.5943 +vn 0.0343 -0.8259 -0.5628 +vn 0.0453 -0.6926 -0.7199 +vn 0.1508 -0.8438 -0.5151 +vn 0.2010 -0.7208 -0.6634 +vn 0.7936 -0.5242 -0.3088 +vn 0.9015 -0.3597 -0.2406 +vn 0.9996 -0.0049 -0.0264 +vn 0.7465 0.4761 0.4648 +vn 0.1989 0.9723 0.1231 +vn 0.0912 0.9940 0.0608 +vn 0.5701 0.6566 0.4938 +vn 0.9158 0.2591 0.3069 +vn 0.3811 0.9098 0.1645 +vn 0.0125 0.9996 -0.0236 +vn 0.3082 0.8147 0.4912 +vn -0.0779 0.9937 -0.0807 +vn -0.0480 0.8715 0.4881 +vn -0.1583 0.9836 -0.0868 +vn -0.3812 
0.7960 0.4702 +vn -0.2489 0.9674 -0.0459 +vn -0.6033 0.6488 0.4638 +vn -0.8422 0.3500 0.4101 +vn -0.4437 0.8914 0.0923 +vn -0.5404 0.8339 0.1119 +vn -0.8968 0.2582 0.3592 +vn -0.7532 0.4872 0.4420 +vn -0.3518 0.9358 0.0224 +vn -0.6327 0.7441 0.2144 +vn -0.9564 0.1784 0.2311 +vn -0.9324 0.2107 0.2937 +vn -0.5856 0.7881 0.1899 +vn -0.9809 0.1522 0.1208 +vn -0.5716 0.6615 0.4856 +vn -0.5444 0.4856 0.6839 +vn -0.9856 0.1302 0.1075 +vn -0.9725 0.1612 0.1682 +vn -0.5993 0.7185 0.3531 +vn -0.9698 0.1648 0.1800 +vn -0.3771 0.4940 0.7834 +vn -0.9853 0.1192 0.1225 +vn -0.5511 0.4282 0.7162 +vn -0.9959 0.0547 0.0716 +vn -0.9978 0.0299 -0.0585 +vn -0.9915 -0.0057 -0.1301 +vn -0.9839 -0.0309 -0.1762 +vn -0.9886 -0.0063 -0.1502 +vn -0.8914 -0.1586 -0.4245 +vn -0.9933 -0.0288 -0.1116 +vn -0.9572 -0.0651 -0.2821 +vn -0.9187 -0.1080 -0.3798 +vn -0.8893 -0.1367 -0.4364 +vn -0.8210 -0.1753 -0.5433 +vn -0.8558 -0.1490 -0.4954 +vn -0.8023 -0.2339 -0.5492 +vn -0.9586 -0.0917 -0.2695 +vn -0.9428 -0.0843 -0.3224 +vn -0.9740 -0.1398 -0.1784 +vn -0.9961 -0.0801 -0.0372 +vn -0.8796 -0.1240 -0.4593 +vn -0.8971 -0.0616 -0.4374 +vn -0.1844 -0.4570 -0.8701 +vn -0.1603 -0.4467 -0.8802 +vn -0.1303 -0.4913 -0.8612 +vn -0.1582 -0.9669 -0.2004 +vn -0.1530 -0.9228 -0.3536 +vn -0.1417 -0.8533 -0.5018 +vn -0.1315 -0.7709 -0.6232 +vn -0.1187 -0.6728 -0.7303 +vn -0.1147 -0.5884 -0.8004 +vn 0.0322 -0.4691 -0.8825 +vn 0.0148 -0.4648 -0.8853 +vn 0.0193 -0.5095 -0.8603 +vn 0.0271 -0.5943 -0.8038 +vn 0.0255 -0.6787 -0.7339 +vn 0.0182 -0.7685 -0.6396 +vn 0.0049 -0.8495 -0.5276 +vn 0.0026 -0.9155 -0.4024 +vn 0.0047 -0.9600 -0.2799 +vn -0.9787 0.0866 0.1861 +vn -0.9572 0.1044 0.2701 +vn -0.9853 -0.0099 0.1706 +vn -0.9643 -0.0269 0.2635 +vn -0.9966 -0.0220 0.0800 +vn -0.9304 0.1180 0.3470 +vn -0.9360 -0.0270 0.3508 +vn -0.1627 -0.9838 -0.0749 +vn -0.1576 -0.9875 0.0008 +vn -0.1397 -0.9890 0.0481 +vn -0.1326 -0.9888 0.0682 +vn 0.0287 -0.9995 -0.0106 +vn 0.0448 -0.9988 0.0185 +vn 0.0143 -0.9978 -0.0642 +vn 0.0047 -0.9876 -0.1567 +vn 0.2577 -0.4560 -0.8519 +vn 0.2206 -0.4590 -0.8606 +vn 0.1993 -0.4995 -0.8431 +vn 0.1896 -0.5640 -0.8037 +vn 0.1585 -0.9858 -0.0558 +vn 0.1809 -0.9835 -0.0040 +vn 0.1513 -0.9797 -0.1314 +vn 0.1388 -0.9581 -0.2504 +vn 0.1779 -0.6465 -0.7419 +vn 0.1660 -0.7284 -0.6648 +vn 0.1520 -0.8110 -0.5650 +vn 0.1389 -0.8777 -0.4586 +vn 0.1337 -0.9216 -0.3644 +vn 0.6595 -0.3675 -0.6557 +vn 0.6487 -0.3266 -0.6874 +vn 0.6583 -0.3091 -0.6863 +vn 0.6462 -0.3196 -0.6931 +vn 0.6481 -0.3109 -0.6952 +vn 0.6534 -0.2761 -0.7048 +vn 0.6720 -0.3056 -0.6746 +vn 0.6921 -0.3104 -0.6516 +vn 0.7683 -0.3076 -0.5613 +vn 0.7999 -0.2796 -0.5310 +vn 0.8184 -0.2300 -0.5267 +vn 0.9761 -0.1390 -0.1671 +vn 0.8884 -0.2094 -0.4085 +vn 0.5418 0.3289 0.7735 +vn 0.6157 0.3524 0.7048 +vn -0.9488 0.2930 0.1177 +vn -0.9344 0.2971 0.1967 +vn 0.8610 -0.2240 -0.4567 +vn 0.7170 -0.3338 -0.6119 +vn 0.8324 -0.2011 -0.5165 +vn 0.7062 -0.2912 -0.6454 +vn 0.6141 0.2145 0.7595 +vn 0.5616 0.2162 0.7987 +vn 0.9585 0.2831 0.0331 +vn 0.9614 0.2738 -0.0264 +vn 0.9427 0.2915 0.1625 +vn 0.8683 0.2853 0.4057 +vn 0.7395 0.2561 0.6226 +vn 0.1123 0.2352 0.9654 +vn 0.2194 0.2641 0.9392 +vn 0.8473 0.3335 0.4134 +vn 0.4214 0.3013 0.8554 +vn 0.6116 0.3207 0.7232 +vn 0.6827 0.3225 0.6557 +vn 0.6312 0.3371 0.6986 +vn 0.3540 0.3840 0.8528 +vn -0.0224 0.4001 0.9162 +vn -0.3396 0.3786 0.8610 +vn -0.5778 0.3445 0.7399 +vn -0.6801 0.3294 0.6549 +vn -0.6918 0.3272 0.6437 +vn -0.6104 0.3260 0.7219 +vn -0.4471 0.3084 0.8396 +vn -0.4191 0.2987 0.8574 +vn -0.8118 0.3376 0.4764 +vn -0.9482 0.2997 
0.1052 +vn -0.8100 0.2927 0.5081 +vn -0.7279 0.2764 0.6274 +vn -0.8934 0.3008 0.3336 +vn -0.6591 0.2643 0.7041 +vn -0.6276 0.3879 0.6750 +vn -0.5219 0.3875 0.7599 +vn -0.5382 0.4740 0.6969 +vn 0.9613 -0.0710 -0.2662 +vn 0.9974 0.0069 -0.0712 +vn 0.9902 -0.0600 -0.1258 +vn 0.8985 -0.1767 -0.4017 +vn -0.5990 0.5981 0.5324 +vn 0.8752 -0.1568 -0.4576 +vn 0.8093 -0.2362 -0.5378 +vn 0.7220 -0.2682 -0.6377 +vn -0.6307 0.6998 0.3354 +vn -0.6375 0.7540 0.1580 +vn -0.6005 0.7975 -0.0577 +vn -0.5396 0.7981 -0.2679 +vn -0.0234 0.9018 0.4316 +vn -0.5124 0.7395 0.4366 +vn 0.4848 0.7597 0.4335 +vn -0.7396 0.4993 0.4513 +vn -0.8648 0.2609 0.4291 +vn -0.9013 0.1545 0.4048 +vn 0.9945 0.0609 0.0852 +vn 0.9599 0.1482 0.2380 +vn 0.8696 0.3306 0.3669 +vn 0.7105 0.5590 0.4276 +vn 0.9985 -0.0028 0.0540 +vn 0.9971 -0.0677 0.0346 +vn 0.2066 -0.9780 0.0299 +vn 0.9739 0.0292 0.2249 +vn 0.9720 -0.0390 0.2315 +vn 0.2351 -0.9701 0.0604 +vn 0.0687 -0.9960 0.0564 +vn 0.0559 -0.9977 0.0375 +vn -0.1476 -0.9853 0.0861 +vn -0.1402 -0.9878 0.0672 +vn -0.9087 -0.0128 0.4172 +vn -0.8961 0.0245 0.4432 +vn -0.0154 -0.0527 0.9985 +vn 0.0756 -0.8078 0.5846 +vn 0.4586 -0.6569 0.5985 +vn 0.6955 -0.0885 0.7131 +vn -0.6637 -0.1353 0.7357 +vn -0.3613 -0.6750 0.6433 +vn -0.8236 -0.1734 0.5400 +vn -0.3863 -0.8378 0.3858 +vn 0.4487 -0.8310 0.3289 +vn 0.8679 -0.0540 0.4938 +vn 0.0624 -0.9526 0.2979 +vn -0.2145 -0.9638 0.1586 +vn 0.0794 -0.9896 0.1202 +vn 0.0697 -0.9788 0.1924 +vn -0.3050 -0.9178 0.2544 +vn -0.1689 -0.9788 0.1157 +vn 0.0823 -0.9932 0.0823 +vn -0.8486 -0.1536 0.5063 +vn -0.8633 -0.1178 0.4908 +vn -0.8793 -0.0854 0.4686 +vn 0.3119 -0.9421 0.1230 +vn 0.2706 -0.9587 0.0880 +vn 0.3798 -0.9029 0.2014 +vn 0.9026 -0.0043 0.4304 +vn 0.9216 0.0173 0.3877 +vn 0.9367 0.0118 0.3498 +vn 0.9396 0.0877 0.3308 +vn 0.8608 0.2429 0.4473 +vn 0.8685 0.2808 0.4086 +vn -0.8453 0.1371 0.5164 +vn -0.8454 0.2337 0.4804 +vn -0.6130 0.6132 0.4982 +vn -0.5537 0.7092 0.4363 +vn -0.0231 0.9069 0.4208 +vn -0.0655 0.8794 0.4716 +vn 0.5002 0.7585 0.4176 +vn 0.5322 0.7149 0.4536 +vn 0.6069 0.2639 0.7497 +vn -0.0879 0.4325 0.8973 +vn -0.6473 0.2077 0.7334 +vn -0.1131 0.7640 0.6352 +vn 0.5254 0.6350 0.5663 +vn -0.6265 0.5279 0.5734 +vn -0.8363 0.0801 0.5424 +vn 0.8499 0.1769 0.4963 +vn -0.8389 0.2593 0.4786 +vn 0.8652 0.2916 0.4079 +vn -0.8546 0.2172 0.4717 +vn 0.8606 0.2928 0.4167 +vn -0.6668 -0.6022 -0.4390 +vn -0.0823 -0.9068 -0.4135 +vn -0.5941 -0.6426 -0.4839 +vn -0.1564 -0.9813 -0.1125 +vn -0.8579 -0.4217 -0.2937 +vn -0.3800 -0.7743 0.5060 +vn -0.9729 -0.2259 0.0494 +vn -0.0408 -0.2314 0.9720 +vn -0.9830 -0.1761 -0.0512 +vn -0.0434 -0.0887 0.9951 +vn -0.9981 -0.0446 0.0428 +vn -0.1010 -0.0263 0.9945 +vn -0.9879 -0.0555 0.1451 +vn -0.0965 -0.0094 0.9953 +vn -0.9779 -0.0656 0.1984 +vn -0.0959 -0.0281 0.9950 +vn 0.1014 -0.0576 0.9932 +vn 0.1272 -0.0264 0.9915 +vn 0.9935 -0.0614 0.0963 +vn 0.9893 -0.0745 0.1254 +vn 0.0820 -0.0263 0.9963 +vn 0.0765 0.0073 0.9970 +vn 0.9941 -0.0962 0.0493 +vn 0.9890 -0.1455 -0.0273 +vn 0.1368 -0.2568 0.9567 +vn 0.9554 -0.2940 -0.0262 +vn 0.0197 -0.9317 -0.3626 +vn 0.0037 -0.8231 0.5678 +vn 0.0101 -0.9973 -0.0730 +vn 0.1462 -0.9208 -0.3615 +vn 0.6983 -0.6195 -0.3586 +vn 0.1364 -0.9871 -0.0840 +vn 0.6785 -0.6469 -0.3480 +vn 0.2830 -0.8798 0.3819 +vn 0.8361 -0.5016 -0.2220 +vn 0.8543 -0.3676 -0.3675 +vn 0.7092 -0.2394 -0.6631 +vn 0.4376 -0.1715 -0.8827 +vn 0.0266 -0.1276 -0.9915 +vn -0.3578 -0.1369 -0.9237 +vn -0.6405 -0.2062 -0.7398 +vn -0.8500 -0.2727 -0.4507 +vn -0.9406 -0.1695 -0.2942 +vn -0.7610 -0.1558 -0.6298 +vn 0.0285 
-0.1023 -0.9943 +vn -0.4097 -0.1137 -0.9051 +vn 0.4618 -0.1390 -0.8760 +vn 0.8016 -0.2071 -0.5608 +vn 0.9455 -0.2579 -0.1990 +vn 0.0232 -0.0502 -0.9985 +vn 0.0262 -0.0900 -0.9956 +vn 0.4994 -0.1049 -0.8600 +vn 0.5222 -0.0603 -0.8507 +vn -0.5062 -0.0346 -0.8617 +vn -0.4746 -0.1019 -0.8743 +vn -0.8491 -0.1337 -0.5110 +vn -0.8940 -0.0375 -0.4465 +vn 0.8914 -0.1466 -0.4289 +vn 0.9295 -0.0651 -0.3631 +vn 0.5227 -0.0678 -0.8498 +vn 0.0098 -0.0597 -0.9982 +vn -0.5091 -0.0523 -0.8591 +vn -0.9070 -0.0497 -0.4182 +vn 0.9294 -0.0754 -0.3613 +vn 0.5488 -0.0702 -0.8330 +vn -0.0068 -0.0644 -0.9979 +vn -0.5396 -0.0726 -0.8388 +vn 0.9400 -0.0756 -0.3326 +vn -0.9243 -0.0744 -0.3743 +vn -0.0501 -0.9923 0.1133 +vn 0.1265 -0.0549 0.9904 +vn -0.1046 -0.0215 0.9943 +vn 0.0037 -0.9918 0.1277 +vn -0.0118 -0.9995 0.0309 +vn 0.9839 -0.0857 0.1566 +vn -0.0008 -1.0000 0.0035 +vn -0.9650 -0.0659 0.2539 +vn 0.0008 -0.9999 -0.0142 +vn -0.9288 -0.0737 -0.3632 +vn 0.0320 -0.9985 0.0436 +vn -0.5951 -0.0976 -0.7977 +vn -0.0188 -0.9967 -0.0786 +vn -0.0232 -0.1481 -0.9887 +vn -0.0424 -0.9910 0.1271 +vn 0.5597 -0.1746 -0.8101 +vn -0.1188 -0.9929 -0.0017 +vn 0.9454 -0.1443 -0.2921 +vn 0.6784 0.5248 0.5141 +vn 0.5898 0.4042 0.6991 +vn 0.7225 0.6868 0.0793 +vn 0.7207 0.6166 0.3168 +vn 0.6305 0.7223 -0.2842 +vn 0.6829 0.7171 -0.1392 +vn 0.4727 0.6928 -0.5446 +vn 0.5622 0.7153 -0.4150 +vn 0.2733 0.6716 -0.6886 +vn 0.3599 0.6829 -0.6357 +vn -0.0373 0.6713 -0.7403 +vn 0.1487 0.6608 -0.7357 +vn -0.3714 0.7299 -0.5738 +vn -0.2287 0.7011 -0.6754 +vn -0.4490 0.7668 -0.4587 +vn -0.9198 -0.3061 0.2455 +vn -0.6925 -0.6682 -0.2719 +vn -0.5999 -0.5849 -0.5459 +vn -0.9662 -0.2122 0.1463 +vn -0.9404 -0.3126 0.1337 +vn -0.8595 -0.4651 0.2121 +vn -0.7141 -0.6662 -0.2150 +vn -0.6612 -0.5239 -0.5370 +vn -0.9434 -0.2381 0.2308 +vn -0.8985 -0.4199 0.1281 +vn -0.7337 -0.3144 -0.6023 +vn -0.7950 -0.2513 -0.5521 +vn -0.9596 -0.2521 0.1250 +vn -0.8742 -0.4620 0.1493 +vn -0.7571 -0.5454 -0.3596 +vn -0.7581 -0.3653 -0.5402 +vn -0.6381 -0.4468 -0.6271 +vn -0.6183 -0.4195 -0.6646 +vn -0.9948 -0.0467 -0.0906 +vn -0.9957 -0.0863 -0.0339 +vn -0.9712 -0.1623 -0.1746 +vn -0.9595 0.0602 -0.2752 +vn -0.5370 -0.3173 -0.7816 +vn -0.5750 -0.4762 -0.6652 +vn -0.6696 -0.3534 -0.6532 +vn -0.6654 -0.3414 -0.6638 +vn -0.9977 -0.0434 -0.0513 +vn -0.9973 -0.0645 -0.0340 +vn -0.9762 -0.2165 0.0085 +vn -0.9886 0.0741 -0.1311 +vn -0.6302 -0.3022 -0.7152 +vn -0.6666 -0.4957 -0.5567 +vn -0.9807 -0.0695 0.1826 +vn -0.9760 0.1788 0.1241 +vn -0.7663 -0.1416 -0.6267 +vn -0.7722 -0.3282 -0.5440 +vn -0.9442 0.1410 0.2977 +vn -0.9536 0.0858 0.2887 +vn -0.8243 -0.0962 -0.5579 +vn -0.8308 -0.0810 -0.5506 +vn -0.9105 -0.0854 0.4045 +vn -0.9238 0.1794 0.3382 +vn -0.8600 -0.1236 -0.4951 +vn -0.8548 -0.3343 -0.3969 +vn -0.7543 0.1871 0.6292 +vn -0.7155 0.1072 0.6904 +vn -0.7781 0.3365 0.5304 +vn -0.7755 0.3063 0.5521 +vn -0.7874 0.2093 0.5798 +vn -0.7485 0.4256 0.5086 +vn -0.6571 0.0806 0.7495 +vn -0.7153 -0.2501 0.6525 +vn -0.6568 0.3005 0.6916 +vn -0.6649 0.2847 0.6905 +vn -0.7110 0.1400 0.6891 +vn -0.6463 0.4059 0.6462 +vn -0.5904 0.0293 0.8066 +vn -0.6736 -0.3303 0.6612 +vn -0.5933 0.1079 0.7977 +vn -0.5600 0.3285 0.7606 +vn -0.4633 -0.0938 0.8812 +vn -0.6286 -0.4466 0.6367 +vn -0.3867 0.2163 0.8965 +vn -0.4109 0.1645 0.8967 +vn -0.2901 0.0407 0.9561 +vn -0.2958 0.2475 0.9226 +vn -0.0140 0.5043 0.8634 +vn -0.1061 0.4545 0.8844 +vn 0.0148 0.4952 0.8686 +vn -0.0584 0.2782 0.9588 +vn -0.0010 0.4227 0.9063 +vn 0.8293 0.2916 0.4766 +vn 0.1654 0.2608 0.9511 +vn 0.0847 0.4067 0.9096 +vn 0.6970 
0.4352 0.5700 +vn 0.7184 -0.0087 0.6956 +vn 0.7131 0.0807 0.6964 +vn 0.0276 0.4055 0.9137 +vn -0.0203 0.2843 0.9585 +vn 0.0624 0.4112 0.9094 +vn 0.6773 0.2668 0.6856 +vn 0.6814 0.2854 0.6740 +vn 0.2685 -0.5259 -0.8070 +vn 0.2808 -0.6510 -0.7053 +vn 0.1616 -0.5507 -0.8189 +vn 0.1761 -0.5259 -0.8321 +vn 0.2485 -0.7051 -0.6642 +vn -0.0605 -0.6265 -0.7771 +vn -0.0093 -0.5126 -0.8586 +vn 0.6923 -0.5327 -0.4867 +vn 0.7279 -0.5602 -0.3954 +vn 0.0212 -0.4590 -0.8882 +vn 0.0017 -0.4716 -0.8818 +vn 0.6717 -0.3488 -0.6536 +vn 0.6663 -0.3719 -0.6463 +vn 0.0350 -0.6257 -0.7793 +vn 0.8420 -0.2023 -0.5000 +vn 0.6614 -0.2192 -0.7173 +vn 0.9249 0.1361 0.3549 +vn 0.2733 0.0943 0.9573 +vn 0.2050 0.2786 0.9383 +vn 0.8051 0.2955 0.5143 +vn 0.8611 0.1980 0.4684 +vn 0.7877 0.1326 0.6016 +vn 0.7894 -0.2191 0.5735 +vn 0.7892 -0.0988 0.6062 +vn 0.1389 0.2389 0.9611 +vn 0.1341 0.0496 0.9897 +vn -0.2183 -0.5074 -0.8336 +vn -0.1850 -0.4000 -0.8976 +vn 0.4667 -0.5009 -0.7289 +vn 0.4631 -0.5157 -0.7209 +vn -0.1251 -0.1780 -0.9760 +vn -0.1143 -0.1420 -0.9832 +vn 0.4928 -0.0175 -0.8699 +vn 0.5210 -0.0474 -0.8522 +vn -0.1555 -0.2533 -0.9548 +vn 0.5176 -0.1736 -0.8378 +vn 0.5053 -0.1895 -0.8419 +vn 0.9955 0.0081 0.0941 +vn 0.4636 -0.1059 0.8797 +vn 0.3487 0.1286 0.9284 +vn 0.9101 0.1656 0.3798 +vn 0.9330 0.0348 0.3581 +vn 0.9347 0.0486 0.3521 +vn 0.3538 0.1457 0.9239 +vn 0.9078 -0.2649 0.3251 +vn 0.9118 -0.2053 0.3556 +vn 0.4055 0.0844 0.9102 +vn 0.3933 -0.0564 0.9177 +vn -0.1730 -0.3237 -0.9302 +vn -0.1232 -0.4137 -0.9020 +vn 0.6575 -0.1860 -0.7301 +vn 0.5299 -0.1402 -0.8364 +vn 0.6740 -0.1918 -0.7134 +vn 0.5696 -0.3185 -0.7577 +vn -0.1694 -0.5274 -0.8326 +vn -0.1832 -0.3787 -0.9072 +vn 0.5852 -0.5021 -0.6368 +vn 0.6156 -0.5652 -0.5492 +vn 0.9603 0.1129 -0.2549 +vn 0.9645 0.0179 -0.2635 +vn 0.9809 -0.1375 -0.1378 +vn 0.9843 -0.0033 -0.1764 +vn 0.9085 -0.4175 -0.0178 +vn 0.9152 -0.4008 -0.0433 +vn 0.8631 -0.4281 -0.2680 +vn 0.8635 -0.4190 -0.2808 +vn 0.9600 -0.0555 -0.2744 +vn 0.9581 -0.0750 -0.2763 +vn 0.9422 0.0550 -0.3304 +vn 0.9604 0.0825 -0.2662 +vn 0.9775 0.1678 -0.1279 +vn 0.9911 0.1140 -0.0686 +vn 0.9967 -0.0691 0.0418 +vn 0.9991 -0.0328 0.0283 +vn 0.9322 -0.3126 0.1823 +vn 0.9440 -0.3001 0.1373 +vn 0.9851 0.1717 0.0020 +vn 0.9628 0.2316 0.1393 +vn 0.5555 0.5038 0.6615 +vn 0.6402 0.4889 0.5925 +vn 0.9361 -0.2289 0.2672 +vn 0.9335 -0.2618 0.2449 +vn 0.6601 0.1142 0.7425 +vn 0.6529 0.0486 0.7559 +vn 0.9971 -0.0587 0.0491 +vn 0.9993 -0.0278 0.0233 +vn 0.6821 0.3524 0.6407 +vn 0.6641 0.3342 0.6688 +vn -0.4946 -0.6620 -0.5631 +vn -0.5133 -0.4962 -0.7002 +vn 0.2546 -0.6270 -0.7362 +vn 0.2404 -0.7829 -0.5739 +vn -0.9440 -0.0068 -0.3298 +vn -0.9008 0.3350 0.2764 +vn -0.9716 -0.1057 -0.2117 +vn -0.9075 0.1835 0.3779 +vn -0.9053 0.3162 0.2835 +vn -0.9627 -0.0829 -0.2577 +vn -0.9578 -0.2386 -0.1604 +vn 0.5370 0.4466 0.7157 +vn 0.5173 0.4230 0.7440 +vn 0.8822 0.1643 0.4412 +vn 0.9478 0.0631 0.3125 +vn 0.8320 0.0927 0.5470 +vn 0.8711 0.1118 0.4782 +vn 0.4477 0.3812 0.8089 +vn 0.4182 0.3741 0.8278 +vn 0.8559 -0.3711 -0.3602 +vn 0.1883 -0.4886 -0.8520 +vn 0.1179 -0.6018 -0.7899 +vn 0.9006 -0.3623 -0.2401 +vn 0.8826 -0.4559 -0.1144 +vn 0.0992 -0.9600 -0.2617 +vn 0.4679 -0.8699 0.1562 +vn -0.2930 -0.8729 0.3902 +vn -0.4241 -0.8860 -0.1873 +vn -0.8056 -0.3374 0.4870 +vn -0.8465 -0.5309 0.0403 +vn -0.2888 0.5129 0.8084 +vn -0.2634 0.4441 0.8564 +vn -0.3275 0.5983 0.7313 +vn -0.1812 0.5914 0.7857 +vn 0.2242 -0.0108 0.9745 +vn -0.3709 0.1345 0.9189 +vn 0.7072 -0.2171 0.6729 +vn 0.8165 -0.2254 -0.5315 +vn 0.9708 -0.1292 -0.2020 +vn 
0.7841 -0.3689 -0.4991 +vn 0.7916 -0.3427 -0.5059 +vn 0.8306 -0.4653 -0.3059 +vn 0.8462 -0.4836 -0.2237 +vn 0.7251 -0.6807 -0.1046 +vn 0.7837 -0.4492 0.4290 +vn 0.2408 -0.4496 0.8601 +vn -0.0837 -0.9151 0.3945 +vn 0.3626 -0.6881 -0.6285 +vn -0.2808 -0.8297 -0.4824 +vn 0.7033 -0.7066 0.0776 +vn 0.6359 -0.4449 0.6307 +vn -0.0315 -0.4293 0.9026 +vn -0.2064 -0.9434 0.2597 +vn -0.1700 -0.8311 -0.5295 +vn 0.5102 -0.7346 -0.4472 +vn 0.7154 -0.5986 0.3603 +vn 0.5507 -0.2488 0.7967 +vn -0.1667 -0.2713 0.9479 +vn -0.2136 -0.8855 0.4126 +vn -0.1831 -0.9189 -0.3495 +vn 0.5847 -0.7812 -0.2187 +vn -0.8516 -0.5218 0.0491 +vn -0.5765 -0.7188 -0.3886 +vn 0.0564 -0.9388 -0.3398 +vn -0.3338 -0.8573 0.3920 +vn -0.7713 -0.1629 0.6153 +vn -0.2441 -0.1463 0.9587 +vn 0.7621 -0.4995 0.4120 +vn 0.5076 -0.1212 0.8530 +vn 0.7267 -0.6742 -0.1318 +vn -0.7517 -0.6220 0.2194 +vn -0.6154 -0.7485 -0.2471 +vn -0.6651 -0.3445 0.6626 +vn -0.7973 -0.5332 0.2828 +vn -0.7046 -0.6165 -0.3514 +vn -0.5663 -0.2899 0.7715 +vn -0.6664 -0.5587 0.4937 +vn -0.7506 -0.6307 -0.1971 +vn -0.2252 -0.3164 0.9215 +vn -0.7664 0.4926 0.4122 +vn 0.6741 0.2058 0.7094 +vn 0.9669 -0.1777 0.1831 +vn 0.8289 -0.3976 -0.3934 +vn -0.5628 -0.2593 -0.7849 +vn -0.9483 0.1422 -0.2837 +vn -0.6411 0.4644 0.6109 +vn 0.7046 0.1323 0.6971 +vn 0.9457 -0.3045 0.1139 +vn 0.6657 -0.5319 -0.5234 +vn -0.6495 -0.2183 -0.7284 +vn -0.9766 0.1453 -0.1583 +vn -0.5690 0.4026 0.7170 +vn 0.1837 0.2870 0.9402 +vn 0.7903 -0.0024 0.6127 +vn 0.9405 -0.3328 -0.0690 +vn 0.5919 -0.4309 -0.6812 +vn -0.1710 -0.3163 -0.9331 +vn -0.7847 -0.0404 -0.6186 +vn -0.9523 0.2840 0.1114 +vn -0.3540 0.2934 0.8880 +vn 0.9239 -0.1556 0.3496 +vn 0.8920 -0.3806 -0.2439 +vn 0.4755 -0.4800 -0.7372 +vn -0.8859 -0.0817 -0.4567 +vn -0.9247 0.1873 0.3313 +vn -0.3068 0.5564 0.7722 +vn 0.4654 0.3756 0.8015 +vn 0.8942 0.1063 0.4349 +vn -0.5028 -0.3594 -0.7861 +vn 0.8558 -0.2088 -0.4734 +vn 0.9966 0.0634 -0.0525 +vn 0.7919 0.3786 0.4791 +vn 0.8817 -0.3222 -0.3447 +vn 0.9904 -0.0890 0.1060 +vn 0.7761 0.2132 0.5935 +vn 0.7750 -0.1586 -0.6117 +vn 0.9858 0.1544 -0.0654 +vn 0.7832 0.3712 0.4988 +vn 0.7634 -0.4455 -0.4677 +vn 0.9733 -0.2181 0.0712 +vn 0.7821 0.1320 0.6090 +vn -0.7935 -0.1735 -0.5833 +vn -0.5982 0.2376 0.7653 +vn -0.9871 0.0510 0.1515 +vn -0.7923 -0.1419 -0.5934 +vn 0.5672 -0.3300 -0.7546 +vn 0.9796 -0.1514 -0.1319 +vn 0.7844 0.1233 0.6079 +vn -0.5853 0.2785 0.7615 +vn -0.9812 0.1199 0.1511 +vn 0.8623 0.0441 0.5045 +vn 0.9726 -0.2081 -0.1038 +vn 0.6955 -0.3468 -0.6294 +vn 0.5788 -0.3581 -0.7326 +vn 0.9360 -0.2399 -0.2575 +vn 0.9667 -0.0886 0.2401 +vn 0.9690 0.0473 0.2425 +vn 0.9648 -0.0552 -0.2573 +vn 0.6500 -0.1690 -0.7409 +vn -0.5265 -0.4947 -0.6914 +vn -0.9831 -0.1167 -0.1414 +vn -0.8793 0.3166 0.3558 +vn -0.3001 0.6041 0.7383 +vn 0.5383 0.4382 0.7199 +vn 0.9523 0.0007 0.3052 +vn 0.7698 -0.4868 -0.4128 +vn 0.1102 -0.6624 -0.7410 +vn -0.5437 -0.5486 -0.6352 +vn -0.9954 -0.0774 -0.0572 +vn -0.8236 0.3674 0.4320 +vn -0.1783 0.6289 0.7568 +vn 0.5582 0.4415 0.7025 +vn 0.8908 -0.0396 0.4527 +vn 0.6720 -0.7139 -0.1969 +vn -0.0410 -0.8088 -0.5867 +vn -0.6315 -0.5701 -0.5256 +vn -0.1200 -0.0061 0.9928 +vn -0.7884 -0.5478 0.2800 +vn -0.7555 -0.1414 0.6398 +vn -0.2244 0.2636 0.9382 +vn 0.4150 0.4820 0.7717 +vn 0.6116 0.4280 0.6654 +vn 0.8545 0.3953 0.3369 +vn 0.9638 0.2384 0.1194 +vn 0.9850 0.1719 -0.0169 +vn 0.9802 0.1104 -0.1644 +vn 0.9322 0.1085 -0.3452 +vn 0.8912 0.0362 -0.4521 +vn 0.8193 0.0943 -0.5655 +vn 0.4685 0.0179 -0.8833 +vn -0.8879 -0.4308 0.1617 +vn -0.8667 -0.4424 0.2305 +vn -0.8562 -0.5090 
0.0889 +vn -0.0997 -0.2173 -0.9710 +vn -0.5809 -0.4138 -0.7010 +vn -0.8471 -0.5228 -0.0953 +vn -0.8605 -0.4712 0.1936 +vn -0.8889 -0.4104 0.2037 +vn -0.8897 -0.3872 0.2417 +vn 0.7982 0.4643 0.3839 +vn 0.9402 0.3310 0.0805 +vn 0.9579 0.2385 -0.1596 +vn 0.9039 0.1740 -0.3908 +vn 0.7922 0.1135 -0.5996 +vn 0.4736 -0.0379 -0.8799 +vn -0.8613 -0.4895 0.1359 +vn -0.8887 -0.4171 0.1902 +vn -0.8986 -0.3787 0.2216 +vn -0.9012 -0.4325 0.0289 +vn -0.7196 -0.4690 -0.5121 +vn -0.1080 -0.2782 -0.9544 +vn -0.5752 0.6904 0.4388 +vn -0.4682 0.5619 0.6819 +vn -0.9212 0.1402 0.3629 +vn -0.9553 0.2176 0.2003 +vn 0.0847 0.8291 0.5527 +vn 0.1826 0.6804 0.7097 +vn 0.5490 0.6507 0.5246 +vn 0.5419 0.5880 0.6005 +vn 0.6696 0.5483 0.5010 +vn 0.6612 0.5109 0.5493 +vn 0.7853 0.4874 0.3818 +vn 0.8103 0.4038 0.4247 +vn 0.9327 0.3518 0.0795 +vn 0.9638 0.2263 0.1412 +vn 0.9573 0.2065 -0.2024 +vn 0.9869 0.0630 -0.1486 +vn 0.9035 0.1055 -0.4155 +vn 0.9239 -0.0459 -0.3798 +vn 0.7653 0.0105 -0.6436 +vn 0.7787 -0.1582 -0.6071 +vn 0.4298 -0.1389 -0.8922 +vn 0.4331 -0.3654 -0.8239 +vn -0.1411 -0.3178 -0.9376 +vn -0.0844 -0.5587 -0.8251 +vn -0.6767 -0.4273 -0.5995 +vn -0.5945 -0.6201 -0.5119 +vn -0.9136 -0.4003 -0.0709 +vn -0.8442 -0.5331 -0.0557 +vn -0.9115 -0.3826 0.1506 +vn -0.8612 -0.4917 0.1288 +vn -0.9689 -0.2207 0.1123 +vn -0.9301 -0.3662 0.0269 +vn -0.8862 -0.4408 0.1427 +vn -0.8523 -0.5041 0.1394 +vn -0.8926 -0.4461 0.0650 +vn -0.8748 -0.4815 0.0534 +vn -0.7152 0.5364 0.4482 +vn -0.9901 0.0499 0.1309 +vn 0.0061 0.7508 0.6605 +vn 0.5916 0.5454 0.5938 +vn -0.8333 -0.5071 -0.2200 +vn 0.1103 0.3661 0.9240 +vn -0.3757 0.2564 0.8906 +vn 0.5058 0.3660 0.7812 +vn 0.6977 0.3516 0.6241 +vn 0.8347 0.3463 0.4281 +vn 0.9497 0.2730 0.1532 +vn 0.9813 0.1274 -0.1444 +vn 0.9199 -0.0325 -0.3907 +vn 0.7096 -0.2164 -0.6706 +vn 0.3828 -0.3703 -0.8464 +vn -0.0826 -0.5429 -0.8357 +vn -0.5883 -0.6347 -0.5010 +vn -0.7535 -0.6371 -0.1621 +vn -0.7691 -0.6372 0.0495 +vn -0.7636 -0.6126 0.2039 +vn -0.7794 -0.4969 0.3817 +vn -0.7795 -0.2748 0.5629 +vn -0.6699 0.0274 0.7419 +vn -0.4685 0.5256 0.7101 +vn -0.7517 0.3057 0.5843 +vn -0.0224 0.6587 0.7520 +vn 0.3924 0.6488 0.6520 +vn 0.6184 0.6051 0.5014 +vn 0.7590 0.5736 0.3079 +vn 0.8582 0.5121 0.0366 +vn 0.8896 0.3790 -0.2548 +vn 0.8296 0.2268 -0.5103 +vn 0.5927 0.0574 -0.8034 +vn 0.2407 -0.0583 -0.9688 +vn -0.2206 -0.1703 -0.9604 +vn -0.6961 -0.2953 -0.6544 +vn -0.8831 -0.3371 -0.3263 +vn -0.9321 -0.3459 -0.1075 +vn -0.9007 0.0142 0.4342 +vn -0.9424 -0.3282 0.0641 +vn -0.9416 -0.2128 0.2610 +vn 0.2966 0.6341 -0.7141 +vn 0.2875 0.7907 -0.5405 +vn -0.2230 0.3874 -0.8946 +vn -0.5399 0.3200 -0.7785 +vn -0.7069 0.2310 -0.6685 +vn -0.7854 0.3156 -0.5325 +vn -0.8664 0.3703 -0.3351 +vn -0.7435 0.6686 0.0141 +vn -0.5371 0.8423 0.0453 +vn -0.7614 0.6274 -0.1634 +vn -0.6898 0.7221 0.0524 +vn -0.2207 0.9442 0.2446 +vn 0.1987 0.9741 0.1075 +vn 0.4352 0.8992 0.0441 +vn 0.4529 0.8830 -0.1231 +vn -0.1936 0.4763 -0.8577 +vn 0.4905 0.8229 -0.2867 +vn 0.1296 0.5684 -0.8125 +vn 0.3843 0.5333 -0.7536 +vn -0.4248 0.8137 -0.3968 +vn -0.3676 0.2444 -0.8973 +vn -0.7035 0.2454 -0.6669 +vn -0.8781 0.2436 -0.4119 +vn -0.8128 0.5778 0.0743 +vn -0.7129 0.6804 0.1696 +vn 0.0150 0.9575 0.2880 +vn 0.4081 0.9128 0.0136 +vn 0.5759 0.8020 -0.1588 +vn 0.2615 0.5132 -0.8175 +vn -0.0053 -0.2042 -0.9789 +vn -0.0000 -0.2017 -0.9794 +vn -0.0000 0.0142 -0.9999 +vn 0.0044 0.0254 -0.9997 +vn 0.9663 -0.2230 0.1285 +vn 0.9521 -0.2387 0.1911 +vn 0.9726 -0.2216 0.0710 +vn 0.9649 -0.2540 0.0673 +vn 0.9790 -0.1324 0.1549 +vn 0.9274 -0.3460 0.1424 +vn 
0.7099 0.3519 0.6101 +vn 0.8626 0.2673 0.4295 +vn 0.9690 0.0604 0.2397 +vn 0.8317 0.1229 0.5414 +vn 0.3970 -0.3836 -0.8338 +vn 0.1648 -0.2795 -0.9459 +vn 0.1062 0.0235 -0.9941 +vn 0.1706 -0.0201 -0.9851 +vn 0.5368 -0.4583 -0.7083 +vn 0.6336 -0.4147 -0.6531 +vn 0.3808 -0.3948 -0.8361 +vn 0.2721 -0.3915 -0.8790 +vn 0.0033 -0.2827 -0.9592 +vn 0.0212 -0.3190 -0.9475 +vn -0.0000 -0.3105 -0.9506 +vn -0.0000 -0.2764 -0.9610 +vn 0.4290 0.6493 0.6280 +vn 0.5255 0.4499 0.7221 +vn 0.4848 0.3826 0.7865 +vn 0.3642 0.6771 0.6394 +vn 0.9391 0.2547 0.2306 +vn 0.8880 0.3082 0.3414 +vn 0.7644 0.4009 0.5050 +vn 0.6946 0.4524 0.5594 +vn 0.9777 -0.2013 -0.0603 +vn 0.9816 -0.1780 -0.0688 +vn 0.9218 -0.2225 0.3176 +vn 0.8603 -0.1544 0.4858 +vn 0.9477 0.1544 0.2794 +vn 0.9839 0.0788 0.1605 +vn 0.9362 0.1029 0.3361 +vn 0.8932 0.1868 0.4090 +vn 0.9894 0.1255 0.0729 +vn 0.9567 0.1940 0.2171 +vn 0.9113 0.3032 0.2785 +vn 0.9354 0.3536 0.0047 +vn 0.9957 -0.0009 -0.0929 +vn 0.9241 0.3304 -0.1919 +vn 0.5691 -0.0070 -0.8223 +vn 0.5061 0.4164 -0.7553 +vn 0.6805 0.5397 -0.4957 +vn 0.7609 0.0526 -0.6468 +vn -0.0000 0.0957 -0.9954 +vn -0.0000 0.2631 -0.9648 +vn 0.0865 0.2769 -0.9570 +vn 0.0735 0.0980 -0.9925 +vn 0.5699 0.3821 -0.7275 +vn 0.6134 0.2283 -0.7560 +vn 0.3812 0.1810 -0.9066 +vn 0.3866 0.3606 -0.8488 +vn 0.8989 0.3262 0.2926 +vn 0.9271 0.2235 0.3010 +vn 0.9697 0.2338 -0.0703 +vn 0.9365 0.3456 -0.0600 +vn 0.6276 0.1196 0.7693 +vn 0.6731 0.1953 0.7133 +vn 0.5761 0.1211 0.8083 +vn 0.5301 0.0731 0.8448 +vn 0.7673 0.2714 0.5810 +vn 0.7703 0.1782 0.6122 +vn 0.3704 0.0562 0.9272 +vn 0.4215 -0.1612 0.8924 +vn 0.5135 -0.1451 0.8457 +vn 0.5367 0.0988 0.8380 +vn 0.9259 -0.0989 0.3647 +vn 0.9607 -0.1095 0.2549 +vn 0.9627 0.1926 0.1901 +vn 0.7929 0.1481 0.5911 +vn 0.9925 -0.1217 0.0092 +vn 0.6586 -0.1436 -0.7387 +vn 0.4417 -0.1872 -0.8774 +vn 0.4751 0.1281 -0.8705 +vn 0.1530 -0.1555 -0.9759 +vn -0.3338 -0.2092 -0.9191 +vn -0.3792 -0.0204 -0.9251 +vn 0.0761 0.0423 -0.9962 +vn 0.0981 0.5241 0.8460 +vn 0.0952 0.5162 0.8512 +vn 0.4761 0.6378 0.6054 +vn 0.4628 0.6575 0.5946 +vn -0.6018 -0.1200 0.7896 +vn -0.2790 0.2382 0.9303 +vn -0.2788 0.2647 0.9232 +vn -0.6109 -0.0944 0.7861 +vn -0.7559 -0.4719 0.4538 +vn -0.7736 -0.4497 0.4464 +vn 0.1948 -0.5066 -0.8399 +vn -0.1858 -0.7076 -0.6817 +vn -0.2122 -0.6731 -0.7084 +vn 0.1683 -0.4800 -0.8610 +vn 0.8880 0.3154 0.3346 +vn 0.5800 0.5020 0.6415 +vn 0.4611 0.6436 0.6109 +vn 0.7554 0.4731 0.4533 +vn -0.0000 0.2877 -0.9577 +vn -0.0000 0.1195 -0.9928 +vn 0.1142 0.1886 -0.9754 +vn 0.0718 0.3259 -0.9427 +vn 0.6833 0.1340 0.7177 +vn 0.7413 0.2284 0.6311 +vn 0.8817 0.4074 0.2381 +vn 0.7920 0.3170 0.5218 +vn 0.8807 0.4650 -0.0903 +vn 0.5193 0.5002 -0.6929 +vn 0.3875 0.4785 -0.7879 +vn 0.1109 0.4210 -0.9003 +vn -0.0000 0.4086 -0.9127 +vn 0.4363 0.6687 -0.6021 +vn 0.6582 0.4888 -0.5726 +vn 0.8150 0.5494 -0.1841 +vn 0.5021 0.8465 -0.1771 +vn 0.1588 0.6351 -0.7560 +vn 0.2491 0.5911 -0.7672 +vn 0.4086 0.8622 -0.2993 +vn 0.3147 0.8849 -0.3435 +vn 0.8876 0.4313 -0.1615 +vn 0.8357 0.1553 -0.5267 +vn 0.8146 0.2033 -0.5432 +vn 0.8626 0.4733 -0.1788 +vn 0.5701 -0.1973 -0.7975 +vn 0.5564 -0.1649 -0.8144 +vn 0.1044 0.4089 -0.9066 +vn 0.0990 0.4086 -0.9073 +vn -0.0000 0.3471 -0.9378 +vn 0.0271 0.3560 -0.9341 +vn 0.3157 -0.1205 -0.9412 +vn 0.2600 -0.2042 -0.9438 +vn 0.1337 0.0537 -0.9896 +vn 0.2419 0.0733 -0.9675 +vn 0.1934 0.1237 -0.9733 +vn 0.2182 0.3146 -0.9238 +vn 0.2424 0.4475 -0.8608 +vn 0.3728 -0.0298 -0.9274 +vn 0.3231 0.3411 -0.8827 +vn 0.1392 -0.3488 -0.9268 +vn 0.0872 -0.3144 -0.9453 +vn 0.0498 -0.2201 
-0.9742 +vn 0.0500 0.0435 -0.9978 +vn 0.0836 0.3832 -0.9199 +vn 0.2406 0.4597 -0.8548 +vn 0.3482 0.3466 -0.8710 +vn -0.5345 -0.7697 -0.3491 +vn -0.5495 -0.7525 -0.3631 +vn 0.7823 -0.4340 -0.4468 +vn 0.8073 -0.3836 -0.4485 +vn 0.9905 0.1074 -0.0864 +vn 0.8446 0.5353 -0.0102 +vn 0.6520 0.5044 -0.5661 +vn 0.7397 0.3717 -0.5609 +vn 0.8017 0.2364 -0.5490 +vn 0.8105 -0.0169 -0.5855 +vn 0.7683 0.2081 -0.6053 +vn 0.2242 0.3078 -0.9247 +vn 0.1112 -0.0712 -0.9912 +vn 0.4463 -0.0623 -0.8927 +vn 0.5654 0.2580 -0.7834 +vn -0.1105 -0.5071 -0.8548 +vn 0.3443 -0.4433 -0.8276 +vn -0.3742 -0.7912 -0.4836 +vn 0.4225 -0.7251 -0.5438 +vn -0.1673 -0.5009 0.8492 +vn 0.7167 -0.3383 0.6098 +vn 0.7617 -0.5753 0.2981 +vn -0.3986 -0.7530 0.5236 +vn 0.0809 -0.1339 0.9877 +vn 0.4125 -0.1331 0.9012 +vn 0.3886 0.2767 0.8789 +vn 0.6784 0.1944 0.7085 +vn 0.5639 0.6072 0.5597 +vn 0.7816 0.4459 0.4362 +vn 0.5356 0.7838 -0.3144 +vn 0.3541 0.5905 -0.7252 +vn 0.6019 0.4935 -0.6279 +vn 0.6955 0.6604 -0.2831 +vn 0.2429 0.1409 -0.9598 +vn -0.2299 -0.2125 -0.9497 +vn -0.1281 -0.2147 -0.9683 +vn 0.2812 0.1626 -0.9458 +vn -0.5172 -0.4120 -0.7501 +vn -0.4340 -0.4633 -0.7726 +vn -0.7058 -0.5258 -0.4746 +vn -0.6833 -0.5660 -0.4612 +vn -0.6958 -0.4891 0.5259 +vn -0.4499 -0.3524 0.8206 +vn -0.3725 -0.4492 0.8121 +vn -0.6378 -0.5483 0.5409 +vn -0.0687 -0.1586 0.9850 +vn 0.0041 -0.2100 0.9777 +vn 0.4542 0.1867 0.8711 +vn 0.4643 0.1909 0.8649 +vn 0.7532 0.4311 0.4969 +vn 0.7344 0.4570 0.5018 +vn 0.8105 0.5354 -0.2377 +vn 0.6244 0.4203 -0.6584 +vn 0.6033 0.4306 -0.6713 +vn 0.7930 0.5505 -0.2609 +vn 0.3812 -0.0546 -0.9229 +vn 0.4029 0.3238 -0.8560 +vn 0.8577 -0.0545 0.5112 +vn 0.7519 0.2036 0.6270 +vn 0.7266 0.4841 0.4875 +vn 0.4870 0.5429 -0.6841 +vn 0.6251 0.7183 -0.3054 +vn 0.0306 -0.1520 -0.9879 +vn 0.2877 0.2258 -0.9307 +vn 0.5164 0.4925 -0.7006 +vn 0.7098 0.6367 -0.3013 +vn 0.4850 0.2297 0.8438 +vn 0.6744 0.5213 0.5229 +vn 0.1897 -0.1974 0.9618 +vn -0.1069 -0.5617 0.8204 +vn -0.4228 -0.7244 0.5446 +vn -0.5952 -0.6699 -0.4439 +vn -0.2541 -0.5107 -0.8214 +vn 0.0504 -0.2878 -0.9564 +vn -0.0000 -0.2745 -0.9616 +vn 0.4585 -0.3480 -0.8177 +vn 0.1958 -0.3207 -0.9267 +vn 0.6940 -0.3437 -0.6326 +vn 0.8334 -0.3150 -0.4541 +vn 0.9760 -0.1026 0.1921 +vn 0.9849 -0.1697 0.0332 +vn 0.9158 -0.2780 0.2898 +vn 0.9636 -0.0417 0.2641 +vn 0.9999 -0.0150 -0.0032 +vn 0.9688 -0.0004 0.2479 +vn 0.4616 0.8243 0.3279 +vn 0.7682 0.5684 0.2946 +vn 0.4336 0.8622 0.2618 +vn 0.3878 0.8916 0.2337 +vn 0.7196 0.6773 0.1531 +vn 0.7849 0.6022 0.1459 +vn 0.6036 0.7674 0.2164 +vn 0.8243 0.5517 0.1270 +vn 0.7529 0.6379 0.1620 +vn 0.8387 0.5279 0.1338 +vn 0.7529 0.6024 0.2651 +vn 0.7273 0.6412 0.2449 +vn 0.1422 0.4423 -0.8855 +vn -0.0000 0.3948 -0.9188 +vn 0.0371 0.3963 -0.9174 +vn -0.7129 -0.7004 0.0349 +vn -0.7316 -0.6813 0.0250 +vn -0.7984 -0.6021 -0.0067 +vn -0.8127 -0.5826 -0.0140 +vn -0.7168 -0.6969 -0.0217 +vn 0.5804 -0.7891 -0.2012 +vn -0.5373 -0.8406 -0.0680 +vn 0.9205 -0.3100 -0.2379 +vn 0.9153 -0.3437 -0.2102 +vn 0.9268 -0.2646 -0.2665 +vn 0.9458 -0.1213 -0.3013 +vn 0.8880 0.2606 -0.3790 +vn 0.8717 0.3545 -0.3383 +vn 0.7918 0.4932 -0.3602 +vn 0.9147 0.2301 -0.3322 +vn 0.9906 0.0294 -0.1336 +vn 0.9731 -0.0298 -0.2286 +vn 0.9118 0.2412 -0.3322 +vn 0.9618 0.2340 -0.1421 +vn 0.8643 -0.1922 -0.4649 +vn 0.9368 0.0861 -0.3392 +vn 0.9180 -0.0514 -0.3932 +vn 0.9755 -0.1453 0.1651 +vn 0.9696 -0.2387 0.0539 +vn 0.9180 -0.3482 -0.1898 +vn 0.7714 -0.4591 -0.4406 +vn 0.1411 0.3679 -0.9191 +vn -0.0000 0.3653 -0.9309 +vn -0.0000 0.0513 -0.9987 +vn 0.1673 0.0445 -0.9849 +vn 0.4594 
0.4095 -0.7882 +vn 0.2950 0.3793 -0.8770 +vn 0.3574 0.0366 -0.9332 +vn 0.5662 0.0467 -0.8230 +vn 0.5699 0.4535 -0.6852 +vn 0.7300 0.0924 -0.6772 +vn 0.8203 0.2479 -0.5153 +vn 0.6568 0.4812 -0.5806 +vn 0.7733 0.4997 -0.3902 +vn 0.8538 0.4946 -0.1627 +vn 0.8933 0.4361 0.1087 +vn 0.8490 0.3203 0.4202 +vn 0.8613 0.1537 0.4843 +vn 0.8144 0.2130 0.5397 +vn 0.9007 -0.3063 0.3081 +vn 0.6702 -0.0654 0.7393 +vn 0.1474 -0.0211 -0.9889 +vn 0.1146 0.3960 -0.9111 +vn 0.2265 -0.2103 -0.9510 +vn 0.9593 -0.1432 -0.2432 +vn 0.8304 0.1870 0.5249 +vn 0.9369 0.0338 0.3481 +vn 0.0800 -0.1947 -0.9776 +vn -0.0000 -0.1881 -0.9822 +vn 0.2617 -0.2115 -0.9417 +vn 0.5459 -0.2253 -0.8070 +vn 0.7520 -0.2274 -0.6187 +vn -0.0000 0.0277 -0.9996 +vn 0.0763 0.0239 -0.9968 +vn 0.1712 0.0322 -0.9847 +vn 0.3478 0.0722 -0.9348 +vn 0.6093 0.1268 -0.7828 +vn 0.8229 0.1563 -0.5462 +vn 0.9300 0.1600 -0.3309 +vn 0.9836 0.1592 -0.0844 +vn 0.9453 0.1474 0.2910 +vn 0.7746 0.1115 0.6225 +vn 0.6016 0.0650 0.7962 +vn 0.5057 0.0344 0.8620 +vn 0.0867 0.0092 -0.9962 +vn -0.0000 0.0164 -0.9999 +vn 0.1615 0.0127 -0.9868 +vn 0.3195 0.0335 -0.9470 +vn 0.5874 0.0706 -0.8062 +vn 0.8265 0.0982 -0.5543 +vn 0.9391 0.1020 -0.3282 +vn 0.9904 0.1004 -0.0950 +vn 0.9594 0.1050 0.2619 +vn 0.7793 0.0889 0.6203 +vn 0.5819 0.0524 0.8116 +vn 0.4808 0.0261 0.8765 +vn 0.4478 0.0359 0.8934 +vn 0.5681 0.0649 0.8204 +vn 0.7850 0.0994 0.6115 +vn 0.9682 0.1162 0.2216 +vn 0.9879 0.1028 -0.1159 +vn 0.9403 0.0958 -0.3265 +vn 0.8214 0.0956 -0.5623 +vn 0.5635 0.0752 -0.8227 +vn 0.2968 0.0421 -0.9540 +vn 0.1562 0.0216 -0.9875 +vn 0.0938 0.0159 -0.9955 +vn -0.2612 -0.0253 -0.9649 +vn -0.2011 -0.0322 -0.9790 +vn 0.4066 0.0498 0.9123 +vn 0.5559 0.0839 0.8270 +vn 0.7897 0.1229 0.6011 +vn 0.9679 0.1569 0.1964 +vn 0.9762 0.1652 -0.1402 +vn 0.9311 0.1672 -0.3242 +vn 0.8010 0.1628 -0.5761 +vn 0.5247 0.1198 -0.8428 +vn 0.2642 0.0668 -0.9622 +vn 0.1461 0.0395 -0.9885 +vn 0.0898 0.0284 -0.9956 +vn -0.3271 -0.0258 -0.9446 +vn 0.5829 0.4406 -0.6827 +vn 0.6501 0.4106 -0.6394 +vn 0.2351 0.0090 -0.9719 +vn 0.1537 0.1288 -0.9797 +vn -0.2768 -0.3933 -0.8767 +vn -0.3167 -0.2678 -0.9099 +vn -0.5277 -0.5511 -0.6464 +vn -0.5275 -0.5023 -0.6852 +vn -0.6847 -0.6044 -0.4073 +vn -0.6716 -0.6075 -0.4241 +vn -0.8339 -0.5504 0.0412 +vn -0.7236 -0.6901 0.0118 +vn -0.6952 -0.2989 0.6538 +vn -0.5931 -0.5472 0.5906 +vn -0.4796 -0.0579 0.8756 +vn -0.3963 -0.3401 0.8528 +vn -0.1113 0.1961 0.9742 +vn -0.0480 0.0448 0.9978 +vn 0.2874 0.4573 0.8416 +vn 0.4896 0.2338 0.8400 +vn 0.8140 0.2515 0.5235 +vn 0.7189 0.4419 0.5366 +vn 0.8867 0.3920 0.2452 +vn 0.8092 0.5515 0.2027 +vn 0.8541 0.4985 -0.1484 +vn 0.8232 0.5367 -0.1851 +vn 0.7818 0.2096 -0.5872 +vn 0.5287 -0.1429 -0.8367 +vn 0.1197 -0.5090 -0.8524 +vn -0.2116 -0.6950 -0.6872 +vn -0.5104 -0.7896 -0.3407 +vn -0.6823 -0.7272 0.0753 +vn -0.7007 -0.4885 0.5199 +vn -0.5375 -0.1644 0.8271 +vn -0.2260 0.1659 0.9599 +vn 0.1568 0.4222 0.8928 +vn 0.5161 0.5962 0.6150 +vn 0.7421 0.6267 0.2377 +vn 0.8531 0.4943 -0.1671 +vn 0.7578 -0.0865 0.6468 +vn 0.3385 0.7381 0.5836 +vn 0.1341 0.7485 0.6494 +vn -0.1044 0.7011 0.7053 +vn 0.1860 0.7445 0.6412 +vn -0.1479 0.6802 0.7179 +vn 0.1713 0.6928 0.7005 +vn 0.8020 -0.0151 -0.5972 +vn 0.5443 -0.1425 -0.8267 +vn 0.0484 -0.6213 -0.7821 +vn 0.6125 0.6995 0.3682 +vn 0.6560 0.6161 0.4360 +vn -0.1261 0.6011 0.7892 +vn 0.2348 0.5635 0.7921 +vn -0.0000 -0.5695 0.8220 +vn 0.0001 -0.7643 0.6448 +vn -0.1634 -0.7847 0.5979 +vn -0.2157 -0.5806 0.7851 +vn -0.2780 -0.8427 0.4611 +vn -0.3859 -0.6897 0.6127 +vn -0.3596 -0.8972 0.2563 +vn -0.4926 
-0.8120 0.3130 +vn -0.3582 -0.9327 -0.0412 +vn -0.5239 -0.8442 -0.1136 +vn -0.1514 -0.9762 -0.1553 +vn -0.4427 -0.8029 -0.3991 +vn -0.1585 -0.9601 -0.2306 +vn -0.4555 -0.8057 -0.3787 +vn -0.2574 -0.8028 -0.5379 +vn 0.2395 0.3581 -0.9024 +vn 0.4487 0.4891 -0.7480 +vn 0.6197 0.6827 -0.3871 +vn 0.7006 0.7120 0.0476 +vn 0.7330 0.5401 0.4137 +vn -0.0000 0.3325 -0.9431 +vn 0.6763 0.4431 0.5884 +vn 0.6563 0.5427 0.5241 +vn -0.0930 -0.5156 -0.8518 +vn 0.3558 -0.5622 -0.7466 +vn 0.7450 -0.5725 -0.3424 +vn -0.0616 -0.3781 -0.9237 +vn 0.2531 -0.2607 -0.9317 +vn 0.9482 -0.3073 0.0800 +vn 0.7946 0.0862 0.6010 +vn 0.7285 0.3358 0.5971 +vn 0.8765 -0.1074 0.4692 +vn -0.1733 0.2459 0.9537 +vn 0.1213 0.2403 0.9631 +vn 0.2278 0.4284 0.8744 +vn -0.1294 0.4586 0.8792 +vn 0.9320 -0.1922 -0.3074 +vn 0.9841 -0.1216 -0.1296 +vn 0.9864 -0.0727 0.1472 +vn 0.9233 -0.0046 0.3841 +vn 0.7808 0.0237 0.6244 +vn -0.2640 -0.0559 0.9629 +vn -0.2708 -0.0134 0.9625 +vn -0.1759 -0.0490 0.9832 +vn -0.0569 -0.0343 0.9978 +vn 0.1221 -0.0472 0.9914 +vn 0.6177 -0.0412 0.7853 +vn -0.2269 0.0219 0.9737 +vn -0.1843 0.0012 0.9829 +vn -0.1761 -0.0231 0.9841 +vn -0.2081 -0.0538 0.9766 +vn -0.1265 -0.2316 0.9645 +vn -0.2181 -0.0457 0.9748 +vn -0.2585 -0.0457 0.9649 +vn -0.2525 -0.0621 0.9656 +vn 0.7261 -0.4282 -0.5380 +vn 0.9106 -0.4127 -0.0231 +vn 0.5343 -0.3767 -0.7567 +vn 0.3618 -0.3536 -0.8626 +vn 0.1516 -0.3444 -0.9265 +vn -0.0000 -0.3308 -0.9437 +vn 0.7971 0.1745 -0.5781 +vn 0.5696 0.1825 -0.8014 +vn 0.2713 0.1842 -0.9447 +vn 0.0785 0.2083 -0.9749 +vn -0.0002 0.2318 -0.9728 +vn 0.9706 0.1336 0.2003 +vn -0.0001 0.4413 -0.8974 +vn 0.4357 0.7903 0.4308 +vn 0.3195 0.7927 0.5192 +vn 0.4549 0.8834 0.1126 +vn 0.4170 0.8882 0.1929 +vn 0.4267 0.8928 -0.1446 +vn 0.4222 0.9055 -0.0430 +vn 0.3480 0.8363 -0.4236 +vn 0.3632 0.8930 -0.2659 +vn 0.2079 0.7496 -0.6284 +vn 0.2164 0.8558 -0.4698 +vn -0.0000 0.7187 -0.6953 +vn -0.0000 0.8472 -0.5312 +vn -0.9661 -0.1145 -0.2313 +vn -0.9497 -0.1307 -0.2846 +vn -0.9065 -0.1850 -0.3794 +vn -0.7458 -0.5000 -0.4402 +vn -0.9793 -0.1230 -0.1605 +vn -0.9764 -0.0889 -0.1967 +vn -0.9710 -0.0939 -0.2198 +vn -0.9839 -0.1477 -0.1009 +vn -0.9901 -0.1344 0.0414 +vn -0.7293 -0.0599 0.6816 +vn -0.7202 -0.0347 0.6929 +vn -0.6542 0.0039 0.7563 +vn -0.5981 0.0001 0.8014 +vn -0.2440 0.0483 0.9686 +vn -0.0000 -0.8454 0.5341 +vn -0.1790 -0.8629 0.4727 +vn -0.2633 -0.9305 0.2547 +vn -0.2380 -0.9709 -0.0270 +vn -0.2089 -0.9401 -0.2694 +vn -0.0699 -0.9140 -0.3998 +vn 0.3531 -0.7598 -0.5459 +vn 0.4876 -0.5432 -0.6835 +vn 0.3426 -0.5530 -0.7595 +vn 0.2468 -0.5312 -0.8105 +vn 0.0393 -0.6982 -0.7148 +vn 0.8278 -0.4103 -0.3825 +vn 0.9910 -0.1062 -0.0815 +vn 0.5426 -0.6693 -0.5075 +vn 0.2431 -0.8468 -0.4731 +vn -0.1024 -0.9612 -0.2561 +vn -0.3417 -0.9395 0.0256 +vn -0.4696 -0.7971 0.3796 +vn 0.4371 0.1383 0.8887 +vn 0.1199 -0.0334 0.9922 +vn -0.2153 -0.3197 0.9227 +vn -0.4149 -0.5759 0.7044 +vn 0.9661 0.1025 0.2368 +vn 0.8109 0.1795 0.5569 +vn 0.0498 -0.4428 -0.8952 +vn 0.4694 -0.0905 -0.8784 +vn 0.7234 0.2799 -0.6312 +vn 0.7742 0.5924 -0.2227 +vn 0.6552 0.7367 0.1670 +vn 0.4178 0.7412 0.5255 +vn 0.0386 0.5804 0.8134 +vn -0.3410 0.3060 0.8889 +vn -0.6392 -0.0234 0.7687 +vn -0.7939 -0.3491 0.4978 +vn -0.7813 -0.6234 0.0300 +vn -0.5949 -0.6935 -0.4063 +vn -0.2930 -0.6203 -0.7276 +vn -0.2780 -0.2185 -0.9354 +vn 0.1634 0.1241 -0.9787 +vn 0.5807 0.4239 -0.6951 +vn 0.7220 0.6585 -0.2122 +vn 0.8063 0.5345 -0.2534 +vn 0.8399 0.5162 0.1677 +vn 0.7045 0.6902 0.1652 +vn 0.6004 0.6262 0.4973 +vn 0.7239 0.4640 0.5105 +vn 0.4448 0.3435 0.8271 +vn 
0.3847 0.4515 0.8051 +vn 0.0457 0.1035 0.9936 +vn -0.3409 -0.1308 0.9310 +vn -0.3029 -0.3222 0.8969 +vn -0.7298 -0.4078 0.5488 +vn -0.6267 -0.5290 0.5722 +vn -0.8510 -0.5245 0.0270 +vn -0.7565 -0.6519 0.0526 +vn -0.6473 -0.5909 -0.4815 +vn -0.5166 -0.4340 -0.7381 +vn 0.7542 0.3412 -0.5610 +vn 0.3706 -0.1559 -0.9156 +vn -0.1439 -0.5089 -0.8487 +vn -0.4585 -0.6064 -0.6497 +vn -0.6679 -0.6497 -0.3630 +vn -0.7874 -0.6053 0.1163 +vn 0.4100 0.7187 0.5616 +vn 0.6165 0.7394 0.2707 +vn 0.7731 0.6259 -0.1028 +vn 0.0249 -0.5003 -0.8655 +vn 0.4760 -0.1567 -0.8654 +vn 0.7618 0.2679 -0.5899 +vn 0.7898 0.5853 -0.1834 +vn 0.6491 0.7335 0.2017 +vn 0.3911 0.7434 0.5425 +vn 0.0120 0.5845 0.8113 +vn -0.3174 0.3196 0.8928 +vn -0.5851 -0.0114 0.8109 +vn -0.7506 -0.3144 0.5811 +vn -0.7873 -0.6094 0.0939 +vn -0.6112 -0.7015 -0.3665 +vn -0.3266 -0.6532 -0.6831 +vn -0.2529 -0.2006 -0.9465 +vn 0.1823 0.0913 -0.9790 +vn 0.6195 0.3875 -0.6827 +vn 0.8122 0.5251 -0.2543 +vn 0.8234 0.5359 0.1863 +vn 0.7032 0.4603 0.5419 +vn 0.4367 0.2946 0.8500 +vn 0.0281 0.0327 0.9991 +vn -0.3683 -0.2336 0.8999 +vn -0.6880 -0.4968 -0.5290 +vn -0.5192 -0.3855 -0.7627 +vn -0.2569 -0.1926 -0.9470 +vn -0.2529 -0.2012 -0.9463 +vn 0.2019 0.1331 -0.9703 +vn 0.1806 0.1104 -0.9773 +vn 0.6081 0.4335 -0.6651 +vn 0.6080 0.4152 -0.6768 +vn 0.7962 0.5590 -0.2315 +vn 0.8017 0.5487 -0.2372 +vn 0.8194 0.5491 0.1645 +vn 0.8164 0.5452 0.1904 +vn 0.7261 0.4456 0.5237 +vn 0.7050 0.4523 0.5463 +vn 0.4341 0.2177 0.8742 +vn 0.4307 0.2664 0.8623 +vn -0.0476 -0.0875 0.9950 +vn -0.0070 -0.0054 1.0000 +vn -0.4251 -0.2984 0.8545 +vn -0.4016 -0.2541 0.8799 +vn -0.7055 -0.4806 0.5208 +vn -0.7144 -0.4871 0.5023 +vn -0.8225 -0.5685 -0.0154 +vn -0.8217 -0.5699 0.0032 +vn -0.7143 -0.4989 -0.4908 +vn -0.7131 -0.4862 -0.5050 +vn -0.5270 -0.3853 -0.7575 +vn -0.5282 -0.3695 -0.7645 +vn -0.0518 -0.5238 -0.8503 +vn 0.4371 -0.1861 -0.8800 +vn 0.7772 0.2904 -0.5582 +vn 0.7872 0.6016 -0.1357 +vn 0.6241 0.7405 0.2492 +vn 0.3847 0.7296 0.5654 +vn 0.0785 0.5655 0.8210 +vn -0.2042 0.3150 0.9269 +vn -0.5034 -0.0281 0.8636 +vn -0.6970 -0.3152 0.6441 +vn -0.7817 -0.6068 0.1439 +vn -0.6371 -0.6920 -0.3395 +vn -0.3828 -0.6474 -0.6591 +vn -0.2193 -0.4718 -0.8540 +vn -0.1757 -0.5932 -0.7857 +vn -0.2847 -0.6343 -0.7188 +vn -0.3383 -0.6588 -0.6720 +vn -0.5237 -0.6905 -0.4990 +vn -0.5882 -0.6950 -0.4135 +vn -0.7687 -0.6109 0.1895 +vn -0.6162 -0.6042 0.5053 +vn -0.8343 -0.5513 0.0099 +vn -0.7284 -0.6632 -0.1721 +vn -0.1502 -0.4700 0.8698 +vn -0.1285 -0.3544 0.9262 +vn -0.3219 -0.5236 0.7888 +vn -0.5137 -0.5014 0.6962 +vn -0.0248 -0.3732 0.9274 +vn 0.0521 -0.3641 0.9299 +vn -0.0138 -0.3073 0.9515 +vn -0.4790 -0.3038 0.8236 +vn -0.4530 -0.1803 -0.8731 +vn -0.4791 -0.2700 -0.8352 +vn -0.6502 -0.3068 -0.6951 +vn -0.8835 -0.2737 -0.3802 +vn -0.9755 -0.2194 -0.0186 +vn -0.9114 -0.2029 0.3581 +vn -0.6745 -0.1787 0.7163 +vn -0.4180 -0.1572 0.8947 +vn -0.2422 -0.1256 0.9621 +vn -0.1307 -0.0920 0.9871 +vn -0.1522 -0.0687 0.9860 +vn -0.5657 -0.0987 0.8187 +vn -0.6710 0.0407 0.7403 +vn -0.6106 0.1908 0.7686 +vn -0.5907 0.3797 0.7120 +vn -0.6275 0.4967 0.5996 +vn -0.6706 0.5198 0.5292 +vn -0.6753 0.4981 0.5439 +vn -0.5922 0.5372 0.6005 +vn -0.4629 0.6179 0.6356 +vn -0.2066 0.8288 0.5199 +vn 0.0839 0.9586 0.2722 +vn 0.1360 0.9880 0.0728 +vn 0.1062 0.9910 -0.0814 +vn -0.0000 0.9940 -0.1093 +vn -0.9299 -0.0369 -0.3659 +vn -0.9244 -0.1513 -0.3500 +vn -0.9485 0.0433 -0.3138 +vn -0.8641 -0.0664 -0.4990 +vn -0.9259 -0.0075 -0.3776 +vn -0.9541 0.0094 -0.2992 +vn -0.9615 0.0329 -0.2730 +vn -0.9514 -0.2127 -0.2228 
+vn -0.9903 -0.1231 -0.0648 +vn -0.9999 0.0100 -0.0073 +vn -0.9969 -0.0711 -0.0339 +vn -0.9729 -0.2305 0.0163 +vn -0.9431 -0.3049 0.1327 +vn -0.8361 -0.3568 0.4167 +vn -0.6578 -0.3054 0.6885 +vn -0.3664 -0.1704 0.9147 +vn -0.0000 -0.1561 0.9877 +vn -0.5885 -0.1138 -0.8005 +vn -0.6469 -0.0831 -0.7581 +vn -0.6938 -0.0758 -0.7162 +vn -0.7320 -0.0968 -0.6744 +vn -0.7497 -0.1527 -0.6439 +vn -0.7518 -0.1635 -0.6388 +vn -0.7551 -0.1304 -0.6425 +vn -0.7852 -0.1046 -0.6104 +vn -0.8552 -0.0695 -0.5136 +vn -0.9120 -0.0332 -0.4088 +vn -0.9373 0.0224 -0.3479 +vn -0.9372 0.0876 -0.3376 +vn -0.9305 0.0258 -0.3655 +vn -0.9347 -0.1654 -0.3147 +vn -0.9194 -0.3607 -0.1566 +vn -0.9507 -0.3102 0.0040 +vn -0.9996 -0.0166 0.0230 +vn -0.9979 0.0643 -0.0017 +vn -0.9931 -0.0536 0.1041 +vn -0.9618 -0.1384 0.2360 +vn -0.7668 -0.2580 0.5877 +vn -0.5047 -0.2863 0.8144 +vn -0.2462 -0.2166 0.9447 +vn -0.0000 -0.1933 0.9811 +vn -0.7819 -0.2324 -0.5784 +vn -0.7107 -0.1176 -0.6936 +vn -0.6497 -0.1192 -0.7508 +vn -0.5848 -0.1541 -0.7964 +vn -0.5389 -0.0369 0.8416 +vn -0.5777 -0.0754 0.8128 +vn -0.4705 0.0126 0.8823 +vn -0.2036 -0.0665 0.9768 +vn -0.0000 -0.1598 0.9872 +vn -0.1880 -0.1587 0.9693 +vn -0.0000 -0.1847 0.9828 +vn -0.2154 0.0065 0.9765 +vn -0.1985 -0.0363 0.9794 +vn -0.1715 -0.0448 0.9842 +vn -0.2745 -0.0716 0.9589 +vn -0.4345 -0.1031 0.8947 +vn -0.6723 -0.1965 -0.7137 +vn -0.8582 -0.2123 -0.4674 +vn -0.9779 -0.1931 -0.0800 +vn -0.9252 -0.1716 0.3385 +vn -0.6883 -0.1416 0.7115 +vn 0.9912 -0.1114 -0.0718 +vn 0.8330 -0.4103 -0.3711 +vn 0.5481 -0.6734 -0.4960 +vn 0.2404 -0.8576 -0.4547 +vn -0.1036 -0.9640 -0.2450 +vn -0.3324 -0.9427 0.0295 +vn -0.4399 -0.8087 0.3904 +vn 0.1396 -0.0764 0.9873 +vn 0.4552 0.0853 0.8863 +vn -0.1905 -0.3285 0.9251 +vn -0.3785 -0.5867 0.7159 +vn 0.9634 0.0884 0.2529 +vn 0.8079 0.1457 0.5710 +vn 0.4856 -0.7421 0.4620 +vn -0.7035 -0.1761 -0.6885 +vn -0.8350 -0.2345 -0.4978 +vn -0.9535 -0.2708 -0.1322 +vn -0.9162 -0.2621 0.3032 +vn -0.8206 -0.5710 -0.0218 +vn -0.8720 -0.4261 0.2410 +vn -0.8839 -0.4274 -0.1900 +vn -0.8142 -0.3862 -0.4335 +vn -0.7909 -0.4797 -0.3801 +vn -0.7631 -0.2583 -0.5925 +vn -0.6989 -0.2058 0.6849 +vn -0.4613 -0.1309 0.8775 +vn -0.3213 -0.0672 0.9446 +vn -0.2204 -0.0131 0.9753 +vn -0.2669 -0.0823 0.9602 +vn -0.3608 -0.1216 0.9247 +vn -0.5170 -0.2276 0.8252 +vn -0.7230 -0.3704 0.5831 +vn -0.2840 -0.1474 0.9474 +vn -0.4830 -0.2507 0.8390 +vn -0.7250 -0.4797 0.4942 +vn 0.0124 -0.9071 0.4208 +vn -0.0000 -0.9996 -0.0283 +vn -0.0000 -0.9070 0.4210 +vn -0.1949 -0.9576 -0.2122 +vn -0.2056 -0.9748 -0.0870 +vn -0.0428 -0.9131 -0.4056 +vn -0.0046 -0.9115 0.4114 +vn -0.0855 -0.9423 0.3237 +vn -0.1811 -0.9742 0.1347 +vn -0.0000 -0.0111 -0.9999 +vn -0.0000 -0.8733 -0.4871 +vn -0.8147 -0.1397 -0.5629 +vn -0.8055 -0.1308 -0.5779 +vn -0.8081 -0.1613 -0.5666 +vn -0.7684 -0.1570 -0.6204 +vn -0.8011 -0.1421 -0.5814 +vn -0.8918 -0.1284 -0.4337 +vn -0.9599 -0.0794 -0.2688 +vn -0.9940 -0.0024 -0.1092 +vn -0.9888 0.1453 0.0347 +vn -0.9723 0.2124 0.0974 +vn -0.8810 -0.1536 0.4475 +vn -0.9241 -0.2089 0.3199 +vn -0.9865 0.0283 0.1612 +vn -0.8938 0.2295 0.3852 +vn -0.7810 0.4872 0.3907 +vn -0.7009 0.5183 0.4900 +vn -0.6954 0.4493 0.5608 +vn -0.5670 0.2838 0.7733 +vn -0.3409 0.1380 0.9299 +vn -0.1382 0.0629 0.9884 +vn -0.0000 0.0428 0.9991 +vn -0.2676 -0.7592 -0.5933 +vn 0.7514 -0.3426 -0.5640 +vn 0.7313 0.5954 0.3327 +vn 0.6067 0.6879 0.3985 +vn 0.2293 -0.0950 0.9687 +vn 0.0650 0.0055 0.9979 +vn 0.1282 0.1331 0.9828 +vn 0.3155 0.0906 0.9446 +vn 0.3287 0.0236 0.9441 +vn 0.1514 0.0079 0.9884 +vn 
0.1275 -0.1082 0.9859 +vn 0.2863 -0.0793 0.9548 +vn 0.0700 -0.0945 0.9931 +vn 0.1827 -0.0873 0.9793 +vn 0.2206 -0.1340 0.9661 +vn 0.0888 -0.1643 0.9824 +vn 0.3643 -0.0502 0.9299 +vn 0.4432 -0.0757 0.8932 +vn 0.5781 0.0465 0.8147 +vn 0.5322 -0.0348 0.8459 +vn 0.5848 0.0510 0.8096 +vn 0.5136 -0.2173 0.8301 +vn 0.0988 -0.3206 0.9420 +vn -0.0052 -0.2826 0.9592 +vn 0.3850 -0.3985 0.8324 +vn 0.0758 0.6120 0.7872 +vn -0.0000 0.6047 0.7965 +vn -0.0000 0.5645 0.8254 +vn 0.0962 0.5734 0.8136 +vn 0.0487 0.6467 0.7612 +vn 0.0961 0.6433 0.7596 +vn 0.3282 0.5322 0.7805 +vn 0.3514 0.5209 0.7779 +vn 0.2419 0.5874 0.7723 +vn 0.1245 0.6393 0.7588 +vn -0.8517 0.4850 0.1984 +vn -0.8425 0.5267 -0.1130 +vn -0.7093 0.5759 -0.4065 +vn -0.4105 0.6010 -0.6857 +vn -0.0001 0.5869 -0.8097 +vn 0.0706 0.7400 0.6689 +vn 0.1543 0.8837 0.4418 +vn -0.7445 0.5143 0.4256 +vn -0.8020 0.4763 0.3604 +vn -0.6668 0.5567 0.4954 +vn 0.1917 0.9700 0.1498 +vn -0.5744 0.5663 0.5911 +vn 0.1347 0.9801 -0.1459 +vn -0.4563 0.5436 0.7045 +vn 0.0318 0.9268 -0.3742 +vn -0.2680 0.5218 0.8099 +vn -0.0365 0.8697 -0.4922 +vn -0.0000 0.5144 0.8575 +vn -0.0000 0.8532 -0.5216 +vn -0.0000 -0.2802 0.9599 +vn -0.0000 0.0446 0.9990 +vn -0.0000 0.1541 0.9881 +vn -0.0000 0.0029 1.0000 +vn -0.0000 -0.1184 0.9930 +vn -0.0000 -0.1742 0.9847 +vn -0.0000 -0.0964 0.9953 +vn 0.0282 0.6580 0.7525 +vn -0.0000 0.6516 0.7586 +vn -0.0000 0.5854 0.8107 +vn 0.0208 0.5824 0.8126 +vn 0.1377 0.6718 0.7278 +vn 0.1192 0.5804 0.8055 +vn 0.3981 -0.1210 0.9093 +vn 0.1050 -0.0812 0.9912 +vn 0.0115 -0.0684 0.9976 +vn -0.0000 -0.0698 0.9976 +vn 0.4889 -0.0144 0.8722 +vn 0.5866 -0.0333 0.8092 +vn 0.6823 -0.0062 0.7311 +vn 0.7198 0.0582 0.6918 +vn 0.7228 0.0346 0.6902 +vn 0.6545 -0.2668 0.7074 +vn 0.5508 -0.4273 0.7169 +vn 0.5902 -0.1421 0.7947 +vn 0.0686 0.0082 0.9976 +vn -0.0000 0.0261 0.9997 +vn 0.1723 -0.0230 0.9848 +vn 0.3172 -0.0205 0.9481 +vn 0.4210 0.0011 0.9071 +vn -0.6878 0.6362 0.3495 +vn -0.6373 0.7463 0.1919 +vn -0.5175 0.8554 0.0210 +vn -0.3015 0.9468 -0.1129 +vn -0.0000 0.9875 -0.1574 +vn -0.6897 0.5680 0.4491 +vn -0.6956 0.5872 0.4139 +vn -0.6559 0.5542 0.5126 +vn -0.5695 0.5475 0.6131 +vn -0.4330 0.5456 0.7175 +vn -0.2435 0.5467 0.8011 +vn -0.0000 0.5469 0.8372 +vn -0.0000 0.8506 0.5259 +vn 0.3924 0.6466 0.6542 +vn 0.3784 0.5634 0.7344 +vn 0.5594 0.5256 0.6409 +vn 0.5536 0.5970 0.5807 +vn 0.3960 0.5715 0.7187 +vn 0.4398 0.5341 0.7220 +vn 0.4916 0.5201 0.6984 +vn 0.3934 0.6549 0.6452 +vn 0.5067 0.7974 0.3276 +vn 0.5260 0.8476 -0.0705 +vn 0.4046 0.7796 -0.4780 +vn 0.2141 0.6459 -0.7328 +vn 0.0650 0.5489 -0.8334 +vn -0.0000 0.5189 -0.8548 +vn 0.5539 0.5414 0.6325 +vn 0.5085 0.5355 0.6743 +vn 0.4979 0.5354 0.6822 +vn 0.5611 0.5668 0.6032 +vn 0.4144 0.6190 0.6672 +vn 0.1694 0.6583 0.7334 +vn 0.0484 0.6506 0.7579 +vn -0.0000 0.6429 0.7659 +vn 0.5413 0.5415 0.6432 +vn 0.4026 0.5953 0.6954 +vn 0.2018 0.6295 0.7503 +vn 0.0728 0.6194 0.7817 +vn -0.0000 0.6133 0.7899 +vn 0.3947 0.2615 0.8808 +vn 0.5905 0.2413 0.7701 +vn 0.1121 0.2763 0.9545 +vn 0.0198 0.2905 0.9567 +vn -0.0000 0.2987 0.9543 +vn 0.9198 -0.3061 0.2455 +vn 0.9662 -0.2122 0.1463 +vn 0.5999 -0.5849 -0.5458 +vn 0.6925 -0.6682 -0.2719 +vn 0.9404 -0.3126 0.1337 +vn 0.6611 -0.5239 -0.5370 +vn 0.7141 -0.6662 -0.2150 +vn 0.8595 -0.4651 0.2121 +vn 0.9434 -0.2381 0.2308 +vn 0.7950 -0.2513 -0.5521 +vn 0.7337 -0.3144 -0.6023 +vn 0.8985 -0.4199 0.1280 +vn 0.9596 -0.2521 0.1250 +vn 0.7581 -0.3653 -0.5402 +vn 0.7571 -0.5454 -0.3596 +vn 0.8742 -0.4620 0.1493 +vn 0.6381 -0.4468 -0.6271 +vn 0.9957 -0.0863 -0.0339 +vn 0.9948 
-0.0466 -0.0906 +vn 0.6183 -0.4195 -0.6646 +vn 0.9712 -0.1623 -0.1746 +vn 0.5750 -0.4762 -0.6653 +vn 0.5370 -0.3173 -0.7816 +vn 0.9595 0.0602 -0.2752 +vn 0.6696 -0.3534 -0.6532 +vn 0.9973 -0.0645 -0.0340 +vn 0.9977 -0.0434 -0.0513 +vn 0.6655 -0.3414 -0.6638 +vn 0.9763 -0.2165 0.0085 +vn 0.6666 -0.4957 -0.5567 +vn 0.6302 -0.3022 -0.7152 +vn 0.9886 0.0741 -0.1311 +vn 0.9807 -0.0695 0.1826 +vn 0.7722 -0.3282 -0.5440 +vn 0.7663 -0.1416 -0.6267 +vn 0.9760 0.1788 0.1242 +vn 0.9442 0.1410 0.2977 +vn 0.8308 -0.0810 -0.5506 +vn 0.8243 -0.0962 -0.5579 +vn 0.9536 0.0858 0.2887 +vn 0.9105 -0.0854 0.4045 +vn 0.8548 -0.3342 -0.3969 +vn 0.8600 -0.1236 -0.4951 +vn 0.9238 0.1794 0.3382 +vn 0.7543 0.1871 0.6292 +vn 0.7155 0.1072 0.6904 +vn 0.7755 0.3063 0.5521 +vn 0.7781 0.3364 0.5304 +vn 0.7874 0.2093 0.5798 +vn 0.7485 0.4256 0.5086 +vn 0.6571 0.0806 0.7495 +vn 0.7152 -0.2501 0.6526 +vn 0.6568 0.3006 0.6916 +vn 0.6649 0.2847 0.6905 +vn 0.7110 0.1400 0.6891 +vn 0.6463 0.4059 0.6462 +vn 0.5904 0.0293 0.8066 +vn 0.6736 -0.3303 0.6612 +vn 0.5933 0.1079 0.7977 +vn 0.5601 0.3285 0.7605 +vn 0.4633 -0.0938 0.8812 +vn 0.6285 -0.4467 0.6367 +vn 0.3867 0.2163 0.8965 +vn 0.4109 0.1645 0.8967 +vn 0.2901 0.0407 0.9561 +vn 0.2958 0.2474 0.9227 +vn 0.0140 0.5043 0.8634 +vn 0.1061 0.4545 0.8844 +vn -0.0148 0.4952 0.8686 +vn 0.0584 0.2782 0.9588 +vn 0.0010 0.4227 0.9063 +vn -0.8293 0.2916 0.4767 +vn -0.6970 0.4352 0.5700 +vn -0.0847 0.4067 0.9096 +vn -0.1654 0.2608 0.9511 +vn -0.7184 -0.0087 0.6956 +vn 0.0203 0.2843 0.9585 +vn -0.0276 0.4056 0.9136 +vn -0.7131 0.0807 0.6964 +vn -0.6814 0.2854 0.6740 +vn -0.6774 0.2668 0.6856 +vn -0.0623 0.4112 0.9094 +vn -0.2808 -0.6510 -0.7053 +vn -0.2685 -0.5259 -0.8070 +vn -0.1761 -0.5259 -0.8321 +vn -0.1616 -0.5507 -0.8189 +vn -0.2485 -0.7051 -0.6641 +vn 0.0605 -0.6265 -0.7771 +vn -0.7279 -0.5602 -0.3954 +vn -0.6924 -0.5327 -0.4867 +vn 0.0093 -0.5126 -0.8586 +vn -0.0212 -0.4590 -0.8882 +vn -0.6663 -0.3719 -0.6463 +vn -0.6717 -0.3488 -0.6536 +vn -0.0017 -0.4716 -0.8818 +vn -0.6614 -0.2192 -0.7173 +vn -0.8421 -0.2023 -0.5000 +vn -0.0351 -0.6257 -0.7793 +vn -0.9249 0.1361 0.3549 +vn -0.8051 0.2955 0.5143 +vn -0.2050 0.2786 0.9383 +vn -0.2733 0.0944 0.9573 +vn -0.8611 0.1980 0.4684 +vn -0.7877 0.1326 0.6016 +vn -0.7894 -0.2191 0.5735 +vn -0.1341 0.0496 0.9897 +vn -0.1389 0.2389 0.9611 +vn -0.7892 -0.0988 0.6062 +vn 0.2183 -0.5074 -0.8336 +vn -0.4631 -0.5157 -0.7209 +vn -0.4666 -0.5009 -0.7290 +vn 0.1850 -0.4000 -0.8976 +vn 0.1251 -0.1780 -0.9760 +vn -0.5210 -0.0474 -0.8522 +vn -0.4928 -0.0175 -0.8700 +vn 0.1143 -0.1420 -0.9832 +vn 0.1555 -0.2533 -0.9548 +vn -0.5053 -0.1895 -0.8419 +vn -0.5176 -0.1736 -0.8378 +vn -0.9955 0.0081 0.0942 +vn -0.9101 0.1656 0.3798 +vn -0.3487 0.1286 0.9284 +vn -0.4635 -0.1058 0.8798 +vn -0.9330 0.0348 0.3581 +vn -0.3538 0.1457 0.9239 +vn -0.9347 0.0486 0.3521 +vn -0.9078 -0.2649 0.3251 +vn -0.3933 -0.0564 0.9177 +vn -0.4055 0.0844 0.9102 +vn -0.9118 -0.2053 0.3556 +vn 0.1731 -0.3237 -0.9302 +vn -0.5299 -0.1402 -0.8364 +vn -0.6575 -0.1860 -0.7301 +vn 0.1231 -0.4137 -0.9021 +vn -0.6739 -0.1918 -0.7135 +vn -0.5696 -0.3185 -0.7577 +vn 0.1694 -0.5274 -0.8326 +vn -0.6156 -0.5652 -0.5492 +vn -0.5852 -0.5021 -0.6368 +vn 0.1832 -0.3787 -0.9072 +vn -0.9603 0.1129 -0.2550 +vn -0.9645 0.0179 -0.2635 +vn -0.9809 -0.1376 -0.1378 +vn -0.9843 -0.0033 -0.1764 +vn -0.9085 -0.4175 -0.0178 +vn -0.9151 -0.4008 -0.0432 +vn -0.8635 -0.4190 -0.2808 +vn -0.8631 -0.4281 -0.2681 +vn -0.9600 -0.0555 -0.2744 +vn -0.9581 -0.0750 -0.2763 +vn -0.9604 0.0826 -0.2661 +vn -0.9422 0.0550 -0.3304 
+vn -0.9775 0.1678 -0.1279 +vn -0.9911 0.1140 -0.0685 +vn -0.9967 -0.0692 0.0418 +vn -0.9991 -0.0332 0.0273 +vn -0.9322 -0.3126 0.1823 +vn -0.9440 -0.3001 0.1373 +vn -0.9851 0.1717 0.0020 +vn -0.6402 0.4890 0.5924 +vn -0.5555 0.5038 0.6615 +vn -0.9628 0.2316 0.1393 +vn -0.9361 -0.2289 0.2672 +vn -0.6529 0.0486 0.7559 +vn -0.6601 0.1142 0.7425 +vn -0.9336 -0.2618 0.2448 +vn -0.9971 -0.0587 0.0491 +vn -0.6641 0.3341 0.6688 +vn -0.6821 0.3524 0.6407 +vn -0.9993 -0.0278 0.0233 +vn 0.4946 -0.6620 -0.5631 +vn -0.2404 -0.7829 -0.5739 +vn -0.2546 -0.6270 -0.7362 +vn 0.5133 -0.4962 -0.7002 +vn 0.9440 -0.0069 -0.3298 +vn 0.9716 -0.1057 -0.2118 +vn 0.9008 0.3350 0.2764 +vn 0.9075 0.1835 0.3779 +vn 0.9578 -0.2386 -0.1604 +vn 0.9627 -0.0829 -0.2577 +vn 0.9053 0.3162 0.2835 +vn -0.5370 0.4466 0.7157 +vn -0.9478 0.0631 0.3125 +vn -0.8822 0.1643 0.4412 +vn -0.5173 0.4230 0.7440 +vn -0.8320 0.0927 0.5470 +vn -0.4182 0.3741 0.8278 +vn -0.4477 0.3812 0.8089 +vn -0.8712 0.1122 0.4779 +vn -0.8559 -0.3711 -0.3602 +vn -0.1179 -0.6018 -0.7899 +vn -0.1883 -0.4886 -0.8519 +vn -0.8826 -0.4559 -0.1144 +vn -0.9006 -0.3623 -0.2401 +vn -0.0992 -0.9600 -0.2617 +vn 0.4241 -0.8860 -0.1873 +vn 0.2930 -0.8728 0.3902 +vn -0.4679 -0.8699 0.1562 +vn 0.8056 -0.3374 0.4870 +vn 0.8465 -0.5309 0.0403 +vn 0.2634 0.4441 0.8564 +vn 0.2888 0.5129 0.8084 +vn 0.3275 0.5982 0.7313 +vn 0.1812 0.5915 0.7857 +vn -0.2242 -0.0108 0.9745 +vn 0.3710 0.1345 0.9188 +vn -0.7072 -0.2171 0.6729 +vn -0.8165 -0.2254 -0.5315 +vn -0.9708 -0.1292 -0.2020 +vn -0.7841 -0.3689 -0.4991 +vn -0.7916 -0.3427 -0.5059 +vn -0.8306 -0.4654 -0.3059 +vn -0.8462 -0.4836 -0.2237 +vn -0.7251 -0.6807 -0.1046 +vn 0.0837 -0.9151 0.3945 +vn -0.2409 -0.4496 0.8601 +vn -0.7837 -0.4492 0.4290 +vn -0.3626 -0.6881 -0.6285 +vn 0.2809 -0.8297 -0.4824 +vn -0.7033 -0.7066 0.0776 +vn 0.2064 -0.9434 0.2596 +vn 0.0315 -0.4293 0.9026 +vn -0.6358 -0.4449 0.6307 +vn 0.1700 -0.8311 -0.5295 +vn -0.5102 -0.7347 -0.4472 +vn -0.7154 -0.5986 0.3603 +vn 0.2137 -0.8855 0.4126 +vn 0.1667 -0.2713 0.9479 +vn -0.5506 -0.2488 0.7968 +vn 0.1832 -0.9189 -0.3495 +vn -0.5847 -0.7812 -0.2187 +vn 0.8516 -0.5218 0.0491 +vn 0.3338 -0.8573 0.3920 +vn -0.0565 -0.9388 -0.3398 +vn 0.5764 -0.7188 -0.3886 +vn 0.7712 -0.1630 0.6153 +vn 0.2441 -0.1463 0.9587 +vn -0.5076 -0.1212 0.8530 +vn -0.7621 -0.4994 0.4120 +vn -0.7267 -0.6742 -0.1318 +vn 0.7517 -0.6220 0.2194 +vn 0.6154 -0.7485 -0.2471 +vn 0.6650 -0.3445 0.6626 +vn 0.7973 -0.5332 0.2828 +vn 0.7046 -0.6165 -0.3515 +vn 0.5664 -0.2899 0.7715 +vn 0.7506 -0.6307 -0.1972 +vn 0.6665 -0.5586 0.4937 +vn 0.2252 -0.3163 0.9215 +vn 0.7665 0.4926 0.4121 +vn -0.6741 0.2058 0.7094 +vn -0.9669 -0.1777 0.1831 +vn -0.8289 -0.3976 -0.3935 +vn 0.5627 -0.2593 -0.7849 +vn 0.9483 0.1422 -0.2837 +vn 0.6411 0.4645 0.6109 +vn -0.7046 0.1323 0.6971 +vn -0.9457 -0.3044 0.1138 +vn -0.6657 -0.5319 -0.5233 +vn 0.6495 -0.2183 -0.7284 +vn 0.9766 0.1453 -0.1584 +vn 0.5690 0.4026 0.7170 +vn -0.1837 0.2870 0.9402 +vn -0.7904 -0.0024 0.6126 +vn -0.9404 -0.3329 -0.0691 +vn -0.5919 -0.4310 -0.6812 +vn 0.1710 -0.3163 -0.9331 +vn 0.7847 -0.0404 -0.6186 +vn 0.9523 0.2840 0.1114 +vn 0.3540 0.2934 0.8880 +vn -0.9239 -0.1556 0.3496 +vn -0.8920 -0.3806 -0.2439 +vn -0.4755 -0.4800 -0.7372 +vn 0.8859 -0.0817 -0.4567 +vn 0.9247 0.1873 0.3313 +vn 0.3068 0.5564 0.7722 +vn -0.4654 0.3756 0.8015 +vn -0.8942 0.1063 0.4349 +vn 0.5028 -0.3594 -0.7861 +vn -0.8558 -0.2087 -0.4734 +vn -0.9966 0.0634 -0.0525 +vn -0.7919 0.3786 0.4791 +vn -0.8817 -0.3222 -0.3447 +vn -0.9904 -0.0891 0.1059 +vn -0.7761 0.2132 0.5935 +vn 
-0.7750 -0.1586 -0.6117 +vn -0.9858 0.1544 -0.0654 +vn -0.7832 0.3713 0.4988 +vn -0.7634 -0.4456 -0.4676 +vn -0.9733 -0.2181 0.0712 +vn -0.7821 0.1320 0.6091 +vn 0.7935 -0.1735 -0.5833 +vn 0.5982 0.2376 0.7653 +vn 0.9871 0.0510 0.1515 +vn 0.7923 -0.1419 -0.5934 +vn -0.5672 -0.3300 -0.7546 +vn -0.9796 -0.1514 -0.1319 +vn -0.7844 0.1232 0.6078 +vn 0.5853 0.2785 0.7615 +vn 0.9812 0.1199 0.1511 +vn -0.8623 0.0442 0.5045 +vn -0.9726 -0.2080 -0.1038 +vn -0.6954 -0.3468 -0.6294 +vn -0.5788 -0.3581 -0.7326 +vn -0.9360 -0.2399 -0.2575 +vn -0.9667 -0.0886 0.2401 +vn -0.9690 0.0473 0.2424 +vn -0.9648 -0.0547 -0.2574 +vn -0.6501 -0.1705 -0.7404 +vn 0.5265 -0.4947 -0.6915 +vn 0.9831 -0.1170 -0.1411 +vn 0.8793 0.3166 0.3558 +vn 0.3000 0.6041 0.7383 +vn -0.5383 0.4382 0.7199 +vn -0.9522 0.0004 0.3055 +vn -0.7698 -0.4867 -0.4129 +vn -0.1102 -0.6624 -0.7410 +vn 0.5437 -0.5486 -0.6352 +vn 0.9954 -0.0774 -0.0572 +vn 0.8237 0.3674 0.4320 +vn 0.1783 0.6289 0.7568 +vn -0.5582 0.4415 0.7025 +vn -0.8908 -0.0396 0.4527 +vn -0.6720 -0.7139 -0.1968 +vn 0.0410 -0.8088 -0.5867 +vn 0.6315 -0.5701 -0.5256 +vn 0.1202 -0.0062 0.9927 +vn 0.2244 0.2636 0.9382 +vn 0.7554 -0.1414 0.6398 +vn 0.7883 -0.5478 0.2801 +vn -0.4150 0.4820 0.7717 +vn -0.6116 0.4280 0.6654 +vn -0.8545 0.3953 0.3369 +vn -0.9638 0.2384 0.1194 +vn -0.9850 0.1719 -0.0169 +vn -0.9802 0.1104 -0.1644 +vn -0.9323 0.1085 -0.3451 +vn -0.8912 0.0362 -0.4521 +vn -0.8193 0.0943 -0.5655 +vn -0.4685 0.0179 -0.8833 +vn 0.8879 -0.4307 0.1618 +vn 0.8667 -0.4424 0.2305 +vn 0.8562 -0.5090 0.0889 +vn 0.0997 -0.2173 -0.9710 +vn 0.5809 -0.4138 -0.7010 +vn 0.8471 -0.5228 -0.0953 +vn 0.8605 -0.4712 0.1936 +vn 0.8889 -0.4104 0.2037 +vn 0.8897 -0.3872 0.2417 +vn -0.7982 0.4643 0.3839 +vn -0.9402 0.3311 0.0805 +vn -0.9579 0.2385 -0.1596 +vn -0.9039 0.1740 -0.3908 +vn -0.7922 0.1135 -0.5996 +vn -0.4735 -0.0379 -0.8800 +vn 0.8613 -0.4896 0.1359 +vn 0.8887 -0.4171 0.1902 +vn 0.8986 -0.3787 0.2216 +vn 0.9012 -0.4325 0.0289 +vn 0.7196 -0.4690 -0.5121 +vn 0.1080 -0.2783 -0.9544 +vn 0.5752 0.6904 0.4388 +vn 0.9553 0.2176 0.2003 +vn 0.9212 0.1402 0.3629 +vn 0.4682 0.5619 0.6820 +vn -0.0847 0.8291 0.5527 +vn -0.1827 0.6804 0.7097 +vn -0.5490 0.6507 0.5246 +vn -0.5419 0.5880 0.6005 +vn -0.6696 0.5483 0.5010 +vn -0.6613 0.5109 0.5493 +vn -0.8103 0.4038 0.4247 +vn -0.7853 0.4874 0.3818 +vn -0.9327 0.3518 0.0795 +vn -0.9638 0.2262 0.1411 +vn -0.9573 0.2065 -0.2025 +vn -0.9869 0.0630 -0.1486 +vn -0.9239 -0.0460 -0.3798 +vn -0.9034 0.1055 -0.4155 +vn -0.7652 0.0104 -0.6437 +vn -0.7787 -0.1582 -0.6071 +vn -0.4298 -0.1389 -0.8922 +vn -0.4331 -0.3654 -0.8240 +vn 0.1411 -0.3178 -0.9376 +vn 0.0843 -0.5586 -0.8252 +vn 0.6767 -0.4272 -0.5996 +vn 0.5945 -0.6201 -0.5119 +vn 0.9136 -0.4003 -0.0709 +vn 0.8442 -0.5331 -0.0558 +vn 0.9115 -0.3826 0.1506 +vn 0.8612 -0.4917 0.1288 +vn 0.9689 -0.2207 0.1123 +vn 0.9301 -0.3662 0.0269 +vn 0.8862 -0.4408 0.1427 +vn 0.8523 -0.5041 0.1394 +vn 0.8926 -0.4462 0.0651 +vn 0.8748 -0.4815 0.0534 +vn 0.7151 0.5364 0.4482 +vn 0.9901 0.0499 0.1309 +vn -0.0061 0.7508 0.6605 +vn -0.5916 0.5454 0.5938 +vn 0.8333 -0.5071 -0.2200 +vn 0.3757 0.2564 0.8906 +vn -0.1102 0.3661 0.9240 +vn -0.5058 0.3660 0.7812 +vn -0.6978 0.3516 0.6241 +vn -0.8347 0.3463 0.4281 +vn -0.9497 0.2730 0.1532 +vn -0.9813 0.1275 -0.1444 +vn -0.9199 -0.0325 -0.3907 +vn -0.7095 -0.2164 -0.6706 +vn -0.3829 -0.3703 -0.8463 +vn 0.0825 -0.5429 -0.8357 +vn 0.5883 -0.6347 -0.5011 +vn 0.7536 -0.6371 -0.1621 +vn 0.7691 -0.6372 0.0495 +vn 0.7636 -0.6126 0.2040 +vn 0.7793 -0.4969 0.3818 +vn 0.7795 -0.2748 0.5629 +vn 
0.6699 0.0274 0.7419 +vn 0.7517 0.3057 0.5843 +vn 0.4678 0.5260 0.7103 +vn 0.0224 0.6587 0.7521 +vn -0.3923 0.6488 0.6520 +vn -0.6184 0.6051 0.5014 +vn -0.7591 0.5736 0.3079 +vn -0.8581 0.5122 0.0367 +vn -0.8898 0.3794 -0.2537 +vn -0.8279 0.2248 -0.5138 +vn -0.5927 0.0575 -0.8034 +vn -0.2407 -0.0583 -0.9689 +vn 0.2207 -0.1704 -0.9603 +vn 0.6961 -0.2953 -0.6544 +vn 0.8831 -0.3371 -0.3263 +vn 0.9321 -0.3459 -0.1075 +vn 0.9007 0.0142 0.4342 +vn 0.9418 -0.3308 0.0591 +vn 0.9416 -0.2128 0.2610 +vn -0.2875 0.7907 -0.5405 +vn -0.2966 0.6341 -0.7141 +vn 0.2229 0.3874 -0.8946 +vn 0.5399 0.3200 -0.7785 +vn 0.7069 0.2310 -0.6685 +vn 0.7854 0.3156 -0.5325 +vn 0.8664 0.3703 -0.3351 +vn 0.7435 0.6686 0.0141 +vn 0.5371 0.8423 0.0453 +vn 0.7614 0.6274 -0.1634 +vn 0.6898 0.7221 0.0524 +vn 0.2208 0.9442 0.2446 +vn -0.1987 0.9741 0.1075 +vn -0.4352 0.8992 0.0441 +vn -0.4529 0.8830 -0.1231 +vn 0.1936 0.4763 -0.8577 +vn -0.4905 0.8229 -0.2867 +vn -0.1296 0.5684 -0.8125 +vn 0.4248 0.8137 -0.3967 +vn -0.3843 0.5333 -0.7536 +vn 0.3676 0.2444 -0.8973 +vn 0.7035 0.2454 -0.6669 +vn 0.8780 0.2436 -0.4120 +vn 0.8128 0.5778 0.0743 +vn 0.7129 0.6804 0.1696 +vn -0.0150 0.9575 0.2880 +vn -0.4081 0.9128 0.0135 +vn -0.5759 0.8020 -0.1588 +vn -0.2615 0.5132 -0.8175 +vn 0.0053 -0.2042 -0.9789 +vn -0.0044 0.0254 -0.9997 +vn -0.9663 -0.2230 0.1285 +vn -0.9649 -0.2540 0.0673 +vn -0.9726 -0.2216 0.0710 +vn -0.9521 -0.2386 0.1911 +vn -0.9274 -0.3460 0.1424 +vn -0.9790 -0.1323 0.1549 +vn -0.7099 0.3519 0.6101 +vn -0.8317 0.1229 0.5414 +vn -0.9690 0.0604 0.2397 +vn -0.8626 0.2673 0.4295 +vn -0.3970 -0.3836 -0.8338 +vn -0.1706 -0.0201 -0.9851 +vn -0.1062 0.0235 -0.9941 +vn -0.1647 -0.2795 -0.9459 +vn -0.5368 -0.4583 -0.7083 +vn -0.2721 -0.3915 -0.8790 +vn -0.3808 -0.3948 -0.8361 +vn -0.6336 -0.4147 -0.6531 +vn -0.0033 -0.2826 -0.9592 +vn -0.0213 -0.3189 -0.9475 +vn -0.4290 0.6493 0.6280 +vn -0.3642 0.6771 0.6394 +vn -0.4848 0.3826 0.7865 +vn -0.5255 0.4500 0.7221 +vn -0.9391 0.2546 0.2307 +vn -0.6945 0.4524 0.5594 +vn -0.7644 0.4009 0.5050 +vn -0.8880 0.3083 0.3413 +vn -0.9777 -0.2013 -0.0603 +vn -0.8603 -0.1544 0.4858 +vn -0.9218 -0.2225 0.3176 +vn -0.9816 -0.1780 -0.0688 +vn -0.9477 0.1544 0.2794 +vn -0.8932 0.1868 0.4090 +vn -0.9362 0.1029 0.3361 +vn -0.9839 0.0788 0.1605 +vn -0.9894 0.1255 0.0729 +vn -0.9354 0.3536 0.0047 +vn -0.9113 0.3032 0.2785 +vn -0.9567 0.1941 0.2170 +vn -0.9957 -0.0009 -0.0929 +vn -0.9241 0.3304 -0.1919 +vn -0.5691 -0.0070 -0.8223 +vn -0.7609 0.0526 -0.6468 +vn -0.6804 0.5397 -0.4957 +vn -0.5061 0.4164 -0.7553 +vn -0.0736 0.0979 -0.9925 +vn -0.0865 0.2770 -0.9570 +vn -0.5699 0.3821 -0.7275 +vn -0.3866 0.3607 -0.8488 +vn -0.3812 0.1810 -0.9066 +vn -0.6134 0.2283 -0.7561 +vn -0.8989 0.3264 0.2924 +vn -0.9365 0.3456 -0.0600 +vn -0.9697 0.2338 -0.0703 +vn -0.9273 0.2233 0.3004 +vn -0.6276 0.1196 0.7693 +vn -0.5301 0.0731 0.8448 +vn -0.5761 0.1211 0.8083 +vn -0.6732 0.1952 0.7132 +vn -0.7704 0.1782 0.6122 +vn -0.7674 0.2714 0.5809 +vn -0.3704 0.0562 0.9272 +vn -0.5367 0.0988 0.8380 +vn -0.5135 -0.1452 0.8457 +vn -0.4216 -0.1612 0.8923 +vn -0.9259 -0.0989 0.3647 +vn -0.7929 0.1482 0.5910 +vn -0.9627 0.1926 0.1901 +vn -0.9607 -0.1095 0.2549 +vn -0.9925 -0.1217 0.0092 +vn -0.6586 -0.1436 -0.7387 +vn -0.4752 0.1281 -0.8705 +vn -0.4418 -0.1871 -0.8774 +vn -0.1530 -0.1555 -0.9759 +vn -0.0761 0.0423 -0.9962 +vn 0.3792 -0.0204 -0.9251 +vn 0.3339 -0.2091 -0.9191 +vn -0.0981 0.5241 0.8460 +vn -0.4628 0.6575 0.5946 +vn -0.4759 0.6378 0.6056 +vn -0.0953 0.5162 0.8511 +vn 0.6018 -0.1200 0.7896 +vn 0.6109 -0.0944 0.7861 +vn 
0.2788 0.2646 0.9232 +vn 0.2790 0.2382 0.9303 +vn 0.7559 -0.4719 0.4538 +vn 0.7736 -0.4497 0.4464 +vn -0.1948 -0.5066 -0.8399 +vn -0.1683 -0.4800 -0.8610 +vn 0.2122 -0.6731 -0.7085 +vn 0.1858 -0.7076 -0.6817 +vn -0.8880 0.3154 0.3345 +vn -0.7554 0.4731 0.4533 +vn -0.4611 0.6436 0.6109 +vn -0.5800 0.5020 0.6415 +vn -0.0718 0.3259 -0.9427 +vn -0.1143 0.1887 -0.9754 +vn -0.6833 0.1340 0.7177 +vn -0.7413 0.2284 0.6311 +vn -0.7920 0.3171 0.5218 +vn -0.8817 0.4074 0.2381 +vn -0.8807 0.4650 -0.0903 +vn -0.5193 0.5002 -0.6929 +vn -0.3875 0.4785 -0.7879 +vn -0.1109 0.4210 -0.9003 +vn -0.4363 0.6687 -0.6021 +vn -0.5021 0.8465 -0.1771 +vn -0.8151 0.5494 -0.1840 +vn -0.6582 0.4889 -0.5725 +vn -0.1588 0.6351 -0.7559 +vn -0.3146 0.8849 -0.3436 +vn -0.4086 0.8622 -0.2993 +vn -0.2491 0.5911 -0.7672 +vn -0.8876 0.4313 -0.1615 +vn -0.8626 0.4733 -0.1788 +vn -0.8146 0.2033 -0.5432 +vn -0.8357 0.1553 -0.5268 +vn -0.5701 -0.1973 -0.7975 +vn -0.5564 -0.1649 -0.8144 +vn -0.0990 0.4086 -0.9073 +vn -0.1044 0.4089 -0.9066 +vn -0.0271 0.3560 -0.9341 +vn -0.3157 -0.1205 -0.9412 +vn -0.2418 0.0733 -0.9676 +vn -0.1337 0.0537 -0.9896 +vn -0.2600 -0.2042 -0.9438 +vn -0.2181 0.3146 -0.9238 +vn -0.1934 0.1236 -0.9733 +vn -0.2424 0.4475 -0.8608 +vn -0.3199 0.3409 -0.8840 +vn -0.3728 -0.0298 -0.9274 +vn -0.0872 -0.3144 -0.9453 +vn -0.1392 -0.3489 -0.9268 +vn -0.0501 0.0435 -0.9978 +vn -0.0498 -0.2201 -0.9742 +vn -0.0836 0.3832 -0.9199 +vn -0.3484 0.3467 -0.8709 +vn -0.2406 0.4598 -0.8548 +vn 0.5495 -0.7525 -0.3631 +vn 0.5345 -0.7697 -0.3491 +vn -0.8073 -0.3836 -0.4485 +vn -0.7823 -0.4340 -0.4468 +vn -0.9905 0.1074 -0.0864 +vn -0.8446 0.5353 -0.0102 +vn -0.6520 0.5044 -0.5661 +vn -0.7397 0.3717 -0.5609 +vn -0.8017 0.2364 -0.5490 +vn -0.8104 -0.0170 -0.5856 +vn -0.7683 0.2081 -0.6053 +vn -0.2246 0.3077 -0.9246 +vn -0.5654 0.2580 -0.7834 +vn -0.4463 -0.0623 -0.8927 +vn -0.1112 -0.0712 -0.9912 +vn 0.1105 -0.5071 -0.8548 +vn -0.3443 -0.4433 -0.8276 +vn 0.3741 -0.7913 -0.4836 +vn -0.4225 -0.7252 -0.5437 +vn 0.1673 -0.5009 0.8492 +vn 0.3986 -0.7529 0.5236 +vn -0.7617 -0.5753 0.2981 +vn -0.7167 -0.3384 0.6098 +vn -0.0809 -0.1339 0.9877 +vn -0.4125 -0.1331 0.9012 +vn -0.3886 0.2767 0.8789 +vn -0.6784 0.1944 0.7085 +vn -0.7817 0.4458 0.4362 +vn -0.5640 0.6072 0.5597 +vn -0.5356 0.7838 -0.3144 +vn -0.6955 0.6604 -0.2831 +vn -0.6019 0.4934 -0.6279 +vn -0.3541 0.5904 -0.7252 +vn -0.2429 0.1409 -0.9598 +vn -0.2812 0.1626 -0.9458 +vn 0.1281 -0.2147 -0.9682 +vn 0.2299 -0.2125 -0.9497 +vn 0.4340 -0.4633 -0.7726 +vn 0.5173 -0.4120 -0.7501 +vn 0.6833 -0.5660 -0.4612 +vn 0.7059 -0.5258 -0.4746 +vn 0.6958 -0.4891 0.5259 +vn 0.6378 -0.5483 0.5409 +vn 0.3725 -0.4492 0.8121 +vn 0.4499 -0.3524 0.8206 +vn -0.0041 -0.2100 0.9777 +vn 0.0687 -0.1586 0.9849 +vn -0.4643 0.1909 0.8649 +vn -0.4542 0.1867 0.8711 +vn -0.7344 0.4570 0.5018 +vn -0.7532 0.4310 0.4969 +vn -0.8105 0.5354 -0.2377 +vn -0.7930 0.5505 -0.2609 +vn -0.6033 0.4305 -0.6713 +vn -0.6244 0.4203 -0.6584 +vn -0.4029 0.3238 -0.8560 +vn -0.3812 -0.0546 -0.9229 +vn -0.8577 -0.0545 0.5112 +vn -0.7519 0.2036 0.6270 +vn -0.7266 0.4841 0.4875 +vn -0.4870 0.5430 -0.6841 +vn -0.6251 0.7184 -0.3054 +vn -0.2877 0.2258 -0.9307 +vn -0.0306 -0.1520 -0.9879 +vn -0.7099 0.6367 -0.3013 +vn -0.5165 0.4925 -0.7005 +vn -0.4850 0.2297 0.8438 +vn -0.6744 0.5213 0.5229 +vn -0.1897 -0.1974 0.9618 +vn 0.1069 -0.5617 0.8204 +vn 0.4227 -0.7244 0.5446 +vn 0.2541 -0.5107 -0.8214 +vn 0.5952 -0.6699 -0.4439 +vn -0.0504 -0.2878 -0.9564 +vn -0.1957 -0.3207 -0.9267 +vn -0.4586 -0.3480 -0.8177 +vn -0.6940 -0.3437 -0.6326 +vn 
-0.8334 -0.3149 -0.4541 +vn -0.9849 -0.1697 0.0332 +vn -0.9760 -0.1026 0.1921 +vn -0.9158 -0.2780 0.2898 +vn -0.9636 -0.0417 0.2641 +vn -0.9688 -0.0004 0.2480 +vn -0.9999 -0.0151 -0.0032 +vn -0.4615 0.8243 0.3279 +vn -0.7682 0.5683 0.2947 +vn -0.4336 0.8622 0.2618 +vn -0.3879 0.8916 0.2337 +vn -0.7196 0.6773 0.1531 +vn -0.7849 0.6022 0.1459 +vn -0.6036 0.7674 0.2164 +vn -0.7530 0.6378 0.1620 +vn -0.8243 0.5517 0.1269 +vn -0.8387 0.5279 0.1338 +vn -0.7273 0.6412 0.2449 +vn -0.7506 0.6032 0.2695 +vn -0.1422 0.4423 -0.8855 +vn -0.0371 0.3963 -0.9174 +vn 0.7129 -0.7004 0.0349 +vn 0.7316 -0.6813 0.0250 +vn 0.8127 -0.5826 -0.0140 +vn 0.7984 -0.6021 -0.0067 +vn 0.7168 -0.6969 -0.0217 +vn 0.5374 -0.8406 -0.0680 +vn -0.5804 -0.7891 -0.2012 +vn -0.9153 -0.3437 -0.2102 +vn -0.9205 -0.3100 -0.2379 +vn -0.9268 -0.2647 -0.2665 +vn -0.9458 -0.1213 -0.3013 +vn -0.8880 0.2606 -0.3790 +vn -0.7918 0.4932 -0.3602 +vn -0.8718 0.3544 -0.3382 +vn -0.9148 0.2298 -0.3322 +vn -0.9906 0.0294 -0.1336 +vn -0.9618 0.2340 -0.1421 +vn -0.9118 0.2412 -0.3322 +vn -0.9730 -0.0298 -0.2287 +vn -0.8643 -0.1922 -0.4649 +vn -0.9368 0.0861 -0.3392 +vn -0.9180 -0.0514 -0.3933 +vn -0.9755 -0.1453 0.1651 +vn -0.9696 -0.2387 0.0539 +vn -0.9180 -0.3482 -0.1898 +vn -0.7715 -0.4591 -0.4406 +vn -0.1411 0.3679 -0.9191 +vn -0.1674 0.0445 -0.9849 +vn -0.4594 0.4095 -0.7882 +vn -0.5662 0.0468 -0.8229 +vn -0.3575 0.0366 -0.9332 +vn -0.2950 0.3793 -0.8770 +vn -0.5699 0.4535 -0.6853 +vn -0.7300 0.0924 -0.6772 +vn -0.8204 0.2479 -0.5153 +vn -0.6568 0.4812 -0.5806 +vn -0.7733 0.4997 -0.3902 +vn -0.8538 0.4946 -0.1627 +vn -0.8933 0.4361 0.1088 +vn -0.8490 0.3203 0.4202 +vn -0.8144 0.2131 0.5398 +vn -0.8613 0.1537 0.4843 +vn -0.9007 -0.3063 0.3081 +vn -0.6703 -0.0654 0.7392 +vn -0.1474 -0.0211 -0.9889 +vn -0.1146 0.3960 -0.9111 +vn -0.2265 -0.2104 -0.9510 +vn -0.8304 0.1870 0.5249 +vn -0.9593 -0.1432 -0.2432 +vn -0.9369 0.0338 0.3481 +vn -0.0800 -0.1947 -0.9776 +vn -0.2616 -0.2114 -0.9418 +vn -0.5459 -0.2253 -0.8070 +vn -0.7520 -0.2274 -0.6187 +vn -0.0763 0.0238 -0.9968 +vn -0.1711 0.0322 -0.9847 +vn -0.3477 0.0722 -0.9348 +vn -0.6093 0.1268 -0.7828 +vn -0.8229 0.1563 -0.5462 +vn -0.9300 0.1599 -0.3309 +vn -0.9837 0.1591 -0.0843 +vn -0.9453 0.1474 0.2910 +vn -0.7750 0.1112 0.6221 +vn -0.6016 0.0650 0.7962 +vn -0.5056 0.0344 0.8621 +vn -0.0867 0.0092 -0.9962 +vn -0.1615 0.0127 -0.9868 +vn -0.3195 0.0335 -0.9470 +vn -0.5874 0.0706 -0.8062 +vn -0.8265 0.0984 -0.5543 +vn -0.9391 0.1020 -0.3282 +vn -0.9904 0.1004 -0.0949 +vn -0.9594 0.1050 0.2619 +vn -0.7793 0.0889 0.6203 +vn -0.5819 0.0524 0.8116 +vn -0.4808 0.0261 0.8765 +vn -0.5681 0.0649 0.8204 +vn -0.4478 0.0359 0.8934 +vn -0.7850 0.0994 0.6115 +vn -0.9682 0.1159 0.2219 +vn -0.9879 0.1028 -0.1159 +vn -0.9404 0.0958 -0.3264 +vn -0.8213 0.0956 -0.5624 +vn -0.5640 0.0750 -0.8224 +vn -0.2968 0.0421 -0.9540 +vn -0.1562 0.0216 -0.9875 +vn -0.0938 0.0159 -0.9955 +vn 0.2612 -0.0253 -0.9650 +vn 0.2011 -0.0322 -0.9790 +vn -0.5558 0.0839 0.8271 +vn -0.4065 0.0500 0.9123 +vn -0.7897 0.1229 0.6011 +vn -0.9679 0.1569 0.1964 +vn -0.9763 0.1651 -0.1402 +vn -0.9311 0.1672 -0.3242 +vn -0.8010 0.1628 -0.5762 +vn -0.5248 0.1199 -0.8428 +vn -0.2642 0.0667 -0.9622 +vn -0.1460 0.0395 -0.9885 +vn -0.0898 0.0283 -0.9956 +vn 0.3271 -0.0259 -0.9446 +vn -0.5829 0.4406 -0.6827 +vn -0.1536 0.1288 -0.9797 +vn -0.2351 0.0090 -0.9719 +vn -0.6505 0.4101 -0.6393 +vn 0.2768 -0.3932 -0.8768 +vn 0.3167 -0.2678 -0.9099 +vn 0.5277 -0.5511 -0.6464 +vn 0.5275 -0.5022 -0.6852 +vn 0.6846 -0.6044 -0.4073 +vn 0.6716 -0.6075 -0.4241 +vn 0.8339 
-0.5504 0.0412 +vn 0.7236 -0.6901 0.0118 +vn 0.6951 -0.2989 0.6538 +vn 0.5931 -0.5472 0.5906 +vn 0.4796 -0.0579 0.8756 +vn 0.3963 -0.3401 0.8528 +vn 0.1113 0.1961 0.9742 +vn 0.0480 0.0448 0.9978 +vn -0.2875 0.4573 0.8416 +vn -0.4895 0.2338 0.8401 +vn -0.7189 0.4419 0.5366 +vn -0.8141 0.2515 0.5235 +vn -0.8867 0.3920 0.2452 +vn -0.8092 0.5515 0.2027 +vn -0.8232 0.5367 -0.1851 +vn -0.8541 0.4985 -0.1483 +vn -0.7819 0.2096 -0.5872 +vn -0.5287 -0.1429 -0.8367 +vn -0.1197 -0.5091 -0.8523 +vn 0.2115 -0.6949 -0.6873 +vn 0.5104 -0.7896 -0.3407 +vn 0.6823 -0.7272 0.0752 +vn 0.7007 -0.4885 0.5199 +vn 0.5376 -0.1644 0.8270 +vn 0.2260 0.1659 0.9599 +vn -0.1568 0.4222 0.8928 +vn -0.5160 0.5962 0.6150 +vn -0.7421 0.6267 0.2376 +vn -0.8531 0.4943 -0.1671 +vn -0.7578 -0.0866 0.6467 +vn -0.3385 0.7381 0.5836 +vn -0.1860 0.7445 0.6412 +vn 0.1044 0.7011 0.7054 +vn -0.1341 0.7485 0.6494 +vn 0.1479 0.6802 0.7179 +vn -0.1713 0.6927 0.7006 +vn -0.8020 -0.0151 -0.5972 +vn -0.0484 -0.6213 -0.7821 +vn -0.5444 -0.1425 -0.8267 +vn -0.6125 0.6995 0.3682 +vn -0.6560 0.6162 0.4359 +vn 0.1261 0.6010 0.7892 +vn -0.2348 0.5635 0.7921 +vn 0.2158 -0.5806 0.7851 +vn 0.1633 -0.7848 0.5979 +vn 0.3859 -0.6897 0.6127 +vn 0.2780 -0.8427 0.4611 +vn 0.4926 -0.8120 0.3130 +vn 0.3596 -0.8972 0.2563 +vn 0.5239 -0.8442 -0.1136 +vn 0.3582 -0.9327 -0.0412 +vn 0.4427 -0.8030 -0.3991 +vn 0.1513 -0.9762 -0.1554 +vn 0.2574 -0.8027 -0.5379 +vn 0.4555 -0.8057 -0.3787 +vn 0.1586 -0.9601 -0.2305 +vn -0.4487 0.4891 -0.7480 +vn -0.2396 0.3581 -0.9024 +vn -0.6197 0.6827 -0.3871 +vn -0.7006 0.7119 0.0477 +vn -0.7329 0.5401 0.4137 +vn -0.6563 0.5427 0.5241 +vn -0.6763 0.4431 0.5885 +vn 0.0930 -0.5156 -0.8518 +vn 0.0616 -0.3781 -0.9237 +vn -0.7450 -0.5726 -0.3423 +vn -0.3558 -0.5622 -0.7465 +vn -0.2530 -0.2605 -0.9317 +vn -0.9483 -0.3073 0.0800 +vn -0.7946 0.0862 0.6010 +vn -0.7285 0.3358 0.5971 +vn -0.8765 -0.1074 0.4692 +vn 0.1733 0.2459 0.9537 +vn 0.1294 0.4586 0.8792 +vn -0.2278 0.4284 0.8744 +vn -0.1213 0.2403 0.9631 +vn -0.9320 -0.1922 -0.3074 +vn -0.9841 -0.1216 -0.1296 +vn -0.9864 -0.0726 0.1472 +vn -0.9233 -0.0046 0.3841 +vn -0.7808 0.0237 0.6244 +vn 0.2708 -0.0134 0.9625 +vn 0.2640 -0.0559 0.9629 +vn 0.1759 -0.0490 0.9832 +vn 0.0569 -0.0343 0.9978 +vn -0.1221 -0.0472 0.9914 +vn -0.6177 -0.0412 0.7853 +vn 0.2269 0.0219 0.9737 +vn 0.1843 0.0012 0.9829 +vn 0.1760 -0.0230 0.9841 +vn 0.2081 -0.0538 0.9766 +vn 0.1226 -0.2309 0.9652 +vn 0.2181 -0.0457 0.9748 +vn 0.2585 -0.0457 0.9649 +vn 0.2525 -0.0621 0.9656 +vn -0.9105 -0.4128 -0.0232 +vn -0.7261 -0.4282 -0.5380 +vn -0.5343 -0.3767 -0.7567 +vn -0.3617 -0.3536 -0.8626 +vn -0.1516 -0.3444 -0.9265 +vn -0.7971 0.1746 -0.5781 +vn -0.5695 0.1824 -0.8015 +vn -0.2713 0.1842 -0.9447 +vn -0.0786 0.2083 -0.9749 +vn -0.9706 0.1337 0.2003 +vn -0.3195 0.7927 0.5192 +vn -0.4357 0.7903 0.4308 +vn -0.4549 0.8834 0.1125 +vn -0.4170 0.8882 0.1929 +vn -0.4222 0.9055 -0.0430 +vn -0.4266 0.8928 -0.1446 +vn -0.3632 0.8930 -0.2659 +vn -0.3480 0.8363 -0.4236 +vn -0.2079 0.7496 -0.6284 +vn -0.2164 0.8558 -0.4698 +vn 0.9497 -0.1307 -0.2846 +vn 0.9661 -0.1145 -0.2313 +vn 0.9066 -0.1850 -0.3794 +vn 0.7458 -0.5000 -0.4403 +vn 0.9764 -0.0888 -0.1967 +vn 0.9793 -0.1230 -0.1605 +vn 0.9710 -0.0939 -0.2198 +vn 0.9839 -0.1477 -0.1009 +vn 0.9900 -0.1345 0.0414 +vn 0.7293 -0.0599 0.6816 +vn 0.7202 -0.0348 0.6929 +vn 0.6542 0.0039 0.7563 +vn 0.5981 0.0001 0.8014 +vn 0.2440 0.0483 0.9686 +vn 0.1790 -0.8629 0.4726 +vn 0.2632 -0.9305 0.2547 +vn 0.2380 -0.9709 -0.0271 +vn 0.2089 -0.9401 -0.2693 +vn 0.0699 -0.9139 -0.3998 +vn -0.3531 
-0.7597 -0.5460 +vn -0.4876 -0.5432 -0.6835 +vn -0.3426 -0.5530 -0.7595 +vn -0.2468 -0.5312 -0.8105 +vn -0.0393 -0.6982 -0.7148 +vn -0.8279 -0.4104 -0.3824 +vn -0.9910 -0.1062 -0.0815 +vn -0.5426 -0.6694 -0.5075 +vn -0.2432 -0.8468 -0.4731 +vn 0.1023 -0.9612 -0.2561 +vn 0.3417 -0.9395 0.0257 +vn 0.4696 -0.7971 0.3796 +vn -0.4371 0.1383 0.8887 +vn -0.1200 -0.0334 0.9922 +vn 0.2153 -0.3198 0.9227 +vn 0.4148 -0.5760 0.7044 +vn -0.9661 0.1025 0.2369 +vn -0.8109 0.1795 0.5569 +vn -0.4694 -0.0905 -0.8784 +vn -0.0498 -0.4428 -0.8952 +vn -0.7234 0.2799 -0.6312 +vn -0.7742 0.5924 -0.2227 +vn -0.6552 0.7367 0.1670 +vn -0.4180 0.7410 0.5256 +vn -0.0386 0.5804 0.8134 +vn 0.3410 0.3061 0.8888 +vn 0.6392 -0.0234 0.7687 +vn 0.7939 -0.3491 0.4978 +vn 0.7813 -0.6234 0.0300 +vn 0.5949 -0.6935 -0.4063 +vn 0.2930 -0.6203 -0.7276 +vn -0.1635 0.1241 -0.9787 +vn 0.2781 -0.2184 -0.9354 +vn -0.5807 0.4239 -0.6951 +vn -0.7220 0.6585 -0.2122 +vn -0.8063 0.5345 -0.2534 +vn -0.8399 0.5162 0.1677 +vn -0.7045 0.6902 0.1652 +vn -0.6004 0.6262 0.4973 +vn -0.7239 0.4640 0.5105 +vn -0.4448 0.3435 0.8271 +vn -0.3847 0.4515 0.8051 +vn -0.0457 0.1035 0.9936 +vn 0.3029 -0.3222 0.8969 +vn 0.3409 -0.1308 0.9310 +vn 0.6267 -0.5290 0.5722 +vn 0.7298 -0.4078 0.5488 +vn 0.8510 -0.5245 0.0270 +vn 0.7565 -0.6519 0.0526 +vn 0.6473 -0.5909 -0.4815 +vn 0.5166 -0.4340 -0.7381 +vn -0.3706 -0.1559 -0.9156 +vn -0.7542 0.3412 -0.5610 +vn 0.1439 -0.5089 -0.8487 +vn 0.4585 -0.6064 -0.6497 +vn 0.6679 -0.6497 -0.3630 +vn 0.7874 -0.6053 0.1163 +vn -0.4101 0.7187 0.5616 +vn -0.6165 0.7394 0.2707 +vn -0.7731 0.6259 -0.1028 +vn -0.0249 -0.5003 -0.8655 +vn -0.4760 -0.1567 -0.8654 +vn -0.7618 0.2679 -0.5899 +vn -0.7898 0.5853 -0.1834 +vn -0.6491 0.7335 0.2017 +vn -0.3911 0.7434 0.5425 +vn -0.0121 0.5845 0.8113 +vn 0.3174 0.3196 0.8928 +vn 0.5851 -0.0114 0.8109 +vn 0.7506 -0.3144 0.5811 +vn 0.7873 -0.6094 0.0939 +vn 0.6112 -0.7015 -0.3665 +vn 0.3266 -0.6532 -0.6831 +vn -0.1823 0.0912 -0.9790 +vn 0.2529 -0.2006 -0.9465 +vn -0.6195 0.3875 -0.6827 +vn -0.8122 0.5250 -0.2543 +vn -0.8234 0.5359 0.1863 +vn -0.7032 0.4603 0.5419 +vn -0.4367 0.2945 0.8500 +vn -0.0281 0.0326 0.9991 +vn 0.3682 -0.2336 0.8999 +vn 0.6880 -0.4968 -0.5290 +vn 0.5192 -0.3855 -0.7627 +vn 0.2569 -0.1926 -0.9470 +vn -0.1805 0.1104 -0.9774 +vn -0.2019 0.1331 -0.9703 +vn 0.2528 -0.2011 -0.9464 +vn -0.6080 0.4151 -0.6768 +vn -0.6081 0.4335 -0.6651 +vn -0.8016 0.5487 -0.2372 +vn -0.7962 0.5590 -0.2315 +vn -0.8164 0.5451 0.1904 +vn -0.8194 0.5491 0.1645 +vn -0.7050 0.4523 0.5463 +vn -0.7261 0.4456 0.5237 +vn -0.4307 0.2664 0.8623 +vn -0.4341 0.2177 0.8742 +vn 0.0070 -0.0054 1.0000 +vn 0.0476 -0.0875 0.9950 +vn 0.4016 -0.2541 0.8799 +vn 0.4251 -0.2985 0.8545 +vn 0.7144 -0.4871 0.5023 +vn 0.7055 -0.4807 0.5208 +vn 0.8218 -0.5698 0.0032 +vn 0.8225 -0.5685 -0.0154 +vn 0.7131 -0.4862 -0.5051 +vn 0.7143 -0.4989 -0.4908 +vn 0.5282 -0.3695 -0.7645 +vn 0.5270 -0.3853 -0.7575 +vn 0.0518 -0.5238 -0.8503 +vn -0.4371 -0.1861 -0.8799 +vn -0.7772 0.2904 -0.5582 +vn -0.7872 0.6016 -0.1357 +vn -0.6242 0.7405 0.2491 +vn -0.3847 0.7296 0.5654 +vn -0.0785 0.5655 0.8210 +vn 0.2042 0.3150 0.9269 +vn 0.5034 -0.0281 0.8636 +vn 0.6970 -0.3152 0.6441 +vn 0.7817 -0.6068 0.1439 +vn 0.6371 -0.6920 -0.3395 +vn 0.3828 -0.6474 -0.6590 +vn 0.2194 -0.4718 -0.8539 +vn 0.1757 -0.5931 -0.7857 +vn 0.2846 -0.6343 -0.7188 +vn 0.3382 -0.6588 -0.6720 +vn 0.5237 -0.6905 -0.4990 +vn 0.5883 -0.6950 -0.4135 +vn 0.6161 -0.6042 0.5054 +vn 0.7687 -0.6109 0.1894 +vn 0.8343 -0.5513 0.0098 +vn 0.7284 -0.6632 -0.1722 +vn 0.1285 -0.3544 0.9262 
+vn 0.1502 -0.4700 0.8698 +vn 0.3219 -0.5236 0.7888 +vn 0.5137 -0.5014 0.6962 +vn 0.0247 -0.3732 0.9274 +vn -0.0521 -0.3641 0.9299 +vn 0.0139 -0.3073 0.9515 +vn 0.4789 -0.3038 0.8236 +vn 0.4530 -0.1804 -0.8731 +vn 0.4791 -0.2700 -0.8352 +vn 0.6502 -0.3068 -0.6951 +vn 0.8835 -0.2737 -0.3802 +vn 0.9755 -0.2194 -0.0186 +vn 0.9114 -0.2029 0.3581 +vn 0.6745 -0.1787 0.7163 +vn 0.4180 -0.1572 0.8947 +vn 0.2422 -0.1256 0.9621 +vn 0.1307 -0.0920 0.9871 +vn 0.1522 -0.0687 0.9860 +vn 0.5657 -0.0987 0.8187 +vn 0.6710 0.0406 0.7403 +vn 0.6106 0.1910 0.7686 +vn 0.5908 0.3790 0.7123 +vn 0.6276 0.4966 0.5996 +vn 0.6706 0.5198 0.5292 +vn 0.6753 0.4981 0.5439 +vn 0.5922 0.5372 0.6005 +vn 0.4629 0.6179 0.6356 +vn 0.2066 0.8288 0.5199 +vn -0.0839 0.9586 0.2722 +vn -0.1361 0.9880 0.0728 +vn -0.1062 0.9910 -0.0814 +vn 0.9299 -0.0369 -0.3659 +vn 0.9244 -0.1514 -0.3500 +vn 0.9485 0.0433 -0.3138 +vn 0.8641 -0.0663 -0.4990 +vn 0.9259 -0.0074 -0.3776 +vn 0.9541 0.0094 -0.2992 +vn 0.9615 0.0328 -0.2730 +vn 0.9514 -0.2127 -0.2228 +vn 0.9903 -0.1231 -0.0648 +vn 0.9999 0.0100 -0.0073 +vn 0.9969 -0.0711 -0.0339 +vn 0.9729 -0.2306 0.0163 +vn 0.9431 -0.3049 0.1327 +vn 0.8361 -0.3568 0.4167 +vn 0.6578 -0.3054 0.6885 +vn 0.3664 -0.1704 0.9147 +vn 0.5885 -0.1138 -0.8005 +vn 0.6469 -0.0832 -0.7580 +vn 0.6938 -0.0758 -0.7162 +vn 0.7321 -0.0968 -0.6743 +vn 0.7497 -0.1527 -0.6439 +vn 0.7517 -0.1635 -0.6389 +vn 0.7551 -0.1304 -0.6425 +vn 0.7852 -0.1046 -0.6104 +vn 0.8552 -0.0695 -0.5136 +vn 0.9120 -0.0332 -0.4088 +vn 0.9373 0.0224 -0.3479 +vn 0.9372 0.0876 -0.3376 +vn 0.9305 0.0258 -0.3655 +vn 0.9347 -0.1653 -0.3147 +vn 0.9194 -0.3607 -0.1566 +vn 0.9507 -0.3102 0.0040 +vn 0.9996 -0.0166 0.0230 +vn 0.9979 0.0643 -0.0017 +vn 0.9931 -0.0536 0.1041 +vn 0.9618 -0.1384 0.2360 +vn 0.7668 -0.2580 0.5877 +vn 0.5047 -0.2863 0.8144 +vn 0.2462 -0.2166 0.9447 +vn 0.7819 -0.2324 -0.5784 +vn 0.7107 -0.1176 -0.6936 +vn 0.6497 -0.1192 -0.7508 +vn 0.5848 -0.1541 -0.7964 +vn 0.5389 -0.0368 0.8416 +vn 0.5777 -0.0754 0.8128 +vn 0.4705 0.0126 0.8823 +vn 0.2036 -0.0665 0.9768 +vn 0.1881 -0.1587 0.9692 +vn 0.2154 0.0065 0.9765 +vn 0.1986 -0.0363 0.9794 +vn 0.1715 -0.0448 0.9842 +vn 0.2744 -0.0716 0.9589 +vn 0.4345 -0.1031 0.8947 +vn 0.6723 -0.1966 -0.7137 +vn 0.8582 -0.2123 -0.4674 +vn 0.9779 -0.1931 -0.0800 +vn 0.9252 -0.1715 0.3385 +vn 0.6883 -0.1415 0.7115 +vn -0.8330 -0.4103 -0.3711 +vn -0.9912 -0.1114 -0.0718 +vn -0.5481 -0.6735 -0.4960 +vn -0.2404 -0.8576 -0.4547 +vn 0.1036 -0.9640 -0.2450 +vn 0.3324 -0.9427 0.0295 +vn 0.4399 -0.8087 0.3904 +vn -0.4552 0.0853 0.8863 +vn -0.1396 -0.0764 0.9873 +vn 0.1905 -0.3285 0.9251 +vn 0.3785 -0.5867 0.7159 +vn -0.9634 0.0884 0.2530 +vn -0.8079 0.1457 0.5710 +vn -0.4856 -0.7421 0.4620 +vn 0.7035 -0.1761 -0.6885 +vn 0.8350 -0.2345 -0.4977 +vn 0.9535 -0.2708 -0.1322 +vn 0.9162 -0.2621 0.3032 +vn 0.8208 -0.5708 -0.0218 +vn 0.8839 -0.4274 -0.1900 +vn 0.8720 -0.4261 0.2410 +vn 0.7908 -0.4797 -0.3801 +vn 0.8142 -0.3862 -0.4335 +vn 0.7631 -0.2583 -0.5925 +vn 0.6989 -0.2058 0.6849 +vn 0.4613 -0.1309 0.8775 +vn 0.3213 -0.0673 0.9446 +vn 0.2205 -0.0133 0.9753 +vn 0.2669 -0.0823 0.9602 +vn 0.3608 -0.1216 0.9247 +vn 0.5169 -0.2276 0.8252 +vn 0.7230 -0.3705 0.5831 +vn 0.2840 -0.1474 0.9474 +vn 0.4829 -0.2509 0.8390 +vn 0.7250 -0.4798 0.4942 +vn -0.0125 -0.9071 0.4208 +vn 0.1952 -0.9576 -0.2117 +vn 0.2056 -0.9748 -0.0870 +vn 0.0428 -0.9131 -0.4056 +vn 0.0052 -0.9116 0.4111 +vn 0.0855 -0.9423 0.3237 +vn 0.1811 -0.9742 0.1348 +vn 0.8055 -0.1308 -0.5779 +vn 0.8146 -0.1397 -0.5629 +vn 0.8081 -0.1613 -0.5666 +vn 0.7684 -0.1570 
-0.6204 +vn 0.8011 -0.1421 -0.5814 +vn 0.8918 -0.1284 -0.4337 +vn 0.9599 -0.0794 -0.2688 +vn 0.9940 -0.0024 -0.1092 +vn 0.9888 0.1452 0.0347 +vn 0.9723 0.2124 0.0974 +vn 0.8810 -0.1536 0.4475 +vn 0.9241 -0.2089 0.3199 +vn 0.9865 0.0283 0.1612 +vn 0.8938 0.2295 0.3852 +vn 0.7810 0.4872 0.3907 +vn 0.7009 0.5183 0.4900 +vn 0.6954 0.4493 0.5608 +vn 0.5670 0.2838 0.7733 +vn 0.3409 0.1380 0.9299 +vn 0.1382 0.0629 0.9884 +vn 0.2677 -0.7591 -0.5933 +vn -0.7514 -0.3426 -0.5640 +vn -0.7313 0.5954 0.3327 +vn -0.6067 0.6878 0.3985 +vn -0.2293 -0.0950 0.9687 +vn -0.3155 0.0906 0.9446 +vn -0.1282 0.1331 0.9828 +vn -0.0650 0.0055 0.9979 +vn -0.3287 0.0236 0.9441 +vn -0.2863 -0.0793 0.9548 +vn -0.1275 -0.1082 0.9859 +vn -0.1513 0.0079 0.9885 +vn -0.0700 -0.0945 0.9931 +vn -0.0888 -0.1643 0.9824 +vn -0.2206 -0.1340 0.9661 +vn -0.1827 -0.0873 0.9793 +vn -0.4432 -0.0757 0.8932 +vn -0.3643 -0.0502 0.9299 +vn -0.5781 0.0465 0.8147 +vn -0.5322 -0.0348 0.8459 +vn -0.5136 -0.2173 0.8301 +vn -0.5848 0.0510 0.8096 +vn -0.0989 -0.3206 0.9420 +vn 0.0052 -0.2826 0.9592 +vn -0.3850 -0.3985 0.8324 +vn -0.0758 0.6120 0.7872 +vn -0.0962 0.5735 0.8136 +vn -0.0487 0.6467 0.7612 +vn -0.3514 0.5209 0.7779 +vn -0.3281 0.5322 0.7805 +vn -0.0961 0.6432 0.7596 +vn -0.2419 0.5874 0.7723 +vn -0.1245 0.6393 0.7588 +vn 0.8425 0.5267 -0.1130 +vn 0.8517 0.4851 0.1984 +vn 0.7093 0.5759 -0.4065 +vn 0.4106 0.6010 -0.6857 +vn -0.0706 0.7400 0.6689 +vn 0.8020 0.4763 0.3604 +vn 0.7445 0.5143 0.4256 +vn -0.1542 0.8837 0.4418 +vn 0.6669 0.5567 0.4953 +vn -0.1917 0.9700 0.1498 +vn 0.5744 0.5663 0.5910 +vn -0.1347 0.9801 -0.1459 +vn 0.4563 0.5437 0.7045 +vn -0.0318 0.9268 -0.3742 +vn 0.2680 0.5218 0.8099 +vn 0.0365 0.8697 -0.4922 +vn -0.0282 0.6580 0.7525 +vn -0.0207 0.5824 0.8126 +vn -0.1377 0.6718 0.7278 +vn -0.1193 0.5804 0.8055 +vn -0.1050 -0.0812 0.9912 +vn -0.3981 -0.1210 0.9093 +vn -0.0115 -0.0684 0.9976 +vn -0.5866 -0.0333 0.8092 +vn -0.4889 -0.0144 0.8722 +vn -0.6823 -0.0062 0.7310 +vn -0.7198 0.0582 0.6918 +vn -0.6545 -0.2668 0.7074 +vn -0.7229 0.0346 0.6901 +vn -0.5509 -0.4271 0.7170 +vn -0.5902 -0.1421 0.7947 +vn -0.0686 0.0082 0.9976 +vn -0.1723 -0.0230 0.9848 +vn -0.3172 -0.0205 0.9481 +vn -0.4210 0.0011 0.9071 +vn 0.6374 0.7462 0.1920 +vn 0.6878 0.6362 0.3495 +vn 0.5175 0.8554 0.0210 +vn 0.3015 0.9468 -0.1128 +vn 0.6956 0.5872 0.4139 +vn 0.6897 0.5680 0.4491 +vn 0.6559 0.5542 0.5126 +vn 0.5695 0.5475 0.6131 +vn 0.4330 0.5456 0.7175 +vn 0.2435 0.5467 0.8011 +vn -0.3924 0.6466 0.6542 +vn -0.3784 0.5634 0.7344 +vn -0.5536 0.5970 0.5807 +vn -0.5594 0.5256 0.6410 +vn -0.4398 0.5341 0.7220 +vn -0.3960 0.5715 0.7187 +vn -0.4916 0.5201 0.6984 +vn -0.3934 0.6549 0.6452 +vn -0.5067 0.7974 0.3276 +vn -0.5260 0.8476 -0.0705 +vn -0.4046 0.7796 -0.4780 +vn -0.2141 0.6459 -0.7328 +vn -0.0650 0.5489 -0.8334 +vn -0.5539 0.5414 0.6325 +vn -0.5085 0.5355 0.6743 +vn -0.4980 0.5354 0.6822 +vn -0.4144 0.6190 0.6672 +vn -0.5610 0.5669 0.6032 +vn -0.1694 0.6583 0.7334 +vn -0.0485 0.6505 0.7579 +vn -0.4026 0.5953 0.6954 +vn -0.5413 0.5415 0.6432 +vn -0.2018 0.6295 0.7503 +vn -0.0728 0.6195 0.7816 +vn -0.3947 0.2615 0.8808 +vn -0.5905 0.2414 0.7701 +vn -0.1121 0.2763 0.9545 +vn -0.0198 0.2905 0.9567 +s 1 +f 34//1 24//2 25//3 +f 34//1 25//3 33//4 +f 34//1 46//5 48//6 +f 34//1 48//6 35//7 +f 25//3 24//2 13//8 +f 25//3 13//8 14//9 +f 24//2 26//10 15//11 +f 24//2 15//11 13//8 +f 14//9 13//8 9//12 +f 14//9 9//12 10//13 +f 13//8 15//11 11//14 +f 13//8 11//14 9//12 +f 80//15 79//16 76//17 +f 9//12 11//14 6//18 +f 24//2 34//1 35//7 +f 24//2 35//7 26//10 +f 46//5 
34//1 33//4 +f 46//5 33//4 44//19 +f 51//20 50//21 60//22 +f 51//20 60//22 61//23 +f 51//20 52//24 48//6 +f 51//20 48//6 46//5 +f 60//22 71//25 72//26 +f 60//22 72//26 61//23 +f 61//23 72//26 70//27 +f 61//23 70//27 59//28 +f 71//25 75//29 76//17 +f 71//25 76//17 72//26 +f 72//26 76//17 74//30 +f 72//26 74//30 70//27 +f 61//23 59//28 52//24 +f 61//23 52//24 51//20 +f 46//5 44//19 50//21 +f 46//5 50//21 51//20 +f 30//31 22//32 23//33 +f 30//31 23//33 32//34 +f 30//31 47//35 49//36 +f 30//31 49//36 31//37 +f 23//33 22//32 7//38 +f 23//33 7//38 12//39 +f 22//32 21//40 8//41 +f 22//32 8//41 7//38 +f 4//42 2//43 1//44 +f 4//42 1//44 3//45 +f 7//38 8//41 2//43 +f 10//13 9//12 5//46 +f 22//32 30//31 31//37 +f 22//32 31//37 21//40 +f 47//35 30//31 32//34 +f 47//35 32//34 45//47 +f 54//48 53//49 62//50 +f 54//48 62//50 63//51 +f 54//48 55//52 49//36 +f 54//48 49//36 47//35 +f 62//50 73//53 77//54 +f 62//50 77//54 63//51 +f 63//51 77//54 78//55 +f 63//51 78//55 64//56 +f 82//57 84//58 83//59 +f 82//57 83//59 81//60 +f 9//12 6//18 5//46 +f 7//38 2//43 4//42 +f 75//29 80//15 76//17 +f 63//51 64//56 55//52 +f 63//51 55//52 54//48 +f 47//35 45//47 53//49 +f 47//35 53//49 54//48 +f 67//61 66//62 68//63 +f 39//64 67//61 68//63 +f 39//64 68//63 41//65 +f 66//62 67//61 56//66 +f 66//62 56//66 57//67 +f 67//61 39//64 36//68 +f 67//61 36//68 56//66 +f 43//69 69//70 65//71 +f 43//69 65//71 40//72 +f 38//73 40//72 65//71 +f 38//73 65//71 58//74 +f 56//66 36//68 37//73 +f 56//66 37//73 57//67 +f 41//65 68//63 42//75 +f 18//76 16//77 20//78 +f 39//64 41//65 16//77 +f 39//64 16//77 18//76 +f 20//78 28//79 29//80 +f 20//78 29//80 18//76 +f 18//76 29//80 36//68 +f 18//76 36//68 39//64 +f 43//69 40//72 19//78 +f 43//69 19//78 17//81 +f 38//73 27//82 19//78 +f 38//73 19//78 40//72 +f 29//80 28//79 37//73 +f 29//80 37//73 36//68 +f 41//65 42//75 16//77 +f 74//30 76//17 79//16 +f 12//39 7//38 4//42 +f 78//55 77//54 83//59 +f 73//53 81//60 77//54 +f 81//60 83//59 77//54 +f 153//83 156//84 166//85 +f 166//85 156//84 163//86 +f 166//85 163//86 174//87 +f 174//87 163//86 180//88 +f 180//88 163//86 164//89 +f 180//88 164//89 182//90 +f 182//90 164//89 176//91 +f 176//91 164//89 159//92 +f 176//91 159//92 170//93 +f 170//93 159//92 161//94 +f 161//94 159//92 131//95 +f 161//94 131//95 133//96 +f 133//96 131//95 122//97 +f 122//97 131//95 125//98 +f 122//97 125//98 113//99 +f 113//99 125//98 107//100 +f 107//100 125//98 124//101 +f 107//100 124//101 106//102 +f 106//102 124//101 110//103 +f 110//103 124//101 130//104 +f 110//103 130//104 118//105 +f 118//105 130//104 128//106 +f 128//106 130//104 156//84 +f 128//106 156//84 153//83 +f 163//86 156//84 150//107 +f 164//89 163//86 150//107 +f 159//92 164//89 150//107 +f 131//95 159//92 150//107 +f 125//98 131//95 150//107 +f 124//101 125//98 150//107 +f 130//104 124//101 150//107 +f 156//84 130//104 150//107 +f 153//83 166//85 168//108 +f 153//83 168//108 155//109 +f 166//85 174//87 178//110 +f 166//85 178//110 168//108 +f 174//87 180//88 186//111 +f 174//87 186//111 178//110 +f 180//88 182//90 188//112 +f 180//88 188//112 186//111 +f 182//90 176//91 183//113 +f 182//90 183//113 188//112 +f 176//91 170//93 171//114 +f 176//91 171//114 183//113 +f 170//93 161//94 162//115 +f 170//93 162//115 171//114 +f 161//94 133//96 132//116 +f 161//94 132//116 162//115 +f 133//96 122//97 119//117 +f 133//96 119//117 132//116 +f 122//97 113//99 109//118 +f 122//97 109//118 119//117 +f 113//99 107//100 103//119 +f 113//99 103//119 109//118 +f 107//100 106//102 100//120 +f 107//100 100//120 
103//119 +f 106//102 110//103 105//121 +f 106//102 105//121 100//120 +f 110//103 118//105 116//122 +f 110//103 116//122 105//121 +f 118//105 128//106 127//123 +f 118//105 127//123 116//122 +f 128//106 153//83 155//109 +f 128//106 155//109 127//123 +f 155//109 168//108 167//124 +f 155//109 167//124 158//125 +f 168//108 175//126 167//124 +f 168//108 178//110 184//127 +f 168//108 184//127 175//126 +f 178//110 189//128 184//127 +f 178//110 186//111 192//129 +f 178//110 192//129 189//128 +f 186//111 195//130 192//129 +f 186//111 188//112 196//131 +f 186//111 196//131 195//130 +f 188//112 193//132 196//131 +f 188//112 183//113 191//133 +f 188//112 191//133 193//132 +f 183//113 187//134 191//133 +f 183//113 171//114 179//135 +f 183//113 179//135 187//134 +f 171//114 172//136 179//135 +f 171//114 162//115 165//137 +f 171//114 165//137 172//136 +f 162//115 160//138 165//137 +f 162//115 132//116 134//139 +f 162//115 134//139 160//138 +f 132//116 126//140 134//139 +f 132//116 119//117 120//141 +f 132//116 120//141 126//140 +f 119//117 112//142 120//141 +f 119//117 109//118 104//143 +f 119//117 104//143 112//142 +f 109//118 99//144 104//143 +f 109//118 103//119 95//145 +f 109//118 95//145 99//144 +f 103//119 93//146 95//145 +f 103//119 100//120 92//147 +f 103//119 92//147 93//146 +f 100//120 94//148 92//147 +f 100//120 105//121 98//149 +f 100//120 98//149 94//148 +f 105//121 102//150 98//149 +f 105//121 116//122 108//151 +f 105//121 108//151 102//150 +f 116//122 115//152 108//151 +f 116//122 127//123 123//153 +f 116//122 123//153 115//152 +f 127//123 129//154 123//153 +f 127//123 155//109 158//125 +f 127//123 158//125 129//154 +f 158//125 167//124 177//155 +f 158//125 177//155 154//156 +f 167//124 175//126 177//155 +f 175//126 184//127 194//157 +f 175//126 194//157 177//155 +f 184//127 189//128 194//157 +f 189//128 192//129 199//158 +f 189//128 199//158 194//157 +f 192//129 195//130 199//158 +f 195//130 196//131 201//159 +f 195//130 201//159 199//158 +f 196//131 193//132 201//159 +f 193//132 191//133 197//160 +f 193//132 197//160 201//159 +f 191//133 187//134 197//160 +f 187//134 179//135 185//161 +f 187//134 185//161 197//160 +f 179//135 172//136 185//161 +f 172//136 165//137 169//162 +f 172//136 169//162 185//161 +f 165//137 160//138 151//163 +f 165//137 151//163 169//162 +f 160//138 134//139 151//163 +f 134//139 126//140 121//164 +f 134//139 121//164 151//163 +f 126//140 120//141 121//164 +f 120//141 112//142 101//165 +f 120//141 101//165 121//164 +f 112//142 104//143 101//165 +f 104//143 99//144 90//166 +f 104//143 90//166 101//165 +f 99//144 95//145 90//166 +f 95//145 93//146 88//167 +f 95//145 88//167 90//166 +f 93//146 92//147 88//167 +f 92//147 94//148 89//168 +f 92//147 89//168 88//167 +f 94//148 98//149 89//168 +f 98//149 102//150 96//169 +f 98//149 96//169 89//168 +f 102//150 108//151 96//169 +f 108//151 115//152 114//170 +f 108//151 114//170 96//169 +f 115//152 123//153 114//170 +f 123//153 129//154 154//156 +f 123//153 154//156 114//170 +f 129//154 158//125 154//156 +f 154//156 177//155 181//171 +f 154//156 181//171 157//172 +f 177//155 194//157 198//173 +f 177//155 198//173 181//171 +f 194//157 199//158 202//174 +f 194//157 202//174 198//173 +f 199//158 201//159 203//175 +f 199//158 203//175 202//174 +f 201//159 197//160 200//176 +f 201//159 200//176 203//175 +f 197//160 185//161 190//177 +f 197//160 190//177 200//176 +f 185//161 169//162 173//178 +f 185//161 173//178 190//177 +f 169//162 151//163 152//179 +f 169//162 152//179 173//178 +f 151//163 121//164 117//180 +f 151//163 117//180 
152//179 +f 121//164 101//165 97//181 +f 121//164 97//181 117//180 +f 101//165 90//166 87//182 +f 101//165 87//182 97//181 +f 90//166 88//167 85//183 +f 90//166 85//183 87//182 +f 88//167 89//168 86//184 +f 88//167 86//184 85//183 +f 89//168 96//169 91//185 +f 89//168 91//185 86//184 +f 96//169 114//170 111//186 +f 96//169 111//186 91//185 +f 114//170 154//156 157//172 +f 114//170 157//172 111//186 +f 200//176 190//177 143//187 +f 85//183 86//184 135//188 +f 190//177 173//178 149//189 +f 157//172 181//171 146//190 +f 86//184 91//185 141//191 +f 173//178 152//179 145//192 +f 91//185 111//186 138//193 +f 181//171 198//173 147//194 +f 111//186 157//172 139//195 +f 152//179 117//180 142//192 +f 198//173 202//174 148//196 +f 117//180 97//181 137//197 +f 202//174 203//175 144//198 +f 97//181 87//182 140//199 +f 203//175 200//176 144//198 +f 87//182 85//183 136//200 +f 277//201 249//202 255//203 +f 249//202 240//204 245//205 +f 249//202 245//205 255//203 +f 240//204 230//206 245//205 +f 230//206 224//207 242//208 +f 230//206 242//208 245//205 +f 224//207 226//209 242//208 +f 226//209 231//210 246//211 +f 226//209 246//211 242//208 +f 231//210 241//212 246//211 +f 241//212 251//213 257//214 +f 241//212 257//214 246//211 +f 251//213 278//215 257//214 +f 278//215 289//216 284//217 +f 278//215 284//217 257//214 +f 289//216 295//218 284//217 +f 295//218 303//219 287//220 +f 295//218 287//220 284//217 +f 303//219 301//221 287//220 +f 301//221 294//222 283//223 +f 301//221 283//223 287//220 +f 294//222 288//224 283//223 +f 288//224 277//201 255//203 +f 288//224 255//203 283//223 +f 245//205 256//225 255//203 +f 242//208 256//225 245//205 +f 246//211 256//225 242//208 +f 257//214 256//225 246//211 +f 284//217 256//225 257//214 +f 287//220 256//225 284//217 +f 283//223 256//225 287//220 +f 255//203 256//225 283//223 +f 277//201 279//226 247//227 +f 277//201 247//227 249//202 +f 249//202 247//227 238//228 +f 249//202 238//228 240//204 +f 240//204 238//228 225//229 +f 240//204 225//229 230//206 +f 230//206 225//229 220//230 +f 230//206 220//230 224//207 +f 224//207 220//230 221//231 +f 224//207 221//231 226//209 +f 226//209 221//231 227//232 +f 226//209 227//232 231//210 +f 231//210 227//232 239//233 +f 231//210 239//233 241//212 +f 241//212 239//233 248//234 +f 241//212 248//234 251//213 +f 251//213 248//234 280//235 +f 251//213 280//235 278//215 +f 278//215 280//235 291//236 +f 278//215 291//236 289//216 +f 289//216 291//236 302//237 +f 289//216 302//237 295//218 +f 295//218 302//237 309//238 +f 295//218 309//238 303//219 +f 303//219 309//238 308//239 +f 303//219 308//239 301//221 +f 301//221 308//239 300//240 +f 301//221 300//240 294//222 +f 294//222 300//240 290//241 +f 294//222 290//241 288//224 +f 288//224 290//241 279//226 +f 288//224 279//226 277//201 +f 279//226 281//242 254//243 +f 279//226 254//243 247//227 +f 247//227 254//243 244//244 +f 247//227 244//244 237//245 +f 247//227 237//245 238//228 +f 238//228 237//245 229//246 +f 238//228 229//246 223//247 +f 238//228 223//247 225//229 +f 225//229 223//247 217//248 +f 225//229 217//248 215//249 +f 225//229 215//249 220//230 +f 220//230 215//249 211//250 +f 220//230 211//250 210//251 +f 220//230 210//251 221//231 +f 221//231 210//251 214//252 +f 221//231 214//252 216//253 +f 221//231 216//253 227//232 +f 227//232 216//253 222//254 +f 227//232 222//254 228//255 +f 227//232 228//255 239//233 +f 239//233 228//255 235//256 +f 239//233 235//256 243//257 +f 239//233 243//257 248//234 +f 248//234 243//257 253//258 +f 248//234 253//258 276//259 +f 248//234 
276//259 280//235 +f 280//235 276//259 286//260 +f 280//235 286//260 292//261 +f 280//235 292//261 291//236 +f 291//236 292//261 296//262 +f 291//236 296//262 305//263 +f 291//236 305//263 302//237 +f 302//237 305//263 310//264 +f 302//237 310//264 312//265 +f 302//237 312//265 309//238 +f 309//238 312//265 314//266 +f 309//238 314//266 315//267 +f 309//238 315//267 308//239 +f 308//239 315//267 313//268 +f 308//239 313//268 311//269 +f 308//239 311//269 300//240 +f 300//240 311//269 307//270 +f 300//240 307//270 297//271 +f 300//240 297//271 290//241 +f 290//241 297//271 293//272 +f 290//241 293//272 281//242 +f 290//241 281//242 279//226 +f 281//242 274//273 254//243 +f 254//243 274//273 236//274 +f 254//243 236//274 244//244 +f 244//244 236//274 237//245 +f 237//245 236//274 219//275 +f 237//245 219//275 229//246 +f 229//246 219//275 223//247 +f 223//247 219//275 209//276 +f 223//247 209//276 217//248 +f 217//248 209//276 215//249 +f 215//249 209//276 207//277 +f 215//249 207//277 211//250 +f 211//250 207//277 210//251 +f 210//251 207//277 208//278 +f 210//251 208//278 214//252 +f 214//252 208//278 216//253 +f 216//253 208//278 218//279 +f 216//253 218//279 222//254 +f 222//254 218//279 228//255 +f 228//255 218//279 234//280 +f 228//255 234//280 235//256 +f 235//256 234//280 243//257 +f 243//257 234//280 252//281 +f 243//257 252//281 253//258 +f 253//258 252//281 282//282 +f 253//258 282//282 276//259 +f 276//259 282//282 286//260 +f 286//260 282//282 298//283 +f 286//260 298//283 292//261 +f 292//261 298//283 296//262 +f 296//262 298//283 316//284 +f 296//262 316//284 305//263 +f 305//263 316//284 310//264 +f 310//264 316//284 320//285 +f 310//264 320//285 312//265 +f 312//265 320//285 314//266 +f 314//266 320//285 321//286 +f 314//266 321//286 315//267 +f 315//267 321//286 313//268 +f 313//268 321//286 317//287 +f 313//268 317//287 311//269 +f 311//269 317//287 307//270 +f 307//270 317//287 299//288 +f 307//270 299//288 297//271 +f 297//271 299//288 293//272 +f 293//272 299//288 274//273 +f 293//272 274//273 281//242 +f 274//273 275//289 233//290 +f 274//273 233//290 236//274 +f 236//274 233//290 213//291 +f 236//274 213//291 219//275 +f 219//275 213//291 206//292 +f 219//275 206//292 209//276 +f 209//276 206//292 204//293 +f 209//276 204//293 207//277 +f 207//277 204//293 205//294 +f 207//277 205//294 208//278 +f 208//278 205//294 212//295 +f 208//278 212//295 218//279 +f 218//279 212//295 232//296 +f 218//279 232//296 234//280 +f 234//280 232//296 250//297 +f 234//280 250//297 252//281 +f 252//281 250//297 285//298 +f 252//281 285//298 282//282 +f 282//282 285//298 304//299 +f 282//282 304//299 298//283 +f 298//283 304//299 318//300 +f 298//283 318//300 316//284 +f 316//284 318//300 322//301 +f 316//284 322//301 320//285 +f 320//285 322//301 323//302 +f 320//285 323//302 321//286 +f 321//286 323//302 319//303 +f 321//286 319//303 317//287 +f 317//287 319//303 306//304 +f 317//287 306//304 299//288 +f 299//288 306//304 275//289 +f 299//288 275//289 274//273 +f 275//289 306//304 270//305 +f 206//292 213//291 259//306 +f 304//299 285//298 266//307 +f 204//293 206//292 263//308 +f 318//300 304//299 271//309 +f 205//294 204//293 262//308 +f 322//301 318//300 272//310 +f 212//295 205//294 258//311 +f 323//302 322//301 273//312 +f 232//296 212//295 269//313 +f 319//303 323//302 268//314 +f 233//290 275//289 260//315 +f 250//297 232//296 261//316 +f 306//304 319//303 267//317 +f 213//291 233//290 264//318 +f 285//298 250//297 265//319 +f 348//320 355//321 345//322 +f 348//320 345//322 
334//323 +f 324//324 330//325 336//326 +f 324//324 336//326 329//327 +f 329//327 336//326 348//320 +f 329//327 348//320 334//323 +f 326//328 329//327 334//323 +f 326//328 334//323 331//329 +f 334//323 345//322 337//330 +f 334//323 337//330 331//329 +f 325//331 324//324 329//327 +f 325//331 329//327 326//328 +f 331//329 332//332 328//333 +f 331//329 328//333 326//328 +f 331//329 337//330 339//334 +f 331//329 339//334 332//332 +f 354//335 348//320 336//326 +f 354//335 336//326 347//336 +f 354//335 359//337 355//321 +f 354//335 355//321 348//320 +f 357//338 354//335 347//336 +f 357//338 347//336 353//339 +f 357//338 361//340 359//337 +f 357//338 359//337 354//335 +f 356//341 357//338 353//339 +f 356//341 353//339 350//342 +f 356//341 360//343 361//340 +f 356//341 361//340 357//338 +f 351//344 356//341 350//342 +f 351//344 350//342 344//345 +f 351//344 358//346 360//343 +f 351//344 360//343 356//341 +f 333//347 327//348 328//333 +f 333//347 328//333 335//349 +f 335//349 344//345 343//350 +f 335//349 343//350 342//351 +f 344//345 350//342 346//352 +f 344//345 346//352 343//350 +f 350//342 353//339 349//353 +f 350//342 349//353 346//352 +f 353//339 347//336 340//354 +f 353//339 340//354 349//353 +f 347//336 336//326 330//325 +f 347//336 330//325 340//354 +f 326//328 328//333 327//348 +f 326//328 327//348 325//331 +f 335//349 342//351 338//355 +f 335//349 338//355 333//347 +f 351//344 341//356 352//357 +f 351//344 352//357 358//346 +f 332//332 339//334 352//357 +f 332//332 352//357 341//356 +f 351//344 344//345 335//349 +f 351//344 335//349 341//356 +f 335//349 328//333 332//332 +f 335//349 332//332 341//356 +f 709//358 723//359 712//360 +f 709//358 712//360 702//361 +f 733//362 728//363 721//364 +f 733//362 721//364 727//365 +f 728//363 723//359 709//358 +f 728//363 709//358 721//364 +f 731//366 726//367 723//359 +f 731//366 723//359 728//363 +f 723//359 726//367 720//368 +f 723//359 720//368 712//360 +f 732//369 731//366 728//363 +f 732//369 728//363 733//362 +f 726//367 731//366 729//370 +f 726//367 729//370 725//371 +f 726//367 725//371 718//372 +f 726//367 718//372 720//368 +f 703//373 710//374 721//364 +f 703//373 721//364 709//358 +f 703//373 709//358 702//361 +f 703//373 702//361 698//375 +f 700//376 704//377 710//374 +f 700//376 710//374 703//373 +f 700//376 703//373 698//375 +f 700//376 698//375 696//378 +f 701//379 707//380 704//377 +f 701//379 704//377 700//376 +f 701//379 700//376 696//378 +f 701//379 696//378 697//381 +f 706//382 713//383 707//380 +f 706//382 707//380 701//379 +f 706//382 701//379 697//381 +f 706//382 697//381 699//384 +f 724//385 722//386 729//370 +f 724//385 729//370 730//387 +f 722//386 715//388 714//389 +f 722//386 714//389 713//383 +f 713//383 714//389 711//390 +f 713//383 711//390 707//380 +f 707//380 711//390 708//391 +f 707//380 708//391 704//377 +f 704//377 708//391 717//392 +f 704//377 717//392 710//374 +f 710//374 717//392 727//365 +f 710//374 727//365 721//364 +f 731//366 732//369 730//387 +f 731//366 730//387 729//370 +f 722//386 724//385 719//393 +f 722//386 719//393 715//388 +f 706//382 699//384 705//394 +f 706//382 705//394 716//395 +f 725//371 716//395 705//394 +f 725//371 705//394 718//372 +f 706//382 716//395 722//386 +f 706//382 722//386 713//383 +f 722//386 716//395 725//371 +f 722//386 725//371 729//370 +f 514//396 519//397 516//398 +f 514//396 516//398 505//399 +f 493//400 512//401 519//397 +f 493//400 519//397 514//396 +f 493//400 514//396 509//402 +f 493//400 509//402 488//403 +f 475//404 477//405 512//401 +f 475//404 512//401 493//400 +f 
488//403 509//402 513//406 +f 488//403 513//406 489//407 +f 475//404 493//400 488//403 +f 475//404 488//403 471//408 +f 471//408 488//403 489//407 +f 471//408 489//407 472//409 +f 444//410 445//411 477//405 +f 444//410 477//405 475//404 +f 413//412 406//413 445//411 +f 413//412 445//411 444//410 +f 444//410 475//404 471//408 +f 444//410 471//408 443//414 +f 443//414 471//408 472//409 +f 443//414 472//409 446//415 +f 383//416 375//417 406//413 +f 383//416 406//413 413//412 +f 379//418 374//419 375//417 +f 379//418 375//417 383//416 +f 384//420 387//421 396//422 +f 384//420 396//422 381//423 +f 384//420 379//418 383//416 +f 384//420 383//416 387//421 +f 393//424 390//425 374//419 +f 393//424 374//419 379//418 +f 393//424 379//418 384//420 +f 393//424 384//420 399//426 +f 432//427 429//428 390//425 +f 432//427 390//425 393//424 +f 399//426 384//420 381//423 +f 399//426 381//423 402//429 +f 430//430 399//426 402//429 +f 430//430 402//429 433//431 +f 430//430 432//427 393//424 +f 430//430 393//424 399//426 +f 469//432 473//433 429//428 +f 469//432 429//428 432//427 +f 505//399 516//398 473//433 +f 505//399 473//433 469//432 +f 469//432 432//427 430//430 +f 469//432 430//430 467//434 +f 499//435 505//399 469//432 +f 499//435 469//432 467//434 +f 509//402 514//396 505//399 +f 509//402 505//399 499//435 +f 513//406 509//402 499//435 +f 513//406 499//435 496//436 +f 467//434 430//430 433//431 +f 467//434 433//431 463//437 +f 496//436 499//435 467//434 +f 496//436 467//434 463//437 +f 457//438 463//437 433//431 +f 457//438 433//431 437//439 +f 437//439 433//431 402//429 +f 437//439 402//429 395//440 +f 431//441 437//439 395//440 +f 431//441 395//440 389//442 +f 389//442 370//443 366//444 +f 389//442 366//444 388//445 +f 388//445 366//444 368//446 +f 388//445 368//446 394//447 +f 428//448 388//445 394//447 +f 428//448 394//447 438//449 +f 428//448 431//441 389//442 +f 428//448 389//442 388//445 +f 465//450 457//438 437//439 +f 465//450 437//439 431//441 +f 465//450 431//441 428//448 +f 465//450 428//448 464//451 +f 464//451 428//448 438//449 +f 464//451 438//449 470//452 +f 510//453 464//451 470//452 +f 510//453 470//452 520//454 +f 503//455 465//450 464//451 +f 503//455 464//451 510//453 +f 495//456 496//436 463//437 +f 495//456 463//437 457//438 +f 525//457 510//453 520//454 +f 525//457 520//454 528//458 +f 495//456 457//438 465//450 +f 495//456 465//450 503//455 +f 517//459 503//455 510//453 +f 517//459 510//453 525//457 +f 495//456 503//455 517//459 +f 495//456 517//459 515//460 +f 497//461 513//406 496//436 +f 497//461 496//436 495//456 +f 486//462 489//407 513//406 +f 486//462 513//406 497//461 +f 497//461 495//456 515//460 +f 497//461 515//460 500//463 +f 490//464 486//462 497//461 +f 490//464 497//461 500//463 +f 474//465 472//409 489//407 +f 474//465 489//407 486//462 +f 446//415 472//409 474//465 +f 446//415 474//465 450//466 +f 474//465 486//462 490//464 +f 474//465 490//464 482//467 +f 452//468 450//466 474//465 +f 452//468 474//465 482//467 +f 387//421 383//416 413//412 +f 387//421 413//412 416//469 +f 416//469 413//412 444//410 +f 416//469 444//410 443//414 +f 396//422 387//421 416//469 +f 396//422 416//469 421//470 +f 421//470 416//469 443//414 +f 421//470 443//414 446//415 +f 424//471 421//470 446//415 +f 424//471 446//415 450//466 +f 424//471 450//466 452//468 +f 424//471 452//468 426//472 +f 426//472 452//468 453//473 +f 426//472 453//473 423//474 +f 396//422 421//470 424//471 +f 396//422 424//471 409//475 +f 385//476 381//423 396//422 +f 385//476 396//422 409//475 +f 395//440 
402//429 381//423 +f 395//440 381//423 385//476 +f 409//475 424//471 426//472 +f 409//475 426//472 411//477 +f 411//477 426//472 423//474 +f 411//477 423//474 397//478 +f 370//443 389//442 395//440 +f 370//443 395//440 377//479 +f 377//479 395//440 385//476 +f 377//479 385//476 386//480 +f 386//480 385//476 409//475 +f 386//480 409//475 411//477 +f 404//481 386//480 411//477 +f 404//481 411//477 397//478 +f 373//482 377//479 386//480 +f 373//482 386//480 404//481 +f 364//483 370//443 377//479 +f 364//483 377//479 373//482 +f 366//444 370//443 364//483 +f 366//444 364//483 362//484 +f 368//446 366//444 362//484 +f 368//446 362//484 363//485 +f 372//486 368//446 363//485 +f 372//486 363//485 369//487 +f 394//447 368//446 372//486 +f 394//447 372//486 400//488 +f 439//489 438//449 394//447 +f 439//489 394//447 400//488 +f 470//452 438//449 439//489 +f 470//452 439//489 458//490 +f 508//491 520//454 470//452 +f 508//491 470//452 458//490 +f 528//458 520//454 508//491 +f 528//458 508//491 522//492 +f 528//458 522//492 524//493 +f 528//458 524//493 527//494 +f 525//457 528//458 527//494 +f 525//457 527//494 526//495 +f 517//459 525//457 526//495 +f 517//459 526//495 518//496 +f 511//497 515//460 517//459 +f 511//497 517//459 518//496 +f 500//463 515//460 511//497 +f 500//463 511//497 507//498 +f 502//499 490//464 500//463 +f 502//499 500//463 507//498 +f 482//467 490//464 502//499 +f 482//467 502//499 491//500 +f 453//473 452//468 482//467 +f 453//473 482//467 491//500 +f 407//501 404//481 397//478 +f 407//501 397//478 408//502 +f 408//502 397//478 423//474 +f 408//502 423//474 422//503 +f 422//503 423//474 453//473 +f 422//503 453//473 449//504 +f 449//504 453//473 491//500 +f 449//504 491//500 476//505 +f 476//505 491//500 502//499 +f 476//505 502//499 481//506 +f 481//506 502//499 507//498 +f 481//506 507//498 484//507 +f 451//508 449//504 476//505 +f 451//508 476//505 481//506 +f 425//509 422//503 449//504 +f 425//509 449//504 451//508 +f 425//509 407//501 408//502 +f 425//509 408//502 422//503 +f 373//482 404//481 407//501 +f 373//482 407//501 382//510 +f 364//483 373//482 382//510 +f 364//483 382//510 371//511 +f 382//510 407//501 425//509 +f 382//510 425//509 419//512 +f 484//507 507//498 511//497 +f 484//507 511//497 483//513 +f 419//512 425//509 451//508 +f 419//512 451//508 448//514 +f 451//508 481//506 484//507 +f 451//508 484//507 448//514 +f 448//514 484//507 483//513 +f 448//514 483//513 442//515 +f 483//513 511//497 518//496 +f 483//513 518//496 498//516 +f 405//517 419//512 448//514 +f 405//517 448//514 442//515 +f 405//517 371//511 382//510 +f 405//517 382//510 419//512 +f 362//484 364//483 371//511 +f 362//484 371//511 367//518 +f 363//485 362//484 367//518 +f 363//485 367//518 365//519 +f 367//518 371//511 405//517 +f 367//518 405//517 401//520 +f 454//521 442//515 483//513 +f 454//521 483//513 498//516 +f 401//520 405//517 442//515 +f 401//520 442//515 454//521 +f 498//516 518//496 526//495 +f 498//516 526//495 521//522 +f 521//522 526//495 527//494 +f 521//522 527//494 523//523 +f 462//524 454//521 498//516 +f 462//524 498//516 521//522 +f 403//525 401//520 454//521 +f 403//525 454//521 462//524 +f 403//525 365//519 367//518 +f 403//525 367//518 401//520 +f 369//487 363//485 365//519 +f 369//487 365//519 376//526 +f 376//526 365//519 403//525 +f 376//526 403//525 418//527 +f 418//527 403//525 462//524 +f 418//527 462//524 468//528 +f 462//524 521//522 523//523 +f 462//524 523//523 468//528 +f 523//523 527//494 524//493 +f 523//523 524//493 506//529 +f 466//530 468//528 
523//523 +f 466//530 523//523 506//529 +f 435//531 418//527 468//528 +f 435//531 468//528 466//530 +f 398//532 376//526 418//527 +f 398//532 418//527 435//531 +f 378//533 369//487 376//526 +f 378//533 376//526 398//532 +f 372//486 369//487 378//533 +f 372//486 378//533 380//534 +f 400//488 372//486 380//534 +f 400//488 380//534 410//535 +f 439//489 400//488 410//535 +f 439//489 410//535 436//536 +f 458//490 439//489 436//536 +f 458//490 436//536 456//537 +f 487//538 508//491 458//490 +f 487//538 458//490 456//537 +f 522//492 508//491 487//538 +f 522//492 487//538 504//539 +f 524//493 522//492 504//539 +f 524//493 504//539 501//540 +f 506//529 524//493 501//540 +f 506//529 501//540 485//541 +f 466//530 506//529 485//541 +f 466//530 485//541 461//542 +f 435//531 466//530 461//542 +f 435//531 461//542 440//543 +f 398//532 435//531 440//543 +f 398//532 440//543 415//544 +f 480//545 485//541 501//540 +f 480//545 501//540 492//546 +f 501//540 504//539 494//547 +f 501//540 494//547 492//546 +f 494//547 504//539 487//538 +f 494//547 487//538 479//548 +f 478//549 480//545 492//546 +f 478//549 492//546 494//547 +f 478//549 494//547 479//548 +f 461//542 485//541 480//545 +f 461//542 480//545 460//550 +f 479//548 487//538 456//537 +f 479//548 456//537 455//551 +f 460//550 480//545 478//549 +f 460//550 478//549 459//552 +f 459//552 478//549 479//548 +f 459//552 479//548 455//551 +f 459//552 455//551 447//553 +f 455//551 456//537 436//536 +f 455//551 436//536 434//554 +f 434//554 436//536 410//535 +f 434//554 410//535 414//555 +f 441//556 447//553 427//557 +f 427//557 447//553 455//551 +f 427//557 455//551 434//554 +f 441//556 440//543 461//542 +f 441//556 461//542 460//550 +f 441//556 460//550 459//552 +f 441//556 459//552 447//553 +f 415//544 440//543 441//556 +f 415//544 441//556 420//558 +f 420//558 441//556 427//557 +f 378//533 398//532 415//544 +f 378//533 415//544 391//559 +f 420//558 427//557 417//560 +f 417//560 427//557 434//554 +f 417//560 434//554 414//555 +f 414//555 410//535 380//534 +f 414//555 380//534 392//561 +f 412//562 420//558 417//560 +f 380//534 378//533 391//559 +f 380//534 391//559 392//561 +f 392//561 391//559 412//562 +f 412//562 417//560 414//555 +f 412//562 414//555 392//561 +f 391//559 415//544 420//558 +f 391//559 420//558 412//562 +f 543//563 553//564 541//565 +f 543//563 541//565 538//566 +f 564//567 543//563 538//566 +f 564//567 538//566 545//568 +f 564//567 569//569 549//570 +f 564//567 549//570 543//563 +f 582//571 564//567 545//568 +f 582//571 545//568 580//572 +f 569//569 572//573 544//574 +f 569//569 544//574 549//570 +f 582//571 586//575 569//569 +f 582//571 569//569 564//567 +f 586//575 585//576 572//573 +f 586//575 572//573 569//569 +f 613//577 582//571 580//572 +f 613//577 580//572 612//578 +f 644//579 613//577 612//578 +f 644//579 612//578 650//580 +f 613//577 614//581 586//575 +f 613//577 586//575 582//571 +f 614//581 611//582 585//576 +f 614//581 585//576 586//575 +f 673//583 644//579 650//580 +f 673//583 650//580 681//584 +f 680//585 673//583 681//584 +f 680//585 681//584 683//586 +f 676//587 682//588 659//589 +f 676//587 659//589 669//590 +f 676//587 669//590 673//583 +f 676//587 673//583 680//585 +f 661//591 680//585 683//586 +f 661//591 683//586 666//592 +f 661//591 655//593 676//587 +f 661//591 676//587 680//585 +f 625//594 661//591 666//592 +f 625//594 666//592 628//595 +f 655//593 658//596 682//588 +f 655//593 682//588 676//587 +f 627//597 624//598 658//596 +f 627//597 658//596 655//593 +f 627//597 655//593 661//591 +f 627//597 661//591 625//594 +f 
588//599 625//594 628//595 +f 588//599 628//595 584//600 +f 553//564 588//599 584//600 +f 553//564 584//600 541//565 +f 588//599 590//601 627//597 +f 588//599 627//597 625//594 +f 558//602 590//601 588//599 +f 558//602 588//599 553//564 +f 549//570 558//602 553//564 +f 549//570 553//564 543//563 +f 544//574 561//603 558//602 +f 544//574 558//602 549//570 +f 590//601 594//604 624//598 +f 590//601 624//598 627//597 +f 561//603 594//604 590//601 +f 561//603 590//601 558//602 +f 600//605 620//606 624//598 +f 600//605 624//598 594//604 +f 620//606 662//607 658//596 +f 620//606 658//596 624//598 +f 626//608 667//609 662//607 +f 626//608 662//607 620//606 +f 667//609 668//610 691//611 +f 667//609 691//611 687//612 +f 668//610 660//613 689//614 +f 668//610 689//614 691//611 +f 629//615 619//616 660//613 +f 629//615 660//613 668//610 +f 629//615 668//610 667//609 +f 629//615 667//609 626//608 +f 592//617 626//608 620//606 +f 592//617 620//606 600//605 +f 592//617 593//618 629//615 +f 592//617 629//615 626//608 +f 593//618 587//619 619//616 +f 593//618 619//616 629//615 +f 548//620 537//621 587//619 +f 548//620 587//619 593//618 +f 555//622 548//620 593//618 +f 555//622 593//618 592//617 +f 562//623 600//605 594//604 +f 562//623 594//604 561//603 +f 532//624 529//625 537//621 +f 532//624 537//621 548//620 +f 562//623 555//622 592//617 +f 562//623 592//617 600//605 +f 540//626 532//624 548//620 +f 540//626 548//620 555//622 +f 562//623 542//627 540//626 +f 562//623 540//626 555//622 +f 560//628 562//623 561//603 +f 560//628 561//603 544//574 +f 573//629 560//628 544//574 +f 573//629 544//574 572//573 +f 560//628 557//630 542//627 +f 560//628 542//627 562//623 +f 565//631 557//630 560//628 +f 565//631 560//628 573//629 +f 583//632 573//629 572//573 +f 583//632 572//573 585//576 +f 611//582 607//633 583//632 +f 611//582 583//632 585//576 +f 583//632 576//634 565//631 +f 583//632 565//631 573//629 +f 605//635 576//634 583//632 +f 605//635 583//632 607//633 +f 669//590 641//636 644//579 +f 669//590 644//579 673//583 +f 641//636 614//581 613//577 +f 641//636 613//577 644//579 +f 659//589 636//637 641//636 +f 659//589 641//636 669//590 +f 636//637 611//582 614//581 +f 636//637 614//581 641//636 +f 632//638 607//633 611//582 +f 632//638 611//582 636//637 +f 632//638 631//639 605//635 +f 632//638 605//635 607//633 +f 631//639 634//640 604//641 +f 631//639 604//641 605//635 +f 659//589 648//642 632//638 +f 659//589 632//638 636//637 +f 672//643 648//642 659//589 +f 672//643 659//589 682//588 +f 662//607 672//643 682//588 +f 662//607 682//588 658//596 +f 648//642 646//644 631//639 +f 648//642 631//639 632//638 +f 646//644 657//645 634//640 +f 646//644 634//640 631//639 +f 687//612 678//646 662//607 +f 687//612 662//607 667//609 +f 678//646 671//647 672//643 +f 678//646 672//643 662//607 +f 671//647 646//644 648//642 +f 671//647 648//642 672//643 +f 670//648 657//645 646//644 +f 670//648 646//644 671//647 +f 684//649 670//648 671//647 +f 684//649 671//647 678//646 +f 693//650 684//649 678//646 +f 693//650 678//646 687//612 +f 691//611 695//651 693//650 +f 691//611 693//650 687//612 +f 689//614 694//652 695//651 +f 689//614 695//651 691//611 +f 685//653 688//654 694//652 +f 685//653 694//652 689//614 +f 660//613 654//655 685//653 +f 660//613 685//653 689//614 +f 618//656 654//655 660//613 +f 618//656 660//613 619//616 +f 587//619 599//657 618//656 +f 587//619 618//656 619//616 +f 550//658 599//657 587//619 +f 550//658 587//619 537//621 +f 529//625 535//659 550//658 +f 529//625 550//658 537//621 +f 529//625 
530//660 533//661 +f 529//625 533//661 535//659 +f 532//624 531//662 530//660 +f 532//624 530//660 529//625 +f 540//626 539//663 531//662 +f 540//626 531//662 532//624 +f 547//664 539//663 540//626 +f 547//664 540//626 542//627 +f 557//630 551//665 547//664 +f 557//630 547//664 542//627 +f 546//666 551//665 557//630 +f 546//666 557//630 565//631 +f 576//634 567//667 546//666 +f 576//634 546//666 565//631 +f 604//641 567//667 576//634 +f 604//641 576//634 605//635 +f 664//668 649//669 657//645 +f 664//668 657//645 670//648 +f 649//669 635//670 634//640 +f 649//669 634//640 657//645 +f 635//670 608//671 604//641 +f 635//670 604//641 634//640 +f 608//671 581//672 567//667 +f 608//671 567//667 604//641 +f 581//672 568//673 546//666 +f 581//672 546//666 567//667 +f 568//673 574//674 551//665 +f 568//673 551//665 546//666 +f 606//675 568//673 581//672 +f 606//675 581//672 608//671 +f 633//676 606//675 608//671 +f 633//676 608//671 635//670 +f 633//676 635//670 649//669 +f 633//676 649//669 664//668 +f 684//649 674//677 664//668 +f 684//649 664//668 670//648 +f 693//650 686//678 674//677 +f 693//650 674//677 684//649 +f 674//677 638//679 633//676 +f 674//677 633//676 664//668 +f 574//674 575//680 547//664 +f 574//674 547//664 551//665 +f 638//679 609//681 606//675 +f 638//679 606//675 633//676 +f 606//675 609//681 574//674 +f 606//675 574//674 568//673 +f 609//681 615//682 575//680 +f 609//681 575//680 574//674 +f 575//680 559//683 539//663 +f 575//680 539//663 547//664 +f 651//684 615//682 609//681 +f 651//684 609//681 638//679 +f 651//684 638//679 674//677 +f 651//684 674//677 686//678 +f 695//651 690//685 686//678 +f 695//651 686//678 693//650 +f 694//652 692//686 690//685 +f 694//652 690//685 695//651 +f 690//685 653//687 651//684 +f 690//685 651//684 686//678 +f 603//688 559//683 575//680 +f 603//688 575//680 615//682 +f 653//687 603//688 615//682 +f 653//687 615//682 651//684 +f 559//683 536//689 531//662 +f 559//683 531//662 539//663 +f 536//689 534//690 530//660 +f 536//689 530//660 531//662 +f 595//691 536//689 559//683 +f 595//691 559//683 603//688 +f 652//692 595//691 603//688 +f 652//692 603//688 653//687 +f 652//692 653//687 690//685 +f 652//692 690//685 692//686 +f 688//654 679//693 692//686 +f 688//654 692//686 694//652 +f 679//693 639//694 652//692 +f 679//693 652//692 692//686 +f 639//694 589//695 595//691 +f 639//694 595//691 652//692 +f 595//691 589//695 534//690 +f 595//691 534//690 536//689 +f 534//690 552//696 533//661 +f 534//690 533//661 530//660 +f 591//697 552//696 534//690 +f 591//697 534//690 589//695 +f 622//698 591//697 589//695 +f 622//698 589//695 639//694 +f 656//699 622//698 639//694 +f 656//699 639//694 679//693 +f 677//700 656//699 679//693 +f 677//700 679//693 688//654 +f 685//653 675//701 677//700 +f 685//653 677//700 688//654 +f 654//655 647//702 675//701 +f 654//655 675//701 685//653 +f 618//656 621//703 647//702 +f 618//656 647//702 654//655 +f 599//657 601//704 621//703 +f 599//657 621//703 618//656 +f 570//705 601//704 599//657 +f 570//705 599//657 550//658 +f 535//659 554//706 570//705 +f 535//659 570//705 550//658 +f 533//661 556//707 554//706 +f 533//661 554//706 535//659 +f 552//696 571//708 556//707 +f 552//696 556//707 533//661 +f 591//697 596//709 571//708 +f 591//697 571//708 552//696 +f 622//698 617//710 596//709 +f 622//698 596//709 591//697 +f 656//699 642//711 617//710 +f 656//699 617//710 622//698 +f 577//712 566//713 556//707 +f 577//712 556//707 571//708 +f 556//707 566//713 563//714 +f 556//707 563//714 554//706 +f 563//714 578//715 
570//705 +f 563//714 570//705 554//706 +f 579//716 566//713 577//712 +f 579//716 578//715 563//714 +f 579//716 563//714 566//713 +f 596//709 597//717 577//712 +f 596//709 577//712 571//708 +f 578//715 602//718 601//704 +f 578//715 601//704 570//705 +f 597//717 598//719 579//716 +f 597//717 579//716 577//712 +f 598//719 602//718 578//715 +f 598//719 578//715 579//716 +f 598//719 610//720 602//718 +f 602//718 623//721 621//703 +f 602//718 621//703 601//704 +f 623//721 643//722 647//702 +f 623//721 647//702 621//703 +f 616//723 630//724 610//720 +f 630//724 623//721 602//718 +f 630//724 602//718 610//720 +f 616//723 597//717 596//709 +f 616//723 596//709 617//710 +f 616//723 610//720 598//719 +f 616//723 598//719 597//717 +f 642//711 637//725 616//723 +f 642//711 616//723 617//710 +f 637//725 630//724 616//723 +f 677//700 665//726 642//711 +f 677//700 642//711 656//699 +f 637//725 640//727 630//724 +f 640//727 643//722 623//721 +f 640//727 623//721 630//724 +f 643//722 663//728 675//701 +f 643//722 675//701 647//702 +f 645//729 640//727 637//725 +f 675//701 663//728 665//726 +f 675//701 665//726 677//700 +f 663//728 645//729 665//726 +f 645//729 663//728 643//722 +f 645//729 643//722 640//727 +f 665//726 645//729 637//725 +f 665//726 637//725 642//711 +f 1309//730 1326//731 1319//732 +f 1309//730 1319//732 1314//732 +f 854//733 843//734 859//735 +f 854//733 859//735 862//735 +f 912//736 902//737 898//738 +f 912//736 898//738 916//736 +f 988//739 963//740 953//741 +f 988//739 953//741 966//742 +f 1057//743 1084//744 1076//745 +f 1057//743 1076//745 1048//746 +f 1089//747 1100//748 1084//744 +f 1089//747 1084//744 1057//743 +f 904//749 880//750 884//751 +f 904//749 884//751 910//752 +f 739//753 757//754 753//755 +f 739//753 753//755 737//755 +f 1019//756 1000//757 1008//758 +f 1019//756 1008//758 1022//756 +f 1029//759 1031//760 1041//761 +f 1029//759 1041//761 1039//761 +f 803//762 791//763 788//763 +f 803//762 788//763 800//764 +f 892//765 876//766 875//767 +f 892//765 875//767 889//768 +f 878//769 894//770 891//771 +f 878//769 891//771 873//772 +f 951//773 949//774 933//775 +f 951//773 933//775 935//775 +f 930//776 928//776 914//777 +f 930//776 914//777 913//778 +f 1109//779 1112//780 1128//781 +f 1109//779 1128//781 1125//782 +f 864//783 838//784 848//784 +f 864//783 848//784 868//785 +f 940//786 926//787 923//787 +f 940//786 923//787 937//788 +f 983//789 1011//790 1016//791 +f 983//789 1016//791 995//792 +f 846//793 817//794 843//734 +f 846//793 843//734 854//733 +f 902//737 892//765 889//768 +f 902//737 889//768 898//738 +f 966//742 953//741 945//795 +f 966//742 945//795 948//795 +f 907//796 925//797 921//798 +f 907//796 921//798 909//799 +f 857//800 860//801 878//769 +f 857//800 878//769 873//772 +f 1179//802 1155//803 1151//804 +f 1179//802 1151//804 1175//802 +f 998//805 977//806 963//740 +f 998//805 963//740 988//739 +f 931//807 912//736 916//736 +f 931//807 916//736 932//808 +f 813//809 834//810 844//811 +f 813//809 844//811 819//812 +f 1167//813 1159//814 1163//815 +f 1167//813 1163//815 1170//816 +f 1014//817 993//818 985//819 +f 1014//817 985//819 1004//820 +f 820//821 801//764 811//822 +f 820//821 811//822 826//823 +f 841//824 852//825 860//801 +f 841//824 860//801 857//800 +f 861//826 863//826 877//827 +f 861//826 877//827 879//828 +f 1098//829 1071//830 1095//831 +f 1098//829 1095//831 1106//832 +f 975//833 1002//834 1011//790 +f 975//833 1011//790 983//789 +f 959//835 970//836 1002//834 +f 959//835 1002//834 975//833 +f 883//837 907//796 909//799 +f 883//837 909//799 887//837 
+f 796//838 806//839 830//840 +f 796//838 830//840 822//841 +f 839//842 836//842 805//843 +f 839//842 805//843 804//844 +f 1030//845 1019//756 1022//756 +f 1030//845 1022//756 1033//845 +f 896//846 918//847 917//848 +f 896//846 917//848 899//846 +f 1273//849 1278//849 1268//850 +f 1273//849 1268//850 1259//850 +f 1323//851 1317//852 1297//852 +f 1323//851 1297//852 1299//853 +f 832//854 815//855 817//794 +f 832//854 817//794 846//793 +f 1004//820 985//819 977//806 +f 1004//820 977//806 998//805 +f 947//856 950//856 934//857 +f 947//856 934//857 929//857 +f 1218//858 1207//859 1199//860 +f 1218//858 1199//860 1202//860 +f 925//797 942//861 939//862 +f 925//797 939//862 921//798 +f 1159//814 1147//863 1143//864 +f 1159//814 1143//864 1163//815 +f 1018//865 1020//865 1003//866 +f 1018//865 1003//866 1001//867 +f 894//770 901//868 897//869 +f 894//770 897//869 891//771 +f 955//870 964//871 950//856 +f 955//870 950//856 947//856 +f 961//872 990//873 964//871 +f 961//872 964//871 955//870 +f 755//874 734//875 740//876 +f 755//874 740//876 748//876 +f 809//877 824//878 834//810 +f 809//877 834//810 813//809 +f 979//879 996//880 990//873 +f 979//879 990//873 961//872 +f 1147//863 1130//881 1127//882 +f 1147//863 1127//882 1143//864 +f 880//750 864//783 868//785 +f 880//750 868//785 884//751 +f 968//883 940//786 937//788 +f 968//883 937//788 957//884 +f 995//792 1016//791 1006//885 +f 995//792 1006//885 987//886 +f 798//887 793//887 808//888 +f 798//887 808//888 810//889 +f 737//755 753//755 751//890 +f 737//755 751//890 735//890 +f 1241//891 1243//892 1219//893 +f 1241//891 1219//893 1217//894 +f 842//895 840//895 856//896 +f 842//895 856//896 858//897 +f 1063//898 1080//899 1096//900 +f 1063//898 1096//900 1069//900 +f 906//901 905//901 927//902 +f 906//901 927//902 924//903 +f 936//904 938//904 958//905 +f 936//904 958//905 956//906 +f 991//907 989//908 967//909 +f 991//907 967//909 965//909 +f 1234//910 1232//911 1224//912 +f 1234//910 1224//912 1226//913 +f 1054//914 1052//915 1062//916 +f 1054//914 1062//916 1060//916 +f 971//917 969//917 1001//867 +f 971//917 1001//867 1003//866 +f 1223//918 1195//919 1191//920 +f 1223//918 1191//920 1211//918 +f 915//921 929//857 934//857 +f 915//921 934//857 919//922 +f 851//923 849//924 829//925 +f 851//923 829//925 831//925 +f 831//925 829//925 821//926 +f 831//925 821//926 823//926 +f 1025//927 1023//927 1009//928 +f 1025//927 1009//928 1012//929 +f 944//930 946//930 928//776 +f 944//930 928//776 930//776 +f 1031//760 1029//759 1020//865 +f 1031//760 1020//865 1018//865 +f 1025//927 1026//931 1032//931 +f 1025//927 1032//931 1023//927 +f 770//932 772//932 780//933 +f 770//932 780//933 782//933 +s 0 +f 1260//934 1269//934 1263//934 +f 1315//935 1320//935 1311//935 +s 1 +f 735//890 751//890 746//936 +f 735//890 746//936 742//936 +f 906//901 882//937 881//938 +f 906//901 881//938 905//901 +f 886//939 908//940 911//940 +f 886//939 911//940 885//939 +f 814//941 812//941 818//942 +f 814//941 818//942 816//942 +f 835//943 833//944 847//945 +f 835//943 847//945 845//945 +f 984//946 986//947 978//948 +f 984//946 978//948 976//949 +f 1007//950 1005//951 999//952 +f 1007//950 999//952 997//952 +f 1132//953 1135//953 1116//954 +f 1132//953 1116//954 1118//955 +f 1233//956 1248//957 1244//958 +f 1233//956 1244//958 1225//959 +f 881//938 882//937 866//960 +f 881//938 866//960 865//960 +f 924//903 927//902 941//961 +f 924//903 941//961 943//962 +f 1242//963 1213//964 1207//859 +f 1242//963 1207//859 1218//858 +f 1226//913 1224//912 1214//965 +f 1226//913 1214//965 
1212//965 +f 1013//966 1010//966 1015//967 +f 1013//966 1015//967 1017//968 +f 816//942 818//942 840//895 +f 816//942 840//895 842//895 +f 845//945 847//945 855//969 +f 845//945 855//969 853//970 +f 1201//971 1203//971 1187//972 +f 1201//971 1187//972 1185//972 +f 888//973 890//973 896//846 +f 888//973 896//846 899//846 +f 965//909 967//909 949//774 +f 965//909 949//774 951//773 +f 952//974 954//974 946//930 +f 952//974 946//930 944//930 +f 914//777 900//975 903//976 +f 914//777 903//976 913//778 +f 911//940 908//940 920//977 +f 911//940 920//977 922//978 +f 858//897 856//896 872//979 +f 858//897 872//979 874//979 +f 895//980 893//981 903//976 +f 895//980 903//976 900//975 +f 810//889 808//888 812//941 +f 810//889 812//941 814//941 +f 825//982 827//982 833//944 +f 825//982 833//944 835//943 +f 997//952 999//952 989//908 +f 997//952 989//908 991//907 +f 976//949 978//948 960//983 +f 976//949 960//983 962//984 +f 1066//985 1045//986 1055//987 +f 1066//985 1055//987 1074//988 +f 1149//989 1150//989 1139//990 +f 1149//989 1139//990 1137//991 +f 1210//992 1208//993 1228//994 +f 1210//992 1228//994 1230//994 +f 943//962 941//961 969//917 +f 943//962 969//917 971//917 +f 1133//995 1119//996 1122//997 +f 1133//995 1122//997 1138//998 +f 956//906 958//905 974//999 +f 956//906 974//999 972//1000 +f 992//1001 994//1001 986//947 +f 992//1001 986//947 984//946 +f 1254//1002 1255//1003 1265//1004 +f 1293//1005 1316//1005 1322//1006 +f 1293//1005 1322//1006 1303//1007 +f 1229//1008 1253//1009 1258//1010 +f 1229//1008 1258//1010 1237//1011 +f 761//1012 738//1013 736//1014 +f 761//1012 736//1014 759//1014 +f 870//1015 869//1016 849//924 +f 870//1015 849//924 851//923 +f 754//1017 760//1018 756//1019 +f 754//1017 756//1019 752//1019 +f 744//1020 767//1021 757//754 +f 744//1020 757//754 739//753 +f 743//1022 738//1013 761//1012 +f 743//1022 761//1012 763//1023 +f 767//1021 744//1020 771//1024 +f 767//1021 771//1024 775//1025 +f 1220//1026 1253//1009 1229//1008 +f 1220//1026 1229//1008 1209//1027 +f 1090//1028 1088//1029 1056//1030 +f 1090//1028 1056//1030 1058//1031 +f 768//1032 776//1033 779//1033 +f 768//1032 779//1033 765//1032 +s 0 +f 1295//1034 1287//1034 1282//1034 +s 1 +f 1284//1035 1292//1036 1294//1036 +f 1284//1035 1294//1036 1280//1037 +f 1081//1038 1083//1039 1099//1040 +f 1081//1038 1099//1040 1097//1041 +f 1212//965 1214//965 1204//1042 +f 1212//965 1204//1042 1206//1043 +f 1290//1044 1286//1045 1275//1046 +f 1290//1044 1275//1046 1276//1046 +f 1323//851 1299//853 1301//1047 +f 1323//851 1301//1047 1325//1047 +f 828//1048 802//762 801//764 +f 828//1048 801//764 820//821 +f 775//1025 771//1024 783//1049 +f 775//1025 783//1049 784//1050 +f 784//1050 783//1049 792//1051 +f 784//1050 792//1051 805//843 +f 806//839 837//1052 850//1053 +f 806//839 850//1053 830//840 +f 865//960 866//960 836//842 +f 865//960 836//842 839//842 +f 1262//1054 1239//1055 1235//1056 +f 1262//1054 1235//1056 1250//1057 +f 1205//1058 1216//1059 1200//1060 +f 1205//1058 1200//1060 1197//1060 +f 901//868 915//921 919//922 +f 901//868 919//922 897//869 +f 867//1061 883//837 887//837 +f 867//1061 887//837 871//1062 +f 1086//1063 1059//1064 1051//1065 +f 1086//1063 1051//1065 1078//1066 +f 1102//1067 1091//1067 1059//1064 +f 1102//1067 1059//1064 1086//1063 +f 1152//1068 1176//1069 1172//1069 +f 1152//1068 1172//1069 1148//1070 +f 1256//1071 1223//918 1211//918 +f 1256//1071 1211//918 1231//1072 +f 752//1019 756//1019 750//1073 +f 752//1019 750//1073 747//1073 +f 1040//1074 1030//845 1033//845 +f 1040//1074 1033//845 1037//1075 
+f 758//1076 762//1076 760//1018 +f 758//1076 760//1018 754//1017 +f 1327//1077 1305//1077 1312//1078 +f 1327//1077 1312//1078 1321//1078 +f 1183//1079 1167//813 1170//816 +f 1183//1079 1170//816 1186//1080 +f 1246//1081 1227//1082 1213//964 +f 1246//1081 1213//964 1242//963 +f 1074//988 1055//987 1061//1083 +f 1074//988 1061//1083 1082//1084 +f 1114//1085 1111//1086 1127//882 +f 1114//1085 1127//882 1130//881 +f 837//1052 867//1061 871//1062 +f 837//1052 871//1062 850//1053 +f 1117//1087 1134//1088 1136//1089 +f 1117//1087 1136//1089 1120//1090 +f 1078//1066 1051//1065 1045//986 +f 1078//1066 1045//986 1066//985 +f 819//812 844//811 852//825 +f 819//812 852//825 841//824 +f 1156//1091 1165//1092 1168//1093 +f 1156//1091 1168//1093 1160//1094 +f 1093//1095 1104//1096 1112//780 +f 1093//1095 1112//780 1109//779 +f 1155//803 1133//995 1138//998 +f 1155//803 1138//998 1151//804 +f 1202//860 1199//860 1183//1079 +f 1202//860 1183//1079 1186//1080 +f 1250//1057 1235//1056 1227//1082 +f 1250//1057 1227//1082 1246//1081 +f 1008//758 981//1097 993//818 +f 1008//758 993//818 1014//817 +f 874//979 872//979 890//973 +f 874//979 890//973 888//973 +f 1195//919 1179//802 1175//802 +f 1195//919 1175//802 1191//920 +f 1119//996 1091//1067 1102//1067 +f 1119//996 1102//1067 1122//997 +f 1043//1098 1064//1099 1072//1100 +f 1043//1098 1072//1100 1053//1101 +f 1237//1011 1258//1010 1248//957 +f 1237//1011 1248//957 1233//956 +f 796//838 822//841 824//878 +f 796//838 824//878 809//877 +f 1144//1102 1156//1091 1160//1094 +f 1144//1102 1160//1094 1140//1103 +f 1069//900 1096//900 1104//1096 +f 1069//900 1104//1096 1093//1095 +f 1036//1104 1034//1104 1047//1105 +f 1036//1104 1047//1105 1050//1106 +f 1266//1107 1256//1071 1262//1054 +f 1266//1107 1262//1054 1270//1107 +f 853//970 855//969 863//826 +f 853//970 863//826 861//826 +f 770//932 743//1022 763//1023 +f 770//932 763//1023 772//932 +f 1038//1108 1057//743 1048//746 +f 1038//1108 1048//746 1035//1109 +f 1028//1110 1038//1108 1035//1109 +f 1028//1110 1035//1109 1027//1111 +f 1274//1112 1285//1035 1281//1113 +f 1274//1112 1281//1113 1272//1112 +f 1283//1114 1288//1114 1278//849 +f 1283//1114 1278//849 1273//849 +f 1165//1092 1181//1115 1184//1116 +f 1165//1092 1184//1116 1168//1093 +f 1225//959 1244//958 1240//1117 +f 1225//959 1240//1117 1215//1118 +f 1053//1101 1072//1100 1080//899 +f 1053//1101 1080//899 1063//898 +f 1059//1064 1040//1074 1037//1075 +f 1059//1064 1037//1075 1051//1065 +f 1192//1119 1220//1026 1209//1027 +f 1192//1119 1209//1027 1188//1120 +f 1230//994 1228//994 1236//1121 +f 1230//994 1236//1121 1238//1121 +f 1048//746 1076//745 1064//1099 +f 1048//746 1064//1099 1043//1098 +f 1215//1118 1240//1117 1216//1059 +f 1215//1118 1216//1059 1205//1058 +f 886//939 885//939 869//1016 +f 886//939 869//1016 870//1015 +f 948//795 945//795 931//807 +f 948//795 931//807 932//808 +f 1134//1088 1152//1068 1148//1070 +f 1134//1088 1148//1070 1136//1089 +f 1197//1060 1200//1060 1184//1116 +f 1197//1060 1184//1116 1181//1115 +f 826//823 811//822 815//855 +f 826//823 815//855 832//854 +f 1082//1084 1061//1083 1071//830 +f 1082//1084 1071//830 1098//829 +f 1128//781 1144//1102 1140//1103 +f 1128//781 1140//1103 1125//782 +f 1176//1069 1192//1119 1188//1120 +f 1176//1069 1188//1120 1172//1069 +f 1089//747 1117//1087 1120//1090 +f 1089//747 1120//1090 1100//748 +f 987//886 1006//885 996//880 +f 987//886 996//880 979//879 +f 1238//1121 1236//1121 1232//911 +f 1238//1121 1232//911 1234//910 +f 1261//1122 1264//1123 1251//1124 +f 1261//1122 1251//1124 1249//1125 +f 
1206//1043 1204//1042 1196//1126 +f 1206//1043 1196//1126 1198//1126 +f 776//1033 785//1127 787//1127 +f 776//1033 787//1127 779//1033 +f 1157//1128 1145//1129 1146//1129 +f 1157//1128 1146//1129 1158//1128 +f 1141//1130 1161//1131 1162//1132 +f 1141//1130 1162//1132 1142//1130 +f 1097//1041 1099//1040 1107//1133 +f 1097//1041 1107//1133 1105//1134 +f 962//984 960//983 954//974 +f 962//984 954//974 952//974 +f 1085//1135 1087//1136 1079//1137 +f 1085//1135 1079//1137 1077//1137 +f 1101//1138 1103//1139 1087//1136 +f 1101//1138 1087//1136 1085//1135 +f 980//1140 982//1141 994//1001 +f 980//1140 994//1001 992//1001 +f 1177//1142 1153//1143 1154//1143 +f 1177//1142 1154//1143 1178//1142 +f 1149//989 1173//1144 1174//1144 +f 1149//989 1174//1144 1150//989 +f 1002//834 1021//1145 1024//1146 +f 1002//834 1024//1146 1011//790 +f 1094//1147 1092//1147 1108//1148 +f 1094//1147 1108//1148 1110//1149 +f 935//775 933//775 917//848 +f 935//775 917//848 918//847 +f 1039//761 1041//761 1058//1031 +f 1039//761 1058//1031 1056//1030 +f 1105//1134 1107//1133 1115//1150 +f 1105//1134 1115//1150 1113//1150 +f 1021//1145 1028//1110 1027//1111 +f 1021//1145 1027//1111 1024//1146 +f 1221//1151 1222//1151 1255//1003 +f 1221//1151 1255//1003 1254//1002 +s 0 +f 794//1152 799//1152 789//1152 +s 1 +f 781//1153 786//1154 795//1155 +f 781//1153 795//1155 790//1155 +s 0 +f 745//1156 749//1156 741//1156 +s 1 +f 942//861 970//836 959//835 +f 942//861 959//835 939//862 +f 1185//972 1187//972 1171//1157 +f 1185//972 1171//1157 1169//1158 +f 1182//1159 1180//1159 1164//1160 +f 1182//1159 1164//1160 1166//1161 +f 1118//955 1116//954 1088//1029 +f 1118//955 1088//1029 1090//1028 +f 1065//1162 1067//1162 1075//1163 +f 1065//1162 1075//1163 1073//1164 +f 1060//916 1062//916 1068//1165 +f 1060//916 1068//1165 1070//1166 +f 1249//1125 1251//1124 1247//1167 +f 1249//1125 1247//1167 1245//1168 +f 1110//1149 1108//1148 1124//1169 +f 1110//1149 1124//1169 1126//1170 +f 1113//1150 1115//1150 1131//1171 +f 1113//1150 1131//1171 1129//1171 +f 1190//1172 1189//1173 1208//993 +f 1190//1172 1208//993 1210//992 +f 922//978 920//977 938//904 +f 922//978 938//904 936//904 +f 1137//991 1139//990 1123//1174 +f 1137//991 1123//1174 1121//1174 +f 1049//1175 1046//1175 1042//1176 +f 1049//1175 1042//1176 1044//1176 +f 1077//1137 1079//1137 1067//1162 +f 1077//1137 1067//1162 1065//1162 +f 1000//757 973//1177 981//1097 +f 1000//757 981//1097 1008//758 +f 1026//931 1034//1104 1036//1104 +f 1026//931 1036//1104 1032//931 +f 1169//1158 1171//1157 1162//1132 +f 1169//1158 1162//1132 1161//1131 +f 1166//1161 1164//1160 1157//1128 +f 1166//1161 1157//1128 1158//1128 +f 785//1127 807//1178 797//1178 +f 785//1127 797//1178 787//1127 +f 926//787 904//749 910//752 +f 926//787 910//752 923//787 +f 1154//1143 1153//1143 1135//953 +f 1154//1143 1135//953 1132//953 +f 1217//894 1219//893 1203//971 +f 1217//894 1203//971 1201//971 +f 879//828 877//827 893//981 +f 879//828 893//981 895//980 +f 1198//1126 1196//1126 1180//1159 +f 1198//1126 1180//1159 1182//1159 +f 1073//1164 1075//1163 1083//1039 +f 1073//1164 1083//1039 1081//1038 +f 823//926 821//926 827//982 +f 823//926 827//982 825//982 +f 782//933 780//933 788//763 +f 782//933 788//763 791//763 +f 1245//1168 1247//1167 1243//892 +f 1245//1168 1243//892 1241//891 +f 1126//1170 1124//1169 1141//1130 +f 1126//1170 1141//1130 1142//1130 +f 1129//1171 1131//1171 1146//1129 +f 1129//1171 1146//1129 1145//1129 +f 1174//1144 1173//1144 1189//1173 +f 1174//1144 1189//1173 1190//1172 +f 1070//1166 1068//1165 1092//1147 
+f 1070//1166 1092//1147 1094//1147 +f 1121//1174 1123//1174 1103//1139 +f 1121//1174 1103//1139 1101//1138 +f 1044//1176 1042//1176 1052//915 +f 1044//1176 1052//915 1054//914 +f 1017//968 1015//967 1005//951 +f 1017//968 1005//951 1007//950 +f 805//843 792//1051 804//844 +f 1318//852 1291//1179 1289//1179 +f 1318//852 1289//1179 1296//1180 +f 1298//1181 1302//1181 1306//1182 +f 1298//1181 1306//1182 1300//1183 +f 1316//1005 1293//1005 1286//1045 +f 1316//1005 1286//1045 1290//1044 +f 862//735 859//735 875//767 +f 862//735 875//767 876//766 +f 1304//1184 1308//1184 1313//1185 +f 1304//1184 1313//1185 1310//1185 +f 1303//1007 1322//1006 1324//1186 +f 1303//1007 1324//1186 1307//1187 +f 1256//1071 1231//1072 1239//1055 +f 1256//1071 1239//1055 1262//1054 +f 758//1076 769//1032 766//1032 +f 758//1076 766//1032 762//1076 +f 1300//1183 1306//1182 1308//1184 +f 1300//1183 1308//1184 1304//1184 +f 1325//1047 1301//1047 1305//1077 +f 1325//1047 1305//1077 1327//1077 +f 1307//1187 1324//1186 1326//731 +f 1307//1187 1326//731 1309//730 +f 1106//832 1095//831 1111//1086 +f 1106//832 1111//1086 1114//1085 +s 0 +f 764//1188 778//1188 774//1188 +s 1 +f 1289//1179 1291//1179 1277//1189 +f 1289//1179 1277//1189 1279//1189 +f 1279//1189 1277//1189 1267//1107 +f 1279//1189 1267//1107 1271//1107 +f 1252//1190 1274//1112 1272//1112 +f 1252//1190 1272//1112 1257//1190 +f 1193//1191 1194//1192 1222//1151 +f 1193//1191 1222//1151 1221//1151 +f 1177//1142 1178//1142 1194//1192 +f 1177//1142 1194//1192 1193//1191 +f 759//1014 736//1014 734//875 +f 759//1014 734//875 755//874 +f 968//883 957//884 973//1177 +f 968//883 973//1177 1000//757 +f 1276//1046 1275//1046 1254//1002 +f 1276//1046 1254//1002 1265//1004 +f 838//784 802//762 828//1048 +f 838//784 828//1048 848//784 +f 1292//1036 1302//1181 1298//1181 +f 1292//1036 1298//1181 1294//1036 +f 972//1000 974//999 982//1141 +f 972//1000 982//1141 980//1140 +f 773//1193 777//1193 786//1154 +f 773//1193 786//1154 781//1153 +f 2105//1194 2142//1195 2131//1196 +f 2105//1194 2131//1196 2082//1197 +f 2142//1195 2138//1198 2133//1199 +f 2142//1195 2133//1199 2131//1196 +f 2103//1200 2133//1199 2138//1198 +f 2103//1200 2138//1198 2086//1201 +f 2032//1202 2058//1203 2103//1200 +f 2032//1202 2103//1200 2086//1201 +f 1999//1204 1998//1205 2019//1206 +f 1999//1204 2019//1206 2022//1207 +f 2040//1208 2019//1206 1998//1205 +f 2040//1208 1998//1205 2037//1209 +f 2037//1209 2074//1210 2070//1211 +f 2037//1209 2070//1211 2040//1208 +f 2077//1212 2009//1213 2034//1214 +f 2077//1212 2034//1214 2096//1215 +f 2071//1216 2077//1212 2096//1215 +f 2071//1216 2096//1215 2097//1217 +f 2039//1218 2071//1216 2097//1217 +f 2039//1218 2097//1217 2073//1219 +f 2036//1220 1996//1221 2039//1218 +f 2036//1220 2039//1218 2073//1219 +f 2025//1222 2020//1223 1976//1224 +f 2025//1222 1976//1224 1974//1225 +f 2020//1223 2013//1226 1995//1227 +f 2020//1223 1995//1227 1976//1224 +f 2013//1226 2056//1228 2049//1229 +f 2013//1226 2049//1229 1995//1227 +f 2047//1230 2045//1231 1969//1232 +f 2047//1230 1969//1232 2016//1233 +f 2016//1233 2029//1234 2044//1235 +f 2016//1233 2044//1235 2047//1230 +f 2007//1236 2044//1235 2029//1234 +f 2007//1236 2029//1234 1990//1237 +f 1990//1237 1967//1238 1978//1239 +f 1990//1237 1978//1239 2007//1236 +f 1954//1240 1958//1241 1972//1242 +f 1954//1240 1972//1242 1971//1243 +f 1958//1241 1985//1244 1994//1245 +f 1958//1241 1994//1245 1972//1242 +f 1985//1244 2052//1246 2054//1247 +f 1985//1244 2054//1247 1994//1245 +f 2050//1248 1989//1249 1995//1227 +f 2050//1248 1995//1227 
2049//1229 +f 1989//1249 1965//1250 1976//1224 +f 1989//1249 1976//1224 1995//1227 +f 1965//1250 1961//1251 1974//1225 +f 1965//1250 1974//1225 1976//1224 +f 2039//1218 1996//1221 1970//1252 +f 2039//1218 1970//1252 2000//1253 +f 2071//1216 2039//1218 2000//1253 +f 2071//1216 2000//1253 2033//1254 +f 2077//1212 2071//1216 2033//1254 +f 2077//1212 2033//1254 2026//1255 +f 2009//1213 2077//1212 2026//1255 +f 2009//1213 2026//1255 1966//1256 +f 2056//1228 2057//1257 2050//1248 +f 2056//1228 2050//1248 2049//1229 +f 2114//1258 2142//1195 2105//1194 +f 2114//1258 2105//1194 2109//1259 +f 2114//1258 2106//1260 2138//1198 +f 2114//1258 2138//1198 2142//1195 +f 2086//1201 2138//1198 2106//1260 +f 2086//1201 2106//1260 2055//1261 +f 2011//1262 2032//1202 2086//1201 +f 2011//1262 2086//1201 2055//1261 +f 1988//1263 1991//1264 1998//1205 +f 1988//1263 1998//1205 1999//1204 +f 2037//1209 1998//1205 1991//1264 +f 2037//1209 1991//1264 2031//1265 +f 2031//1265 2065//1266 2074//1210 +f 2031//1265 2074//1210 2037//1209 +f 2050//1248 2057//1257 2054//1247 +f 2050//1248 2054//1247 2052//1246 +f 2052//1246 1985//1244 1989//1249 +f 2052//1246 1989//1249 2050//1248 +f 1985//1244 1958//1241 1965//1250 +f 1985//1244 1965//1250 1989//1249 +f 1958//1241 1954//1240 1961//1251 +f 1958//1241 1961//1251 1965//1250 +f 2000//1253 1970//1252 1967//1238 +f 2000//1253 1967//1238 1990//1237 +f 2033//1254 2000//1253 1990//1237 +f 2033//1254 1990//1237 2029//1234 +f 2026//1255 2033//1254 2029//1234 +f 2026//1255 2029//1234 2016//1233 +f 1966//1256 2026//1255 2016//1233 +f 1966//1256 2016//1233 1969//1232 +f 2096//1215 2034//1214 2082//1197 +f 2096//1215 2082//1197 2131//1196 +f 2097//1217 2096//1215 2131//1196 +f 2097//1217 2131//1196 2133//1199 +f 2073//1219 2097//1217 2133//1199 +f 2073//1219 2133//1199 2103//1200 +f 2058//1203 2036//1220 2073//1219 +f 2058//1203 2073//1219 2103//1200 +f 2022//1207 2019//1206 2020//1223 +f 2022//1207 2020//1223 2025//1222 +f 2019//1206 2040//1208 2013//1226 +f 2019//1206 2013//1226 2020//1223 +f 2040//1208 2070//1211 2056//1228 +f 2040//1208 2056//1228 2013//1226 +f 2070//1211 2074//1210 2057//1257 +f 2070//1211 2057//1257 2056//1228 +f 2054//1247 2057//1257 2074//1210 +f 2054//1247 2074//1210 2065//1266 +f 1994//1245 2054//1247 2065//1266 +f 1994//1245 2065//1266 2031//1265 +f 1972//1242 1994//1245 2031//1265 +f 1972//1242 2031//1265 1991//1264 +f 1971//1243 1972//1242 1991//1264 +f 1971//1243 1991//1264 1988//1263 +f 2007//1236 1978//1239 2011//1262 +f 2007//1236 2011//1262 2055//1261 +f 2055//1261 2106//1260 2044//1235 +f 2055//1261 2044//1235 2007//1236 +f 2047//1230 2044//1235 2106//1260 +f 2047//1230 2106//1260 2114//1258 +f 2114//1258 2109//1259 2045//1231 +f 2114//1258 2045//1231 2047//1230 +f 2025//1222 2036//1220 2058//1203 +f 2025//1222 2058//1203 2022//1207 +f 1974//1225 1996//1221 2036//1220 +f 1974//1225 2036//1220 2025//1222 +f 1970//1252 1996//1221 1974//1225 +f 1970//1252 1974//1225 1961//1251 +f 1967//1238 1970//1252 1961//1251 +f 1967//1238 1961//1251 1954//1240 +f 1978//1239 1967//1238 1954//1240 +f 1978//1239 1954//1240 1971//1243 +f 2011//1262 1978//1239 1971//1243 +f 2011//1262 1971//1243 1988//1263 +f 1999//1204 2032//1202 2011//1262 +f 1999//1204 2011//1262 1988//1263 +f 2022//1207 2058//1203 2032//1202 +f 2022//1207 2032//1202 1999//1204 +f 1430//1267 1402//1268 1409//1269 +f 1430//1267 1409//1269 1405//1270 +f 1405//1270 1400//1271 1434//1272 +f 1405//1270 1434//1272 1430//1267 +f 1443//1273 1434//1272 1400//1271 +f 1443//1273 1400//1271 1403//1274 +f 1485//1275 
1443//1273 1403//1274 +f 1485//1275 1403//1274 1451//1276 +f 1451//1276 1489//1277 1526//1278 +f 1451//1276 1526//1278 1485//1275 +f 1489//1277 1535//1279 1554//1280 +f 1489//1277 1554//1280 1526//1278 +f 1563//1281 1554//1280 1535//1279 +f 1563//1281 1535//1279 1546//1282 +f 1542//1283 1563//1281 1546//1282 +f 1542//1283 1546//1282 1533//1284 +f 1533//1284 1497//1285 1508//1286 +f 1533//1284 1508//1286 1542//1283 +f 1465//1287 1476//1288 1541//1289 +f 1465//1287 1541//1289 1594//1290 +f 1509//1291 1541//1289 1476//1288 +f 1509//1291 1476//1288 1471//1292 +f 1471//1292 1467//1293 1492//1294 +f 1471//1292 1492//1294 1509//1291 +f 1538//1295 1492//1294 1467//1293 +f 1538//1295 1467//1293 1507//1296 +f 1507//1296 1544//1297 1565//1298 +f 1507//1296 1565//1298 1538//1295 +f 1575//1299 1565//1298 1544//1297 +f 1575//1299 1544//1297 1564//1300 +f 1577//1301 1575//1299 1564//1300 +f 1577//1301 1564//1300 1569//1302 +f 1569//1302 1550//1303 1561//1304 +f 1569//1302 1561//1304 1577//1301 +f 1522//1305 1561//1304 1550//1303 +f 1522//1305 1550//1303 1515//1306 +f 1547//1307 1493//1308 1435//1309 +f 1547//1307 1435//1309 1464//1310 +f 1493//1308 1486//1311 1449//1312 +f 1493//1308 1449//1312 1435//1309 +f 1486//1311 1474//1313 1441//1314 +f 1486//1311 1441//1314 1449//1312 +f 1474//1313 1483//1315 1458//1316 +f 1474//1313 1458//1316 1441//1314 +f 1483//1315 1549//1317 1505//1318 +f 1483//1315 1505//1318 1458//1316 +f 1549//1317 1570//1319 1557//1320 +f 1549//1317 1557//1320 1505//1318 +f 1570//1319 1566//1321 1553//1322 +f 1570//1319 1553//1322 1557//1320 +f 1566//1321 1551//1323 1537//1324 +f 1566//1321 1537//1324 1553//1322 +f 1551//1323 1510//1325 1504//1326 +f 1551//1323 1504//1326 1537//1324 +f 1561//1304 1522//1305 1517//1327 +f 1561//1304 1517//1327 1560//1328 +f 1560//1328 1573//1329 1577//1301 +f 1560//1328 1577//1301 1561//1304 +f 1575//1299 1577//1301 1573//1329 +f 1575//1299 1573//1329 1578//1330 +f 1578//1330 1568//1331 1565//1298 +f 1578//1330 1565//1298 1575//1299 +f 1515//1306 1502//1332 1517//1327 +f 1515//1306 1517//1327 1522//1305 +f 1565//1298 1568//1331 1536//1333 +f 1565//1298 1536//1333 1538//1295 +f 1492//1294 1538//1295 1536//1333 +f 1492//1294 1536//1333 1491//1334 +f 1509//1291 1492//1294 1491//1334 +f 1509//1291 1491//1334 1511//1335 +f 1511//1335 1540//1336 1541//1289 +f 1511//1335 1541//1289 1509//1291 +f 1594//1290 1541//1289 1540//1336 +f 1594//1290 1540//1336 1587//1337 +f 1405//1270 1409//1269 1433//1338 +f 1405//1270 1433//1338 1415//1339 +f 1415//1339 1425//1340 1400//1271 +f 1415//1339 1400//1271 1405//1270 +f 1403//1274 1400//1271 1425//1340 +f 1403//1274 1425//1340 1423//1341 +f 1451//1276 1403//1274 1423//1341 +f 1451//1276 1423//1341 1440//1342 +f 1440//1342 1482//1343 1489//1277 +f 1440//1342 1489//1277 1451//1276 +f 1482//1343 1539//1344 1535//1279 +f 1482//1343 1535//1279 1489//1277 +f 1546//1282 1535//1279 1539//1344 +f 1546//1282 1539//1344 1543//1345 +f 1533//1284 1546//1282 1543//1345 +f 1533//1284 1543//1345 1532//1346 +f 1532//1346 1500//1347 1497//1285 +f 1532//1346 1497//1285 1533//1284 +f 1510//1325 1517//1327 1502//1332 +f 1510//1325 1502//1332 1504//1326 +f 1560//1328 1517//1327 1510//1325 +f 1560//1328 1510//1325 1551//1323 +f 1551//1323 1566//1321 1573//1329 +f 1551//1323 1573//1329 1560//1328 +f 1578//1330 1573//1329 1566//1321 +f 1578//1330 1566//1321 1570//1319 +f 1570//1319 1549//1317 1568//1331 +f 1570//1319 1568//1331 1578//1330 +f 1536//1333 1568//1331 1549//1317 +f 1536//1333 1549//1317 1483//1315 +f 1491//1334 1536//1333 1483//1315 +f 
1491//1334 1483//1315 1474//1313 +f 1511//1335 1491//1334 1474//1313 +f 1511//1335 1474//1313 1486//1311 +f 1486//1311 1493//1308 1540//1336 +f 1486//1311 1540//1336 1511//1335 +f 1587//1337 1540//1336 1493//1308 +f 1587//1337 1493//1308 1547//1307 +f 1402//1268 1430//1267 1476//1288 +f 1402//1268 1476//1288 1465//1287 +f 1471//1292 1476//1288 1430//1267 +f 1471//1292 1430//1267 1434//1272 +f 1434//1272 1443//1273 1467//1293 +f 1434//1272 1467//1293 1471//1292 +f 1507//1296 1467//1293 1443//1273 +f 1507//1296 1443//1273 1485//1275 +f 1485//1275 1526//1278 1544//1297 +f 1485//1275 1544//1297 1507//1296 +f 1564//1300 1544//1297 1526//1278 +f 1564//1300 1526//1278 1554//1280 +f 1569//1302 1564//1300 1554//1280 +f 1569//1302 1554//1280 1563//1281 +f 1563//1281 1542//1283 1550//1303 +f 1563//1281 1550//1303 1569//1302 +f 1515//1306 1550//1303 1542//1283 +f 1515//1306 1542//1283 1508//1286 +f 1497//1285 1502//1332 1508//1286 +f 1502//1332 1515//1306 1508//1286 +f 1504//1326 1502//1332 1497//1285 +f 1504//1326 1497//1285 1500//1347 +f 1537//1324 1504//1326 1500//1347 +f 1537//1324 1500//1347 1532//1346 +f 1553//1322 1537//1324 1532//1346 +f 1553//1322 1532//1346 1543//1345 +f 1557//1320 1553//1322 1543//1345 +f 1557//1320 1543//1345 1539//1344 +f 1505//1318 1557//1320 1539//1344 +f 1505//1318 1539//1344 1482//1343 +f 1458//1316 1505//1318 1482//1343 +f 1458//1316 1482//1343 1440//1342 +f 1441//1314 1458//1316 1440//1342 +f 1441//1314 1440//1342 1423//1341 +f 1449//1312 1441//1314 1423//1341 +f 1449//1312 1423//1341 1425//1340 +f 1435//1309 1449//1312 1425//1340 +f 1435//1309 1425//1340 1415//1339 +f 1464//1310 1435//1309 1415//1339 +f 1464//1310 1415//1339 1433//1338 +f 2109//1259 2113//1348 2083//1349 +f 2109//1259 2083//1349 2045//1231 +f 2113//1348 2140//1350 2123//1351 +f 2113//1348 2123//1351 2083//1349 +f 2108//1352 2120//1353 2113//1348 +f 2108//1352 2113//1348 2109//1259 +f 2120//1353 2136//1354 2140//1350 +f 2120//1353 2140//1350 2113//1348 +f 2080//1355 2092//1356 2120//1353 +f 2080//1355 2120//1353 2108//1352 +f 2092//1356 2128//1357 2136//1354 +f 2092//1356 2136//1354 2120//1353 +f 2128//1357 2092//1356 2083//1349 +f 2128//1357 2083//1349 2123//1351 +f 2092//1356 2080//1355 2045//1231 +f 2092//1356 2045//1231 2083//1349 +f 2136//1354 2128//1357 2123//1351 +f 2136//1354 2123//1351 2140//1350 +f 2105//1194 2078//1358 2068//1359 +f 2105//1194 2068//1359 2109//1259 +f 2105//1194 2082//1197 2100//1360 +f 2105//1194 2100//1360 2078//1358 +f 2034//1214 2110//1361 2100//1360 +f 2034//1214 2100//1360 2082//1197 +f 2110//1361 2034//1214 2009//1213 +f 2110//1361 2009//1213 1924//1362 +f 1881//1363 1924//1362 2009//1213 +f 1881//1363 2009//1213 1966//1256 +f 1881//1363 1966//1256 1969//1232 +f 1881//1363 1969//1232 1874//1364 +f 1874//1364 1969//1232 2045//1231 +f 1874//1364 2045//1231 1900//1365 +f 2080//1355 2024//1366 1900//1365 +f 2080//1355 1900//1365 2045//1231 +f 2108//1352 2069//1367 2024//1366 +f 2108//1352 2024//1366 2080//1355 +f 2109//1259 2068//1359 2069//1367 +f 2109//1259 2069//1367 2108//1352 +f 1465//1287 1432//1368 1408//1369 +f 1465//1287 1408//1369 1402//1268 +f 1372//1370 1408//1369 1432//1368 +f 1372//1370 1432//1368 1377//1371 +f 1399//1372 1406//1373 1402//1268 +f 1399//1372 1402//1268 1408//1369 +f 1374//1374 1399//1372 1408//1369 +f 1374//1374 1408//1369 1372//1370 +f 1376//1375 1422//1376 1399//1372 +f 1376//1375 1399//1372 1374//1374 +f 1399//1372 1422//1376 1448//1377 +f 1399//1372 1448//1377 1406//1373 +f 1448//1377 1422//1376 1432//1368 +f 1448//1377 1432//1368 
1465//1287 +f 1377//1371 1432//1368 1422//1376 +f 1377//1371 1422//1376 1376//1375 +f 1376//1375 1374//1374 1372//1370 +f 1376//1375 1372//1370 1377//1371 +f 1448//1377 1528//1378 1437//1379 +f 1448//1377 1437//1379 1406//1373 +f 1528//1378 1448//1377 1465//1287 +f 1528//1378 1465//1287 1594//1290 +f 1406//1373 1437//1379 1409//1269 +f 1406//1373 1409//1269 1402//1268 +f 1412//1380 1433//1338 1409//1269 +f 1412//1380 1409//1269 1446//1381 +f 1446//1381 1409//1269 1437//1379 +f 1446//1381 1437//1379 1431//1382 +f 1437//1379 1481//1383 1456//1384 +f 1437//1379 1456//1384 1431//1382 +f 1528//1378 1555//1385 1481//1383 +f 1528//1378 1481//1383 1437//1379 +f 1531//1386 1481//1383 1555//1385 +f 1531//1386 1555//1385 1590//1387 +f 1531//1386 1475//1388 1456//1384 +f 1531//1386 1456//1384 1481//1383 +f 1528//1378 1594//1290 1630//1389 +f 1528//1378 1630//1389 1555//1385 +f 1630//1389 1648//1390 1590//1387 +f 1630//1389 1590//1387 1555//1385 +f 1618//1391 1590//1387 1648//1390 +f 1618//1391 1648//1390 1658//1392 +f 1590//1387 1618//1391 1583//1393 +f 1590//1387 1583//1393 1531//1386 +f 1454//1394 1475//1388 1531//1386 +f 1454//1394 1531//1386 1495//1395 +f 1480//1396 1519//1397 1524//1398 +f 1480//1396 1524//1398 1487//1399 +f 1490//1400 1480//1396 1487//1399 +f 1490//1400 1487//1399 1498//1401 +f 1472//1402 1488//1403 1475//1388 +f 1472//1402 1475//1388 1454//1394 +f 1498//1401 1523//1404 1521//1405 +f 1498//1401 1521//1405 1490//1400 +f 1496//1406 1583//1393 1488//1403 +f 1496//1406 1488//1403 1472//1402 +f 1519//1397 1521//1405 1523//1404 +f 1519//1397 1523//1404 1524//1398 +f 1583//1393 1496//1406 1495//1395 +f 1583//1393 1495//1395 1531//1386 +f 1641//1407 1618//1391 1658//1392 +f 1641//1407 1658//1392 1676//1408 +f 1618//1391 1641//1407 1608//1409 +f 1618//1391 1608//1409 1583//1393 +f 1583//1393 1608//1409 1585//1410 +f 1583//1393 1585//1410 1488//1403 +f 1676//1408 1686//1411 1653//1412 +f 1676//1408 1653//1412 1641//1407 +f 1653//1412 1629//1413 1608//1409 +f 1653//1412 1608//1409 1641//1407 +f 1690//1414 1654//1415 1668//1416 +f 1690//1414 1668//1416 1699//1417 +f 1699//1417 1668//1416 1696//1418 +f 1699//1417 1696//1418 1720//1419 +f 1730//1420 1690//1414 1699//1417 +f 1730//1420 1699//1417 1734//1421 +f 1720//1419 1740//1422 1734//1421 +f 1720//1419 1734//1421 1699//1417 +f 1752//1423 1734//1421 1740//1422 +f 1752//1423 1740//1422 1755//1424 +f 1750//1425 1730//1420 1734//1421 +f 1750//1425 1734//1421 1752//1423 +f 1629//1413 1610//1426 1585//1410 +f 1629//1413 1585//1410 1608//1409 +f 1654//1415 1690//1414 1682//1427 +f 1654//1415 1682//1427 1637//1428 +f 1722//1429 1682//1427 1690//1414 +f 1722//1429 1690//1414 1730//1420 +f 1730//1420 1750//1425 1747//1430 +f 1730//1420 1747//1430 1722//1429 +f 2069//1367 2041//1431 1983//1432 +f 2069//1367 1983//1432 2024//1366 +f 2041//1431 2015//1433 1957//1434 +f 2041//1431 1957//1434 1983//1432 +f 2015//1433 1977//1435 1937//1436 +f 2015//1433 1937//1436 1957//1434 +f 1886//1437 1916//1438 1940//1439 +f 1886//1437 1940//1439 1898//1440 +f 1851//1441 1886//1437 1898//1440 +f 1851//1441 1898//1440 1860//1442 +f 1860//1442 1831//1443 1824//1444 +f 1860//1442 1824//1444 1851//1441 +f 1801//1445 1796//1446 1824//1444 +f 1801//1445 1824//1444 1831//1443 +f 1771//1447 1768//1448 1784//1449 +f 1771//1447 1784//1449 1788//1450 +f 1768//1448 1769//1451 1781//1452 +f 1768//1448 1781//1452 1784//1449 +f 1796//1446 1792//1453 1804//1454 +f 1796//1446 1804//1454 1824//1444 +f 1804//1454 1834//1455 1851//1441 +f 1804//1454 1851//1441 1824//1444 +f 1886//1437 
1851//1441 1834//1455 +f 1886//1437 1834//1455 1857//1456 +f 1916//1438 1886//1437 1857//1456 +f 1916//1438 1857//1456 1878//1457 +f 1937//1436 1895//1458 1904//1459 +f 1937//1436 1904//1459 1957//1434 +f 1904//1459 1906//1460 1983//1432 +f 1904//1459 1983//1432 1957//1434 +f 2024//1366 1983//1432 1906//1460 +f 2024//1366 1906//1460 1900//1365 +f 1858//1461 1909//1462 1892//1463 +f 1858//1461 1892//1463 1849//1464 +f 1830//1465 1858//1461 1849//1464 +f 1830//1465 1849//1464 1817//1466 +f 1800//1467 1806//1468 1830//1465 +f 1800//1467 1830//1465 1817//1466 +f 1805//1469 1813//1470 1806//1468 +f 1805//1469 1806//1468 1800//1467 +f 1821//1471 1839//1472 1816//1473 +f 1821//1471 1816//1473 1803//1474 +f 1847//1475 1867//1476 1839//1472 +f 1847//1475 1839//1472 1821//1471 +f 1847//1475 1910//1477 1926//1478 +f 1847//1475 1926//1478 1867//1476 +f 2021//1479 1926//1478 1910//1477 +f 2021//1479 1910//1477 1973//1480 +f 2116//1481 2021//1479 1973//1480 +f 2116//1481 1973//1480 2062//1482 +f 2099//1483 2145//1484 2116//1481 +f 2099//1483 2116//1481 2062//1482 +f 2107//1485 2147//1486 2145//1484 +f 2107//1485 2145//1484 2099//1483 +f 2008//1487 2079//1488 2147//1486 +f 2008//1487 2147//1486 2107//1485 +f 1955//1489 2010//1490 2079//1488 +f 1955//1489 2079//1488 2008//1487 +f 1915//1491 1939//1492 2010//1490 +f 1915//1491 2010//1490 1955//1489 +f 1892//1463 1909//1462 1939//1492 +f 1892//1463 1939//1492 1915//1491 +f 1790//1493 1773//1494 1771//1447 +f 1790//1493 1771//1447 1788//1450 +f 1807//1495 1801//1445 1831//1443 +f 1807//1495 1831//1443 1840//1496 +f 1869//1497 1840//1496 1831//1443 +f 1869//1497 1831//1443 1860//1442 +f 1911//1498 1869//1497 1860//1442 +f 1911//1498 1860//1442 1898//1440 +f 1898//1440 1940//1439 1948//1499 +f 1898//1440 1948//1499 1911//1498 +f 1975//1500 1977//1435 2015//1433 +f 1975//1500 2015//1433 2014//1501 +f 2015//1433 2041//1431 2043//1502 +f 2015//1433 2043//1502 2014//1501 +f 2068//1359 2043//1502 2041//1431 +f 2068//1359 2041//1431 2069//1367 +f 1433//1338 1412//1380 1607//1503 +f 1433//1338 1607//1503 1464//1310 +f 1464//1310 1607//1503 1547//1307 +f 1888//1504 1907//1505 1945//1506 +f 1888//1504 1945//1506 1919//1507 +f 1914//1508 1876//1509 1882//1510 +f 1914//1508 1882//1510 1913//1511 +f 1929//1512 1896//1513 1876//1509 +f 1929//1512 1876//1509 1914//1508 +f 1959//1514 1930//1515 1896//1513 +f 1959//1514 1896//1513 1929//1512 +f 1982//1516 1930//1515 1959//1514 +f 1982//1516 1959//1514 2035//1517 +f 2129//1518 2085//1519 1982//1516 +f 2129//1518 1982//1516 2035//1517 +f 2155//1520 2085//1519 2129//1518 +f 2155//1520 2129//1518 2164//1521 +f 2164//1521 2177//1522 2170//1523 +f 2164//1521 2170//1523 2155//1520 +f 2177//1522 2187//1524 2186//1525 +f 2177//1522 2186//1525 2170//1523 +f 2187//1524 2183//1526 2178//1527 +f 2187//1524 2178//1527 2186//1525 +f 2183//1526 2175//1528 2173//1529 +f 2183//1526 2173//1529 2178//1527 +f 2157//1530 2152//1531 2173//1529 +f 2157//1530 2173//1529 2175//1528 +f 2157//1530 2091//1532 2042//1533 +f 2157//1530 2042//1533 2152//1531 +f 2091//1532 1981//1534 1950//1535 +f 2091//1532 1950//1535 2042//1533 +f 1945//1506 1907//1505 1950//1535 +f 1945//1506 1950//1535 1981//1534 +f 2028//1536 1980//1537 1945//1506 +f 2028//1536 1945//1506 1981//1534 +f 1945//1506 1980//1537 1951//1538 +f 1945//1506 1951//1538 1919//1507 +f 1913//1511 1943//1539 1947//1540 +f 1913//1511 1947//1540 1914//1508 +f 1914//1508 1947//1540 1956//1541 +f 1914//1508 1956//1541 1929//1512 +f 1956//1541 2004//1542 1959//1514 +f 1956//1541 1959//1514 1929//1512 +f 
1959//1514 2004//1542 2087//1543 +f 1959//1514 2087//1543 2035//1517 +f 2087//1543 2146//1544 2129//1518 +f 2087//1543 2129//1518 2035//1517 +f 2146//1544 2168//1545 2164//1521 +f 2146//1544 2164//1521 2129//1518 +f 2164//1521 2168//1545 2181//1546 +f 2164//1521 2181//1546 2177//1522 +f 2181//1546 2185//1547 2187//1524 +f 2181//1546 2187//1524 2177//1522 +f 2185//1547 2184//1548 2183//1526 +f 2185//1547 2183//1526 2187//1524 +f 2183//1526 2184//1548 2176//1549 +f 2183//1526 2176//1549 2175//1528 +f 2175//1528 2176//1549 2161//1550 +f 2175//1528 2161//1550 2157//1530 +f 2157//1530 2161//1550 2121//1551 +f 2157//1530 2121//1551 2091//1532 +f 2121//1551 2028//1536 1981//1534 +f 2121//1551 1981//1534 2091//1532 +f 2172//1552 2158//1553 2148//1554 +f 2172//1552 2148//1554 2159//1555 +f 2158//1553 2172//1552 2182//1556 +f 2158//1553 2182//1556 2166//1557 +f 2179//1558 2169//1559 2166//1557 +f 2179//1558 2166//1557 2182//1556 +f 2169//1559 2179//1558 2180//1560 +f 2169//1559 2180//1560 2165//1561 +f 2165//1561 2180//1560 2167//1562 +f 2165//1561 2167//1562 2162//1563 +f 2167//1562 2150//1564 2151//1565 +f 2167//1562 2151//1565 2162//1563 +f 2151//1565 2150//1564 2122//1566 +f 2151//1565 2122//1566 2130//1567 +f 2060//1568 2101//1569 2130//1567 +f 2060//1568 2130//1567 2122//1566 +f 2101//1569 2060//1568 2012//1570 +f 2101//1569 2012//1570 2081//1571 +f 2002//1572 2059//1573 2081//1571 +f 2002//1572 2081//1571 2012//1570 +f 1993//1574 2046//1575 2059//1573 +f 1993//1574 2059//1573 2002//1572 +f 2132//1576 2139//1577 2159//1555 +f 2132//1576 2159//1555 2148//1554 +f 2119//1578 2088//1579 2139//1577 +f 2119//1578 2139//1577 2132//1576 +f 2088//1579 2119//1578 2090//1580 +f 2088//1579 2090//1580 2048//1581 +f 2064//1582 2001//1583 2048//1581 +f 2064//1582 2048//1581 2090//1580 +f 2158//1553 2141//1584 2111//1585 +f 2158//1553 2111//1585 2148//1554 +f 2148//1554 2111//1585 2102//1586 +f 2148//1554 2102//1586 2132//1576 +f 2101//1569 2111//1585 2141//1584 +f 2101//1569 2141//1584 2130//1567 +f 2081//1571 2102//1586 2111//1585 +f 2081//1571 2111//1585 2101//1569 +f 2141//1584 2158//1553 2166//1557 +f 2141//1584 2166//1557 2154//1587 +f 2130//1567 2141//1584 2154//1587 +f 2130//1567 2154//1587 2151//1565 +f 2169//1559 2165//1561 2154//1587 +f 2169//1559 2154//1587 2166//1557 +f 2165//1561 2162//1563 2151//1565 +f 2165//1561 2151//1565 2154//1587 +f 2089//1588 2119//1578 2132//1576 +f 2089//1588 2132//1576 2102//1586 +f 2059//1573 2089//1588 2102//1586 +f 2059//1573 2102//1586 2081//1571 +f 2064//1582 2090//1580 2119//1578 +f 2064//1582 2119//1578 2089//1588 +f 1803//1474 1816//1473 1813//1470 +f 1803//1474 1813//1470 1805//1469 +f 1913//1511 1882//1510 1888//1504 +f 1913//1511 1888//1504 1919//1507 +f 1919//1507 1951//1538 1943//1539 +f 1919//1507 1943//1539 1913//1511 +f 2001//1583 2064//1582 2046//1575 +f 2001//1583 2046//1575 1993//1574 +f 2046//1575 2064//1582 2089//1588 +f 2046//1575 2089//1588 2059//1573 +f 1416//1589 1427//1590 1407//1591 +f 1416//1589 1407//1591 1387//1592 +f 1387//1592 1407//1591 1391//1593 +f 1387//1592 1391//1593 1375//1594 +f 1375//1594 1391//1593 1369//1595 +f 1375//1594 1369//1595 1366//1596 +f 1355//1597 1353//1598 1366//1596 +f 1355//1597 1366//1596 1369//1595 +f 1418//1599 1407//1591 1427//1590 +f 1418//1599 1427//1590 1450//1600 +f 1388//1601 1391//1593 1407//1591 +f 1388//1601 1407//1591 1418//1599 +f 1368//1602 1369//1595 1391//1593 +f 1368//1602 1391//1593 1388//1601 +f 1356//1603 1355//1597 1369//1595 +f 1356//1603 1369//1595 1368//1602 +f 1427//1590 1416//1589 
1442//1604 +f 1427//1590 1442//1604 1455//1605 +f 1455//1605 1468//1606 1450//1600 +f 1455//1605 1450//1600 1427//1590 +f 1346//1607 1353//1598 1355//1597 +f 1346//1607 1355//1597 1343//1608 +f 1356//1603 1347//1609 1343//1608 +f 1356//1603 1343//1608 1355//1597 +f 1345//1610 1347//1609 1356//1603 +f 1345//1610 1356//1603 1350//1611 +f 1345//1610 1333//1612 1343//1608 +f 1345//1610 1343//1608 1347//1609 +f 1346//1607 1343//1608 1333//1612 +f 1346//1607 1333//1612 1336//1613 +f 1336//1613 1348//1614 1353//1598 +f 1336//1613 1353//1598 1346//1607 +f 1362//1615 1366//1596 1353//1598 +f 1362//1615 1353//1598 1348//1614 +f 1370//1616 1375//1594 1366//1596 +f 1370//1616 1366//1596 1362//1615 +f 1384//1617 1387//1592 1375//1594 +f 1384//1617 1375//1594 1370//1616 +f 1419//1618 1416//1589 1387//1592 +f 1419//1618 1387//1592 1384//1617 +f 1442//1604 1416//1589 1419//1618 +f 1442//1604 1419//1618 1479//1619 +f 1455//1605 1442//1604 1479//1619 +f 1455//1605 1479//1619 1506//1620 +f 1506//1620 1516//1621 1468//1606 +f 1506//1620 1468//1606 1455//1605 +f 1516//1621 1463//1622 1450//1600 +f 1516//1621 1450//1600 1468//1606 +f 1429//1623 1418//1599 1450//1600 +f 1429//1623 1450//1600 1463//1622 +f 1385//1624 1388//1601 1418//1599 +f 1385//1624 1418//1599 1429//1623 +f 1361//1625 1368//1602 1388//1601 +f 1361//1625 1388//1601 1385//1624 +f 1368//1602 1361//1625 1350//1611 +f 1368//1602 1350//1611 1356//1603 +f 1371//1626 1363//1627 1393//1628 +f 1371//1626 1393//1628 1421//1629 +f 1351//1630 1363//1627 1371//1626 +f 1351//1630 1371//1626 1357//1631 +f 1341//1632 1351//1630 1357//1631 +f 1341//1632 1357//1631 1337//1633 +f 1337//1633 1332//1634 1331//1635 +f 1337//1633 1331//1635 1341//1632 +f 1332//1634 1328//1636 1330//1637 +f 1332//1634 1330//1637 1331//1635 +f 1342//1638 1330//1637 1328//1636 +f 1342//1638 1328//1636 1338//1639 +f 1338//1639 1360//1640 1359//1641 +f 1338//1639 1359//1641 1342//1638 +f 1390//1642 1381//1643 1359//1641 +f 1390//1642 1359//1641 1360//1640 +f 1389//1644 1381//1643 1390//1642 +f 1389//1644 1390//1642 1414//1645 +f 1534//1646 1461//1647 1389//1644 +f 1534//1646 1389//1644 1414//1645 +f 1596//1648 1552//1649 1461//1647 +f 1596//1648 1461//1647 1534//1646 +f 1612//1650 1576//1651 1552//1649 +f 1612//1650 1552//1649 1596//1648 +f 1612//1650 1602//1652 1574//1653 +f 1612//1650 1574//1653 1576//1651 +f 1518//1654 1574//1653 1602//1652 +f 1518//1654 1602//1652 1586//1655 +f 1586//1655 1512//1656 1459//1657 +f 1586//1655 1459//1657 1518//1654 +f 1393//1628 1459//1657 1512//1656 +f 1393//1628 1512//1656 1421//1629 +f 1582//1658 1614//1659 1643//1660 +f 1582//1658 1643//1660 1611//1661 +f 1562//1662 1470//1663 1582//1658 +f 1562//1662 1582//1658 1611//1661 +f 1444//1664 1392//1665 1470//1663 +f 1444//1664 1470//1663 1562//1662 +f 1386//1666 1367//1667 1392//1665 +f 1386//1666 1392//1665 1444//1664 +f 1354//1668 1340//1669 1367//1667 +f 1354//1668 1367//1667 1386//1666 +f 1334//1670 1340//1669 1354//1668 +f 1334//1670 1354//1668 1344//1671 +f 1329//1672 1334//1670 1344//1671 +f 1329//1672 1344//1671 1335//1673 +f 1335//1673 1349//1674 1339//1675 +f 1335//1673 1339//1675 1329//1672 +f 1349//1674 1380//1676 1365//1677 +f 1349//1674 1365//1677 1339//1675 +f 1439//1678 1401//1679 1365//1677 +f 1439//1678 1365//1677 1380//1676 +f 1584//1680 1478//1681 1401//1679 +f 1584//1680 1401//1679 1439//1678 +f 1626//1682 1597//1683 1478//1681 +f 1626//1682 1478//1681 1584//1680 +f 1624//1684 1597//1683 1626//1682 +f 1624//1684 1626//1682 1651//1685 +f 1651//1685 1679//1686 1642//1687 +f 1651//1685 
1642//1687 1624//1684 +f 1663//1688 1635//1689 1642//1687 +f 1663//1688 1642//1687 1679//1686 +f 1643//1660 1614//1659 1635//1689 +f 1643//1660 1635//1689 1663//1688 +f 1548//1690 1466//1691 1394//1692 +f 1548//1690 1394//1692 1447//1693 +f 1394//1692 1373//1694 1404//1695 +f 1394//1692 1404//1695 1447//1693 +f 1404//1695 1373//1694 1382//1696 +f 1404//1695 1382//1696 1428//1697 +f 1428//1697 1382//1696 1426//1698 +f 1428//1697 1426//1698 1520//1699 +f 1520//1699 1426//1698 1514//1700 +f 1520//1699 1514//1700 1595//1701 +f 1514//1700 1588//1702 1616//1703 +f 1514//1700 1616//1703 1595//1701 +f 1588//1702 1620//1704 1640//1705 +f 1588//1702 1640//1705 1616//1703 +f 1620//1704 1652//1706 1669//1707 +f 1620//1704 1669//1707 1640//1705 +f 1652//1706 1689//1708 1700//1709 +f 1652//1706 1700//1709 1669//1707 +f 1689//1708 1705//1710 1724//1711 +f 1689//1708 1724//1711 1700//1709 +f 1705//1710 1718//1712 1725//1713 +f 1705//1710 1725//1713 1724//1711 +f 1725//1713 1718//1712 1687//1714 +f 1725//1713 1687//1714 1697//1715 +f 1697//1715 1687//1714 1657//1716 +f 1697//1715 1657//1716 1670//1717 +f 1670//1717 1657//1716 1621//1718 +f 1670//1717 1621//1718 1647//1719 +f 1647//1719 1621//1718 1580//1720 +f 1647//1719 1580//1720 1599//1721 +f 1580//1720 1466//1691 1548//1690 +f 1580//1720 1548//1690 1599//1721 +f 1939//1492 1909//1462 1922//1722 +f 1939//1492 1922//1722 1968//1723 +f 2010//1490 1939//1492 1968//1723 +f 2010//1490 1968//1723 2072//1724 +f 2079//1488 2010//1490 2072//1724 +f 2079//1488 2072//1724 2135//1725 +f 2147//1486 2079//1488 2135//1725 +f 2147//1486 2135//1725 2160//1726 +f 2145//1484 2147//1486 2160//1726 +f 2145//1484 2160//1726 2156//1727 +f 2116//1481 2145//1484 2156//1727 +f 2116//1481 2156//1727 2149//1728 +f 2149//1728 2067//1729 2021//1479 +f 2149//1728 2021//1479 2116//1481 +f 2067//1729 1949//1730 1926//1478 +f 2067//1729 1926//1478 2021//1479 +f 1867//1476 1926//1478 1949//1730 +f 1867//1476 1949//1730 1891//1731 +f 1839//1472 1867//1476 1891//1731 +f 1839//1472 1891//1731 1848//1732 +f 1816//1473 1839//1472 1848//1732 +f 1816//1473 1848//1732 1838//1733 +f 1813//1470 1816//1473 1838//1733 +f 1813//1470 1838//1733 1832//1734 +f 1806//1468 1813//1470 1832//1734 +f 1806//1468 1832//1734 1823//1735 +f 1830//1465 1806//1468 1823//1735 +f 1830//1465 1823//1735 1842//1736 +f 1842//1736 1879//1737 1858//1461 +f 1842//1736 1858//1461 1830//1465 +f 1879//1737 1922//1722 1909//1462 +f 1879//1737 1909//1462 1858//1461 +f 1394//1692 1466//1691 1411//1738 +f 1394//1692 1411//1738 1364//1739 +f 1364//1739 1352//1740 1373//1694 +f 1364//1739 1373//1694 1394//1692 +f 1382//1696 1373//1694 1352//1740 +f 1382//1696 1352//1740 1358//1741 +f 1426//1698 1382//1696 1358//1741 +f 1426//1698 1358//1741 1378//1742 +f 1514//1700 1426//1698 1378//1742 +f 1514//1700 1378//1742 1436//1743 +f 1436//1743 1527//1744 1588//1702 +f 1436//1743 1588//1702 1514//1700 +f 1527//1744 1598//1745 1620//1704 +f 1527//1744 1620//1704 1588//1702 +f 1598//1745 1633//1746 1652//1706 +f 1598//1745 1652//1706 1620//1704 +f 1633//1746 1667//1747 1689//1708 +f 1633//1746 1689//1708 1652//1706 +f 1667//1747 1691//1748 1705//1710 +f 1667//1747 1705//1710 1689//1708 +f 1691//1748 1704//1749 1718//1712 +f 1691//1748 1718//1712 1705//1710 +f 1687//1714 1718//1712 1704//1749 +f 1687//1714 1704//1749 1673//1750 +f 1657//1716 1687//1714 1673//1750 +f 1657//1716 1673//1750 1646//1751 +f 1621//1718 1657//1716 1646//1751 +f 1621//1718 1646//1751 1604//1752 +f 1580//1720 1621//1718 1604//1752 +f 1580//1720 1604//1752 1513//1753 +f 
1513//1753 1411//1738 1466//1691 +f 1513//1753 1466//1691 1580//1720 +f 1635//1689 1614//1659 1586//1655 +f 1635//1689 1586//1655 1602//1652 +f 1642//1687 1635//1689 1602//1652 +f 1642//1687 1602//1652 1612//1650 +f 1624//1684 1642//1687 1612//1650 +f 1624//1684 1612//1650 1596//1648 +f 1596//1648 1534//1646 1597//1683 +f 1596//1648 1597//1683 1624//1684 +f 1478//1681 1597//1683 1534//1646 +f 1478//1681 1534//1646 1414//1645 +f 1401//1679 1478//1681 1414//1645 +f 1401//1679 1414//1645 1390//1642 +f 1365//1677 1401//1679 1390//1642 +f 1365//1677 1390//1642 1360//1640 +f 1339//1675 1365//1677 1360//1640 +f 1339//1675 1360//1640 1338//1639 +f 1329//1672 1339//1675 1338//1639 +f 1329//1672 1338//1639 1328//1636 +f 1328//1636 1332//1634 1334//1670 +f 1328//1636 1334//1670 1329//1672 +f 1337//1633 1340//1669 1334//1670 +f 1337//1633 1334//1670 1332//1634 +f 1367//1667 1340//1669 1337//1633 +f 1367//1667 1337//1633 1357//1631 +f 1392//1665 1367//1667 1357//1631 +f 1392//1665 1357//1631 1371//1626 +f 1470//1663 1392//1665 1371//1626 +f 1470//1663 1371//1626 1421//1629 +f 1582//1658 1470//1663 1421//1629 +f 1582//1658 1421//1629 1512//1656 +f 1586//1655 1614//1659 1582//1658 +f 1586//1655 1582//1658 1512//1656 +f 1950//1535 1907//1505 1871//1754 +f 1950//1535 1871//1754 1918//1755 +f 2042//1533 1950//1535 1918//1755 +f 2042//1533 1918//1755 1986//1756 +f 2152//1531 2042//1533 1986//1756 +f 2152//1531 1986//1756 2125//1757 +f 2173//1529 2152//1531 2125//1757 +f 2173//1529 2125//1757 2163//1758 +f 2178//1527 2173//1529 2163//1758 +f 2178//1527 2163//1758 2171//1759 +f 2186//1525 2178//1527 2171//1759 +f 2186//1525 2171//1759 2174//1760 +f 2170//1523 2186//1525 2174//1760 +f 2170//1523 2174//1760 2153//1761 +f 2155//1520 2170//1523 2153//1761 +f 2155//1520 2153//1761 2118//1762 +f 2118//1762 2006//1763 2085//1519 +f 2118//1762 2085//1519 2155//1520 +f 1982//1516 2085//1519 2006//1763 +f 1982//1516 2006//1763 1942//1764 +f 1899//1765 1930//1515 1982//1516 +f 1899//1765 1982//1516 1942//1764 +f 1896//1513 1930//1515 1899//1765 +f 1896//1513 1899//1765 1866//1766 +f 1876//1509 1896//1513 1866//1766 +f 1876//1509 1866//1766 1844//1767 +f 1882//1510 1876//1509 1844//1767 +f 1882//1510 1844//1767 1852//1768 +f 1888//1504 1882//1510 1852//1768 +f 1888//1504 1852//1768 1855//1769 +f 1855//1769 1871//1754 1907//1505 +f 1855//1769 1907//1505 1888//1504 +f 1951//1538 1980//1537 2048//1581 +f 1951//1538 2048//1581 2001//1583 +f 1943//1539 1951//1538 2001//1583 +f 1943//1539 2001//1583 1993//1574 +f 1947//1540 1943//1539 1993//1574 +f 1947//1540 1993//1574 2002//1572 +f 1956//1541 1947//1540 2002//1572 +f 1956//1541 2002//1572 2012//1570 +f 2060//1568 2004//1542 1956//1541 +f 2060//1568 1956//1541 2012//1570 +f 2087//1543 2004//1542 2060//1568 +f 2087//1543 2060//1568 2122//1566 +f 2150//1564 2146//1544 2087//1543 +f 2150//1564 2087//1543 2122//1566 +f 2150//1564 2167//1562 2168//1545 +f 2150//1564 2168//1545 2146//1544 +f 2181//1546 2168//1545 2167//1562 +f 2181//1546 2167//1562 2180//1560 +f 2179//1558 2185//1547 2181//1546 +f 2179//1558 2181//1546 2180//1560 +f 2182//1556 2184//1548 2185//1547 +f 2182//1556 2185//1547 2179//1558 +f 2176//1549 2184//1548 2182//1556 +f 2176//1549 2182//1556 2172//1552 +f 2161//1550 2176//1549 2172//1552 +f 2161//1550 2172//1552 2159//1555 +f 2121//1551 2161//1550 2159//1555 +f 2121//1551 2159//1555 2139//1577 +f 2088//1579 2028//1536 2121//1551 +f 2088//1579 2121//1551 2139//1577 +f 2088//1579 2048//1581 1980//1537 +f 2088//1579 1980//1537 2028//1536 +f 1393//1628 1363//1627 
1361//1625 +f 1393//1628 1361//1625 1385//1624 +f 1429//1623 1459//1657 1393//1628 +f 1429//1623 1393//1628 1385//1624 +f 1518//1654 1459//1657 1429//1623 +f 1518//1654 1429//1623 1463//1622 +f 1516//1621 1574//1653 1518//1654 +f 1516//1621 1518//1654 1463//1622 +f 1576//1651 1574//1653 1516//1621 +f 1576//1651 1516//1621 1506//1620 +f 1552//1649 1576//1651 1506//1620 +f 1552//1649 1506//1620 1479//1619 +f 1461//1647 1552//1649 1479//1619 +f 1461//1647 1479//1619 1419//1618 +f 1389//1644 1461//1647 1419//1618 +f 1389//1644 1419//1618 1384//1617 +f 1370//1616 1381//1643 1389//1644 +f 1370//1616 1389//1644 1384//1617 +f 1359//1641 1381//1643 1370//1616 +f 1359//1641 1370//1616 1362//1615 +f 1342//1638 1359//1641 1362//1615 +f 1342//1638 1362//1615 1348//1614 +f 1348//1614 1336//1613 1330//1637 +f 1348//1614 1330//1637 1342//1638 +f 1331//1635 1330//1637 1336//1613 +f 1331//1635 1336//1613 1333//1612 +f 1341//1632 1331//1635 1333//1612 +f 1341//1632 1333//1612 1345//1610 +f 1350//1611 1351//1630 1341//1632 +f 1350//1611 1341//1632 1345//1610 +f 1361//1625 1363//1627 1351//1630 +f 1361//1625 1351//1630 1350//1611 +f 1741//1770 1736//1771 1753//1772 +f 1741//1770 1753//1772 1758//1773 +f 1785//1774 1780//1775 1758//1773 +f 1785//1774 1758//1773 1753//1772 +f 1726//1776 1759//1777 1753//1772 +f 1726//1776 1753//1772 1736//1771 +f 1798//1778 1785//1774 1753//1772 +f 1798//1778 1753//1772 1759//1777 +f 1798//1778 1759//1777 1761//1779 +f 1798//1778 1761//1779 1808//1780 +f 1761//1779 1759//1777 1726//1776 +f 1761//1779 1726//1776 1721//1781 +f 1820//1782 1762//1783 1763//1784 +f 1820//1782 1763//1784 1818//1785 +f 1762//1783 1709//1786 1708//1787 +f 1762//1783 1708//1787 1763//1784 +f 1708//1787 1716//1788 1764//1789 +f 1708//1787 1764//1789 1763//1784 +f 1763//1784 1764//1789 1826//1790 +f 1763//1784 1826//1790 1818//1785 +f 1828//1791 1765//1792 1767//1793 +f 1828//1791 1767//1793 1835//1794 +f 1717//1795 1767//1793 1765//1792 +f 1717//1795 1765//1792 1713//1796 +f 1770//1797 1837//1798 1835//1794 +f 1770//1797 1835//1794 1767//1793 +f 1719//1799 1770//1797 1767//1793 +f 1719//1799 1767//1793 1717//1795 +f 1723//1800 1772//1801 1770//1797 +f 1723//1800 1770//1797 1719//1799 +f 1772//1801 1833//1802 1837//1798 +f 1772//1801 1837//1798 1770//1797 +f 1758//1773 1757//1803 1743//1804 +f 1758//1773 1743//1804 1741//1770 +f 1757//1803 1758//1773 1780//1775 +f 1757//1803 1780//1775 1782//1805 +f 1751//1806 1757//1803 1782//1805 +f 1751//1806 1782//1805 1778//1807 +f 1751//1806 1738//1808 1743//1804 +f 1751//1806 1743//1804 1757//1803 +f 1778//1807 1786//1809 1748//1810 +f 1778//1807 1748//1810 1751//1806 +f 1728//1811 1738//1808 1751//1806 +f 1728//1811 1751//1806 1748//1810 +f 1748//1810 1786//1809 1793//1812 +f 1748//1810 1793//1812 1749//1813 +f 1714//1814 1728//1811 1748//1810 +f 1714//1814 1748//1810 1749//1813 +f 1749//1813 1793//1812 1810//1815 +f 1749//1813 1810//1815 1756//1816 +f 1892//1463 1863//1817 1829//1818 +f 1892//1463 1829//1818 1849//1464 +f 1849//1464 1829//1818 1799//1819 +f 1849//1464 1799//1819 1817//1466 +f 1799//1819 1794//1820 1800//1467 +f 1799//1819 1800//1467 1817//1466 +f 1800//1467 1794//1820 1797//1821 +f 1800//1467 1797//1821 1805//1469 +f 1732//1822 1715//1823 1700//1709 +f 1732//1822 1700//1709 1724//1711 +f 1669//1707 1700//1709 1715//1823 +f 1669//1707 1715//1823 1695//1824 +f 1669//1707 1695//1824 1675//1825 +f 1669//1707 1675//1825 1640//1705 +f 1714//1814 1749//1813 1756//1816 +f 1714//1814 1756//1816 1710//1826 +f 1640//1705 1675//1825 1644//1827 +f 1640//1705 
1644//1827 1616//1703 +f 1803//1474 1805//1469 1797//1821 +f 1803//1474 1797//1821 1795//1828 +f 1724//1711 1725//1713 1731//1829 +f 1724//1711 1731//1829 1732//1822 +f 1821//1471 1803//1474 1795//1828 +f 1821//1471 1795//1828 1802//1830 +f 1706//1831 1731//1829 1725//1713 +f 1706//1831 1725//1713 1697//1715 +f 1802//1830 1836//1832 1847//1475 +f 1802//1830 1847//1475 1821//1471 +f 1697//1715 1670//1717 1692//1833 +f 1697//1715 1692//1833 1706//1831 +f 1910//1477 1847//1475 1836//1832 +f 1910//1477 1836//1832 1856//1834 +f 1684//1835 1692//1833 1670//1717 +f 1684//1835 1670//1717 1647//1719 +f 1672//1836 1664//1837 1716//1788 +f 1672//1836 1716//1788 1708//1787 +f 1713//1796 1662//1838 1661//1839 +f 1713//1796 1661//1839 1717//1795 +f 1661//1839 1666//1840 1719//1799 +f 1661//1839 1719//1799 1717//1795 +f 1868//1841 1880//1842 1928//1843 +f 1868//1841 1928//1843 1923//1844 +f 1936//1845 1887//1846 1890//1847 +f 1936//1845 1890//1847 1946//1848 +f 1946//1848 1890//1847 1889//1849 +f 1946//1848 1889//1849 1934//1850 +f 1889//1849 1877//1851 1932//1852 +f 1889//1849 1932//1852 1934//1850 +f 1666//1840 1677//1853 1723//1800 +f 1666//1840 1723//1800 1719//1799 +f 1548//1690 1556//1854 1606//1855 +f 1548//1690 1606//1855 1599//1721 +f 1548//1690 1447//1693 1477//1856 +f 1548//1690 1477//1856 1556//1854 +f 1413//1857 1477//1856 1447//1693 +f 1413//1857 1447//1693 1404//1695 +f 1445//1858 1413//1857 1404//1695 +f 1445//1858 1404//1695 1428//1697 +f 1428//1697 1520//1699 1567//1859 +f 1428//1697 1567//1859 1445//1858 +f 1600//1860 1567//1859 1520//1699 +f 1600//1860 1520//1699 1595//1701 +f 1616//1703 1644//1827 1600//1860 +f 1616//1703 1600//1860 1595//1701 +f 1619//1861 1627//1862 1592//1863 +f 1619//1861 1592//1863 1571//1864 +f 1571//1864 1592//1863 1501//1865 +f 1571//1864 1501//1865 1462//1866 +f 1501//1865 1453//1867 1424//1868 +f 1501//1865 1424//1868 1462//1866 +f 1453//1867 1396//1869 1397//1870 +f 1453//1867 1397//1870 1424//1868 +f 1615//1871 1559//1872 1558//1873 +f 1615//1871 1558//1873 1613//1874 +f 1559//1872 1473//1875 1457//1876 +f 1559//1872 1457//1876 1558//1873 +f 1420//1877 1457//1876 1473//1875 +f 1420//1877 1473//1875 1417//1878 +f 1417//1878 1379//1879 1383//1880 +f 1417//1878 1383//1880 1420//1877 +f 1558//1873 1572//1881 1622//1882 +f 1558//1873 1622//1882 1613//1874 +f 1572//1881 1558//1873 1457//1876 +f 1572//1881 1457//1876 1469//1883 +f 1420//1877 1410//1884 1469//1883 +f 1420//1877 1469//1883 1457//1876 +f 1395//1885 1410//1884 1420//1877 +f 1395//1885 1420//1877 1383//1880 +f 1572//1881 1581//1886 1634//1887 +f 1572//1881 1634//1887 1622//1882 +f 1469//1883 1460//1888 1581//1886 +f 1469//1883 1581//1886 1572//1881 +f 1469//1883 1410//1884 1438//1889 +f 1469//1883 1438//1889 1460//1888 +f 1395//1885 1398//1890 1438//1889 +f 1395//1885 1438//1889 1410//1884 +f 2062//1482 1973//1480 1960//1891 +f 2062//1482 1960//1891 2027//1892 +f 1962//1893 1923//1844 1928//1843 +f 1962//1893 1928//1843 1984//1894 +f 1984//1894 2053//1895 2030//1896 +f 1984//1894 2030//1896 1962//1893 +f 2023//1897 2005//1898 1936//1845 +f 2023//1897 1936//1845 1946//1848 +f 2023//1897 2098//1899 2063//1900 +f 2023//1897 2063//1900 2005//1898 +f 2023//1897 1946//1848 1934//1850 +f 2023//1897 1934//1850 1992//1901 +f 2098//1899 2023//1897 1992//1901 +f 2098//1899 1992//1901 2094//1902 +f 1992//1901 1934//1850 1932//1852 +f 1992//1901 1932//1852 1997//1903 +f 2094//1902 1992//1901 1997//1903 +f 2094//1902 1997//1903 2093//1904 +f 2093//1904 2126//1905 2127//1906 +f 2093//1904 2127//1906 2094//1902 +f 
2094//1902 2127//1906 2137//1907 +f 2094//1902 2137//1907 2098//1899 +f 2115//1908 2063//1900 2098//1899 +f 2115//1908 2098//1899 2137//1907 +f 2053//1895 2095//1909 2144//1910 +f 2053//1895 2144//1910 2030//1896 +f 2027//1892 2112//1911 2099//1483 +f 2027//1892 2099//1483 2062//1482 +f 2107//1485 2099//1483 2112//1911 +f 2107//1485 2112//1911 2076//1912 +f 2076//1912 2003//1913 2008//1487 +f 2076//1912 2008//1487 2107//1485 +f 2003//1913 1941//1914 1955//1489 +f 2003//1913 1955//1489 2008//1487 +f 1941//1914 1903//1915 1915//1491 +f 1941//1914 1915//1491 1955//1489 +f 1903//1915 1863//1817 1892//1463 +f 1903//1915 1892//1463 1915//1491 +f 2084//1916 2144//1910 2095//1909 +f 2084//1916 2095//1909 2066//1917 +f 2137//1907 2124//1918 2117//1919 +f 2137//1907 2117//1919 2115//1908 +f 2124//1918 2137//1907 2127//1906 +f 2124//1918 2127//1906 2143//1920 +f 2126//1905 2134//1921 2143//1920 +f 2126//1905 2143//1920 2127//1906 +f 2027//1892 1960//1891 1962//1893 +f 2027//1892 1962//1893 2030//1896 +f 2030//1896 2144//1910 2112//1911 +f 2030//1896 2112//1911 2027//1892 +f 2076//1912 2112//1911 2144//1910 +f 2076//1912 2144//1910 2084//1916 +f 2084//1916 1935//1922 2003//1913 +f 2084//1916 2003//1913 2076//1912 +f 1935//1922 1897//1923 1941//1914 +f 1935//1922 1941//1914 2003//1913 +f 1897//1923 1861//1924 1903//1915 +f 1897//1923 1903//1915 1941//1914 +f 1861//1924 1810//1815 1863//1817 +f 1861//1924 1863//1817 1903//1915 +f 1829//1818 1863//1817 1810//1815 +f 1829//1818 1810//1815 1793//1812 +f 1799//1819 1829//1818 1793//1812 +f 1799//1819 1793//1812 1786//1809 +f 1786//1809 1778//1807 1794//1820 +f 1786//1809 1794//1820 1799//1819 +f 1797//1821 1794//1820 1778//1807 +f 1797//1821 1778//1807 1782//1805 +f 1795//1828 1797//1821 1782//1805 +f 1795//1828 1782//1805 1780//1775 +f 1802//1830 1795//1828 1780//1775 +f 1802//1830 1780//1775 1785//1774 +f 1785//1774 1798//1778 1836//1832 +f 1785//1774 1836//1832 1802//1830 +f 1856//1834 1836//1832 1798//1778 +f 1856//1834 1798//1778 1808//1780 +f 1761//1779 1762//1783 1820//1782 +f 1761//1779 1820//1782 1808//1780 +f 1761//1779 1721//1781 1709//1786 +f 1761//1779 1709//1786 1762//1783 +f 1726//1776 1692//1833 1684//1835 +f 1726//1776 1684//1835 1721//1781 +f 1706//1831 1692//1833 1726//1776 +f 1706//1831 1726//1776 1736//1771 +f 1736//1771 1741//1770 1731//1829 +f 1736//1771 1731//1829 1706//1831 +f 1732//1822 1731//1829 1741//1770 +f 1732//1822 1741//1770 1743//1804 +f 1743//1804 1738//1808 1715//1823 +f 1743//1804 1715//1823 1732//1822 +f 1695//1824 1715//1823 1738//1808 +f 1695//1824 1738//1808 1728//1811 +f 1675//1825 1695//1824 1728//1811 +f 1675//1825 1728//1811 1714//1814 +f 1644//1827 1675//1825 1714//1814 +f 1644//1827 1714//1814 1710//1826 +f 1600//1860 1644//1827 1710//1826 +f 1600//1860 1710//1826 1655//1925 +f 1655//1925 1631//1926 1567//1859 +f 1655//1925 1567//1859 1600//1860 +f 1445//1858 1567//1859 1631//1926 +f 1445//1858 1631//1926 1617//1927 +f 1617//1927 1396//1869 1413//1857 +f 1617//1927 1413//1857 1445//1858 +f 1396//1869 1453//1867 1477//1856 +f 1396//1869 1477//1856 1413//1857 +f 1556//1854 1477//1856 1453//1867 +f 1556//1854 1453//1867 1501//1865 +f 1606//1855 1556//1854 1501//1865 +f 1606//1855 1501//1865 1592//1863 +f 1960//1891 1908//1928 1923//1844 +f 1960//1891 1923//1844 1962//1893 +f 1908//1928 1960//1891 1973//1480 +f 1908//1928 1973//1480 1910//1477 +f 1908//1928 1862//1929 1868//1841 +f 1908//1928 1868//1841 1923//1844 +f 1868//1841 1862//1929 1820//1782 +f 1868//1841 1820//1782 1818//1785 +f 1826//1790 1880//1842 
1868//1841 +f 1826//1790 1868//1841 1818//1785 +f 1887//1846 1828//1791 1835//1794 +f 1887//1846 1835//1794 1890//1847 +f 1889//1849 1890//1847 1835//1794 +f 1889//1849 1835//1794 1837//1798 +f 1837//1798 1833//1802 1877//1851 +f 1837//1798 1877//1851 1889//1849 +f 1910//1477 1856//1834 1862//1929 +f 1910//1477 1862//1929 1908//1928 +f 1856//1834 1808//1780 1820//1782 +f 1856//1834 1820//1782 1862//1929 +f 1709//1786 1681//1930 1672//1836 +f 1709//1786 1672//1836 1708//1787 +f 1592//1863 1627//1862 1645//1931 +f 1592//1863 1645//1931 1606//1855 +f 1647//1719 1599//1721 1606//1855 +f 1647//1719 1606//1855 1645//1931 +f 1672//1836 1681//1930 1645//1931 +f 1672//1836 1645//1931 1627//1862 +f 1627//1862 1619//1861 1664//1837 +f 1627//1862 1664//1837 1672//1836 +f 1613//1874 1661//1839 1662//1838 +f 1613//1874 1662//1838 1615//1871 +f 1613//1874 1622//1882 1666//1840 +f 1613//1874 1666//1840 1661//1839 +f 1634//1887 1677//1853 1666//1840 +f 1634//1887 1666//1840 1622//1882 +f 1681//1930 1684//1835 1647//1719 +f 1681//1930 1647//1719 1645//1931 +f 1684//1835 1681//1930 1709//1786 +f 1684//1835 1709//1786 1721//1781 +f 2061//1932 2134//1921 2126//1905 +f 2061//1932 2126//1905 2104//1933 +f 2075//1934 2061//1932 2104//1933 +f 2075//1934 2104//1933 2038//1935 +f 2051//1936 2104//1933 2126//1905 +f 2051//1936 2126//1905 2093//1904 +f 2038//1935 2104//1933 2051//1936 +f 2038//1935 2051//1936 1979//1937 +f 1987//1938 2075//1934 2038//1935 +f 1987//1938 2038//1935 2018//1939 +f 1979//1937 1953//1940 2018//1939 +f 1979//1937 2018//1939 2038//1935 +f 2017//1941 2051//1936 2093//1904 +f 2017//1941 2093//1904 1997//1903 +f 1952//1942 1979//1937 2051//1936 +f 1952//1942 2051//1936 2017//1941 +f 1931//1943 1953//1940 1979//1937 +f 1931//1943 1979//1937 1952//1942 +f 1997//1903 1932//1852 1933//1944 +f 1997//1903 1933//1944 2017//1941 +f 1920//1945 1952//1942 2017//1941 +f 1920//1945 2017//1941 1933//1944 +f 1952//1942 1920//1945 1901//1946 +f 1952//1942 1901//1946 1931//1943 +f 1920//1945 1870//1947 1859//1948 +f 1920//1945 1859//1948 1901//1946 +f 1870//1947 1920//1945 1933//1944 +f 1870//1947 1933//1944 1875//1949 +f 1877//1851 1875//1949 1933//1944 +f 1877//1851 1933//1944 1932//1852 +f 1833//1802 1827//1950 1875//1949 +f 1833//1802 1875//1949 1877//1851 +f 1827//1950 1825//1951 1870//1947 +f 1827//1950 1870//1947 1875//1949 +f 1859//1948 1870//1947 1825//1951 +f 1859//1948 1825//1951 1819//1952 +f 1774//1953 1827//1950 1833//1802 +f 1774//1953 1833//1802 1772//1801 +f 1774//1953 1775//1954 1825//1951 +f 1774//1953 1825//1951 1827//1950 +f 1825//1951 1775//1954 1776//1955 +f 1825//1951 1776//1955 1819//1952 +f 1729//1956 1774//1953 1772//1801 +f 1729//1956 1772//1801 1723//1800 +f 1735//1957 1775//1954 1774//1953 +f 1735//1957 1774//1953 1729//1956 +f 1739//1958 1776//1955 1775//1954 +f 1739//1958 1775//1954 1735//1957 +f 1680//1959 1729//1956 1723//1800 +f 1680//1959 1723//1800 1677//1853 +f 1688//1960 1735//1957 1729//1956 +f 1688//1960 1729//1956 1680//1959 +f 1739//1958 1735//1957 1688//1960 +f 1739//1958 1688//1960 1694//1961 +f 1634//1887 1628//1962 1680//1959 +f 1634//1887 1680//1959 1677//1853 +f 1688//1960 1680//1959 1628//1962 +f 1688//1960 1628//1962 1639//1963 +f 1656//1964 1694//1961 1688//1960 +f 1656//1964 1688//1960 1639//1963 +f 1591//1965 1628//1962 1634//1887 +f 1591//1965 1634//1887 1581//1886 +f 1591//1965 1605//1966 1639//1963 +f 1591//1965 1639//1963 1628//1962 +f 1656//1964 1639//1963 1605//1966 +f 1656//1964 1605//1966 1623//1967 +f 1581//1886 1460//1888 1499//1968 +f 1581//1886 
1499//1968 1591//1965 +f 1589//1969 1605//1966 1591//1965 +f 1589//1969 1591//1965 1499//1968 +f 1605//1966 1589//1969 1601//1970 +f 1605//1966 1601//1970 1623//1967 +f 1484//1971 1499//1968 1460//1888 +f 1484//1971 1460//1888 1438//1889 +f 1484//1971 1530//1972 1589//1969 +f 1484//1971 1589//1969 1499//1968 +f 1589//1969 1530//1972 1593//1973 +f 1589//1969 1593//1973 1601//1970 +f 1438//1889 1398//1890 1452//1974 +f 1438//1889 1452//1974 1484//1971 +f 1545//1975 1530//1972 1484//1971 +f 1545//1975 1484//1971 1452//1974 +f 1579//1976 1593//1973 1530//1972 +f 1579//1976 1530//1972 1545//1975 +f 2143//1920 2100//1360 2110//1361 +f 2143//1920 2110//1361 2124//1918 +f 2100//1360 2143//1920 2134//1921 +f 2100//1360 2134//1921 2078//1358 +f 2078//1358 2134//1921 2061//1932 +f 2078//1358 2061//1932 2068//1359 +f 2043//1502 2068//1359 2061//1932 +f 2043//1502 2061//1932 2075//1934 +f 2075//1934 1987//1938 2014//1501 +f 2075//1934 2014//1501 2043//1502 +f 2115//1908 2117//1919 2066//1917 +f 2115//1908 2066//1917 2095//1909 +f 2095//1909 2053//1895 2063//1900 +f 2095//1909 2063//1900 2115//1908 +f 2005//1898 2063//1900 2053//1895 +f 2005//1898 2053//1895 1984//1894 +f 1936//1845 2005//1898 1984//1894 +f 1936//1845 1984//1894 1928//1843 +f 1880//1842 1887//1846 1936//1845 +f 1880//1842 1936//1845 1928//1843 +f 1826//1790 1828//1791 1887//1846 +f 1826//1790 1887//1846 1880//1842 +f 1764//1789 1765//1792 1828//1791 +f 1764//1789 1828//1791 1826//1790 +f 1713//1796 1765//1792 1764//1789 +f 1713//1796 1764//1789 1716//1788 +f 1664//1837 1662//1838 1713//1796 +f 1664//1837 1713//1796 1716//1788 +f 1615//1871 1662//1838 1664//1837 +f 1615//1871 1664//1837 1619//1861 +f 1571//1864 1559//1872 1615//1871 +f 1571//1864 1615//1871 1619//1861 +f 1571//1864 1462//1866 1473//1875 +f 1571//1864 1473//1875 1559//1872 +f 1417//1878 1473//1875 1462//1866 +f 1417//1878 1462//1866 1424//1868 +f 1424//1868 1397//1870 1379//1879 +f 1424//1868 1379//1879 1417//1878 +f 1379//1879 1412//1380 1446//1381 +f 1379//1879 1446//1381 1383//1880 +f 1412//1380 1379//1879 1397//1870 +f 1412//1380 1397//1870 1607//1503 +f 1431//1382 1395//1885 1383//1880 +f 1431//1382 1383//1880 1446//1381 +f 1395//1885 1431//1382 1456//1384 +f 1395//1885 1456//1384 1398//1890 +f 1398//1890 1456//1384 1475//1388 +f 1398//1890 1475//1388 1452//1974 +f 1452//1974 1475//1388 1488//1403 +f 1452//1974 1488//1403 1545//1975 +f 1545//1975 1488//1403 1585//1410 +f 1545//1975 1585//1410 1579//1976 +f 1963//1977 1975//1500 2014//1501 +f 1963//1977 2014//1501 1987//1938 +f 2018//1939 1964//1978 1963//1977 +f 2018//1939 1963//1977 1987//1938 +f 1948//1499 1938//1979 1912//1980 +f 1948//1499 1912//1980 1911//1498 +f 1905//1981 1912//1980 1938//1979 +f 1905//1981 1938//1979 1927//1982 +f 1953//1940 1944//1983 1964//1978 +f 1953//1940 1964//1978 2018//1939 +f 1917//1984 1944//1983 1953//1940 +f 1917//1984 1953//1940 1931//1943 +f 1902//1985 1905//1981 1927//1982 +f 1902//1985 1927//1982 1925//1986 +f 1902//1985 1925//1986 1894//1987 +f 1902//1985 1894//1987 1885//1988 +f 1893//1989 1917//1984 1931//1943 +f 1893//1989 1931//1943 1901//1946 +f 1853//1990 1893//1989 1901//1946 +f 1853//1990 1901//1946 1859//1948 +f 1873//1991 1864//1992 1885//1988 +f 1873//1991 1885//1988 1894//1987 +f 1864//1992 1873//1991 1850//1993 +f 1864//1992 1850//1993 1843//1994 +f 1610//1426 1603//1995 1579//1976 +f 1610//1426 1579//1976 1585//1410 +f 1678//1996 1638//1997 1637//1428 +f 1678//1996 1637//1428 1682//1427 +f 1603//1995 1609//1998 1593//1973 +f 1603//1995 1593//1973 1579//1976 +f 
1671//1999 1632//2000 1638//1997 +f 1671//1999 1638//1997 1678//1996 +f 1609//1998 1625//2001 1601//1970 +f 1609//1998 1601//1970 1593//1973 +f 1632//2000 1671//1999 1674//2002 +f 1632//2000 1674//2002 1649//2003 +f 1625//2001 1636//2004 1623//1967 +f 1625//2001 1623//1967 1601//1970 +f 1683//2005 1660//2006 1649//2003 +f 1683//2005 1649//2003 1674//2002 +f 1636//2004 1665//2007 1656//1964 +f 1636//2004 1656//1964 1623//1967 +f 1660//2006 1683//2005 1698//2008 +f 1660//2006 1698//2008 1685//2009 +f 1665//2007 1703//2010 1694//1961 +f 1665//2007 1694//1961 1656//1964 +f 1727//2011 1711//2012 1685//2009 +f 1727//2011 1685//2009 1698//2008 +f 1703//2010 1742//2013 1739//1958 +f 1703//2010 1739//1958 1694//1961 +f 1727//2011 1746//2014 1745//2015 +f 1727//2011 1745//2015 1711//2012 +f 1742//2013 1777//2016 1776//1955 +f 1742//2013 1776//1955 1739//1958 +f 1746//2014 1783//2017 1779//2018 +f 1746//2014 1779//2018 1745//2015 +f 1777//2016 1814//2019 1819//1952 +f 1777//2016 1819//1952 1776//1955 +f 1814//2019 1853//1990 1859//1948 +f 1814//2019 1859//1948 1819//1952 +f 1843//1994 1850//1993 1811//2020 +f 1843//1994 1811//2020 1809//2021 +f 1809//2021 1811//2020 1779//2018 +f 1809//2021 1779//2018 1783//2017 +f 1707//2022 1678//1996 1682//1427 +f 1707//2022 1682//1427 1722//1429 +f 1701//2023 1671//1999 1678//1996 +f 1701//2023 1678//1996 1707//2022 +f 1693//2024 1674//2002 1671//1999 +f 1693//2024 1671//1999 1701//2023 +f 1702//2025 1683//2005 1674//2002 +f 1702//2025 1674//2002 1693//2024 +f 1683//2005 1702//2025 1712//2026 +f 1683//2005 1712//2026 1698//2008 +f 1698//2008 1712//2026 1737//2027 +f 1698//2008 1737//2027 1727//2011 +f 1727//2011 1737//2027 1754//2028 +f 1727//2011 1754//2028 1746//2014 +f 1787//2029 1783//2017 1746//2014 +f 1787//2029 1746//2014 1754//2028 +f 1783//2017 1787//2029 1812//2030 +f 1783//2017 1812//2030 1809//2021 +f 1841//2031 1843//1994 1809//2021 +f 1841//2031 1809//2021 1812//2030 +f 1854//2032 1864//1992 1843//1994 +f 1854//2032 1843//1994 1841//2031 +f 1864//1992 1854//2032 1865//2033 +f 1864//1992 1865//2033 1885//1988 +f 1885//1988 1865//2033 1883//2034 +f 1885//1988 1883//2034 1902//1985 +f 1884//2035 1905//1981 1902//1985 +f 1884//2035 1902//1985 1883//2034 +f 1872//2036 1912//1980 1905//1981 +f 1872//2036 1905//1981 1884//2035 +f 1911//1498 1912//1980 1872//2036 +f 1911//1498 1872//2036 1869//1497 +f 1744//2037 1707//2022 1722//1429 +f 1744//2037 1722//1429 1747//1430 +f 1791//2038 1766//2039 1773//1494 +f 1791//2038 1773//1494 1790//1493 +f 1733//2040 1701//2023 1707//2022 +f 1733//2040 1707//2022 1744//2037 +f 1693//2024 1701//2023 1733//2040 +f 1693//2024 1733//2040 1702//2025 +f 1737//2027 1712//2026 1702//2025 +f 1737//2027 1702//2025 1733//2040 +f 1737//2027 1733//2040 1760//2041 +f 1737//2027 1760//2041 1754//2028 +f 1789//2042 1787//2029 1754//2028 +f 1789//2042 1754//2028 1760//2041 +f 1840//1496 1869//1497 1872//2036 +f 1840//1496 1872//2036 1845//2043 +f 1884//2035 1846//2044 1845//2043 +f 1884//2035 1845//2043 1872//2036 +f 1884//2035 1883//2034 1865//2033 +f 1884//2035 1865//2033 1846//2044 +f 1865//2033 1854//2032 1841//2031 +f 1865//2033 1841//2031 1846//2044 +f 1812//2030 1822//2045 1846//2044 +f 1812//2030 1846//2044 1841//2031 +f 1787//2029 1789//2042 1822//2045 +f 1787//2029 1822//2045 1812//2030 +f 1815//2046 1807//1495 1840//1496 +f 1815//2046 1840//1496 1845//2043 +f 1790//1493 1807//1495 1815//2046 +f 1790//1493 1815//2046 1791//2038 +f 1788//1450 1801//1445 1807//1495 +f 1788//1450 1807//1495 1790//1493 +f 1784//1449 1796//1446 
1801//1445 +f 1784//1449 1801//1445 1788//1450 +f 1781//1452 1792//1453 1796//1446 +f 1781//1452 1796//1446 1784//1449 +f 1845//2043 1846//2044 1822//2045 +f 1845//2043 1822//2045 1815//2046 +f 1773//1494 1766//2039 1744//2037 +f 1773//1494 1744//2037 1747//1430 +f 1771//1447 1773//1494 1747//1430 +f 1771//1447 1747//1430 1750//1425 +f 1752//1423 1768//1448 1771//1447 +f 1752//1423 1771//1447 1750//1425 +f 1755//1424 1769//1451 1768//1448 +f 1755//1424 1768//1448 1752//1423 +f 1733//2040 1744//2037 1766//2039 +f 1733//2040 1766//2039 1760//2041 +f 1760//2041 1766//2039 1791//2038 +f 1760//2041 1791//2038 1789//2042 +f 1791//2038 1815//2046 1822//2045 +f 1791//2038 1822//2045 1789//2042 +f 2110//1361 1924//1362 2117//1919 +f 2110//1361 2117//1919 2124//1918 +f 1921//2047 2066//1917 2117//1919 +f 1921//2047 2117//1919 1924//1362 +f 2066//1917 1921//2047 1935//1922 +f 2066//1917 1935//1922 2084//1916 +f 1607//1503 1397//1870 1396//1869 +f 1607//1503 1396//1869 1617//1927 +f 1547//1307 1607//1503 1650//2048 +f 1547//1307 1650//2048 1587//1337 +f 1650//2048 1659//2049 1594//1290 +f 1650//2048 1594//1290 1587//1337 +f 1659//2049 1630//1389 1594//1290 +f 1653//1412 1686//1411 1696//1418 +f 1653//1412 1696//1418 1668//1416 +f 1654//1415 1629//1413 1653//1412 +f 1654//1415 1653//1412 1668//1416 +f 1637//1428 1610//1426 1629//1413 +f 1637//1428 1629//1413 1654//1415 +f 1638//1997 1603//1995 1610//1426 +f 1638//1997 1610//1426 1637//1428 +f 1632//2000 1609//1998 1603//1995 +f 1632//2000 1603//1995 1638//1997 +f 1649//2003 1625//2001 1609//1998 +f 1649//2003 1609//1998 1632//2000 +f 1660//2006 1636//2004 1625//2001 +f 1660//2006 1625//2001 1649//2003 +f 1685//2009 1665//2007 1636//2004 +f 1685//2009 1636//2004 1660//2006 +f 1711//2012 1703//2010 1665//2007 +f 1711//2012 1665//2007 1685//2009 +f 1711//2012 1745//2015 1742//2013 +f 1711//2012 1742//2013 1703//2010 +f 1745//2015 1779//2018 1777//2016 +f 1745//2015 1777//2016 1742//2013 +f 1779//2018 1811//2020 1814//2019 +f 1779//2018 1814//2019 1777//2016 +f 1811//2020 1850//1993 1853//1990 +f 1811//2020 1853//1990 1814//2019 +f 1873//1991 1893//1989 1853//1990 +f 1873//1991 1853//1990 1850//1993 +f 1894//1987 1917//1984 1893//1989 +f 1894//1987 1893//1989 1873//1991 +f 1925//1986 1944//1983 1917//1984 +f 1925//1986 1917//1984 1894//1987 +f 1964//1978 1944//1983 1925//1986 +f 1964//1978 1925//1986 1927//1982 +f 1963//1977 1964//1978 1927//1982 +f 1963//1977 1927//1982 1938//1979 +f 1948//1499 1975//1500 1963//1977 +f 1948//1499 1963//1977 1938//1979 +f 1940//1439 1977//1435 1975//1500 +f 1940//1439 1975//1500 1948//1499 +f 1937//1436 1977//1435 1940//1439 +f 1937//1436 1940//1439 1916//1438 +f 1878//1457 1895//1458 1937//1436 +f 1878//1457 1937//1436 1916//1438 +f 1968//1723 1922//1722 1942//1764 +f 1968//1723 1942//1764 2006//1763 +f 2072//1724 1968//1723 2006//1763 +f 2072//1724 2006//1763 2118//1762 +f 2135//1725 2072//1724 2118//1762 +f 2135//1725 2118//1762 2153//1761 +f 2160//1726 2135//1725 2153//1761 +f 2160//1726 2153//1761 2174//1760 +f 2156//1727 2160//1726 2174//1760 +f 2156//1727 2174//1760 2171//1759 +f 2149//1728 2156//1727 2171//1759 +f 2149//1728 2171//1759 2163//1758 +f 2125//1757 2067//1729 2149//1728 +f 2125//1757 2149//1728 2163//1758 +f 2125//1757 1986//1756 1949//1730 +f 2125//1757 1949//1730 2067//1729 +f 1891//1731 1949//1730 1986//1756 +f 1891//1731 1986//1756 1918//1755 +f 1848//1732 1891//1731 1918//1755 +f 1848//1732 1918//1755 1871//1754 +f 1838//1733 1848//1732 1871//1754 +f 1838//1733 1871//1754 1855//1769 +f 1832//1734 
1838//1733 1855//1769 +f 1832//1734 1855//1769 1852//1768 +f 1823//1735 1832//1734 1852//1768 +f 1823//1735 1852//1768 1844//1767 +f 1842//1736 1823//1735 1844//1767 +f 1842//1736 1844//1767 1866//1766 +f 1899//1765 1879//1737 1842//1736 +f 1899//1765 1842//1736 1866//1766 +f 1942//1764 1922//1722 1879//1737 +f 1942//1764 1879//1737 1899//1765 +f 1335//1673 1352//1740 1364//1739 +f 1335//1673 1364//1739 1349//1674 +f 1358//1741 1352//1740 1335//1673 +f 1358//1741 1335//1673 1344//1671 +f 1378//1742 1358//1741 1344//1671 +f 1378//1742 1344//1671 1354//1668 +f 1436//1743 1378//1742 1354//1668 +f 1436//1743 1354//1668 1386//1666 +f 1444//1664 1527//1744 1436//1743 +f 1444//1664 1436//1743 1386//1666 +f 1562//1662 1598//1745 1527//1744 +f 1562//1662 1527//1744 1444//1664 +f 1611//1661 1633//1746 1598//1745 +f 1611//1661 1598//1745 1562//1662 +f 1643//1660 1667//1747 1633//1746 +f 1643//1660 1633//1746 1611//1661 +f 1663//1688 1691//1748 1667//1747 +f 1663//1688 1667//1747 1643//1660 +f 1679//1686 1704//1749 1691//1748 +f 1679//1686 1691//1748 1663//1688 +f 1673//1750 1704//1749 1679//1686 +f 1673//1750 1679//1686 1651//1685 +f 1646//1751 1673//1750 1651//1685 +f 1646//1751 1651//1685 1626//1682 +f 1604//1752 1646//1751 1626//1682 +f 1604//1752 1626//1682 1584//1680 +f 1513//1753 1604//1752 1584//1680 +f 1513//1753 1584//1680 1439//1678 +f 1380//1676 1411//1738 1513//1753 +f 1380//1676 1513//1753 1439//1678 +f 1364//1739 1411//1738 1380//1676 +f 1364//1739 1380//1676 1349//1674 +f 1494//2050 1529//2051 1525//2052 +f 1494//2050 1525//2052 1503//2053 +f 1496//1406 1521//1405 1519//1397 +f 1496//1406 1519//1397 1495//1395 +f 1490//1400 1521//1405 1496//1406 +f 1490//1400 1496//1406 1472//1402 +f 1454//1394 1480//1396 1490//1400 +f 1454//1394 1490//1400 1472//1402 +f 1495//1395 1519//1397 1480//1396 +f 1495//1395 1480//1396 1454//1394 +f 1498//1401 1487//1399 1494//2050 +f 1498//1401 1494//2050 1503//2053 +f 1525//2052 1523//1404 1498//1401 +f 1525//2052 1498//1401 1503//2053 +f 1524//1398 1523//1404 1525//2052 +f 1524//1398 1525//2052 1529//2051 +f 1487//1399 1524//1398 1529//2051 +f 1487//1399 1529//2051 1494//2050 +f 4276//2054 4284//2055 4288//2056 +f 4166//2057 4191//2058 4219//2059 +f 4166//2057 4219//2059 4165//2060 +f 4146//2061 4159//2062 3969//2063 +f 4146//2061 3969//2063 4053//2064 +f 4053//2064 3969//2063 3977//2065 +f 3977//2065 3969//2063 3909//2066 +f 3396//2067 3392//2068 3345//2069 +f 3396//2067 3345//2069 3349//2070 +f 3551//2071 3629//2072 3682//2073 +f 3551//2071 3682//2073 3572//2074 +f 3829//2075 3884//2076 3910//2077 +f 3829//2075 3910//2077 3852//2078 +f 3976//2079 3935//2080 3972//2081 +f 3976//2079 3972//2081 4038//2082 +f 4088//2083 3996//2084 4021//2085 +f 4088//2083 4021//2085 4109//2086 +f 4024//2087 4010//2088 4108//2089 +f 4024//2087 4108//2089 4118//2090 +f 3998//2091 3981//2092 4072//2093 +f 3998//2091 4072//2093 4099//2094 +f 3687//2095 3729//2096 3792//2097 +f 3687//2095 3792//2097 3783//2098 +f 3851//2099 3926//2100 3922//2101 +f 3851//2099 3922//2101 3849//2102 +f 4022//2103 4029//2104 3992//2105 +f 4022//2103 3992//2105 3989//2106 +f 4049//2107 4065//2108 4029//2104 +f 4049//2107 4029//2104 4022//2103 +f 4100//2109 4079//2110 4098//2111 +f 4100//2109 4098//2111 4119//2112 +f 4084//2113 4040//2114 4095//2115 +f 4084//2113 4095//2115 4129//2116 +f 3986//2117 3937//2118 3954//2119 +f 3986//2117 3954//2119 4020//2120 +f 3937//2118 3892//2121 3901//2122 +f 3937//2118 3901//2122 3954//2119 +f 3845//2123 3791//2124 3796//2125 +f 3845//2123 3796//2125 3850//2126 +f 
3597//2127 3548//2128 3604//2129 +f 3597//2127 3604//2129 3639//2130 +f 3607//2131 3558//2132 3537//2133 +f 3607//2131 3537//2133 3589//2134 +f 3538//2135 3587//2136 3623//2137 +f 3538//2135 3623//2137 3554//2138 +f 3719//2139 3623//2137 3587//2136 +f 3719//2139 3587//2136 3647//2140 +f 3756//2141 3804//2142 3771//2143 +f 3756//2141 3771//2143 3715//2144 +f 3763//2145 3713//2146 3750//2147 +f 3763//2145 3750//2147 3797//2148 +f 3713//2146 3600//2149 3633//2150 +f 3713//2146 3633//2150 3750//2147 +f 3600//2149 3522//2151 3550//2152 +f 3600//2149 3550//2152 3633//2150 +f 3810//2153 3800//2154 3820//2155 +f 3810//2153 3820//2155 3838//2156 +f 3310//2157 3319//2158 3349//2070 +f 3310//2157 3349//2070 3345//2069 +f 4122//2159 4148//2160 4140//2161 +f 4122//2159 4140//2161 4116//2162 +f 3529//2163 3542//2164 3424//2165 +f 3529//2163 3424//2165 3418//2166 +f 3890//2167 3882//2168 3955//2169 +f 3890//2167 3955//2169 3963//2170 +f 3963//2170 3955//2169 4023//2171 +f 3963//2170 4023//2171 4031//2172 +f 4078//2173 4091//2174 4031//2172 +f 4078//2173 4031//2172 4023//2171 +f 4128//2175 4134//2176 4091//2174 +f 4128//2175 4091//2174 4078//2173 +f 4208//2177 4228//2178 4211//2179 +f 4208//2177 4211//2179 4198//2180 +f 3335//2181 3314//2182 3320//2183 +f 3335//2181 3320//2183 3336//2184 +f 3443//2185 3476//2186 3472//2187 +f 3443//2185 3472//2187 3436//2188 +f 3508//2189 3472//2187 3476//2186 +f 3508//2189 3476//2186 3517//2190 +f 3557//2191 3508//2189 3517//2190 +f 3557//2191 3517//2190 3555//2192 +f 3592//2193 3557//2191 3555//2192 +f 3592//2193 3555//2192 3581//2194 +f 3662//2195 3592//2193 3581//2194 +f 3662//2195 3581//2194 3628//2196 +f 3336//2184 3406//2197 3403//2198 +f 3336//2184 3403//2198 3335//2181 +f 3406//2197 3443//2185 3436//2188 +f 3406//2197 3436//2188 3403//2198 +f 3698//2199 3662//2195 3628//2196 +f 3698//2199 3628//2196 3646//2200 +f 3318//2201 3377//2202 3336//2184 +f 3318//2201 3336//2184 3320//2183 +f 3377//2202 3406//2197 3336//2184 +f 3377//2202 3478//2203 3443//2185 +f 3377//2202 3443//2185 3406//2197 +f 3478//2203 3476//2186 3443//2185 +f 3517//2190 3476//2186 3478//2203 +f 3517//2190 3478//2203 3553//2204 +f 3553//2204 3555//2192 3517//2190 +f 3553//2204 3622//2205 3581//2194 +f 3553//2204 3581//2194 3555//2192 +f 3622//2205 3628//2196 3581//2194 +f 3478//2203 3377//2202 3464//2206 +f 3316//2207 3464//2206 3377//2202 +f 3316//2207 3377//2202 3318//2201 +f 3464//2206 3601//2208 3553//2204 +f 3464//2206 3553//2204 3478//2203 +f 3601//2208 3668//2209 3622//2205 +f 3601//2208 3622//2205 3553//2204 +f 3498//2210 3741//2211 3601//2208 +f 3498//2210 3601//2208 3464//2206 +f 3313//2212 3498//2210 3464//2206 +f 3313//2212 3464//2206 3316//2207 +f 3741//2211 3668//2209 3601//2208 +f 3741//2211 3696//2213 3668//2209 +f 3214//2214 3505//2215 3308//2216 +f 3214//2214 3724//2217 3798//2218 +f 3214//2214 3798//2218 3505//2215 +f 3213//2219 3565//2220 3724//2217 +f 3213//2219 3724//2217 3214//2214 +f 3741//2211 3498//2210 3505//2215 +f 3741//2211 3505//2215 3798//2218 +f 3498//2210 3313//2212 3308//2216 +f 3498//2210 3308//2216 3505//2215 +f 3414//2221 3417//2222 3340//2223 +f 3414//2221 3340//2223 3339//2224 +f 3506//2225 3497//2226 3447//2227 +f 3506//2225 3447//2227 3445//2228 +f 3591//2229 3573//2230 3497//2226 +f 3591//2229 3497//2226 3506//2225 +f 3339//2224 3340//2223 3323//2231 +f 3339//2224 3323//2231 3321//2232 +f 3445//2228 3447//2227 3417//2222 +f 3445//2228 3417//2222 3414//2221 +f 3650//2233 3613//2234 3573//2230 +f 3650//2233 3573//2230 3591//2229 +f 3701//2235 3649//2236 
3613//2234 +f 3701//2235 3613//2234 3650//2233 +f 3697//2237 3645//2238 3701//2235 +f 3645//2238 3649//2236 3701//2235 +f 3324//2239 3323//2231 3340//2223 +f 3324//2239 3340//2223 3373//2240 +f 3324//2239 3373//2240 3312//2241 +f 3649//2236 3648//2242 3617//2243 +f 3649//2236 3617//2243 3613//2234 +f 3574//2244 3573//2230 3613//2234 +f 3574//2244 3613//2234 3617//2243 +f 3483//2245 3497//2226 3573//2230 +f 3483//2245 3573//2230 3574//2244 +f 3497//2226 3483//2245 3447//2227 +f 3447//2227 3483//2245 3373//2240 +f 3447//2227 3373//2240 3417//2222 +f 3417//2222 3373//2240 3340//2223 +f 3648//2242 3657//2246 3617//2243 +f 3617//2243 3657//2246 3549//2247 +f 3617//2243 3549//2247 3574//2244 +f 3574//2244 3549//2247 3483//2245 +f 3483//2245 3549//2247 3312//2241 +f 3483//2245 3312//2241 3373//2240 +f 3594//2248 3657//2246 3695//2249 +f 3594//2248 3695//2249 3740//2250 +f 3549//2247 3657//2246 3594//2248 +f 3549//2247 3594//2248 3309//2251 +f 3549//2247 3309//2251 3312//2241 +f 3677//2252 3594//2248 3740//2250 +f 3677//2252 3740//2250 3799//2218 +f 3594//2248 3677//2252 3222//2253 +f 3594//2248 3222//2253 3309//2251 +f 3799//2218 3725//2217 3561//2254 +f 3799//2218 3561//2254 3677//2252 +f 3222//2253 3677//2252 3561//2254 +f 3222//2253 3561//2254 3212//2255 +f 3564//2220 3212//2255 3561//2254 +f 3564//2220 3561//2254 3725//2217 +f 3212//2255 3564//2220 3211//2256 +f 3657//2246 3648//2242 3672//2257 +f 3657//2246 3672//2257 3695//2249 +f 3648//2242 3649//2236 3672//2257 +f 3649//2236 3645//2238 3672//2257 +f 3668//2209 3696//2213 3671//2258 +f 3668//2209 3671//2258 3622//2205 +f 3622//2205 3671//2258 3646//2200 +f 3622//2205 3646//2200 3628//2196 +f 3198//2259 3192//2260 3320//2183 +f 3198//2259 3320//2183 3314//2182 +f 3086//2261 3096//2262 3058//2263 +f 3086//2261 3058//2263 3055//2264 +f 3022//2265 3011//2266 3055//2264 +f 3022//2265 3055//2264 3058//2263 +f 2975//2267 2973//2268 3011//2266 +f 2975//2267 3011//2266 3022//2265 +f 2936//2269 2945//2270 2973//2268 +f 2936//2269 2973//2268 2975//2267 +f 2862//2271 2897//2272 2945//2270 +f 2862//2271 2945//2270 2936//2269 +f 3192//2260 3198//2259 3127//2273 +f 3192//2260 3127//2273 3117//2274 +f 3117//2274 3127//2273 3096//2262 +f 3117//2274 3096//2262 3086//2261 +f 2832//2275 2876//2276 2897//2272 +f 2832//2275 2897//2272 2862//2271 +f 3318//2201 3320//2183 3192//2260 +f 3318//2201 3192//2260 3149//2277 +f 3192//2260 3117//2274 3149//2277 +f 3149//2277 3117//2274 3086//2261 +f 3149//2277 3086//2261 3050//2278 +f 3086//2261 3055//2264 3050//2278 +f 3011//2266 2972//2279 3050//2278 +f 3011//2266 3050//2278 3055//2264 +f 3011//2266 2973//2268 2972//2279 +f 2972//2279 2973//2268 2945//2270 +f 2972//2279 2945//2270 2901//2280 +f 2945//2270 2897//2272 2901//2280 +f 3050//2278 3064//2281 3149//2277 +f 3316//2207 3318//2201 3149//2277 +f 3316//2207 3149//2277 3064//2281 +f 3064//2281 3050//2278 2972//2279 +f 3064//2281 2972//2279 2922//2282 +f 2922//2282 2972//2279 2901//2280 +f 2922//2282 2901//2280 2848//2283 +f 3023//2284 3064//2281 2922//2282 +f 3023//2284 2922//2282 2781//2285 +f 3313//2212 3316//2207 3064//2281 +f 3313//2212 3064//2281 3023//2284 +f 2922//2282 2848//2283 2781//2285 +f 2848//2283 2822//2286 2781//2285 +f 3308//2216 3014//2287 3214//2214 +f 3214//2214 3014//2287 2722//2288 +f 3214//2214 2722//2288 2787//2289 +f 3213//2219 3214//2214 2787//2289 +f 3213//2219 2787//2289 2959//2290 +f 2781//2285 2722//2288 3014//2287 +f 2781//2285 3014//2287 3023//2284 +f 3023//2284 3014//2287 3308//2216 +f 3023//2284 3308//2216 3313//2212 +f 3121//2291 
3194//2292 3190//2293 +f 3121//2291 3190//2293 3118//2294 +f 3028//2295 3089//2296 3088//2297 +f 3028//2295 3088//2297 3037//2298 +f 2944//2299 3028//2295 3037//2298 +f 2944//2299 3037//2298 2964//2300 +f 3194//2292 3321//2232 3323//2231 +f 3194//2292 3323//2231 3190//2293 +f 3089//2296 3121//2291 3118//2294 +f 3089//2296 3118//2294 3088//2297 +f 2885//2301 2944//2299 2964//2300 +f 2885//2301 2964//2300 2918//2302 +f 2834//2303 2885//2301 2918//2302 +f 2834//2303 2918//2302 2880//2304 +f 2834//2303 2880//2304 2833//2305 +f 2880//2304 2877//2306 2833//2305 +f 3324//2239 3160//2307 3190//2293 +f 3324//2239 3190//2293 3323//2231 +f 3312//2241 3160//2307 3324//2239 +f 2880//2304 2918//2302 2915//2308 +f 2880//2304 2915//2308 2878//2309 +f 2957//2310 2915//2308 2918//2302 +f 2957//2310 2918//2302 2964//2300 +f 3051//2311 2957//2310 2964//2300 +f 3051//2311 2964//2300 3037//2298 +f 3037//2298 3088//2297 3051//2311 +f 3088//2297 3118//2294 3160//2307 +f 3088//2297 3160//2307 3051//2311 +f 3118//2294 3190//2293 3160//2307 +f 2878//2309 2915//2308 2871//2312 +f 2915//2308 2957//2310 2981//2313 +f 2915//2308 2981//2313 2871//2312 +f 2957//2310 3051//2311 2981//2313 +f 3051//2311 3160//2307 3312//2241 +f 3051//2311 3312//2241 2981//2313 +f 2939//2314 2780//2285 2823//2286 +f 2939//2314 2823//2286 2871//2312 +f 2939//2314 2871//2312 2981//2313 +f 2981//2313 3312//2241 3309//2251 +f 2981//2313 3309//2251 2939//2314 +f 2904//2315 2722//2316 2780//2285 +f 2904//2315 2780//2285 2939//2314 +f 2939//2314 3309//2251 3222//2253 +f 2939//2314 3222//2253 2904//2315 +f 2722//2316 2904//2315 2960//2317 +f 2722//2316 2960//2317 2788//2318 +f 3222//2253 3212//2255 2960//2317 +f 3222//2253 2960//2317 2904//2315 +f 2958//2290 2788//2318 2960//2317 +f 2958//2290 2960//2317 3212//2255 +f 3212//2255 3211//2256 2958//2290 +f 2871//2312 2823//2286 2854//2319 +f 2871//2312 2854//2319 2878//2309 +f 2878//2309 2854//2319 2877//2306 +f 2878//2309 2877//2306 2880//2304 +f 2848//2283 2901//2280 2853//2320 +f 2848//2283 2853//2320 2822//2286 +f 2901//2280 2897//2272 2876//2276 +f 2901//2280 2876//2276 2853//2320 +f 3162//2321 3101//2322 3135//2323 +f 3162//2321 3135//2323 3174//2324 +f 3174//2324 3135//2323 3171//2325 +f 3174//2324 3171//2325 3187//2326 +f 3208//2327 3204//2328 3187//2326 +f 3208//2327 3187//2326 3201//2329 +f 3271//2330 3110//2331 3116//2332 +f 3271//2330 3116//2332 3270//2333 +f 3273//2334 3104//2335 3110//2331 +f 3273//2334 3110//2331 3271//2330 +f 3150//2336 3085//2337 3077//2338 +f 3150//2336 3077//2338 3138//2339 +f 3186//2340 3206//2341 3207//2342 +f 3186//2340 3207//2342 3197//2343 +f 3153//2344 3083//2345 3085//2337 +f 3153//2344 3085//2337 3150//2336 +f 3040//2346 2724//2347 2790//2348 +f 3040//2346 2790//2348 2980//2349 +f 2801//2350 2625//2351 2606//2352 +f 2801//2350 2606//2352 2902//2353 +f 2682//2354 2625//2351 2801//2350 +f 3046//2355 2916//2356 2991//2357 +f 3046//2355 2991//2357 3084//2358 +f 3189//2359 3183//2360 3133//2361 +f 3189//2359 3133//2361 3139//2362 +f 3017//2363 2900//2364 2924//2365 +f 3017//2363 2924//2365 3038//2366 +f 2965//2367 2660//2368 2705//2369 +f 2965//2367 2705//2369 3034//2370 +f 3209//2371 3205//2372 3188//2373 +f 3209//2371 3188//2373 3202//2374 +f 3240//2375 3243//2376 3209//2371 +f 3240//2375 3209//2371 3202//2374 +f 3138//2339 3077//2338 3075//2377 +f 3138//2339 3075//2377 3128//2378 +f 3188//2373 3205//2372 3206//2341 +f 3188//2373 3206//2341 3186//2340 +f 3132//2379 3179//2380 3178//2381 +f 3132//2379 3178//2381 3128//2378 +f 3136//2382 3182//2383 3179//2380 +f 
3136//2382 3179//2380 3132//2379 +f 3136//2382 3082//2384 3101//2322 +f 3136//2382 3101//2322 3162//2321 +f 3166//2385 3137//2386 3197//2343 +f 3166//2385 3197//2343 3201//2329 +f 3142//2387 3070//2388 3083//2345 +f 3142//2387 3083//2345 3153//2344 +f 3043//2389 3066//2390 3133//2361 +f 3043//2389 3133//2361 3106//2391 +f 2991//2357 3043//2389 3106//2391 +f 2991//2357 3106//2391 3084//2358 +f 3034//2370 2705//2369 2724//2347 +f 3034//2370 2724//2347 3040//2346 +f 3139//2362 3063//2392 3070//2388 +f 3139//2362 3070//2388 3142//2387 +f 2989//2393 2830//2394 2867//2395 +f 2989//2393 2867//2395 3004//2396 +f 2924//2365 2916//2356 3046//2355 +f 2924//2365 3046//2355 3038//2366 +f 3017//2363 3129//2397 3116//2332 +f 3017//2363 3116//2332 3004//2396 +f 3110//2331 3104//2335 2980//2349 +f 3110//2331 2980//2349 2989//2393 +f 2460//2398 2470//2399 2461//2400 +f 2460//2398 2461//2400 2449//2401 +f 2755//2402 2760//2403 2712//2404 +f 2755//2402 2712//2404 2707//2405 +f 2707//2405 2712//2404 2668//2406 +f 2707//2405 2668//2406 2663//2407 +f 2663//2407 2668//2406 2631//2408 +f 2663//2407 2631//2408 2628//2409 +f 2628//2409 2631//2408 2582//2410 +f 2628//2409 2582//2410 2580//2411 +f 2580//2411 2582//2410 2539//2412 +f 2580//2411 2539//2412 2536//2413 +f 2536//2413 2539//2412 2513//2414 +f 2536//2413 2513//2414 2504//2415 +f 2504//2415 2513//2414 2488//2416 +f 2504//2415 2488//2416 2481//2417 +f 2481//2417 2488//2416 2472//2418 +f 2481//2417 2472//2418 2462//2419 +f 2403//2420 2390//2421 2410//2422 +f 2403//2420 2410//2422 2429//2423 +f 2295//2424 2310//2425 2364//2426 +f 2295//2424 2364//2426 2384//2427 +f 2464//2428 2448//2429 2453//2430 +f 2464//2428 2453//2430 2469//2431 +f 2449//2401 2461//2400 2453//2430 +f 2449//2401 2453//2430 2444//2432 +f 2370//2433 2393//2434 2360//2435 +f 2370//2433 2360//2435 2312//2436 +f 2435//2437 2448//2429 2431//2438 +f 2435//2437 2431//2438 2423//2439 +f 2476//2440 2475//2441 2470//2399 +f 2476//2440 2470//2399 2472//2418 +f 2795//2442 2793//2443 2760//2403 +f 2795//2442 2760//2403 2755//2402 +f 3174//2324 3203//2444 3200//2445 +f 3174//2324 3200//2445 3162//2321 +f 3187//2326 3204//2328 3203//2444 +f 3187//2326 3203//2444 3174//2324 +f 3201//2329 3187//2326 3171//2325 +f 3201//2329 3171//2325 3166//2385 +f 2774//2446 2852//2447 2808//2448 +f 2774//2446 2808//2448 2750//2449 +f 2852//2447 2913//2450 2868//2451 +f 2852//2447 2868//2451 2808//2448 +f 2868//2451 2913//2450 2954//2452 +f 2868//2451 2954//2452 2921//2453 +f 2921//2453 2954//2452 2987//2454 +f 2921//2453 2987//2454 2967//2455 +f 2967//2455 2987//2454 3031//2456 +f 2967//2455 3031//2456 3001//2457 +f 3001//2457 3031//2456 3076//2458 +f 3001//2457 3076//2458 3057//2459 +f 3161//2460 3254//2461 3257//2462 +f 3161//2460 3257//2462 3145//2463 +f 3254//2461 3161//2460 3175//2464 +f 3254//2461 3175//2464 3251//2465 +f 3071//2466 3102//2467 3076//2458 +f 3071//2466 3076//2458 3031//2456 +f 3071//2466 3031//2456 2987//2454 +f 3071//2466 2987//2454 3033//2468 +f 3033//2468 2987//2454 2954//2452 +f 3033//2468 2954//2452 2984//2469 +f 2984//2469 2954//2452 2913//2450 +f 2984//2469 2913//2450 2947//2470 +f 2947//2470 2913//2450 2852//2447 +f 2947//2470 2852//2447 2896//2471 +f 2896//2471 2852//2447 2774//2446 +f 2896//2471 2774//2446 2794//2472 +f 2750//2449 2808//2448 2789//2473 +f 2750//2449 2789//2473 2738//2474 +f 2808//2448 2868//2451 2840//2475 +f 2808//2448 2840//2475 2789//2473 +f 2840//2475 2868//2451 2921//2453 +f 2840//2475 2921//2453 2905//2476 +f 2905//2476 2921//2453 2967//2455 +f 2905//2476 2967//2455 
2953//2477 +f 2953//2477 2967//2455 3001//2457 +f 2953//2477 3001//2457 2995//2478 +f 2995//2478 3001//2457 3057//2459 +f 2995//2478 3057//2459 3052//2479 +f 3145//2463 3257//2462 3249//2480 +f 3145//2463 3249//2480 3143//2481 +f 3145//2463 3057//2459 3076//2458 +f 3145//2463 3076//2458 3161//2460 +f 3161//2460 3076//2458 3102//2467 +f 3161//2460 3102//2467 3175//2464 +f 3251//2465 3175//2464 3184//2482 +f 3251//2465 3184//2482 3292//2483 +f 3033//2468 3042//2484 3087//2485 +f 3033//2468 3087//2485 3071//2466 +f 2984//2469 2998//2486 3042//2484 +f 2984//2469 3042//2484 3033//2468 +f 2947//2470 2966//2487 2998//2486 +f 2947//2470 2998//2486 2984//2469 +f 2896//2471 2917//2488 2966//2487 +f 2896//2471 2966//2487 2947//2470 +f 2794//2472 2829//2489 2917//2488 +f 2794//2472 2917//2488 2896//2471 +f 3184//2482 3175//2464 3102//2467 +f 3184//2482 3102//2467 3111//2490 +f 3071//2466 3087//2485 3111//2490 +f 3071//2466 3111//2490 3102//2467 +f 3199//2259 3315//2182 3292//2483 +f 3199//2259 3292//2483 3184//2482 +f 3095//2262 3087//2485 3042//2484 +f 3095//2262 3042//2484 3059//2491 +f 3059//2491 3042//2484 2998//2486 +f 3059//2491 2998//2486 3021//2492 +f 3021//2492 2998//2486 2966//2487 +f 3021//2492 2966//2487 2976//2493 +f 2976//2493 2966//2487 2917//2488 +f 2976//2493 2917//2488 2937//2494 +f 2937//2494 2917//2488 2829//2489 +f 2937//2494 2829//2489 2863//2271 +f 3199//2259 3184//2482 3111//2490 +f 3199//2259 3111//2490 3126//2495 +f 3126//2495 3111//2490 3087//2485 +f 3126//2495 3087//2485 3095//2262 +f 2863//2271 2829//2489 2797//2496 +f 2863//2271 2797//2496 2831//2497 +f 3145//2463 3143//2481 3052//2479 +f 3145//2463 3052//2479 3057//2459 +f 3173//2498 3176//2499 3093//2500 +f 3173//2498 3093//2500 3081//2501 +f 3081//2501 3093//2500 3045//2502 +f 3081//2501 3045//2502 3025//2503 +f 2956//2504 2974//2505 2882//2506 +f 2956//2504 2882//2506 2855//2507 +f 2855//2507 2882//2506 2803//2508 +f 2855//2507 2803//2508 2785//2509 +f 2785//2509 2803//2508 2756//2510 +f 2785//2509 2756//2510 2742//2511 +f 3180//2512 3105//2513 3093//2500 +f 3180//2512 3093//2500 3176//2499 +f 3093//2500 3105//2513 3067//2514 +f 3093//2500 3067//2514 3045//2502 +f 2907//2515 2882//2506 2974//2505 +f 2907//2515 2974//2505 2993//2516 +f 3305//2517 3302//2518 3173//2498 +f 3305//2517 3173//2498 3172//2519 +f 3172//2519 3173//2498 3081//2501 +f 3172//2519 3081//2501 3078//2520 +f 3078//2520 3081//2501 3025//2503 +f 3078//2520 3025//2503 3018//2521 +f 2946//2522 2956//2504 2855//2507 +f 2946//2522 2855//2507 2838//2523 +f 2838//2523 2855//2507 2785//2509 +f 2838//2523 2785//2509 2775//2524 +f 2775//2524 2785//2509 2742//2511 +f 2775//2524 2742//2511 2736//2525 +f 3173//2498 3302//2518 3298//2526 +f 3173//2498 3298//2526 3176//2499 +f 3025//2503 3045//2502 2974//2505 +f 3025//2503 2974//2505 2956//2504 +f 3303//2527 3180//2512 3176//2499 +f 3303//2527 3176//2499 3298//2526 +f 2993//2516 2974//2505 3045//2502 +f 2993//2516 3045//2502 3067//2514 +f 2993//2516 3067//2514 3080//2528 +f 2993//2516 3080//2528 3008//2529 +f 2882//2506 2907//2515 2820//2530 +f 2882//2506 2820//2530 2803//2508 +f 2803//2508 2820//2530 2773//2531 +f 2803//2508 2773//2531 2756//2510 +f 2820//2530 2846//2532 2800//2533 +f 2820//2530 2800//2533 2773//2531 +f 3193//2292 3122//2534 3115//2535 +f 3193//2292 3115//2535 3191//2536 +f 3090//2296 3027//2295 3008//2529 +f 3090//2296 3008//2529 3080//2528 +f 3027//2295 2943//2537 2923//2538 +f 3027//2295 2923//2538 3008//2529 +f 3322//2539 3193//2292 3191//2536 +f 3322//2539 3191//2536 3301//2540 +f 3122//2534 
3090//2296 3080//2528 +f 3122//2534 3080//2528 3115//2535 +f 2943//2537 2886//2541 2846//2532 +f 2943//2537 2846//2532 2923//2538 +f 2886//2541 2835//2542 2800//2533 +f 2886//2541 2800//2533 2846//2532 +f 2835//2542 2831//2497 2797//2496 +f 2835//2542 2797//2496 2800//2533 +f 3191//2536 3115//2535 3105//2513 +f 3191//2536 3105//2513 3180//2512 +f 3115//2535 3080//2528 3067//2514 +f 3115//2535 3067//2514 3105//2513 +f 2993//2516 3008//2529 2923//2538 +f 2993//2516 2923//2538 2907//2515 +f 3303//2527 3301//2540 3191//2536 +f 3303//2527 3191//2536 3180//2512 +f 2907//2515 2923//2538 2846//2532 +f 2907//2515 2846//2532 2820//2530 +f 2956//2504 2946//2522 3018//2521 +f 2956//2504 3018//2521 3025//2503 +f 2736//2525 2742//2511 2735//2543 +f 2736//2525 2735//2543 2728//2544 +f 2728//2544 2735//2543 2750//2449 +f 2728//2544 2750//2449 2738//2474 +f 2742//2511 2756//2510 2749//2545 +f 2742//2511 2749//2545 2735//2543 +f 2735//2543 2749//2545 2774//2446 +f 2735//2543 2774//2446 2750//2449 +f 2756//2510 2773//2531 2769//2546 +f 2756//2510 2769//2546 2749//2545 +f 2774//2446 2749//2545 2769//2546 +f 2774//2446 2769//2546 2794//2472 +f 2797//2496 2769//2546 2773//2531 +f 2797//2496 2773//2531 2800//2533 +f 2769//2546 2797//2496 2829//2489 +f 2769//2546 2829//2489 2794//2472 +f 2799//2547 2734//2548 2726//2549 +f 2799//2547 2726//2549 2779//2550 +f 2669//2551 2677//2552 2603//2553 +f 2669//2551 2603//2553 2601//2554 +f 2544//2555 2540//2556 2507//2557 +f 2544//2555 2507//2557 2514//2558 +f 2514//2558 2507//2557 2480//2559 +f 2514//2558 2480//2559 2487//2560 +f 2448//2429 2464//2428 2451//2561 +f 2448//2429 2451//2561 2431//2438 +f 2485//2562 2446//2563 2489//2564 +f 2485//2562 2489//2564 2525//2565 +f 2561//2566 2543//2567 2592//2568 +f 2561//2566 2592//2568 2612//2569 +f 2612//2569 2592//2568 2637//2570 +f 2612//2569 2637//2570 2650//2571 +f 2690//2572 2683//2573 2737//2574 +f 2690//2572 2737//2574 2744//2575 +f 2894//2576 2856//2577 2892//2578 +f 2894//2576 2892//2578 2934//2579 +f 2908//2580 2941//2581 2926//2582 +f 2908//2580 2926//2582 2895//2583 +f 2409//2584 2416//2585 2442//2586 +f 2409//2584 2442//2586 2437//2587 +f 2414//2588 2408//2589 2428//2590 +f 2414//2588 2428//2590 2426//2591 +f 2456//2592 2424//2593 2446//2563 +f 2456//2592 2446//2563 2485//2562 +f 2650//2571 2637//2570 2683//2573 +f 2650//2571 2683//2573 2690//2572 +f 2908//2580 2894//2576 2934//2579 +f 2908//2580 2934//2579 2941//2581 +f 2601//2554 2603//2553 2540//2556 +f 2601//2554 2540//2556 2544//2555 +f 2464//2428 2487//2560 2480//2559 +f 2464//2428 2480//2559 2451//2561 +f 2525//2565 2489//2564 2543//2567 +f 2525//2565 2543//2567 2561//2566 +f 2423//2439 2418//2594 2427//2595 +f 2423//2439 2427//2595 2435//2437 +f 2879//2596 2799//2547 2779//2550 +f 2879//2596 2779//2550 2845//2597 +f 2408//2589 2409//2584 2437//2587 +f 2408//2589 2437//2587 2428//2590 +f 2418//2594 2414//2588 2426//2591 +f 2418//2594 2426//2591 2427//2595 +f 2734//2548 2677//2552 2669//2551 +f 2734//2548 2669//2551 2726//2549 +f 2470//2399 2460//2398 2462//2419 +f 2470//2399 2462//2419 2472//2418 +f 2527//2598 2499//2599 2476//2440 +f 2527//2598 2476//2440 2482//2600 +f 2482//2600 2476//2440 2495//2601 +f 2482//2600 2495//2601 2528//2602 +f 2600//2603 2598//2604 2549//2605 +f 2600//2603 2549//2605 2550//2606 +f 2550//2606 2549//2605 2526//2607 +f 2550//2606 2526//2607 2529//2608 +f 2529//2608 2526//2607 2499//2599 +f 2529//2608 2499//2599 2527//2598 +f 2600//2603 2657//2609 2659//2610 +f 2600//2603 2659//2610 2598//2604 +f 2657//2609 2701//2611 2706//2612 +f 
[... OBJ face data: several thousand additional `f v//vn v//vn v//vn` triangle records (vertex-index//normal-index pairs, with the texture-coordinate slot left empty) belonging to the mesh file added by this diff; the raw records are omitted here for readability ...]
3716//3699 3621//3701 3723//3702 +f 3384//3691 3369//3689 3230//2732 +f 3384//3691 3230//2732 3229//2731 +f 3582//3685 3621//3701 3526//3703 +f 3582//3685 3526//3703 3496//3682 +f 3816//3675 3744//3681 3891//3704 +f 3816//3675 3891//3704 3948//3705 +f 3479//3706 3217//2741 3224//2740 +f 3479//3706 3224//2740 3705//3707 +f 3923//3708 3949//3709 3826//3710 +f 3923//3708 3826//3710 3705//3707 +f 3965//3711 3956//3712 3949//3709 +f 3965//3711 3949//3709 3923//3708 +f 3826//3710 3761//3713 3479//3706 +f 3826//3710 3479//3706 3705//3707 +f 3948//3705 3891//3704 3934//3714 +f 3948//3705 3934//3714 3967//3715 +f 3723//3702 3814//3716 3832//3717 +f 3723//3702 3832//3717 3716//3699 +f 3723//3702 3655//3718 3757//3719 +f 3723//3702 3757//3719 3814//3716 +f 3621//3701 3582//3685 3655//3718 +f 3621//3701 3655//3718 3723//3702 +f 3375//3686 3496//3682 3526//3703 +f 3375//3686 3526//3703 3391//3698 +f 3582//3685 3493//3684 3528//3690 +f 3582//3685 3528//3690 3655//3718 +f 3231//2754 3361//3688 3362//3687 +f 3231//2754 3362//3687 3232//2755 +f 3375//3686 3233//2734 3232//2755 +f 3375//3686 3232//2755 3362//3687 +f 3655//3718 3528//3690 3570//3692 +f 3655//3718 3570//3692 3757//3719 +f 3520//3720 3390//3721 3391//3698 +f 3520//3720 3391//3698 3526//3703 +f 3390//3721 3235//2758 3234//2735 +f 3390//3721 3234//2735 3391//3698 +f 3230//2732 3369//3689 3361//3688 +f 3230//2732 3361//3688 3231//2754 +f 3562//3722 3683//3723 3546//3724 +f 3562//3722 3546//3724 3521//3725 +f 3248//2772 3245//2775 3374//3726 +f 3248//2772 3374//3726 3376//3727 +f 3376//3727 3374//3726 3562//3722 +f 3376//3727 3562//3722 3521//3725 +f 3378//3728 3376//3727 3521//3725 +f 3378//3728 3521//3725 3546//3724 +f 3241//2777 3248//2772 3376//3727 +f 3241//2777 3376//3727 3378//3728 +f 3853//3729 3832//3717 3916//3730 +f 3853//3729 3916//3730 3941//3731 +f 4226//3732 4181//3733 4176//3734 +f 3611//3700 3520//3720 3526//3703 +f 3611//3700 3526//3703 3621//3701 +f 3426//3735 3275//2762 3272//2761 +f 3426//3735 3272//2761 3425//3736 +f 3616//3737 3563//3738 3611//3700 +f 3616//3737 3611//3700 3716//3699 +f 3426//3735 3425//3736 3541//3739 +f 3426//3735 3541//3739 3567//3740 +f 3541//3739 3499//3741 3563//3738 +f 3541//3739 3563//3738 3616//3737 +f 4133//3742 4181//3733 4189//3743 +f 4133//3742 4189//3743 4156//3744 +f 3660//3745 3619//3746 3236//3747 +f 3660//3745 3236//3747 3293//2813 +f 3964//3748 3924//3749 3619//3746 +f 3964//3748 3619//3746 3660//3745 +f 4110//3750 4006//3751 3924//3749 +f 4110//3750 3924//3749 3964//3748 +f 4092//3752 4115//3753 4039//3754 +f 4092//3752 4039//3754 4017//3755 +f 3885//3756 3853//3729 3941//3731 +f 3885//3756 3941//3731 3959//3757 +f 4186//3758 4158//3759 4156//3744 +f 4186//3758 4156//3744 4189//3743 +f 4175//3760 4144//3761 4158//3759 +f 4175//3760 4158//3759 4186//3758 +f 4143//3762 4117//3763 4144//3761 +f 4143//3762 4144//3761 4175//3760 +f 4092//3752 4050//3764 4117//3763 +f 4092//3752 4117//3763 4143//3762 +f 3941//3731 3916//3730 3994//3765 +f 3941//3731 3994//3765 4017//3755 +f 3669//3766 3847//3767 3757//3719 +f 3669//3766 3757//3719 3570//3692 +f 3275//2762 3426//3735 3416//3768 +f 3275//2762 3416//3768 3295//2802 +f 3674//3769 3567//3740 3708//3770 +f 3674//3769 3708//3770 3770//3771 +f 3599//3772 3531//3773 3567//3740 +f 3599//3772 3567//3740 3674//3769 +f 3550//2152 3485//3774 3531//3773 +f 3550//2152 3531//3773 3599//3772 +f 4050//3764 4092//3752 4017//3755 +f 4050//3764 4017//3755 3994//3765 +f 3426//3735 3567//3740 3531//3773 +f 3426//3735 3531//3773 3416//3768 +f 4061//3775 4150//3776 
4176//3734 +f 4061//3775 4176//3734 4104//3777 +f 3770//3771 3708//3770 3832//3717 +f 3770//3771 3832//3717 3853//3729 +f 4181//3733 4133//3742 4104//3777 +f 4181//3733 4104//3777 4176//3734 +f 4006//3751 4110//3750 4150//3776 +f 4006//3751 4150//3776 4061//3775 +f 3221//2827 3241//2777 3378//3728 +f 3221//2827 3378//3728 3398//3695 +f 3425//3736 3410//3778 3499//3741 +f 3425//3736 3499//3741 3541//3739 +f 3410//3778 3382//3779 3467//3780 +f 3410//3778 3467//3780 3499//3741 +f 3567//3740 3541//3739 3616//3737 +f 3567//3740 3616//3737 3708//3770 +f 3499//3741 3467//3780 3491//3781 +f 3499//3741 3491//3781 3563//3738 +f 3467//3780 3382//3779 3379//3782 +f 3467//3780 3379//3782 3491//3781 +f 3764//3783 3674//3769 3770//3771 +f 3764//3783 3770//3771 3822//3784 +f 3674//3769 3764//3783 3710//3785 +f 3674//3769 3710//3785 3599//3772 +f 3239//2834 3259//2837 3412//3786 +f 3239//2834 3412//3786 3368//3787 +f 3522//2151 3600//2149 3586//3518 +f 3522//2151 3586//3518 3515//3517 +f 3247//2823 3238//2826 3379//3782 +f 3247//2823 3379//3782 3382//3779 +f 3998//2091 4010//2088 3944//3788 +f 3998//2091 3944//3788 3936//3789 +f 4024//2087 4027//3790 3958//3791 +f 4024//2087 3958//3791 3952//3792 +f 4027//3790 4021//2085 3953//3793 +f 4027//3790 3953//3793 3958//3791 +f 3940//3794 3996//2084 3972//2081 +f 3940//3794 3972//2081 3911//3795 +f 3953//3793 4021//2085 3996//2084 +f 3953//3793 3996//2084 3940//3794 +f 4010//2088 4024//2087 3952//3792 +f 4010//2088 3952//3792 3944//3788 +f 3911//3795 3972//2081 3935//2080 +f 3911//3795 3935//2080 3880//3796 +f 3981//2092 3998//2091 3936//3789 +f 3981//2092 3936//3789 3913//3797 +f 3751//3798 3629//2072 3583//3799 +f 3751//3798 3583//3799 3670//3800 +f 3522//2151 3462//3615 3485//3774 +f 3522//2151 3485//3774 3550//2152 +f 3367//3614 3304//2693 3300//2840 +f 3367//3614 3300//2840 3383//3801 +f 3435//3693 3225//2838 3219//2664 +f 3435//3693 3219//2664 3438//3680 +f 3218//2663 3220//2839 3415//3694 +f 3218//2663 3415//3694 3430//3679 +f 3379//3782 3238//2826 3235//2758 +f 3379//3782 3235//2758 3390//3721 +f 3295//2802 3416//3768 3383//3801 +f 3295//2802 3383//3801 3300//2840 +f 3382//3779 3410//3778 3265//2842 +f 3382//3779 3265//2842 3247//2823 +f 4259//3802 4272//3803 4305//3804 +f 4259//3802 4305//3804 4286//3805 +f 3532//3806 3579//3807 3576//3808 +f 3532//3806 3576//3808 3524//3809 +f 3393//3374 3387//3373 3348//3810 +f 3393//3374 3348//3810 3353//3811 +f 3311//2875 3299//2876 3353//3811 +f 3311//2875 3353//3811 3348//3810 +f 3448//3376 3455//3375 3532//3806 +f 3448//3376 3532//3806 3524//3809 +f 3262//2877 3328//3380 3327//3379 +f 3262//2877 3327//3379 3266//2878 +f 3686//3812 3707//3813 3635//3814 +f 3686//3812 3635//3814 3631//3815 +f 3641//3816 3651//3817 3707//3813 +f 3641//3816 3707//3813 3686//3812 +f 3584//3818 3588//3819 3651//3817 +f 3584//3818 3651//3817 3641//3816 +f 3543//3820 3588//3819 3584//3818 +f 3543//3820 3584//3818 3534//3821 +f 3394//3400 3409//3822 3480//3823 +f 3394//3400 3480//3823 3474//3824 +f 3343//3377 3409//3822 3394//3400 +f 3343//3377 3394//3400 3337//3378 +f 3579//3807 3631//3815 3635//3814 +f 3579//3807 3635//3814 3576//3808 +f 3480//3823 3543//3820 3534//3821 +f 3480//3823 3534//3821 3474//3824 +f 4272//3803 4259//3802 4127//3825 +f 4272//3803 4127//3825 4132//3826 +f 4132//3826 4127//3825 3843//3827 +f 4132//3826 3843//3827 3848//3412 +f 3276//2892 3280//2894 3848//3412 +f 3276//2892 3848//3412 3843//3827 +f 4259//3802 4241//3828 4089//3829 +f 4259//3802 4089//3829 4127//3825 +f 4127//3825 4089//3829 3805//3830 +f 4127//3825 
3805//3830 3843//3827 +f 3226//2899 3422//3831 3384//3691 +f 3226//2899 3384//3691 3229//2731 +f 3422//3831 3669//3766 3570//3692 +f 3422//3831 3570//3692 3384//3691 +f 3552//3384 3492//3383 3428//3372 +f 3739//3385 3841//3832 3806//3386 +f 4286//3805 4255//3833 4241//3828 +f 4286//3805 4241//3828 4259//3802 +f 4305//3804 4272//3803 4301//3834 +f 4305//3804 4301//3834 4308//3835 +f 4308//3835 4301//3834 4306//3836 +f 4308//3835 4306//3836 4310//3837 +f 4306//3836 4292//3838 4296//3839 +f 4306//3836 4296//3839 4310//3837 +f 4240//3840 4216//3841 4110//3750 +f 4240//3840 4110//3750 4196//3842 +f 4255//3833 4240//3840 4196//3842 +f 4226//3732 4176//3734 4230//3843 +f 4150//3776 4222//3844 4230//3843 +f 4150//3776 4230//3843 4176//3734 +f 4222//3844 4150//3776 4110//3750 +f 4222//3844 4110//3750 4216//3841 +f 4196//3842 4241//3828 4255//3833 +f 4296//3839 4292//3838 4287//3845 +f 4296//3839 4287//3845 4290//3846 +f 4324//3847 4331//3848 4341//3849 +f 4324//3847 4341//3849 4335//3850 +f 4313//3851 4318//3852 4331//3848 +f 4313//3851 4331//3848 4324//3847 +f 4295//3853 4312//3854 4309//3855 +f 4264//3856 4265//3857 4257//3858 +f 4264//3856 4257//3858 4226//3732 +f 4268//3594 4257//3858 4265//3857 +f 4268//3594 4265//3857 4273//3859 +f 4273//3859 4283//3860 4275//3595 +f 4273//3859 4275//3595 4268//3594 +f 4275//3595 4283//3860 4290//3846 +f 4275//3595 4290//3846 4287//3845 +f 4303//3861 4289//3862 4235//3863 +f 4235//3863 4289//3862 4263//3864 +f 4299//3865 4303//3861 4235//3863 +f 4235//3863 4302//3866 4317//3867 +f 4235//3863 4263//3864 4302//3866 +f 4285//3868 4304//3869 4323//3870 +f 4285//3868 4323//3870 4334//3871 +f 4323//3870 4304//3869 4307//3872 +f 4312//3854 4323//3870 4307//3872 +f 4312//3854 4307//3872 4309//3855 +f 4296//3839 4299//3865 4318//3852 +f 4296//3839 4318//3852 4313//3851 +f 4299//3865 4296//3839 4290//3846 +f 4333//3873 4342//3874 4340//3875 +f 4333//3873 4340//3875 4329//3876 +f 4267//3877 4264//3856 4226//3732 +f 4315//3878 4319//3879 4311//3880 +f 4315//3878 4311//3880 4281//3881 +f 4230//3843 4281//3881 4311//3880 +f 4230//3843 4311//3880 4267//3877 +f 4221//3882 4215//3883 4314//3884 +f 4221//3882 4314//3884 4280//3885 +f 4221//3882 4280//3885 4229//3886 +f 4230//3843 4267//3877 4226//3732 +f 4289//3862 4316//3887 4263//3864 +f 4273//3859 4265//3857 4264//3856 +f 4273//3859 4264//3856 4291//3888 +f 4291//3888 4298//3889 4283//3860 +f 4291//3888 4283//3860 4273//3859 +f 4283//3860 4298//3889 4299//3865 +f 4283//3860 4299//3865 4290//3846 +f 4316//3887 4311//3880 4319//3879 +f 4316//3887 4319//3879 4322//3890 +f 4321//3891 4282//2962 4235//3863 +f 4321//3891 4235//3863 4317//3867 +f 4321//3891 4325//2959 4282//2962 +f 3213//2219 2959//2290 3210//3892 +f 3213//2219 3210//3892 3565//2220 +f 4339//3893 4325//2959 4321//3891 +f 4339//3893 4321//3891 4338//3894 +f 3094//3066 3056//3065 3069//2977 +f 3094//3066 3069//2977 3099//2976 +f 3534//3821 3535//3895 3461//3896 +f 3534//3821 3461//3896 3474//3824 +f 4316//3887 4289//3862 4297//3897 +f 3266//2878 3327//3379 3326//3369 +f 3266//2878 3326//3369 3264//2991 +f 3455//3375 3458//3392 3512//3898 +f 3455//3375 3512//3898 3519//3899 +f 3532//3806 3455//3375 3519//3899 +f 3532//3806 3519//3899 3560//3900 +f 3631//3815 3579//3807 3602//3901 +f 3631//3815 3602//3901 3642//3902 +f 3680//3903 3686//3812 3631//3815 +f 3680//3903 3631//3815 3642//3902 +f 3665//3904 3641//3816 3686//3812 +f 3665//3904 3686//3812 3680//3903 +f 3641//3816 3665//3904 3606//3905 +f 3641//3816 3606//3905 3584//3818 +f 3534//3821 3584//3818 3606//3905 +f 
3534//3821 3606//3905 3535//3895 +f 3394//3400 3474//3824 3461//3896 +f 3394//3400 3461//3896 3365//3399 +f 3458//3392 3459//3906 3516//3907 +f 3458//3392 3516//3907 3512//3898 +f 3459//3906 3451//3398 3511//3908 +f 3459//3906 3511//3908 3516//3907 +f 3451//3398 3432//3366 3487//3909 +f 3451//3398 3487//3909 3511//3908 +f 3439//3910 3473//3911 3487//3909 +f 3439//3910 3487//3909 3432//3366 +f 4327//3912 4337//3913 4319//3879 +f 4327//3912 4319//3879 4315//3878 +f 3442//3914 3504//3915 3466//3916 +f 3442//3914 3466//3916 3434//3917 +f 3408//3918 3381//3618 3481//3617 +f 3408//3918 3481//3617 3484//3919 +f 3381//3618 3388//3481 3482//3480 +f 3381//3618 3482//3480 3481//3617 +f 3461//3896 3535//3895 3504//3915 +f 3461//3896 3504//3915 3442//3914 +f 3484//3919 3554//2138 3543//3820 +f 3484//3919 3543//3820 3480//3823 +f 3473//3911 3439//3910 3433//3568 +f 3473//3911 3433//3568 3454//3571 +f 3094//3066 3112//3057 3100//2986 +f 3094//3066 3100//2986 3079//2985 +f 4329//3876 4340//3875 4337//3913 +f 4329//3876 4337//3913 4327//3912 +f 4335//3850 4341//3849 4342//3874 +f 4335//3850 4342//3874 4333//3873 +f 4332//3920 4254//3921 4285//3868 +f 4332//3920 4285//3868 4334//3871 +f 4254//3921 4332//3920 4328//3922 +f 4254//3921 4328//3922 4326//3923 +f 4254//3921 4326//3923 4239//3924 +f 4239//3924 4326//3923 4314//3884 +f 4239//3924 4314//3884 4215//3883 +f 3448//3376 3524//3809 3525//3925 +f 3448//3376 3525//3925 3449//3382 +f 3317//3010 3311//2875 3348//3810 +f 3317//3010 3348//3810 3350//3926 +f 3387//3373 3389//3381 3350//3926 +f 3387//3373 3350//3926 3348//3810 +f 3524//3809 3576//3808 3583//3799 +f 3524//3809 3583//3799 3525//3925 +f 3635//3814 3707//3813 3765//3927 +f 3635//3814 3765//3927 3670//3800 +f 3707//3813 3774//3928 3835//3929 +f 3707//3813 3835//3929 3765//3927 +f 3774//3928 3837//3930 3880//3796 +f 3774//3928 3880//3796 3835//3929 +f 3863//3931 3822//3784 3885//3756 +f 3863//3931 3885//3756 3913//3797 +f 3822//3784 3770//3771 3853//3729 +f 3822//3784 3853//3729 3885//3756 +f 3708//3770 3616//3737 3716//3699 +f 3708//3770 3716//3699 3832//3717 +f 4282//2962 4300//2961 4299//3865 +f 4282//2962 4299//3865 4235//3863 +f 4300//2961 4318//3852 4299//3865 +f 4320//2960 4331//3848 4318//3852 +f 4320//2960 4318//3852 4300//2961 +f 4336//3932 4341//3849 4331//3848 +f 4336//3932 4331//3848 4320//2960 +f 4336//3932 4339//3893 4342//3874 +f 4336//3932 4342//3874 4341//3849 +f 4339//3893 4338//3894 4340//3875 +f 4339//3893 4340//3875 4342//3874 +f 4338//3894 4330//3933 4337//3913 +f 4338//3894 4337//3913 4340//3875 +f 4322//3890 4319//3879 4337//3913 +f 4322//3890 4337//3913 4330//3933 +f 4311//3880 4316//3887 4297//3897 +f 4311//3880 4297//3897 4267//3877 +f 4297//3897 4289//3862 4264//3856 +f 4297//3897 4264//3856 4267//3877 +f 4289//3862 4303//3861 4291//3888 +f 4289//3862 4291//3888 4264//3856 +f 4303//3861 4298//3889 4291//3888 +f 4303//3861 4299//3865 4298//3889 +f 4322//3890 4302//3866 4263//3864 +f 4322//3890 4263//3864 4316//3887 +f 4322//3890 4330//3933 4317//3867 +f 4322//3890 4317//3867 4302//3866 +f 4317//3867 4330//3933 4338//3894 +f 4317//3867 4338//3894 4321//3891 +f 4325//2959 4339//3893 4336//3932 +f 4325//2959 4336//3932 4320//2960 +f 3819//3934 3513//3935 3479//3706 +f 3819//3934 3479//3706 3761//3713 +f 3919//3936 3603//3937 3513//3935 +f 3919//3936 3513//3935 3819//3934 +f 3956//3712 3965//3711 3967//3715 +f 3956//3712 3967//3715 3934//3714 +f 4294//3938 4271//3939 4260//3940 +f 4294//3938 4260//3940 4288//2056 +f 4306//3836 4294//3938 4288//2056 +f 4306//3836 4288//2056 
4292//3838 +f 3274//3027 3273//2334 3428//3372 +f 3274//3027 3428//3372 3492//3383 +f 3900//3941 3909//2066 3806//3386 +f 3900//3941 3806//3386 3841//3832 +f 3973//3942 3977//2065 3909//2066 +f 3973//3942 3909//2066 3900//3941 +f 4046//3943 4053//2064 3977//2065 +f 4046//3943 3977//2065 3973//3942 +f 4107//3944 4146//2061 4053//2064 +f 4107//3944 4053//2064 4046//3943 +f 4165//2060 4146//2061 4107//3944 +f 4271//3939 4234//3559 4219//2059 +f 4271//3939 4219//2059 4260//3940 +f 3718//3578 3703//3612 3743//3945 +f 3718//3578 3743//3945 3745//3579 +f 3752//3946 3256//3038 3260//3947 +f 3752//3946 3260//3947 3603//3937 +f 3919//3936 3971//3948 3752//3946 +f 3919//3936 3752//3946 3603//3937 +f 3971//3948 3919//3936 4066//3949 +f 3971//3948 4066//3949 4153//3950 +f 4205//3951 4153//3950 4066//3949 +f 4205//3951 4066//3949 4145//3952 +f 4174//3953 4205//3951 4145//3952 +f 4174//3953 4145//3952 4130//3954 +f 4174//3953 4130//3954 4056//3955 +f 4174//3953 4056//3955 4070//3956 +f 3974//3957 4070//3956 4056//3955 +f 3974//3957 4056//3955 3970//3958 +f 3974//3957 3970//3958 3878//3959 +f 3974//3957 3878//3959 3887//3960 +f 3887//3960 3878//3959 3712//3961 +f 3887//3960 3712//3961 3714//3962 +f 3714//3962 3712//3961 3503//3963 +f 3714//3962 3503//3963 3510//3964 +f 3510//3964 3503//3963 3368//3787 +f 3510//3964 3368//3787 3412//3786 +f 3420//3965 3421//3569 3433//3568 +f 3420//3965 3433//3568 3439//3910 +f 3370//3966 3402//3967 3421//3569 +f 3370//3966 3421//3569 3420//3965 +f 3370//3966 3385//3968 3411//3969 +f 3370//3966 3411//3969 3402//3967 +f 3385//3968 3442//3914 3434//3917 +f 3385//3968 3434//3917 3411//3969 +f 3495//3970 3473//3911 3454//3571 +f 3495//3970 3454//3571 3471//3971 +f 3495//3970 3471//3971 3477//3972 +f 3495//3970 3477//3972 3518//3973 +f 3518//3973 3477//3972 3466//3916 +f 3518//3973 3466//3916 3504//3915 +f 3421//3569 3402//3967 3440//3570 +f 3402//3967 3411//3969 3440//3570 +f 3411//3969 3434//3917 3440//3570 +f 3477//3972 3471//3971 3440//3570 +f 3471//3971 3454//3571 3440//3570 +f 4003//3643 4001//3626 4047//3644 +f 3840//3619 3927//3622 3920//3974 +f 3840//3619 3920//3974 3808//3975 +f 3907//3976 3920//3974 4003//3643 +f 3927//3622 4001//3626 4003//3643 +f 3927//3622 4003//3643 3920//3974 +f 3743//3945 3703//3612 3808//3975 +f 3743//3945 3808//3975 3812//3977 +f 3808//3975 3703//3612 3759//3611 +f 3808//3975 3759//3611 3840//3619 +f 3812//3977 3808//3975 3920//3974 +f 3812//3977 3920//3974 3907//3976 +f 3979//3646 3929//3645 3907//3976 +f 3979//3646 3907//3976 4003//3643 +f 3907//3976 3929//3645 3871//3648 +f 3828//3649 3812//3977 3907//3976 +f 3828//3649 3907//3976 3871//3648 +f 3779//3650 3812//3977 3828//3649 +f 3745//3579 3743//3945 3812//3977 +f 3745//3579 3812//3977 3779//3650 +f 4000//3647 3979//3646 4003//3643 +f 3352//3395 3355//3394 3252//3106 +f 3352//3395 3252//3106 3250//3105 +f 3351//3397 3352//3395 3250//3105 +f 3351//3397 3250//3105 3244//3107 +f 3334//3448 3351//3397 3244//3107 +f 3334//3448 3244//3107 3253//3108 +f 3253//3108 3258//3109 3331//3449 +f 3253//3108 3331//3449 3334//3448 +f 3331//3449 3258//3109 3263//3110 +f 3331//3449 3263//3110 3330//3371 +f 3330//3371 3263//3110 3264//2991 +f 3330//3371 3264//2991 3326//3369 +f 3332//3388 3381//3618 3408//3918 +f 3332//3388 3408//3918 3341//3389 +f 3332//3388 3240//2375 3242//2709 +f 3332//3388 3242//2709 3381//3618 +f 3299//2876 3252//3106 3355//3394 +f 3299//2876 3355//3394 3353//3811 +f 3405//3391 3393//3374 3353//3811 +f 3405//3391 3353//3811 3355//3394 +f 3343//3377 3341//3389 3408//3918 +f 3343//3377 
3408//3918 3409//3822 +f 3405//3391 3401//3393 3459//3906 +f 3405//3391 3459//3906 3458//3392 +f 3401//3393 3395//3396 3451//3398 +f 3401//3393 3451//3398 3459//3906 +f 3371//3363 3395//3396 3351//3397 +f 3371//3363 3351//3397 3334//3448 +f 3439//3910 3432//3366 3397//3365 +f 3439//3910 3397//3365 3420//3965 +f 3420//3965 3397//3365 3364//3368 +f 3420//3965 3364//3368 3370//3966 +f 3370//3966 3364//3368 3365//3399 +f 3370//3966 3365//3399 3385//3968 +f 3365//3399 3461//3896 3442//3914 +f 3365//3399 3442//3914 3385//3968 +f 3519//3899 3512//3898 3580//3978 +f 3519//3899 3580//3978 3560//3900 +f 3580//3978 3644//3979 3602//3901 +f 3580//3978 3602//3901 3560//3900 +f 3527//3980 3571//3981 3511//3908 +f 3527//3980 3511//3908 3487//3909 +f 3487//3909 3473//3911 3495//3970 +f 3487//3909 3495//3970 3527//3980 +f 3527//3980 3495//3970 3518//3973 +f 3527//3980 3518//3973 3545//3982 +f 3527//3980 3545//3982 3612//3983 +f 3527//3980 3612//3983 3571//3981 +f 3717//3984 3780//3985 3837//3930 +f 3717//3984 3837//3930 3774//3928 +f 3830//3986 3764//3783 3822//3784 +f 3830//3986 3822//3784 3863//3931 +f 3512//3898 3516//3907 3596//3987 +f 3512//3898 3596//3987 3580//3978 +f 3516//3907 3511//3908 3571//3981 +f 3516//3907 3571//3981 3596//3987 +f 3571//3981 3612//3983 3656//3988 +f 3571//3981 3656//3988 3596//3987 +f 3596//3987 3656//3988 3644//3979 +f 3596//3987 3644//3979 3580//3978 +f 3987//3989 3933//3990 3932//3632 +f 3987//3989 3932//3632 3982//3631 +f 3982//3631 4009//3634 4018//3991 +f 3982//3631 4018//3991 3987//3989 +f 4009//3634 4033//3636 4037//3992 +f 4009//3634 4037//3992 4018//3991 +f 4033//3636 4054//3443 4055//3993 +f 4033//3636 4055//3993 4037//3992 +f 4068//3434 4055//3993 4054//3443 +f 4068//3434 4054//3443 4059//3442 +f 3867//3637 3932//3632 3933//3990 +f 3867//3637 3933//3990 3865//3994 +f 3867//3637 3865//3994 3813//3995 +f 3867//3637 3813//3995 3818//3639 +f 3818//3639 3813//3995 3766//3996 +f 3818//3639 3766//3996 3777//3641 +f 3777//3641 3766//3996 3722//3576 +f 3777//3641 3722//3576 3738//3575 +f 3690//3574 3718//3578 3738//3575 +f 3718//3578 3746//3580 3738//3575 +f 3815//3620 3767//3613 3768//3446 +f 3815//3620 3768//3446 3817//3543 +f 3817//3543 3862//3545 3858//3621 +f 3817//3543 3858//3621 3815//3620 +f 3862//3545 3899//3547 3898//3623 +f 3862//3545 3898//3623 3858//3621 +f 3899//3547 3947//3549 3946//3624 +f 3899//3547 3946//3624 3898//3623 +f 3946//3624 3947//3549 3990//3551 +f 3946//3624 3990//3551 3984//3625 +f 3990//3551 4016//3553 4008//3627 +f 3990//3551 4008//3627 3984//3625 +f 4016//3553 4041//3555 4034//3628 +f 4016//3553 4034//3628 4008//3627 +f 4041//3555 4057//3441 4052//3440 +f 4041//3555 4052//3440 4034//3628 +f 3691//3572 3693//2968 3673//2967 +f 3691//3572 3673//2967 3664//3997 +f 3667//3577 3664//3997 3673//2967 +f 3667//3577 3673//2967 3675//2966 +f 3791//2124 3726//3998 3709//3999 +f 3791//2124 3709//3999 3796//2125 +f 3784//3664 3839//3663 3831//4000 +f 3784//3664 3831//4000 3776//4001 +f 3773//3445 3732//3444 3731//4002 +f 3773//3445 3731//4002 3776//4001 +f 3784//3664 3737//4003 3726//3998 +f 3784//3664 3726//3998 3791//2124 +f 3329//3390 3328//3380 3262//2877 +f 3329//3390 3262//2877 3246//3129 +f 3774//3928 3707//3813 3651//3817 +f 3774//3928 3651//3817 3717//3984 +f 3717//3984 3651//3817 3588//3819 +f 3717//3984 3588//3819 3620//4004 +f 3620//4004 3588//3819 3543//3820 +f 3620//4004 3543//3820 3554//2138 +f 3484//3919 3480//3823 3409//3822 +f 3484//3919 3409//3822 3408//3918 +f 3326//3369 3327//3379 3337//3378 +f 3326//3369 3337//3378 3333//3370 +f 
3767//3613 3718//3578 3735//3447 +f 3767//3613 3735//3447 3768//3446 +f 3449//3382 3525//3925 3551//2071 +f 3449//3382 3551//2071 3460//3401 +f 3319//2158 3317//3010 3350//3926 +f 3319//2158 3350//3926 3349//2070 +f 3389//3381 3396//2067 3349//2070 +f 3389//3381 3349//2070 3350//3926 +f 3525//3925 3583//3799 3629//2072 +f 3525//3925 3629//2072 3551//2071 +f 3670//3800 3765//3927 3829//2075 +f 3670//3800 3829//2075 3751//3798 +f 3884//2076 3829//2075 3765//3927 +f 3884//2076 3765//3927 3835//3929 +f 3832//3717 3814//3716 3893//4005 +f 3832//3717 3893//4005 3916//3730 +f 3936//3789 3944//3788 3896//4006 +f 3936//3789 3896//4006 3888//4007 +f 3952//3792 3958//3791 3906//4008 +f 3952//3792 3906//4008 3903//4009 +f 3958//3791 3953//3793 3904//4010 +f 3958//3791 3904//4010 3906//4008 +f 3894//4011 3940//3794 3911//3795 +f 3894//4011 3911//3795 3873//4012 +f 3835//3929 3880//3796 3935//2080 +f 3835//3929 3935//2080 3884//2076 +f 3953//3793 3940//3794 3894//4011 +f 3953//3793 3894//4011 3904//4010 +f 3944//3788 3952//3792 3903//4009 +f 3944//3788 3903//4009 3896//4006 +f 3911//3795 3880//3796 3837//3930 +f 3911//3795 3837//3930 3873//4012 +f 3913//3797 3936//3789 3888//4007 +f 3913//3797 3888//4007 3863//3931 +f 3913//3797 3885//3756 3959//3757 +f 3913//3797 3959//3757 3981//2092 +f 3852//2078 3787//4013 3751//3798 +f 3852//2078 3751//3798 3829//2075 +f 3787//4013 3682//2073 3629//2072 +f 3787//4013 3629//2072 3751//3798 +f 3787//4013 3852//2078 3850//2126 +f 3787//4013 3850//2126 3796//2125 +f 3910//2077 3976//2079 3954//2119 +f 3910//2077 3954//2119 3901//2122 +f 4182//4014 4151//3590 4142//3589 +f 4182//4014 4142//3589 4179//4015 +f 3558//2132 3490//3402 3469//3405 +f 3558//2132 3469//3405 3537//2133 +f 3769//4016 3614//3414 3687//2095 +f 3769//4016 3687//2095 3783//2098 +f 3939//4017 3857//4018 3849//2102 +f 3939//4017 3849//2102 3922//2101 +f 3997//4019 3992//2105 4029//2104 +f 3997//4019 4029//2104 4045//4020 +f 4045//4020 4029//2104 4065//2108 +f 4045//4020 4065//2108 4097//4021 +f 4131//3609 4154//3560 4139//3558 +f 4131//3609 4139//3558 4119//2112 +f 3852//2078 3910//2077 3901//2122 +f 3852//2078 3901//2122 3850//2126 +f 3470//3424 3548//2128 3537//2133 +f 3470//3424 3537//2133 3469//3405 +f 3939//4017 3922//2101 3992//2105 +f 3939//4017 3992//2105 3997//4019 +f 4100//2109 4126//3557 4097//4021 +f 4100//2109 4097//4021 4065//2108 +f 4163//4022 4179//4015 4142//3589 +f 4163//4022 4142//3589 4129//2116 +f 4135//4023 4163//4022 4129//2116 +f 4135//4023 4129//2116 4095//2115 +f 3976//2079 4063//4024 4020//2120 +f 3976//2079 4020//2120 3954//2119 +f 4063//4024 4135//4023 4095//2115 +f 4063//4024 4095//2115 4020//2120 +f 4136//4025 4162//4026 4154//3560 +f 4136//4025 4154//3560 4131//3609 +f 3540//3406 3598//4027 3687//2095 +f 3540//3406 3687//2095 3614//3414 +f 4183//3605 4152//3591 4151//3590 +f 4183//3605 4151//3590 4182//4014 +f 4152//3591 4183//3605 4180//3608 +f 4152//3591 4180//3608 4148//2160 +f 4140//2161 4173//4028 4162//4026 +f 4140//2161 4162//4026 4136//4025 +f 3857//4018 3769//4016 3783//2098 +f 3857//4018 3783//2098 3849//2102 +f 3682//2073 3787//4013 3796//2125 +f 3682//2073 3796//2125 3709//3999 +f 3598//4027 3540//3406 3490//3402 +f 3598//4027 3490//3402 3558//2132 +f 3572//2074 3682//2073 3709//3999 +f 3572//2074 3709//3999 3604//2129 +f 3225//2838 3435//3693 3422//3831 +f 3225//2838 3422//3831 3226//2899 +f 3435//3693 3762//3678 3669//3766 +f 3435//3693 3669//3766 3422//3831 +f 3762//3678 3912//3677 3847//3767 +f 3762//3678 3847//3767 3669//3766 +f 3654//3673 3598//4027 
3558//2132 +f 3654//3673 3558//2132 3607//2131 +f 3366//4029 3307//3072 3306//3071 +f 3366//4029 3306//3071 3356//4030 +f 3427//3403 3356//4030 3347//3415 +f 3427//3403 3347//3415 3400//3404 +f 3737//4003 3784//3664 3776//4001 +f 3737//4003 3776//4001 3731//4002 +f 3637//3668 3624//3667 3643//4031 +f 3637//3668 3643//4031 3659//4032 +f 3681//3573 3722//3576 3706//4033 +f 3681//3573 3706//4033 3659//4032 +f 3685//3674 3654//3673 3607//2131 +f 3685//3674 3607//2131 3637//3668 +f 3726//3998 3639//2130 3604//2129 +f 3726//3998 3604//2129 3709//3999 +f 3691//3572 3688//4034 3731//4002 +f 3691//3572 3731//4002 3732//3444 +f 3737//4003 3676//3666 3639//2130 +f 3737//4003 3639//2130 3726//3998 +f 3692//2965 3693//2968 3735//3447 +f 3692//2965 3735//3447 3718//3578 +f 3579//3807 3532//3806 3560//3900 +f 3579//3807 3560//3900 3602//3901 +f 3644//3979 3680//3903 3642//3902 +f 3644//3979 3642//3902 3602//3901 +f 3504//3915 3535//3895 3545//3982 +f 3504//3915 3545//3982 3518//3973 +f 3545//3982 3535//3895 3606//3905 +f 3545//3982 3606//3905 3612//3983 +f 3665//3904 3656//3988 3612//3983 +f 3665//3904 3612//3983 3606//3905 +f 3656//3988 3665//3904 3680//3903 +f 3656//3988 3680//3903 3644//3979 +f 3576//3808 3635//3814 3670//3800 +f 3576//3808 3670//3800 3583//3799 +f 3935//2080 3976//2079 3910//2077 +f 3935//2080 3910//2077 3884//2076 +f 3572//2074 3470//3424 3460//3401 +f 3572//2074 3460//3401 3551//2071 +f 4125//4035 4109//2086 4021//2085 +f 4125//4035 4021//2085 4027//3790 +f 4118//2090 4125//4035 4027//3790 +f 4118//2090 4027//3790 4024//2087 +f 4099//2094 4108//2089 4010//2088 +f 4099//2094 4010//2088 3998//2091 +f 3996//2084 4088//2083 4038//2082 +f 3996//2084 4038//2082 3972//2081 +f 3961//4036 3994//3765 3916//3730 +f 3961//4036 3916//3730 3893//4005 +f 3909//2066 3969//2063 3824//3422 +f 3909//2066 3824//3422 3806//3386 +f 3991//4037 4012//4038 3897//4039 +f 3991//4037 3897//4039 3870//3423 +f 4051//4040 3915//4041 3897//4039 +f 4051//4040 3897//4039 4012//4038 +f 4075//4042 3921//4043 3915//4041 +f 4075//4042 3915//4041 4051//4040 +f 4090//4044 3925//3410 3921//4043 +f 4090//4044 3921//4043 4075//4042 +f 4090//4044 4112//4045 3905//3411 +f 4090//4044 3905//3411 3925//3410 +f 4089//3829 4028//4046 3748//4047 +f 4089//3829 3748//4047 3805//3830 +f 3632//3419 3608//3418 3775//4048 +f 3632//3419 3775//4048 3789//4049 +f 3882//2168 3890//2167 3827//4050 +f 3882//2168 3827//4050 3807//4051 +f 3857//4018 3872//4052 3775//4048 +f 3857//4018 3775//4048 3769//4016 +f 3876//4053 3882//2168 3807//4051 +f 3876//4053 3807//4051 3789//4049 +f 3890//2167 3900//3941 3841//3832 +f 3890//2167 3841//3832 3827//4050 +f 3809//4054 3813//3995 3865//3994 +f 3809//4054 3865//3994 3861//4055 +f 3749//3651 3758//4056 3809//4054 +f 3749//3651 3809//4054 3803//3652 +f 4252//4057 4250//4058 4262//4059 +f 4252//4057 4262//4059 4266//4060 +f 4270//3599 4249//3601 4247//4061 +f 4270//3599 4247//4061 4258//4062 +f 4249//3601 4231//3603 4227//4063 +f 4249//3601 4227//4063 4247//4061 +f 4248//4064 4252//4057 4266//4060 +f 4248//4064 4266//4060 4258//4062 +f 4274//3593 4262//4059 4256//4065 +f 4274//3593 4256//4065 4268//3594 +f 4209//3606 4183//3605 4182//4014 +f 4209//3606 4182//4014 4207//4066 +f 4231//3603 4209//3606 4207//4066 +f 4231//3603 4207//4066 4227//4063 +f 4245//4067 4242//4068 4251//4069 +f 4245//4067 4251//4069 4256//4065 +f 4242//4068 4210//4070 4220//4071 +f 4242//4068 4220//4071 4251//4069 +f 4181//3733 4226//3732 4220//4071 +f 4181//3733 4220//4071 4189//3743 +f 3849//2102 3783//2098 3792//2097 +f 3849//2102 
3792//2097 3851//2099 +f 3813//3995 3809//4054 3758//4056 +f 3813//3995 3758//4056 3766//3996 +f 3930//4072 3861//4055 3865//3994 +f 3930//4072 3865//3994 3933//3990 +f 3987//3989 4018//3991 4019//4073 +f 3987//3989 4019//4073 3988//4074 +f 4018//3991 4037//3992 4043//4075 +f 4018//3991 4043//4075 4019//4073 +f 4068//3434 4076//3430 4060//3429 +f 4068//3434 4060//3429 4055//3993 +f 4025//3552 3993//3550 3995//4076 +f 4025//3552 3995//4076 4032//4077 +f 3950//3548 3902//3546 3908//4078 +f 3950//3548 3908//4078 3957//4079 +f 3902//3546 3866//3544 3874//4080 +f 3902//3546 3874//4080 3908//4078 +f 3821//3542 3773//3445 3776//4001 +f 3821//3542 3776//4001 3831//4000 +f 3688//4034 3691//3572 3664//3997 +f 3688//4034 3664//3997 3658//4081 +f 3667//3577 3681//3573 3659//4032 +f 3667//3577 3659//4032 3643//4031 +f 4069//3541 4067//3556 4077//3583 +f 4069//3541 4077//3583 4083//3582 +f 4048//3554 4025//3552 4032//4077 +f 4048//3554 4032//4077 4058//3588 +f 3866//3544 3821//3542 3831//4000 +f 3866//3544 3831//4000 3874//4080 +f 3664//3997 3667//3577 3643//4031 +f 3664//3997 3643//4031 3658//4081 +f 3933//3990 3987//3989 3988//4074 +f 3933//3990 3988//4074 3930//4072 +f 4037//3992 4055//3993 4060//3429 +f 4037//3992 4060//3429 4043//4075 +f 3993//3550 3950//3548 3957//4079 +f 3993//3550 3957//4079 3995//4076 +f 3722//3576 3766//3996 3758//4056 +f 3722//3576 3758//4056 3706//4033 +f 4080//3432 4069//3541 4083//3582 +f 4080//3432 4083//3582 4086//4082 +f 4101//3670 4103//3669 4102//3671 +f 4101//3670 4102//3671 4096//4083 +f 4094//3438 4085//3433 4096//4083 +f 4094//3438 4096//4083 4102//3671 +f 4085//3433 4080//3432 4086//4082 +f 4085//3433 4086//4082 4096//4083 +f 4086//4082 4101//3670 4096//4083 +f 4217//3435 4261//4084 4195//4085 +f 4217//3435 4195//4085 4169//3436 +f 4173//4028 4193//4086 4187//4087 +f 4173//4028 4187//4087 4162//4026 +f 4116//2162 4140//2161 4136//4025 +f 4116//2162 4136//4025 4111//3672 +f 4237//4088 4253//4089 4260//3940 +f 4261//4084 4217//3435 4234//3559 +f 4261//4084 4234//3559 4271//3939 +f 3962//3676 4011//4090 3961//4036 +f 3962//3676 3961//4036 3912//3677 +f 4007//4091 3948//3705 3967//3715 +f 4007//4091 3967//3715 3999//4092 +f 3891//3704 3842//4093 3895//4094 +f 3891//3704 3895//4094 3934//3714 +f 3842//4093 3793//4095 3868//4096 +f 3842//4093 3868//4096 3895//4094 +f 3793//4095 3683//3723 3811//4097 +f 3793//4095 3811//4097 3868//4096 +f 3683//3723 3562//3722 3727//4098 +f 3683//3723 3727//4098 3811//4097 +f 3245//2775 3239//2834 3368//3787 +f 3245//2775 3368//3787 3374//3726 +f 4145//3952 4066//3949 3966//4099 +f 4145//3952 3966//4099 4036//4100 +f 3864//4101 3878//3959 3970//3958 +f 3864//4101 3970//3958 3942//4102 +f 4056//3955 4004//4103 3942//4102 +f 4056//3955 3942//4102 3970//3958 +f 4004//4103 4056//3955 4130//3954 +f 4004//4103 4130//3954 4035//4104 +f 4145//3952 4036//4100 4035//4104 +f 4145//3952 4035//4104 4130//3954 +f 3603//3937 3260//3947 3215//4105 +f 3603//3937 3215//4105 3513//3935 +f 3966//4099 4066//3949 3919//3936 +f 3966//4099 3919//3936 3819//3934 +f 3562//3722 3503//3963 3712//3961 +f 3562//3722 3712//3961 3727//4098 +f 3485//3774 3383//3801 3416//3768 +f 3485//3774 3416//3768 3531//3773 +f 3390//3721 3520//3720 3491//3781 +f 3390//3721 3491//3781 3379//3782 +f 3803//3652 3809//4054 3861//4055 +f 3803//3652 3861//4055 3859//3653 +f 3859//3653 3861//4055 3930//4072 +f 3859//3653 3930//4072 3928//3654 +f 3988//4074 4019//4073 4014//3656 +f 3988//4074 4014//3656 3985//3655 +f 4019//4073 4043//4075 4042//3657 +f 4019//4073 4042//3657 4014//3656 +f 
4081//3431 4076//3430 4085//3433 +f 4081//3431 4085//3433 4094//3438 +f 4005//3659 4044//3658 4032//4077 +f 4005//3659 4032//4077 3995//4076 +f 3917//3661 3968//3660 3957//4079 +f 3917//3661 3957//4079 3908//4078 +f 3879//3662 3917//3661 3908//4078 +f 3879//3662 3908//4078 3874//4080 +f 3737//4003 3731//4002 3688//4034 +f 3737//4003 3688//4034 3676//3666 +f 3676//3666 3688//4034 3658//4081 +f 3676//3666 3658//4081 3638//3665 +f 3637//3668 3659//4032 3706//4033 +f 3637//3668 3706//4033 3685//3674 +f 4077//3583 4058//3588 4074//3585 +f 4077//3583 4074//3585 4087//3584 +f 4044//3658 4074//3585 4058//3588 +f 4044//3658 4058//3588 4032//4077 +f 3874//4080 3831//4000 3839//3663 +f 3874//4080 3839//3663 3879//3662 +f 3624//3667 3638//3665 3658//4081 +f 3624//3667 3658//4081 3643//4031 +f 3930//4072 3988//4074 3985//3655 +f 3930//4072 3985//3655 3928//3654 +f 4042//3657 4043//4075 4060//3429 +f 4042//3657 4060//3429 4064//3428 +f 3968//3660 4005//3659 3995//4076 +f 3968//3660 3995//4076 3957//4079 +f 3685//3674 3706//4033 3758//4056 +f 3685//3674 3758//4056 3749//3651 +f 4101//3670 4086//4082 4083//3582 +f 4101//3670 4083//3582 4093//3581 +f 4194//4106 4185//4107 4198//2180 +f 4194//4106 4198//2180 4211//2179 +f 4212//3610 4194//4106 4211//2179 +f 4212//3610 4211//2179 4237//4088 +f 4185//4107 4177//3561 4187//4087 +f 4185//4107 4187//4087 4198//2180 +f 3374//3726 3368//3787 3503//3963 +f 3374//3726 3503//3963 3562//3722 +f 3978//4108 4006//3751 4061//3775 +f 3978//4108 4061//3775 3999//4092 +f 3878//3959 3864//4101 3727//4098 +f 3878//3959 3727//4098 3712//3961 +f 4208//2177 4193//4086 4202//3607 +f 4208//2177 4202//3607 4224//3604 +f 3729//2096 3687//2095 3598//4027 +f 3729//2096 3598//4027 3654//3673 +f 3366//4029 3380//4109 3267//3259 +f 3366//4029 3267//3259 3307//3072 +f 3450//3407 3366//4029 3356//4030 +f 3450//3407 3356//4030 3427//3403 +f 3246//3129 3243//2376 3325//3387 +f 3246//3129 3325//3387 3329//3390 +f 3284//4110 3291//3262 3630//3409 +f 3284//4110 3630//3409 3730//3408 +f 4112//4045 4090//4044 4243//4111 +f 4112//4045 4243//4111 4244//4112 +f 4272//3803 4244//4112 4243//4111 +f 4272//3803 4243//4111 4301//3834 +f 4244//4112 4272//3803 4132//3826 +f 4244//4112 4132//3826 4112//4045 +f 4112//4045 4132//3826 3848//3412 +f 4112//4045 3848//3412 3905//3411 +f 3280//2894 3284//4110 3730//3408 +f 3280//2894 3730//3408 3848//3412 +f 3282//4113 3276//2892 3843//3827 +f 3282//4113 3843//3827 3805//3830 +f 4196//3842 4110//3750 3964//3748 +f 4196//3842 3964//3748 4028//4046 +f 4028//4046 4089//3829 4241//3828 +f 4028//4046 4241//3828 4196//3842 +f 4028//4046 3964//3748 3660//3745 +f 4028//4046 3660//3745 3748//4047 +f 3748//4047 3660//3745 3293//2813 +f 3748//4047 3293//2813 3297//3266 +f 3297//3266 3282//4113 3805//3830 +f 3297//3266 3805//3830 3748//4047 +f 3228//3267 3236//3747 3619//3746 +f 3228//3267 3619//3746 3636//4114 +f 3636//4114 3619//3746 3924//3749 +f 3636//4114 3924//3749 3914//4115 +f 3914//4115 3924//3749 4006//3751 +f 3914//4115 4006//3751 3978//4108 +f 3224//2740 3228//3267 3636//4114 +f 3224//2740 3636//4114 3705//3707 +f 3705//3707 3636//4114 3914//4115 +f 3705//3707 3914//4115 3923//3708 +f 3923//3708 3914//4115 3978//4108 +f 3923//3708 3978//4108 3965//3711 +f 3538//2135 3554//2138 3484//3919 +f 3538//2135 3484//3919 3481//3617 +f 3383//3801 3485//3774 3462//3615 +f 3383//3801 3462//3615 3367//3614 +f 3949//3709 3975//4116 3889//4117 +f 3949//3709 3889//4117 3826//3710 +f 3975//4116 4036//4100 3966//4099 +f 3975//4116 3966//4099 3889//4117 +f 3960//4118 3983//4119 
3943//4120 +f 3960//4118 3943//4120 3918//4121 +f 3960//4118 4004//4103 4035//4104 +f 3960//4118 4035//4104 3983//4119 +f 3983//4119 4035//4104 4036//4100 +f 3983//4119 4036//4100 3975//4116 +f 3983//4119 3975//4116 3949//3709 +f 3983//4119 3949//3709 3956//3712 +f 3983//4119 3956//3712 3943//4120 +f 3889//4117 3761//3713 3826//3710 +f 3513//3935 3215//4105 3217//2741 +f 3513//3935 3217//2741 3479//3706 +f 3966//4099 3819//3934 3761//3713 +f 3966//4099 3761//3713 3889//4117 +f 3938//4122 3960//4118 3918//4121 +f 3938//4122 3918//4121 3886//4123 +f 3942//4102 3938//4122 3886//4123 +f 3942//4102 3886//4123 3864//4101 +f 3938//4122 3942//4102 4004//4103 +f 3938//4122 4004//4103 3960//4118 +f 3605//4124 3286//3276 3281//3275 +f 3605//4124 3281//3275 3569//3420 +f 3605//4124 3627//4125 3287//3277 +f 3605//4124 3287//3277 3286//3276 +f 3640//4126 3289//3280 3287//3277 +f 3640//4126 3287//3277 3627//4125 +f 3630//3409 3291//3262 3289//3280 +f 3630//3409 3289//3280 3640//4126 +f 3398//3695 3415//3694 3220//2839 +f 3398//3695 3220//2839 3221//2827 +f 3683//3723 3793//4095 3595//3696 +f 3683//3723 3595//3696 3546//3724 +f 3398//3695 3378//3728 3546//3724 +f 3398//3695 3546//3724 3595//3696 +f 3265//2842 3410//3778 3425//3736 +f 3265//2842 3425//3736 3272//2761 +f 3962//3676 3816//3675 3948//3705 +f 3962//3676 3948//3705 4026//4127 +f 3842//4093 3891//3704 3744//3681 +f 3842//4093 3744//3681 3653//3697 +f 3653//3697 3595//3696 3793//4095 +f 3653//3697 3793//4095 3842//4093 +f 4257//3858 4268//3594 4256//4065 +f 4257//3858 4256//4065 4251//4069 +f 4011//4090 4050//3764 3994//3765 +f 4011//4090 3994//3765 3961//4036 +f 3981//2092 3959//3757 4039//3754 +f 3981//2092 4039//3754 4072//2093 +f 4120//4128 4156//3744 4158//3759 +f 4120//4128 4158//3759 4144//3761 +f 4117//3763 4082//4129 4120//4128 +f 4117//3763 4120//4128 4144//3761 +f 4050//3764 4011//4090 4082//4129 +f 4050//3764 4082//4129 4117//3763 +f 4226//3732 4257//3858 4251//4069 +f 4226//3732 4251//4069 4220//4071 +f 3520//3720 3611//3700 3563//3738 +f 3520//3720 3563//3738 3491//3781 +f 3486//3413 3380//4109 3366//4029 +f 3486//3413 3366//4029 3450//3407 +f 3392//2068 3470//3424 3469//3405 +f 3392//2068 3469//3405 3400//3404 +f 3494//3417 3514//3416 3407//3427 +f 3494//3417 3407//3427 3399//4130 +f 3807//4051 3827//4050 3702//3426 +f 3807//4051 3702//3426 3666//3425 +f 3769//4016 3775//4048 3608//3418 +f 3769//4016 3608//3418 3614//3414 +f 3789//4049 3807//4051 3666//3425 +f 3789//4049 3666//3425 3632//3419 +f 3827//4050 3841//3832 3739//3385 +f 3827//4050 3739//3385 3702//3426 +f 3605//4124 3569//3420 3870//3423 +f 3605//4124 3870//3423 3897//4039 +f 4137//3437 4169//3436 4012//4038 +f 4137//3437 4012//4038 3991//4037 +f 4223//4131 4195//4085 4261//4084 +f 4223//4131 4261//4084 4293//4132 +f 4243//4111 4223//4131 4293//4132 +f 4243//4111 4293//4132 4301//3834 +f 4137//3437 3991//4037 3969//2063 +f 4137//3437 3969//2063 4159//2062 +f 3627//4125 3605//4124 3897//4039 +f 3627//4125 3897//4039 3915//4041 +f 4169//3436 4195//4085 4051//4040 +f 4169//3436 4051//4040 4012//4038 +f 3640//4126 3627//4125 3915//4041 +f 3640//4126 3915//4041 3921//4043 +f 4223//4131 4075//4042 4051//4040 +f 4223//4131 4051//4040 4195//4085 +f 3640//4126 3921//4043 3925//3410 +f 3640//4126 3925//3410 3630//3409 +f 4243//4111 4090//4044 4075//4042 +f 4243//4111 4075//4042 4223//4131 +f 4133//3742 4071//4133 4007//4091 +f 4133//3742 4007//4091 4104//3777 +f 4026//4127 3948//3705 4007//4091 +f 4026//4127 4007//4091 4071//4133 +f 4293//4132 4261//4084 4271//3939 +f 4293//4132 
4271//3939 4294//3938 +f 4301//3834 4293//4132 4294//3938 +f 4301//3834 4294//3938 4306//3836 +f 4210//4070 4186//3758 4189//3743 +f 4210//4070 4189//3743 4220//4071 +f 4192//4134 4175//3760 4186//3758 +f 4192//4134 4186//3758 4210//4070 +f 4167//4135 4143//3762 4175//3760 +f 4167//4135 4175//3760 4192//4134 +f 4115//3753 4092//3752 4143//3762 +f 4115//3753 4143//3762 4167//4135 +f 3959//3757 3941//3731 4017//3755 +f 3959//3757 4017//3755 4039//3754 +f 3814//3716 3757//3719 3847//3767 +f 3814//3716 3847//3767 3893//4005 +f 4120//4128 4071//4133 4133//3742 +f 4120//4128 4133//3742 4156//3744 +f 4082//4129 4026//4127 4071//4133 +f 4082//4129 4071//4133 4120//4128 +f 4011//4090 3962//3676 4026//4127 +f 4011//4090 4026//4127 4082//4129 +f 3893//4005 3847//3767 3912//3677 +f 3893//4005 3912//3677 3961//4036 +f 3492//3383 3501//3421 3278//3287 +f 3492//3383 3278//3287 3274//3027 +f 3715//2144 3771//2143 3719//2139 +f 3715//2144 3719//2139 3647//2140 +f 4180//3608 4202//3607 4193//4086 +f 4180//3608 4193//4086 4173//4028 +f 4177//3561 4154//3560 4162//4026 +f 4177//3561 4162//4026 4187//4087 +f 4227//4063 4207//4066 4200//4136 +f 4227//4063 4200//4136 4218//4137 +f 3825//4138 3785//4139 3719//2139 +f 3825//4138 3719//2139 3771//2143 +f 3780//3985 3721//4140 3785//4139 +f 3780//3985 3785//4139 3836//4141 +f 3860//4142 3894//4011 3873//4012 +f 3860//4142 3873//4012 3836//4141 +f 4015//4143 4013//4144 4062//4145 +f 4015//4143 4062//4145 4073//4146 +f 4013//4144 4015//4143 3951//4147 +f 4013//4144 3951//4147 3945//4148 +f 3945//4148 3951//4147 3876//4053 +f 3945//4148 3876//4053 3872//4052 +f 3872//4052 3876//4053 3789//4049 +f 3872//4052 3789//4049 3775//4048 +f 3269//3303 3268//3304 3399//4130 +f 3269//3303 3399//4130 3407//3427 +f 4062//4145 4045//4020 4097//4021 +f 4062//4145 4097//4021 4114//4149 +f 4123//4150 4114//4149 4141//3567 +f 4123//4150 4141//3567 4149//3566 +f 4088//2083 4147//4151 4124//4152 +f 4088//2083 4124//4152 4038//2082 +f 4184//4153 4201//4154 4190//4155 +f 4184//4153 4190//4155 4168//4156 +f 4201//4154 4218//4137 4200//4136 +f 4201//4154 4200//4136 4190//4155 +f 4147//4151 4184//4153 4168//4156 +f 4147//4151 4168//4156 4124//4152 +f 4141//3567 4126//3557 4139//3558 +f 4141//3567 4139//3558 4161//3562 +f 4166//2057 4165//2060 4134//2176 +f 4111//3672 4136//4025 4131//3609 +f 4111//3672 4131//3609 4106//3439 +f 4178//3564 4170//3563 4185//4107 +f 4178//3564 4185//4107 4194//4106 +f 4191//2058 4178//3564 4194//4106 +f 4191//2058 4194//4106 4212//3610 +f 4170//3563 4161//3562 4177//3561 +f 4170//3563 4177//3561 4185//4107 +f 4212//3610 4237//4088 4260//3940 +f 4212//3610 4260//3940 4219//2059 +f 4149//3566 4155//3565 4128//2175 +f 4149//3566 4128//2175 4123//4150 +f 3870//3423 3824//3422 3969//2063 +f 3870//3423 3969//2063 3991//4037 +f 3552//3384 3542//2164 3702//3426 +f 3552//3384 3702//3426 3739//3385 +f 3963//2170 3973//3942 3900//3941 +f 3963//2170 3900//3941 3890//2167 +f 4031//2172 4046//3943 3973//3942 +f 4031//2172 3973//3942 3963//2170 +f 4091//2174 4107//3944 4046//3943 +f 4091//2174 4046//3943 4031//2172 +f 4134//2176 4165//2060 4107//3944 +f 4134//2176 4107//3944 4091//2174 +f 4228//2178 4253//4089 4237//4088 +f 4228//2178 4237//4088 4211//2179 +f 4279//3596 4275//3595 4287//3845 +f 4279//3596 4287//3845 4284//2055 +f 4284//2055 4276//2054 4278//3598 +f 4284//2055 4278//3598 4279//3596 +f 4276//2054 4253//4089 4269//3600 +f 4276//2054 4269//3600 4278//3598 +f 4253//4089 4228//2178 4246//3602 +f 4253//4089 4246//3602 4269//3600 +f 4228//2178 4208//2177 4224//3604 +f 
4228//2178 4224//3604 4246//3602 +f 4224//3604 4202//3607 4209//3606 +f 4224//3604 4209//3606 4231//3603 +f 4198//2180 4187//4087 4193//4086 +f 4198//2180 4193//4086 4208//2177 +f 4284//2055 4287//3845 4292//3838 +f 4284//2055 4292//3838 4288//2056 +f 3281//3275 3278//3287 3501//3421 +f 3281//3275 3501//3421 3569//3420 +f 4072//2093 4039//3754 4115//3753 +f 4072//2093 4115//3753 4138//4157 +f 4138//4157 4115//3753 4167//4135 +f 4138//4157 4167//4135 4188//4158 +f 4188//4158 4167//4135 4192//4134 +f 4188//4158 4192//4134 4213//4159 +f 4213//4159 4192//4134 4210//4070 +f 4213//4159 4210//4070 4242//4068 +f 4274//3593 4277//3597 4266//4060 +f 4274//3593 4266//4060 4262//4059 +f 4258//4062 4247//4061 4238//4160 +f 4258//4062 4238//4160 4248//4064 +f 4247//4061 4227//4063 4218//4137 +f 4247//4061 4218//4137 4238//4160 +f 4277//3597 4270//3599 4258//4062 +f 4277//3597 4258//4062 4266//4060 +f 4250//4058 4245//4067 4256//4065 +f 4250//4058 4256//4065 4262//4059 +f 4105//3586 4084//2113 4129//2116 +f 4105//3586 4129//2116 4142//3589 +f 4173//4028 4140//2161 4148//2160 +f 4173//4028 4148//2160 4180//3608 +f 4207//4066 4182//4014 4179//4015 +f 4207//4066 4179//4015 4200//4136 +f 3717//3984 3620//4004 3721//4140 +f 3717//3984 3721//4140 3780//3985 +f 3833//4161 3788//4162 3830//3986 +f 3833//4161 3830//3986 3854//4163 +f 3838//2156 3820//2155 3846//4164 +f 3838//2156 3846//4164 3856//4165 +f 3881//4166 3875//4167 3896//4006 +f 3881//4166 3896//4006 3903//4009 +f 3844//4168 3825//4138 3771//2143 +f 3844//4168 3771//2143 3804//2142 +f 3719//2139 3785//4139 3721//4140 +f 3719//2139 3721//4140 3623//2137 +f 3750//2147 3633//2150 3710//3785 +f 3750//2147 3710//3785 3788//4162 +f 3620//4004 3554//2138 3623//2137 +f 3620//4004 3623//2137 3721//4140 +f 3797//2148 3750//2147 3788//4162 +f 3797//2148 3788//4162 3833//4161 +f 3633//2150 3550//2152 3599//3772 +f 3633//2150 3599//3772 3710//3785 +f 3854//4163 3875//4167 3846//4164 +f 3854//4163 3846//4164 3833//4161 +f 3883//4169 3877//4170 3844//4168 +f 3883//4169 3844//4168 3855//4171 +f 3836//4141 3785//4139 3825//4138 +f 3836//4141 3825//4138 3860//4142 +f 3764//3783 3830//3986 3788//4162 +f 3764//3783 3788//4162 3710//3785 +f 3904//4010 3894//4011 3860//4142 +f 3904//4010 3860//4142 3877//4170 +f 3836//4141 3873//4012 3837//3930 +f 3836//4141 3837//3930 3780//3985 +f 3854//4163 3830//3986 3863//3931 +f 3854//4163 3863//3931 3888//4007 +f 3892//2121 3845//2123 3850//2126 +f 3892//2121 3850//2126 3901//2122 +f 3589//2134 3537//2133 3548//2128 +f 3589//2134 3548//2128 3597//2127 +f 3572//2074 3604//2129 3548//2128 +f 3572//2074 3548//2128 3470//3424 +f 3356//4030 3306//3071 3277//3336 +f 3356//4030 3277//3336 3347//3415 +f 3347//3415 3277//3336 3310//2157 +f 3347//3415 3310//2157 3345//2069 +f 3989//2106 3992//2105 3922//2101 +f 3989//2106 3922//2101 3926//2100 +f 4045//4020 4062//4145 4013//4144 +f 4045//4020 4013//4144 3997//4019 +f 4073//4146 4078//2173 4023//2171 +f 4073//4146 4023//2171 4015//4143 +f 3997//4019 4013//4144 3945//4148 +f 3997//4019 3945//4148 3939//4017 +f 4015//4143 4023//2171 3955//2169 +f 4015//4143 3955//2169 3951//4147 +f 3939//4017 3945//4148 3872//4052 +f 3939//4017 3872//4052 3857//4018 +f 3951//4147 3955//2169 3882//2168 +f 3951//4147 3882//2168 3876//4053 +f 3494//3417 3399//4130 3380//4109 +f 3494//3417 3380//4109 3486//3413 +f 3529//2163 3514//3416 3632//3419 +f 3529//2163 3632//3419 3666//3425 +f 3399//4130 3268//3304 3267//3259 +f 3399//4130 3267//3259 3380//4109 +f 3270//2333 3269//3303 3407//3427 +f 3270//2333 3407//3427 
3418//2166 +f 4114//4149 4123//4150 4073//4146 +f 4114//4149 4073//4146 4062//4145 +f 4123//4150 4128//2175 4078//2173 +f 4123//4150 4078//2173 4073//4146 +f 4079//2110 4100//2109 4065//2108 +f 4079//2110 4065//2108 4049//2107 +f 4126//3557 4141//3567 4114//4149 +f 4126//3557 4114//4149 4097//4021 +f 4128//2175 4155//3565 4166//2057 +f 4128//2175 4166//2057 4134//2176 +f 4201//4154 4184//4153 4197//4172 +f 4201//4154 4197//4172 4214//4173 +f 4204//4174 4232//4175 4214//4173 +f 4204//4174 4214//4173 4197//4172 +f 4206//4176 4203//4177 4233//4178 +f 4206//4176 4233//4178 4236//4179 +f 4203//4177 4199//4180 4225//4181 +f 4203//4177 4225//4181 4233//4178 +f 4232//4175 4248//4064 4238//4160 +f 4232//4175 4238//4160 4214//4173 +f 4236//4179 4233//4178 4250//4058 +f 4236//4179 4250//4058 4252//4057 +f 4233//4178 4225//4181 4245//4067 +f 4233//4178 4245//4067 4250//4058 +f 4109//2086 4160//4182 4147//4151 +f 4109//2086 4147//4151 4088//2083 +f 4109//2086 4125//4035 4172//4183 +f 4109//2086 4172//4183 4160//4182 +f 4118//2090 4108//2089 4164//4184 +f 4118//2090 4164//4184 4171//4185 +f 4099//2094 4072//2093 4138//4157 +f 4099//2094 4138//4157 4157//4186 +f 3834//4187 3804//2142 3756//2141 +f 3834//4187 3756//2141 3801//3616 +f 3855//4171 3844//4168 3804//2142 +f 3855//4171 3804//2142 3834//4187 +f 3877//4170 3860//4142 3825//4138 +f 3877//4170 3825//4138 3844//4168 +f 3906//4008 3904//4010 3877//4170 +f 3906//4008 3877//4170 3883//4169 +f 4214//4173 4238//4160 4218//4137 +f 4214//4173 4218//4137 4201//4154 +f 4063//4024 3976//2079 4038//2082 +f 4063//4024 4038//2082 4124//4152 +f 4163//4022 4135//4023 4168//4156 +f 4163//4022 4168//4156 4190//4155 +f 4179//4015 4163//4022 4190//4155 +f 4179//4015 4190//4155 4200//4136 +f 4172//4183 4171//4185 4206//4176 +f 4172//4183 4206//4176 4204//4174 +f 4232//4175 4236//4179 4252//4057 +f 4232//4175 4252//4057 4248//4064 +f 3801//3616 3810//2153 3838//2156 +f 3801//3616 3838//2156 3834//4187 +f 3797//2148 3820//2155 3800//2154 +f 3797//2148 3800//2154 3763//2145 +f 3881//4166 3883//4169 3855//4171 +f 3881//4166 3855//4171 3856//4165 +f 3833//4161 3846//4164 3820//2155 +f 3833//4161 3820//2155 3797//2148 +f 3875//4167 3854//4163 3888//4007 +f 3875//4167 3888//4007 3896//4006 +f 3883//4169 3881//4166 3903//4009 +f 3883//4169 3903//4009 3906//4008 +f 3856//4165 3855//4171 3834//4187 +f 3856//4165 3834//4187 3838//2156 +f 3875//4167 3881//4166 3856//4165 +f 3875//4167 3856//4165 3846//4164 +f 4108//2089 4099//2094 4157//4186 +f 4108//2089 4157//4186 4164//4184 +f 4125//4035 4118//2090 4171//4185 +f 4125//4035 4171//4185 4172//4183 +f 4225//4181 4213//4159 4242//4068 +f 4225//4181 4242//4068 4245//4067 +f 4040//2114 3986//2117 4020//2120 +f 4040//2114 4020//2120 4095//2115 +f 4160//4182 4197//4172 4184//4153 +f 4160//4182 4184//4153 4147//4151 +f 4172//4183 4204//4174 4197//4172 +f 4172//4183 4197//4172 4160//4182 +f 4171//4185 4164//4184 4203//4177 +f 4171//4185 4203//4177 4206//4176 +f 4164//4184 4157//4186 4199//4180 +f 4164//4184 4199//4180 4203//4177 +f 4135//4023 4063//4024 4124//4152 +f 4135//4023 4124//4152 4168//4156 +f 4236//4179 4232//4175 4204//4174 +f 4236//4179 4204//4174 4206//4176 +f 4157//4186 4138//4157 4188//4158 +f 4157//4186 4188//4158 4199//4180 +f 4199//4180 4188//4158 4213//4159 +f 4199//4180 4213//4159 4225//4181 +f 3999//4092 3967//3715 3965//3711 +f 3999//4092 3965//3711 3978//4108 +f 3934//3714 3895//4094 3943//4120 +f 3934//3714 3943//4120 3956//3712 +f 3895//4094 3868//4096 3918//4121 +f 3895//4094 3918//4121 3943//4120 +f 3868//4096 
3811//4097 3886//4123 +f 3868//4096 3886//4123 3918//4121 +f 3811//4097 3727//4098 3864//4101 +f 3811//4097 3864//4101 3886//4123 +f 3999//4092 4061//3775 4104//3777 +f 3999//4092 4104//3777 4007//4091 +f 4288//2056 4260//3940 4253//4089 +f 4288//2056 4253//4089 4276//2054 +f 3466//3916 3477//3972 3440//3570 +f 3466//3916 3440//3570 3434//3917 +f 5236//4188 5219//4189 5160//4190 +f 5236//4188 5160//4190 5169//4191 +f 5322//4192 5314//4193 5281//4194 +f 5322//4192 5281//4194 5295//4195 +f 5320//4196 5309//4197 5317//4198 +f 5320//4196 5317//4198 5327//4199 +f 5220//4200 5214//4201 5272//4202 +f 5220//4200 5272//4202 5286//4203 +f 5114//4204 5106//4205 5152//4206 +f 5114//4204 5152//4206 5163//4207 +f 5023//4208 5019//4209 5067//4210 +f 5023//4208 5067//4210 5075//4211 +f 4885//4212 4888//4213 4957//4214 +f 4885//4212 4957//4214 4962//4215 +f 5007//4216 5014//4217 4950//4218 +f 5007//4216 4950//4218 4945//4219 +f 5014//4217 5022//4220 4963//4221 +f 5014//4217 4963//4221 4950//4218 +f 4950//4218 4963//4221 4926//4222 +f 4950//4218 4926//4222 4891//4223 +f 4945//4219 4950//4218 4891//4223 +f 4945//4219 4891//4223 4889//4224 +f 4707//4225 4711//4226 4662//4227 +f 4707//4225 4662//4227 4653//4228 +f 4653//4228 4662//4227 4603//4229 +f 4653//4228 4603//4229 4586//4230 +f 4586//4230 4603//4229 4541//4231 +f 4586//4230 4541//4231 4527//4232 +f 4527//4232 4541//4231 4508//4233 +f 4527//4232 4508//4233 4500//4234 +f 4500//4234 4508//4233 4505//4235 +f 4500//4234 4505//4235 4495//4236 +f 4495//4236 4505//4235 4513//4237 +f 4495//4236 4513//4237 4502//4238 +f 4502//4238 4513//4237 4550//4239 +f 4502//4238 4550//4239 4536//4240 +f 4536//4240 4550//4239 4608//4241 +f 4536//4240 4608//4241 4602//4242 +f 4602//4242 4608//4241 4670//4243 +f 4602//4242 4670//4243 4659//4244 +f 4659//4244 4670//4243 4716//4245 +f 4659//4244 4716//4245 4708//4246 +f 4708//4246 4716//4245 4755//4247 +f 4708//4246 4755//4247 4747//4248 +f 4747//4248 4755//4247 4803//4249 +f 4747//4248 4803//4249 4799//4250 +f 4799//4250 4803//4249 4859//4251 +f 4799//4250 4859//4251 4856//4252 +f 4856//4252 4859//4251 4888//4213 +f 4856//4252 4888//4213 4885//4212 +f 5022//4220 5021//4253 4959//4254 +f 5022//4220 4959//4254 4963//4221 +f 4963//4221 4959//4254 4924//4255 +f 4963//4221 4924//4255 4926//4222 +f 5090//4256 5046//4257 5054//4258 +f 5090//4256 5054//4258 5096//4259 +f 5046//4257 5005//4260 5010//4261 +f 5046//4257 5010//4261 5054//4258 +f 5054//4258 5010//4261 5017//4262 +f 5054//4258 5017//4262 5065//4263 +f 5096//4259 5054//4258 5065//4263 +f 5096//4259 5065//4263 5104//4264 +f 5165//4265 5122//4266 5134//4267 +f 5165//4265 5134//4267 5185//4268 +f 5122//4266 5090//4256 5096//4259 +f 5122//4266 5096//4259 5134//4267 +f 5134//4267 5096//4259 5104//4264 +f 5134//4267 5104//4264 5149//4269 +f 5185//4268 5134//4267 5149//4269 +f 5185//4268 5149//4269 5209//4270 +f 5094//4271 5130//4272 5145//4273 +f 5094//4271 5145//4273 5102//4274 +f 5130//4272 5186//4275 5205//4276 +f 5130//4272 5205//4276 5145//4273 +f 5145//4273 5205//4276 5236//4188 +f 5145//4273 5236//4188 5169//4191 +f 5102//4274 5145//4273 5169//4191 +f 5102//4274 5169//4191 5115//4277 +f 5224//4278 5200//4279 5221//4280 +f 5224//4278 5221//4280 5258//4281 +f 5200//4279 5165//4265 5185//4268 +f 5200//4279 5185//4268 5221//4280 +f 5221//4280 5185//4268 5209//4270 +f 5221//4280 5209//4270 5262//4282 +f 5258//4281 5221//4280 5262//4282 +f 5258//4281 5262//4282 5292//4283 +f 5186//4275 5231//4284 5263//4285 +f 5186//4275 5263//4285 5205//4276 +f 5231//4284 5268//4286 5297//4287 +f 
5231//4284 5297//4287 5263//4285 +f 5263//4285 5297//4287 5322//4192 +f 5263//4285 5322//4192 5295//4195 +f 5205//4276 5263//4285 5295//4195 +f 5205//4276 5295//4195 5236//4188 +f 5222//4288 5238//4289 5270//4290 +f 5222//4288 5270//4290 5255//4291 +f 5238//4289 5224//4278 5258//4281 +f 5238//4289 5258//4281 5270//4290 +f 5270//4290 5258//4281 5292//4283 +f 5270//4290 5292//4283 5300//4292 +f 5255//4291 5270//4290 5300//4292 +f 5255//4291 5300//4292 5288//4293 +f 5268//4286 5276//4294 5306//4295 +f 5268//4286 5306//4295 5297//4287 +f 5276//4294 5267//4296 5294//4297 +f 5276//4294 5294//4297 5306//4295 +f 5306//4295 5294//4297 5320//4196 +f 5306//4295 5320//4196 5327//4199 +f 5297//4287 5306//4295 5327//4199 +f 5297//4287 5327//4199 5322//4192 +f 5164//4298 5198//4299 5216//4300 +f 5164//4298 5216//4300 5177//4301 +f 5198//4299 5222//4288 5255//4291 +f 5198//4299 5255//4291 5216//4300 +f 5216//4300 5255//4291 5288//4293 +f 5216//4300 5288//4293 5253//4302 +f 5177//4301 5216//4300 5253//4302 +f 5177//4301 5253//4302 5199//4303 +f 5267//4296 5228//4304 5260//4305 +f 5267//4296 5260//4305 5294//4297 +f 5228//4304 5187//4306 5204//4307 +f 5228//4304 5204//4307 5260//4305 +f 5260//4305 5204//4307 5220//4200 +f 5260//4305 5220//4200 5286//4203 +f 5294//4297 5260//4305 5286//4203 +f 5294//4297 5286//4203 5320//4196 +f 5091//4308 5125//4309 5131//4310 +f 5091//4308 5131//4310 5097//4311 +f 5125//4309 5164//4298 5177//4301 +f 5125//4309 5177//4301 5131//4310 +f 5131//4310 5177//4301 5199//4303 +f 5131//4310 5199//4303 5144//4312 +f 5097//4311 5131//4310 5144//4312 +f 5097//4311 5144//4312 5103//4313 +f 5187//4306 5137//4314 5147//4315 +f 5187//4306 5147//4315 5204//4307 +f 5137//4314 5099//4316 5105//4317 +f 5137//4314 5105//4317 5147//4315 +f 5147//4315 5105//4317 5114//4204 +f 5147//4315 5114//4204 5163//4207 +f 5204//4307 5147//4315 5163//4207 +f 5204//4307 5163//4207 5220//4200 +f 5009//4318 5051//4319 5056//4320 +f 5009//4318 5056//4320 5012//4321 +f 5051//4319 5091//4308 5097//4311 +f 5051//4319 5097//4311 5056//4320 +f 5056//4320 5097//4311 5103//4313 +f 5056//4320 5103//4313 5062//4322 +f 5012//4321 5056//4320 5062//4322 +f 5012//4321 5062//4322 5016//4323 +f 5099//4316 5059//4324 5066//4325 +f 5099//4316 5066//4325 5105//4317 +f 5059//4324 5015//4326 5018//4327 +f 5059//4324 5018//4327 5066//4325 +f 5066//4325 5018//4327 5023//4208 +f 5066//4325 5023//4208 5075//4211 +f 5105//4317 5066//4325 5075//4211 +f 5105//4317 5075//4211 5114//4204 +f 4887//3287 4946//4328 4948//4329 +f 4887//3287 4948//4329 4886//4330 +f 4946//4328 5009//4318 5012//4321 +f 4946//4328 5012//4321 4948//4329 +f 4948//4329 5012//4321 5016//4323 +f 4948//4329 5016//4323 4951//4331 +f 4886//4330 4948//4329 4951//4331 +f 4886//4330 4951//4331 4884//4332 +f 5015//4326 4952//4333 4958//4334 +f 5015//4326 4958//4334 5018//4327 +f 4952//4333 4921//4335 4890//4336 +f 4952//4333 4890//4336 4958//4334 +f 4958//4334 4890//4336 4885//4212 +f 4958//4334 4885//4212 4962//4215 +f 5018//4327 4958//4334 4962//4215 +f 5018//4327 4962//4215 5023//4208 +f 4728//4337 4770//4338 4761//4339 +f 4728//4337 4761//4339 4720//4340 +f 4770//4338 4815//4341 4808//4342 +f 4770//4338 4808//4342 4761//4339 +f 4761//4339 4808//4342 4800//4343 +f 4761//4339 4800//4343 4750//4344 +f 4720//4340 4761//4339 4750//4344 +f 4720//4340 4750//4344 4707//4225 +f 4636//4345 4692//4346 4677//4347 +f 4636//4345 4677//4347 4617//4348 +f 4692//4346 4728//4337 4720//4340 +f 4692//4346 4720//4340 4677//4347 +f 4677//4347 4720//4340 4707//4225 +f 4677//4347 4707//4225 
4653//4228 +f 4617//4348 4677//4347 4653//4228 +f 4617//4348 4653//4228 4586//4230 +f 4732//4349 4700//4350 4688//4351 +f 4732//4349 4688//4351 4726//4352 +f 4700//4350 4657//4353 4637//4354 +f 4700//4350 4637//4354 4688//4351 +f 4688//4351 4637//4354 4613//4355 +f 4688//4351 4613//4355 4673//4356 +f 4726//4352 4688//4351 4673//4356 +f 4726//4352 4673//4356 4718//4357 +f 4554//4358 4591//4359 4559//4360 +f 4554//4358 4559//4360 4525//4361 +f 4591//4359 4636//4345 4617//4348 +f 4591//4359 4617//4348 4559//4360 +f 4559//4360 4617//4348 4586//4230 +f 4559//4360 4586//4230 4527//4232 +f 4525//4361 4559//4360 4527//4232 +f 4525//4361 4527//4232 4500//4234 +f 4657//4353 4622//4362 4601//4363 +f 4657//4353 4601//4363 4637//4354 +f 4622//4362 4598//4364 4564//4365 +f 4622//4362 4564//4365 4601//4363 +f 4601//4363 4564//4365 4530//4366 +f 4601//4363 4530//4366 4560//4367 +f 4637//4354 4601//4363 4560//4367 +f 4637//4354 4560//4367 4613//4355 +f 4555//4368 4546//4369 4516//4370 +f 4555//4368 4516//4370 4528//4371 +f 4546//4369 4554//4358 4525//4361 +f 4546//4369 4525//4361 4516//4370 +f 4516//4370 4525//4361 4500//4234 +f 4516//4370 4500//4234 4495//4236 +f 4528//4371 4516//4370 4495//4236 +f 4528//4371 4495//4236 4502//4238 +f 4598//4364 4584//4372 4552//4373 +f 4598//4364 4552//4373 4564//4365 +f 4584//4372 4600//4374 4567//4375 +f 4584//4372 4567//4375 4552//4373 +f 4552//4373 4567//4375 4534//4376 +f 4552//4373 4534//4376 4522//4377 +f 4564//4365 4552//4373 4522//4377 +f 4564//4365 4522//4377 4530//4366 +f 4635//4378 4594//4379 4562//4380 +f 4635//4378 4562//4380 4618//4381 +f 4594//4379 4555//4368 4528//4371 +f 4594//4379 4528//4371 4562//4380 +f 4562//4380 4528//4371 4502//4238 +f 4562//4380 4502//4238 4536//4240 +f 4618//4381 4562//4380 4536//4240 +f 4618//4381 4536//4240 4602//4242 +f 4600//4374 4624//4382 4606//4383 +f 4600//4374 4606//4383 4567//4375 +f 4624//4382 4658//4384 4645//4385 +f 4624//4382 4645//4385 4606//4383 +f 4606//4383 4645//4385 4623//4386 +f 4606//4383 4623//4386 4569//4387 +f 4567//4375 4606//4383 4569//4387 +f 4567//4375 4569//4387 4534//4376 +f 4723//4388 4685//4389 4675//4390 +f 4723//4388 4675//4390 4717//4391 +f 4685//4389 4635//4378 4618//4381 +f 4685//4389 4618//4381 4675//4390 +f 4675//4390 4618//4381 4602//4242 +f 4675//4390 4602//4242 4659//4244 +f 4717//4391 4675//4390 4659//4244 +f 4717//4391 4659//4244 4708//4246 +f 4658//4384 4697//4392 4691//4393 +f 4658//4384 4691//4393 4645//4385 +f 4697//4392 4731//4394 4725//4395 +f 4697//4392 4725//4395 4691//4393 +f 4691//4393 4725//4395 4719//4396 +f 4691//4393 4719//4396 4678//4397 +f 4645//4385 4691//4393 4678//4397 +f 4645//4385 4678//4397 4623//4386 +f 4807//4398 4763//4399 4756//4400 +f 4807//4398 4756//4400 4804//4401 +f 4763//4399 4723//4388 4717//4391 +f 4763//4399 4717//4391 4756//4400 +f 4756//4400 4717//4391 4708//4246 +f 4756//4400 4708//4246 4747//4248 +f 4804//4401 4756//4400 4747//4248 +f 4804//4401 4747//4248 4799//4250 +f 4731//4394 4771//4402 4766//4403 +f 4731//4394 4766//4403 4725//4395 +f 4771//4402 4813//4404 4810//4405 +f 4771//4402 4810//4405 4766//4403 +f 4766//4403 4810//4405 4806//4406 +f 4766//4403 4806//4406 4760//4407 +f 4725//4395 4766//4403 4760//4407 +f 4725//4395 4760//4407 4719//4396 +f 4921//4335 4863//4408 4858//4409 +f 4921//4335 4858//4409 4890//4336 +f 4863//4408 4807//4398 4804//4401 +f 4863//4408 4804//4401 4858//4409 +f 4858//4409 4804//4401 4799//4250 +f 4858//4409 4799//4250 4856//4252 +f 4890//4336 4858//4409 4856//4252 +f 4890//4336 4856//4252 4885//4212 +f 4813//4404 
4870//4410 4868//4411 +f 4813//4404 4868//4411 4810//4405 +f 4870//4410 4887//3287 4886//4330 +f 4870//4410 4886//4330 4868//4411 +f 4868//4411 4886//4330 4884//4332 +f 4868//4411 4884//4332 4864//4412 +f 4810//4405 4868//4411 4864//4412 +f 4810//4405 4864//4412 4806//4406 +f 5115//4277 5111//4413 5070//4414 +f 5115//4277 5070//4414 5072//4415 +f 4800//4343 4801//4416 4752//4417 +f 4800//4343 4752//4417 4750//4344 +f 4750//4344 4752//4417 4711//4226 +f 4750//4344 4711//4226 4707//4225 +f 4805//4418 4801//4416 4857//4419 +f 4805//4418 4857//4419 4860//4420 +f 4817//4421 4812//4422 4869//4423 +f 4817//4421 4869//4423 4876//4424 +f 4812//4422 4805//4418 4860//4420 +f 4812//4422 4860//4420 4869//4423 +f 4869//4423 4860//4420 4925//4425 +f 4869//4423 4925//4425 4923//4426 +f 4876//4424 4869//4423 4923//4426 +f 4876//4424 4923//4426 4922//4427 +f 5007//4216 5052//4428 5061//4429 +f 5007//4216 5061//4429 5014//4217 +f 5052//4428 5094//4271 5102//4274 +f 5052//4428 5102//4274 5061//4429 +f 5061//4429 5102//4274 5115//4277 +f 5061//4429 5115//4277 5072//4415 +f 5014//4217 5061//4429 5072//4415 +f 5014//4217 5072//4415 5022//4220 +f 4860//4420 4857//4419 4924//4255 +f 4860//4420 4924//4255 4925//4425 +f 4817//4421 4776//4430 4768//4431 +f 4817//4421 4768//4431 4812//4422 +f 4776//4430 4732//4349 4726//4352 +f 4776//4430 4726//4352 4768//4431 +f 4768//4431 4726//4352 4718//4357 +f 4768//4431 4718//4357 4757//4432 +f 4812//4422 4768//4431 4757//4432 +f 4812//4422 4757//4432 4805//4418 +f 5111//4413 5160//4190 5149//4269 +f 5111//4413 5149//4269 5104//4264 +f 5160//4190 5219//4189 5209//4270 +f 5160//4190 5209//4270 5149//4269 +f 5219//4189 5281//4194 5262//4282 +f 5219//4189 5262//4282 5209//4270 +f 5281//4194 5314//4193 5292//4283 +f 5281//4194 5292//4283 5262//4282 +f 5314//4193 5317//4198 5300//4292 +f 5314//4193 5300//4292 5292//4283 +f 5317//4198 5309//4197 5288//4293 +f 5317//4198 5288//4293 5300//4292 +f 5309//4197 5272//4202 5253//4302 +f 5309//4197 5253//4302 5288//4293 +f 5272//4202 5214//4201 5199//4303 +f 5272//4202 5199//4303 5253//4302 +f 5214//4201 5152//4206 5144//4312 +f 5214//4201 5144//4312 5199//4303 +f 5152//4206 5106//4205 5103//4313 +f 5152//4206 5103//4313 5144//4312 +f 5106//4205 5067//4210 5062//4322 +f 5106//4205 5062//4322 5103//4313 +f 5067//4210 5019//4209 5016//4323 +f 5067//4210 5016//4323 5062//4322 +f 5019//4209 4957//4214 4951//4331 +f 5019//4209 4951//4331 5016//4323 +f 4957//4214 4888//4213 4884//4332 +f 4957//4214 4884//4332 4951//4331 +f 4718//4357 4673//4356 4662//4227 +f 4718//4357 4662//4227 4711//4226 +f 4673//4356 4613//4355 4603//4229 +f 4673//4356 4603//4229 4662//4227 +f 4613//4355 4560//4367 4541//4231 +f 4613//4355 4541//4231 4603//4229 +f 4560//4367 4530//4366 4508//4233 +f 4560//4367 4508//4233 4541//4231 +f 4530//4366 4522//4377 4505//4235 +f 4530//4366 4505//4235 4508//4233 +f 4522//4377 4534//4376 4513//4237 +f 4522//4377 4513//4237 4505//4235 +f 4534//4376 4569//4387 4550//4239 +f 4534//4376 4550//4239 4513//4237 +f 4569//4387 4623//4386 4608//4241 +f 4569//4387 4608//4241 4550//4239 +f 4623//4386 4678//4397 4670//4243 +f 4623//4386 4670//4243 4608//4241 +f 4678//4397 4719//4396 4716//4245 +f 4678//4397 4716//4245 4670//4243 +f 4719//4396 4760//4407 4755//4247 +f 4719//4396 4755//4247 4716//4245 +f 4760//4407 4806//4406 4803//4249 +f 4760//4407 4803//4249 4755//4247 +f 4806//4406 4864//4412 4859//4251 +f 4806//4406 4859//4251 4803//4249 +f 4864//4412 4884//4332 4888//4213 +f 4864//4412 4888//4213 4859//4251 +f 5021//4253 5070//4414 5065//4263 +f 
5021//4253 5065//4263 5017//4262 +f 5070//4414 5111//4413 5104//4264 +f 5070//4414 5104//4264 5065//4263 +f 4805//4418 4757//4432 4752//4417 +f 4805//4418 4752//4417 4801//4416 +f 4757//4432 4718//4357 4711//4226 +f 4757//4432 4711//4226 4752//4417 +f 4857//4419 4855//4433 4926//4222 +f 4857//4419 4926//4222 4924//4255 +f 5021//4253 5017//4262 4954//4434 +f 5021//4253 4954//4434 4959//4254 +f 4800//4343 4808//4342 4866//4435 +f 4800//4343 4866//4435 4855//4433 +f 4808//4342 4815//4341 4873//4436 +f 4808//4342 4873//4436 4866//4435 +f 4866//4435 4873//4436 4889//4224 +f 4866//4435 4889//4224 4891//4223 +f 4855//4433 4866//4435 4891//4223 +f 4855//4433 4891//4223 4926//4222 +f 5169//4191 5160//4190 5111//4413 +f 5169//4191 5111//4413 5115//4277 +f 5295//4195 5281//4194 5219//4189 +f 5295//4195 5219//4189 5236//4188 +f 5327//4199 5317//4198 5314//4193 +f 5327//4199 5314//4193 5322//4192 +f 5286//4203 5272//4202 5309//4197 +f 5286//4203 5309//4197 5320//4196 +f 5163//4207 5152//4206 5214//4201 +f 5163//4207 5214//4201 5220//4200 +f 5075//4211 5067//4210 5106//4205 +f 5075//4211 5106//4205 5114//4204 +f 4962//4215 4957//4214 5019//4209 +f 4962//4215 5019//4209 5023//4208 +f 5072//4415 5070//4414 5021//4253 +f 5072//4415 5021//4253 5022//4220 +f 4801//4416 4800//4343 4855//4433 +f 4801//4416 4855//4433 4857//4419 +f 4959//4254 4954//4434 4925//4425 +f 4959//4254 4925//4425 4924//4255 +f 5017//4262 5010//4261 4947//4437 +f 5017//4262 4947//4437 4954//4434 +f 5010//4261 5005//4260 4944//4438 +f 5010//4261 4944//4438 4947//4437 +f 4947//4437 4944//4438 4922//4427 +f 4947//4437 4922//4427 4923//4426 +f 4954//4434 4947//4437 4923//4426 +f 4954//4434 4923//4426 4925//4425 +f 4842//4439 4928//4440 4883//4441 +f 4842//4439 4883//4441 4839//4442 +f 4872//4443 4844//4444 4927//4445 +f 4872//4443 4927//4445 4931//4446 +f 4821//4447 4827//4448 4826//4449 +f 4821//4447 4826//4449 4820//4450 +f 4971//4451 4932//4452 4937//4453 +f 4971//4451 4937//4453 4961//4454 +f 4980//4455 4982//4456 4933//4457 +f 4980//4455 4933//4457 4930//4458 +f 4988//4459 4978//4460 4976//4461 +f 4988//4459 4976//4461 4989//4462 +f 5003//4463 5001//4464 5000//4465 +f 5003//4463 5000//4465 5002//4466 +f 4850//4467 4833//4468 4832//4469 +f 4850//4467 4832//4469 4849//4470 +f 4878//4471 4935//4472 4934//4473 +f 4878//4471 4934//4473 4874//4474 +f 4981//4475 4928//4440 4929//4476 +f 4981//4475 4929//4476 4974//4477 +f 5000//4465 4979//4478 4981//4475 +f 5000//4465 4981//4475 5002//4466 +f 4976//4461 4960//4479 4970//4480 +f 4976//4461 4970//4480 4989//4462 +f 5003//4463 5002//4466 4993//4481 +f 5003//4463 4993//4481 4994//4482 +f 4850//4467 4871//4483 4843//4484 +f 4850//4467 4843//4484 4833//4468 +f 4839//4442 4883//4441 4927//4445 +f 4839//4442 4927//4445 4844//4444 +f 4865//4485 4862//4486 4853//4487 +f 4865//4485 4853//4487 4854//4488 +f 4983//4489 4984//4490 4972//4491 +f 4983//4489 4972//4491 4973//4492 +f 4841//4493 4933//4457 4936//4494 +f 4841//4493 4936//4494 4847//4495 +f 4872//4443 4931//4446 4935//4472 +f 4872//4443 4935//4472 4878//4471 +f 4832//4469 4833//4468 4821//4447 +f 4832//4469 4821//4447 4820//4450 +f 4835//4496 4834//4497 4826//4449 +f 4835//4496 4826//4449 4827//4448 +f 4821//4447 4838//4498 4841//4493 +f 4821//4447 4841//4493 4827//4448 +f 4875//4499 4938//4500 4939//4501 +f 4875//4499 4939//4501 4877//4502 +f 4961//4454 4937//4453 4939//4501 +f 4961//4454 4939//4501 4956//4503 +f 4850//4467 4849//4470 4862//4486 +f 4850//4467 4862//4486 4865//4485 +f 4843//4484 4932//4452 4930//4458 +f 4843//4484 4930//4458 
4838//4498 +f 5003//4463 4982//4456 4980//4455 +f 5003//4463 4980//4455 5001//4464 +f 4961//4454 4978//4460 4988//4459 +f 4961//4454 4988//4459 4971//4451 +f 4973//4492 4972//4491 4976//4461 +f 4973//4492 4976//4461 4978//4460 +f 4872//4443 4849//4470 4832//4469 +f 4872//4443 4832//4469 4844//4444 +f 4988//4459 4989//4462 5000//4465 +f 4988//4459 5000//4465 5001//4464 +f 4826//4449 4834//4497 4846//4504 +f 4826//4449 4846//4504 4842//4439 +f 4826//4449 4842//4439 4839//4442 +f 4826//4449 4839//4442 4820//4450 +f 5002//4466 4981//4475 4974//4477 +f 5002//4466 4974//4477 4993//4481 +f 4972//4491 4984//4490 4966//4505 +f 4972//4491 4966//4505 4955//4506 +f 4844//4444 4832//4469 4820//4450 +f 4844//4444 4820//4450 4839//4442 +f 4989//4462 4970//4480 4979//4478 +f 4989//4462 4979//4478 5000//4465 +f 5003//4463 4994//4482 4975//4507 +f 5003//4463 4975//4507 4982//4456 +f 4973//4492 4956//4503 4965//4508 +f 4973//4492 4965//4508 4983//4489 +f 4862//4486 4849//4470 4872//4443 +f 4862//4486 4872//4443 4878//4471 +f 4877//4502 4865//4485 4854//4488 +f 4877//4502 4854//4488 4875//4499 +f 4827//4448 4841//4493 4847//4495 +f 4827//4448 4847//4495 4835//4496 +f 4961//4454 4956//4503 4973//4492 +f 4961//4454 4973//4492 4978//4460 +f 4833//4468 4843//4484 4838//4498 +f 4833//4468 4838//4498 4821//4447 +f 4877//4502 4871//4483 4850//4467 +f 4877//4502 4850//4467 4865//4485 +f 5001//4464 4980//4455 4971//4451 +f 5001//4464 4971//4451 4988//4459 +f 4972//4491 4955//4506 4960//4479 +f 4972//4491 4960//4479 4976//4461 +f 4928//4440 4842//4439 4846//4504 +f 4928//4440 4846//4504 4929//4476 +f 4862//4486 4878//4471 4874//4474 +f 4862//4486 4874//4474 4853//4487 +f 4928//4440 4981//4475 4979//4478 +f 4928//4440 4979//4478 4883//4441 +f 4931//4446 4927//4445 4970//4480 +f 4931//4446 4970//4480 4960//4479 +f 4935//4472 4955//4506 4966//4505 +f 4935//4472 4966//4505 4934//4473 +f 4883//4441 4979//4478 4970//4480 +f 4883//4441 4970//4480 4927//4445 +f 4931//4446 4960//4479 4955//4506 +f 4931//4446 4955//4506 4935//4472 +f 4933//4457 4982//4456 4975//4507 +f 4933//4457 4975//4507 4936//4494 +f 4938//4500 4965//4508 4956//4503 +f 4938//4500 4956//4503 4939//4501 +f 4932//4452 4971//4451 4980//4455 +f 4932//4452 4980//4455 4930//4458 +f 4932//4452 4843//4484 4871//4483 +f 4932//4452 4871//4483 4937//4453 +f 4930//4458 4933//4457 4841//4493 +f 4930//4458 4841//4493 4838//4498 +f 4937//4453 4871//4483 4877//4502 +f 4937//4453 4877//4502 4939//4501 +f 4699//4509 4689//4510 4784//4511 +f 4699//4509 4784//4511 4789//4512 +f 4430//4513 4422//4514 4496//4515 +f 4430//4513 4496//4515 4529//4516 +f 4402//4517 4389//4518 4422//4514 +f 4402//4517 4422//4514 4430//4513 +f 4424//4519 4418//4520 4400//4521 +f 4424//4519 4400//4521 4402//4517 +f 4418//4520 4424//4519 4444//4522 +f 4418//4520 4444//4522 4435//4523 +f 4435//4523 4444//4522 4472//4524 +f 4435//4523 4472//4524 4462//4525 +f 4472//4524 4444//4522 4477//4526 +f 4472//4524 4477//4526 4538//4527 +f 4593//4528 4521//4529 4472//4524 +f 4593//4528 4472//4524 4538//4527 +f 4424//4519 4452//4530 4477//4526 +f 4424//4519 4477//4526 4444//4522 +f 4477//4526 4614//4531 4648//4532 +f 4477//4526 4648//4532 4538//4527 +f 4452//4530 4568//4533 4614//4531 +f 4452//4530 4614//4531 4477//4526 +f 4402//4517 4400//4521 4375//4534 +f 4402//4517 4375//4534 4389//4518 +f 4379//4535 4356//4536 4376//4537 +f 4408//4538 4440//4539 4431//4540 +f 4408//4538 4431//4540 4410//4541 +f 4514//4542 4469//4543 4459//4544 +f 4514//4542 4459//4544 4468//4545 +f 4524//4546 4695//4547 4666//4548 +f 4524//4546 
4666//4548 4501//4549 +f 4371//4550 4362//4551 4363//4552 +f 4371//4550 4363//4552 4369//4553 +f 4438//4554 4450//4555 4428//4556 +f 4438//4554 4428//4556 4421//4557 +f 4371//4550 4389//4518 4375//4534 +f 4371//4550 4375//4534 4362//4551 +f 4356//4536 4348//4558 4361//4559 +f 4356//4536 4361//4559 4376//4537 +f 4690//4560 4548//4561 4587//4562 +f 4690//4560 4587//4562 4710//4563 +f 4436//4564 4441//4565 4420//4566 +f 4436//4564 4420//4566 4416//4567 +f 4430//4513 4452//4530 4424//4519 +f 4430//4513 4424//4519 4402//4517 +f 4496//4515 4422//4514 4411//4568 +f 4496//4515 4411//4568 4471//4569 +f 4466//4570 4471//4569 4411//4568 +f 4466//4570 4411//4568 4405//4571 +f 4727//4572 4620//4573 4696//4574 +f 4767//4575 4664//4576 4620//4573 +f 4767//4575 4620//4573 4727//4572 +f 4831//4577 4784//4511 4767//4575 +f 4831//4577 4767//4575 4811//4578 +f 4789//4512 4784//4511 4831//4577 +f 4789//4512 4831//4577 4867//4579 +f 4861//4580 4789//4512 4867//4579 +f 4861//4580 4867//4579 4881//4581 +f 4861//4580 4881//4581 4893//4582 +f 4861//4580 4893//4582 4894//4583 +f 4779//4584 4789//4512 4861//4580 +f 4779//4584 4861//4580 4848//4585 +f 4848//4585 4861//4580 4894//4583 +f 4848//4585 4894//4583 4898//4586 +f 4879//4587 4882//4588 4881//4581 +f 4879//4587 4881//4581 4867//4579 +f 4867//4579 4831//4577 4851//4589 +f 4867//4579 4851//4589 4879//4587 +f 4699//4509 4789//4512 4779//4584 +f 4699//4509 4779//4584 4704//4590 +f 4819//4591 4908//4592 4912//4593 +f 4819//4591 4912//4593 4822//4594 +f 4777//4595 4709//4596 4704//4590 +f 4777//4595 4704//4590 4779//4584 +f 4784//4511 4689//4510 4664//4576 +f 4784//4511 4664//4576 4767//4575 +f 4851//4589 4831//4577 4811//4578 +f 4851//4589 4811//4578 4818//4597 +f 4818//4597 4811//4578 4786//4598 +f 4769//4599 4825//4600 4802//4601 +f 4769//4599 4802//4601 4735//4602 +f 4425//4603 4417//4604 4364//4605 +f 4736//4606 4786//4598 4727//4572 +f 4736//4606 4727//4572 4696//4574 +f 4786//4598 4811//4578 4727//4572 +f 4811//4578 4767//4575 4727//4572 +f 4405//4571 4412//4607 4467//4608 +f 4405//4571 4467//4608 4466//4570 +f 4369//4553 4380//4609 4412//4607 +f 4369//4553 4412//4607 4405//4571 +f 4380//4609 4387//4610 4415//4611 +f 4380//4609 4415//4611 4412//4607 +f 4380//4609 4370//4612 4382//4613 +f 4380//4609 4382//4613 4387//4610 +f 4349//4614 4364//4605 4361//4559 +f 4349//4614 4361//4559 4348//4558 +f 4485//4615 4547//4616 4514//4542 +f 4485//4615 4514//4542 4468//4545 +f 4363//4552 4370//4612 4380//4609 +f 4363//4552 4380//4609 4369//4553 +f 4404//4617 4415//4611 4387//4610 +f 4404//4617 4387//4610 4384//4618 +f 4401//4619 4374//4620 4357//4621 +f 4401//4619 4357//4621 4390//4622 +f 4404//4617 4458//4623 4461//4624 +f 4404//4617 4461//4624 4415//4611 +f 4432//4625 4446//4626 4401//4619 +f 4432//4625 4401//4619 4390//4622 +f 4382//4613 4373//4627 4384//4618 +f 4382//4613 4384//4618 4387//4610 +f 4352//4628 4357//4621 4374//4620 +f 4352//4628 4374//4620 4366//4629 +f 4644//4630 4557//4631 4449//4632 +f 4644//4630 4449//4632 4487//4633 +f 4450//4555 4473//4634 4456//4635 +f 4450//4555 4456//4635 4428//4556 +f 4461//4624 4467//4608 4412//4607 +f 4461//4624 4412//4607 4415//4611 +f 4371//4550 4369//4553 4405//4571 +f 4371//4550 4405//4571 4411//4568 +f 4686//4636 4701//4637 4654//4638 +f 4686//4636 4654//4638 4631//4639 +f 4898//4586 4900//4640 4840//4641 +f 4898//4586 4840//4641 4848//4585 +f 4426//4642 4413//4643 4394//4644 +f 4664//4576 4471//4569 4466//4570 +f 4664//4576 4466//4570 4620//4573 +f 4362//4551 4394//4644 4363//4552 +f 4566//4645 4609//4646 4540//4647 +f 
4736//4606 4696//4574 4654//4638 +f 4736//4606 4654//4638 4701//4637 +f 4533//4648 4646//4649 4660//4650 +f 4533//4648 4660//4650 4580//4651 +f 4503//4652 4609//4646 4646//4649 +f 4503//4652 4646//4649 4533//4648 +f 4540//4647 4503//4652 4480//4653 +f 4503//4652 4427//4654 4454//4655 +f 4480//4653 4503//4652 4454//4655 +f 4389//4518 4371//4550 4411//4568 +f 4389//4518 4411//4568 4422//4514 +f 4529//4516 4568//4533 4452//4530 +f 4529//4516 4452//4530 4430//4513 +f 4852//4656 4825//4600 4769//4599 +f 4852//4656 4769//4599 4794//4657 +f 4715//4658 4794//4657 4769//4599 +f 4715//4658 4769//4599 4695//4547 +f 4433//4659 4449//4632 4557//4631 +f 4433//4659 4557//4631 4524//4546 +f 4524//4546 4501//4549 4426//4642 +f 4524//4546 4426//4642 4433//4659 +f 4695//4547 4769//4599 4735//4602 +f 4695//4547 4735//4602 4666//4548 +f 4735//4602 4713//4660 4666//4548 +f 4788//4661 4911//4662 4909//4663 +f 4788//4661 4909//4663 4790//4664 +f 4464//4665 4407//4666 4413//4643 +f 4464//4665 4413//4643 4474//4667 +f 4441//4565 4448//4668 4419//4669 +f 4441//4565 4419//4669 4420//4566 +f 4580//4651 4446//4626 4432//4625 +f 4580//4651 4432//4625 4533//4648 +f 4388//4670 4439//4671 4429//4672 +f 4388//4670 4429//4672 4360//4673 +f 4399//4674 4457//4675 4451//4676 +f 4399//4674 4451//4676 4397//4677 +f 4464//4665 4457//4675 4399//4674 +f 4464//4665 4399//4674 4407//4666 +f 4426//4642 4501//4549 4474//4667 +f 4426//4642 4474//4667 4413//4643 +f 4620//4573 4466//4570 4467//4608 +f 4604//4678 4457//4675 4464//4665 +f 4604//4678 4464//4665 4665//4679 +f 4665//4679 4464//4665 4474//4667 +f 4665//4679 4474//4667 4713//4660 +f 4654//4638 4461//4624 4458//4623 +f 4654//4638 4458//4623 4631//4639 +f 4654//4638 4696//4574 4467//4608 +f 4654//4638 4467//4608 4461//4624 +f 4793//4680 4899//4681 4897//4682 +f 4793//4680 4897//4682 4783//4683 +f 4848//4585 4840//4641 4777//4595 +f 4848//4585 4777//4595 4779//4584 +f 4777//4595 4840//4641 4837//4684 +f 4777//4595 4837//4684 4764//4685 +f 4783//4683 4897//4682 4896//4686 +f 4783//4683 4896//4686 4845//4687 +f 4465//4688 4486//4689 4450//4555 +f 4465//4688 4450//4555 4438//4554 +f 4880//4690 4845//4687 4896//4686 +f 4880//4690 4896//4686 4895//4691 +f 4690//4560 4710//4563 4798//4692 +f 4690//4560 4798//4692 4793//4680 +f 4793//4680 4798//4692 4901//4693 +f 4793//4680 4901//4693 4899//4681 +f 4902//4694 4901//4693 4798//4692 +f 4902//4694 4798//4692 4814//4695 +f 4794//4657 4845//4687 4880//4690 +f 4794//4657 4880//4690 4852//4656 +f 4881//4581 4882//4588 4892//4696 +f 4881//4581 4892//4696 4893//4582 +f 4736//4606 4665//4679 4762//4697 +f 4706//4698 4684//4699 4643//4700 +f 4814//4695 4816//4701 4906//4702 +f 4814//4695 4906//4702 4902//4694 +f 4729//4703 4740//4704 4816//4701 +f 4729//4703 4816//4701 4814//4695 +f 4814//4695 4798//4692 4710//4563 +f 4814//4695 4710//4563 4729//4703 +f 4729//4703 4630//4705 4684//4699 +f 4729//4703 4684//4699 4740//4704 +f 4710//4563 4587//4562 4630//4705 +f 4710//4563 4630//4705 4729//4703 +f 4400//4521 4421//4557 4428//4556 +f 4400//4521 4428//4556 4414//4706 +f 4512//4707 4547//4616 4485//4615 +f 4529//4516 4699//4509 4704//4590 +f 4529//4516 4704//4590 4568//4533 +f 4837//4684 4840//4641 4900//4640 +f 4837//4684 4900//4640 4903//4708 +f 4828//4709 4915//4710 4914//4711 +f 4828//4709 4914//4711 4836//4712 +f 4669//4713 4714//4714 4754//4715 +f 4704//4590 4709//4596 4614//4531 +f 4704//4590 4614//4531 4568//4533 +f 4714//4714 4709//4596 4777//4595 +f 4714//4714 4777//4595 4764//4685 +f 4709//4596 4714//4714 4648//4532 +f 4709//4596 4648//4532 
4614//4531 +f 4648//4532 4714//4714 4669//4713 +f 4593//4528 4538//4527 4648//4532 +f 4593//4528 4648//4532 4669//4713 +f 4512//4707 4485//4615 4447//4716 +f 4419//4669 4448//4668 4454//4655 +f 4419//4669 4454//4655 4423//4717 +f 4486//4689 4545//4718 4473//4634 +f 4486//4689 4473//4634 4450//4555 +f 4545//4718 4486//4689 4511//4719 +f 4545//4718 4511//4719 4578//4720 +f 4429//4672 4539//4721 4512//4707 +f 4429//4672 4512//4707 4425//4603 +f 4354//4722 4425//4603 4364//4605 +f 4364//4605 4417//4604 4408//4538 +f 4364//4605 4408//4538 4361//4559 +f 4426//4642 4394//4644 4398//4723 +f 4426//4642 4398//4723 4433//4659 +f 4762//4697 4665//4679 4713//4660 +f 4398//4723 4414//4706 4449//4632 +f 4398//4723 4449//4632 4433//4659 +f 4425//4603 4447//4716 4417//4604 +f 4360//4673 4429//4672 4425//4603 +f 4360//4673 4425//4603 4354//4722 +f 4666//4548 4474//4667 4501//4549 +f 4346//4724 4343//4725 4347//4726 +f 4346//4724 4347//4726 4350//4727 +f 4347//4726 4343//4725 4344//4728 +f 4347//4726 4344//4728 4345//4729 +f 4431//4540 4434//4730 4409//4731 +f 4431//4540 4409//4731 4410//4541 +f 4357//4621 4352//4628 4346//4724 +f 4357//4621 4346//4724 4350//4727 +f 4357//4621 4350//4727 4367//4732 +f 4357//4621 4367//4732 4390//4622 +f 4367//4732 4350//4727 4347//4726 +f 4367//4732 4347//4726 4358//4733 +f 4644//4630 4487//4633 4548//4561 +f 4644//4630 4548//4561 4690//4560 +f 4354//4722 4364//4605 4349//4614 +f 4345//4729 4344//4728 4351//4734 +f 4345//4729 4351//4734 4355//4735 +f 4365//4736 4385//4737 4355//4735 +f 4365//4736 4355//4735 4351//4734 +f 4447//4716 4440//4539 4408//4538 +f 4447//4716 4408//4538 4417//4604 +f 4376//4537 4410//4541 4409//4731 +f 4376//4537 4409//4731 4379//4535 +f 4440//4539 4468//4545 4459//4544 +f 4440//4539 4459//4544 4431//4540 +f 4410//4541 4376//4537 4361//4559 +f 4410//4541 4361//4559 4408//4538 +f 4459//4544 4434//4730 4431//4540 +f 4454//4655 4427//4654 4423//4717 +f 4436//4564 4416//4567 4406//4738 +f 4436//4564 4406//4738 4437//4739 +f 4485//4615 4468//4545 4440//4539 +f 4485//4615 4440//4539 4447//4716 +f 4713//4660 4735//4602 4802//4601 +f 4713//4660 4802//4601 4762//4697 +f 4529//4516 4496//4515 4689//4510 +f 4529//4516 4689//4510 4699//4509 +f 4609//4646 4503//4652 4540//4647 +f 4913//4740 4824//4741 4822//4594 +f 4913//4740 4822//4594 4912//4593 +f 4566//4645 4540//4647 4526//4742 +f 4566//4645 4526//4742 4531//4743 +f 4475//4744 4526//4742 4481//4745 +f 4475//4744 4481//4745 4463//4746 +f 4526//4742 4475//4744 4476//4747 +f 4526//4742 4476//4747 4531//4743 +f 4476//4747 4475//4744 4436//4564 +f 4476//4747 4436//4564 4437//4739 +f 4475//4744 4463//4746 4436//4564 +f 4463//4746 4481//4745 4448//4668 +f 4463//4746 4448//4668 4441//4565 +f 4448//4668 4481//4745 4480//4653 +f 4448//4668 4480//4653 4454//4655 +f 4526//4742 4540//4647 4480//4653 +f 4526//4742 4480//4653 4481//4745 +f 4459//4544 4469//4543 4434//4730 +f 4895//4691 4892//4696 4882//4588 +f 4895//4691 4882//4588 4880//4690 +f 4715//4658 4783//4683 4845//4687 +f 4715//4658 4845//4687 4794//4657 +f 4557//4631 4715//4658 4695//4547 +f 4557//4631 4695//4547 4524//4546 +f 4425//4603 4512//4707 4447//4716 +f 4561//4748 4539//4721 4429//4672 +f 4561//4748 4429//4672 4439//4671 +f 4583//4749 4451//4676 4457//4675 +f 4583//4749 4457//4675 4604//4678 +f 4432//4625 4390//4622 4367//4732 +f 4432//4625 4367//4732 4427//4654 +f 4367//4732 4358//4733 4391//4750 +f 4436//4564 4463//4746 4441//4565 +f 4385//4737 4393//4751 4359//4752 +f 4385//4737 4359//4752 4355//4735 +f 4394//4644 4362//4551 4375//4534 +f 4394//4644 
4375//4534 4398//4723 +f 4375//4534 4400//4521 4414//4706 +f 4375//4534 4414//4706 4398//4723 +f 4416//4567 4385//4737 4365//4736 +f 4416//4567 4365//4736 4406//4738 +f 4345//4729 4355//4735 4359//4752 +f 4345//4729 4359//4752 4353//4753 +f 4347//4726 4345//4729 4353//4753 +f 4347//4726 4353//4753 4358//4733 +f 4391//4750 4358//4733 4353//4753 +f 4391//4750 4353//4753 4383//4754 +f 4383//4754 4353//4753 4359//4752 +f 4383//4754 4359//4752 4393//4751 +f 4745//4755 4823//4756 4819//4591 +f 4745//4755 4819//4591 4743//4757 +f 4674//4758 4745//4755 4743//4757 +f 4674//4758 4743//4757 4683//4759 +f 4607//4760 4674//4758 4683//4759 +f 4607//4760 4683//4759 4625//4761 +f 4489//4762 4482//4763 4509//4764 +f 4576//4765 4507//4766 4537//4767 +f 4576//4765 4537//4767 4599//4768 +f 4702//4769 4638//4770 4651//4771 +f 4702//4769 4651//4771 4712//4772 +f 4788//4661 4702//4769 4712//4772 +f 4788//4661 4712//4772 4792//4773 +f 4911//4662 4788//4661 4792//4773 +f 4911//4662 4792//4773 4916//4774 +f 4743//4757 4819//4591 4822//4594 +f 4743//4757 4822//4594 4746//4775 +f 4683//4759 4743//4757 4746//4775 +f 4683//4759 4746//4775 4687//4776 +f 4625//4761 4683//4759 4687//4776 +f 4625//4761 4687//4776 4634//4777 +f 4509//4764 4519//4778 4549//4779 +f 4509//4764 4549//4779 4543//4780 +f 4599//4768 4537//4767 4565//4781 +f 4599//4768 4565//4781 4616//4782 +f 4712//4772 4651//4771 4663//4783 +f 4712//4772 4663//4783 4721//4784 +f 4792//4773 4712//4772 4721//4784 +f 4792//4773 4721//4784 4795//4785 +f 4916//4774 4792//4773 4795//4785 +f 4916//4774 4795//4785 4918//4786 +f 4758//4787 4828//4709 4836//4712 +f 4758//4787 4836//4712 4774//4788 +f 4698//4789 4758//4787 4774//4788 +f 4698//4789 4774//4788 4722//4790 +f 4656//4791 4698//4789 4722//4790 +f 4656//4791 4722//4790 4694//4792 +f 4573//4793 4582//4794 4640//4795 +f 4573//4793 4640//4795 4629//4796 +f 4633//4797 4597//4798 4650//4799 +f 4633//4797 4650//4799 4679//4800 +f 4730//4801 4682//4802 4703//4803 +f 4730//4801 4703//4803 4741//4804 +f 4797//4805 4730//4801 4741//4804 +f 4797//4805 4741//4804 4809//4806 +f 4920//4807 4797//4805 4809//4806 +f 4920//4807 4809//4806 4917//4808 +f 4824//4741 4749//4809 4746//4775 +f 4824//4741 4746//4775 4822//4594 +f 4749//4809 4693//4810 4687//4776 +f 4749//4809 4687//4776 4746//4775 +f 4641//4811 4596//4812 4585//4813 +f 4641//4811 4585//4813 4634//4777 +f 4553//4814 4551//4815 4543//4780 +f 4553//4814 4543//4780 4549//4779 +f 4575//4816 4621//4817 4616//4782 +f 4575//4816 4616//4782 4565//4781 +f 4672//4818 4724//4819 4721//4784 +f 4672//4818 4721//4784 4663//4783 +f 4724//4819 4796//4820 4795//4785 +f 4724//4819 4795//4785 4721//4784 +f 4918//4786 4795//4785 4796//4820 +f 4918//4786 4796//4820 4919//4821 +f 4523//4822 4607//4760 4625//4761 +f 4523//4822 4625//4761 4558//4823 +f 4558//4823 4625//4761 4634//4777 +f 4558//4823 4634//4777 4585//4813 +f 4693//4810 4641//4811 4634//4777 +f 4693//4810 4634//4777 4687//4776 +f 4611//4824 4656//4791 4694//4792 +f 4611//4824 4694//4792 4661//4825 +f 4582//4794 4611//4824 4661//4825 +f 4582//4794 4661//4825 4640//4795 +f 4596//4812 4553//4814 4549//4779 +f 4596//4812 4549//4779 4585//4813 +f 4519//4778 4558//4823 4585//4813 +f 4519//4778 4585//4813 4549//4779 +f 4490//4826 4523//4822 4558//4823 +f 4490//4826 4558//4823 4519//4778 +f 4682//4802 4633//4797 4679//4800 +f 4682//4802 4679//4800 4703//4803 +f 4621//4817 4672//4818 4663//4783 +f 4621//4817 4663//4783 4616//4782 +f 4651//4771 4599//4768 4616//4782 +f 4651//4771 4616//4782 4663//4783 +f 4638//4770 4576//4765 4599//4768 +f 
4638//4770 4599//4768 4651//4771 +f 4597//4798 4573//4793 4629//4796 +f 4597//4798 4629//4796 4650//4799 +f 4551//4815 4575//4816 4565//4781 +f 4551//4815 4565//4781 4543//4780 +f 4537//4767 4509//4764 4543//4780 +f 4537//4767 4543//4780 4565//4781 +f 4507//4766 4489//4762 4509//4764 +f 4507//4766 4509//4764 4537//4767 +f 4705//4827 4702//4769 4788//4661 +f 4705//4827 4788//4661 4790//4664 +f 4545//4718 4578//4720 4643//4700 +f 4684//4699 4545//4718 4643//4700 +f 4642//4828 4638//4770 4702//4769 +f 4642//4828 4702//4769 4705//4827 +f 4638//4770 4642//4828 4577//4829 +f 4638//4770 4577//4829 4576//4765 +f 4510//4830 4507//4766 4576//4765 +f 4510//4830 4576//4765 4577//4829 +f 4689//4510 4496//4515 4471//4569 +f 4689//4510 4471//4569 4664//4576 +f 4418//4520 4421//4557 4400//4521 +f 4438//4554 4421//4557 4418//4520 +f 4438//4554 4418//4520 4435//4523 +f 4465//4688 4438//4554 4460//4831 +f 4465//4688 4460//4831 4484//4832 +f 4465//4688 4484//4832 4492//4833 +f 4494//4834 4462//4525 4472//4524 +f 4494//4834 4472//4524 4521//4529 +f 4609//4646 4566//4645 4547//4616 +f 4823//4756 4907//4835 4908//4592 +f 4823//4756 4908//4592 4819//4591 +f 4714//4714 4764//4685 4754//4715 +f 4837//4684 4830//4836 4754//4715 +f 4837//4684 4754//4715 4764//4685 +f 4830//4836 4837//4684 4903//4708 +f 4830//4836 4903//4708 4905//4837 +f 4904//4838 4907//4835 4823//4756 +f 4904//4838 4823//4756 4829//4839 +f 4745//4755 4753//4840 4829//4839 +f 4745//4755 4829//4839 4823//4756 +f 4668//4841 4753//4840 4745//4755 +f 4668//4841 4745//4755 4674//4758 +f 4394//4644 4413//4643 4363//4552 +f 4363//4552 4413//4643 4407//4666 +f 4363//4552 4407//4666 4370//4612 +f 4370//4612 4407//4666 4399//4674 +f 4370//4612 4399//4674 4382//4613 +f 4399//4674 4397//4677 4373//4627 +f 4399//4674 4373//4627 4382//4613 +f 4388//4670 4360//4673 4352//4628 +f 4388//4670 4352//4628 4366//4629 +f 4346//4724 4352//4628 4360//4673 +f 4346//4724 4360//4673 4354//4722 +f 4354//4722 4349//4614 4343//4725 +f 4354//4722 4343//4725 4346//4724 +f 4343//4725 4349//4614 4348//4558 +f 4343//4725 4348//4558 4344//4728 +f 4351//4734 4344//4728 4348//4558 +f 4351//4734 4348//4558 4356//4536 +f 4365//4736 4351//4734 4356//4536 +f 4365//4736 4356//4536 4379//4535 +f 4365//4736 4379//4535 4409//4731 +f 4365//4736 4409//4731 4406//4738 +f 4409//4731 4434//4730 4437//4739 +f 4409//4731 4437//4739 4406//4738 +f 4469//4543 4476//4747 4437//4739 +f 4469//4543 4437//4739 4434//4730 +f 4531//4743 4476//4747 4469//4543 +f 4531//4743 4469//4543 4514//4542 +f 4547//4616 4566//4645 4531//4743 +f 4547//4616 4531//4743 4514//4542 +f 4512//4707 4609//4646 4547//4616 +f 4646//4649 4609//4646 4512//4707 +f 4646//4649 4512//4707 4539//4721 +f 4561//4748 4660//4650 4646//4649 +f 4561//4748 4646//4649 4539//4721 +f 4686//4636 4583//4749 4604//4678 +f 4686//4636 4604//4678 4701//4637 +f 4701//4637 4604//4678 4665//4679 +f 4701//4637 4665//4679 4736//4606 +f 4786//4598 4736//4606 4762//4697 +f 4762//4697 4802//4601 4818//4597 +f 4762//4697 4818//4597 4786//4598 +f 4825//4600 4851//4589 4818//4597 +f 4825//4600 4818//4597 4802//4601 +f 4879//4587 4851//4589 4825//4600 +f 4879//4587 4825//4600 4852//4656 +f 4880//4690 4882//4588 4879//4587 +f 4880//4690 4879//4587 4852//4656 +f 4690//4560 4793//4680 4783//4683 +f 4690//4560 4783//4683 4644//4630 +f 4557//4631 4644//4630 4783//4683 +f 4557//4631 4783//4683 4715//4658 +f 4449//4632 4414//4706 4453//4842 +f 4449//4632 4453//4842 4487//4633 +f 4453//4842 4414//4706 4428//4556 +f 4453//4842 4428//4556 4456//4635 +f 4548//4561 4487//4633 
4453//4842 +f 4730//4801 4797//4805 4796//4820 +f 4730//4801 4796//4820 4724//4819 +f 4587//4562 4456//4635 4473//4634 +f 4587//4562 4473//4634 4630//4705 +f 4668//4841 4674//4758 4607//4760 +f 4668//4841 4607//4760 4592//4843 +f 4523//4822 4520//4844 4592//4843 +f 4523//4822 4592//4843 4607//4760 +f 4520//4844 4523//4822 4490//4826 +f 4520//4844 4490//4826 4493//4845 +f 4423//4717 4391//4750 4383//4754 +f 4423//4717 4383//4754 4419//4669 +f 4427//4654 4367//4732 4391//4750 +f 4427//4654 4503//4652 4533//4648 +f 4427//4654 4533//4648 4432//4625 +f 4419//4669 4383//4754 4393//4751 +f 4419//4669 4393//4751 4420//4566 +f 4416//4567 4420//4566 4393//4751 +f 4416//4567 4393//4751 4385//4737 +f 4696//4574 4620//4573 4467//4608 +f 4423//4717 4427//4654 4391//4750 +f 4713//4660 4474//4667 4666//4548 +f 4906//4702 4816//4701 4791//4846 +f 4906//4702 4791//4846 4910//4847 +f 4919//4821 4796//4820 4797//4805 +f 4919//4821 4797//4805 4920//4807 +f 4630//4705 4473//4634 4545//4718 +f 4630//4705 4545//4718 4684//4699 +f 4816//4701 4740//4704 4791//4846 +f 4740//4704 4684//4699 4706//4698 +f 4740//4704 4706//4698 4791//4846 +f 4486//4689 4465//4688 4492//4833 +f 4486//4689 4492//4833 4511//4719 +f 4510//4830 4491//4848 4489//4762 +f 4510//4830 4489//4762 4507//4766 +f 4489//4762 4491//4848 4483//4849 +f 4489//4762 4483//4849 4482//4763 +f 4482//4763 4490//4826 4519//4778 +f 4482//4763 4519//4778 4509//4764 +f 4482//4763 4483//4849 4493//4845 +f 4482//4763 4493//4845 4490//4826 +f 4460//4831 4462//4525 4494//4834 +f 4460//4831 4494//4834 4484//4832 +f 4435//4523 4462//4525 4460//4831 +f 4438//4554 4435//4523 4460//4831 +f 4453//4842 4456//4635 4587//4562 +f 4453//4842 4587//4562 4548//4561 +f 4682//4802 4730//4801 4724//4819 +f 4682//4802 4724//4819 4672//4818 +f 4633//4797 4682//4802 4672//4818 +f 4633//4797 4672//4818 4621//4817 +f 4597//4798 4633//4797 4621//4817 +f 4597//4798 4621//4817 4575//4816 +f 4573//4793 4597//4798 4575//4816 +f 4573//4793 4575//4816 4551//4815 +f 4582//4794 4573//4793 4551//4815 +f 4582//4794 4551//4815 4553//4814 +f 4611//4824 4582//4794 4553//4814 +f 4611//4824 4553//4814 4596//4812 +f 4656//4791 4611//4824 4596//4812 +f 4656//4791 4596//4812 4641//4811 +f 4698//4789 4656//4791 4641//4811 +f 4698//4789 4641//4811 4693//4810 +f 4758//4787 4698//4789 4693//4810 +f 4758//4787 4693//4810 4749//4809 +f 4828//4709 4758//4787 4749//4809 +f 4828//4709 4749//4809 4824//4741 +f 4915//4710 4828//4709 4824//4741 +f 4915//4710 4824//4741 4913//4740 +f 4572//4850 4443//4851 4445//4852 +f 4572//4850 4445//4852 4574//4853 +f 4676//4854 4572//4850 4574//4853 +f 4676//4854 4686//4636 4631//4639 +f 4676//4854 4631//4639 4615//4855 +f 4631//4639 4458//4623 4455//4856 +f 4631//4639 4455//4856 4615//4855 +f 4403//4857 4455//4856 4458//4623 +f 4403//4857 4458//4623 4404//4617 +f 4377//4858 4368//4859 4381//4860 +f 4377//4858 4381//4860 4386//4861 +f 4396//4862 4395//4863 4368//4859 +f 4396//4862 4368//4859 4377//4858 +f 4396//4862 4445//4852 4443//4851 +f 4396//4862 4443//4851 4395//4863 +f 4392//4864 4442//4865 4439//4671 +f 4392//4864 4439//4671 4388//4670 +f 4660//4650 4561//4748 4579//4866 +f 4660//4650 4579//4866 4681//4867 +f 4660//4650 4681//4867 4588//4868 +f 4660//4650 4588//4868 4580//4651 +f 4588//4868 4446//4626 4580//4651 +f 4401//4619 4446//4626 4455//4856 +f 4401//4619 4455//4856 4403//4857 +f 4374//4620 4401//4619 4378//4869 +f 4372//4870 4366//4629 4374//4620 +f 4372//4870 4374//4620 4378//4869 +f 4392//4864 4388//4670 4366//4629 +f 4392//4864 4366//4629 4372//4870 +f 4615//4855 
4455//4856 4446//4626 +f 4615//4855 4446//4626 4588//4868 +f 4681//4867 4676//4854 4615//4855 +f 4681//4867 4615//4855 4588//4868 +f 4681//4867 4579//4866 4572//4850 +f 4681//4867 4572//4850 4676//4854 +f 4442//4865 4443//4851 4572//4850 +f 4442//4865 4572//4850 4579//4866 +f 4395//4863 4443//4851 4442//4865 +f 4395//4863 4442//4865 4392//4864 +f 4395//4863 4392//4864 4372//4870 +f 4395//4863 4372//4870 4368//4859 +f 4368//4859 4372//4870 4378//4869 +f 4368//4859 4378//4869 4381//4860 +f 4401//4619 4403//4857 4381//4860 +f 4401//4619 4381//4860 4378//4869 +f 4439//4671 4442//4865 4579//4866 +f 4439//4671 4579//4866 4561//4748 +f 4574//4853 4445//4852 4451//4676 +f 4574//4853 4451//4676 4583//4749 +f 4397//4677 4451//4676 4445//4852 +f 4397//4677 4445//4852 4396//4862 +f 4397//4677 4396//4862 4377//4858 +f 4397//4677 4377//4858 4373//4627 +f 4373//4627 4377//4858 4386//4861 +f 4373//4627 4386//4861 4384//4618 +f 4404//4617 4384//4618 4386//4861 +f 4381//4860 4403//4857 4404//4617 +f 4381//4860 4404//4617 4386//4861 +f 4686//4636 4676//4854 4574//4853 +f 4686//4636 4574//4853 4583//4749 +f 5123//4871 5033//4872 5038//4873 +f 5123//4871 5038//4873 5133//4874 +f 5392//4875 5293//4876 5326//4877 +f 5392//4875 5326//4877 5400//4878 +f 5420//4879 5392//4875 5400//4878 +f 5420//4879 5400//4878 5433//4880 +f 5398//4881 5420//4879 5422//4882 +f 5398//4881 5422//4882 5404//4883 +f 5404//4883 5387//4884 5378//4885 +f 5404//4883 5378//4885 5398//4881 +f 5387//4884 5360//4886 5350//4887 +f 5387//4884 5350//4887 5378//4885 +f 5350//4887 5284//4888 5345//4889 +f 5350//4887 5345//4889 5378//4885 +f 5229//4890 5284//4888 5350//4887 +f 5229//4890 5350//4887 5301//4891 +f 5398//4881 5378//4885 5345//4889 +f 5398//4881 5345//4889 5370//4892 +f 5345//4889 5284//4888 5174//4893 +f 5345//4889 5174//4893 5208//4894 +f 5370//4892 5345//4889 5208//4894 +f 5370//4892 5208//4894 5254//4895 +f 5420//4879 5433//4880 5447//4896 +f 5420//4879 5447//4896 5422//4882 +f 5443//4897 5446//4898 5466//4899 +f 5414//4900 5412//4901 5391//4902 +f 5414//4900 5391//4902 5382//4903 +f 5308//4904 5354//4905 5363//4906 +f 5308//4904 5363//4906 5353//4907 +f 5298//4908 5321//4909 5156//4910 +f 5298//4908 5156//4910 5127//4911 +f 5451//4912 5453//4913 5459//4914 +f 5451//4912 5459//4914 5460//4915 +f 5384//4916 5401//4917 5394//4918 +f 5384//4916 5394//4918 5372//4919 +f 5451//4912 5460//4915 5447//4896 +f 5451//4912 5447//4896 5433//4880 +f 5466//4899 5446//4898 5461//4920 +f 5466//4899 5461//4920 5474//4921 +f 5132//4922 5112//4923 5235//4924 +f 5132//4922 5235//4924 5274//4925 +f 5386//4926 5406//4927 5402//4928 +f 5386//4926 5402//4928 5381//4929 +f 5392//4875 5420//4879 5398//4881 +f 5392//4875 5398//4881 5370//4892 +f 5326//4877 5351//4930 5411//4931 +f 5326//4877 5411//4931 5400//4878 +f 5356//4932 5417//4933 5411//4931 +f 5356//4932 5411//4931 5351//4930 +f 5095//4934 5126//4935 5202//4936 +f 5055//4937 5095//4934 5202//4936 +f 5055//4937 5202//4936 5158//4938 +f 4987//4939 5011//4940 5055//4937 +f 4987//4939 5055//4937 5038//4873 +f 5033//4872 4949//4941 4987//4939 +f 5033//4872 4987//4939 5038//4873 +f 4953//4942 4941//4943 4949//4941 +f 4953//4942 4949//4941 5033//4872 +f 4953//4942 4894//4583 4893//4582 +f 4953//4942 4893//4582 4941//4943 +f 5043//4944 4968//4945 4953//4942 +f 5043//4944 4953//4942 5033//4872 +f 4968//4945 4898//4586 4894//4583 +f 4968//4945 4894//4583 4953//4942 +f 4943//4946 4949//4941 4941//4943 +f 4943//4946 4941//4943 4940//4947 +f 4949//4941 4943//4946 4967//4948 +f 4949//4941 4967//4948 4987//4939 +f 
5123//4871 5118//4949 5043//4944 +f 5123//4871 5043//4944 5033//4872 +f 4999//4950 4998//4951 4912//4593 +f 4999//4950 4912//4593 4908//4592 +f 5045//4952 5043//4944 5118//4949 +f 5045//4952 5118//4949 5113//4953 +f 5038//4873 5055//4937 5158//4938 +f 5038//4873 5158//4938 5133//4874 +f 4967//4948 5004//4954 5011//4940 +f 4967//4948 5011//4940 4987//4939 +f 5004//4954 5036//4955 5011//4940 +f 5053//4956 5087//4957 5020//4958 +f 5053//4956 5020//4958 4995//4959 +f 5397//4960 5458//4961 5405//4962 +f 5086//4963 5126//4935 5095//4934 +f 5086//4963 5095//4934 5036//4955 +f 5036//4955 5095//4934 5011//4940 +f 5011//4940 5095//4934 5055//4937 +f 5417//4933 5356//4932 5355//4964 +f 5417//4933 5355//4964 5410//4965 +f 5453//4913 5417//4933 5410//4965 +f 5453//4913 5410//4965 5442//4966 +f 5442//4966 5410//4965 5407//4967 +f 5442//4966 5407//4967 5435//4968 +f 5442//4966 5435//4968 5440//4969 +f 5442//4966 5440//4969 5452//4970 +f 5473//4971 5474//4921 5461//4920 +f 5473//4971 5461//4920 5458//4961 +f 5337//4972 5354//4905 5308//4904 +f 5337//4972 5308//4904 5275//4973 +f 5459//4914 5453//4913 5442//4966 +f 5459//4914 5442//4966 5452//4970 +f 5418//4974 5438//4975 5435//4968 +f 5418//4974 5435//4968 5407//4967 +f 5421//4976 5432//4977 5465//4978 +f 5421//4976 5465//4978 5448//4979 +f 5418//4974 5407//4967 5361//4980 +f 5418//4974 5361//4980 5364//4981 +f 5390//4982 5432//4977 5421//4976 +f 5390//4982 5421//4976 5376//4983 +f 5440//4969 5435//4968 5438//4975 +f 5440//4969 5438//4975 5449//4984 +f 5470//4985 5456//4986 5448//4979 +f 5470//4985 5448//4979 5465//4978 +f 5178//4987 5335//4988 5373//4989 +f 5178//4987 5373//4989 5265//4990 +f 5372//4919 5394//4918 5366//4991 +f 5372//4919 5366//4991 5349//4992 +f 5361//4980 5407//4967 5410//4965 +f 5361//4980 5410//4965 5355//4964 +f 5451//4912 5411//4931 5417//4933 +f 5451//4912 5417//4933 5453//4913 +f 5136//4993 5191//4994 5168//4995 +f 5136//4993 5168//4995 5121//4996 +f 4898//4586 4968//4945 4977//4997 +f 4898//4586 4977//4997 4900//4640 +f 5396//4998 5428//4999 5409//5000 +f 5158//4938 5202//4936 5356//4932 +f 5158//4938 5356//4932 5351//4930 +f 5460//4915 5459//4914 5428//4999 +f 5256//5001 5282//5002 5213//5003 +f 5086//4963 5121//4996 5168//4995 +f 5086//4963 5168//4995 5126//4935 +f 5289//5004 5242//5005 5162//5006 +f 5289//5004 5162//5006 5176//5007 +f 5319//5008 5289//5004 5176//5007 +f 5319//5008 5176//5007 5213//5003 +f 5282//5002 5342//5009 5319//5008 +f 5319//5008 5368//5010 5395//5011 +f 5342//5009 5368//5010 5319//5008 +f 5433//4880 5400//4878 5411//4931 +f 5433//4880 5411//4931 5451//4912 +f 5293//4876 5392//4875 5370//4892 +f 5293//4876 5370//4892 5254//4895 +f 4964//5012 5028//5013 5053//4956 +f 4964//5012 5053//4956 4995//4959 +f 5107//5014 5127//4911 5053//4956 +f 5107//5014 5053//4956 5028//5013 +f 5389//5015 5298//4908 5265//4990 +f 5389//5015 5265//4990 5373//4989 +f 5298//4908 5389//5015 5396//4998 +f 5298//4908 5396//4998 5321//4909 +f 5127//4911 5156//4910 5087//4957 +f 5127//4911 5087//4957 5053//4956 +f 5087//4957 5156//4910 5109//5016 +f 5034//5017 5032//5018 4909//4663 +f 5034//5017 4909//4663 4911//4662 +f 5358//5019 5348//5020 5409//5000 +f 5358//5019 5409//5000 5415//5021 +f 5381//4929 5402//4928 5403//5022 +f 5381//4929 5403//5022 5374//5023 +f 5242//5005 5289//5004 5390//4982 +f 5242//5005 5390//4982 5376//4983 +f 5434//5024 5462//5025 5393//5026 +f 5434//5024 5393//5026 5383//5027 +f 5423//5028 5425//5029 5371//5030 +f 5423//5028 5371//5030 5365//5031 +f 5358//5019 5415//5021 5423//5028 +f 5358//5019 5423//5028 
5365//5031 +f 5396//4998 5409//5000 5348//5020 +f 5396//4998 5348//5020 5321//4909 +f 5202//4936 5355//4964 5356//4932 +f 5218//5032 5157//5033 5358//5019 +f 5218//5032 5358//5019 5365//5031 +f 5157//5033 5109//5016 5348//5020 +f 5157//5033 5348//5020 5358//5019 +f 5168//4995 5191//4994 5364//4981 +f 5168//4995 5364//4981 5361//4980 +f 5168//4995 5361//4980 5355//4964 +f 5168//4995 5355//4964 5126//4935 +f 5029//5034 5039//5035 4897//4682 +f 5029//5034 4897//4682 4899//4681 +f 4968//4945 5043//4944 5045//4952 +f 4968//4945 5045//4952 4977//4997 +f 5045//4952 5058//5036 4985//5037 +f 5045//4952 4985//5037 4977//4997 +f 5039//5035 4969//5038 4896//4686 +f 5039//5035 4896//4686 4897//4682 +f 5357//5039 5384//4916 5372//4919 +f 5357//5039 5372//4919 5336//5040 +f 4942//5041 4895//4691 4896//4686 +f 4942//5041 4896//4686 4969//5038 +f 5132//4922 5029//5034 5024//5042 +f 5132//4922 5024//5042 5112//4923 +f 5029//5034 4899//4681 4901//4693 +f 5029//5034 4901//4693 5024//5042 +f 4902//4694 5008//5043 5024//5042 +f 4902//4694 5024//5042 4901//4693 +f 5028//5013 4964//5012 4942//5041 +f 5028//5013 4942//5041 4969//5038 +f 4941//4943 4893//4582 4892//4696 +f 4941//4943 4892//4696 4940//4947 +f 5086//4963 5060//5044 5157//5033 +f 5116//5045 5179//5046 5138//5047 +f 5008//5043 4902//4694 4906//4702 +f 5008//5043 4906//4702 5006//5048 +f 5093//5049 5008//5043 5006//5048 +f 5093//5049 5006//5048 5082//5050 +f 5008//5043 5093//5049 5112//4923 +f 5008//5043 5112//4923 5024//5042 +f 5093//5049 5082//5050 5138//5047 +f 5093//5049 5138//5047 5192//5051 +f 5112//4923 5093//5049 5192//5051 +f 5112//4923 5192//5051 5235//4924 +f 5422//4882 5408//5052 5394//4918 +f 5422//4882 5394//4918 5401//4917 +f 5310//5053 5337//4972 5275//4973 +f 5293//4876 5254//4895 5118//4949 +f 5293//4876 5118//4949 5123//4871 +f 4985//5037 4903//4708 4900//4640 +f 4985//5037 4900//4640 4977//4997 +f 4992//5054 4986//5055 4914//4711 +f 4992//5054 4914//4711 4915//4710 +f 5153//5056 5068//5057 5108//5058 +f 5118//4949 5254//4895 5208//4894 +f 5118//4949 5208//4894 5113//4953 +f 5108//5058 5058//5036 5045//4952 +f 5108//5058 5045//4952 5113//4953 +f 5113//4953 5208//4894 5174//4893 +f 5113//4953 5174//4893 5108//5058 +f 5174//4893 5153//5056 5108//5058 +f 5229//4890 5153//5056 5174//4893 +f 5229//4890 5174//4893 5284//4888 +f 5310//5053 5375//5059 5337//4972 +f 5403//5022 5399//5060 5368//5010 +f 5403//5022 5368//5010 5374//5023 +f 5336//5040 5372//4919 5349//4992 +f 5336//5040 5349//4992 5277//5061 +f 5277//5061 5244//5062 5311//5063 +f 5277//5061 5311//5063 5336//5040 +f 5393//5026 5397//4960 5310//5053 +f 5393//5026 5310//5053 5283//5064 +f 5468//5065 5458//4961 5397//4960 +f 5458//4961 5461//4920 5414//4900 +f 5458//4961 5414//4900 5405//4962 +f 5396//4998 5389//5015 5424//5066 +f 5396//4998 5424//5066 5428//4999 +f 5060//5044 5109//5016 5157//5033 +f 5424//5066 5389//5015 5373//4989 +f 5424//5066 5373//4989 5408//5052 +f 5397//4960 5405//4962 5375//5059 +f 5462//5025 5468//5065 5397//4960 +f 5462//5025 5397//4960 5393//5026 +f 5156//4910 5321//4909 5348//5020 +f 5476//5067 5472//5068 5475//5069 +f 5476//5067 5475//5069 5479//5070 +f 5475//5069 5477//5071 5478//5072 +f 5475//5069 5478//5072 5479//5070 +f 5391//4902 5412//4901 5413//5073 +f 5391//4902 5413//5073 5388//5074 +f 5465//4978 5472//5068 5476//5067 +f 5465//4978 5476//5067 5470//4985 +f 5465//4978 5432//4977 5455//5075 +f 5465//4978 5455//5075 5472//5068 +f 5455//5075 5464//5076 5475//5069 +f 5455//5075 5475//5069 5472//5068 +f 5178//4987 5132//4922 5274//4925 +f 5178//4987 
5274//4925 5335//4988 +f 5468//5065 5473//4971 5458//4961 +f 5477//5071 5467//5077 5471//5078 +f 5477//5071 5471//5078 5478//5072 +f 5457//5079 5471//5078 5467//5077 +f 5457//5079 5467//5077 5437//5080 +f 5375//5059 5405//4962 5414//4900 +f 5375//5059 5414//4900 5382//4903 +f 5446//4898 5443//4897 5413//5073 +f 5446//4898 5413//5073 5412//4901 +f 5382//4903 5391//4902 5363//4906 +f 5382//4903 5363//4906 5354//4905 +f 5412//4901 5414//4900 5461//4920 +f 5412//4901 5461//4920 5446//4898 +f 5363//4906 5391//4902 5388//5074 +f 5368//5010 5399//5060 5395//5011 +f 5386//4926 5385//5081 5416//5082 +f 5386//4926 5416//5082 5406//4927 +f 5337//4972 5375//5059 5382//4903 +f 5337//4972 5382//4903 5354//4905 +f 5109//5016 5060//5044 5020//4958 +f 5109//5016 5020//4958 5087//4957 +f 5293//4876 5123//4871 5133//4874 +f 5293//4876 5133//4874 5326//4877 +f 5213//5003 5282//5002 5319//5008 +f 4913//4740 4912//4593 4998//4951 +f 4913//4740 4998//4951 4996//5083 +f 5256//5001 5291//5084 5296//5085 +f 5256//5001 5296//5085 5282//5002 +f 5347//5086 5359//5087 5341//5088 +f 5347//5086 5341//5088 5296//5085 +f 5296//5085 5291//5084 5346//5089 +f 5296//5085 5346//5089 5347//5086 +f 5346//5089 5385//5081 5386//4926 +f 5346//5089 5386//4926 5347//5086 +f 5347//5086 5386//4926 5359//5087 +f 5359//5087 5381//4929 5374//5023 +f 5359//5087 5374//5023 5341//5088 +f 5374//5023 5368//5010 5342//5009 +f 5374//5023 5342//5009 5341//5088 +f 5296//5085 5341//5088 5342//5009 +f 5296//5085 5342//5009 5282//5002 +f 5363//4906 5388//5074 5353//4907 +f 4895//4691 4942//5041 4940//4947 +f 4895//4691 4940//4947 4892//4696 +f 5107//5014 5028//5013 4969//5038 +f 5107//5014 4969//5038 5039//5035 +f 5265//4990 5298//4908 5127//4911 +f 5265//4990 5127//4911 5107//5014 +f 5397//4960 5375//5059 5310//5053 +f 5261//5090 5383//5027 5393//5026 +f 5261//5090 5393//5026 5283//5064 +f 5239//5091 5218//5032 5365//5031 +f 5239//5091 5365//5031 5371//5030 +f 5390//4982 5395//5011 5455//5075 +f 5390//4982 5455//5075 5432//4977 +f 5455//5075 5431//5092 5464//5076 +f 5386//4926 5381//4929 5359//5087 +f 5437//5080 5463//5093 5429//5094 +f 5437//5080 5467//5077 5463//5093 +f 5428//4999 5424//5066 5447//4896 +f 5428//4999 5447//4896 5460//4915 +f 5447//4896 5424//5066 5408//5052 +f 5447//4896 5408//5052 5422//4882 +f 5406//4927 5416//5082 5457//5079 +f 5406//4927 5457//5079 5437//5080 +f 5477//5071 5469//5095 5463//5093 +f 5477//5071 5463//5093 5467//5077 +f 5475//5069 5464//5076 5469//5095 +f 5475//5069 5469//5095 5477//5071 +f 5431//5092 5439//5096 5469//5095 +f 5431//5092 5469//5095 5464//5076 +f 5439//5096 5429//5094 5463//5093 +f 5439//5096 5463//5093 5469//5095 +f 5077//5097 5079//5098 4999//4950 +f 5077//5097 4999//4950 4997//5099 +f 5148//5100 5139//5101 5079//5098 +f 5148//5100 5079//5098 5077//5097 +f 5215//5102 5197//5103 5139//5101 +f 5215//5102 5139//5101 5148//5100 +f 5333//5104 5313//5105 5340//5106 +f 5246//5107 5223//5108 5285//5109 +f 5246//5107 5285//5109 5315//5110 +f 5120//5111 5110//5112 5171//5113 +f 5120//5111 5171//5113 5184//5114 +f 5034//5017 5030//5115 5110//5112 +f 5034//5017 5110//5112 5120//5111 +f 4911//4662 4916//4774 5030//5115 +f 4911//4662 5030//5115 5034//5017 +f 5079//5098 5076//5116 4998//4951 +f 5079//5098 4998//4951 4999//4950 +f 5139//5101 5135//5117 5076//5116 +f 5139//5101 5076//5116 5079//5098 +f 5197//5103 5188//5118 5135//5117 +f 5197//5103 5135//5117 5139//5101 +f 5313//5105 5279//5119 5273//5120 +f 5313//5105 5273//5120 5303//5121 +f 5223//5108 5206//5122 5257//5123 +f 5223//5108 5257//5123 5285//5109 +f 
5110//5112 5101//5124 5159//5125 +f 5110//5112 5159//5125 5171//5113 +f 5030//5115 5027//5126 5101//5124 +f 5030//5115 5101//5124 5110//5112 +f 4916//4774 4918//4786 5027//5126 +f 4916//4774 5027//5126 5030//5115 +f 5064//5127 5048//5128 4986//5055 +f 5064//5127 4986//5055 4992//5054 +f 5124//5129 5100//5130 5048//5128 +f 5124//5129 5048//5128 5064//5127 +f 5166//5131 5128//5132 5100//5130 +f 5166//5131 5100//5130 5124//5129 +f 5249//5133 5193//5134 5182//5135 +f 5249//5133 5182//5135 5240//5136 +f 5189//5137 5143//5138 5172//5139 +f 5189//5137 5172//5139 5225//5140 +f 5092//5141 5081//5142 5119//5143 +f 5092//5141 5119//5143 5140//5144 +f 5025//5145 5013//5146 5081//5142 +f 5025//5145 5081//5142 5092//5141 +f 4920//4807 4917//4808 5013//5146 +f 4920//4807 5013//5146 5025//5145 +f 4996//5083 4998//4951 5076//5116 +f 4996//5083 5076//5116 5073//5147 +f 5073//5147 5076//5116 5135//5117 +f 5073//5147 5135//5117 5129//5148 +f 5181//5149 5188//5118 5237//5150 +f 5181//5149 5237//5150 5226//5151 +f 5269//5152 5273//5120 5279//5119 +f 5269//5152 5279//5119 5271//5153 +f 5247//5154 5257//5123 5206//5122 +f 5247//5154 5206//5122 5201//5155 +f 5150//5156 5159//5125 5101//5124 +f 5150//5156 5101//5124 5098//5157 +f 5098//5157 5101//5124 5027//5126 +f 5098//5157 5027//5126 5026//5158 +f 4918//4786 4919//4821 5026//5158 +f 4918//4786 5026//5158 5027//5126 +f 5299//5159 5264//5160 5197//5103 +f 5299//5159 5197//5103 5215//5102 +f 5264//5160 5237//5150 5188//5118 +f 5264//5160 5188//5118 5197//5103 +f 5129//5148 5135//5117 5188//5118 +f 5129//5148 5188//5118 5181//5149 +f 5211//5161 5161//5162 5128//5132 +f 5211//5161 5128//5132 5166//5131 +f 5240//5136 5182//5135 5161//5162 +f 5240//5136 5161//5162 5211//5161 +f 5226//5151 5237//5150 5273//5120 +f 5226//5151 5273//5120 5269//5152 +f 5303//5121 5273//5120 5237//5150 +f 5303//5121 5237//5150 5264//5160 +f 5332//5163 5303//5121 5264//5160 +f 5332//5163 5264//5160 5299//5159 +f 5140//5144 5119//5143 5143//5138 +f 5140//5144 5143//5138 5189//5137 +f 5201//5155 5206//5122 5159//5125 +f 5201//5155 5159//5125 5150//5156 +f 5171//5113 5159//5125 5206//5122 +f 5171//5113 5206//5122 5223//5108 +f 5184//5114 5171//5113 5223//5108 +f 5184//5114 5223//5108 5246//5107 +f 5225//5140 5172//5139 5193//5134 +f 5225//5140 5193//5134 5249//5133 +f 5271//5153 5279//5119 5257//5123 +f 5271//5153 5257//5123 5247//5154 +f 5285//5109 5257//5123 5279//5119 +f 5285//5109 5279//5119 5313//5105 +f 5315//5110 5285//5109 5313//5105 +f 5315//5110 5313//5105 5333//5104 +f 5117//5164 5032//5018 5034//5017 +f 5117//5164 5034//5017 5120//5111 +f 5277//5061 5179//5046 5244//5062 +f 5138//5047 5179//5046 5277//5061 +f 5180//5165 5117//5164 5120//5111 +f 5180//5165 5120//5111 5184//5114 +f 5184//5114 5246//5107 5245//5166 +f 5184//5114 5245//5166 5180//5165 +f 5312//5167 5245//5166 5246//5107 +f 5312//5167 5246//5107 5315//5110 +f 5133//4874 5158//4938 5351//4930 +f 5133//4874 5351//4930 5326//4877 +f 5404//4883 5422//4882 5401//4917 +f 5384//4916 5387//4884 5404//4883 +f 5384//4916 5404//4883 5401//4917 +f 5357//5039 5362//5168 5384//4916 +f 5357//5039 5330//5169 5338//5170 +f 5357//5039 5338//5170 5362//5168 +f 5328//5171 5301//4891 5350//4887 +f 5328//5171 5350//4887 5360//4886 +f 5213//5003 5275//4973 5256//5001 +f 4997//5099 4999//4950 4908//4592 +f 4997//5099 4908//4592 4907//4835 +f 5108//5058 5068//5057 5058//5036 +f 4985//5037 5058//5036 5068//5057 +f 4985//5037 5068//5057 4990//5172 +f 4990//5172 4905//4837 4903//4708 +f 4990//5172 4903//4708 4985//5037 +f 4904//4838 4991//5173 
4997//5099 +f 4904//4838 4997//5099 4907//4835 +f 5077//5097 4997//5099 4991//5173 +f 5077//5097 4991//5173 5069//5174 +f 5154//5175 5148//5100 5077//5097 +f 5154//5175 5077//5097 5069//5174 +f 5428//4999 5459//4914 5409//5000 +f 5459//4914 5452//4970 5415//5021 +f 5459//4914 5415//5021 5409//5000 +f 5452//4970 5440//4969 5423//5028 +f 5452//4970 5423//5028 5415//5021 +f 5423//5028 5440//4969 5449//4984 +f 5423//5028 5449//4984 5425//5029 +f 5434//5024 5456//4986 5470//4985 +f 5434//5024 5470//4985 5462//5025 +f 5476//5067 5468//5065 5462//5025 +f 5476//5067 5462//5025 5470//4985 +f 5468//5065 5476//5067 5479//5070 +f 5468//5065 5479//5070 5473//4971 +f 5479//5070 5478//5072 5474//4921 +f 5479//5070 5474//4921 5473//4971 +f 5471//5078 5466//4899 5474//4921 +f 5471//5078 5474//4921 5478//5072 +f 5457//5079 5443//4897 5466//4899 +f 5457//5079 5466//4899 5471//5078 +f 5457//5079 5416//5082 5413//5073 +f 5457//5079 5413//5073 5443//4897 +f 5413//5073 5416//5082 5385//5081 +f 5413//5073 5385//5081 5388//5074 +f 5353//4907 5388//5074 5385//5081 +f 5353//4907 5385//5081 5346//5089 +f 5291//5084 5308//4904 5353//4907 +f 5291//5084 5353//4907 5346//5089 +f 5275//4973 5308//4904 5291//5084 +f 5275//4973 5291//5084 5256//5001 +f 5310//5053 5275//4973 5213//5003 +f 5176//5007 5283//5064 5310//5053 +f 5176//5007 5310//5053 5213//5003 +f 5261//5090 5283//5064 5176//5007 +f 5261//5090 5176//5007 5162//5006 +f 5136//4993 5121//4996 5218//5032 +f 5136//4993 5218//5032 5239//5091 +f 5121//4996 5086//4963 5157//5033 +f 5121//4996 5157//5033 5218//5032 +f 5036//4955 5060//5044 5086//4963 +f 5060//5044 5036//4955 5004//4954 +f 5060//5044 5004//4954 5020//4958 +f 4995//4959 5020//4958 5004//4954 +f 4995//4959 5004//4954 4967//4948 +f 4943//4946 4964//5012 4995//4959 +f 4943//4946 4995//4959 4967//4948 +f 4942//5041 4964//5012 4943//4946 +f 4942//5041 4943//4946 4940//4947 +f 5132//4922 5178//4987 5039//5035 +f 5132//4922 5039//5035 5029//5034 +f 5265//4990 5107//5014 5039//5035 +f 5265//4990 5039//5035 5178//4987 +f 5373//4989 5335//4988 5369//5176 +f 5373//4989 5369//5176 5408//5052 +f 5369//5176 5366//4991 5394//4918 +f 5369//5176 5394//4918 5408//5052 +f 5274//4925 5369//5176 5335//4988 +f 5092//5141 5098//5157 5026//5158 +f 5092//5141 5026//5158 5025//5145 +f 5235//4924 5192//5051 5349//4992 +f 5235//4924 5349//4992 5366//4991 +f 5154//5175 5230//5177 5215//5102 +f 5154//5175 5215//5102 5148//5100 +f 5299//5159 5215//5102 5230//5177 +f 5299//5159 5230//5177 5302//5178 +f 5302//5178 5329//5179 5332//5163 +f 5302//5178 5332//5163 5299//5159 +f 5399//5060 5403//5022 5439//5096 +f 5399//5060 5439//5096 5431//5092 +f 5395//5011 5431//5092 5455//5075 +f 5395//5011 5390//4982 5289//5004 +f 5395//5011 5289//5004 5319//5008 +f 5403//5022 5402//4928 5429//5094 +f 5403//5022 5429//5094 5439//5096 +f 5406//4927 5437//5080 5429//5094 +f 5406//4927 5429//5094 5402//4928 +f 5126//4935 5355//4964 5202//4936 +f 5399//5060 5431//5092 5395//5011 +f 5109//5016 5156//4910 5348//5020 +f 4906//4702 4910//4847 5031//5180 +f 4906//4702 5031//5180 5006//5048 +f 4919//4821 4920//4807 5025//5145 +f 4919//4821 5025//5145 5026//5158 +f 5192//5051 5138//5047 5277//5061 +f 5192//5051 5277//5061 5349//4992 +f 5006//5048 5031//5180 5082//5050 +f 5082//5050 5031//5180 5116//5045 +f 5082//5050 5116//5045 5138//5047 +f 5336//5040 5311//5063 5330//5169 +f 5336//5040 5330//5169 5357//5039 +f 5312//5167 5315//5110 5333//5104 +f 5312//5167 5333//5104 5331//5181 +f 5333//5104 5340//5106 5339//5182 +f 5333//5104 5339//5182 5331//5181 +f 5340//5106 
5313//5105 5303//5121 +f 5340//5106 5303//5121 5332//5163 +f 5340//5106 5332//5163 5329//5179 +f 5340//5106 5329//5179 5339//5182 +f 5362//5168 5338//5170 5328//5171 +f 5362//5168 5328//5171 5360//4886 +f 5387//4884 5362//5168 5360//4886 +f 5384//4916 5362//5168 5387//4884 +f 5369//5176 5274//4925 5235//4924 +f 5369//5176 5235//4924 5366//4991 +f 5140//5144 5150//5156 5098//5157 +f 5140//5144 5098//5157 5092//5141 +f 5189//5137 5201//5155 5150//5156 +f 5189//5137 5150//5156 5140//5144 +f 5225//5140 5247//5154 5201//5155 +f 5225//5140 5201//5155 5189//5137 +f 5249//5133 5271//5153 5247//5154 +f 5249//5133 5247//5154 5225//5140 +f 5240//5136 5269//5152 5271//5153 +f 5240//5136 5271//5153 5249//5133 +f 5211//5161 5226//5151 5269//5152 +f 5211//5161 5269//5152 5240//5136 +f 5166//5131 5181//5149 5226//5151 +f 5166//5131 5226//5151 5211//5161 +f 5124//5129 5129//5148 5181//5149 +f 5124//5129 5181//5149 5166//5131 +f 5064//5127 5073//5147 5129//5148 +f 5064//5127 5129//5148 5124//5129 +f 4992//5054 4996//5083 5073//5147 +f 4992//5054 5073//5147 5064//5127 +f 4915//4710 4913//4740 4996//5083 +f 4915//4710 4996//5083 4992//5054 +f 5250//5183 5248//5184 5377//5185 +f 5250//5183 5377//5185 5379//5186 +f 5146//5187 5248//5184 5250//5183 +f 5146//5187 5207//5188 5191//4994 +f 5146//5187 5191//4994 5136//4993 +f 5191//4994 5207//5188 5367//5189 +f 5191//4994 5367//5189 5364//4981 +f 5419//5190 5418//4974 5364//4981 +f 5419//5190 5364//4981 5367//5189 +f 5445//5191 5436//5192 5441//5193 +f 5445//5191 5441//5193 5454//5194 +f 5426//5195 5445//5191 5454//5194 +f 5426//5195 5454//5194 5427//5196 +f 5426//5195 5427//5196 5379//5186 +f 5426//5195 5379//5186 5377//5185 +f 5430//5197 5434//5024 5383//5027 +f 5430//5197 5383//5027 5380//5198 +f 5162//5006 5141//5199 5243//5200 +f 5162//5006 5243//5200 5261//5090 +f 5162//5006 5242//5005 5234//5201 +f 5162//5006 5234//5201 5141//5199 +f 5234//5201 5242//5005 5376//4983 +f 5421//4976 5419//5190 5367//5189 +f 5421//4976 5367//5189 5376//4983 +f 5448//4979 5444//5202 5421//4976 +f 5450//5203 5444//5202 5448//4979 +f 5450//5203 5448//4979 5456//4986 +f 5430//5197 5450//5203 5456//4986 +f 5430//5197 5456//4986 5434//5024 +f 5207//5188 5234//5201 5376//4983 +f 5207//5188 5376//4983 5367//5189 +f 5141//5199 5234//5201 5207//5188 +f 5141//5199 5207//5188 5146//5187 +f 5141//5199 5146//5187 5250//5183 +f 5141//5199 5250//5183 5243//5200 +f 5380//5198 5243//5200 5250//5183 +f 5380//5198 5250//5183 5379//5186 +f 5427//5196 5430//5197 5380//5198 +f 5427//5196 5380//5198 5379//5186 +f 5427//5196 5454//5194 5450//5203 +f 5427//5196 5450//5203 5430//5197 +f 5454//5194 5441//5193 5444//5202 +f 5454//5194 5444//5202 5450//5203 +f 5421//4976 5444//5202 5441//5193 +f 5421//4976 5441//5193 5419//5190 +f 5383//5027 5261//5090 5243//5200 +f 5383//5027 5243//5200 5380//5198 +f 5248//5184 5239//5091 5371//5030 +f 5248//5184 5371//5030 5377//5185 +f 5425//5029 5426//5195 5377//5185 +f 5425//5029 5377//5185 5371//5030 +f 5425//5029 5449//4984 5445//5191 +f 5425//5029 5445//5191 5426//5195 +f 5449//4984 5438//4975 5436//5192 +f 5449//4984 5436//5192 5445//5191 +f 5418//4974 5436//5192 5438//4975 +f 5441//5193 5436//5192 5418//4974 +f 5441//5193 5418//4974 5419//5190 +f 5136//4993 5239//5091 5248//5184 +f 5136//4993 5248//5184 5146//5187 +f 4742//5204 4751//5205 4782//5206 +f 4742//5204 4782//5206 4780//5207 +f 4742//5204 4738//5208 4744//5209 +f 4742//5204 4744//5209 4748//5210 +f 4734//5211 4739//5212 4744//5209 +f 4734//5211 4744//5209 4738//5208 +f 4737//5213 4739//5212 4734//5211 +f 
4737//5213 4734//5211 4733//5214 +f 4737//5213 4773//5215 4778//5216 +f 4737//5213 4778//5216 4739//5212 +f 4773//5215 4737//5213 4733//5214 +f 4773//5215 4733//5214 4765//5217 +f 4733//5214 4734//5211 4772//5218 +f 4733//5214 4772//5218 4765//5217 +f 4765//5217 4772//5218 4778//5216 +f 4765//5217 4778//5216 4773//5215 +f 4744//5209 4739//5212 4778//5216 +f 4744//5209 4778//5216 4781//5219 +f 4785//5220 4748//5210 4744//5209 +f 4785//5220 4744//5209 4781//5219 +f 4759//5221 4748//5210 4785//5220 +f 4759//5221 4785//5220 4787//5222 +f 4782//5206 4787//5222 4785//5220 +f 4782//5206 4785//5220 4780//5207 +f 4785//5220 4781//5219 4775//5223 +f 4785//5220 4775//5223 4780//5207 +f 4772//5218 4775//5223 4781//5219 +f 4772//5218 4781//5219 4778//5216 +f 4772//5218 4734//5211 4738//5208 +f 4772//5218 4738//5208 4775//5223 +f 4742//5204 4780//5207 4775//5223 +f 4742//5204 4775//5223 4738//5208 +f 4751//5205 4742//5204 4748//5210 +f 4751//5205 4748//5210 4759//5221 +f 4751//5205 4759//5221 4787//5222 +f 4751//5205 4787//5222 4782//5206 +f 4610//5224 4628//5225 4647//5226 +f 4610//5224 4647//5226 4627//5227 +f 4610//5224 4595//5228 4639//5229 +f 4610//5224 4639//5229 4652//5230 +f 4595//5228 4581//5231 4626//5232 +f 4595//5228 4626//5232 4639//5229 +f 4626//5232 4581//5231 4571//5233 +f 4626//5232 4571//5233 4619//5234 +f 4649//5235 4626//5232 4619//5234 +f 4649//5235 4619//5234 4632//5236 +f 4626//5232 4649//5235 4655//5237 +f 4626//5232 4655//5237 4639//5229 +f 4639//5229 4655//5237 4671//5238 +f 4639//5229 4671//5238 4652//5230 +f 4612//5239 4627//5227 4671//5238 +f 4612//5239 4671//5238 4655//5237 +f 4605//5240 4612//5239 4655//5237 +f 4605//5240 4655//5237 4649//5235 +f 4581//5231 4595//5228 4612//5239 +f 4581//5231 4612//5239 4605//5240 +f 4605//5240 4590//5241 4571//5233 +f 4605//5240 4571//5233 4581//5231 +f 4619//5234 4571//5233 4590//5241 +f 4619//5234 4590//5241 4632//5236 +f 4605//5240 4649//5235 4632//5236 +f 4605//5240 4632//5236 4590//5241 +f 4610//5224 4627//5227 4612//5239 +f 4610//5224 4612//5239 4595//5228 +f 4680//5242 4671//5238 4627//5227 +f 4680//5242 4627//5227 4647//5226 +f 4680//5242 4667//5243 4652//5230 +f 4680//5242 4652//5230 4671//5238 +f 4628//5225 4667//5243 4680//5242 +f 4628//5225 4680//5242 4647//5226 +f 4628//5225 4610//5224 4652//5230 +f 4628//5225 4652//5230 4667//5243 +f 4532//5244 4556//5245 4589//5246 +f 4532//5244 4589//5246 4563//5247 +f 4532//5244 4506//5248 4498//5249 +f 4532//5244 4498//5249 4515//5250 +f 4506//5248 4497//5251 4478//5252 +f 4506//5248 4478//5252 4498//5249 +f 4478//5252 4497//5251 4479//5253 +f 4478//5252 4479//5253 4470//5254 +f 4504//5255 4478//5252 4470//5254 +f 4504//5255 4470//5254 4488//5256 +f 4478//5252 4504//5255 4518//5257 +f 4478//5252 4518//5257 4498//5249 +f 4498//5249 4518//5257 4544//5258 +f 4498//5249 4544//5258 4515//5250 +f 4535//5259 4563//5247 4544//5258 +f 4535//5259 4544//5258 4518//5257 +f 4517//5260 4535//5259 4518//5257 +f 4517//5260 4518//5257 4504//5255 +f 4497//5251 4506//5248 4535//5259 +f 4497//5251 4535//5259 4517//5260 +f 4517//5260 4499//5261 4479//5253 +f 4517//5260 4479//5253 4497//5251 +f 4470//5254 4479//5253 4499//5261 +f 4470//5254 4499//5261 4488//5256 +f 4517//5260 4504//5255 4488//5256 +f 4517//5260 4488//5256 4499//5261 +f 4532//5244 4563//5247 4535//5259 +f 4532//5244 4535//5259 4506//5248 +f 4570//5262 4544//5258 4563//5247 +f 4570//5262 4563//5247 4589//5246 +f 4570//5262 4542//5263 4515//5250 +f 4570//5262 4515//5250 4544//5258 +f 4556//5245 4542//5263 4570//5262 +f 4556//5245 4570//5262 
4589//5246 +f 4556//5245 4532//5244 4515//5250 +f 4556//5245 4515//5250 4542//5263 +f 5063//5264 5074//5265 5080//5266 +f 5063//5264 5080//5266 5071//5267 +f 5035//5268 5063//5264 5071//5267 +f 5035//5268 5071//5267 5040//5269 +f 5035//5268 5037//5270 5074//5265 +f 5035//5268 5074//5265 5063//5264 +f 5037//5270 5041//5271 5078//5272 +f 5037//5270 5078//5272 5074//5265 +f 5041//5271 5044//5273 5083//5274 +f 5041//5271 5083//5274 5078//5272 +f 5083//5274 5044//5273 5049//5275 +f 5083//5274 5049//5275 5085//5276 +f 5083//5274 5085//5276 5089//5277 +f 5083//5274 5089//5277 5088//5278 +f 5049//5275 5057//5279 5089//5277 +f 5049//5275 5089//5277 5085//5276 +f 5049//5275 5044//5273 5050//5280 +f 5049//5275 5050//5280 5057//5279 +f 5088//5278 5089//5277 5057//5279 +f 5088//5278 5057//5279 5050//5280 +f 5050//5280 5047//5281 5084//5282 +f 5050//5280 5084//5282 5088//5278 +f 5084//5282 5078//5272 5083//5274 +f 5084//5282 5083//5274 5088//5278 +f 5041//5271 5047//5281 5050//5280 +f 5041//5271 5050//5280 5044//5273 +f 5037//5270 5042//5283 5047//5281 +f 5037//5270 5047//5281 5041//5271 +f 5042//5283 5037//5270 5035//5268 +f 5042//5283 5035//5268 5040//5269 +f 5071//5267 5080//5266 5042//5283 +f 5071//5267 5042//5283 5040//5269 +f 5047//5281 5042//5283 5080//5266 +f 5047//5281 5080//5266 5084//5282 +f 5084//5282 5080//5266 5074//5265 +f 5084//5282 5074//5265 5078//5272 +f 5194//5284 5155//5285 5170//5286 +f 5194//5284 5170//5286 5212//5287 +f 5142//5288 5151//5289 5170//5286 +f 5142//5288 5170//5286 5155//5285 +f 5175//5290 5195//5291 5151//5289 +f 5175//5290 5151//5289 5142//5288 +f 5142//5288 5155//5285 5194//5284 +f 5142//5288 5194//5284 5175//5290 +f 5194//5284 5212//5287 5195//5291 +f 5194//5284 5195//5291 5175//5290 +f 5195//5291 5212//5287 5227//5292 +f 5195//5291 5227//5292 5210//5293 +f 5151//5289 5195//5291 5210//5293 +f 5151//5289 5210//5293 5167//5294 +f 5170//5286 5151//5289 5167//5294 +f 5170//5286 5167//5294 5183//5295 +f 5183//5295 5227//5292 5212//5287 +f 5183//5295 5212//5287 5170//5286 +f 5183//5295 5196//5296 5241//5297 +f 5183//5295 5241//5297 5227//5292 +f 5173//5298 5190//5299 5203//5300 +f 5173//5298 5203//5300 5196//5296 +f 5183//5295 5167//5294 5173//5298 +f 5183//5295 5173//5298 5196//5296 +f 5173//5298 5217//5301 5232//5302 +f 5173//5298 5232//5302 5190//5299 +f 5217//5301 5173//5298 5167//5294 +f 5217//5301 5167//5294 5210//5293 +f 5241//5297 5196//5296 5203//5300 +f 5241//5297 5203//5300 5251//5303 +f 5232//5302 5251//5303 5203//5300 +f 5232//5302 5203//5300 5190//5299 +f 5210//5293 5227//5292 5241//5297 +f 5210//5293 5241//5297 5217//5301 +f 5266//5304 5280//5305 5307//5306 +f 5266//5304 5307//5306 5290//5307 +f 5252//5308 5278//5309 5307//5306 +f 5252//5308 5307//5306 5280//5305 +f 5233//5310 5259//5311 5278//5309 +f 5233//5310 5278//5309 5252//5308 +f 5252//5308 5280//5305 5266//5304 +f 5252//5308 5266//5304 5233//5310 +f 5266//5304 5290//5307 5259//5311 +f 5266//5304 5259//5311 5233//5310 +f 5259//5311 5290//5307 5316//5312 +f 5259//5311 5316//5312 5287//5313 +f 5278//5309 5259//5311 5287//5313 +f 5278//5309 5287//5313 5304//5314 +f 5307//5306 5278//5309 5304//5314 +f 5307//5306 5304//5314 5324//5315 +f 5324//5315 5316//5312 5290//5307 +f 5324//5315 5290//5307 5307//5306 +f 5324//5315 5344//5316 5325//5317 +f 5324//5315 5325//5317 5316//5312 +f 5318//5318 5334//5319 5352//5320 +f 5318//5318 5352//5320 5344//5316 +f 5324//5315 5304//5314 5318//5318 +f 5324//5315 5318//5318 5344//5316 +f 5318//5318 5305//5321 5323//5322 +f 5318//5318 5323//5322 5334//5319 +f 5305//5321 
5318//5318 5304//5314 +f 5305//5321 5304//5314 5287//5313 +f 5325//5317 5344//5316 5352//5320 +f 5325//5317 5352//5320 5343//5323 +f 5323//5322 5343//5323 5352//5320 +f 5323//5322 5352//5320 5334//5319 +f 5287//5313 5316//5312 5325//5317 +f 5287//5313 5325//5317 5305//5321 +f 5232//5302 5217//5301 5241//5297 +f 5232//5302 5241//5297 5251//5303 +f 5323//5322 5305//5321 5325//5317 +f 5323//5322 5325//5317 5343//5323 +f 6277//5324 6246//5325 6242//5326 +f 6277//5324 6242//5326 6275//5327 +f 6277//5324 6276//5328 6245//5329 +f 6277//5324 6245//5329 6246//5325 +f 6276//5328 6272//5330 6250//5331 +f 6276//5328 6250//5331 6245//5329 +f 6240//5332 6241//5333 6279//5334 +f 6240//5332 6279//5334 6278//5335 +f 6242//5326 6240//5332 6278//5335 +f 6242//5326 6278//5335 6275//5327 +f 6200//5336 6196//5337 6241//5333 +f 6200//5336 6241//5333 6240//5332 +f 6208//5338 6201//5339 6242//5326 +f 6208//5338 6242//5326 6246//5325 +f 6201//5339 6200//5336 6240//5332 +f 6201//5339 6240//5332 6242//5326 +f 6207//5340 6208//5338 6246//5325 +f 6207//5340 6246//5325 6245//5329 +f 6206//5341 6207//5340 6245//5329 +f 6206//5341 6245//5329 6250//5331 +f 6315//5342 6320//5343 6278//5335 +f 6315//5342 6278//5335 6279//5334 +f 6320//5343 6324//5344 6275//5327 +f 6320//5343 6275//5327 6278//5335 +f 6324//5344 6329//5345 6277//5324 +f 6324//5344 6277//5324 6275//5327 +f 6329//5345 6327//5346 6276//5328 +f 6329//5345 6276//5328 6277//5324 +f 6272//5330 6276//5328 6327//5346 +f 6272//5330 6327//5346 6317//5347 +f 6173//5348 6155//5349 6207//5340 +f 6173//5348 6207//5340 6206//5341 +f 6155//5349 6157//5350 6208//5338 +f 6155//5349 6208//5338 6207//5340 +f 6157//5350 6156//5351 6201//5339 +f 6157//5350 6201//5339 6208//5338 +f 6156//5351 6160//5352 6200//5336 +f 6156//5351 6200//5336 6201//5339 +f 6160//5352 6169//5353 6196//5337 +f 6160//5352 6196//5337 6200//5336 +f 6078//5354 6101//5355 6081//5356 +f 6078//5354 6081//5356 6065//5357 +f 6046//5358 6072//5359 6078//5354 +f 6046//5358 6078//5354 6065//5357 +f 6117//5360 6104//5361 6081//5356 +f 6117//5360 6081//5356 6101//5355 +f 6130//5362 6120//5363 6104//5361 +f 6130//5362 6104//5361 6117//5360 +f 6120//5363 6130//5362 6146//5364 +f 6120//5363 6146//5364 6133//5365 +f 6133//5365 6146//5364 6169//5353 +f 6133//5365 6169//5353 6160//5352 +f 6049//5366 6070//5367 6072//5359 +f 6049//5366 6072//5359 6046//5358 +f 6052//5368 6073//5369 6070//5367 +f 6052//5368 6070//5367 6049//5366 +f 6069//5370 6073//5369 6052//5368 +f 6069//5370 6052//5368 6053//5371 +f 6051//5372 6068//5373 6069//5370 +f 6051//5372 6069//5370 6053//5371 +f 6051//5372 6045//5374 6063//5375 +f 6051//5372 6063//5375 6068//5373 +f 6045//5374 6042//5376 6057//5377 +f 6045//5374 6057//5377 6063//5375 +f 6057//5377 6042//5376 6028//5378 +f 6057//5377 6028//5378 6041//5379 +f 6041//5379 6028//5378 6021//5380 +f 6041//5379 6021//5380 6029//5381 +f 6029//5381 6021//5380 6020//5382 +f 6029//5381 6020//5382 6022//5383 +f 6020//5382 6014//5384 6018//5385 +f 6020//5382 6018//5385 6022//5383 +f 6011//5386 6018//5385 6014//5384 +f 6011//5386 6014//5384 6009//5387 +f 6010//5388 6011//5386 6009//5387 +f 6010//5388 6009//5387 5997//5389 +f 5997//5389 5995//5390 6006//5391 +f 5997//5389 6006//5391 6010//5388 +f 5995//5390 5996//5392 6007//5393 +f 5995//5390 6007//5393 6006//5391 +f 6036//5394 6021//5380 6028//5378 +f 6036//5394 6028//5378 6026//5395 +f 6091//5396 6036//5394 6026//5395 +f 6091//5396 6026//5395 6062//5397 +f 6154//5398 6091//5396 6062//5397 +f 6154//5398 6062//5397 6138//5399 +f 6126//5400 6133//5365 6160//5352 +f 
6126//5400 6160//5352 6156//5351 +f 6100//5401 6120//5363 6133//5365 +f 6100//5401 6133//5365 6126//5400 +f 6077//5402 6104//5361 6120//5363 +f 6077//5402 6120//5363 6100//5401 +f 6061//5403 6081//5356 6104//5361 +f 6061//5403 6104//5361 6077//5402 +f 6065//5357 6081//5356 6061//5403 +f 6065//5357 6061//5403 6046//5358 +f 6381//5404 6393//5405 6428//5406 +f 6381//5404 6428//5406 6398//5407 +f 6433//5408 6428//5406 6393//5405 +f 6433//5408 6393//5405 6389//5409 +f 6388//5410 6398//5407 6428//5406 +f 6388//5410 6428//5406 6413//5411 +f 6388//5410 6369//5412 6381//5404 +f 6388//5410 6381//5404 6398//5407 +f 6335//5413 6342//5414 6320//5343 +f 6335//5413 6320//5343 6315//5342 +f 6342//5414 6335//5413 6344//5415 +f 6342//5414 6344//5415 6355//5416 +f 6353//5417 6370//5418 6355//5416 +f 6353//5417 6355//5416 6344//5415 +f 6388//5410 6370//5418 6353//5417 +f 6388//5410 6353//5417 6369//5412 +f 6413//5411 6387//5419 6370//5418 +f 6413//5411 6370//5418 6388//5410 +f 6365//5420 6355//5416 6370//5418 +f 6365//5420 6370//5418 6387//5419 +f 6342//5414 6351//5421 6324//5344 +f 6342//5414 6324//5344 6320//5343 +f 6351//5421 6342//5414 6355//5416 +f 6351//5421 6355//5416 6365//5420 +f 6155//5349 6173//5348 6134//5422 +f 6155//5349 6134//5422 6116//5423 +f 6157//5350 6155//5349 6116//5423 +f 6157//5350 6116//5423 6118//5424 +f 6157//5350 6118//5424 6126//5400 +f 6157//5350 6126//5400 6156//5351 +f 6356//5425 6348//5426 6317//5347 +f 6356//5425 6317//5347 6327//5346 +f 6354//5427 6356//5425 6327//5346 +f 6354//5427 6327//5346 6329//5345 +f 6351//5421 6354//5427 6329//5345 +f 6351//5421 6329//5345 6324//5344 +f 6348//5426 6356//5425 6386//5428 +f 6348//5426 6386//5428 6360//5429 +f 6356//5425 6354//5427 6379//5430 +f 6356//5425 6379//5430 6386//5428 +f 6365//5420 6379//5430 6354//5427 +f 6365//5420 6354//5427 6351//5421 +f 6116//5423 6134//5422 6108//5431 +f 6116//5423 6108//5431 6079//5432 +f 6079//5432 6080//5433 6118//5424 +f 6079//5432 6118//5424 6116//5423 +f 6118//5424 6080//5433 6100//5401 +f 6118//5424 6100//5401 6126//5400 +f 6092//5434 6055//5435 6079//5432 +f 6092//5434 6079//5432 6108//5431 +f 6055//5435 6056//5436 6080//5433 +f 6055//5435 6080//5433 6079//5432 +f 6077//5402 6100//5401 6080//5433 +f 6077//5402 6080//5433 6056//5436 +f 6422//5437 6382//5438 6360//5429 +f 6422//5437 6360//5429 6386//5428 +f 6414//5439 6422//5437 6386//5428 +f 6414//5439 6386//5428 6379//5430 +f 6387//5419 6414//5439 6379//5430 +f 6387//5419 6379//5430 6365//5420 +f 6437//5440 6395//5441 6382//5438 +f 6437//5440 6382//5438 6422//5437 +f 6436//5442 6437//5440 6422//5437 +f 6436//5442 6422//5437 6414//5439 +f 6436//5442 6414//5439 6387//5419 +f 6436//5442 6387//5419 6413//5411 +f 6085//5443 6043//5444 6055//5435 +f 6085//5443 6055//5435 6092//5434 +f 6043//5444 6038//5445 6056//5436 +f 6043//5444 6056//5436 6055//5435 +f 6056//5436 6038//5445 6061//5403 +f 6056//5436 6061//5403 6077//5402 +f 6405//5446 6395//5441 6437//5440 +f 6405//5446 6437//5440 6442//5447 +f 6437//5440 6436//5442 6443//5448 +f 6437//5440 6443//5448 6442//5447 +f 6443//5448 6436//5442 6413//5411 +f 6443//5448 6413//5411 6428//5406 +f 6043//5444 6085//5443 6090//5449 +f 6043//5444 6090//5449 6040//5450 +f 6040//5450 6033//5451 6038//5445 +f 6040//5450 6038//5445 6043//5444 +f 6038//5445 6033//5451 6046//5358 +f 6038//5445 6046//5358 6061//5403 +f 6102//5452 6044//5453 6040//5450 +f 6102//5452 6040//5450 6090//5449 +f 6044//5453 6030//5454 6033//5451 +f 6044//5453 6033//5451 6040//5450 +f 6046//5358 6033//5451 6030//5454 +f 6046//5358 6030//5454 
6049//5366 +f 6442//5447 6448//5455 6404//5456 +f 6442//5447 6404//5456 6405//5446 +f 6451//5457 6448//5455 6442//5447 +f 6451//5457 6442//5447 6443//5448 +f 6433//5408 6451//5457 6443//5448 +f 6433//5408 6443//5448 6428//5406 +f 6286//5458 6165//5459 6143//5460 +f 6286//5458 6143//5460 6270//5461 +f 6165//5459 6154//5398 6138//5399 +f 6165//5459 6138//5399 6143//5460 +f 6394//5462 6286//5458 6270//5461 +f 6394//5462 6270//5461 6399//5463 +f 6407//5464 6394//5462 6399//5463 +f 6407//5464 6399//5463 6406//5465 +f 6453//5466 6407//5464 6406//5465 +f 6453//5466 6406//5465 6454//5467 +f 6474//5468 6453//5466 6454//5467 +f 6474//5468 6454//5467 6465//5469 +f 6464//5470 6474//5468 6465//5469 +f 6464//5470 6465//5469 6444//5471 +f 6445//5472 6464//5470 6444//5471 +f 6445//5472 6444//5471 6431//5473 +f 6378//5474 6426//5475 6429//5476 +f 6378//5474 6429//5476 6385//5477 +f 6385//5477 6429//5476 6433//5408 +f 6385//5477 6433//5408 6389//5409 +f 6383//5478 6427//5479 6426//5475 +f 6383//5478 6426//5475 6378//5474 +f 6390//5480 6425//5481 6427//5479 +f 6390//5480 6427//5479 6383//5478 +f 6402//5482 6434//5483 6425//5481 +f 6402//5482 6425//5481 6390//5480 +f 6434//5483 6402//5482 6431//5473 +f 6434//5483 6431//5473 6444//5471 +f 6406//5465 6399//5463 6397//5484 +f 6406//5465 6397//5484 6412//5485 +f 6465//5469 6454//5467 6449//5486 +f 6465//5469 6449//5486 6456//5487 +f 6454//5467 6406//5465 6412//5485 +f 6454//5467 6412//5485 6449//5486 +f 6434//5483 6444//5471 6465//5469 +f 6434//5483 6465//5469 6456//5487 +f 6449//5486 6412//5485 6404//5456 +f 6449//5486 6404//5456 6448//5455 +f 6397//5484 6392//5488 6404//5456 +f 6397//5484 6404//5456 6412//5485 +f 6456//5487 6449//5486 6448//5455 +f 6456//5487 6448//5455 6451//5457 +f 6258//5489 6392//5488 6397//5484 +f 6258//5489 6397//5484 6267//5490 +f 6397//5484 6399//5463 6270//5461 +f 6397//5484 6270//5461 6267//5490 +f 6267//5490 6122//5491 6110//5492 +f 6267//5490 6110//5492 6258//5489 +f 6122//5491 6267//5490 6270//5461 +f 6122//5491 6270//5461 6143//5460 +f 6115//5493 6102//5452 6110//5492 +f 6115//5493 6110//5492 6122//5491 +f 6115//5493 6122//5491 6143//5460 +f 6115//5493 6143//5460 6138//5399 +f 6048//5494 6115//5493 6138//5399 +f 6048//5494 6138//5399 6062//5397 +f 6115//5493 6048//5494 6044//5453 +f 6115//5493 6044//5453 6102//5452 +f 6048//5494 6025//5495 6030//5454 +f 6048//5494 6030//5454 6044//5453 +f 6025//5495 6048//5494 6062//5397 +f 6025//5495 6062//5397 6026//5395 +f 6025//5495 6026//5395 6028//5378 +f 6025//5495 6028//5378 6042//5376 +f 6045//5374 6051//5372 6053//5371 +f 6045//5374 6053//5371 6025//5495 +f 6456//5487 6425//5481 6434//5483 +f 6025//5495 6042//5376 6045//5374 +f 6025//5495 6053//5371 6052//5368 +f 6025//5495 6052//5368 6030//5454 +f 6049//5366 6030//5454 6052//5368 +f 6426//5475 6456//5487 6451//5457 +f 6426//5475 6451//5457 6429//5476 +f 6426//5475 6427//5479 6425//5481 +f 6426//5475 6425//5481 6456//5487 +f 6451//5457 6433//5408 6429//5476 +f 6023//5496 6084//5497 6113//5498 +f 6023//5496 6113//5498 6050//5499 +f 6050//5499 6019//5500 6008//5501 +f 6050//5499 6008//5501 6023//5496 +f 5996//5392 6008//5501 6019//5500 +f 5996//5392 6019//5500 6007//5393 +f 6084//5497 6140//5502 6149//5503 +f 6084//5497 6149//5503 6113//5498 +f 6140//5502 6212//5504 6210//5505 +f 6140//5502 6210//5505 6149//5503 +f 6212//5504 6269//5506 6266//5507 +f 6212//5504 6266//5507 6210//5505 +f 6269//5506 6323//5508 6314//5509 +f 6269//5506 6314//5509 6266//5507 +f 6400//5510 6435//5511 6401//5512 +f 6400//5510 6401//5512 6373//5513 +f 6373//5513 
6346//5514 6358//5515 +f 6373//5513 6358//5515 6400//5510 +f 6346//5514 6314//5509 6323//5508 +f 6346//5514 6323//5508 6358//5515 +f 6441//5516 6430//5517 6452//5518 +f 6441//5516 6452//5518 6472//5519 +f 6452//5518 6430//5517 6401//5512 +f 6452//5518 6401//5512 6435//5511 +f 6488//5520 6487//5521 6461//5522 +f 6488//5520 6461//5522 6460//5523 +f 6460//5523 6458//5524 6481//5525 +f 6460//5523 6481//5525 6488//5520 +f 6458//5524 6441//5516 6472//5519 +f 6458//5524 6472//5519 6481//5525 +f 6464//5470 6445//5472 6455//5526 +f 6464//5470 6455//5526 6477//5527 +f 6482//5528 6477//5527 6455//5526 +f 6482//5528 6455//5526 6462//5529 +f 6487//5521 6482//5528 6462//5529 +f 6487//5521 6462//5529 6461//5522 +f 6495//5530 6493//5531 6488//5520 +f 6495//5530 6488//5520 6481//5525 +f 6493//5531 6489//5532 6487//5521 +f 6493//5531 6487//5521 6488//5520 +f 6489//5532 6483//5533 6482//5528 +f 6489//5532 6482//5528 6487//5521 +f 6483//5533 6480//5534 6477//5527 +f 6483//5533 6477//5527 6482//5528 +f 6464//5470 6477//5527 6480//5534 +f 6464//5470 6480//5534 6474//5468 +f 6453//5466 6474//5468 6480//5534 +f 6453//5466 6480//5534 6463//5535 +f 6497//5536 6484//5537 6493//5531 +f 6497//5536 6493//5531 6495//5530 +f 6484//5537 6473//5538 6489//5532 +f 6484//5537 6489//5532 6493//5531 +f 6469//5539 6483//5533 6489//5532 +f 6469//5539 6489//5532 6473//5538 +f 6480//5534 6483//5533 6469//5539 +f 6480//5534 6469//5539 6463//5535 +f 6473//5538 6438//5540 6424//5541 +f 6473//5538 6424//5541 6469//5539 +f 6424//5541 6418//5542 6463//5535 +f 6424//5541 6463//5535 6469//5539 +f 6453//5466 6463//5535 6418//5542 +f 6453//5466 6418//5542 6407//5464 +f 6479//5543 6470//5544 6484//5537 +f 6479//5543 6484//5537 6497//5536 +f 6497//5536 6494//5545 6492//5546 +f 6492//5546 6479//5543 6497//5536 +f 6447//5547 6438//5540 6473//5538 +f 6447//5547 6473//5538 6484//5537 +f 6459//5548 6484//5537 6470//5544 +f 6484//5537 6459//5548 6447//5547 +f 6394//5462 6407//5464 6418//5542 +f 6394//5462 6418//5542 6409//5549 +f 6423//5550 6409//5549 6418//5542 +f 6423//5550 6418//5542 6424//5541 +f 6424//5541 6438//5540 6432//5551 +f 6424//5541 6432//5551 6423//5550 +f 6491//5552 6486//5553 6492//5546 +f 6491//5552 6492//5546 6494//5545 +f 6486//5553 6475//5554 6479//5543 +f 6486//5553 6479//5543 6492//5546 +f 6475//5554 6468//5555 6470//5544 +f 6475//5554 6470//5544 6479//5543 +f 6468//5555 6457//5556 6459//5548 +f 6468//5555 6459//5548 6470//5544 +f 6447//5547 6459//5548 6457//5556 +f 6447//5547 6457//5556 6439//5557 +f 6439//5557 6432//5551 6438//5540 +f 6439//5557 6438//5540 6447//5547 +f 6394//5462 6409//5549 6294//5558 +f 6394//5462 6294//5558 6286//5458 +f 6294//5558 6409//5549 6423//5550 +f 6294//5558 6423//5550 6296//5559 +f 6432//5551 6298//5560 6296//5559 +f 6432//5551 6296//5559 6423//5550 +f 6439//5557 6297//5561 6298//5560 +f 6439//5557 6298//5560 6432//5551 +f 6457//5556 6306//5562 6297//5561 +f 6457//5556 6297//5561 6439//5557 +f 6468//5555 6304//5563 6306//5562 +f 6468//5555 6306//5562 6457//5556 +f 6475//5554 6303//5564 6304//5563 +f 6475//5554 6304//5563 6468//5555 +f 6486//5553 6305//5565 6303//5564 +f 6486//5553 6303//5564 6475//5554 +f 6491//5552 6292//5566 6305//5565 +f 6491//5552 6305//5565 6486//5553 +f 6452//5518 6466//5567 6478//5568 +f 6452//5518 6478//5568 6472//5519 +f 6478//5568 6466//5567 6471//5569 +f 6478//5568 6471//5569 6485//5570 +f 6481//5525 6472//5519 6478//5568 +f 6481//5525 6478//5568 6495//5530 +f 6495//5530 6478//5568 6485//5570 +f 6495//5530 6485//5570 6496//5571 +f 6494//5545 6497//5536 6495//5530 +f 
6494//5545 6495//5530 6496//5571 +f 6446//5572 6450//5573 6471//5569 +f 6446//5572 6471//5569 6466//5567 +f 6466//5567 6452//5518 6435//5511 +f 6466//5567 6435//5511 6446//5572 +f 6490//5574 6491//5552 6494//5545 +f 6490//5574 6494//5545 6496//5571 +f 6496//5571 6485//5570 6476//5575 +f 6496//5571 6476//5575 6490//5574 +f 6467//5576 6476//5575 6485//5570 +f 6467//5576 6485//5570 6471//5569 +f 6440//5577 6467//5576 6471//5569 +f 6440//5577 6471//5569 6450//5573 +f 6440//5577 6232//5578 6249//5579 +f 6440//5577 6249//5579 6467//5576 +f 6467//5576 6249//5579 6260//5580 +f 6467//5576 6260//5580 6476//5575 +f 6476//5575 6260//5580 6271//5581 +f 6476//5575 6271//5581 6490//5574 +f 6292//5566 6491//5552 6490//5574 +f 6292//5566 6490//5574 6271//5581 +f 6296//5559 6182//5582 6177//5583 +f 6296//5559 6177//5583 6294//5558 +f 6286//5458 6294//5558 6177//5583 +f 6286//5458 6177//5583 6165//5459 +f 6178//5584 6298//5560 6297//5561 +f 6178//5584 6297//5561 6166//5585 +f 6182//5582 6296//5559 6298//5560 +f 6182//5582 6298//5560 6178//5584 +f 6015//5586 6249//5579 6232//5578 +f 6015//5586 6232//5578 6002//5587 +f 6035//5588 6260//5580 6249//5579 +f 6035//5588 6249//5579 6015//5586 +f 6067//5589 6271//5581 6260//5580 +f 6067//5589 6260//5580 6035//5588 +f 6166//5585 6297//5561 6306//5562 +f 6166//5585 6306//5562 6150//5590 +f 6150//5590 6306//5562 6304//5563 +f 6150//5590 6304//5563 6141//5591 +f 6141//5591 6304//5563 6303//5564 +f 6141//5591 6303//5564 6128//5592 +f 6128//5592 6303//5564 6305//5565 +f 6128//5592 6305//5565 6119//5593 +f 6119//5593 6305//5565 6292//5566 +f 6119//5593 6292//5566 6099//5594 +f 6099//5594 6292//5566 6271//5581 +f 6099//5594 6271//5581 6067//5589 +f 6154//5398 6165//5459 6177//5583 +f 6154//5398 6177//5583 6167//5595 +f 6172//5596 6167//5595 6177//5583 +f 6172//5596 6177//5583 6182//5582 +f 6182//5582 6178//5584 6162//5597 +f 6182//5582 6162//5597 6172//5596 +f 6178//5584 6166//5585 6151//5598 +f 6178//5584 6151//5598 6162//5597 +f 6147//5599 6151//5598 6166//5585 +f 6147//5599 6166//5585 6150//5590 +f 6135//5600 6147//5599 6150//5590 +f 6135//5600 6150//5590 6141//5591 +f 6125//5601 6135//5600 6141//5591 +f 6125//5601 6141//5591 6128//5592 +f 6112//5602 6125//5601 6128//5592 +f 6112//5602 6128//5592 6119//5593 +f 6119//5593 6099//5594 6087//5603 +f 6119//5593 6087//5603 6112//5602 +f 6099//5594 6067//5589 6059//5604 +f 6099//5594 6059//5604 6087//5603 +f 6027//5605 6059//5604 6067//5589 +f 6027//5605 6067//5589 6035//5588 +f 5998//5606 6012//5607 6015//5586 +f 5998//5606 6015//5586 6002//5587 +f 6035//5588 6015//5586 6012//5607 +f 6035//5588 6012//5607 6027//5605 +f 6410//5608 6455//5526 6445//5472 +f 6410//5608 6445//5472 6421//5609 +f 6073//5369 6069//5370 6093//5610 +f 6073//5369 6093//5610 6095//5611 +f 6036//5394 6091//5396 6111//5612 +f 6036//5394 6111//5612 6039//5613 +f 6020//5382 6021//5380 6036//5394 +f 6020//5382 6036//5394 6039//5613 +f 6091//5396 6154//5398 6167//5595 +f 6091//5396 6167//5595 6111//5612 +f 6039//5613 6037//5614 6014//5384 +f 6039//5613 6014//5384 6020//5382 +f 6109//5615 6037//5614 6039//5613 +f 6109//5615 6039//5613 6111//5612 +f 6172//5596 6109//5615 6111//5612 +f 6172//5596 6111//5612 6167//5595 +f 6380//5616 6403//5617 6431//5473 +f 6380//5616 6431//5473 6402//5482 +f 6445//5472 6431//5473 6403//5617 +f 6445//5472 6403//5617 6421//5609 +f 6385//5477 6389//5409 6374//5618 +f 6385//5477 6374//5618 6367//5619 +f 6367//5619 6363//5620 6378//5474 +f 6367//5619 6378//5474 6385//5477 +f 6383//5478 6378//5474 6363//5620 +f 6383//5478 6363//5620 
6368//5621 +f 6390//5480 6383//5478 6368//5621 +f 6390//5480 6368//5621 6375//5622 +f 6402//5482 6390//5480 6375//5622 +f 6402//5482 6375//5622 6380//5616 +f 6381//5404 6369//5412 6357//5623 +f 6381//5404 6357//5623 6366//5624 +f 6366//5624 6376//5625 6393//5405 +f 6366//5624 6393//5405 6381//5404 +f 6389//5409 6393//5405 6376//5625 +f 6389//5409 6376//5625 6374//5618 +f 6353//5417 6344//5415 6337//5626 +f 6353//5417 6337//5626 6345//5627 +f 6345//5627 6357//5623 6369//5412 +f 6345//5627 6369//5412 6353//5417 +f 6326//5628 6337//5626 6344//5415 +f 6326//5628 6344//5415 6335//5413 +f 6315//5342 6279//5334 6274//5629 +f 6315//5342 6274//5629 6311//5630 +f 6311//5630 6326//5628 6335//5413 +f 6311//5630 6335//5413 6315//5342 +f 6243//5631 6274//5629 6279//5334 +f 6243//5631 6279//5334 6241//5333 +f 6205//5632 6243//5631 6241//5333 +f 6205//5632 6241//5333 6196//5337 +f 6180//5633 6205//5632 6196//5337 +f 6180//5633 6196//5337 6169//5353 +f 6169//5353 6146//5364 6153//5634 +f 6169//5353 6153//5634 6180//5633 +f 6144//5635 6153//5634 6146//5364 +f 6144//5635 6146//5364 6130//5362 +f 6130//5362 6117//5360 6129//5636 +f 6130//5362 6129//5636 6144//5635 +f 6121//5637 6129//5636 6117//5360 +f 6121//5637 6117//5360 6101//5355 +f 6101//5355 6078//5354 6105//5638 +f 6101//5355 6105//5638 6121//5637 +f 6097//5639 6105//5638 6078//5354 +f 6097//5639 6078//5354 6072//5359 +f 6072//5359 6070//5367 6096//5640 +f 6072//5359 6096//5640 6097//5639 +f 6070//5367 6073//5369 6095//5611 +f 6070//5367 6095//5611 6096//5640 +f 6063//5375 6057//5377 6082//5641 +f 6063//5375 6082//5641 6086//5642 +f 6068//5373 6063//5375 6086//5642 +f 6068//5373 6086//5642 6089//5643 +f 6069//5370 6068//5373 6089//5643 +f 6069//5370 6089//5643 6093//5610 +f 6071//5644 6082//5641 6057//5377 +f 6071//5644 6057//5377 6041//5379 +f 6064//5645 6071//5644 6041//5379 +f 6064//5645 6041//5379 6029//5381 +f 6088//5646 6064//5645 6029//5381 +f 6088//5646 6029//5381 6022//5383 +f 6083//5647 6088//5646 6022//5383 +f 6083//5647 6022//5383 6018//5385 +f 5999//5648 5991//5649 5995//5390 +f 5999//5648 5995//5390 5997//5389 +f 5994//5650 5991//5649 5999//5648 +f 5994//5650 5999//5648 6005//5651 +f 5998//5606 5994//5650 6005//5651 +f 5998//5606 6005//5651 6012//5607 +f 6018//5385 6011//5386 6076//5652 +f 6018//5385 6076//5652 6083//5647 +f 6037//5614 6017//5653 6009//5387 +f 6037//5614 6009//5387 6014//5384 +f 5997//5389 6009//5387 6017//5653 +f 5997//5389 6017//5653 5999//5648 +f 6172//5596 6162//5597 6151//5598 +f 6172//5596 6151//5598 6109//5615 +f 6027//5605 6012//5607 6005//5651 +f 6027//5605 6005//5651 6059//5604 +f 6034//5654 6005//5651 5999//5648 +f 6034//5654 5999//5648 6017//5653 +f 6059//5604 6005//5651 6034//5654 +f 6059//5604 6034//5654 6087//5603 +f 6017//5653 6037//5614 6109//5615 +f 6017//5653 6109//5615 6075//5655 +f 6147//5599 6075//5655 6109//5615 +f 6147//5599 6109//5615 6151//5598 +f 6112//5602 6034//5654 6075//5655 +f 6112//5602 6075//5655 6125//5601 +f 6034//5654 6017//5653 6075//5655 +f 6147//5599 6135//5600 6125//5601 +f 6147//5599 6125//5601 6075//5655 +f 6087//5603 6034//5654 6112//5602 +f 6066//5656 6076//5652 6011//5386 +f 6066//5656 6011//5386 6010//5388 +f 6060//5657 6066//5656 6010//5388 +f 6060//5657 6010//5388 6006//5391 +f 6006//5391 6007//5393 6058//5658 +f 6006//5391 6058//5658 6060//5657 +f 6007//5393 6019//5500 6074//5659 +f 6007//5393 6074//5659 6058//5658 +f 6203//5660 6273//5661 6269//5506 +f 6203//5660 6269//5506 6212//5504 +f 6124//5662 6203//5660 6212//5504 +f 6124//5662 6212//5504 6140//5502 +f 6273//5661 
6336//5663 6323//5508 +f 6273//5661 6323//5508 6269//5506 +f 6336//5663 6371//5664 6358//5515 +f 6336//5663 6358//5515 6323//5508 +f 6371//5664 6415//5665 6400//5510 +f 6371//5664 6400//5510 6358//5515 +f 6415//5665 6446//5572 6435//5511 +f 6415//5665 6435//5511 6400//5510 +f 5991//5649 5993//5666 5996//5392 +f 5991//5649 5996//5392 5995//5390 +f 5993//5666 6004//5667 6008//5501 +f 5993//5666 6008//5501 5996//5392 +f 6008//5501 6004//5667 6013//5668 +f 6008//5501 6013//5668 6023//5496 +f 6084//5497 6047//5669 6124//5662 +f 6084//5497 6124//5662 6140//5502 +f 6084//5497 6023//5496 6013//5668 +f 6084//5497 6013//5668 6047//5669 +f 5994//5650 5989//5670 5993//5666 +f 5994//5650 5993//5666 5991//5649 +f 5998//5606 5990//5671 5989//5670 +f 5998//5606 5989//5670 5994//5650 +f 6002//5587 5992//5672 5990//5671 +f 6002//5587 5990//5671 5998//5606 +f 5989//5670 6000//5673 6004//5667 +f 5989//5670 6004//5667 5993//5666 +f 5990//5671 6001//5674 6000//5673 +f 5990//5671 6000//5673 5989//5670 +f 5992//5672 6003//5675 6001//5674 +f 5992//5672 6001//5674 5990//5671 +f 6211//5676 6003//5675 5992//5672 +f 6211//5676 5992//5672 6216//5677 +f 6216//5677 5992//5672 6002//5587 +f 6216//5677 6002//5587 6232//5578 +f 6359//5678 6211//5676 6216//5677 +f 6359//5678 6216//5677 6408//5679 +f 6408//5679 6216//5677 6232//5578 +f 6408//5679 6232//5578 6440//5577 +f 6420//5680 6364//5681 6359//5678 +f 6420//5680 6359//5678 6408//5679 +f 6408//5679 6440//5577 6450//5573 +f 6408//5679 6450//5573 6420//5680 +f 6420//5680 6450//5573 6446//5572 +f 6420//5680 6446//5572 6415//5665 +f 6371//5664 6364//5681 6420//5680 +f 6371//5664 6420//5680 6415//5665 +f 6187//5682 6183//5683 6185//5684 +f 6187//5682 6185//5684 6188//5685 +f 6202//5686 6187//5682 6188//5685 +f 6202//5686 6188//5685 6198//5687 +f 6219//5688 6202//5686 6198//5687 +f 6219//5688 6198//5687 6214//5689 +f 6163//5690 6185//5684 6183//5683 +f 6163//5690 6183//5683 6158//5691 +f 6185//5684 6163//5690 6190//5692 +f 6185//5684 6190//5692 6188//5685 +f 6190//5692 6214//5689 6198//5687 +f 6190//5692 6198//5687 6188//5685 +f 6287//5693 6248//5694 6193//5695 +f 6287//5693 6193//5695 6197//5696 +f 6248//5694 6214//5689 6190//5692 +f 6248//5694 6190//5692 6193//5695 +f 6332//5697 6204//5698 6211//5676 +f 6332//5697 6211//5676 6359//5678 +f 6287//5693 6197//5696 6204//5698 +f 6287//5693 6204//5698 6332//5697 +f 6248//5694 6287//5693 6289//5699 +f 6248//5694 6289//5699 6251//5700 +f 6214//5689 6248//5694 6251//5700 +f 6214//5689 6251//5700 6219//5688 +f 6287//5693 6332//5697 6338//5701 +f 6287//5693 6338//5701 6289//5699 +f 6332//5697 6359//5678 6364//5681 +f 6332//5697 6364//5681 6338//5701 +f 6094//5702 6032//5703 6204//5698 +f 6094//5702 6204//5698 6197//5696 +f 6204//5698 6032//5703 6003//5675 +f 6204//5698 6003//5675 6211//5676 +f 6094//5702 6197//5696 6193//5695 +f 6094//5702 6193//5695 6132//5704 +f 6193//5695 6190//5692 6163//5690 +f 6193//5695 6163//5690 6132//5704 +f 6132//5704 6163//5690 6158//5691 +f 6132//5704 6158//5691 6131//5705 +f 6131//5705 6098//5706 6094//5702 +f 6131//5705 6094//5702 6132//5704 +f 6098//5706 6031//5707 6032//5703 +f 6098//5706 6032//5703 6094//5702 +f 6003//5675 6032//5703 6031//5707 +f 6003//5675 6031//5707 6001//5674 +f 6013//5668 6004//5667 6000//5673 +f 6013//5668 6000//5673 6016//5708 +f 6016//5708 6000//5673 6001//5674 +f 6016//5708 6001//5674 6031//5707 +f 6136//5709 6103//5710 6098//5706 +f 6136//5709 6098//5706 6131//5705 +f 6251//5700 6289//5699 6300//5711 +f 6251//5700 6300//5711 6259//5712 +f 6259//5712 6300//5711 6268//5713 +f 
6259//5712 6268//5713 6244//5714 +f 6209//5715 6199//5716 6244//5714 +f 6209//5715 6244//5714 6268//5713 +f 6103//5710 6136//5709 6159//5717 +f 6103//5710 6159//5717 6137//5718 +f 6137//5718 6159//5717 6199//5716 +f 6137//5718 6199//5716 6209//5715 +f 6203//5660 6209//5715 6268//5713 +f 6203//5660 6268//5713 6273//5661 +f 6209//5715 6203//5660 6124//5662 +f 6209//5715 6124//5662 6137//5718 +f 6187//5682 6192//5719 6174//5720 +f 6187//5682 6174//5720 6183//5683 +f 6202//5686 6217//5721 6192//5719 +f 6202//5686 6192//5719 6187//5682 +f 6159//5717 6171//5722 6195//5723 +f 6159//5717 6195//5723 6199//5716 +f 6192//5719 6195//5723 6171//5722 +f 6192//5719 6171//5722 6174//5720 +f 6244//5714 6199//5716 6195//5723 +f 6244//5714 6195//5723 6229//5724 +f 6195//5723 6192//5719 6217//5721 +f 6195//5723 6217//5721 6229//5724 +f 6219//5688 6235//5725 6217//5721 +f 6219//5688 6217//5721 6202//5686 +f 6183//5683 6174//5720 6152//5726 +f 6183//5683 6152//5726 6158//5691 +f 6152//5726 6136//5709 6131//5705 +f 6152//5726 6131//5705 6158//5691 +f 6251//5700 6259//5712 6235//5725 +f 6251//5700 6235//5725 6219//5688 +f 6159//5717 6136//5709 6152//5726 +f 6159//5717 6152//5726 6171//5722 +f 6244//5714 6229//5724 6235//5725 +f 6244//5714 6235//5725 6259//5712 +f 6235//5725 6229//5724 6217//5721 +f 6171//5722 6152//5726 6174//5720 +f 6300//5711 6334//5727 6273//5661 +f 6300//5711 6273//5661 6268//5713 +f 6289//5699 6338//5701 6334//5727 +f 6289//5699 6334//5727 6300//5711 +f 6103//5710 6054//5728 6031//5707 +f 6103//5710 6031//5707 6098//5706 +f 6054//5728 6103//5710 6137//5718 +f 6054//5728 6137//5718 6124//5662 +f 6273//5661 6334//5727 6349//5729 +f 6273//5661 6349//5729 6336//5663 +f 6364//5681 6349//5729 6334//5727 +f 6364//5681 6334//5727 6338//5701 +f 6047//5669 6024//5730 6054//5728 +f 6047//5669 6054//5728 6124//5662 +f 6054//5728 6024//5730 6016//5708 +f 6054//5728 6016//5708 6031//5707 +f 6024//5730 6047//5669 6013//5668 +f 6024//5730 6013//5668 6016//5708 +f 6364//5681 6371//5664 6336//5663 +f 6364//5681 6336//5663 6349//5729 +f 6392//5488 6384//5731 6391//5732 +f 6392//5488 6391//5732 6404//5456 +f 6384//5731 6361//5733 6372//5734 +f 6384//5731 6372//5734 6391//5732 +f 6350//5735 6372//5734 6361//5733 +f 6350//5735 6361//5733 6347//5736 +f 6343//5737 6350//5735 6347//5736 +f 6343//5737 6347//5736 6340//5738 +f 6333//5739 6343//5737 6340//5738 +f 6333//5739 6340//5738 6330//5740 +f 6330//5740 6319//5741 6325//5742 +f 6330//5740 6325//5742 6333//5739 +f 6316//5743 6325//5742 6319//5741 +f 6316//5743 6319//5741 6307//5744 +f 6307//5744 6295//5745 6313//5746 +f 6307//5744 6313//5746 6316//5743 +f 6220//5747 6228//5748 6295//5745 +f 6220//5747 6295//5745 6307//5744 +f 6215//5749 6221//5750 6228//5748 +f 6215//5749 6228//5748 6220//5747 +f 6191//5751 6220//5747 6307//5744 +f 6191//5751 6307//5744 6319//5741 +f 6191//5751 6319//5741 6330//5740 +f 6191//5751 6330//5740 6181//5752 +f 6220//5747 6191//5751 6186//5753 +f 6220//5747 6186//5753 6215//5749 +f 6176//5754 6186//5753 6191//5751 +f 6176//5754 6191//5751 6181//5752 +f 6340//5738 6170//5755 6181//5752 +f 6340//5738 6181//5752 6330//5740 +f 6168//5756 6176//5754 6181//5752 +f 6168//5756 6181//5752 6170//5755 +f 6255//5757 6384//5731 6392//5488 +f 6255//5757 6392//5488 6258//5489 +f 6257//5758 6347//5736 6361//5733 +f 6257//5758 6361//5733 6253//5759 +f 6253//5759 6361//5733 6384//5731 +f 6253//5759 6384//5731 6255//5757 +f 6114//5760 6110//5492 6102//5452 +f 6114//5760 6102//5452 6106//5761 +f 6114//5760 6255//5757 6258//5489 +f 6114//5760 6258//5489 
6110//5492 +f 6127//5762 6114//5760 6106//5761 +f 6127//5762 6106//5761 6123//5763 +f 6127//5762 6253//5759 6255//5757 +f 6127//5762 6255//5757 6114//5760 +f 6148//5764 6127//5762 6123//5763 +f 6148//5764 6123//5763 6145//5765 +f 6253//5759 6127//5762 6148//5764 +f 6253//5759 6148//5764 6257//5758 +f 6170//5755 6148//5764 6145//5765 +f 6170//5755 6145//5765 6168//5756 +f 6170//5755 6257//5758 6148//5764 +f 6257//5758 6170//5755 6340//5738 +f 6257//5758 6340//5738 6347//5736 +f 6123//5763 6106//5761 6090//5449 +f 6123//5763 6090//5449 6085//5443 +f 6102//5452 6090//5449 6106//5761 +f 6391//5732 6372//5734 6395//5441 +f 6391//5732 6395//5441 6405//5446 +f 6404//5456 6391//5732 6405//5446 +f 6395//5441 6372//5734 6350//5735 +f 6395//5441 6350//5735 6382//5438 +f 6123//5763 6085//5443 6092//5434 +f 6123//5763 6092//5434 6145//5765 +f 6108//5431 6142//5766 6145//5765 +f 6108//5431 6145//5765 6092//5434 +f 6173//5348 6194//5767 6161//5768 +f 6173//5348 6161//5768 6134//5422 +f 6134//5422 6161//5768 6142//5766 +f 6134//5422 6142//5766 6108//5431 +f 6247//5769 6206//5341 6250//5331 +f 6206//5341 6247//5769 6194//5767 +f 6206//5341 6194//5767 6173//5348 +f 6317//5347 6293//5770 6247//5769 +f 6317//5347 6247//5769 6272//5330 +f 6348//5426 6328//5771 6293//5770 +f 6348//5426 6293//5770 6317//5347 +f 6328//5771 6348//5426 6360//5429 +f 6328//5771 6360//5429 6341//5772 +f 6341//5772 6360//5429 6382//5438 +f 6341//5772 6382//5438 6350//5735 +f 6341//5772 6350//5735 6343//5737 +f 6341//5772 6343//5737 6339//5773 +f 6322//5774 6328//5771 6341//5772 +f 6322//5774 6341//5772 6339//5773 +f 6252//5775 6247//5769 6293//5770 +f 6252//5775 6293//5770 6284//5776 +f 6213//5777 6194//5767 6247//5769 +f 6213//5777 6247//5769 6252//5775 +f 6284//5776 6293//5770 6328//5771 +f 6284//5776 6328//5771 6322//5774 +f 6179//5778 6161//5768 6194//5767 +f 6179//5778 6194//5767 6213//5777 +f 6164//5779 6142//5766 6161//5768 +f 6164//5779 6161//5768 6179//5778 +f 6168//5756 6145//5765 6142//5766 +f 6168//5756 6142//5766 6164//5779 +f 6256//5780 6225//5781 6222//5782 +f 6256//5780 6222//5782 6254//5783 +f 6283//5784 6256//5780 6254//5783 +f 6283//5784 6254//5783 6282//5785 +f 6254//5783 6222//5782 6213//5777 +f 6254//5783 6213//5777 6252//5775 +f 6282//5785 6254//5783 6252//5775 +f 6282//5785 6252//5775 6284//5776 +f 6321//5786 6322//5774 6339//5773 +f 6321//5786 6339//5773 6333//5739 +f 6343//5737 6333//5739 6339//5773 +f 6282//5785 6284//5776 6322//5774 +f 6282//5785 6322//5774 6321//5786 +f 6333//5739 6325//5742 6318//5787 +f 6333//5739 6318//5787 6321//5786 +f 6283//5784 6282//5785 6321//5786 +f 6283//5784 6321//5786 6318//5787 +f 6176//5754 6164//5779 6179//5778 +f 6176//5754 6179//5778 6184//5788 +f 6176//5754 6168//5756 6164//5779 +f 6213//5777 6222//5782 6184//5788 +f 6213//5777 6184//5788 6179//5778 +f 6222//5782 6225//5781 6189//5789 +f 6222//5782 6189//5789 6184//5788 +f 6184//5788 6189//5789 6186//5753 +f 6184//5788 6186//5753 6176//5754 +f 6256//5780 6261//5790 6236//5791 +f 6256//5780 6236//5791 6225//5781 +f 6283//5784 6285//5792 6261//5790 +f 6283//5784 6261//5790 6256//5780 +f 6285//5792 6283//5784 6318//5787 +f 6285//5792 6318//5787 6312//5793 +f 6325//5742 6316//5743 6312//5793 +f 6325//5742 6312//5793 6318//5787 +f 6215//5749 6186//5753 6189//5789 +f 6215//5749 6189//5789 6218//5794 +f 6218//5794 6189//5789 6225//5781 +f 6218//5794 6225//5781 6236//5791 +f 6237//5795 6236//5791 6261//5790 +f 6237//5795 6261//5790 6265//5796 +f 6285//5792 6288//5797 6265//5796 +f 6285//5792 6265//5796 6261//5790 +f 6237//5795 
6224//5798 6218//5794 +f 6237//5795 6218//5794 6236//5791 +f 6221//5750 6215//5749 6218//5794 +f 6221//5750 6218//5794 6224//5798 +f 6313//5746 6310//5799 6312//5793 +f 6313//5746 6312//5793 6316//5743 +f 6288//5797 6285//5792 6312//5793 +f 6288//5797 6312//5793 6310//5799 +f 6234//5800 6290//5801 6291//5802 +f 6234//5800 6291//5802 6233//5803 +f 6234//5800 6233//5803 6226//5804 +f 6234//5800 6226//5804 6227//5805 +f 6308//5806 6309//5807 6291//5802 +f 6308//5806 6291//5802 6290//5801 +f 6301//5808 6302//5809 6309//5807 +f 6301//5808 6309//5807 6308//5806 +f 6280//5810 6281//5811 6302//5809 +f 6280//5810 6302//5809 6301//5808 +f 6262//5812 6263//5813 6281//5811 +f 6262//5812 6281//5811 6280//5810 +f 6239//5814 6238//5815 6263//5813 +f 6239//5814 6263//5813 6262//5812 +f 6226//5804 6230//5816 6231//5817 +f 6226//5804 6231//5817 6227//5805 +f 6231//5817 6230//5816 6238//5815 +f 6231//5817 6238//5815 6239//5814 +f 6290//5801 6234//5800 6227//5805 +f 6290//5801 6227//5805 6308//5806 +f 6301//5808 6308//5806 6227//5805 +f 6301//5808 6227//5805 6231//5817 +f 6231//5817 6239//5814 6280//5810 +f 6231//5817 6280//5810 6301//5808 +f 6262//5812 6280//5810 6239//5814 +f 6233//5803 6291//5802 6295//5745 +f 6233//5803 6295//5745 6228//5748 +f 6226//5804 6233//5803 6228//5748 +f 6226//5804 6228//5748 6221//5750 +f 6224//5798 6230//5816 6226//5804 +f 6224//5798 6226//5804 6221//5750 +f 6238//5815 6230//5816 6224//5798 +f 6238//5815 6224//5798 6237//5795 +f 6263//5813 6238//5815 6237//5795 +f 6263//5813 6237//5795 6265//5796 +f 6281//5811 6263//5813 6265//5796 +f 6281//5811 6265//5796 6288//5797 +f 6302//5809 6281//5811 6288//5797 +f 6302//5809 6288//5797 6310//5799 +f 6309//5807 6302//5809 6310//5799 +f 6309//5807 6310//5799 6313//5746 +f 6291//5802 6309//5807 6313//5746 +f 6291//5802 6313//5746 6295//5745 +f 6461//5522 6462//5529 6416//5818 +f 6461//5522 6416//5818 6419//5819 +f 6458//5524 6460//5523 6417//5820 +f 6458//5524 6417//5820 6411//5821 +f 6430//5517 6441//5516 6396//5822 +f 6430//5517 6396//5822 6377//5823 +f 6373//5513 6401//5512 6362//5824 +f 6373//5513 6362//5824 6352//5825 +f 6314//5509 6346//5514 6331//5826 +f 6314//5509 6331//5826 6299//5827 +f 6210//5505 6266//5507 6264//5828 +f 6210//5505 6264//5828 6223//5829 +f 6113//5498 6149//5503 6175//5830 +f 6113//5498 6175//5830 6139//5831 +f 6019//5500 6050//5499 6107//5832 +f 6019//5500 6107//5832 6074//5659 +f 6223//5829 6175//5830 6149//5503 +f 6223//5829 6149//5503 6210//5505 +f 6050//5499 6113//5498 6139//5831 +f 6050//5499 6139//5831 6107//5832 +f 6266//5507 6314//5509 6299//5827 +f 6266//5507 6299//5827 6264//5828 +f 6346//5514 6373//5513 6352//5825 +f 6346//5514 6352//5825 6331//5826 +f 6401//5512 6430//5517 6377//5823 +f 6401//5512 6377//5823 6362//5824 +f 6411//5821 6396//5822 6441//5516 +f 6411//5821 6441//5516 6458//5524 +f 6462//5529 6455//5526 6410//5608 +f 6462//5529 6410//5608 6416//5818 +f 6460//5523 6461//5522 6419//5819 +f 6460//5523 6419//5819 6417//5820 +f 6247//5769 6250//5331 6272//5330 +f 5700//5833 5702//5834 5735//5835 +f 5700//5833 5735//5835 5731//5836 +f 5700//5833 5731//5836 5732//5837 +f 5700//5833 5732//5837 5701//5838 +f 5701//5838 5732//5837 5727//5839 +f 5701//5838 5727//5839 5705//5840 +f 5737//5841 5699//5842 5698//5843 +f 5737//5841 5698//5843 5736//5844 +f 5735//5835 5702//5834 5699//5842 +f 5735//5835 5699//5842 5737//5841 +f 5777//5845 5737//5841 5736//5844 +f 5777//5845 5736//5844 5781//5846 +f 5769//5847 5731//5836 5735//5835 +f 5769//5847 5735//5835 5776//5848 +f 5776//5848 5735//5835 5737//5841 +f 
5776//5848 5737//5841 5777//5845 +f 5770//5849 5732//5837 5731//5836 +f 5770//5849 5731//5836 5769//5847 +f 5771//5850 5727//5839 5732//5837 +f 5771//5850 5732//5837 5770//5849 +f 5662//5851 5698//5843 5699//5842 +f 5662//5851 5699//5842 5657//5852 +f 5657//5852 5699//5842 5702//5834 +f 5657//5852 5702//5834 5653//5853 +f 5653//5853 5702//5834 5700//5833 +f 5653//5853 5700//5833 5648//5854 +f 5648//5854 5700//5833 5701//5838 +f 5648//5854 5701//5838 5650//5855 +f 5705//5840 5660//5856 5650//5855 +f 5705//5840 5650//5855 5701//5838 +f 5804//5857 5771//5850 5770//5849 +f 5804//5857 5770//5849 5822//5858 +f 5822//5858 5770//5849 5769//5847 +f 5822//5858 5769//5847 5820//5859 +f 5820//5859 5769//5847 5776//5848 +f 5820//5859 5776//5848 5821//5860 +f 5821//5860 5776//5848 5777//5845 +f 5821//5860 5777//5845 5817//5861 +f 5817//5861 5777//5845 5781//5846 +f 5817//5861 5781//5846 5808//5862 +f 5899//5863 5912//5864 5896//5865 +f 5899//5863 5896//5865 5876//5866 +f 5931//5867 5912//5864 5899//5863 +f 5931//5867 5899//5863 5905//5868 +f 5860//5869 5876//5866 5896//5865 +f 5860//5869 5896//5865 5873//5870 +f 5847//5871 5860//5869 5873//5870 +f 5847//5871 5873//5870 5857//5872 +f 5857//5872 5844//5873 5831//5874 +f 5857//5872 5831//5874 5847//5871 +f 5844//5873 5817//5861 5808//5862 +f 5844//5873 5808//5862 5831//5874 +f 5928//5875 5931//5867 5905//5868 +f 5928//5875 5905//5868 5907//5876 +f 5925//5877 5928//5875 5907//5876 +f 5925//5877 5907//5876 5904//5878 +f 5908//5879 5924//5880 5925//5877 +f 5908//5879 5925//5877 5904//5878 +f 5926//5881 5924//5880 5908//5879 +f 5926//5881 5908//5879 5909//5882 +f 5926//5881 5909//5882 5914//5883 +f 5926//5881 5914//5883 5932//5884 +f 5932//5884 5914//5883 5920//5885 +f 5932//5884 5920//5885 5935//5886 +f 5920//5885 5936//5887 5949//5888 +f 5920//5885 5949//5888 5935//5886 +f 5936//5887 5948//5889 5956//5890 +f 5936//5887 5956//5890 5949//5888 +f 5948//5889 5955//5891 5957//5892 +f 5948//5889 5957//5892 5956//5890 +f 5957//5892 5955//5891 5959//5893 +f 5957//5892 5959//5893 5963//5894 +f 5966//5895 5968//5896 5963//5894 +f 5966//5895 5963//5894 5959//5893 +f 5967//5897 5980//5898 5968//5896 +f 5967//5897 5968//5896 5966//5895 +f 5980//5898 5967//5897 5971//5899 +f 5980//5898 5971//5899 5982//5900 +f 5982//5900 5971//5899 5970//5901 +f 5982//5900 5970//5901 5981//5902 +f 5941//5903 5951//5904 5949//5888 +f 5941//5903 5949//5888 5956//5890 +f 5886//5905 5915//5906 5951//5904 +f 5886//5905 5951//5904 5941//5903 +f 5823//5907 5839//5908 5915//5906 +f 5823//5907 5915//5906 5886//5905 +f 5851//5909 5821//5860 5817//5861 +f 5851//5909 5817//5861 5844//5873 +f 5877//5910 5851//5909 5844//5873 +f 5877//5910 5844//5873 5857//5872 +f 5900//5911 5877//5910 5857//5872 +f 5900//5911 5857//5872 5873//5870 +f 5916//5912 5900//5911 5873//5870 +f 5916//5912 5873//5870 5896//5865 +f 5912//5864 5931//5867 5916//5912 +f 5912//5864 5916//5912 5896//5865 +f 5596//5913 5579//5914 5549//5915 +f 5596//5913 5549//5915 5584//5916 +f 5544//5917 5588//5918 5584//5916 +f 5544//5917 5584//5916 5549//5915 +f 5589//5919 5564//5920 5549//5915 +f 5589//5919 5549//5915 5579//5914 +f 5589//5919 5579//5914 5596//5913 +f 5589//5919 5596//5913 5608//5921 +f 5642//5922 5662//5851 5657//5852 +f 5642//5922 5657//5852 5635//5923 +f 5635//5923 5622//5924 5633//5925 +f 5635//5923 5633//5925 5642//5922 +f 5624//5926 5633//5925 5622//5924 +f 5624//5926 5622//5924 5607//5927 +f 5589//5919 5608//5921 5624//5926 +f 5589//5919 5624//5926 5607//5927 +f 5564//5920 5589//5919 5607//5927 +f 5564//5920 5607//5927 
5590//5928 +f 5612//5929 5590//5928 5607//5927 +f 5612//5929 5607//5927 5622//5924 +f 5635//5923 5657//5852 5653//5853 +f 5635//5923 5653//5853 5626//5930 +f 5626//5930 5612//5929 5622//5924 +f 5626//5930 5622//5924 5635//5923 +f 5822//5858 5861//5931 5843//5932 +f 5822//5858 5843//5932 5804//5857 +f 5820//5859 5859//5933 5861//5931 +f 5820//5859 5861//5931 5822//5858 +f 5820//5859 5821//5860 5851//5909 +f 5820//5859 5851//5909 5859//5933 +f 5621//5934 5650//5855 5660//5856 +f 5621//5934 5660//5856 5629//5935 +f 5623//5936 5648//5854 5650//5855 +f 5623//5936 5650//5855 5621//5934 +f 5626//5930 5653//5853 5648//5854 +f 5626//5930 5648//5854 5623//5936 +f 5629//5935 5617//5937 5591//5938 +f 5629//5935 5591//5938 5621//5934 +f 5621//5934 5591//5938 5598//5939 +f 5621//5934 5598//5939 5623//5936 +f 5612//5929 5626//5930 5623//5936 +f 5612//5929 5623//5936 5598//5939 +f 5861//5931 5898//5940 5869//5941 +f 5861//5931 5869//5941 5843//5932 +f 5898//5940 5861//5931 5859//5933 +f 5898//5940 5859//5933 5897//5942 +f 5859//5933 5851//5909 5877//5910 +f 5859//5933 5877//5910 5897//5942 +f 5885//5943 5869//5941 5898//5940 +f 5885//5943 5898//5940 5922//5944 +f 5922//5944 5898//5940 5897//5942 +f 5922//5944 5897//5942 5921//5945 +f 5900//5911 5921//5945 5897//5942 +f 5900//5911 5897//5942 5877//5910 +f 5555//5946 5591//5938 5617//5937 +f 5555//5946 5617//5937 5595//5947 +f 5563//5948 5598//5939 5591//5938 +f 5563//5948 5591//5938 5555//5946 +f 5590//5928 5612//5929 5598//5939 +f 5590//5928 5598//5939 5563//5948 +f 5540//5949 5555//5946 5595//5947 +f 5540//5949 5595//5947 5582//5950 +f 5541//5951 5563//5948 5555//5946 +f 5541//5951 5555//5946 5540//5949 +f 5541//5951 5564//5920 5590//5928 +f 5541//5951 5590//5928 5563//5948 +f 5892//5952 5885//5943 5922//5944 +f 5892//5952 5922//5944 5934//5953 +f 5934//5953 5922//5944 5921//5945 +f 5934//5953 5921//5945 5939//5954 +f 5921//5945 5900//5911 5916//5912 +f 5921//5945 5916//5912 5939//5954 +f 5572//5955 5535//5956 5540//5949 +f 5572//5955 5540//5949 5582//5950 +f 5540//5949 5535//5956 5534//5957 +f 5540//5949 5534//5957 5541//5951 +f 5534//5957 5549//5915 5564//5920 +f 5534//5957 5564//5920 5541//5951 +f 5934//5953 5937//5958 5887//5959 +f 5934//5953 5887//5959 5892//5952 +f 5937//5958 5934//5953 5939//5954 +f 5937//5958 5939//5954 5944//5960 +f 5939//5954 5916//5912 5931//5867 +f 5939//5954 5931//5867 5944//5960 +f 5875//5961 5887//5959 5937//5958 +f 5875//5961 5937//5958 5933//5962 +f 5933//5962 5937//5958 5944//5960 +f 5933//5962 5944//5960 5947//5963 +f 5931//5867 5928//5875 5947//5963 +f 5931//5867 5947//5963 5944//5960 +f 5535//5956 5572//5955 5573//5964 +f 5535//5956 5573//5964 5529//5965 +f 5526//5966 5534//5957 5535//5956 +f 5526//5966 5535//5956 5529//5965 +f 5544//5917 5549//5915 5534//5957 +f 5544//5917 5534//5957 5526//5966 +f 5691//5967 5707//5968 5834//5969 +f 5691//5967 5834//5969 5812//5970 +f 5812//5970 5834//5969 5839//5908 +f 5812//5970 5839//5908 5823//5907 +f 5583//5971 5578//5972 5707//5968 +f 5583//5971 5707//5968 5691//5967 +f 5570//5973 5571//5974 5578//5972 +f 5570//5973 5578//5972 5583//5971 +f 5524//5975 5523//5976 5571//5974 +f 5524//5975 5571//5974 5570//5973 +f 5503//5977 5512//5978 5523//5976 +f 5503//5977 5523//5976 5524//5975 +f 5513//5979 5533//5980 5512//5978 +f 5513//5979 5512//5978 5503//5977 +f 5532//5981 5546//5982 5533//5980 +f 5532//5981 5533//5980 5513//5979 +f 5599//5983 5592//5984 5548//5985 +f 5599//5983 5548//5985 5551//5986 +f 5592//5984 5588//5918 5544//5917 +f 5592//5984 5544//5917 5548//5985 +f 5594//5987 
+ [... OBJ mesh face data omitted: several thousand additional triangle records of the form "f v//vn v//vn v//vn" (vertex//normal index pairs) added by this diff ...]
8355//7111 8371//6817 +f 8379//6821 8389//7082 8375//6822 +f 8343//6818 8337//7104 8291//7041 +f 8343//6818 8291//7041 8333//6814 +f 7921//7033 7925//7112 7872//7113 +f 7921//7033 7872//7113 7870//7034 +f 8002//7114 7925//7112 7921//7033 +f 8002//7114 7921//7033 7999//7036 +f 8094//7035 8089//7115 8002//7114 +f 8094//7035 8002//7114 7999//7036 +f 8180//7037 8166//7116 8089//7115 +f 8180//7037 8089//7115 8094//7035 +f 8166//7116 8180//7037 8249//7038 +f 8166//7116 8249//7038 8226//7079 +f 8271//7070 8226//7079 8249//7038 +f 8271//7070 8249//7038 8298//7069 +f 8292//6855 8271//7070 8298//7069 +f 8292//6855 8298//7069 8312//7040 +f 8295//7039 8290//6851 8292//6855 +f 8295//7039 8292//6855 8312//7040 +f 8242//7042 8270//6852 8290//6851 +f 8242//7042 8290//6851 8295//7039 +f 8223//6847 8270//6852 8242//7042 +f 8223//6847 8242//7042 8199//7044 +f 8164//7043 8173//6848 8223//6847 +f 8164//7043 8223//6847 8199//7044 +f 8340//6918 8293//7103 8204//7102 +f 8340//6918 8204//7102 8231//6917 +f 8426//6920 8374//7101 8293//7103 +f 8426//6920 8293//7103 8340//6918 +f 8467//6919 8409//7100 8374//7101 +f 8467//6919 8374//7101 8426//6920 +f 8472//6921 8404//7099 8409//7100 +f 8472//6921 8409//7100 8467//6919 +f 8452//7073 8363//7098 8404//7099 +f 8452//7073 8404//7099 8472//6921 +f 8403//6966 8309//7097 8363//7098 +f 8403//6966 8363//7098 8452//7073 +f 8311//6922 8228//7094 8309//7097 +f 8311//6922 8309//7097 8403//6966 +f 8177//6923 8131//7090 8228//7094 +f 8177//6923 8228//7094 8311//6922 +f 8066//6950 8040//7091 8131//7090 +f 8066//6950 8131//7090 8177//6923 +f 7957//6924 7949//7086 8040//7091 +f 7957//6924 8040//7091 8066//6950 +f 7884//6925 7882//7087 7949//7086 +f 7884//6925 7949//7086 7957//6924 +f 7890//7117 7888//6861 7969//6864 +f 7890//7117 7969//6864 7956//7118 +f 7956//7118 7969//6864 8099//6948 +f 7956//7118 8099//6948 8116//7119 +f 8297//6867 8321//7120 8116//7119 +f 8297//6867 8116//7119 8099//6948 +f 8423//6866 8449//7121 8321//7120 +f 8423//6866 8321//7120 8297//6867 +f 8449//7121 8423//6866 8482//6968 +f 8449//7121 8482//6968 8500//7122 +f 8500//7122 8482//6968 8516//7074 +f 8500//7122 8516//7074 8532//7123 +f 8532//7123 8516//7074 8533//6871 +f 8532//7123 8533//6871 8548//7124 +f 8529//6870 8544//7125 8548//7124 +f 8529//6870 8548//7124 8533//6871 +f 8544//7125 8529//6870 8492//6878 +f 8544//7125 8492//6878 8509//7126 +f 8509//7126 8492//6878 8425//6873 +f 8509//7126 8425//6873 8450//7127 +f 8450//7127 8425//6873 8299//6876 +f 8450//7127 8299//6876 8313//7128 +f 7959//7129 7896//7130 7890//7117 +f 7959//7129 7890//7117 7956//7118 +f 8141//7131 7959//7129 7956//7118 +f 8141//7131 7956//7118 8116//7119 +f 8321//7120 8335//7132 8141//7131 +f 8321//7120 8141//7131 8116//7119 +f 8449//7121 8457//7133 8335//7132 +f 8449//7121 8335//7132 8321//7120 +f 8511//7134 8457//7133 8449//7121 +f 8511//7134 8449//7121 8500//7122 +f 8541//7135 8511//7134 8500//7122 +f 8541//7135 8500//7122 8532//7123 +f 8560//7136 8541//7135 8532//7123 +f 8560//7136 8532//7123 8548//7124 +f 8544//7125 8556//7137 8560//7136 +f 8544//7125 8560//7136 8548//7124 +f 8521//7138 8556//7137 8544//7125 +f 8521//7138 8544//7125 8509//7126 +f 8461//7139 8521//7138 8509//7126 +f 8461//7139 8509//7126 8450//7127 +f 8323//7140 8461//7139 8450//7127 +f 8323//7140 8450//7127 8313//7128 +f 8461//7139 8323//7140 8342//7141 +f 8461//7139 8342//7141 8474//7142 +f 8521//7138 8461//7139 8474//7142 +f 8521//7138 8474//7142 8535//7143 +f 8564//7144 8556//7137 8521//7138 +f 8564//7144 8521//7138 8535//7143 +f 8566//7145 8560//7136 8556//7137 +f 
8566//7145 8556//7137 8564//7144 +f 8547//7146 8541//7135 8560//7136 +f 8547//7146 8560//7136 8566//7145 +f 8517//7147 8511//7134 8541//7135 +f 8517//7147 8541//7135 8547//7146 +f 8466//7148 8457//7133 8511//7134 +f 8466//7148 8511//7134 8517//7147 +f 8349//7149 8335//7132 8457//7133 +f 8349//7149 8457//7133 8466//7148 +f 8161//7150 8141//7131 8335//7132 +f 8161//7150 8335//7132 8349//7149 +f 7976//7151 7959//7129 8141//7131 +f 7976//7151 8141//7131 8161//7150 +f 7902//7152 7899//7153 7959//7129 +f 7902//7152 7959//7129 7976//7151 +f 8342//7141 8364//7154 8485//7155 +f 8342//7141 8485//7155 8474//7142 +f 8474//7142 8485//7155 8550//7156 +f 8474//7142 8550//7156 8535//7143 +f 8576//7157 8564//7144 8535//7143 +f 8576//7157 8535//7143 8550//7156 +f 8574//7158 8566//7145 8564//7144 +f 8574//7158 8564//7144 8576//7157 +f 8557//7159 8547//7146 8566//7145 +f 8557//7159 8566//7145 8574//7158 +f 8528//7160 8517//7147 8547//7146 +f 8528//7160 8547//7146 8557//7159 +f 8477//7161 8466//7148 8517//7147 +f 8477//7161 8517//7147 8528//7160 +f 8369//7162 8349//7149 8466//7148 +f 8369//7162 8466//7148 8477//7161 +f 8191//7163 8161//7150 8349//7149 +f 8191//7163 8349//7149 8369//7162 +f 7991//7164 7976//7151 8161//7150 +f 7991//7164 8161//7150 8191//7163 +f 7909//7165 7902//7152 7976//7151 +f 7909//7165 7976//7151 7991//7164 +f 8364//7154 8395//6879 8501//6882 +f 8364//7154 8501//6882 8485//7155 +f 8485//7155 8501//6882 8569//6886 +f 8485//7155 8569//6886 8550//7156 +f 8589//6885 8576//7157 8550//7156 +f 8589//6885 8550//7156 8569//6886 +f 8584//7078 8574//7158 8576//7157 +f 8584//7078 8576//7157 8589//6885 +f 8573//7077 8557//7159 8574//7158 +f 8573//7077 8574//7158 8584//7078 +f 8543//6970 8528//7160 8557//7159 +f 8543//6970 8557//7159 8573//7077 +f 8487//6890 8477//7161 8528//7160 +f 8487//6890 8528//7160 8543//6970 +f 8392//6947 8369//7162 8477//7161 +f 8392//6947 8477//7161 8487//6890 +f 8220//6946 8191//7163 8369//7162 +f 8220//6946 8369//7162 8392//6947 +f 8023//6894 7991//7164 8191//7163 +f 8023//6894 8191//7163 8220//6946 +f 7928//6893 7909//7165 7991//7164 +f 7928//6893 7991//7164 8023//6894 +f 8660//7166 8671//7167 8657//7168 +f 8660//7166 8657//7168 8646//7169 +f 8641//7170 8631//7171 8646//7169 +f 8641//7170 8646//7169 8657//7168 +f 8630//7172 8618//7173 8631//7171 +f 8630//7172 8631//7171 8641//7170 +f 8620//7174 8611//7175 8618//7173 +f 8620//7174 8618//7173 8630//7172 +f 8613//7176 8605//7177 8611//7175 +f 8613//7176 8611//7175 8620//7174 +f 8624//7178 8612//7179 8605//7177 +f 8624//7178 8605//7177 8613//7176 +f 8633//7180 8626//7181 8612//7179 +f 8633//7180 8612//7179 8624//7178 +f 8643//7182 8637//7183 8626//7181 +f 8643//7182 8626//7181 8633//7180 +f 8655//7184 8651//7185 8643//7182 +f 8651//7185 8655//7184 8664//7186 +f 8651//7185 8664//7186 8663//7187 +f 8672//7188 8673//7189 8663//7187 +f 8672//7188 8663//7187 8664//7186 +f 8673//7189 8672//7188 8674//7190 +f 8673//7189 8674//7190 8669//7191 +f 8660//7166 8669//7191 8674//7190 +f 8841//7192 8835//6936 8776//6939 +f 8841//7192 8776//6939 8765//7193 +f 8736//6908 8733//7194 8765//7193 +f 8736//6908 8765//7193 8776//6939 +f 8717//6907 8710//7195 8733//7194 +f 8717//6907 8733//7194 8736//6908 +f 8703//6961 8701//7196 8710//7195 +f 8703//6961 8710//7195 8717//6907 +f 8698//7061 8690//7197 8701//7196 +f 8698//7061 8701//7196 8703//6961 +f 8696//6904 8689//7198 8690//7197 +f 8696//6904 8690//7197 8698//7061 +f 8702//6902 8695//7199 8689//7198 +f 8702//6902 8689//7198 8696//6904 +f 8713//6901 8706//7200 8695//7199 +f 8713//6901 8695//7199 
8702//6902 +f 8730//6895 8726//7201 8706//7200 +f 8730//6895 8706//7200 8713//6901 +f 8726//7201 8730//6895 8756//6898 +f 8726//7201 8756//6898 8754//7202 +f 8803//7056 8816//7203 8754//7202 +f 8803//7056 8754//7202 8756//6898 +f 8816//7203 8803//7056 8839//6937 +f 8816//7203 8839//6937 8844//7204 +f 8835//6936 8841//7192 8844//7204 +f 8835//6936 8844//7204 8839//6937 +f 8501//6882 8537//7105 8570//7205 +f 8501//6882 8570//7205 8569//6886 +f 8600//6887 8599//7075 8584//7078 +f 8600//6887 8584//7078 8589//6885 +f 8582//7081 8558//6969 8543//6970 +f 8582//7081 8543//6970 8573//7077 +f 8491//6889 8418//6944 8392//6947 +f 8491//6889 8392//6947 8487//6890 +f 8224//7108 8068//6891 8023//6894 +f 8224//7108 8023//6894 8220//6946 +f 7899//7153 7896//7130 7959//7129 +f 8570//7205 8588//6883 8569//6886 +f 8198//7206 8118//7207 8112//7208 +f 8198//7206 8112//7208 8203//7209 +f 8134//7210 8255//7211 8203//7209 +f 8134//7210 8203//7209 8112//7208 +f 8322//7212 8356//7213 8307//7214 +f 8305//7215 8353//7216 8356//7213 +f 8305//7215 8356//7213 8322//7212 +f 8129//7217 8301//7218 8255//7211 +f 8129//7217 8255//7211 8134//7210 +f 7844//7219 7849//7220 8008//7221 +f 7844//7219 8008//7221 7980//7222 +f 7980//7222 8008//7221 8212//7223 +f 7980//7222 8212//7223 8106//7224 +f 8212//7223 8367//7225 8256//7226 +f 8212//7223 8256//7226 8106//7224 +f 8256//7226 8367//7225 8427//7227 +f 8256//7226 8427//7227 8344//7228 +f 8427//7227 8381//7229 8294//7230 +f 8427//7227 8294//7230 8344//7228 +f 8294//7230 8332//7231 8269//7232 +f 8294//7230 8269//7232 8214//7233 +f 8008//7221 8017//7234 8219//7235 +f 8008//7221 8219//7235 8212//7223 +f 8212//7223 8219//7235 8376//7236 +f 8212//7223 8376//7236 8367//7225 +f 8367//7225 8376//7236 8435//7237 +f 8367//7225 8435//7237 8427//7227 +f 8388//7238 8381//7229 8427//7227 +f 8388//7238 8427//7227 8435//7237 +f 7849//7220 7846//7239 8017//7234 +f 7849//7220 8017//7234 8008//7221 +f 8332//7231 8329//7240 8252//7241 +f 8332//7231 8252//7241 8269//7232 +f 8352//7242 8397//7243 8336//7244 +f 8352//7242 8336//7244 8317//7245 +f 8317//7245 8336//7244 8289//7246 +f 8336//7244 8280//7247 8289//7246 +f 8334//7248 8336//7244 8397//7243 +f 8334//7248 8397//7243 8393//7249 +f 8336//7244 8334//7248 8279//7250 +f 8336//7244 8279//7250 8280//7247 +f 8121//7251 8232//7252 8265//7253 +f 8121//7251 8265//7253 8132//7254 +f 8132//7254 8265//7253 8301//7218 +f 8132//7254 8301//7218 8129//7217 +f 8233//7255 8289//7246 8280//7247 +f 8206//7256 8164//7043 8179//6844 +f 8206//7256 8179//6844 8233//7255 +f 8190//7257 8173//6848 8164//7043 +f 8190//7257 8164//7043 8206//7256 +f 8182//7258 8185//6849 8173//6848 +f 8182//7258 8173//6848 8190//7257 +f 8204//7102 8185//6849 8182//7258 +f 8204//7102 8182//7258 8195//7259 +f 8231//6917 8204//7102 8195//7259 +f 8195//7259 8138//7260 8146//7261 +f 8195//7259 8146//7261 8231//6917 +f 8138//7260 8195//7259 8182//7258 +f 8138//7260 8182//7258 8145//7262 +f 8145//7262 8182//7258 8190//7257 +f 8145//7262 8190//7257 8157//7263 +f 8157//7263 8190//7257 8206//7256 +f 8157//7263 8206//7256 8196//7264 +f 8196//7264 8206//7256 8233//7255 +f 8196//7264 8233//7255 8221//7265 +f 8280//7247 8279//7250 8221//7265 +f 8280//7247 8221//7265 8233//7255 +f 8231//6917 8146//7261 8159//7266 +f 8231//6917 8159//7266 8273//6875 +f 8175//7267 8299//6876 8273//6875 +f 8175//7267 8273//6875 8159//7266 +f 8197//7268 8313//7128 8299//6876 +f 8197//7268 8299//6876 8175//7267 +f 8205//7269 8323//7140 8313//7128 +f 8205//7269 8313//7128 8197//7268 +f 8351//7270 8433//6880 8395//6879 +f 8351//7270 
8395//6879 8310//7271 +f 8364//7154 8274//7272 8310//7271 +f 8364//7154 8310//7271 8395//6879 +f 8274//7272 8364//7154 8342//7141 +f 8274//7272 8342//7141 8238//7273 +f 8323//7140 8205//7269 8238//7273 +f 8323//7140 8238//7273 8342//7141 +f 8241//7096 8201//7095 8225//7274 +f 8241//7096 8225//7274 8276//7275 +f 8120//7093 8147//7276 8225//7274 +f 8120//7093 8225//7274 8201//7095 +f 8036//7092 8041//7277 8147//7276 +f 8036//7092 8147//7276 8120//7093 +f 7944//7089 7948//7278 8041//7277 +f 7944//7089 8041//7277 8036//7092 +f 7881//7088 7880//7279 7948//7278 +f 7881//7088 7948//7278 7944//7089 +f 8166//7116 8226//7079 8215//7080 +f 8166//7116 8215//7080 8176//7280 +f 8104//7281 8089//7115 8166//7116 +f 8104//7281 8166//7116 8176//7280 +f 8002//7114 8089//7115 8104//7281 +f 8002//7114 8104//7281 8020//7282 +f 7934//7283 7925//7112 8002//7114 +f 7934//7283 8002//7114 8020//7282 +f 7875//7284 7872//7113 7925//7112 +f 7875//7284 7925//7112 7934//7283 +f 8277//7071 8240//7285 8215//7080 +f 8277//7071 8241//7096 8240//7285 +f 8215//7080 8240//7285 8282//6964 +f 8215//7080 8282//6964 8258//6965 +f 8139//6858 8104//7281 8176//7280 +f 8139//6858 8176//7280 8210//6859 +f 8104//7281 8139//6858 8045//6952 +f 8104//7281 8045//6952 8020//7282 +f 7952//7107 7934//7283 8020//7282 +f 7952//7107 8020//7282 8045//6952 +f 7874//7286 7875//7284 7934//7283 +f 7874//7286 7934//7283 7952//7107 +f 8225//7274 8236//6860 8282//6964 +f 8225//7274 8282//6964 8276//7275 +f 8147//7276 8148//6857 8236//6860 +f 8147//7276 8236//6860 8225//7274 +f 8041//7277 8050//6951 8148//6857 +f 8041//7277 8148//6857 8147//7276 +f 7948//7278 7945//7106 8050//6951 +f 7948//7278 8050//6951 8041//7277 +f 8240//7285 8241//7096 8276//7275 +f 8240//7285 8276//7275 8282//6964 +f 8215//7080 8258//6965 8210//6859 +f 8215//7080 8210//6859 8176//7280 +f 8118//7207 8198//7206 8272//7287 +f 8118//7207 8272//7287 8137//7288 +f 8304//7289 8135//7290 8137//7288 +f 8304//7289 8137//7288 8272//7287 +f 8135//7290 8304//7289 8278//7291 +f 8135//7290 8278//7291 8085//7292 +f 8085//7292 8278//7291 8154//7293 +f 8085//7292 8154//7293 8046//7294 +f 7979//7295 7963//7296 8046//7294 +f 7979//7295 8046//7294 8154//7293 +f 7963//7296 7979//7295 7842//7297 +f 7963//7296 7842//7297 7840//7298 +f 8274//7272 8238//7273 8172//7299 +f 8274//7272 8172//7299 8209//7300 +f 8310//7271 8274//7272 8209//7300 +f 8310//7271 8209//7300 8245//7301 +f 8351//7270 8310//7271 8245//7301 +f 8351//7270 8245//7301 8300//7302 +f 8197//7268 8175//7267 8088//7303 +f 8197//7268 8088//7303 8109//7304 +f 8205//7269 8197//7268 8109//7304 +f 8205//7269 8109//7304 8142//7305 +f 8172//7299 8238//7273 8205//7269 +f 8172//7299 8205//7269 8142//7305 +f 8159//7266 8072//7306 8088//7303 +f 8159//7266 8088//7303 8175//7267 +f 8159//7266 8146//7261 8049//7307 +f 8159//7266 8049//7307 8072//7306 +f 8146//7261 8138//7260 8039//7308 +f 8146//7261 8039//7308 8049//7307 +f 8145//7262 8044//7309 8039//7308 +f 8145//7262 8039//7308 8138//7260 +f 8053//7310 8044//7309 8145//7262 +f 8053//7310 8145//7262 8157//7263 +f 8157//7263 8196//7264 8058//7311 +f 8157//7263 8058//7311 8053//7310 +f 8196//7264 8127//7312 8058//7311 +f 8232//7252 8121//7251 8127//7312 +f 8232//7252 8127//7312 8221//7265 +f 7918//6915 7845//6914 7841//7313 +f 7918//6915 7841//7313 7953//7314 +f 7996//6959 7918//6915 7953//7314 +f 7996//6959 7953//7314 8038//7315 +f 8115//7316 8084//6927 7996//6959 +f 8115//7316 7996//6959 8038//7315 +f 8151//6928 8084//6927 8115//7316 +f 8151//6928 8115//7316 8187//7317 +f 8184//7318 8158//7046 8151//6928 +f 
8184//7318 8151//6928 8187//7317 +f 8165//7319 8153//6912 8158//7046 +f 8165//7319 8158//7046 8184//7318 +f 8153//6912 8165//7319 8189//7320 +f 8153//6912 8189//7320 8156//6909 +f 8218//7321 8168//6839 8156//6909 +f 8218//7321 8156//6909 8189//7320 +f 8222//7322 8183//6840 8168//6839 +f 8222//7322 8168//6839 8218//7321 +f 8183//6840 8222//7322 8192//7109 +f 7841//7313 7844//7219 7980//7222 +f 7841//7313 7980//7222 7953//7314 +f 7953//7314 7980//7222 8106//7224 +f 7953//7314 8106//7224 8038//7315 +f 8256//7226 8115//7316 8038//7315 +f 8256//7226 8038//7315 8106//7224 +f 8115//7316 8256//7226 8344//7228 +f 8115//7316 8344//7228 8187//7317 +f 8294//7230 8184//7318 8187//7317 +f 8294//7230 8187//7317 8344//7228 +f 8214//7233 8165//7319 8184//7318 +f 8214//7233 8184//7318 8294//7230 +f 8165//7319 8214//7233 8247//7323 +f 8165//7319 8247//7323 8189//7320 +f 8307//7214 8218//7321 8189//7320 +f 8307//7214 8189//7320 8247//7323 +f 8352//7242 8222//7322 8218//7321 +f 8352//7242 8218//7321 8307//7214 +f 8222//7322 8352//7242 8317//7245 +f 8222//7322 8317//7245 8192//7109 +f 8192//7109 8317//7245 8289//7246 +f 8192//7109 8289//7246 8194//6843 +f 8233//7255 8179//6844 8194//6843 +f 8233//7255 8194//6843 8289//7246 +f 8729//7324 8720//6899 8711//6903 +f 8729//7324 8711//6903 8722//7325 +f 8747//7326 8737//6900 8720//6899 +f 8747//7326 8720//6899 8729//7324 +f 8790//7327 8773//6896 8737//6900 +f 8790//7327 8737//6900 8747//7326 +f 8857//7328 8868//6897 8773//6896 +f 8857//7328 8773//6896 8790//7327 +f 8910//7329 8933//7055 8868//6897 +f 8910//7329 8868//6897 8857//7328 +f 8932//7330 8948//6934 8933//7055 +f 8932//7330 8933//7055 8910//7329 +f 8753//7331 8751//6906 8825//6905 +f 8753//7331 8825//6905 8838//7332 +f 8894//6938 8884//7333 8838//7332 +f 8894//6938 8838//7332 8825//6905 +f 8884//7333 8894//6938 8935//6935 +f 8884//7333 8935//6935 8921//7334 +f 8921//7334 8935//6935 8948//6934 +f 8921//7334 8948//6934 8932//7330 +f 8722//7325 8711//6903 8714//7060 +f 8722//7325 8714//7060 8724//7335 +f 8724//7335 8714//7060 8725//6960 +f 8724//7335 8725//6960 8732//7336 +f 8751//6906 8753//7331 8732//7336 +f 8751//6906 8732//7336 8725//6960 +f 8733//7194 8708//7337 8727//7338 +f 8733//7194 8727//7338 8765//7193 +f 8727//7338 8745//7339 8841//7192 +f 8727//7338 8841//7192 8765//7193 +f 8841//7192 8745//7339 8749//7340 +f 8841//7192 8749//7340 8844//7204 +f 8749//7340 8739//7341 8816//7203 +f 8749//7340 8816//7203 8844//7204 +f 8816//7203 8739//7341 8723//7342 +f 8816//7203 8723//7342 8754//7202 +f 8723//7342 8707//7343 8726//7201 +f 8723//7342 8726//7201 8754//7202 +f 8726//7201 8707//7343 8693//7344 +f 8726//7201 8693//7344 8706//7200 +f 8706//7200 8693//7344 8686//7345 +f 8706//7200 8686//7345 8695//7199 +f 8695//7199 8686//7345 8678//7346 +f 8695//7199 8678//7346 8689//7198 +f 8689//7198 8678//7346 8677//7347 +f 8689//7198 8677//7347 8690//7197 +f 8690//7197 8677//7347 8685//7348 +f 8690//7197 8685//7348 8701//7196 +f 8701//7196 8685//7348 8694//7349 +f 8701//7196 8694//7349 8710//7195 +f 8710//7195 8694//7349 8708//7337 +f 8710//7195 8708//7337 8733//7194 +f 8646//7169 8631//7171 8619//7350 +f 8646//7169 8619//7350 8632//7351 +f 8645//7352 8660//7166 8646//7169 +f 8645//7352 8646//7169 8632//7351 +f 8662//7353 8660//7166 8645//7352 +f 8662//7353 8645//7352 8650//7354 +f 8653//7355 8659//7356 8662//7353 +f 8653//7355 8662//7353 8650//7354 +f 8654//7357 8659//7356 8653//7355 +f 8654//7357 8653//7355 8644//7358 +f 8636//7359 8648//7360 8654//7357 +f 8636//7359 8654//7357 8644//7358 +f 8648//7360 8636//7359 
8625//7361 +f 8648//7360 8625//7361 8637//7183 +f 8637//7183 8625//7361 8608//7362 +f 8637//7183 8608//7362 8616//7363 +f 8616//7363 8608//7362 8587//7364 +f 8616//7363 8587//7364 8601//7365 +f 8577//7366 8586//7367 8601//7365 +f 8577//7366 8601//7365 8587//7364 +f 8594//7368 8586//7367 8577//7366 +f 8611//7175 8594//7368 8606//7369 +f 8611//7175 8606//7369 8618//7173 +f 8618//7173 8606//7369 8619//7350 +f 8618//7173 8619//7350 8631//7171 +f 8637//7183 8616//7363 8626//7181 +f 8626//7181 8616//7363 8601//7365 +f 8626//7181 8601//7365 8612//7179 +f 8586//7367 8605//7177 8612//7179 +f 8586//7367 8612//7179 8601//7365 +f 8594//7368 8611//7175 8605//7177 +f 8594//7368 8605//7177 8586//7367 +f 8651//7185 8648//7360 8637//7183 +f 8648//7360 8651//7185 8663//7187 +f 8648//7360 8663//7187 8654//7357 +f 8673//7189 8659//7356 8654//7357 +f 8673//7189 8654//7357 8663//7187 +f 8659//7356 8673//7189 8669//7191 +f 8659//7356 8669//7191 8662//7353 +f 8660//7166 8662//7353 8669//7191 +f 8671//7167 8660//7166 8674//7190 +f 8444//6990 8475//7024 8458//7023 +f 8444//6990 8458//7023 8431//6971 +f 8442//7022 8410//6972 8431//6971 +f 8442//7022 8431//6971 8458//7023 +f 8422//7032 8400//6975 8410//6972 +f 8422//7032 8410//6972 8442//7022 +f 8421//7031 8406//6977 8400//6975 +f 8421//7031 8400//6975 8422//7032 +f 8407//7066 8406//6977 8421//7031 +f 8407//7066 8421//7031 8420//7064 +f 8413//6982 8407//7066 8420//7064 +f 8413//6982 8420//7064 8434//7030 +f 8448//7029 8429//6979 8413//6982 +f 8448//7029 8413//6982 8434//7030 +f 8462//7028 8447//6983 8429//6979 +f 8462//7028 8429//6979 8448//7029 +f 8476//7026 8454//6985 8447//6983 +f 8476//7026 8447//6983 8462//7028 +f 8454//6985 8476//7026 8484//7027 +f 8454//6985 8484//7027 8463//6987 +f 8493//7053 8469//7051 8463//6987 +f 8493//7053 8463//6987 8484//7027 +f 8469//7051 8493//7053 8486//7025 +f 8469//7051 8486//7025 8459//6989 +f 8475//7024 8444//6990 8459//6989 +f 8475//7024 8459//6989 8486//7025 +f 8671//7167 8682//7370 8670//7371 +f 8671//7167 8670//7371 8657//7168 +f 8652//7372 8641//7170 8657//7168 +f 8652//7372 8657//7168 8670//7371 +f 8638//7373 8630//7172 8641//7170 +f 8638//7373 8641//7170 8652//7372 +f 8629//7374 8620//7174 8630//7172 +f 8629//7374 8630//7172 8638//7373 +f 8621//7375 8613//7176 8620//7174 +f 8621//7375 8620//7174 8629//7374 +f 8624//7178 8613//7176 8621//7375 +f 8655//7184 8668//7376 8664//7186 +f 8679//7377 8672//7188 8664//7186 +f 8679//7377 8664//7186 8668//7376 +f 8672//7188 8679//7377 8683//7378 +f 8672//7188 8683//7378 8674//7190 +f 8682//7370 8671//7167 8674//7190 +f 8682//7370 8674//7190 8683//7378 +f 8691//7379 8705//7380 8727//7338 +f 8691//7379 8727//7338 8708//7337 +f 8705//7380 8718//7381 8745//7339 +f 8705//7380 8745//7339 8727//7338 +f 8745//7339 8718//7381 8721//7382 +f 8745//7339 8721//7382 8749//7340 +f 8721//7382 8715//7383 8739//7341 +f 8721//7382 8739//7341 8749//7340 +f 8739//7341 8715//7383 8704//7384 +f 8739//7341 8704//7384 8723//7342 +f 8723//7342 8704//7384 8692//7385 +f 8723//7342 8692//7385 8707//7343 +f 8692//7385 8684//7386 8693//7344 +f 8692//7385 8693//7344 8707//7343 +f 8693//7344 8684//7386 8676//7387 +f 8693//7344 8676//7387 8686//7345 +f 8676//7387 8665//7388 8678//7346 +f 8676//7387 8678//7346 8686//7345 +f 8665//7388 8661//7389 8677//7347 +f 8665//7388 8677//7347 8678//7346 +f 8661//7389 8667//7390 8685//7348 +f 8661//7389 8685//7348 8677//7347 +f 8667//7390 8681//7391 8694//7349 +f 8667//7390 8694//7349 8685//7348 +f 8681//7391 8691//7379 8708//7337 +f 8681//7391 8708//7337 8694//7349 +f 8619//7350 
8610//7392 8627//7393 +f 8619//7350 8627//7393 8632//7351 +f 8634//7394 8645//7352 8632//7351 +f 8634//7394 8632//7351 8627//7393 +f 8645//7352 8634//7394 8639//7395 +f 8645//7352 8639//7395 8650//7354 +f 8642//7396 8653//7355 8650//7354 +f 8642//7396 8650//7354 8639//7395 +f 8653//7355 8642//7396 8635//7397 +f 8653//7355 8635//7397 8644//7358 +f 8628//7398 8636//7359 8644//7358 +f 8628//7398 8644//7358 8635//7397 +f 8636//7359 8628//7398 8614//7399 +f 8636//7359 8614//7399 8625//7361 +f 8625//7361 8614//7399 8602//7400 +f 8625//7361 8602//7400 8608//7362 +f 8608//7362 8602//7400 8587//7364 +f 8580//7401 8594//7368 8577//7366 +f 8594//7368 8580//7401 8596//7402 +f 8594//7368 8596//7402 8606//7369 +f 8606//7369 8596//7402 8610//7392 +f 8606//7369 8610//7392 8619//7350 +f 8591//7403 8563//7404 8585//7405 +f 8591//7403 8585//7405 8607//7406 +f 8585//7405 8597//7407 8617//7408 +f 8585//7405 8617//7408 8607//7406 +f 8617//7408 8597//7407 8604//7409 +f 8617//7408 8604//7409 8623//7410 +f 8604//7409 8603//7411 8622//7412 +f 8604//7409 8622//7412 8623//7410 +f 8622//7412 8603//7411 8598//7413 +f 8622//7412 8598//7413 8615//7414 +f 8598//7413 8590//7415 8609//7416 +f 8598//7413 8609//7416 8615//7414 +f 8609//7416 8590//7415 8571//7417 +f 8609//7416 8571//7417 8593//7418 +f 8593//7418 8571//7417 8545//7419 +f 8593//7418 8545//7419 8575//7420 +f 8575//7420 8545//7419 8526//7421 +f 8575//7420 8526//7421 8554//7422 +f 8526//7421 8515//7423 8539//7424 +f 8526//7421 8539//7424 8554//7422 +f 8515//7423 8524//7425 8552//7426 +f 8515//7423 8552//7426 8539//7424 +f 8552//7426 8524//7425 8538//7427 +f 8552//7426 8538//7427 8572//7428 +f 8572//7428 8538//7427 8563//7404 +f 8572//7428 8563//7404 8591//7403 +f 8617//7408 8634//7394 8627//7393 +f 8617//7408 8627//7393 8607//7406 +f 8610//7392 8591//7403 8607//7406 +f 8610//7392 8607//7406 8627//7393 +f 8596//7402 8572//7428 8591//7403 +f 8596//7402 8591//7403 8610//7392 +f 8580//7401 8552//7426 8572//7428 +f 8580//7401 8572//7428 8596//7402 +f 8552//7426 8580//7401 8577//7366 +f 8552//7426 8577//7366 8539//7424 +f 8539//7424 8577//7366 8587//7364 +f 8539//7424 8587//7364 8554//7422 +f 8602//7400 8575//7420 8554//7422 +f 8602//7400 8554//7422 8587//7364 +f 8614//7399 8593//7418 8575//7420 +f 8614//7399 8575//7420 8602//7400 +f 8628//7398 8609//7416 8593//7418 +f 8628//7398 8593//7418 8614//7399 +f 8609//7416 8628//7398 8635//7397 +f 8609//7416 8635//7397 8615//7414 +f 8642//7396 8622//7412 8615//7414 +f 8642//7396 8615//7414 8635//7397 +f 8622//7412 8642//7396 8639//7395 +f 8622//7412 8639//7395 8623//7410 +f 8634//7394 8617//7408 8623//7410 +f 8634//7394 8623//7410 8639//7395 +f 8651//7185 8637//7183 8643//7182 +f 8675//7429 8652//7372 8670//7371 +f 8675//7429 8670//7371 8687//7430 +f 8682//7370 8699//7431 8687//7430 +f 8682//7370 8687//7430 8670//7371 +f 8699//7431 8682//7370 8683//7378 +f 8699//7431 8683//7378 8700//7432 +f 8679//7377 8697//7433 8700//7432 +f 8679//7377 8700//7432 8683//7378 +f 8697//7433 8679//7377 8668//7376 +f 8697//7433 8668//7376 8688//7434 +f 8655//7184 8680//7435 8688//7434 +f 8655//7184 8688//7434 8668//7376 +f 8680//7435 8655//7184 8643//7182 +f 8680//7435 8643//7182 8666//7436 +f 8666//7436 8643//7182 8633//7180 +f 8666//7436 8633//7180 8658//7437 +f 8658//7437 8633//7180 8624//7178 +f 8658//7437 8624//7178 8649//7438 +f 8649//7438 8624//7178 8621//7375 +f 8649//7438 8621//7375 8640//7439 +f 8640//7439 8621//7375 8629//7374 +f 8640//7439 8629//7374 8647//7440 +f 8647//7440 8629//7374 8638//7373 +f 8647//7440 8638//7373 8656//7441 +f 
8656//7441 8638//7373 8652//7372 +f 8656//7441 8652//7372 8675//7429 +f 8433//6880 8351//7270 8300//7302 +f 8433//6880 8300//7302 8411//7442 +f 8504//6881 8433//6880 8411//7442 +f 8504//6881 8411//7442 8494//7443 +f 8537//7105 8504//6881 8494//7443 +f 8537//7105 8494//7443 8522//7444 +f 8570//7205 8537//7105 8522//7444 +f 8570//7205 8522//7444 8546//7445 +f 8588//6883 8570//7205 8546//7445 +f 8588//6883 8546//7445 8568//7446 +f 8595//6884 8588//6883 8568//7446 +f 8595//6884 8568//7446 8578//7447 +f 8582//7081 8592//7076 8579//7448 +f 8582//7081 8579//7448 8567//7449 +f 8579//7448 8592//7076 8599//7075 +f 8579//7448 8599//7075 8583//7450 +f 8583//7450 8599//7075 8600//6887 +f 8583//7450 8600//6887 8581//7451 +f 8581//7451 8600//6887 8595//6884 +f 8581//7451 8595//6884 8578//7447 +f 8418//6944 8491//6889 8473//7452 +f 8418//6944 8473//7452 8415//7453 +f 8473//7452 8491//6889 8527//6888 +f 8473//7452 8527//6888 8506//7454 +f 8506//7454 8527//6888 8558//6969 +f 8506//7454 8558//6969 8536//7455 +f 8536//7455 8558//6969 8582//7081 +f 8536//7455 8582//7081 8567//7449 +f 8345//6945 8418//6944 8415//7453 +f 8345//6945 8415//7453 8330//7456 +f 8224//7108 8345//6945 8330//7456 +f 8224//7108 8330//7456 8202//7457 +f 8068//6891 8224//7108 8202//7457 +f 8068//6891 8202//7457 8055//7458 +f 7965//6892 8068//6891 8055//7458 +f 7965//6892 8055//7458 7968//7459 +f 8411//7442 8300//7302 8245//7301 +f 8411//7442 8245//7301 8347//7460 +f 8347//7460 8455//7461 8494//7443 +f 8347//7460 8494//7443 8411//7442 +f 8522//7444 8494//7443 8455//7461 +f 8455//7461 8525//7462 8546//7445 +f 8455//7461 8546//7445 8522//7444 +f 8525//7462 8568//7446 8546//7445 +f 8578//7447 8568//7446 8525//7462 +f 8578//7447 8525//7462 8555//7463 +f 8581//7451 8578//7447 8555//7463 +f 8555//7463 8565//7464 8583//7450 +f 8555//7463 8583//7450 8581//7451 +f 8551//7465 8579//7448 8583//7450 +f 8551//7465 8583//7450 8565//7464 +f 8551//7465 8567//7449 8579//7448 +f 8551//7465 8519//7466 8536//7455 +f 8551//7465 8536//7455 8567//7449 +f 8506//7454 8536//7455 8519//7466 +f 8506//7454 8519//7466 8464//7467 +f 8464//7467 8473//7452 8506//7454 +f 8464//7467 8354//7468 8415//7453 +f 8464//7467 8415//7453 8473//7452 +f 8354//7468 8178//7469 8330//7456 +f 8354//7468 8330//7456 8415//7453 +f 8178//7469 8202//7457 8330//7456 +f 8178//7469 8014//7470 8055//7458 +f 8178//7469 8055//7458 8202//7457 +f 7968//7459 8055//7458 8014//7470 +f 7968//7459 8014//7470 7937//7471 +f 7928//6893 7965//6892 7968//7459 +f 7928//6893 7968//7459 7937//7471 +f 7979//7295 8017//7234 7846//7239 +f 7979//7295 7846//7239 7842//7297 +f 8017//7234 7979//7295 8154//7293 +f 8017//7234 8154//7293 8219//7235 +f 8278//7291 8376//7236 8219//7235 +f 8278//7291 8219//7235 8154//7293 +f 8304//7289 8435//7237 8376//7236 +f 8304//7289 8376//7236 8278//7291 +f 8435//7237 8304//7289 8272//7287 +f 8435//7237 8272//7287 8388//7238 +f 8198//7206 8252//7241 8329//7240 +f 8198//7206 8329//7240 8272//7287 +f 8255//7211 8353//7216 8305//7215 +f 8265//7253 8334//7248 8393//7249 +f 8265//7253 8393//7249 8301//7218 +f 8232//7252 8279//7250 8334//7248 +f 8232//7252 8334//7248 8265//7253 +f 8221//7265 8279//7250 8232//7252 +f 8058//7311 8127//7312 8065//7472 +f 8121//7251 8063//7473 8065//7472 +f 8121//7251 8065//7472 8127//7312 +f 8063//7473 8121//7251 8132//7254 +f 8063//7473 8132//7254 8067//7474 +f 8067//7474 8132//7254 8129//7217 +f 8067//7474 8129//7217 8071//7475 +f 8074//7476 8071//7475 8129//7217 +f 8074//7476 8129//7217 8134//7210 +f 8074//7476 8134//7210 8112//7208 +f 8074//7476 8112//7208 
8078//7477 +f 8118//7207 8073//7478 8078//7477 +f 8118//7207 8078//7477 8112//7208 +f 8073//7478 8118//7207 8137//7288 +f 8073//7478 8137//7288 8056//7479 +f 8135//7290 8042//7480 8056//7479 +f 8135//7290 8056//7479 8137//7288 +f 8042//7480 8135//7290 8085//7292 +f 8042//7480 8085//7292 7989//7481 +f 7989//7481 8085//7292 8046//7294 +f 7989//7481 8046//7294 7970//7482 +f 7963//7296 7914//7483 7970//7482 +f 7963//7296 7970//7482 8046//7294 +f 7914//7483 7963//7296 7840//7298 +f 7914//7483 7840//7298 7843//7484 +f 8196//7264 8221//7265 8127//7312 +f 8033//7485 8063//7473 8067//7474 +f 8033//7485 8067//7474 8026//7486 +f 8031//7487 8065//7472 8063//7473 +f 8031//7487 8063//7473 8033//7485 +f 8039//7308 8029//7488 8049//7307 +f 8029//7488 8039//7308 8044//7309 +f 8029//7488 8044//7309 8025//7489 +f 8025//7489 8044//7309 8053//7310 +f 8025//7489 8053//7310 8019//7490 +f 8058//7311 8024//7491 8019//7490 +f 8058//7311 8019//7490 8053//7310 +f 8024//7491 8058//7311 8065//7472 +f 8024//7491 8065//7472 8031//7487 +f 8026//7486 8067//7474 8071//7475 +f 8026//7486 8071//7475 8028//7492 +f 8035//7493 8074//7476 8078//7477 +f 8035//7493 8078//7477 8047//7494 +f 8028//7492 8071//7475 8074//7476 +f 8028//7492 8074//7476 8035//7493 +f 8047//7494 8078//7477 8073//7478 +f 8047//7494 8073//7478 8043//7495 +f 8056//7479 8027//7496 8043//7495 +f 8056//7479 8043//7495 8073//7478 +f 8042//7480 7985//7497 8027//7496 +f 8042//7480 8027//7496 8056//7479 +f 7985//7497 8042//7480 7989//7481 +f 7985//7497 7989//7481 7974//7498 +f 7974//7498 7989//7481 7970//7482 +f 7974//7498 7970//7482 7946//7499 +f 7946//7499 7970//7482 7914//7483 +f 7946//7499 7914//7483 7907//7500 +f 7907//7500 7914//7483 7843//7484 +f 7907//7500 7843//7484 7847//7501 +f 8209//7300 8315//7502 8347//7460 +f 8209//7300 8347//7460 8245//7301 +f 8315//7502 8209//7300 8172//7299 +f 8315//7502 8172//7299 8283//7503 +f 8283//7503 8172//7299 8142//7305 +f 8283//7503 8142//7305 8262//7504 +f 8142//7305 8109//7304 8239//7505 +f 8142//7305 8239//7505 8262//7504 +f 8143//7506 8171//7507 8072//7306 +f 8143//7506 8072//7306 8049//7307 +f 8088//7303 8072//7306 8171//7507 +f 8088//7303 8171//7507 8211//7508 +f 8088//7303 8211//7508 8239//7505 +f 8088//7303 8239//7505 8109//7304 +f 8029//7488 8107//7509 8143//7506 +f 8029//7488 8143//7506 8049//7307 +f 8107//7509 8029//7488 8025//7489 +f 8107//7509 8025//7489 8090//7510 +f 8019//7490 8087//7511 8090//7510 +f 8019//7490 8090//7510 8025//7489 +f 8087//7511 8019//7490 8024//7491 +f 8087//7511 8024//7491 8093//7512 +f 8093//7512 8024//7491 8031//7487 +f 8093//7512 8031//7487 8110//7513 +f 8110//7513 8031//7487 8033//7485 +f 8110//7513 8033//7485 8155//7514 +f 8155//7514 8033//7485 8026//7486 +f 8155//7514 8026//7486 8163//7515 +f 8163//7515 8026//7486 8028//7492 +f 8163//7515 8028//7492 8150//7516 +f 8150//7516 8028//7492 8035//7493 +f 8150//7516 8035//7493 8081//7517 +f 8081//7517 8035//7493 8047//7494 +f 8081//7517 8047//7494 8064//7518 +f 8064//7518 8047//7494 8043//7495 +f 8064//7518 8043//7495 8083//7519 +f 8083//7519 8043//7495 8027//7496 +f 8083//7519 8027//7496 8086//7520 +f 8086//7520 8027//7496 7985//7497 +f 8086//7520 7985//7497 8075//7521 +f 8051//7522 8075//7521 7985//7497 +f 8051//7522 7985//7497 7974//7498 +f 7962//7523 8051//7522 7974//7498 +f 7962//7523 7974//7498 7946//7499 +f 7912//7524 7962//7523 7946//7499 +f 7912//7524 7946//7499 7907//7500 +f 7852//7525 7912//7524 7907//7500 +f 7852//7525 7907//7500 7847//7501 +f 8359//7526 8378//7527 8262//7504 +f 8359//7526 8262//7504 8239//7505 +f 8405//7528 
8283//7503 8262//7504 +f 8405//7528 8262//7504 8378//7527 +f 8430//7529 8315//7502 8283//7503 +f 8430//7529 8283//7503 8405//7528 +f 8315//7502 8430//7529 8455//7461 +f 8315//7502 8455//7461 8347//7460 +f 7901//7530 7902//7152 7909//7165 +f 7901//7530 7909//7165 7910//7531 +f 7928//6893 7937//7471 7910//7531 +f 7928//6893 7910//7531 7909//7165 +f 7902//7152 7901//7530 7900//7532 +f 7902//7152 7900//7532 7899//7153 +f 7958//7533 7897//7534 7900//7532 +f 7897//7534 7958//7533 7960//7535 +f 7897//7534 7960//7535 7893//7536 +f 7973//7537 7958//7533 7900//7532 +f 7973//7537 7900//7532 7901//7530 +f 7910//7531 7984//7538 7973//7537 +f 7910//7531 7973//7537 7901//7530 +f 7984//7538 7910//7531 7937//7471 +f 7984//7538 7937//7471 8014//7470 +f 8144//7539 7984//7538 8014//7470 +f 8144//7539 8014//7470 8178//7469 +f 8326//7540 8144//7539 8178//7469 +f 8326//7540 8178//7469 8354//7468 +f 8446//7541 8326//7540 8354//7468 +f 8446//7541 8354//7468 8464//7467 +f 8525//7462 8455//7461 8430//7529 +f 8525//7462 8430//7529 8490//7542 +f 8490//7542 8531//7543 8555//7463 +f 8490//7542 8555//7463 8525//7462 +f 8531//7543 8549//7544 8565//7464 +f 8531//7543 8565//7464 8555//7463 +f 8549//7544 8540//7545 8551//7465 +f 8549//7544 8551//7465 8565//7464 +f 8540//7545 8508//7546 8519//7466 +f 8540//7545 8519//7466 8551//7465 +f 8508//7546 8446//7541 8464//7467 +f 8508//7546 8464//7467 8519//7466 +f 8722//7325 8709//7547 8716//7548 +f 8722//7325 8716//7548 8729//7324 +f 8729//7324 8716//7548 8728//7549 +f 8729//7324 8728//7549 8747//7326 +f 8747//7326 8728//7549 8742//7550 +f 8747//7326 8742//7550 8790//7327 +f 8790//7327 8742//7550 8768//7551 +f 8790//7327 8768//7551 8857//7328 +f 8857//7328 8768//7551 8829//7552 +f 8857//7328 8829//7552 8910//7329 +f 8910//7329 8829//7552 8846//7553 +f 8910//7329 8846//7553 8932//7330 +f 8838//7332 8757//7554 8731//7555 +f 8838//7332 8731//7555 8753//7331 +f 8884//7333 8794//7556 8757//7554 +f 8884//7333 8757//7554 8838//7332 +f 8921//7334 8840//7557 8794//7556 +f 8921//7334 8794//7556 8884//7333 +f 8932//7330 8846//7553 8840//7557 +f 8932//7330 8840//7557 8921//7334 +f 8724//7335 8712//7558 8709//7547 +f 8724//7335 8709//7547 8722//7325 +f 8732//7336 8719//7559 8712//7558 +f 8732//7336 8712//7558 8724//7335 +f 8753//7331 8731//7555 8719//7559 +f 8753//7331 8719//7559 8732//7336 +f 8709//7547 8741//7560 8716//7548 +f 8716//7548 8741//7560 8728//7549 +f 8728//7549 8741//7560 8742//7550 +f 8742//7550 8741//7560 8768//7551 +f 8768//7551 8741//7560 8829//7552 +f 8829//7552 8741//7560 8846//7553 +f 8757//7554 8741//7560 8731//7555 +f 8794//7556 8741//7560 8757//7554 +f 8840//7557 8741//7560 8794//7556 +f 8846//7553 8741//7560 8840//7557 +f 8712//7558 8741//7560 8709//7547 +f 8719//7559 8741//7560 8712//7558 +f 8731//7555 8741//7560 8719//7559 +f 8490//7542 8430//7529 8405//7528 +f 8490//7542 8405//7528 8470//7561 +f 8470//7561 8505//7562 8531//7543 +f 8470//7561 8531//7543 8490//7542 +f 8505//7562 8530//7563 8549//7544 +f 8505//7562 8549//7544 8531//7543 +f 8530//7563 8523//7564 8540//7545 +f 8530//7563 8540//7545 8549//7544 +f 8436//7565 8495//7566 8498//7567 +f 8498//7567 8479//7568 8390//7569 +f 8498//7567 8390//7569 8436//7565 +f 8479//7568 8443//7570 8359//7526 +f 8479//7568 8359//7526 8390//7569 +f 8443//7570 8378//7527 8359//7526 +f 8470//7561 8405//7528 8378//7527 +f 8470//7561 8378//7527 8443//7570 +f 8443//7570 8479//7568 8505//7562 +f 8443//7570 8505//7562 8470//7561 +f 8479//7568 8498//7567 8530//7563 +f 8479//7568 8530//7563 8505//7562 +f 8495//7566 8523//7564 8530//7563 +f 
8495//7566 8530//7563 8498//7567 +f 8523//7564 8488//7571 8508//7546 +f 8523//7564 8508//7546 8540//7545 +f 8416//7572 8446//7541 8508//7546 +f 8416//7572 8508//7546 8488//7571 +f 8416//7572 8288//7573 8326//7540 +f 8416//7572 8326//7540 8446//7541 +f 8102//7574 8144//7539 8326//7540 +f 8102//7574 8326//7540 8288//7573 +f 8144//7539 8102//7574 7973//7537 +f 8144//7539 7973//7537 7984//7538 +f 7958//7533 7973//7537 8102//7574 +f 7958//7533 8102//7574 8079//7575 +f 8102//7574 8288//7573 8235//7576 +f 8102//7574 8235//7576 8079//7575 +f 8372//7577 8235//7576 8288//7573 +f 8372//7577 8288//7573 8416//7572 +f 8416//7572 8488//7571 8460//7578 +f 8416//7572 8460//7578 8372//7577 +f 7960//7535 7958//7533 8079//7575 +f 7960//7535 8079//7575 8076//7579 +f 8076//7579 8079//7575 8235//7576 +f 8076//7579 8235//7576 8267//7580 +f 8372//7577 8267//7580 8235//7576 +f 8372//7577 8460//7578 8401//7581 +f 8372//7577 8401//7581 8267//7580 +f 8460//7578 8495//7566 8436//7565 +f 8460//7578 8436//7565 8401//7581 +f 8488//7571 8523//7564 8495//7566 +f 8488//7571 8495//7566 8460//7578 +f 7961//7582 7894//7583 7892//7584 +f 8391//7585 7894//7583 8437//7586 +f 8360//7587 7894//7583 8391//7585 +f 8077//7588 7894//7583 7961//7582 +f 8268//7589 7894//7583 8077//7588 +f 8402//7590 7894//7583 8268//7589 +f 8437//7586 7894//7583 8402//7590 +f 7874//7286 7952//7107 7945//7106 +f 7874//7286 7945//7106 7877//7591 +f 7877//7591 7945//7106 7948//7278 +f 7877//7591 7948//7278 7880//7279 +f 7894//7583 8360//7587 7898//7592 +f 8239//7505 8211//7508 8316//7593 +f 8239//7505 8316//7593 8358//7594 +f 8257//7595 8316//7593 8211//7508 +f 8257//7595 8211//7508 8171//7507 +f 8257//7595 8171//7507 8143//7506 +f 8257//7595 8143//7506 8230//7596 +f 8143//7506 8107//7509 8208//7597 +f 8143//7506 8208//7597 8230//7596 +f 8167//7598 8208//7597 8107//7509 +f 8167//7598 8107//7509 8090//7510 +f 8126//7599 8167//7598 8090//7510 +f 8126//7599 8090//7510 8087//7511 +f 8105//7600 8126//7599 8087//7511 +f 8105//7600 8087//7511 8093//7512 +f 8100//7601 8105//7600 8093//7512 +f 8100//7601 8093//7512 8110//7513 +f 8122//7602 8100//7601 8110//7513 +f 8122//7602 8110//7513 8155//7514 +f 8059//7603 8111//7604 8150//7516 +f 8059//7603 8150//7516 8081//7517 +f 8163//7515 8150//7516 8111//7604 +f 8163//7515 8111//7604 8152//7605 +f 8155//7514 8163//7515 8152//7605 +f 8155//7514 8152//7605 8122//7602 +f 8030//7606 8059//7603 8081//7517 +f 8030//7606 8081//7517 8064//7518 +f 8021//7607 8030//7606 8064//7518 +f 8021//7607 8064//7518 8083//7519 +f 8083//7519 8086//7520 8009//7608 +f 8083//7519 8009//7608 8021//7607 +f 8075//7521 7994//7609 8009//7608 +f 8075//7521 8009//7608 8086//7520 +f 8051//7522 7977//7610 7994//7609 +f 8051//7522 7994//7609 8075//7521 +f 7977//7610 8051//7522 7962//7523 +f 7977//7610 7962//7523 7942//7611 +f 7942//7611 7962//7523 7912//7524 +f 7942//7611 7912//7524 7903//7612 +f 7903//7612 7912//7524 7852//7525 +f 7903//7612 7852//7525 7856//7613 +f 8534//6993 8585//7405 8563//7404 +f 8534//6993 8563//7404 8512//6994 +f 8597//7407 8585//7405 8534//6993 +f 8597//7407 8534//6993 8553//7012 +f 8561//7011 8604//7409 8597//7407 +f 8561//7011 8597//7407 8553//7012 +f 8603//7411 8604//7409 8561//7011 +f 8603//7411 8561//7011 8562//7054 +f 8598//7413 8603//7411 8562//7054 +f 8598//7413 8562//7054 8559//7009 +f 8542//7007 8590//7415 8598//7413 +f 8542//7007 8598//7413 8559//7009 +f 8571//7417 8590//7415 8542//7007 +f 8571//7417 8542//7007 8520//7005 +f 8503//7002 8545//7419 8571//7417 +f 8503//7002 8571//7417 8520//7005 +f 8483//7001 8526//7421 
8545//7419 +f 8483//7001 8545//7419 8503//7002 +f 8478//7063 8515//7423 8526//7421 +f 8478//7063 8526//7421 8483//7001 +f 8480//6999 8524//7425 8515//7423 +f 8480//6999 8515//7423 8478//7063 +f 8497//6997 8538//7427 8524//7425 +f 8497//6997 8524//7425 8480//6999 +f 8512//6994 8563//7404 8538//7427 +f 8512//6994 8538//7427 8497//6997 +f 8699//7431 8718//7381 8705//7380 +f 8699//7431 8705//7380 8687//7430 +f 8721//7382 8718//7381 8699//7431 +f 8721//7382 8699//7431 8700//7432 +f 8697//7433 8715//7383 8721//7382 +f 8697//7433 8721//7382 8700//7432 +f 8704//7384 8715//7383 8697//7433 +f 8704//7384 8697//7433 8688//7434 +f 8680//7435 8692//7385 8704//7384 +f 8680//7435 8704//7384 8688//7434 +f 8684//7386 8692//7385 8680//7435 +f 8684//7386 8680//7435 8666//7436 +f 8676//7387 8684//7386 8666//7436 +f 8676//7387 8666//7436 8658//7437 +f 8665//7388 8676//7387 8658//7437 +f 8665//7388 8658//7437 8649//7438 +f 8661//7389 8665//7388 8649//7438 +f 8661//7389 8649//7438 8640//7439 +f 8667//7390 8661//7389 8640//7439 +f 8667//7390 8640//7439 8647//7440 +f 8681//7391 8667//7390 8647//7440 +f 8681//7391 8647//7440 8656//7441 +f 8691//7379 8681//7391 8656//7441 +f 8691//7379 8656//7441 8675//7429 +f 8705//7380 8691//7379 8675//7429 +f 8705//7380 8675//7429 8687//7430 +f 8301//7218 8393//7249 8353//7216 +f 8301//7218 8353//7216 8255//7211 +f 8393//7249 8397//7243 8356//7213 +f 8393//7249 8356//7213 8353//7216 +f 8307//7214 8356//7213 8352//7242 +f 8356//7213 8397//7243 8352//7242 +f 8285//7614 8296//7615 8247//7323 +f 8275//7616 8284//7617 8296//7615 +f 8275//7616 8296//7615 8285//7614 +f 8203//7209 8284//7617 8275//7616 +f 8255//7211 8305//7215 8284//7617 +f 8255//7211 8284//7617 8203//7209 +f 8296//7615 8284//7617 8305//7215 +f 8296//7615 8305//7215 8322//7212 +f 8322//7212 8307//7214 8247//7323 +f 8322//7212 8247//7323 8296//7615 +f 8214//7233 8269//7232 8285//7614 +f 8214//7233 8285//7614 8247//7323 +f 8275//7616 8285//7614 8269//7232 +f 8275//7616 8269//7232 8252//7241 +f 8252//7241 8198//7206 8203//7209 +f 8252//7241 8203//7209 8275//7616 +f 8388//7238 8272//7287 8329//7240 +f 8381//7229 8332//7231 8294//7230 +f 8388//7238 8329//7240 8332//7231 +f 8388//7238 8332//7231 8381//7229 +f 7998//7618 7915//7619 7924//7620 +f 7998//7618 7924//7620 8003//7621 +f 8007//7622 7929//7623 7930//7624 +f 8007//7622 7930//7624 8006//7625 +f 7919//7626 8001//7627 8000//7628 +f 7919//7626 8000//7628 7926//7629 +f 8000//7628 8001//7627 8103//7630 +f 8000//7628 8103//7630 8095//7631 +f 8096//7632 8007//7622 8006//7625 +f 8096//7632 8006//7625 8098//7633 +f 8003//7621 8091//7634 8097//7635 +f 8003//7621 8097//7635 7998//7618 +f 8010//7636 7923//7637 7915//7619 +f 8010//7636 7915//7619 7998//7618 +f 8097//7635 8114//7638 8010//7636 +f 8097//7635 8010//7636 7998//7618 +f 7913//7639 7862//7640 7864//7641 +f 7913//7639 7864//7641 7922//7642 +f 8037//7643 7986//7644 8070//7645 +f 8037//7643 8070//7645 8082//7646 +f 7987//7647 8070//7645 7986//7644 +f 7987//7647 7986//7644 7966//7648 +f 7922//7642 7987//7647 7966//7648 +f 7922//7642 7966//7648 7913//7639 +f 7986//7644 8037//7643 7988//7649 +f 7986//7644 7988//7649 7978//7650 +f 7978//7650 7947//7651 7966//7648 +f 7978//7650 7966//7648 7986//7644 +f 7947//7651 7908//7652 7913//7639 +f 7947//7651 7913//7639 7966//7648 +f 7908//7652 7861//7653 7862//7640 +f 7908//7652 7862//7640 7913//7639 +f 8060//7654 8069//7655 7992//7656 +f 8060//7654 7992//7656 7995//7657 +f 8037//7643 8060//7654 7995//7657 +f 8037//7643 7995//7657 7988//7649 +f 7983//7658 7992//7656 8069//7655 +f 7983//7658 
8069//7655 8052//7659 +f 7972//7660 7983//7658 8052//7659 +f 7972//7660 8052//7659 8004//7661 +f 7943//7662 7972//7660 8004//7661 +f 7943//7662 8004//7661 7967//7663 +f 7905//7664 7943//7662 7967//7663 +f 7905//7664 7967//7663 7911//7665 +f 7853//7666 7905//7664 7911//7665 +f 7853//7666 7911//7665 7848//7667 +f 7915//7619 7923//7637 7878//7668 +f 7915//7619 7878//7668 7879//7669 +f 7924//7620 7915//7619 7879//7669 +f 7924//7620 7879//7669 7883//7670 +f 7929//7623 7885//7671 7887//7672 +f 7929//7623 7887//7672 7930//7624 +f 7889//7673 7891//7674 7919//7626 +f 7889//7673 7919//7626 7926//7629 +f 7936//7675 7869//7676 7871//7677 +f 7936//7675 7871//7677 7931//7678 +f 8022//7679 7936//7675 7931//7678 +f 8022//7679 7931//7678 8015//7680 +f 8010//7636 8114//7638 8128//7681 +f 8010//7636 8128//7681 8013//7682 +f 8013//7682 7927//7683 7923//7637 +f 8013//7682 7923//7637 8010//7636 +f 7878//7668 7923//7637 7927//7683 +f 7878//7668 7927//7683 7876//7684 +f 8095//7631 8103//7630 8213//7685 +f 8095//7631 8213//7685 8193//7686 +f 8186//7687 8174//7688 8096//7632 +f 8186//7687 8096//7632 8098//7633 +f 8097//7635 8091//7634 8170//7689 +f 8097//7635 8170//7689 8181//7690 +f 8181//7690 8229//7691 8114//7638 +f 8181//7690 8114//7638 8097//7635 +f 8253//7692 8128//7681 8114//7638 +f 8253//7692 8114//7638 8229//7691 +f 8091//7634 8096//7632 8174//7688 +f 8091//7634 8174//7688 8170//7689 +f 8003//7621 8007//7622 8096//7632 +f 8003//7621 8096//7632 8091//7634 +f 7929//7623 8007//7622 8003//7621 +f 7929//7623 8003//7621 7924//7620 +f 7885//7671 7929//7623 7924//7620 +f 7885//7671 7924//7620 7883//7670 +f 7917//7693 7919//7626 7891//7674 +f 7917//7693 7891//7674 7895//7694 +f 8005//7695 8001//7627 7919//7626 +f 8005//7695 7919//7626 7917//7693 +f 8119//7696 8103//7630 8001//7627 +f 8119//7696 8001//7627 8005//7695 +f 8243//7697 8213//7685 8103//7630 +f 8243//7697 8103//7630 8119//7696 +f 7978//7650 7988//7649 7990//7698 +f 7978//7650 7990//7698 7975//7699 +f 7947//7651 7978//7650 7975//7699 +f 7947//7651 7975//7699 7941//7700 +f 7908//7652 7947//7651 7941//7700 +f 7908//7652 7941//7700 7906//7701 +f 7861//7653 7908//7652 7906//7701 +f 7861//7653 7906//7701 7863//7702 +f 7995//7657 7992//7656 7993//7703 +f 7995//7657 7993//7703 7997//7704 +f 7988//7649 7995//7657 7997//7704 +f 7988//7649 7997//7704 7990//7698 +f 7992//7656 7983//7658 7982//7705 +f 7992//7656 7982//7705 7993//7703 +f 7983//7658 7972//7660 7971//7706 +f 7983//7658 7971//7706 7982//7705 +f 7972//7660 7943//7662 7940//7707 +f 7972//7660 7940//7707 7971//7706 +f 7943//7662 7905//7664 7904//7708 +f 7943//7662 7904//7708 7940//7707 +f 7905//7664 7853//7666 7854//7709 +f 7905//7664 7854//7709 7904//7708 +f 7975//7699 7990//7698 7859//7710 +f 7941//7700 7975//7699 7859//7710 +f 7906//7701 7941//7700 7859//7710 +f 7863//7702 7906//7701 7859//7710 +f 7997//7704 7993//7703 7859//7710 +f 7990//7698 7997//7704 7859//7710 +f 7993//7703 7982//7705 7859//7710 +f 7982//7705 7971//7706 7859//7710 +f 7971//7706 7940//7707 7859//7710 +f 7940//7707 7904//7708 7859//7710 +f 7904//7708 7854//7709 7859//7710 +f 8130//7711 8022//7679 8015//7680 +f 8130//7711 8015//7680 8133//7712 +f 8133//7712 8266//7713 8254//7714 +f 8133//7712 8254//7714 8130//7711 +f 7987//7647 8123//7715 8149//7716 +f 7987//7647 8149//7716 8070//7645 +f 8149//7716 8160//7717 8082//7646 +f 8149//7716 8082//7646 8070//7645 +f 8082//7646 8101//7718 8060//7654 +f 8082//7646 8060//7654 8037//7643 +f 8101//7718 8113//7719 8069//7655 +f 8101//7718 8069//7655 8060//7654 +f 8113//7719 8092//7720 8052//7659 +f 
8113//7719 8052//7659 8069//7655
[... remaining "f v//vn v//vn v//vn" face definitions of the added OBJ mesh asset omitted ...]
+f 
7098//8814 7091//8815 7109//8749 +f 7098//8814 7109//8749 7117//8750 +f 7091//8815 7082//8816 7100//8748 +f 7091//8815 7100//8748 7109//8749 +f 7082//8816 7063//8804 7086//8747 +f 7082//8816 7086//8747 7100//8748 +f 7305//8277 7327//8817 7438//8678 +f 7305//8277 7438//8678 7387//8651 +f 7234//8276 7244//8818 7327//8817 +f 7234//8276 7327//8817 7305//8277 +f 7201//8492 7216//8819 7244//8818 +f 7201//8492 7244//8818 7234//8276 +f 7168//8589 7192//8820 7216//8819 +f 7168//8589 7216//8819 7201//8492 +f 7150//8278 7170//8821 7192//8820 +f 7150//8278 7192//8820 7168//8589 +f 7143//8281 7160//8822 7170//8821 +f 7143//8281 7170//8821 7150//8278 +f 7156//8470 7171//8823 7159//8824 +f 7156//8470 7159//8824 7146//8467 +f 7159//8824 7155//8825 7139//8464 +f 7159//8824 7139//8464 7146//8467 +f 7155//8825 7157//8826 7138//8282 +f 7155//8825 7138//8282 7139//8464 +f 7157//8826 7160//8822 7143//8281 +f 7157//8826 7143//8281 7138//8282 +f 7320//8335 7323//8827 7265//8828 +f 7320//8335 7265//8828 7247//8285 +f 7265//8828 7232//8829 7211//8283 +f 7265//8828 7211//8283 7247//8285 +f 7232//8829 7202//8830 7180//8360 +f 7232//8829 7180//8360 7211//8283 +f 7202//8830 7171//8823 7156//8470 +f 7202//8830 7156//8470 7180//8360 +f 7393//8338 7408//8831 7323//8827 +f 7393//8338 7323//8827 7320//8335 +f 7514//8495 7536//8832 7408//8831 +f 7514//8495 7408//8831 7393//8338 +f 7670//8286 7683//8833 7536//8832 +f 7670//8286 7536//8832 7514//8495 +f 7773//8289 7770//8834 7683//8833 +f 7773//8289 7683//8833 7670//8286 +f 7327//8817 7391//8835 7493//8677 +f 7327//8817 7493//8677 7438//8678 +f 7391//8835 7327//8817 7244//8818 +f 7391//8835 7244//8818 7283//8836 +f 7216//8819 7283//8836 7244//8818 +f 7283//8836 7216//8819 7192//8820 +f 7283//8836 7192//8820 7213//8837 +f 7213//8837 7192//8820 7170//8821 +f 7160//8822 7183//8838 7213//8837 +f 7160//8822 7213//8837 7170//8821 +f 7157//8826 7183//8838 7160//8822 +f 7183//8838 7157//8826 7155//8825 +f 7183//8838 7155//8825 7173//8839 +f 7187//8840 7173//8839 7155//8825 +f 7187//8840 7155//8825 7159//8824 +f 7187//8840 7159//8824 7171//8823 +f 7187//8840 7171//8823 7202//8830 +f 7187//8840 7202//8830 7219//8841 +f 7232//8829 7274//8842 7219//8841 +f 7232//8829 7219//8841 7202//8830 +f 7274//8842 7232//8829 7265//8828 +f 7274//8842 7265//8828 7323//8827 +f 7274//8842 7323//8827 7384//8843 +f 7384//8843 7323//8827 7408//8831 +f 7384//8843 7408//8831 7560//8844 +f 7560//8844 7408//8831 7536//8832 +f 7560//8844 7536//8832 7683//8833 +f 7560//8844 7683//8833 7724//8845 +f 7770//8834 7801//8846 7724//8845 +f 7770//8834 7724//8845 7683//8833 +f 7810//8288 7801//8846 7770//8834 +f 7810//8288 7770//8834 7773//8289 +f 7759//8673 7842//7297 7846//7239 +f 7759//8673 7846//7239 7721//8617 +f 7721//8617 7519//8616 7584//8672 +f 7721//8617 7584//8672 7759//8673 +f 7460//8670 7584//8672 7519//8616 +f 7460//8670 7519//8616 7362//8618 +f 7434//8667 7460//8670 7362//8618 +f 7434//8667 7362//8618 7303//8619 +f 7303//8619 7350//8620 7466//8666 +f 7303//8619 7466//8666 7434//8667 +f 7540//8590 7466//8666 7409//8622 +f 7540//8590 7409//8622 7486//8621 +f 7483//8595 7433//8599 7385//8600 +f 7473//8634 7437//8602 7345//8630 +f 7473//8634 7345//8630 7404//8629 +f 7506//8635 7473//8634 7404//8629 +f 7506//8635 7404//8629 7459//8631 +f 7517//8646 7506//8635 7459//8631 +f 7680//8687 7673//8847 7611//8688 +f 7617//8632 7611//8688 7673//8847 +f 7617//8632 7673//8847 7675//8848 +f 7675//8848 7671//8849 7606//8633 +f 7675//8848 7606//8633 7617//8632 +f 7671//8849 7667//8850 7609//8601 +f 7671//8849 7609//8601 
7606//8633 +f 7664//8851 7604//8594 7609//8601 +f 7664//8851 7609//8601 7667//8850 +f 7664//8851 7660//8852 7626//8592 +f 7664//8851 7626//8592 7604//8594 +f 7620//8593 7626//8592 7660//8852 +f 7620//8593 7660//8852 7665//8853 +f 7665//8853 7682//8854 7601//8665 +f 7665//8853 7601//8665 7620//8593 +f 7603//8668 7601//8665 7682//8854 +f 7603//8668 7682//8854 7696//8855 +f 7696//8855 7749//8856 7653//8669 +f 7696//8855 7653//8669 7603//8668 +f 7749//8856 7768//8857 7692//8671 +f 7749//8856 7692//8671 7653//8669 +f 7775//8674 7692//8671 7768//8857 +f 7775//8674 7768//8857 7824//8858 +f 7824//8858 7843//7484 7840//7298 +f 7824//8858 7840//7298 7775//8674 +f 7542//8645 7611//8688 7517//8646 +f 7705//8859 7712//8860 7671//8849 +f 7705//8859 7671//8849 7675//8848 +f 7707//8861 7705//8859 7675//8848 +f 7707//8861 7675//8848 7673//8847 +f 7699//8684 7689//8683 7709//8862 +f 7709//8862 7713//8863 7694//8685 +f 7709//8862 7694//8685 7699//8684 +f 7713//8863 7719//8864 7685//8686 +f 7713//8863 7685//8686 7694//8685 +f 7680//8687 7685//8686 7719//8864 +f 7680//8687 7719//8864 7714//8865 +f 7714//8865 7707//8861 7673//8847 +f 7714//8865 7673//8847 7680//8687 +f 7712//8860 7710//8866 7667//8850 +f 7712//8860 7667//8850 7671//8849 +f 7703//8867 7691//8868 7660//8852 +f 7703//8867 7660//8852 7664//8851 +f 7710//8866 7703//8867 7664//8851 +f 7710//8866 7664//8851 7667//8850 +f 7691//8868 7695//8869 7665//8853 +f 7691//8868 7665//8853 7660//8852 +f 7682//8854 7665//8853 7695//8869 +f 7682//8854 7695//8869 7711//8870 +f 7696//8855 7682//8854 7711//8870 +f 7696//8855 7711//8870 7753//8871 +f 7753//8871 7764//8872 7749//8856 +f 7753//8871 7749//8856 7696//8855 +f 7764//8872 7792//8873 7768//8857 +f 7764//8872 7768//8857 7749//8856 +f 7792//8873 7831//8874 7824//8858 +f 7792//8873 7824//8858 7768//8857 +f 7831//8874 7847//7501 7843//7484 +f 7831//8874 7843//7484 7824//8858 +f 7529//8675 7493//8677 7391//8835 +f 7529//8675 7391//8835 7423//8875 +f 7423//8875 7455//8876 7566//8676 +f 7423//8875 7566//8676 7529//8675 +f 7455//8876 7476//8877 7596//8681 +f 7455//8876 7596//8681 7566//8676 +f 7596//8681 7476//8877 7499//8878 +f 7596//8681 7499//8878 7629//8679 +f 7595//8879 7689//8683 7666//8682 +f 7595//8879 7666//8682 7567//8880 +f 7650//8680 7527//8881 7567//8880 +f 7650//8680 7567//8880 7666//8682 +f 7650//8680 7629//8679 7499//8878 +f 7650//8680 7499//8878 7527//8881 +f 7709//8862 7689//8683 7595//8879 +f 7709//8862 7595//8879 7631//8882 +f 7631//8882 7648//8883 7713//8863 +f 7631//8882 7713//8863 7709//8862 +f 7719//8864 7713//8863 7648//8883 +f 7719//8864 7648//8883 7651//8884 +f 7651//8884 7645//8885 7714//8865 +f 7651//8884 7714//8865 7719//8864 +f 7645//8885 7628//8886 7707//8861 +f 7645//8885 7707//8861 7714//8865 +f 7628//8886 7583//8887 7705//8859 +f 7628//8886 7705//8859 7707//8861 +f 7583//8887 7575//8888 7712//8860 +f 7583//8887 7712//8860 7705//8859 +f 7575//8888 7588//8889 7710//8866 +f 7575//8888 7710//8866 7712//8860 +f 7588//8889 7657//8890 7703//8867 +f 7588//8889 7703//8867 7710//8866 +f 7657//8890 7674//8891 7691//8868 +f 7657//8890 7691//8868 7703//8867 +f 7674//8891 7655//8892 7695//8869 +f 7674//8891 7695//8869 7691//8868 +f 7655//8892 7652//8893 7711//8870 +f 7655//8892 7711//8870 7695//8869 +f 7652//8893 7663//8894 7753//8871 +f 7652//8893 7753//8871 7711//8870 +f 7687//8895 7764//8872 7753//8871 +f 7687//8895 7753//8871 7663//8894 +f 7776//8896 7792//8873 7764//8872 +f 7776//8896 7764//8872 7687//8895 +f 7826//8897 7831//8874 7792//8873 +f 7826//8897 7792//8873 7776//8896 +f 7852//7525 
7847//7501 7831//8874 +f 7852//7525 7831//8874 7826//8897 +f 7379//8898 7499//8878 7476//8877 +f 7379//8898 7476//8877 7360//8899 +f 7333//8900 7360//8899 7476//8877 +f 7333//8900 7476//8877 7455//8876 +f 7308//8901 7333//8900 7455//8876 +f 7308//8901 7455//8876 7423//8875 +f 7423//8875 7391//8835 7283//8836 +f 7423//8875 7283//8836 7308//8901 +f 7837//8902 7828//8903 7829//8549 +f 7837//8902 7829//8549 7836//8536 +f 7810//8288 7829//8549 7828//8903 +f 7810//8288 7828//8903 7801//8846 +f 7836//8536 7839//8537 7838//8904 +f 7836//8536 7838//8904 7837//8902 +f 7780//8905 7838//8904 7897//7534 +f 7897//7534 7893//7536 7778//8906 +f 7897//7534 7778//8906 7780//8905 +f 7765//8907 7837//8902 7838//8904 +f 7765//8907 7838//8904 7780//8905 +f 7828//8903 7837//8902 7765//8907 +f 7828//8903 7765//8907 7754//8908 +f 7754//8908 7724//8845 7801//8846 +f 7754//8908 7801//8846 7828//8903 +f 7594//8909 7560//8844 7724//8845 +f 7594//8909 7724//8845 7754//8908 +f 7412//8910 7384//8843 7560//8844 +f 7412//8910 7560//8844 7594//8909 +f 7292//8911 7274//8842 7384//8843 +f 7292//8911 7384//8843 7412//8910 +f 7213//8837 7248//8912 7308//8901 +f 7213//8837 7308//8901 7283//8836 +f 7248//8912 7213//8837 7183//8838 +f 7248//8912 7183//8838 7207//8913 +f 7207//8913 7183//8838 7173//8839 +f 7207//8913 7173//8839 7189//8914 +f 7189//8914 7173//8839 7187//8840 +f 7189//8914 7187//8840 7198//8915 +f 7198//8915 7187//8840 7219//8841 +f 7198//8915 7219//8841 7230//8916 +f 7230//8916 7219//8841 7274//8842 +f 7230//8916 7274//8842 7292//8911 +f 7016//8700 7009//8699 7022//8917 +f 7016//8700 7022//8917 7029//8918 +f 7009//8699 6991//8701 7010//8919 +f 7009//8699 7010//8919 7022//8917 +f 6991//8701 6948//8702 6996//8920 +f 6991//8701 6996//8920 7010//8919 +f 6948//8702 6881//8703 6970//8921 +f 6948//8702 6970//8921 6996//8920 +f 6881//8703 6828//8704 6909//8922 +f 6881//8703 6909//8922 6970//8921 +f 6828//8704 6806//8705 6892//8923 +f 6828//8704 6892//8923 6909//8922 +f 6900//8707 6985//8706 7007//8924 +f 6900//8707 7007//8924 6981//8925 +f 6854//8708 6900//8707 6981//8925 +f 6854//8708 6981//8925 6944//8926 +f 6817//8709 6854//8708 6944//8926 +f 6817//8709 6944//8926 6898//8927 +f 6806//8705 6817//8709 6898//8927 +f 6806//8705 6898//8927 6892//8923 +f 7014//8710 7016//8700 7029//8918 +f 7014//8710 7029//8918 7026//8928 +f 7006//8711 7014//8710 7026//8928 +f 7006//8711 7026//8928 7019//8929 +f 6985//8706 7006//8711 7019//8929 +f 6985//8706 7019//8929 7007//8924 +f 7029//8918 7022//8917 6997//8930 +f 7022//8917 7010//8919 6997//8930 +f 7010//8919 6996//8920 6997//8930 +f 6996//8920 6970//8921 6997//8930 +f 6970//8921 6909//8922 6997//8930 +f 6909//8922 6892//8923 6997//8930 +f 6981//8925 7007//8924 6997//8930 +f 6944//8926 6981//8925 6997//8930 +f 6898//8927 6944//8926 6997//8930 +f 6892//8923 6898//8927 6997//8930 +f 7026//8928 7029//8918 6997//8930 +f 7019//8929 7026//8928 6997//8930 +f 7007//8924 7019//8929 6997//8930 +f 7248//8912 7268//8931 7333//8900 +f 7248//8912 7333//8900 7308//8901 +f 7268//8931 7248//8912 7207//8913 +f 7268//8931 7207//8913 7233//8932 +f 7233//8932 7207//8913 7189//8914 +f 7233//8932 7189//8914 7208//8933 +f 7208//8933 7189//8914 7198//8915 +f 7208//8933 7198//8915 7215//8934 +f 7302//8935 7240//8936 7243//8937 +f 7240//8936 7302//8935 7348//8938 +f 7240//8936 7348//8938 7259//8939 +f 7259//8939 7348//8938 7379//8898 +f 7259//8939 7379//8898 7295//8940 +f 7295//8940 7379//8898 7360//8899 +f 7268//8931 7295//8940 7360//8899 +f 7268//8931 7360//8899 7333//8900 +f 7295//8940 7268//8931 7233//8932 +f 
7295//8940 7233//8932 7259//8939 +f 7259//8939 7233//8932 7208//8933 +f 7259//8939 7208//8933 7240//8936 +f 7243//8937 7240//8936 7208//8933 +f 7243//8937 7208//8933 7215//8934 +f 7215//8934 7198//8915 7230//8916 +f 7215//8934 7230//8916 7250//8941 +f 7322//8942 7250//8941 7230//8916 +f 7322//8942 7230//8916 7292//8911 +f 7322//8942 7292//8911 7412//8910 +f 7322//8942 7412//8910 7450//8943 +f 7636//8944 7450//8943 7412//8910 +f 7636//8944 7412//8910 7594//8909 +f 7594//8909 7754//8908 7765//8907 +f 7594//8909 7765//8907 7636//8944 +f 7780//8905 7659//8945 7636//8944 +f 7780//8905 7636//8944 7765//8907 +f 7636//8944 7659//8945 7503//8946 +f 7636//8944 7503//8946 7450//8943 +f 7366//8947 7322//8942 7450//8943 +f 7366//8947 7450//8943 7503//8946 +f 7322//8942 7366//8947 7278//8948 +f 7322//8942 7278//8948 7250//8941 +f 7778//8906 7662//8949 7659//8945 +f 7778//8906 7659//8945 7780//8905 +f 7662//8949 7471//8950 7503//8946 +f 7662//8949 7503//8946 7659//8945 +f 7366//8947 7503//8946 7471//8950 +f 7366//8947 7471//8950 7337//8951 +f 7366//8947 7337//8951 7278//8948 +f 7278//8948 7337//8951 7302//8935 +f 7278//8948 7302//8935 7243//8937 +f 7250//8941 7278//8948 7243//8937 +f 7250//8941 7243//8937 7215//8934 +f 7777//8952 7892//7584 7894//7583 +f 7347//8953 7301//8954 7894//7583 +f 7378//8955 7347//8953 7894//7583 +f 7661//8956 7777//8952 7894//7583 +f 7470//8957 7661//8956 7894//7583 +f 7336//8958 7470//8957 7894//7583 +f 7301//8954 7336//8958 7894//7583 +f 7874//7286 7877//7591 7793//8493 +f 7874//7286 7793//8493 7786//8494 +f 7877//7591 7880//7279 7790//8659 +f 7877//7591 7790//8659 7793//8493 +f 7894//7583 7898//7592 7378//8955 +f 7499//8878 7380//8959 7422//8960 +f 7499//8878 7422//8960 7527//8881 +f 7481//8961 7567//8880 7527//8881 +f 7481//8961 7527//8881 7422//8960 +f 7481//8961 7508//8962 7595//8879 +f 7481//8961 7595//8879 7567//8880 +f 7595//8879 7508//8962 7530//8963 +f 7595//8879 7530//8963 7631//8882 +f 7571//8964 7648//8883 7631//8882 +f 7571//8964 7631//8882 7530//8963 +f 7612//8965 7651//8884 7648//8883 +f 7612//8965 7648//8883 7571//8964 +f 7633//8966 7645//8885 7651//8884 +f 7633//8966 7651//8884 7612//8965 +f 7638//8967 7628//8886 7645//8885 +f 7638//8967 7645//8885 7633//8966 +f 7616//8968 7583//8887 7628//8886 +f 7616//8968 7628//8886 7638//8967 +f 7679//8969 7657//8890 7588//8889 +f 7679//8969 7588//8889 7627//8970 +f 7575//8888 7586//8971 7627//8970 +f 7575//8888 7627//8970 7588//8889 +f 7583//8887 7616//8968 7586//8971 +f 7583//8887 7586//8971 7575//8888 +f 7708//8972 7674//8891 7657//8890 +f 7708//8972 7657//8890 7679//8969 +f 7717//8973 7655//8892 7674//8891 +f 7717//8973 7674//8891 7708//8972 +f 7655//8892 7717//8973 7729//8974 +f 7655//8892 7729//8974 7652//8893 +f 7663//8894 7652//8893 7729//8974 +f 7663//8894 7729//8974 7744//8975 +f 7687//8895 7663//8894 7744//8975 +f 7687//8895 7744//8975 7761//8976 +f 7761//8976 7796//8977 7776//8896 +f 7761//8976 7776//8896 7687//8895 +f 7796//8977 7835//8978 7826//8897 +f 7796//8977 7826//8897 7776//8896 +f 7835//8978 7856//7613 7852//7525 +f 7835//8978 7852//7525 7826//8897 +f 7204//8384 7226//8387 7175//8781 +f 7204//8384 7175//8781 7153//8780 +f 7141//8783 7185//8405 7204//8384 +f 7141//8783 7204//8384 7153//8780 +f 7177//8402 7185//8405 7141//8783 +f 7177//8402 7141//8783 7134//8785 +f 7135//8787 7176//8444 7177//8402 +f 7135//8787 7177//8402 7134//8785 +f 7140//8789 7179//8401 7176//8444 +f 7140//8789 7176//8444 7135//8787 +f 7196//8399 7179//8401 7140//8789 +f 7196//8399 7140//8789 7148//8791 +f 7167//8793 7218//8397 
7196//8399 +f 7167//8793 7196//8399 7148//8791 +f 7235//8395 7218//8397 7167//8793 +f 7235//8395 7167//8793 7193//8795 +f 7255//8392 7235//8395 7193//8795 +f 7255//8392 7193//8795 7212//8797 +f 7260//8451 7255//8392 7212//8797 +f 7260//8451 7212//8797 7223//8799 +f 7258//8391 7260//8451 7223//8799 +f 7258//8391 7223//8799 7214//8801 +f 7241//8389 7258//8391 7214//8801 +f 7241//8389 7214//8801 7200//8803 +f 7226//8387 7241//8389 7200//8803 +f 7226//8387 7200//8803 7175//8781 +f 7039//8806 7051//8805 7033//8755 +f 7039//8806 7033//8755 7020//8756 +f 7017//8757 7038//8807 7039//8806 +f 7017//8757 7039//8806 7020//8756 +f 7041//8808 7038//8807 7017//8757 +f 7041//8808 7017//8757 7023//8758 +f 7034//8759 7050//8809 7041//8808 +f 7034//8759 7041//8808 7023//8758 +f 7058//8810 7050//8809 7034//8759 +f 7058//8810 7034//8759 7046//8760 +f 7054//8761 7072//8811 7058//8810 +f 7054//8761 7058//8810 7046//8760 +f 7062//8762 7080//8812 7072//8811 +f 7062//8762 7072//8811 7054//8761 +f 7073//8763 7089//8813 7080//8812 +f 7073//8763 7080//8812 7062//8762 +f 7077//8764 7098//8814 7089//8813 +f 7077//8764 7089//8813 7073//8763 +f 7071//8765 7091//8815 7098//8814 +f 7071//8765 7098//8814 7077//8764 +f 7057//8766 7082//8816 7091//8815 +f 7057//8766 7091//8815 7071//8765 +f 7047//8754 7063//8804 7082//8816 +f 7047//8754 7082//8816 7057//8766 +f 7033//8755 7051//8805 7063//8804 +f 7033//8755 7063//8804 7047//8754 +f 7437//8602 7483//8595 7385//8600 +f 7437//8602 7385//8600 7345//8630 +f 7345//8630 7385//8600 7382//8598 +f 7345//8630 7382//8598 7341//8626 +f 7431//8597 7386//8623 7382//8598 +f 7386//8623 7341//8626 7382//8598 +f 7453//8979 7491//8698 7442//8980 +f 7463//8981 7453//8979 7442//8980 +f 7463//8981 7442//8980 7454//8982 +f 7535//8591 7463//8981 7454//8982 +f 7483//8595 7535//8591 7454//8982 +f 7483//8595 7454//8982 7433//8599 +f 7442//8980 7416//8596 7433//8599 +f 7442//8980 7433//8599 7454//8982 +f 7416//8596 7442//8980 7491//8698 +f 7416//8596 7491//8698 7431//8597 +f 7524//8613 7491//8698 7453//8979 +f 7524//8613 7453//8979 7469//8614 +f 7463//8981 7486//8621 7469//8614 +f 7463//8981 7469//8614 7453//8979 +f 7486//8621 7463//8981 7535//8591 +f 7486//8621 7535//8591 7540//8590 +f 7350//8620 7409//8622 7466//8666 +f 7357//8612 7444//8611 7406//8615 +f 7350//8620 7357//8612 7406//8615 +f 7350//8620 7406//8615 7409//8622 +f 7740//8983 7735//8984 7814//8985 +f 7740//8983 7814//8985 7823//8986 +f 7731//8987 7732//8988 7808//8989 +f 7731//8987 7808//8989 7809//8990 +f 7819//8991 7812//8992 7738//8993 +f 7819//8991 7738//8993 7737//8994 +f 7738//8993 7643//8995 7635//8996 +f 7738//8993 7635//8996 7737//8994 +f 7642//8997 7640//8998 7732//8988 +f 7642//8997 7732//8988 7731//8987 +f 7735//8984 7740//8983 7641//8999 +f 7735//8984 7641//8999 7647//9000 +f 7728//9001 7740//8983 7823//8986 +f 7728//9001 7823//8986 7815//9002 +f 7641//8999 7740//8983 7728//9001 +f 7641//8999 7728//9001 7624//9003 +f 7825//9004 7816//9005 7864//7641 +f 7825//9004 7864//7641 7862//7640 +f 7701//9006 7656//9007 7668//9008 +f 7701//9006 7668//9008 7752//9009 +f 7751//9010 7772//9011 7752//9009 +f 7751//9010 7752//9009 7668//9008 +f 7816//9005 7825//9004 7772//9011 +f 7816//9005 7772//9011 7751//9010 +f 7752//9009 7760//9012 7750//9013 +f 7752//9009 7750//9013 7701//9006 +f 7760//9012 7752//9009 7772//9011 +f 7760//9012 7772//9011 7791//9014 +f 7791//9014 7772//9011 7825//9004 +f 7791//9014 7825//9004 7830//9015 +f 7830//9015 7825//9004 7862//7640 +f 7830//9015 7862//7640 7861//7653 +f 7678//9016 7743//9017 7746//9018 +f 7678//9016 
7746//9018 7669//9019 +f 7701//9006 7750//9013 7743//9017 +f 7701//9006 7743//9017 7678//9016 +f 7755//9020 7686//9021 7669//9019 +f 7755//9020 7669//9019 7746//9018 +f 7766//9022 7734//9023 7686//9021 +f 7766//9022 7686//9021 7755//9020 +f 7795//9024 7771//9025 7734//9023 +f 7795//9024 7734//9023 7766//9022 +f 7833//9026 7827//9027 7771//9025 +f 7833//9026 7771//9025 7795//9024 +f 7853//7666 7848//7667 7827//9027 +f 7853//7666 7827//9027 7833//9026 +f 7823//8986 7879//7669 7878//7668 +f 7823//8986 7878//7668 7815//9002 +f 7814//8985 7883//7670 7879//7669 +f 7814//8985 7879//7669 7823//8986 +f 7809//8990 7808//8989 7887//7672 +f 7809//8990 7887//7672 7885//7671 +f 7889//7673 7812//8992 7819//8991 +f 7889//7673 7819//8991 7891//7674 +f 7802//9028 7807//9029 7871//7677 +f 7802//9028 7871//7677 7869//7676 +f 7716//9030 7723//9031 7807//9029 +f 7716//9030 7807//9029 7802//9028 +f 7728//9001 7725//9032 7610//9033 +f 7728//9001 7610//9033 7624//9003 +f 7725//9032 7728//9001 7815//9002 +f 7725//9032 7815//9002 7811//9034 +f 7878//7668 7876//7684 7811//9034 +f 7878//7668 7811//9034 7815//9002 +f 7643//8995 7545//9035 7525//9036 +f 7643//8995 7525//9036 7635//8996 +f 7552//9037 7640//8998 7642//8997 +f 7552//9037 7642//8997 7564//9038 +f 7641//8999 7557//9039 7568//9040 +f 7641//8999 7568//9040 7647//9000 +f 7557//9039 7641//8999 7624//9003 +f 7557//9039 7624//9003 7509//9041 +f 7485//9042 7509//9041 7624//9003 +f 7485//9042 7624//9003 7610//9033 +f 7647//9000 7568//9040 7564//9038 +f 7647//9000 7564//9038 7642//8997 +f 7735//8984 7647//9000 7642//8997 +f 7735//8984 7642//8997 7731//8987 +f 7809//8990 7814//8985 7735//8984 +f 7809//8990 7735//8984 7731//8987 +f 7885//7671 7883//7670 7814//8985 +f 7885//7671 7814//8985 7809//8990 +f 7821//9043 7895//7694 7891//7674 +f 7821//9043 7891//7674 7819//8991 +f 7733//9044 7821//9043 7819//8991 +f 7733//9044 7819//8991 7737//8994 +f 7619//9045 7733//9044 7737//8994 +f 7619//9045 7737//8994 7635//8996 +f 7495//9046 7619//9045 7635//8996 +f 7495//9046 7635//8996 7525//9036 +f 7760//9012 7763//9047 7748//9048 +f 7760//9012 7748//9048 7750//9013 +f 7791//9014 7797//9049 7763//9047 +f 7791//9014 7763//9047 7760//9012 +f 7830//9015 7832//9050 7797//9049 +f 7830//9015 7797//9049 7791//9014 +f 7861//7653 7863//7702 7832//9050 +f 7861//7653 7832//9050 7830//9015 +f 7743//9017 7741//9051 7745//9052 +f 7743//9017 7745//9052 7746//9018 +f 7750//9013 7748//9048 7741//9051 +f 7750//9013 7741//9051 7743//9017 +f 7746//9018 7745//9052 7756//9053 +f 7746//9018 7756//9053 7755//9020 +f 7755//9020 7756//9053 7767//9054 +f 7755//9020 7767//9054 7766//9022 +f 7766//9022 7767//9054 7798//9055 +f 7766//9022 7798//9055 7795//9024 +f 7795//9024 7798//9055 7834//9056 +f 7795//9024 7834//9056 7833//9026 +f 7833//9026 7834//9056 7854//7709 +f 7833//9026 7854//7709 7853//7666 +f 7763//9047 7859//7710 7748//9048 +f 7797//9049 7859//7710 7763//9047 +f 7832//9050 7859//7710 7797//9049 +f 7863//7702 7859//7710 7832//9050 +f 7741//9051 7859//7710 7745//9052 +f 7748//9048 7859//7710 7741//9051 +f 7745//9052 7859//7710 7756//9053 +f 7756//9053 7859//7710 7767//9054 +f 7767//9054 7859//7710 7798//9055 +f 7798//9055 7859//7710 7834//9056 +f 7834//9056 7859//7710 7854//7709 +f 7608//9057 7605//9058 7723//9031 +f 7608//9057 7723//9031 7716//9030 +f 7605//9058 7608//9057 7484//9059 +f 7605//9058 7484//9059 7472//9060 +f 7751//9010 7668//9008 7589//9061 +f 7751//9010 7589//9061 7615//9062 +f 7589//9061 7668//9008 7656//9007 +f 7589//9061 7656//9007 7578//9063 +f 7656//9007 7701//9006 7678//9016 +f 
7656//9007 7678//9016 7637//9064 +f 7637//9064 7678//9016 7669//9019 +f 7637//9064 7669//9019 7625//9065 +f 7625//9065 7669//9019 7686//9021 +f 7625//9065 7686//9021 7646//9066 +f 7646//9066 7686//9021 7734//9023 +f 7646//9066 7734//9023 7684//9067 +f 7771//9025 7757//9068 7684//9067 +f 7771//9025 7684//9067 7734//9023 +f 7757//9068 7771//9025 7827//9027 +f 7757//9068 7827//9027 7822//9069 +f 7822//9069 7827//9027 7848//7667 +f 7822//9069 7848//7667 7851//7724 +f 7478//9070 7479//9071 7589//9061 +f 7478//9070 7589//9061 7578//9063 +f 7479//9071 7487//9072 7615//9062 +f 7479//9071 7615//9062 7589//9061 +f 7608//9057 7613//9073 7492//9074 +f 7608//9057 7492//9074 7484//9059 +f 7613//9073 7608//9057 7716//9030 +f 7613//9073 7716//9030 7720//9075 +f 7720//9075 7716//9030 7802//9028 +f 7720//9075 7802//9028 7803//9076 +f 7803//9076 7802//9028 7869//7676 +f 7803//9076 7869//7676 7868//7732 +f 7613//9073 7614//9077 7494//9078 +f 7613//9073 7494//9078 7492//9074 +f 7614//9077 7613//9073 7720//9075 +f 7614//9077 7720//9075 7722//9079 +f 7722//9079 7720//9075 7803//9076 +f 7722//9079 7803//9076 7805//9080 +f 7805//9080 7803//9076 7868//7732 +f 7805//9080 7868//7732 7866//7737 +f 7614//9077 7615//9062 7487//9072 +f 7614//9077 7487//9072 7494//9078 +f 7751//9010 7615//9062 7614//9077 +f 7751//9010 7614//9077 7722//9079 +f 7816//9005 7751//9010 7722//9079 +f 7816//9005 7722//9079 7805//9080 +f 7864//7641 7816//9005 7805//9080 +f 7864//7641 7805//9080 7866//7737 +f 7602//9081 7475//9082 7485//9042 +f 7602//9081 7485//9042 7610//9033 +f 7725//9032 7727//9083 7602//9081 +f 7725//9032 7602//9081 7610//9033 +f 7727//9083 7725//9032 7811//9034 +f 7727//9083 7811//9034 7818//9084 +f 7876//7684 7873//7742 7818//9084 +f 7876//7684 7818//9084 7811//9034 +f 7807//9029 7818//9084 7873//7742 +f 7807//9029 7873//7742 7871//7677 +f 7723//9031 7727//9083 7818//9084 +f 7723//9031 7818//9084 7807//9029 +f 7605//9058 7602//9081 7727//9083 +f 7605//9058 7727//9083 7723//9031 +f 7475//9082 7602//9081 7605//9058 +f 7475//9082 7605//9058 7472//9060 +f 7812//8992 7889//7673 7887//7672 +f 7812//8992 7887//7672 7808//8989 +f 7732//8988 7738//8993 7812//8992 +f 7732//8988 7812//8992 7808//8989 +f 7640//8998 7643//8995 7738//8993 +f 7640//8998 7738//8993 7732//8988 +f 7552//9037 7545//9035 7643//8995 +f 7552//9037 7643//8995 7640//8998 diff --git a/samples/sample2.obj b/samples/sample2.obj new file mode 100644 index 0000000000000000000000000000000000000000..03beae3d13639afb074979a0baf22abed760e805 --- /dev/null +++ b/samples/sample2.obj @@ -0,0 +1,9196 @@ +# Blender 4.0.0 +# www.blender.org +mtllib object_merge.mtl +o default +v -0.046539 0.263607 0.401120 +v -0.056071 0.262890 0.399984 +v -0.055480 0.256008 0.406717 +v -0.045032 0.259130 0.410464 +v -0.033121 0.261151 0.409482 +v -0.053893 0.253130 0.412433 +v -0.036085 0.258579 0.417395 +v -0.046344 0.255273 0.420121 +v -0.044320 0.254272 0.429776 +v -0.031186 0.256949 0.419487 +v -0.030561 0.255708 0.421698 +v -0.031133 0.259802 0.422891 +v -0.025789 0.252585 0.420454 +v -0.027682 0.253100 0.423725 +v -0.027527 0.254102 0.425145 +v -0.055785 0.244975 0.418443 +v -0.052614 0.250519 0.420121 +v -0.047836 0.251688 0.424925 +v -0.048494 0.251705 0.429994 +v -0.053255 0.248278 0.426346 +v -0.052950 0.247079 0.427282 +v -0.047959 0.249586 0.429572 +v -0.049522 0.253356 0.433204 +v -0.046238 0.244064 0.435011 +v -0.045535 0.241930 0.432314 +v -0.044674 0.234746 0.432980 +v -0.048884 0.240798 0.430886 +v -0.055866 0.250551 0.428218 +v -0.056837 0.249403 0.424601 +v -0.051880 0.246505 
0.422003 +v -0.053371 0.243346 0.419651 +v -0.059166 0.245084 0.418753 +v -0.051701 0.240378 0.422624 +v -0.050282 0.243433 0.424809 +v -0.052305 0.243855 0.425970 +v -0.047183 0.239234 0.425890 +v -0.040273 0.280460 0.396496 +v -0.031302 0.272856 0.402864 +v -0.037955 0.264687 0.401587 +v -0.038007 0.284901 0.404236 +v -0.039720 0.288035 0.403321 +v -0.027553 0.269124 0.408158 +v -0.033708 0.274072 0.417623 +v -0.051083 0.287314 0.411217 +v -0.048064 0.282676 0.416278 +v -0.060944 0.276117 0.407946 +v -0.063230 0.280454 0.404537 +v -0.061421 0.270637 0.398619 +v -0.063699 0.258458 0.418176 +v -0.057699 0.263580 0.427655 +v -0.045631 0.269600 0.432360 +v -0.057966 0.258442 0.422980 +v -0.056383 0.260218 0.429858 +v -0.049480 0.263237 0.430530 +v -0.043719 0.266027 0.435430 +v -0.038276 0.266608 0.430061 +v -0.059913 0.254601 0.410510 +v -0.055122 0.266401 0.398251 +v -0.062810 0.254582 0.419260 +v -0.060851 0.248390 0.422243 +v -0.037921 0.257625 0.438992 +v -0.043305 0.257253 0.439916 +v -0.042000 0.253095 0.440389 +v -0.036144 0.246124 0.441202 +v -0.040001 0.245118 0.441070 +v -0.037601 0.239745 0.440182 +v -0.041188 0.243093 0.436756 +v -0.034177 0.234818 0.437544 +v -0.058071 0.245387 0.423469 +v -0.055502 0.254808 0.432732 +v -0.054190 0.249272 0.434406 +v -0.051626 0.243882 0.435432 +v -0.051667 0.242052 0.431413 +v -0.044049 0.269138 0.396630 +v -0.052082 0.274231 0.389685 +v -0.042508 0.294670 0.378366 +v -0.052249 0.290978 0.368357 +v -0.042629 0.309082 0.366755 +v -0.042191 0.319633 0.378086 +v -0.041859 0.311920 0.367070 +v -0.041815 0.317497 0.362660 +v -0.030303 0.330503 0.371955 +v -0.039723 0.329150 0.393795 +v -0.041065 0.323134 0.376943 +v -0.046660 0.333860 0.406044 +v -0.056779 0.328482 0.382835 +v -0.025004 0.329718 0.411200 +v -0.037369 0.340888 0.416768 +v -0.028832 0.324493 0.400237 +v -0.008777 0.324539 0.415365 +v -0.003089 0.338836 0.423318 +v -0.025144 0.342804 0.421107 +v -0.048545 0.322738 0.385558 +v -0.061380 0.324480 0.386313 +v -0.073675 0.323946 0.373877 +v -0.073056 0.316023 0.380114 +v -0.053322 0.299916 0.402994 +v -0.041874 0.301897 0.395810 +v -0.066477 0.296169 0.395995 +v -0.007022 0.360784 0.425591 +v -0.073171 0.312616 0.364233 +v -0.072499 0.314481 0.361040 +v -0.071867 0.332072 0.358028 +v -0.059945 0.316456 0.353867 +v -0.055309 0.309238 0.358928 +v -0.062942 0.299760 0.363471 +v -0.071931 0.301707 0.369862 +v -0.052804 0.302826 0.361228 +v -0.066998 0.286414 0.381970 +v -0.057832 0.359253 0.379536 +v -0.056572 0.336835 0.392445 +v -0.042835 0.343085 0.412856 +v -0.032689 0.356208 0.419348 +v -0.053489 0.353612 0.399853 +v -0.044166 0.367321 0.401458 +v -0.052285 0.367893 0.387478 +v -0.049251 0.375628 0.364697 +v -0.017429 0.371575 0.417952 +v -0.035532 0.260715 0.429983 +v -0.041420 0.253122 0.431724 +v -0.036073 0.255750 0.433760 +v -0.040434 0.251170 0.431780 +v -0.045041 0.253759 0.434544 +v -0.035754 0.253106 0.433691 +v -0.038740 0.243430 0.435721 +v -0.034920 0.245096 0.436993 +v -0.046856 0.253594 0.433295 +v -0.024359 0.258582 0.417541 +v -0.028261 0.257750 0.414730 +v -0.024842 0.261153 0.415724 +v -0.026430 0.263437 0.420795 +v -0.023308 0.254626 0.420788 +v -0.023945 0.251179 0.424807 +v -0.025018 0.256571 0.424818 +v -0.025914 0.248054 0.427814 +v -0.033792 0.262057 0.424764 +v -0.030057 0.257625 0.424217 +v -0.056124 0.249486 0.425903 +v -0.054311 0.247541 0.421793 +v -0.052778 0.240226 0.423670 +v -0.052766 0.241344 0.425996 +v 0.019467 0.343795 0.423168 +v 0.005330 0.374816 0.431152 +v 0.020353 0.379921 0.441463 +v 0.000886 
0.397258 0.438832 +v -0.008752 0.386033 0.429127 +v -0.023443 0.394390 0.404906 +v -0.012177 0.403981 0.422090 +v -0.009720 0.413459 0.409762 +v -0.012333 0.407743 0.400080 +v -0.028107 0.400310 0.377907 +v -0.008079 0.422033 0.429131 +v -0.010002 0.425611 0.428129 +v -0.018072 0.403473 0.375245 +v -0.019630 0.408559 0.378451 +v -0.013291 0.409437 0.373649 +v -0.011792 0.412002 0.397477 +v -0.009295 0.417119 0.408873 +v -0.000441 0.435147 0.404767 +v 0.015124 0.440848 0.394667 +v 0.014858 0.439711 0.384669 +v 0.018650 0.445917 0.390142 +v 0.016567 0.449937 0.385622 +v 0.018066 0.458874 0.382272 +v 0.017372 0.465316 0.373571 +v 0.011872 0.432209 0.363817 +v 0.017347 0.443884 0.371849 +v 0.015192 0.452031 0.358797 +v 0.016370 0.457395 0.355141 +v 0.015018 0.453808 0.350452 +v 0.016101 0.444233 0.347838 +v 0.005352 0.426631 0.369466 +v -0.000769 0.429188 0.374347 +v 0.005944 0.418425 0.339983 +v 0.015996 0.436052 0.344778 +v 0.010547 0.428625 0.325044 +v 0.012601 0.440951 0.333337 +v 0.015344 0.450293 0.336149 +v 0.014457 0.446711 0.322650 +v 0.005521 0.409835 0.319081 +v -0.007368 0.410540 0.333192 +v 0.013197 0.454486 0.309302 +v -0.000916 0.389691 0.285027 +v 0.008471 0.419590 0.292427 +v 0.010429 0.433594 0.280064 +v 0.004932 0.410510 0.270166 +v 0.006403 0.437518 0.243491 +v 0.008144 0.402981 0.258657 +v 0.002594 0.376555 0.275912 +v 0.006972 0.407044 0.246382 +v 0.007344 0.398262 0.249659 +v 0.001140 0.362725 0.265547 +v 0.008302 0.374420 0.257844 +v 0.005855 0.386973 0.228898 +v 0.005866 0.384179 0.228331 +v 0.001579 0.370903 0.226931 +v 0.002980 0.396955 0.193112 +v 0.004183 0.384396 0.205824 +v -0.001104 0.339309 0.249068 +v 0.005181 0.362298 0.213088 +v 0.006700 0.340154 0.227413 +v -0.012446 0.338058 0.254848 +v -0.026147 0.340859 0.276749 +v -0.030379 0.363592 0.299537 +v -0.044755 0.349899 0.319614 +v -0.046862 0.316983 0.297267 +v -0.026367 0.309905 0.259570 +v -0.039665 0.296760 0.287604 +v -0.050889 0.283577 0.267416 +v -0.032264 0.296951 0.244528 +v -0.031264 0.381693 0.316131 +v -0.035367 0.386357 0.360209 +v -0.015249 0.309058 0.239151 +v -0.019099 0.302783 0.236587 +v -0.053387 0.299361 0.334924 +v -0.049947 0.325509 0.356996 +v -0.052645 0.357851 0.363913 +v -0.035613 0.315421 0.368350 +v -0.047099 0.278160 0.325424 +v -0.020465 0.280859 0.384779 +v -0.014570 0.306683 0.398132 +v -0.003025 0.280576 0.398785 +v -0.005790 0.301563 0.408420 +v -0.038323 0.272649 0.344618 +v -0.011382 0.260524 0.376144 +v -0.008519 0.234184 0.362140 +v -0.021579 0.249142 0.346819 +v -0.009885 0.206481 0.350521 +v 0.018785 0.192630 0.354137 +v -0.033204 0.241605 0.342183 +v -0.010285 0.190267 0.339678 +v 0.005915 0.164914 0.350681 +v 0.019723 0.164441 0.353892 +v -0.006982 0.162923 0.327953 +v 0.013900 0.143392 0.308577 +v -0.053139 0.274806 0.301242 +v -0.057131 0.264736 0.256517 +v -0.033217 0.282734 0.231798 +v -0.004244 0.286975 0.223136 +v -0.001571 0.305567 0.231082 +v -0.001394 0.322464 0.236031 +v -0.001536 0.318130 0.212259 +v -0.001535 0.330960 0.205324 +v 0.003884 0.352336 0.192804 +v 0.004370 0.327652 0.192343 +v 0.004808 0.348827 0.204331 +v -0.013473 0.320727 0.243965 +v 0.002478 0.322063 0.189314 +v 0.003012 0.340499 0.177587 +v 0.003752 0.321188 0.182101 +v 0.000901 0.290851 0.218325 +v 0.001003 0.280966 0.210566 +v 0.003564 0.294265 0.171858 +v 0.005211 0.278802 0.189783 +v 0.001162 0.276121 0.203007 +v -0.001364 0.266263 0.199286 +v 0.003722 0.282004 0.170492 +v 0.004728 0.265009 0.179280 +v 0.000393 0.258905 0.190052 +v 0.004834 0.261920 0.179838 +v -0.000292 0.250614 
0.177301 +v 0.004461 0.262637 0.174980 +v 0.003122 0.266105 0.157795 +v 0.003853 0.253515 0.164115 +v -0.007518 0.247266 0.186393 +v -0.012026 0.255313 0.199030 +v -0.014634 0.251431 0.195988 +v -0.011791 0.240739 0.183272 +v -0.012690 0.233174 0.181291 +v -0.013267 0.247463 0.183882 +v -0.030726 0.233335 0.185916 +v -0.016462 0.242038 0.190858 +v -0.026176 0.256788 0.210225 +v -0.052971 0.231887 0.232323 +v -0.018427 0.262618 0.210896 +v -0.014098 0.276405 0.216111 +v -0.005577 0.265668 0.207561 +v -0.014471 0.268723 0.199569 +v -0.001472 0.415031 0.442626 +v -0.008303 0.410551 0.432867 +v 0.005081 0.408613 0.446659 +v 0.002983 0.410540 0.451890 +v -0.003351 0.418494 0.447338 +v -0.004408 0.428386 0.448485 +v -0.007018 0.431562 0.449797 +v -0.007833 0.427584 0.427009 +v -0.014662 0.415428 0.408565 +v -0.004466 0.434156 0.449929 +v -0.004391 0.442585 0.447184 +v -0.000418 0.454093 0.428521 +v -0.000793 0.449893 0.446112 +v -0.000189 0.457849 0.446843 +v 0.002247 0.453508 0.460221 +v 0.005730 0.455600 0.461734 +v 0.020215 0.460288 0.457505 +v 0.020495 0.455738 0.469674 +v 0.008390 0.450631 0.473964 +v 0.012183 0.446277 0.483341 +v 0.019888 0.463928 0.442760 +v 0.003937 0.449038 0.472333 +v 0.005392 0.445785 0.470339 +v 0.002643 0.444376 0.467598 +v -0.001041 0.443521 0.464493 +v 0.006034 0.439730 0.473434 +v 0.007994 0.442005 0.477181 +v 0.006921 0.444536 0.477754 +v 0.005485 0.447201 0.478373 +v 0.009520 0.444659 0.484194 +v 0.010511 0.442685 0.481646 +v 0.011807 0.439947 0.479596 +v 0.013079 0.441074 0.483579 +v 0.013414 0.442884 0.486865 +v 0.015380 0.441049 0.486967 +v 0.021058 0.439073 0.491494 +v 0.015151 0.439405 0.491930 +v 0.015532 0.436929 0.497782 +v 0.017121 0.434705 0.499387 +v 0.021266 0.434305 0.500000 +v 0.011957 0.437835 0.487512 +v 0.013765 0.433833 0.496923 +v 0.021294 0.425436 0.498341 +v 0.016386 0.425598 0.497723 +v 0.014377 0.425977 0.495468 +v 0.012721 0.426400 0.492495 +v 0.011246 0.427199 0.486972 +v 0.009883 0.426984 0.482298 +v 0.008564 0.426544 0.473183 +v 0.009909 0.425880 0.478956 +v 0.010837 0.425974 0.484980 +v 0.011625 0.426210 0.489284 +v 0.013415 0.424884 0.493035 +v 0.017250 0.422175 0.496403 +v 0.019437 0.424496 0.496354 +v 0.013667 0.424600 0.491671 +v 0.014287 0.422134 0.492132 +v 0.011201 0.425329 0.483361 +v 0.010272 0.422792 0.478891 +v 0.009474 0.428421 0.471270 +v 0.004292 0.428653 0.465408 +v 0.002978 0.429229 0.466971 +v 0.002151 0.435683 0.475751 +v 0.003178 0.438779 0.475046 +v -0.001735 0.436899 0.470028 +v -0.002132 0.433880 0.455484 +v -0.003212 0.438499 0.461915 +v 0.002596 0.428527 0.464130 +v -0.004062 0.428399 0.453685 +v 0.000472 0.419812 0.460805 +v 0.006786 0.425283 0.469369 +v 0.006586 0.412310 0.467090 +v 0.011132 0.421534 0.477637 +v 0.011051 0.412097 0.475125 +v 0.012742 0.411317 0.474236 +v 0.015580 0.409251 0.486191 +v 0.008574 0.408976 0.462946 +v 0.009260 0.406396 0.454992 +v 0.020949 0.400675 0.457748 +v 0.018134 0.408811 0.488199 +v 0.017381 0.410588 0.490394 +v 0.021259 0.410957 0.491705 +v 0.015189 0.410172 0.488276 +v 0.018304 0.417381 0.493365 +v 0.020191 0.419523 0.493695 +v 0.015548 0.418181 0.492243 +v 0.016899 0.419573 0.492331 +v 0.014594 0.418448 0.490118 +v 0.015586 0.419408 0.490910 +v 0.013930 0.418890 0.488591 +v 0.013239 0.418353 0.486274 +v 0.012538 0.419019 0.483993 +v 0.013933 0.419309 0.487210 +v 0.014048 0.420241 0.488737 +v 0.012857 0.419910 0.482872 +v 0.011890 0.419742 0.481912 +v 0.012014 0.421086 0.482464 +v 0.011585 0.420322 0.480104 +v 0.012490 0.420378 0.481134 +v 0.012137 0.421942 0.480315 +v 
0.012207 0.420853 0.479420 +v 0.009507 0.425145 0.469948 +v 0.012096 0.422231 0.476872 +v 0.012354 0.422619 0.477725 +v -0.088279 0.188053 0.327342 +v -0.063802 0.226216 0.347124 +v -0.082666 0.218707 0.302031 +v -0.071297 0.197616 0.367176 +v -0.040234 0.194022 0.371710 +v -0.034642 0.228377 0.346970 +v -0.090665 0.154382 0.349795 +v -0.079284 0.166140 0.378443 +v -0.081256 0.143555 0.383514 +v -0.093896 0.131526 0.351464 +v -0.052599 0.138820 0.387226 +v -0.045560 0.167665 0.384152 +v -0.084717 0.124585 0.378183 +v -0.048586 0.120505 0.380524 +v -0.084573 0.109654 0.368541 +v -0.048445 0.109002 0.371327 +v -0.091108 0.137129 0.320082 +v -0.084341 0.146644 0.314711 +v -0.035639 0.126126 0.367457 +v -0.032912 0.130190 0.369248 +v -0.030124 0.126133 0.332250 +v -0.026610 0.131142 0.331644 +v -0.039425 0.130148 0.307000 +v -0.027721 0.104329 0.311031 +v -0.021895 0.142797 0.305863 +v -0.018309 0.153771 0.338658 +v -0.043733 0.062529 0.287247 +v -0.039646 0.059848 0.302499 +v -0.037944 0.051926 0.323655 +v -0.034099 0.097161 0.352018 +v -0.044091 0.048289 0.287929 +v -0.043979 0.049820 0.275284 +v -0.037780 0.038123 0.297648 +v -0.046722 0.039068 0.267973 +v -0.043546 0.036434 0.285435 +v -0.045807 0.027591 0.275328 +v -0.042237 0.036763 0.319272 +v -0.040821 0.018664 0.291371 +v -0.050848 0.022895 0.332725 +v -0.057840 0.039326 0.333066 +v -0.029822 0.012815 0.310675 +v -0.042277 0.015737 0.326323 +v -0.026275 0.003828 0.328532 +v -0.047056 0.006606 0.279907 +v -0.057420 0.017401 0.346151 +v -0.065715 0.025054 0.347645 +v -0.063948 0.031632 0.338234 +v -0.077320 0.026985 0.335152 +v -0.073272 0.019974 0.345600 +v -0.071602 0.012831 0.359383 +v -0.089533 0.016174 0.341433 +v -0.085718 0.041886 0.325354 +v -0.081349 0.084900 0.348746 +v -0.092460 0.048493 0.306923 +v -0.094625 0.096294 0.335040 +v -0.095070 0.113200 0.309642 +v -0.067812 0.135045 0.298574 +v -0.068657 0.119587 0.284709 +v -0.076917 0.064424 0.284588 +v -0.041338 0.114134 0.292950 +v -0.060531 0.071970 0.277431 +v -0.061497 0.053888 0.266455 +v -0.063516 0.040589 0.261537 +v -0.075544 0.038357 0.267533 +v -0.071408 0.050609 0.270791 +v -0.090814 0.043653 0.288282 +v -0.087335 0.028442 0.289406 +v -0.094372 0.039928 0.306947 +v -0.080837 0.027448 0.279132 +v -0.094325 0.030066 0.308725 +v -0.099019 0.019464 0.303384 +v -0.102078 0.007120 0.300225 +v -0.097254 0.020023 0.320299 +v -0.089855 0.032484 0.330564 +v -0.108672 0.007744 0.312192 +v -0.108393 0.003439 0.318013 +v -0.070142 0.028231 0.268657 +v -0.072010 0.015858 0.270768 +v -0.086170 0.007503 0.278857 +v -0.104480 0.007830 0.319895 +v -0.063186 0.028952 0.269483 +v -0.063250 0.007787 0.282609 +v -0.074695 0.005620 0.278926 +v -0.093928 0.004762 0.295685 +v 0.017347 0.449213 0.393253 +v 0.019020 0.448990 0.397249 +v 0.018745 0.453571 0.390854 +v 0.016612 0.449512 0.402779 +v 0.018862 0.460071 0.394157 +v 0.019080 0.458442 0.401843 +v 0.019140 0.455986 0.403771 +v -0.000322 0.446733 0.415886 +v 0.017238 0.456282 0.411786 +v 0.016219 0.463678 0.426377 +v 0.019447 0.467696 0.422490 +v -0.018552 0.412148 0.395733 +v -0.013697 0.409953 0.394720 +v 0.019501 0.463017 0.423559 +v -0.060926 0.198777 0.241566 +v -0.073615 0.191013 0.273826 +v -0.063813 0.216731 0.253498 +v -0.072285 0.243983 0.283828 +v -0.054133 0.251838 0.330035 +v -0.046632 0.263140 0.327099 +v -0.081536 0.159662 0.293928 +v -0.070317 0.137610 0.295737 +v -0.063930 0.157709 0.263719 +v -0.031154 0.144286 0.280720 +v -0.036831 0.131953 0.299845 +v -0.046619 0.081453 0.357564 +v -0.002834 0.145292 0.105263 +v 
0.003569 0.157516 0.099305 +v 0.003457 0.157999 0.088664 +v 0.003648 0.156630 0.112921 +v -0.012603 0.151988 0.113021 +v -0.007856 0.160100 0.119000 +v -0.003693 0.167701 0.124367 +v -0.011636 0.172234 0.122128 +v -0.009439 0.177825 0.134225 +v -0.012817 0.166005 0.128241 +v -0.001982 0.193234 0.141860 +v -0.001552 0.223096 0.154197 +v -0.011501 0.226519 0.158398 +v -0.012352 0.218992 0.167728 +v -0.014126 0.232927 0.165429 +v 0.000306 0.243715 0.171300 +v -0.028469 0.219534 0.180464 +v -0.032538 0.206464 0.163613 +v -0.044524 0.181441 0.176700 +v -0.054447 0.198621 0.214790 +v -0.048938 0.165855 0.227657 +v -0.046461 0.144301 0.210369 +v -0.027104 0.155888 0.240769 +v -0.003901 0.129483 0.246895 +v -0.010236 0.153247 0.293794 +v -0.040810 0.100115 0.169112 +v -0.025742 0.131240 0.112291 +v -0.021287 0.114534 0.109142 +v -0.030523 0.192109 0.164029 +v -0.019425 0.195743 0.156794 +v -0.030325 0.173911 0.145382 +v -0.025678 0.172543 0.138884 +v -0.020560 0.163376 0.128653 +v -0.020409 0.148763 0.123036 +v -0.015883 0.199282 0.151831 +v -0.014748 0.213806 0.159158 +v -0.015420 0.143562 0.107339 +v -0.014822 0.148832 0.104382 +v -0.008553 0.144928 0.105829 +v -0.019361 0.134196 0.104542 +v -0.011505 0.137732 0.102142 +v -0.001257 0.136026 0.093463 +v 0.004304 0.139349 0.082839 +v 0.002157 0.129069 0.088524 +v 0.004602 0.133139 0.075498 +v 0.002134 0.118782 0.072090 +v 0.004968 0.135742 0.061511 +v 0.006339 0.112328 0.062297 +v -0.011075 0.107910 0.067331 +v -0.009229 0.100972 0.047513 +v 0.000657 0.104997 0.047332 +v 0.002765 0.098664 0.031213 +v 0.008328 0.117319 0.015749 +v 0.008915 0.103033 0.016692 +v 0.006357 0.097514 0.014111 +v 0.009199 0.114712 0.006211 +v 0.010129 0.101240 0.004521 +v 0.010248 0.095401 0.006800 +v -0.005265 0.084258 0.009296 +v 0.011022 0.089828 0.002286 +v -0.010240 0.082955 0.015783 +v 0.011102 0.077533 -0.040795 +v 0.015493 0.095202 -0.045571 +v 0.002699 0.073490 -0.042455 +v 0.010375 0.074734 -0.046915 +v -0.005256 0.071997 -0.030159 +v -0.015656 0.032991 -0.014606 +v -0.000044 0.063643 -0.060670 +v 0.000043 0.024641 -0.079746 +v 0.016492 0.084366 -0.047437 +v 0.016012 0.083211 -0.043621 +v 0.006285 0.059124 -0.081370 +v 0.017696 0.069702 -0.076171 +v 0.015283 0.072750 -0.063560 +v 0.022025 0.089743 -0.078962 +v 0.022001 0.084524 -0.077809 +v 0.023781 0.073562 -0.083536 +v 0.022474 0.083413 -0.079709 +v -0.029734 0.368528 0.246087 +v -0.021026 0.346664 0.266582 +v -0.018163 0.345158 0.251106 +v -0.025594 0.368951 0.254732 +v -0.018501 0.370553 0.272273 +v -0.007724 0.352190 0.261451 +v -0.025236 0.366559 0.252248 +v -0.034550 0.369683 0.229406 +v -0.007848 0.387563 0.288494 +v -0.024088 0.391168 0.275653 +v -0.033413 0.408244 0.271362 +v -0.026884 0.405843 0.285380 +v -0.026340 0.409633 0.290231 +v -0.013547 0.403372 0.312135 +v -0.007510 0.426529 0.362176 +v -0.001206 0.422998 0.359261 +v -0.006697 0.414775 0.343102 +v -0.016509 0.419690 0.319676 +v -0.022607 0.382911 0.299867 +v -0.035223 0.407325 0.264464 +v -0.025538 0.390025 0.270138 +v -0.014483 0.374565 0.278415 +v 0.003158 0.430616 0.381645 +v -0.003082 0.431798 0.393470 +v -0.005556 0.423823 0.374937 +v -0.009242 0.429356 0.377907 +v -0.005065 0.423818 0.383494 +v -0.021840 0.410981 0.353597 +v -0.011592 0.411793 0.348816 +v -0.017799 0.406780 0.349964 +v -0.010663 0.406585 0.336725 +v -0.025694 0.398610 0.347084 +v -0.020029 0.403359 0.335274 +v -0.020134 0.392555 0.319609 +v -0.034735 0.380321 0.394423 +v 0.017284 0.457680 0.371478 +v 0.017174 0.456260 0.369396 +v -0.007373 0.417112 0.358307 +v 
-0.006886 0.423942 0.358187 +v -0.010650 0.424718 0.344020 +v -0.010826 0.404771 0.323782 +v -0.016422 0.324123 0.249185 +v -0.014404 0.332382 0.251375 +v -0.021248 0.331325 0.240500 +v -0.011990 0.326931 0.246619 +v -0.020560 0.310331 0.224656 +v -0.013380 0.299270 0.232432 +v -0.010809 0.304955 0.235370 +v -0.031371 0.329973 0.384456 +v -0.051213 0.340006 0.359198 +v -0.040888 0.336741 0.364609 +v -0.010560 0.151042 0.310204 +v -0.008775 0.162270 0.326530 +v -0.022771 0.162276 0.368636 +v -0.023354 0.216472 0.346742 +v -0.014033 0.186124 0.342463 +v -0.022113 0.220667 0.346930 +v 0.012905 0.130269 0.259773 +v 0.009770 0.084446 0.200482 +v -0.002992 0.086508 0.197401 +v -0.033600 0.066022 0.114444 +v -0.016761 0.107594 0.078368 +v -0.018119 0.101554 0.073224 +v -0.028130 0.052017 0.076934 +v -0.010982 0.089333 0.040881 +v -0.024689 0.044011 0.048090 +v -0.006614 0.093543 0.037298 +v -0.003753 0.073290 -0.019618 +v 0.015554 0.015593 -0.002619 +v 0.008868 0.027375 0.068046 +v 0.002921 0.018856 -0.144304 +v -0.007111 0.019992 -0.216756 +v 0.005294 0.006963 -0.223807 +v 0.007165 0.046397 -0.134624 +v 0.022298 0.057580 -0.116043 +v 0.024590 0.055855 -0.124829 +v 0.025240 0.070916 -0.115297 +v 0.020987 0.052388 -0.138991 +v 0.024615 0.059567 -0.129071 +v 0.024771 0.064981 -0.131824 +v 0.024649 0.061043 -0.135787 +v 0.024810 0.060055 -0.143710 +v 0.025026 0.062037 -0.146195 +v -0.028062 0.020373 -0.292944 +v -0.053726 0.006610 -0.364414 +v -0.055248 0.019536 -0.364528 +v -0.036697 0.023266 -0.333398 +v -0.084982 0.009512 -0.427202 +v -0.085233 0.017122 -0.429414 +v -0.121862 0.012222 -0.494365 +v -0.125429 0.008947 -0.497474 +v -0.050060 0.020542 -0.370560 +v -0.049023 0.020312 -0.371815 +v -0.045990 0.022393 -0.364043 +v -0.043928 0.025669 -0.367427 +v -0.043770 0.021837 -0.351680 +v -0.038348 0.024409 -0.347317 +v -0.033630 0.024848 -0.336055 +v -0.029620 0.029126 -0.340655 +v -0.026241 0.027431 -0.319832 +v -0.025018 0.029017 -0.296902 +v -0.019175 0.028919 -0.303319 +v -0.015135 0.030360 -0.298681 +v -0.013109 0.030775 -0.289038 +v -0.011929 0.034132 -0.293232 +v -0.010897 0.038965 -0.294935 +v -0.009056 0.035149 -0.286185 +v -0.009668 0.031488 -0.279406 +v -0.007727 0.038671 -0.285818 +v -0.005936 0.031377 -0.274058 +v -0.014436 0.032108 -0.264445 +v -0.005204 0.033442 -0.268834 +v -0.002570 0.041956 -0.274610 +v -0.005307 0.037143 -0.238859 +v 0.001474 0.038151 -0.259857 +v 0.002913 0.038605 -0.247084 +v 0.005472 0.040021 -0.238965 +v 0.006091 0.049595 -0.256433 +v -0.003832 0.037422 -0.224986 +v 0.007421 0.045406 -0.217047 +v 0.000387 0.040591 -0.196172 +v 0.015900 0.046713 -0.193180 +v 0.017779 0.055861 -0.198976 +v 0.019686 0.062417 -0.194575 +v 0.019502 0.054582 -0.189862 +v 0.020510 0.050608 -0.182383 +v 0.019105 0.050342 -0.175637 +v 0.007222 0.042761 -0.160731 +v 0.013827 0.048038 -0.212415 +v 0.021870 0.050676 -0.163599 +v 0.025349 0.062159 -0.162569 +v -0.027626 0.141189 0.132835 +v -0.033922 0.147836 0.139222 +v -0.031627 0.161185 0.145515 +v -0.034275 0.174559 0.153094 +v -0.038569 0.178594 0.147121 +v -0.037672 0.153082 0.134943 +v -0.028310 0.150814 0.135564 +v -0.011367 0.074332 -0.005153 +v -0.012870 0.086487 -0.005013 +v -0.006959 0.082675 -0.031137 +v -0.002760 0.076756 -0.019302 +v -0.004570 0.079447 -0.009208 +v 0.002950 0.254179 0.152048 +v -0.050081 0.171149 0.237339 +v -0.031384 0.163726 0.250220 +v 0.002685 0.207591 0.135043 +v 0.002757 0.203721 0.134905 +v 0.002903 0.181453 0.129871 +v 0.002263 0.183300 0.120388 +v 0.001620 0.183400 0.106696 +v 0.002649 0.169022 
[... continuation of a Wavefront OBJ mesh asset added by this diff: several thousand vertex position records ("v x y z") followed by vertex normal records ("vn x y z"); the raw geometry data is not reproduced here ...]
-0.0928 -0.2386 0.9667 +vn -0.0382 -0.1618 0.9861 +vn -0.0445 -0.2420 0.9693 +vn -0.3308 -0.3932 0.8579 +vn -0.2256 -0.3645 0.9035 +vn -0.2665 -0.1990 0.9431 +vn 0.4586 -0.3300 0.8251 +vn -0.5065 -0.7320 0.4556 +vn -0.6287 -0.7687 0.1174 +vn -0.8759 0.3922 0.2810 +vn -0.8658 0.4990 -0.0373 +vn -0.8692 0.4943 -0.0131 +vn -0.7432 0.6600 -0.1099 +vn -0.9710 0.2265 0.0768 +vn -0.9830 0.0110 -0.1833 +vn -0.9601 0.1817 0.2127 +vn -0.9948 0.1014 0.0087 +vn 0.9638 0.2513 0.0895 +vn -0.9989 0.0419 0.0202 +vn -0.9813 0.1790 -0.0705 +vn -0.9965 0.0494 -0.0678 +vn -0.9339 0.3106 -0.1770 +vn 0.8071 0.5423 -0.2337 +vn -0.4188 0.8726 -0.2515 +vn -0.3883 0.9124 -0.1297 +vn -0.4370 0.8546 -0.2805 +vn 0.6236 0.7234 -0.2964 +vn 0.9888 0.1481 0.0175 +vn -0.9322 0.1976 0.3034 +vn -0.9625 0.2640 0.0619 +vn -0.8439 0.4123 -0.3432 +vn -0.9985 0.0533 -0.0132 +vn -0.9780 0.1627 0.1305 +vn -0.9985 0.0483 0.0243 +vn 0.9302 0.1296 -0.3433 +vn -0.9653 0.2373 0.1086 +vn 0.9512 0.3025 -0.0607 +vn -0.9991 0.0415 -0.0103 +vn -0.9961 0.0835 0.0297 +vn -0.9927 -0.0506 -0.1090 +vn 0.9525 0.1789 -0.2463 +vn -0.2138 0.6054 -0.7666 +vn -0.4246 0.6113 -0.6679 +vn -0.9814 -0.0415 -0.1874 +vn -0.9165 -0.0750 -0.3930 +vn 0.9721 0.0915 -0.2161 +vn -0.9987 0.0424 -0.0290 +vn 0.5236 0.6440 -0.5578 +vn -0.4168 0.5549 -0.7200 +vn 0.7075 -0.0114 -0.7067 +vn -0.6125 0.4451 -0.6532 +vn -0.8951 0.1412 -0.4230 +vn -0.8466 0.4700 -0.2496 +vn -0.8188 0.4439 -0.3641 +vn -0.7276 0.6538 -0.2079 +vn -0.8223 0.2859 -0.4921 +vn -0.5998 0.3798 -0.7043 +vn -0.7679 -0.6294 0.1194 +vn -0.1976 -0.7215 0.6636 +vn -0.1366 -0.5358 0.8332 +vn -0.1495 -0.3903 0.9084 +vn -0.1807 -0.6665 0.7233 +vn -0.2220 -0.3747 0.9002 +vn -0.6077 -0.1770 0.7742 +vn 0.6407 -0.3881 0.6625 +vn 0.4759 0.1532 0.8661 +vn -0.3688 -0.9073 0.2021 +vn -0.5765 -0.7861 0.2230 +vn 0.8038 -0.5947 0.0164 +vn 0.9815 0.1090 -0.1574 +vn -0.4802 -0.8696 0.1149 +vn -0.9088 0.3570 0.2161 +vn -0.9946 0.0881 -0.0541 +vn -0.9348 -0.2005 -0.2932 +vn -0.9860 0.1533 -0.0654 +vn -0.4676 0.4993 -0.7294 +vn -0.9353 0.3389 0.1020 +vn -0.9986 0.0509 -0.0154 +vn -0.9867 -0.1228 -0.1069 +vn 0.7548 0.3936 -0.5247 +vn 0.7738 0.4724 -0.4220 +vn -0.8850 0.4563 0.0922 +vn -0.9972 0.0237 -0.0703 +vn -0.9961 0.0751 -0.0462 +vn -0.8791 -0.2862 -0.3811 +vn -0.9891 0.0036 -0.1473 +vn -0.9844 0.0039 -0.1758 +vn -0.8242 0.4637 -0.3250 +vn -0.6648 0.6369 -0.3904 +vn -0.6869 0.4386 -0.5795 +vn -0.8724 0.2447 -0.4232 +vn 0.1271 0.7861 0.6049 +vn -0.0307 -0.7812 0.6236 +vn -0.4413 -0.4176 0.7943 +vn -0.7128 -0.4793 0.5120 +vn -0.2566 -0.8028 0.5383 +vn -0.2400 -0.7695 0.5918 +vn -0.6224 -0.4871 0.6127 +vn -0.9173 -0.2264 0.3275 +vn -0.7715 -0.5079 0.3832 +vn -0.8637 -0.4060 0.2986 +vn -0.9867 -0.1185 0.1115 +vn -0.7809 -0.6191 -0.0834 +vn 0.4583 -0.6480 0.6084 +vn 0.0119 -0.9547 0.2974 +vn 0.0904 -0.0630 0.9939 +vn -0.5494 -0.1155 0.8275 +vn -0.8838 0.1635 -0.4384 +vn -0.9724 -0.1283 0.1947 +vn -0.8721 0.0236 0.4887 +vn -0.4155 0.5944 -0.6885 +vn -0.5016 0.7479 0.4348 +vn -0.4278 -0.0600 0.9019 +vn 0.5045 0.8234 0.2598 +vn -0.9542 0.1695 0.2466 +vn 0.1049 0.7925 0.6009 +vn -0.2268 0.1783 0.9575 +vn 0.5693 -0.7901 -0.2272 +vn 0.3100 -0.1637 0.9365 +vn 0.9055 0.3257 -0.2722 +vn 0.8681 0.0790 -0.4901 +vn 0.9843 0.0571 -0.1668 +vn 0.9719 0.1947 -0.1322 +vn 0.9009 0.3504 -0.2562 +vn 0.9535 0.2605 -0.1516 +vn 0.6082 -0.2036 -0.7673 +vn 0.4594 -0.1682 0.8721 +vn 0.8878 -0.1412 0.4381 +vn -0.5810 0.6917 0.4289 +vn 0.5676 0.1531 0.8089 +vn -0.8334 0.0945 0.5445 +vn 0.1965 0.0867 0.9767 +vn -0.9717 -0.0625 0.2276 +vn -0.9778 -0.0016 
0.2093 +vn 0.4219 0.2570 0.8694 +vn 0.2120 0.3463 0.9139 +vn 0.2280 0.2895 0.9296 +vn 0.5026 0.4374 -0.7457 +vn 0.3451 -0.0353 -0.9379 +vn -0.8949 0.4331 0.1079 +vn -0.9701 -0.0942 0.2236 +vn 0.1626 -0.9867 0.0085 +vn -0.9335 -0.0894 0.3474 +vn -0.7369 0.4232 0.5271 +vn 0.6636 0.1949 0.7223 +vn 0.2738 -0.3506 -0.8956 +vn 0.0122 -0.4155 -0.9095 +vn 0.9384 0.2917 0.1854 +vn -0.9280 0.2676 0.2594 +vn -0.9838 0.1675 -0.0637 +vn -0.9182 0.2538 0.3041 +vn 0.8721 0.3469 0.3450 +vn -0.4838 0.7045 -0.5193 +vn -0.2930 0.9455 0.1419 +vn -0.4916 0.7764 0.3944 +vn -0.4960 0.5158 -0.6985 +vn -0.6614 -0.1743 -0.7295 +vn -0.9122 0.4059 -0.0562 +vn 0.9328 0.2526 -0.2572 +vn -0.9465 0.1984 -0.2544 +vn -0.9461 0.2390 -0.2184 +vn -0.9307 0.1460 -0.3354 +vn -0.9700 -0.0646 -0.2343 +vn -0.9177 0.2892 -0.2725 +vn -0.8550 0.4954 0.1537 +vn -0.5764 0.5415 0.6119 +vn -0.0488 -0.8632 -0.5025 +vn -0.8136 0.5718 0.1056 +vn -0.4585 0.4691 -0.7548 +vn -0.4290 0.4200 -0.7997 +vn 0.6062 0.2756 -0.7461 +vn -0.4153 0.5808 -0.7001 +vn -0.5488 0.3256 -0.7699 +vn -0.3092 0.0787 -0.9477 +vn -0.3667 0.6666 -0.6490 +vn -0.4338 0.8008 -0.4130 +vn -0.7223 0.5528 -0.4155 +vn -0.5388 0.2313 -0.8101 +vn -0.9865 0.0386 -0.1595 +vn -0.9754 0.0028 -0.2204 +vn -0.8273 0.1778 -0.5329 +vn -0.5106 0.5672 -0.6462 +vn -0.5056 0.6665 -0.5479 +vn -0.9933 0.0342 -0.1100 +vn -0.9621 -0.1507 -0.2272 +vn -0.9498 0.2183 -0.2243 +vn 0.7542 0.4010 -0.5200 +vn -0.9877 0.0823 -0.1326 +vn -0.9319 -0.0813 -0.3535 +vn 0.4309 0.8506 -0.3013 +vn -0.5558 0.8241 -0.1098 +vn -0.3441 0.8028 -0.4869 +vn -0.6047 0.7277 -0.3237 +vn -0.8441 0.5331 0.0576 +vn -0.3564 0.8434 -0.4019 +vn -0.5446 0.7398 -0.3952 +vn -0.5296 0.8107 -0.2497 +vn -0.5000 0.8007 -0.3298 +vn -0.5393 0.8030 -0.2537 +vn 0.6782 0.7224 0.1350 +vn -0.5153 0.8259 -0.2289 +vn -0.4147 0.8481 -0.3297 +vn -0.9807 0.1510 0.1245 +vn -0.5757 0.8043 -0.1470 +vn -0.8528 0.5143 -0.0909 +vn -0.4595 0.8295 -0.3174 +vn 0.5305 0.8370 -0.1344 +vn -0.6694 0.2346 -0.7049 +vn -0.4269 0.8035 -0.4149 +vn -0.9364 0.1218 -0.3291 +vn -0.7326 0.6204 -0.2801 +vn -0.8039 0.1955 -0.5617 +vn -0.9122 0.3584 0.1988 +vn -0.1803 -0.1864 -0.9658 +vn 0.7351 0.3606 -0.5741 +vn -0.9381 -0.0516 0.3426 +vn -0.9320 -0.3386 0.1294 +vn -0.7772 -0.6237 -0.0831 +vn -0.7564 -0.6495 -0.0768 +vn 0.7322 0.3561 -0.5806 +vn 0.6974 0.6207 -0.3582 +vn 0.7029 0.5906 -0.3964 +vn 0.5377 0.6351 -0.5545 +vn 0.7358 0.3649 -0.5705 +vn 0.7077 -0.0112 -0.7064 +vn 0.7854 0.4731 -0.3992 +vn 0.6581 0.7356 -0.1605 +vn 0.7217 0.1548 -0.6747 +vn 0.6520 0.5115 -0.5597 +vn 0.6176 0.4251 -0.6617 +vn 0.7478 0.4869 -0.4515 +vn 0.2069 0.9513 -0.2284 +vn -0.9907 -0.0741 0.1139 +vn 0.4459 0.8890 -0.1044 +vn 0.4596 0.8689 -0.1838 +vn 0.8095 0.3643 -0.4604 +vn 0.7527 0.5167 -0.4081 +vn 0.5275 0.8437 0.0997 +vn -0.3991 0.9067 -0.1366 +vn 0.8511 0.4929 -0.1808 +vn 0.8643 0.3843 -0.3244 +vn 0.8678 0.3132 -0.3857 +vn -0.9526 -0.0314 0.3025 +vn 0.8141 0.3455 -0.4668 +vn -0.9511 -0.2764 0.1380 +vn -0.8530 0.0674 0.5175 +vn -0.9452 -0.0144 0.3263 +vn -0.9244 0.0685 0.3753 +vn -0.9404 -0.3201 0.1144 +vn -0.9513 -0.0651 0.3012 +vn 0.1096 0.8051 -0.5829 +vn -0.8818 -0.3800 0.2792 +vn -0.0595 0.9901 -0.1269 +vn 0.0097 0.9350 -0.3546 +vn 0.1798 0.5685 -0.8028 +vn -0.3952 0.9102 -0.1241 +vn -0.8196 -0.5711 0.0467 +vn -0.8796 -0.3843 0.2802 +vn -0.8700 -0.4477 0.2063 +vn -0.9981 -0.0350 0.0502 +vn -0.7029 0.6637 -0.2558 +vn -0.5118 0.8361 -0.1976 +vn -0.5544 0.8070 -0.2036 +vn -0.9623 -0.0584 -0.2656 +vn -0.7959 -0.6027 -0.0575 +vn -0.8559 0.3832 0.3473 +vn 0.7602 0.4555 -0.4633 +vn 
-0.9812 -0.1930 -0.0020 +vn -0.9840 -0.1781 0.0079 +vn -0.6596 -0.6690 -0.3425 +vn 0.5957 -0.7187 -0.3588 +vn -0.2038 -0.9698 -0.1343 +vn 0.6972 -0.6299 -0.3422 +vn 0.6389 -0.0706 0.7660 +vn -0.1767 0.0183 0.9841 +vn -0.1835 -0.6679 0.7213 +vn -0.4477 -0.8358 0.3177 +vn 0.5319 -0.7108 0.4603 +vn -0.9779 0.0771 -0.1943 +vn -0.9434 0.2058 -0.2602 +vn -0.4997 -0.8662 -0.0062 +vn -0.5842 -0.8075 0.0816 +vn -0.5243 -0.8493 0.0613 +vn -0.9177 -0.3971 -0.0125 +vn -0.8165 0.5511 -0.1722 +vn -0.9745 0.1695 -0.1468 +vn -0.9734 0.2242 -0.0471 +vn -0.8765 -0.3462 0.3346 +vn 0.7476 -0.5580 -0.3603 +vn -0.7052 0.6769 0.2109 +vn -0.6149 0.7325 0.2922 +vn -0.3467 0.9259 0.1502 +vn 0.7333 0.4820 -0.4795 +vn -0.1745 0.9809 -0.0860 +vn 0.8504 0.0196 -0.5257 +vn -0.2741 0.9601 0.0548 +vn -0.5752 0.8180 0.0021 +vn -0.3250 0.9438 -0.0598 +vn -0.6818 0.7281 0.0715 +vn 0.8150 0.2106 -0.5398 +vn -0.3632 0.9285 -0.0767 +vn -0.3362 0.9415 0.0214 +vn -0.7904 0.4781 0.3831 +vn -0.8501 0.5007 0.1632 +vn -0.8094 0.4956 0.3151 +vn -0.9144 0.3809 0.1369 +vn -0.8322 0.5538 -0.0263 +vn -0.9062 0.3745 0.1964 +vn 0.8239 0.4576 -0.3344 +vn -0.7703 0.5825 -0.2595 +vn -0.6201 0.7837 0.0371 +vn -0.5515 0.7726 0.3144 +vn 0.8151 0.4887 -0.3111 +vn -0.9217 0.2905 0.2570 +vn -0.9061 0.3739 0.1978 +vn 0.8060 0.5307 -0.2621 +vn -0.7326 0.6783 0.0564 +vn -0.7761 0.6152 0.1389 +vn -0.8447 0.5323 0.0557 +vn 0.7872 0.5640 -0.2494 +vn -0.9552 0.2811 0.0926 +vn -0.9578 0.2791 -0.0683 +vn -0.7686 -0.6305 -0.1081 +vn -0.8788 -0.4543 0.1459 +vn -0.8752 -0.3395 0.3446 +vn 0.1904 0.4709 -0.8614 +vn -0.8126 -0.1268 0.5688 +vn -0.8463 -0.1394 0.5141 +vn 0.5809 0.5082 -0.6358 +vn -0.9091 -0.2710 0.3163 +vn -0.9124 -0.3379 0.2311 +vn -0.8795 -0.1185 0.4609 +vn -0.8621 -0.5064 0.0201 +vn -0.9517 -0.3023 -0.0537 +vn -0.9727 -0.1225 0.1972 +vn -0.9045 -0.4027 -0.1406 +vn -0.7670 -0.5272 -0.3658 +vn -0.9869 -0.1232 0.1039 +vn 0.7321 0.6765 0.0802 +vn -0.8169 -0.1739 -0.5499 +vn -0.9680 -0.1545 -0.1977 +vn -0.9743 -0.1337 -0.1814 +vn -0.9811 0.1495 0.1229 +vn -0.9604 0.2634 0.0908 +vn -0.9923 0.0849 -0.0901 +vn -0.9561 -0.1987 -0.2154 +vn -0.8894 -0.3956 -0.2292 +vn -0.9830 0.0847 -0.1626 +vn -0.9346 0.3557 -0.0046 +vn 0.2482 0.5869 -0.7707 +vn -0.6189 -0.5288 -0.5809 +vn -0.9310 -0.3543 -0.0875 +vn -0.9232 -0.3704 -0.1024 +vn -0.9374 -0.3233 -0.1296 +vn -0.9635 -0.1301 0.2340 +vn 0.6083 0.7586 -0.2337 +vn -0.9742 -0.1804 0.1353 +vn -0.8574 -0.2081 0.4707 +vn -0.9621 -0.2196 0.1618 +vn -0.9594 -0.2733 -0.0702 +vn -0.7492 -0.2702 -0.6046 +vn -0.2832 -0.4864 -0.8265 +vn -0.5455 -0.3212 -0.7741 +vn -0.6149 -0.0119 -0.7885 +vn -0.2411 -0.4718 -0.8481 +vn 0.5597 0.7015 -0.4412 +vn 0.1345 0.8318 -0.5385 +vn 0.7455 0.5790 -0.3301 +vn 0.7100 0.6967 -0.1030 +vn 0.4696 0.7669 -0.4375 +vn 0.2393 0.9225 -0.3030 +vn 0.6023 0.7952 0.0692 +vn 0.6152 0.7882 0.0127 +vn 0.4940 0.8223 0.2824 +vn 0.4914 0.8257 0.2768 +vn -0.8994 0.1889 0.3943 +vn 0.6070 0.6274 -0.4878 +vn -0.9578 -0.2874 0.0046 +vn -0.9702 -0.2392 0.0388 +vn -0.9014 0.2226 0.3713 +vn -0.8750 0.2451 0.4175 +vn -0.6594 0.5365 0.5266 +vn 0.4852 0.6696 -0.5624 +vn -0.9790 -0.0447 0.1990 +vn -0.9502 -0.2361 -0.2034 +vn -0.9364 -0.2911 -0.1962 +vn -0.7824 -0.4122 -0.4668 +vn 0.4855 0.6691 -0.5627 +vn 0.6060 0.2759 -0.7461 +vn 0.4991 0.4151 -0.7606 +vn 0.4286 0.6434 -0.6343 +vn -0.9316 0.0649 0.3577 +vn 0.4977 0.5156 -0.6975 +vn -0.9155 -0.3557 0.1880 +vn -0.9361 -0.2524 0.2448 +vn -0.7663 -0.6422 0.0164 +vn -0.8602 -0.4654 -0.2082 +vn 0.6520 0.7359 -0.1827 +vn -0.6648 -0.2683 -0.6972 +vn -0.8541 -0.4697 -0.2235 
+vn 0.4601 0.7238 0.5142 +vn 0.6575 0.7289 -0.1910 +vn 0.6254 0.7767 0.0754 +vn -0.9970 -0.0507 0.0582 +vn -0.9807 0.0872 -0.1748 +vn -0.8223 0.5137 -0.2450 +vn -0.8954 0.4248 0.1334 +vn -0.5470 0.7897 -0.2778 +vn -0.6380 0.7324 -0.2378 +vn -0.9857 0.1586 -0.0574 +vn -0.9309 -0.3621 -0.0490 +vn 0.7067 0.6064 -0.3645 +vn -0.8652 -0.1893 -0.4644 +vn -0.9568 -0.2890 0.0324 +vn -0.9834 -0.0567 -0.1724 +vn -0.9888 -0.0879 -0.1204 +vn -0.9934 -0.0309 0.1101 +vn -0.9102 0.0062 0.4141 +vn 0.4006 -0.2665 0.8766 +vn -0.0671 -0.1301 0.9892 +vn 0.0881 0.3205 0.9431 +vn -0.0743 -0.1009 0.9921 +vn 0.1172 -0.9926 -0.0315 +vn 0.1722 -0.9849 0.0197 +vn 0.0449 -0.9983 -0.0369 +vn 0.1013 -0.9672 -0.2330 +vn -0.0663 -0.9971 0.0381 +vn -0.0177 -0.9966 -0.0810 +vn -0.1562 -0.9609 -0.2286 +vn 0.0225 -0.9845 -0.1737 +vn -0.9249 0.3764 0.0546 +vn 0.5123 0.6841 0.5191 +vn -0.4450 0.8933 0.0626 +vn 0.5123 0.6842 0.5191 +vn 0.5123 0.6841 0.5192 +vn -0.8033 0.3122 0.5072 +vn -0.8206 0.2714 0.5029 +vn -0.7809 0.4005 0.4793 +vn -0.7547 0.4366 0.4898 +vn -0.7493 0.3174 0.5813 +vn -0.1729 0.9684 0.1797 +vn 0.4075 0.9032 0.1349 +vn 0.1173 0.9861 0.1176 +vn 0.3134 0.9345 -0.1688 +vn -0.3757 0.6586 -0.6520 +vn 0.0756 0.9797 -0.1857 +vn -0.1020 0.9711 0.2157 +vn 0.1211 0.9574 0.2622 +vn 0.6367 0.7542 0.1608 +vn 0.3281 0.1454 0.9334 +vn -0.2471 0.3052 0.9197 +vn 0.3862 -0.9053 0.1768 +vn -0.2863 -0.9115 0.2954 +vn 0.4536 -0.5582 0.6947 +vn 0.3923 -0.1301 0.9106 +vn 0.5146 -0.0012 0.8575 +vn 0.6765 -0.6834 -0.2745 +vn 0.9176 0.1588 -0.3644 +vn 0.9006 0.1503 -0.4079 +vn 0.8957 -0.4446 -0.0075 +vn 0.4377 -0.3153 -0.8420 +vn 0.1054 -0.6248 -0.7736 +vn -0.0158 -0.7808 -0.6246 +vn 0.2227 0.2301 -0.9473 +vn -0.8380 0.3457 0.4221 +vn -0.8203 0.2716 0.5033 +vn -0.8012 0.0762 0.5935 +vn -0.4431 0.3766 -0.8135 +vn 0.9397 0.0488 -0.3384 +vn 0.0849 0.6521 -0.7534 +vn -0.0047 0.7585 -0.6517 +vn -0.1247 0.6506 -0.7491 +vn -0.0543 0.3943 -0.9174 +vn 0.7592 0.0692 0.6471 +vn 0.3291 0.0980 -0.9392 +vn 0.0850 0.6521 -0.7534 +vn -0.4068 0.8819 -0.2382 +vn -0.4783 0.3555 0.8030 +vn -0.4067 0.8819 -0.2383 +vn -0.4065 0.8819 -0.2386 +vn 0.7769 0.6292 -0.0209 +vn 0.8522 0.4769 -0.2152 +vn 0.7999 0.6002 0.0034 +vn 0.6806 0.7191 0.1399 +vn 0.7264 0.6708 -0.1496 +vn 0.7120 0.6953 -0.0978 +vn 0.5823 0.7102 0.3957 +vn 0.5112 0.2576 -0.8199 +vn 0.2613 0.2746 0.9254 +vn 0.6856 0.3951 0.6115 +vn -0.7090 0.4494 0.5434 +vn -0.3528 0.1536 0.9230 +vn -0.9498 0.2888 0.1206 +vn -0.9622 0.2707 0.0300 +vn -0.5793 -0.7169 0.3880 +vn -0.6225 -0.6617 0.4178 +vn -0.7852 -0.3333 0.5219 +vn 0.2705 -0.4667 0.8420 +vn 0.9872 -0.1103 -0.1156 +vn -0.2620 -0.6121 -0.7462 +vn -0.2534 -0.6087 -0.7518 +vn -0.2876 -0.5958 -0.7499 +vn 0.7708 -0.3256 -0.5476 +vn 0.0002 -0.7907 -0.6121 +vn 0.7972 -0.5194 0.3078 +vn 0.9529 -0.2285 0.1992 +vn 0.8552 -0.4863 -0.1792 +vn 0.8603 -0.3684 -0.3524 +vn -0.8362 -0.4326 0.3372 +vn 0.9581 -0.2503 0.1391 +vn -0.0186 -0.7728 -0.6344 +vn -0.5711 -0.6026 0.5574 +vn -0.2764 -0.9261 0.2569 +vn 0.7476 0.0423 0.6628 +vn -0.2587 0.2900 0.9214 +vn 0.5031 -0.8429 0.1911 +vn -0.8510 0.5119 -0.1175 +vn 0.7833 -0.3375 0.5221 +vn -0.7103 0.4911 0.5043 +vn -0.4530 0.6213 0.6393 +vn 0.7767 -0.4661 0.4236 +vn -0.3645 0.5409 0.7580 +vn 0.6133 -0.3600 0.7030 +vn -0.8106 -0.3144 -0.4941 +vn 0.6429 -0.4641 -0.6094 +vn 0.5534 -0.7843 0.2804 +vn -0.4527 0.8598 0.2365 +vn -0.6922 -0.5173 -0.5033 +vn 0.5356 -0.8425 0.0582 +vn -0.3291 -0.2515 -0.9102 +vn 0.9972 0.0051 0.0748 +vn 0.5909 0.2322 0.7726 +vn 0.5686 -0.3954 0.7214 +vn -0.7576 -0.3066 -0.5763 +vn -0.5866 0.0454 
-0.8086 +vn -0.5825 -0.0046 -0.8128 +vn -0.3248 -0.0628 -0.9437 +vn -0.2142 0.2786 0.9362 +vn 0.3639 0.3575 0.8601 +vn 0.2045 0.4911 0.8467 +vn 0.1654 -0.2320 0.9586 +vn 0.4233 -0.3922 0.8167 +vn 0.1145 -0.2395 0.9641 +vn 0.1095 -0.1576 0.9814 +vn 0.3505 -0.1821 0.9187 +vn 0.2672 -0.3605 0.8936 +vn 0.8977 0.3923 0.2004 +vn 0.8602 0.4965 -0.1168 +vn -0.2090 0.8270 -0.5220 +vn 0.9866 0.0930 -0.1341 +vn 0.9272 0.0407 -0.3724 +vn 0.9846 0.1297 -0.1169 +vn 0.9443 0.3143 0.0973 +vn 0.9572 0.2840 -0.0559 +vn 0.9697 0.1570 0.1874 +vn 0.3127 0.8630 -0.3968 +vn 0.4950 0.8269 -0.2667 +vn 0.9558 -0.0302 -0.2925 +vn 0.9798 0.0946 -0.1761 +vn 0.9624 0.0964 -0.2538 +vn 0.9702 0.1233 -0.2085 +vn 0.9673 0.1728 -0.1854 +vn 0.9696 0.2271 -0.0908 +vn 0.9838 0.0730 -0.1639 +vn 0.9698 0.2432 -0.0207 +vn 0.9564 0.1182 -0.2671 +vn 0.8550 -0.0297 -0.5178 +vn 0.4916 0.4675 -0.7347 +vn 0.3345 0.5158 -0.7887 +vn 0.7287 0.3180 -0.6066 +vn 0.4488 0.4966 -0.7429 +vn 0.4642 0.3685 -0.8054 +vn 0.4736 0.4024 -0.7834 +vn 0.9916 0.0151 -0.1284 +vn 0.7795 -0.1524 0.6076 +vn 0.9319 -0.3070 0.1932 +vn 0.7970 -0.5938 0.1109 +vn 0.4936 -0.4173 0.7630 +vn -0.9858 -0.1596 -0.0513 +vn 0.6831 -0.6850 0.2533 +vn 0.6997 -0.6853 0.2021 +vn 0.7030 -0.6446 0.3005 +vn 0.9585 0.1942 -0.2087 +vn 0.7944 0.5990 -0.1008 +vn 0.9892 0.0904 -0.1156 +vn 0.9500 -0.0885 -0.2993 +vn 0.9205 0.2269 -0.3181 +vn 0.9048 -0.2738 -0.3261 +vn 0.7434 0.6659 0.0620 +vn 0.9743 0.0653 -0.2156 +vn 0.8623 0.5059 -0.0214 +vn 0.8631 -0.2738 -0.4244 +vn 0.9746 0.1167 -0.1914 +vn 0.8996 0.4332 0.0550 +vn 0.8250 -0.2502 -0.5067 +vn 0.9558 0.0446 -0.2907 +vn 0.9470 0.0446 -0.3181 +vn 0.7475 0.4969 -0.4408 +vn 0.7851 0.2568 -0.5636 +vn 0.5776 0.4648 -0.6711 +vn 0.9168 0.3195 -0.2395 +vn 0.8841 0.1229 -0.4509 +vn 0.6305 0.6811 0.3721 +vn 0.0555 -0.2310 -0.9714 +vn -0.1696 0.2449 -0.9546 +vn 0.5644 0.4916 -0.6632 +vn -0.2139 -0.2131 -0.9533 +vn -0.6778 0.3316 -0.6562 +vn 0.2189 0.9370 0.2721 +vn 0.8191 -0.2067 0.5351 +vn 0.0334 0.9129 0.4069 +vn 0.2210 -0.5253 0.8217 +vn 0.2748 -0.7659 0.5813 +vn 0.2896 -0.7988 0.5273 +vn 0.6536 -0.4781 0.5867 +vn 0.8735 -0.3731 0.3126 +vn 0.9331 -0.2134 0.2896 +vn 0.7935 -0.4970 0.3512 +vn 0.8808 -0.3938 0.2630 +vn 0.8870 -0.4615 -0.0149 +vn -0.4532 -0.6099 0.6501 +vn 0.8628 0.1757 -0.4740 +vn 0.5844 -0.1075 0.8043 +vn 0.8421 0.3781 0.3845 +vn 0.6954 0.7182 -0.0227 +vn 0.9813 -0.1146 0.1547 +vn 0.5086 0.7548 0.4143 +vn 0.8909 0.0359 0.4527 +vn -0.5049 0.8164 0.2805 +vn 0.4652 -0.0537 0.8836 +vn 0.9256 0.2036 0.3190 +vn 0.9610 0.1828 0.2074 +vn 0.0373 0.9694 0.2427 +vn 0.9417 0.3117 0.1263 +vn -0.4142 0.8975 0.1516 +vn 0.5400 0.4980 0.6785 +vn -0.8649 -0.4596 -0.2018 +vn -0.6529 -0.7538 -0.0743 +vn -0.8533 0.1095 -0.5098 +vn -0.9781 -0.0750 -0.1939 +vn -0.9726 0.0739 -0.2205 +vn -0.9579 0.2058 -0.2001 +vn -0.5857 -0.2016 -0.7851 +vn 0.0198 -0.9988 -0.0445 +vn -0.5117 -0.1572 0.8447 +vn -0.9275 -0.1298 0.3506 +vn 0.5563 0.6994 0.4488 +vn 0.7771 0.1034 0.6208 +vn 0.9574 0.0074 0.2888 +vn 0.9499 -0.0533 0.3080 +vn -0.4024 0.2316 -0.8857 +vn -0.6364 0.2244 -0.7380 +vn -0.5150 0.3414 -0.7863 +vn -0.3405 -0.0941 -0.9355 +vn 0.7606 -0.0294 -0.6485 +vn -0.0443 -0.6523 -0.7567 +vn 0.8969 -0.0807 0.4348 +vn 0.6867 0.4304 0.5858 +vn -0.7275 0.2124 0.6524 +vn -0.0444 -0.9970 -0.0629 +vn 0.9535 0.2512 -0.1667 +vn 0.9781 0.1383 -0.1558 +vn 0.9500 0.2910 0.1134 +vn 0.9132 0.3210 0.2511 +vn 0.9303 0.3549 0.0924 +vn 0.4520 0.7108 -0.5390 +vn 0.4597 0.5225 -0.7181 +vn 0.6334 -0.1653 -0.7559 +vn 0.2621 0.9261 -0.2714 +vn 0.8717 0.1707 -0.4594 +vn 0.8427 
0.3391 -0.4182 +vn 0.8435 0.5363 0.0302 +vn 0.6426 0.5700 0.5121 +vn -0.0400 -0.8823 -0.4689 +vn 0.7630 0.6396 0.0929 +vn 0.8088 0.5806 0.0936 +vn 0.9312 0.2417 -0.2729 +vn 0.8293 0.0081 -0.5588 +vn -0.7197 0.2475 -0.6487 +vn 0.7477 0.3090 -0.5877 +vn 0.9001 0.4078 -0.1532 +vn 0.9200 -0.1657 -0.3552 +vn 0.3197 0.8187 -0.4770 +vn 0.8796 0.0799 -0.4690 +vn 0.9531 0.0717 -0.2942 +vn 0.9234 -0.2104 0.3210 +vn 0.5882 0.4279 -0.6863 +vn -0.2753 0.5312 -0.8013 +vn 0.7377 0.1835 -0.6497 +vn 0.6808 0.2827 -0.6757 +vn 0.7590 0.2056 -0.6178 +vn 0.7387 0.3508 -0.5756 +vn 0.4582 0.3762 -0.8053 +vn 0.4661 0.5915 -0.6579 +vn 0.9427 0.2857 -0.1724 +vn 0.9887 0.1146 -0.0963 +vn 0.9740 -0.0711 -0.2151 +vn 0.9799 0.1952 -0.0410 +vn 0.4790 0.8119 -0.3338 +vn 0.4207 0.8555 -0.3018 +vn 0.3342 0.8234 -0.4586 +vn 0.4295 0.8525 -0.2978 +vn 0.9417 0.3349 0.0331 +vn 0.9741 0.2138 -0.0739 +vn 0.8546 0.5085 -0.1052 +vn 0.9782 0.2043 0.0360 +vn 0.9769 0.0414 -0.2098 +vn 0.5350 0.8074 -0.2488 +vn 0.8179 0.3774 -0.4342 +vn 0.5240 0.7831 -0.3350 +vn 0.4840 0.8613 -0.1545 +vn 0.3359 0.8805 -0.3346 +vn 0.4860 0.8604 -0.1531 +vn 0.4650 0.8534 -0.2355 +vn 0.4697 0.8730 -0.1311 +vn -0.7680 0.6405 -0.0041 +vn 0.9176 0.2538 0.3058 +vn 0.3743 0.8920 -0.2535 +vn 0.5006 0.8649 -0.0365 +vn 0.9927 -0.1084 -0.0526 +vn 0.9278 0.3562 -0.1109 +vn 0.8111 0.5829 -0.0491 +vn 0.9996 -0.0233 0.0185 +vn 0.8287 0.5240 0.1967 +vn 0.9326 0.3608 0.0060 +vn 0.9382 -0.3289 0.1077 +vn 0.9199 -0.3733 0.1204 +vn 0.7452 0.6668 0.0092 +vn 0.5599 0.7708 0.3038 +vn 0.7953 0.5953 -0.1142 +vn 0.9794 0.1958 0.0491 +vn 0.6704 0.7307 -0.1291 +vn 0.8203 0.3478 -0.4540 +vn 0.9215 0.3122 -0.2308 +vn 0.6884 0.7252 0.0114 +vn 0.4559 -0.1619 -0.8752 +vn 0.7150 0.4555 0.5303 +vn 0.9531 0.1377 0.2694 +vn 0.9486 -0.3165 -0.0014 +vn 0.9797 -0.0128 0.1999 +vn -0.8258 0.3302 -0.4572 +vn 0.7818 -0.5911 -0.1985 +vn 0.9539 -0.3000 -0.0108 +vn 0.9972 -0.0578 0.0470 +vn 0.9732 0.0427 0.2261 +vn 0.7632 -0.6178 -0.1892 +vn -0.8249 0.3291 -0.4596 +vn -0.6399 0.6151 -0.4607 +vn -0.7768 0.5634 -0.2815 +vn -0.7667 0.5926 -0.2469 +vn -0.8262 0.3328 -0.4546 +vn -0.7041 0.7077 -0.0588 +vn -0.8543 0.4396 -0.2772 +vn -0.8029 -0.0426 -0.5946 +vn -0.8290 0.4582 -0.3206 +vn -0.7398 0.4151 -0.5295 +vn -0.7521 0.5152 -0.4110 +vn -0.8372 0.1691 -0.5202 +vn -0.7449 0.6587 -0.1059 +vn -0.8268 0.4846 -0.2855 +vn -0.7407 0.6007 -0.3010 +vn 0.9545 -0.2978 -0.0138 +vn -0.7719 0.6323 -0.0654 +vn -0.7588 0.6182 -0.2051 +vn -0.2254 0.9530 -0.2025 +vn -0.4758 0.8691 -0.1352 +vn -0.4549 0.8885 -0.0603 +vn 0.9973 -0.0711 0.0202 +vn -0.7960 0.5100 -0.3261 +vn -0.8571 0.3561 -0.3723 +vn 0.3879 0.9054 -0.1725 +vn -0.5137 0.8454 0.1464 +vn -0.8517 0.5189 -0.0734 +vn 0.9801 -0.0500 0.1922 +vn -0.9268 0.2762 -0.2545 +vn -0.9740 0.2081 -0.0895 +vn -0.9142 0.2242 -0.3377 +vn 0.9479 -0.3099 -0.0744 +vn 0.9923 -0.0573 0.1096 +vn 0.9801 -0.1629 0.1138 +vn 0.9765 -0.1246 -0.1759 +vn 0.9338 -0.3156 -0.1687 +vn 0.7775 -0.5887 -0.2211 +vn 0.0570 0.9888 -0.1379 +vn 0.9047 -0.3827 0.1873 +vn -0.1623 0.8025 -0.5742 +vn -0.0397 0.9346 -0.3536 +vn -0.2491 0.5683 -0.7842 +vn 0.3836 0.9088 -0.1640 +vn 0.8229 -0.5674 -0.0304 +vn 0.8877 -0.4436 0.1236 +vn 0.9039 -0.3800 0.1962 +vn 0.3072 0.8756 -0.3729 +vn 0.9985 -0.0317 -0.0448 +vn 0.6785 0.6586 -0.3253 +vn 0.4127 0.8746 -0.2545 +vn 0.5294 0.7986 -0.2864 +vn 0.9286 -0.0519 -0.3674 +vn 0.9787 0.0651 -0.1947 +vn 0.9839 0.1587 -0.0819 +vn 0.9688 -0.2471 -0.0172 +vn 0.9910 0.0749 -0.1111 +vn 0.9981 -0.0613 0.0076 +vn 0.9292 -0.3604 -0.0813 +vn 0.9811 -0.1367 -0.1370 +vn 0.6298 
-0.6418 -0.4375 +vn 0.9775 -0.1518 -0.1463 +vn 0.8807 0.4199 0.2193 +vn -0.6841 -0.6898 -0.2369 +vn -0.7966 -0.5584 -0.2313 +vn 0.5410 -0.8368 0.0841 +vn -0.5985 0.0898 0.7960 +vn 0.3532 -0.0076 0.9355 +vn 0.5648 -0.7702 0.2964 +vn 0.2827 -0.8895 0.3590 +vn 0.3666 -0.7744 0.5156 +vn -0.5625 0.7884 0.2490 +vn 0.9363 -0.3335 0.1096 +vn 0.9257 0.3084 -0.2189 +vn 0.9091 0.4109 0.0687 +vn 0.7120 0.6975 -0.0816 +vn 0.9469 0.3120 -0.0781 +vn 0.5687 -0.8064 0.1621 +vn 0.5430 -0.8084 0.2270 +vn 0.5529 -0.8179 0.1593 +vn 0.5450 -0.8375 0.0405 +vn 0.7516 0.6104 -0.2501 +vn 0.6447 0.7244 -0.2441 +vn 0.7030 0.6843 -0.1936 +vn 0.9644 0.0485 0.2599 +vn 0.9802 0.0956 0.1733 +vn 0.9723 0.2336 -0.0112 +vn 0.9270 0.3632 -0.0939 +vn 0.9900 0.1411 0.0048 +vn 0.9247 0.3768 0.0548 +vn 0.9710 0.2337 -0.0512 +vn 0.7309 0.6692 0.1340 +vn 0.9263 0.3707 0.0676 +vn 0.8941 0.4264 0.1366 +vn 0.9756 0.1922 -0.1064 +vn 0.9877 0.1536 -0.0283 +vn 0.9861 0.1647 -0.0239 +vn 0.7266 -0.2688 -0.6324 +vn 0.8926 -0.1967 -0.4057 +vn 0.7114 0.5826 -0.3931 +vn 0.8361 -0.1030 -0.5387 +vn 0.7995 -0.5797 -0.1574 +vn -0.2972 0.3822 -0.8750 +vn 0.9268 -0.2316 0.2957 +vn 0.9056 -0.4143 0.0914 +vn 0.8768 -0.0531 0.4779 +vn 0.9133 -0.3315 0.2365 +vn -0.1032 0.2663 -0.9584 +vn 0.9237 -0.2116 0.3194 +vn 0.9234 -0.2216 0.3134 +vn -0.6806 0.4655 -0.5657 +vn 0.9400 -0.0796 0.3317 +vn 0.9480 -0.2939 0.1224 +vn 0.9563 -0.2218 0.1902 +vn 0.8870 -0.4547 -0.0805 +vn 0.9723 -0.1917 0.1339 +vn 0.8754 -0.4348 -0.2115 +vn 0.9541 -0.2973 0.0353 +vn 0.9234 -0.0121 0.3836 +vn 0.9542 -0.0109 0.2990 +vn 0.9997 -0.0223 0.0127 +vn 0.8872 0.3701 0.2756 +vn 0.9174 0.2536 0.3068 +vn 0.9979 -0.0643 -0.0064 +vn 0.9640 0.1262 -0.2342 +vn 0.9218 -0.1599 -0.3532 +vn 0.9129 0.2711 -0.3053 +vn 0.5664 0.4740 -0.6742 +vn 0.8473 0.5294 0.0422 +vn 0.9019 0.4192 -0.1039 +vn 0.8754 0.4479 -0.1816 +vn 0.9417 0.2482 -0.2271 +vn 0.8792 -0.0980 -0.4662 +vn 0.7545 -0.4653 -0.4628 +vn 0.9596 -0.2758 0.0555 +vn 0.9615 -0.2646 0.0740 +vn -0.6054 0.7239 -0.3309 +vn 0.9202 -0.0780 0.3835 +vn 0.9633 -0.2669 0.0290 +vn 0.9844 -0.1724 0.0343 +vn 0.9607 -0.1121 0.2538 +vn 0.9679 -0.0798 0.2382 +vn 0.9706 -0.0908 0.2229 +vn 0.9701 -0.0366 0.2400 +vn 0.9043 -0.1070 0.4132 +vn 0.6935 0.0458 -0.7190 +vn 0.8246 -0.0931 0.5579 +vn 0.8122 -0.2155 -0.5421 +vn 0.6770 0.0371 -0.7351 +vn 0.6225 -0.2805 -0.7306 +vn 0.3826 -0.4597 -0.8015 +vn 0.3427 -0.4481 -0.8257 +vn -0.6583 0.7524 0.0225 +vn -0.6803 0.7301 -0.0645 +vn -0.5625 0.7885 0.2488 +vn 0.9812 -0.1859 0.0527 +vn 0.5153 -0.6942 -0.5025 +vn -0.6389 0.6021 -0.4789 +vn 0.9692 -0.2463 0.0009 +vn 0.8825 0.2398 0.4046 +vn 0.9358 0.2616 0.2362 +vn 0.9159 0.2822 0.2856 +vn 0.9986 -0.0030 0.0536 +vn 0.9089 -0.2522 -0.3320 +vn 0.9193 -0.1967 -0.3408 +vn 0.7223 -0.3805 -0.5775 +vn 0.9405 -0.2951 0.1682 +vn -0.5557 0.4632 -0.6904 +vn 0.9191 0.1528 0.3631 +vn 0.9600 -0.1673 0.2245 +vn 0.7974 -0.6034 0.0043 +vn 0.9365 -0.3474 0.0468 +vn 0.9372 -0.3431 0.0632 +vn 0.9352 -0.3485 0.0631 +vn -0.6693 0.6313 -0.3918 +vn -0.6703 0.6550 0.3488 +vn -0.7141 0.6897 -0.1197 +vn -0.6671 0.6387 -0.3834 +vn -0.8154 0.5746 -0.0707 +vn -0.6979 0.6744 -0.2411 +vn 0.9200 -0.2565 0.2962 +vn 0.9208 -0.3076 0.2397 +vn 0.9340 -0.2867 0.2134 +vn -0.7099 0.6348 -0.3051 +vn 0.9955 0.0888 -0.0336 +vn 0.9945 -0.0727 -0.0753 +vn 0.9938 -0.1040 -0.0385 +vn -0.1046 -0.9942 -0.0254 +vn -0.0305 -0.9985 0.0465 +vn 0.9771 -0.0404 0.2087 +vn 0.8689 0.0118 0.4948 +vn -0.4806 -0.2499 0.8406 +vn 0.0334 0.4110 0.9110 +vn 0.0151 0.4092 0.9123 +vn 0.0242 0.3555 0.9344 +vn 0.0519 0.3279 0.9433 +vn 
0.1573 -0.1045 0.9820 +vn 0.0539 -0.0932 0.9942 +vn -0.0335 -0.9983 0.0469 +vn 0.0521 -0.9986 0.0070 +vn -0.0376 -0.9808 0.1912 +vn -0.1796 -0.9836 0.0183 +vn -0.0424 -0.9971 -0.0629 +vn 0.0355 -0.9985 -0.0406 +vn -0.0289 -0.9984 -0.0480 +vn -0.1663 -0.9858 0.0216 +vn -0.1502 -0.9884 -0.0219 +vn 0.0181 -0.9984 -0.0543 +vn 0.0658 -0.9925 0.1027 +vn -0.1027 -0.9724 -0.2096 +vn 0.0148 -0.9983 -0.0567 +vn -0.0197 -0.9886 -0.1492 +vn 0.1550 -0.9711 -0.1813 +vn 0.8367 0.4178 -0.3540 +vn 0.9929 0.0164 -0.1177 +vn 0.9888 0.0103 -0.1490 +vn 0.9783 -0.0404 -0.2032 +vn 0.9307 -0.0577 -0.3612 +vn 0.9371 -0.1172 -0.3289 +vn 0.8671 0.0207 -0.4977 +vn 0.7445 0.1857 -0.6413 +vn 0.7594 0.4776 -0.4418 +vn 0.9058 -0.1569 -0.3936 +vn -0.5002 0.6770 0.5399 +vn -0.5004 0.6770 0.5397 +vn -0.5001 0.6771 0.5398 +vn 0.4347 0.8995 0.0446 +vn 0.7679 0.4473 0.4585 +vn 0.8189 0.3236 0.4741 +vn 0.7678 0.3278 0.5505 +vn 0.7942 0.4117 0.4469 +vn 0.8367 0.2830 0.4690 +vn 0.9530 0.0865 -0.2904 +vn -0.0801 0.8328 -0.5477 +vn 0.4346 0.8995 0.0446 +vn 0.3395 0.6636 -0.6666 +vn -0.1212 0.8952 0.4288 +vn -0.6265 0.7595 0.1750 +vn -0.1120 -0.9694 -0.2184 +vn -0.1884 -0.9738 -0.1277 +vn -0.1435 -0.9896 -0.0116 +vn -0.5132 -0.8518 -0.1049 +vn -0.6775 -0.6929 -0.2468 +vn -0.1547 -0.9373 -0.3124 +vn -0.9339 0.1458 -0.3266 +vn -0.9185 0.1376 -0.3708 +vn -0.5940 -0.7231 -0.3526 +vn -0.4272 -0.6106 -0.6668 +vn -0.5005 -0.6105 -0.6139 +vn -0.7742 0.1687 -0.6100 +vn -0.4674 -0.3217 -0.8235 +vn -0.8889 -0.4572 0.0288 +vn -0.1282 -0.6263 -0.7689 +vn 0.0011 -0.7807 -0.6249 +vn -0.2644 0.2267 -0.9374 +vn 0.8363 0.2832 0.4695 +vn 0.8237 0.0875 0.5602 +vn -0.6254 0.7282 0.2803 +vn -0.2851 0.7178 0.6352 +vn -0.7632 0.5665 -0.3109 +vn -0.4301 0.7567 -0.4924 +vn 0.3994 0.5682 0.7194 +vn -0.7494 0.1282 -0.6496 +vn 0.3373 0.2300 0.9129 +vn -0.3686 0.0931 -0.9249 +vn 0.3842 0.8875 -0.2544 +vn -0.7696 0.5607 -0.3056 +vn -0.9034 0.4073 0.1340 +vn -0.8519 0.5097 -0.1204 +vn -0.5797 0.5909 -0.5611 +vn -0.8251 0.5382 -0.1719 +vn -0.7708 0.6371 0.0034 +vn -0.8488 0.5063 -0.1525 +vn -0.8454 0.3803 -0.3751 +vn -0.6755 0.5738 -0.4632 +vn -0.6862 0.7227 0.0821 +vn -0.6953 0.5899 -0.4107 +vn 0.2514 0.2858 0.9247 +vn 0.7462 0.1099 0.6566 +vn 0.9498 0.3020 0.0817 +vn 0.3881 0.1588 0.9078 +vn 0.5068 0.8561 0.1015 +vn 0.8651 0.4197 0.2748 +vn 0.8105 -0.3221 0.4892 +vn -0.2294 -0.4701 0.8523 +vn 0.8104 -0.3221 0.4894 +vn 0.8496 0.3576 0.3876 +vn 0.5055 0.3624 0.7830 +vn 0.8497 0.3576 0.3875 +vn -0.9395 0.1414 -0.3120 +vn 0.9804 0.1403 -0.1381 +vn 0.9822 0.1263 -0.1388 +vn 0.9965 0.0709 -0.0452 +vn -0.8050 -0.4281 0.4108 +vn 0.4459 -0.8582 0.2542 +vn -0.8380 -0.3402 0.4266 +vn -0.4169 -0.8629 0.2856 +vn -0.3723 0.8844 0.2814 +vn -0.4231 -0.8643 0.2720 +vn -0.3217 0.9380 0.1289 +vn -0.3208 0.9236 0.2101 +vn -0.4693 -0.8571 0.2123 +vn -0.6655 -0.6974 0.2658 +vn -0.7849 -0.5469 0.2911 +vn 0.3711 0.8896 0.2662 +vn 0.3161 0.9280 0.1970 +vn 0.3136 0.9425 0.1159 +vn 0.9629 0.1693 0.2103 +vn 0.9817 -0.0059 0.1903 +vn 0.7625 -0.4911 0.4213 +vn 0.8315 -0.1026 0.5460 +vn 0.6775 -0.6931 0.2461 +vn 0.8053 -0.5291 0.2674 +vn 0.4224 -0.8533 0.3058 +vn 0.4565 -0.8653 0.2071 +vn -0.5297 -0.8238 0.2018 +vn 0.2933 0.6073 0.7384 +vn -0.2532 0.1600 0.9541 +vn 0.8229 0.3440 0.4521 +vn -0.7922 -0.2120 0.5722 +vn 0.8170 -0.2272 0.5301 +vn 0.8031 0.2446 0.5433 +vn 0.5479 -0.8167 0.1813 +vn -0.1077 0.9541 0.2794 +vn 0.6017 -0.6119 0.5134 +vn -0.7814 0.2265 0.5815 +vn -0.4545 0.8293 0.3250 +vn 0.4558 0.8357 0.3064 +vn -0.9537 0.1637 0.2522 +vn -0.8085 -0.0939 0.5809 +vn -0.7380 -0.4913 0.4626 
+vn -0.9730 -0.0178 0.2303 +vn -0.2008 0.4973 0.8440 +vn -0.2682 0.7182 0.6421 +vt 0.624629 0.393372 +vt 0.605802 0.401761 +vt 0.619905 0.421296 +vt 0.636176 0.408077 +vt 0.604627 0.429854 +vt 0.619468 0.435234 +vt 0.585888 0.386793 +vt 0.584132 0.412343 +vt 0.600543 0.378606 +vt 0.584597 0.431346 +vt 0.569171 0.384834 +vt 0.561897 0.411198 +vt 0.575994 0.368314 +vt 0.570569 0.371320 +vt 0.574184 0.376185 +vt 0.581205 0.371444 +vt 0.574954 0.357017 +vt 0.569563 0.359943 +vt 0.566224 0.362407 +vt 0.582846 0.445167 +vt 0.595240 0.450832 +vt 0.571831 0.421046 +vt 0.557698 0.423067 +vt 0.564739 0.435701 +vt 0.553211 0.426679 +vt 0.560548 0.437851 +vt 0.556378 0.417364 +vt 0.539170 0.430844 +vt 0.539972 0.436781 +vt 0.544123 0.443247 +vt 0.925300 0.232283 +vt 0.918362 0.237996 +vt 0.938829 0.238583 +vt 0.929621 0.233549 +vt 0.528729 0.443612 +vt 0.548833 0.446865 +vt 0.568699 0.442747 +vt 0.575456 0.437968 +vt 0.590437 0.454850 +vt 0.601283 0.457411 +vt 0.576827 0.458988 +vt 0.583379 0.463666 +vt 0.582375 0.450393 +vt 0.914606 0.269304 +vt 0.911066 0.265742 +vt 0.903721 0.262721 +vt 0.902315 0.271190 +vt 0.572845 0.454672 +vt 0.926559 0.278337 +vt 0.918949 0.269579 +vt 0.914417 0.275305 +vt 0.801091 0.171180 +vt 0.826629 0.169951 +vt 0.824237 0.145008 +vt 0.808439 0.141038 +vt 0.811040 0.195452 +vt 0.803171 0.197932 +vt 0.833683 0.221383 +vt 0.846122 0.190572 +vt 0.837741 0.167356 +vt 0.816548 0.225210 +vt 0.815557 0.257530 +vt 0.831274 0.251544 +vt 0.825303 0.277083 +vt 0.871041 0.260692 +vt 0.869380 0.281805 +vt 0.875412 0.240588 +vt 0.882288 0.225785 +vt 0.874127 0.216326 +vt 0.874175 0.200909 +vt 0.880053 0.252038 +vt 0.882959 0.239023 +vt 0.881759 0.212796 +vt 0.878283 0.262650 +vt 0.899124 0.197061 +vt 0.885449 0.190306 +vt 0.899930 0.204276 +vt 0.904909 0.202520 +vt 0.906579 0.214512 +vt 0.916031 0.198985 +vt 0.915203 0.193989 +vt 0.924417 0.195328 +vt 0.921873 0.204006 +vt 0.923652 0.203118 +vt 0.932602 0.191213 +vt 0.901786 0.237762 +vt 0.907277 0.228039 +vt 0.906837 0.237923 +vt 0.924374 0.244201 +vt 0.797141 0.138022 +vt 0.762381 0.160934 +vt 0.815485 0.313884 +vt 0.780680 0.297907 +vt 0.772918 0.327861 +vt 0.786882 0.334071 +vt 0.761346 0.129540 +vt 0.747270 0.126019 +vt 0.734611 0.155499 +vt 0.726019 0.187829 +vt 0.770382 0.200209 +vt 0.729146 0.157058 +vt 0.655537 0.152427 +vt 0.712225 0.187472 +vt 0.707682 0.149459 +vt 0.668318 0.130853 +vt 0.645865 0.198388 +vt 0.650761 0.244454 +vt 0.699032 0.230433 +vt 0.616442 0.168635 +vt 0.612470 0.195979 +vt 0.648038 0.145278 +vt 0.662092 0.125706 +vt 0.093926 0.412638 +vt 0.107718 0.413375 +vt 0.110117 0.391522 +vt 0.099445 0.361528 +vt 0.577924 0.152658 +vt 0.585027 0.193999 +vt 0.613533 0.141722 +vt 0.725463 0.214676 +vt 0.719901 0.244814 +vt 0.691989 0.279662 +vt 0.721300 0.275509 +vt 0.779593 0.231335 +vt 0.776396 0.264240 +vt 0.543903 0.189758 +vt 0.717168 0.306243 +vt 0.735806 0.302565 +vt 0.679295 0.321252 +vt 0.699563 0.312507 +vt 0.673617 0.356791 +vt 0.702261 0.347807 +vt 0.728435 0.340603 +vt 0.741842 0.318874 +vt 0.745120 0.336329 +vt 0.629781 0.307621 +vt 0.606295 0.270641 +vt 0.613963 0.225164 +vt 0.585471 0.223672 +vt 0.592388 0.267707 +vt 0.590001 0.303806 +vt 0.581759 0.339254 +vt 0.640586 0.333382 +vt 0.546529 0.231563 +vt 0.551635 0.291703 +vt 0.558272 0.393664 +vt 0.543690 0.398641 +vt 0.546770 0.408279 +vt 0.538736 0.399485 +vt 0.541377 0.407996 +vt 0.550439 0.415518 +vt 0.526956 0.404816 +vt 0.528879 0.410927 +vt 0.558342 0.414671 +vt 0.866581 0.161918 +vt 0.860390 0.154094 +vt 0.860997 0.163483 +vt 0.865266 0.171516 
+vt 0.884938 0.160090 +vt 0.878683 0.155575 +vt 0.877411 0.161479 +vt 0.879883 0.167213 +vt 0.893512 0.159158 +vt 0.885671 0.169013 +vt 0.873182 0.187072 +vt 0.875534 0.180437 +vt 0.879452 0.175447 +vt 0.913349 0.280254 +vt 0.134591 0.554644 +vt 0.167139 0.517364 +vt 0.130532 0.509904 +vt 0.071524 0.534492 +vt 0.228053 0.504941 +vt 0.174317 0.479176 +vt 0.202911 0.540498 +vt 0.237019 0.545867 +vt 0.249687 0.526998 +vt 0.267875 0.485866 +vt 0.227141 0.458986 +vt 0.176078 0.571313 +vt 0.201264 0.595645 +vt 0.284620 0.495486 +vt 0.264024 0.527038 +vt 0.339100 0.599536 +vt 0.323192 0.620221 +vt 0.336707 0.624137 +vt 0.348107 0.623818 +vt 0.351242 0.636740 +vt 0.361370 0.560057 +vt 0.326892 0.566655 +vt 0.362401 0.592937 +vt 0.388383 0.594297 +vt 0.398160 0.601460 +vt 0.400831 0.590837 +vt 0.402511 0.609381 +vt 0.394620 0.569516 +vt 0.390871 0.550754 +vt 0.410248 0.551775 +vt 0.405603 0.523945 +vt 0.372082 0.511097 +vt 0.429221 0.552200 +vt 0.386767 0.481797 +vt 0.414117 0.507162 +vt 0.353941 0.475909 +vt 0.348146 0.491926 +vt 0.433625 0.491181 +vt 0.456972 0.494184 +vt 0.449649 0.466363 +vt 0.400429 0.431481 +vt 0.460251 0.449921 +vt 0.406908 0.411560 +vt 0.481848 0.448843 +vt 0.469268 0.436262 +vt 0.489924 0.404476 +vt 0.488497 0.398801 +vt 0.409172 0.379307 +vt 0.435110 0.404715 +vt 0.479365 0.368920 +vt 0.526937 0.393396 +vt 0.552059 0.387030 +vt 0.522736 0.372520 +vt 0.414888 0.324852 +vt 0.393737 0.347342 +vt 0.489991 0.348031 +vt 0.497241 0.343793 +vt 0.457386 0.321584 +vt 0.344327 0.334075 +vt 0.280720 0.302216 +vt 0.269101 0.359863 +vt 0.325138 0.382834 +vt 0.345920 0.267050 +vt 0.282203 0.264351 +vt 0.354020 0.229045 +vt 0.296443 0.217375 +vt 0.271879 0.447195 +vt 0.320470 0.413267 +vt 0.246808 0.417809 +vt 0.385885 0.251751 +vt 0.381544 0.237711 +vt 0.208697 0.302587 +vt 0.191066 0.338697 +vt 0.201328 0.265321 +vt 0.169128 0.354497 +vt 0.203977 0.377338 +vt 0.156925 0.342436 +vt 0.159558 0.275973 +vt 0.098089 0.322147 +vt 0.125749 0.370624 +vt 0.062220 0.338013 +vt 0.076190 0.370286 +vt 0.181985 0.241964 +vt 0.080013 0.290356 +vt 0.115665 0.245120 +vt 0.070541 0.236749 +vt 0.131253 0.223717 +vt 0.068797 0.178478 +vt 0.077163 0.137043 +vt 0.025967 0.111563 +vt 0.014113 0.164575 +vt 0.007405 0.113541 +vt 0.071070 0.134928 +vt 0.071759 0.073333 +vt 0.233884 0.230968 +vt 0.770676 0.338733 +vt 0.826649 0.296447 +vt 0.864449 0.313934 +vt 0.831017 0.384510 +vt 0.363854 0.195756 +vt 0.295978 0.176190 +vt 0.419207 0.247838 +vt 0.396133 0.243003 +vt 0.424765 0.285735 +vt 0.464062 0.265273 +vt 0.495495 0.310705 +vt 0.485991 0.285813 +vt 0.506732 0.272141 +vt 0.507371 0.257392 +vt 0.455457 0.313989 +vt 0.542089 0.279914 +vt 0.518904 0.251327 +vt 0.433782 0.217476 +vt 0.468258 0.227401 +vt 0.418894 0.210725 +vt 0.393717 0.230143 +vt 0.440501 0.197077 +vt 0.408618 0.180946 +vt 0.479463 0.186912 +vt 0.474456 0.180136 +vt 0.449400 0.183068 +vt 0.479162 0.172130 +vt 0.447058 0.160445 +vt 0.451401 0.134919 +vt 0.472526 0.137440 +vt 0.468652 0.131230 +vt 0.459815 0.101394 +vt 0.438374 0.099291 +vt 0.475696 0.127085 +vt 0.419322 0.116497 +vt 0.421682 0.133860 +vt 0.432043 0.079477 +vt 0.462381 0.081285 +vt 0.430717 0.093547 +vt 0.418186 0.087922 +vt 0.397726 0.058713 +vt 0.431034 0.060640 +vt 0.389631 0.135652 +vt 0.311718 0.113555 +vt 0.401948 0.151075 +vt 0.417674 0.155625 +vt 0.431051 0.163801 +vt 0.149042 0.593223 +vt 0.119964 0.583045 +vt 0.116625 0.601504 +vt 0.120572 0.616400 +vt 0.145972 0.611533 +vt 0.160634 0.634245 +vt 0.208657 0.603628 +vt 0.214695 0.610043 +vt 0.165642 0.644320 +vt 0.283481 
0.602264 +vt 0.254767 0.559538 +vt 0.247768 0.550029 +vt 0.169761 0.651909 +vt 0.188959 0.666164 +vt 0.240030 0.676546 +vt 0.261502 0.649546 +vt 0.203186 0.682574 +vt 0.211826 0.706359 +vt 0.182650 0.720018 +vt 0.185008 0.733003 +vt 0.212676 0.767410 +vt 0.242347 0.742123 +vt 0.152706 0.754364 +vt 0.176816 0.784716 +vt 0.124769 0.769358 +vt 0.151134 0.740190 +vt 0.137625 0.749912 +vt 0.147425 0.730123 +vt 0.150730 0.718470 +vt 0.155988 0.705421 +vt 0.129457 0.727433 +vt 0.125041 0.719265 +vt 0.126617 0.740494 +vt 0.132013 0.745398 +vt 0.121794 0.761872 +vt 0.118856 0.754123 +vt 0.113705 0.745450 +vt 0.107153 0.733783 +vt 0.109949 0.759982 +vt 0.110725 0.770047 +vt 0.102499 0.773541 +vt 0.083151 0.777024 +vt 0.085515 0.796841 +vt 0.108915 0.798206 +vt 0.063756 0.781336 +vt 0.063124 0.800739 +vt 0.090446 0.757464 +vt 0.059129 0.772332 +vt 0.055925 0.784301 +vt 0.031258 0.764737 +vt 0.021833 0.781378 +vt 0.050013 0.796850 +vt 0.038303 0.755418 +vt 0.047016 0.745451 +vt 0.062210 0.730796 +vt 0.053933 0.735122 +vt 0.073567 0.716689 +vt 0.063912 0.722066 +vt 0.093359 0.692606 +vt 0.078560 0.705169 +vt 0.072159 0.708361 +vt 0.040333 0.744515 +vt 0.035511 0.746621 +vt 0.041630 0.739158 +vt 0.042810 0.732298 +vt 0.032990 0.736632 +vt 0.065529 0.715827 +vt 0.116029 0.679532 +vt 0.116388 0.673179 +vt 0.093963 0.684547 +vt 0.119178 0.712376 +vt 0.133615 0.701157 +vt 0.154390 0.663559 +vt 0.120229 0.667574 +vt 0.151892 0.686546 +vt 0.149097 0.644995 +vt 0.114824 0.636085 +vt 0.097435 0.662906 +vt 0.082570 0.625660 +vt 0.056572 0.638448 +vt 0.070128 0.667297 +vt 0.052937 0.632630 +vt 0.023584 0.653780 +vt 0.021253 0.626545 +vt 0.083206 0.614647 +vt 0.050506 0.594431 +vt 0.097621 0.596846 +vt 0.065912 0.573738 +vt 0.021737 0.659913 +vt 0.015413 0.655194 +vt 0.015183 0.662932 +vt 0.047893 0.674860 +vt 0.013355 0.686365 +vt 0.022326 0.687291 +vt 0.004499 0.663576 +vt 0.029220 0.685866 +vt 0.017465 0.693596 +vt 0.019713 0.692569 +vt 0.025983 0.691182 +vt 0.034813 0.684364 +vt 0.040740 0.678055 +vt 0.044604 0.679442 +vt 0.038194 0.684908 +vt 0.039185 0.683056 +vt 0.054665 0.672536 +vt 0.051713 0.675586 +vt 0.054715 0.677412 +vt 0.053166 0.676260 +vt 0.060688 0.670279 +vt 0.057680 0.674120 +vt 0.056256 0.675949 +vt 0.059576 0.674658 +vt 0.062101 0.676488 +vt 0.062655 0.673376 +vt 0.063541 0.672276 +vt 0.073608 0.670435 +vt 0.092970 0.667526 +vt 0.147020 0.045089 +vt 0.149666 0.039432 +vt 0.143242 0.044218 +vt 0.144940 0.042448 +vt 0.810478 0.502823 +vt 0.744243 0.496711 +vt 0.740663 0.449813 +vt 0.819840 0.439147 +vt 0.696940 0.489290 +vt 0.691030 0.426552 +vt 0.794984 0.550174 +vt 0.748777 0.545896 +vt 0.746766 0.566603 +vt 0.795839 0.586388 +vt 0.752800 0.587633 +vt 0.708376 0.564636 +vt 0.703259 0.533239 +vt 0.700500 0.587842 +vt 0.757534 0.610325 +vt 0.699340 0.609829 +vt 0.863735 0.556898 +vt 0.853536 0.584643 +vt 0.671148 0.586510 +vt 0.669870 0.577359 +vt 0.610713 0.608839 +vt 0.604193 0.594046 +vt 0.596925 0.678747 +vt 0.546827 0.644423 +vt 0.557814 0.573977 +vt 0.558247 0.513796 +vt 0.609381 0.542794 +vt 0.666073 0.535910 +vt 0.575648 0.778019 +vt 0.555456 0.686080 +vt 0.609760 0.774627 +vt 0.656202 0.776504 +vt 0.665932 0.655896 +vt 0.589534 0.802546 +vt 0.613296 0.820360 +vt 0.574706 0.802961 +vt 0.658740 0.820988 +vt 0.592800 0.826207 +vt 0.574965 0.821863 +vt 0.575925 0.845331 +vt 0.659503 0.859568 +vt 0.603396 0.860540 +vt 0.697979 0.855057 +vt 0.715405 0.812597 +vt 0.730744 0.839122 +vt 0.734197 0.860595 +vt 0.718585 0.870013 +vt 0.764341 0.848303 +vt 0.748540 0.865562 +vt 0.738371 0.879667 +vt 
0.719503 0.886811 +vt 0.794953 0.811514 +vt 0.770784 0.681134 +vt 0.844930 0.798119 +vt 0.812745 0.666917 +vt 0.867737 0.648121 +vt 0.907755 0.776565 +vt 0.926943 0.585529 +vt 0.935400 0.653131 +vt 0.947452 0.767056 +vt 0.991470 0.667752 +vt 0.992519 0.597052 +vt 0.990929 0.780928 +vt 0.934870 0.830920 +vt 0.932128 0.810673 +vt 0.951400 0.808651 +vt 0.955138 0.829561 +vt 0.885302 0.816099 +vt 0.915649 0.848679 +vt 0.892019 0.846080 +vt 0.848480 0.840068 +vt 0.848436 0.818432 +vt 0.861549 0.863401 +vt 0.885611 0.891177 +vt 0.869706 0.885010 +vt 0.828630 0.862656 +vt 0.796346 0.838847 +vt 0.858707 0.891621 +vt 0.851166 0.884692 +vt 0.838692 0.882949 +vt 0.958740 0.849103 +vt 0.944760 0.848298 +vt 0.937417 0.870758 +vt 0.931957 0.896661 +vt 0.909592 0.893858 +vt 0.949590 0.895813 +vt 0.172640 0.378322 +vt 0.169504 0.366471 +vt 0.160047 0.380886 +vt 0.160894 0.399577 +vt 0.311463 0.648209 +vt 0.324689 0.643737 +vt 0.332453 0.638361 +vt 0.320791 0.667960 +vt 0.336891 0.664452 +vt 0.315262 0.664262 +vt 0.301157 0.676108 +vt 0.294138 0.683469 +vt 0.266938 0.714030 +vt 0.266196 0.536909 +vt 0.261015 0.529546 +vt 0.926303 0.425935 +vt 0.888670 0.454065 +vt 0.897222 0.396420 +vt 0.741403 0.395074 +vt 0.696595 0.405223 +vt 0.888424 0.523717 +vt 0.923340 0.571692 +vt 0.938328 0.497923 +vt 0.993207 0.539767 +vt 0.992795 0.572385 +vt 0.976798 0.459703 +vt 0.698068 0.681002 +vt 0.307987 0.736367 +vt 0.310112 0.756054 +vt 0.331223 0.763704 +vt 0.325217 0.740975 +vt 0.354013 0.770779 +vt 0.367599 0.754177 +vt 0.348433 0.745228 +vt 0.375452 0.737666 +vt 0.352899 0.738045 +vt 0.388501 0.755668 +vt 0.382420 0.746656 +vt 0.401240 0.738294 +vt 0.474441 0.686309 +vt 0.469752 0.696472 +vt 0.482515 0.702349 +vt 0.486443 0.682862 +vt 0.481498 0.739658 +vt 0.511474 0.723125 +vt 0.486892 0.788645 +vt 0.555083 0.789805 +vt 0.546679 0.850362 +vt 0.563191 0.841422 +vt 0.502406 0.860998 +vt 0.521578 0.959691 +vt 0.551314 0.895693 +vt 0.587907 0.944116 +vt 0.566463 0.879182 +vt 0.416300 0.897253 +vt 0.385539 0.806013 +vt 0.346962 0.808434 +vt 0.331080 0.832524 +vt 0.468484 0.754859 +vt 0.443513 0.775267 +vt 0.454435 0.735331 +vt 0.428917 0.771777 +vt 0.439954 0.777555 +vt 0.415960 0.765410 +vt 0.419142 0.790442 +vt 0.372585 0.784504 +vt 0.392523 0.768126 +vt 0.466914 0.708076 +vt 0.446238 0.724708 +vt 0.342944 0.784312 +vt 0.334759 0.799860 +vt 0.336095 0.772007 +vt 0.342451 0.777612 +vt 0.328644 0.785063 +vt 0.306795 0.775600 +vt 0.288944 0.763269 +vt 0.292672 0.782267 +vt 0.263707 0.797755 +vt 0.249339 0.795477 +vt 0.244563 0.805219 +vt 0.257579 0.829060 +vt 0.193948 0.826124 +vt 0.199340 0.805647 +vt 0.178193 0.792513 +vt 0.174582 0.812364 +vt 0.169726 0.820835 +vt 0.159200 0.836439 +vt 0.167277 0.792066 +vt 0.156892 0.809909 +vt 0.157838 0.820045 +vt 0.076099 0.842957 +vt 0.065943 0.849812 +vt 0.075368 0.858788 +vt 0.045074 0.880780 +vt 0.004200 0.880790 +vt 0.003752 0.939497 +vt 0.110487 0.934973 +vt 0.070518 0.831399 +vt 0.064056 0.829848 +vt 0.074594 0.814505 +vt 0.037243 0.850349 +vt 0.016019 0.853347 +vt 0.042538 0.842303 +vt 0.019328 0.821893 +vt 0.018048 0.827784 +vt 0.206833 0.126436 +vt 0.207614 0.172679 +vt 0.243406 0.155669 +vt 0.245646 0.149408 +vt 0.951597 0.351084 +vt 0.994267 0.331995 +vt 0.928240 0.339818 +vt 0.993805 0.353294 +vt 0.953166 0.359515 +vt 0.994258 0.373352 +vt 0.297471 0.070077 +vt 0.259080 0.090411 +vt 0.253515 0.098979 +vt 0.297240 0.111354 +vt 0.333566 0.489049 +vt 0.318959 0.476508 +vt 0.318577 0.485593 +vt 0.332607 0.510108 +vt 0.304944 0.453907 +vt 0.299307 0.508819 +vt 0.344205 
0.472128 +vt 0.329884 0.458137 +vt 0.335612 0.433420 +vt 0.375064 0.619132 +vt 0.376773 0.614016 +vt 0.364380 0.618743 +vt 0.977174 0.989824 +vt 0.985479 0.970103 +vt 0.955894 0.970019 +vt 0.951312 0.980964 +vt 0.874004 0.971797 +vt 0.885232 0.973364 +vt 0.888154 0.992461 +vt 0.890701 0.971768 +vt 0.674238 0.104875 +vt 0.673251 0.101228 +vt 0.581713 0.500225 +vt 0.636145 0.477936 +vt 0.666615 0.436222 +vt 0.437664 0.993580 +vt 0.535066 0.993578 +vt 0.436443 0.972341 +vt 0.303951 0.839739 +vt 0.318117 0.916270 +vt 0.276879 0.832982 +vt 0.285118 0.831501 +vt 0.268334 0.842790 +vt 0.219965 0.858116 +vt 0.255940 0.925104 +vt 0.207887 0.857280 +vt 0.209497 0.931920 +vt 0.203107 0.846829 +vt 0.170717 0.860279 +vt 0.109378 0.868793 +vt 0.093711 0.870275 +vt 0.070817 0.875090 +vt 0.116064 0.993613 +vt 0.229826 0.993653 +vt 0.344525 0.993612 +vt 0.893363 0.096509 +vt 0.922370 0.131015 +vt 0.907402 0.130278 +vt 0.923936 0.153728 +vt 0.884568 0.121657 +vt 0.900160 0.135155 +vt 0.900713 0.141553 +vt 0.896324 0.143132 +vt 0.889614 0.137271 +vt 0.870846 0.138113 +vt 0.862366 0.122415 +vt 0.876275 0.135659 +vt 0.749624 0.039864 +vt 0.668247 0.057206 +vt 0.617117 0.035163 +vt 0.741543 0.007232 +vt 0.549372 0.043031 +vt 0.490662 0.029233 +vt 0.493140 0.007230 +vt 0.379268 0.007223 +vt 0.484512 0.041185 +vt 0.375757 0.020973 +vt 0.253986 0.014268 +vt 0.254784 0.018547 +vt 0.246651 0.007222 +vt 0.496317 0.044478 +vt 0.491273 0.051692 +vt 0.482063 0.042505 +vt 0.516620 0.040464 +vt 0.526380 0.047871 +vt 0.612607 0.051538 +vt 0.575667 0.057289 +vt 0.604447 0.063487 +vt 0.613427 0.068814 +vt 0.629409 0.068614 +vt 0.623099 0.075597 +vt 0.635627 0.077494 +vt 0.621566 0.084248 +vt 0.645959 0.070583 +vt 0.636224 0.083683 +vt 0.655818 0.072339 +vt 0.664106 0.074672 +vt 0.661349 0.085535 +vt 0.655641 0.090573 +vt 0.713670 0.068214 +vt 0.676456 0.079209 +vt 0.681476 0.084963 +vt 0.702022 0.083548 +vt 0.715622 0.086984 +vt 0.709686 0.091679 +vt 0.736884 0.069880 +vt 0.727170 0.092431 +vt 0.784797 0.077622 +vt 0.751752 0.093282 +vt 0.791992 0.105317 +vt 0.799338 0.119268 +vt 0.792948 0.133086 +vt 0.784541 0.122683 +vt 0.821714 0.110833 +vt 0.810599 0.112073 +vt 0.845947 0.086680 +vt 0.875846 0.048806 +vt 0.643121 0.118224 +vt 0.610784 0.118158 +vt 0.611317 0.095847 +vt 0.622608 0.107490 +vt 0.417617 0.712039 +vt 0.433061 0.688788 +vt 0.413289 0.685331 +vt 0.411749 0.691069 +vt 0.390986 0.718350 +vt 0.395018 0.709583 +vt 0.381619 0.718847 +vt 0.370647 0.713548 +vt 0.351413 0.703544 +vt 0.350631 0.726650 +vt 0.190271 0.165695 +vt 0.155911 0.142346 +vt 0.134544 0.140506 +vt 0.170235 0.180152 +vt 0.984862 0.170937 +vt 0.987804 0.162686 +vt 0.963023 0.166108 +vt 0.990295 0.193911 +vt 0.428091 0.619076 +vt 0.430577 0.643633 +vt 0.402904 0.643620 +vt 0.429198 0.634402 +vt 0.796040 0.876562 +vt 0.784553 0.867035 +vt 0.794359 0.859742 +vt 0.810098 0.872624 +vt 0.656564 0.880800 +vt 0.634952 0.876902 +vt 0.678394 0.870347 +vt 0.192888 0.445665 +vt 0.191414 0.431050 +vt 0.174770 0.444006 +vt 0.182898 0.456824 +vt 0.137992 0.428315 +vt 0.144182 0.437938 +vt 0.166322 0.447352 +vt 0.145382 0.459503 +vt 0.156947 0.470832 +vt 0.131949 0.383584 +vt 0.128590 0.426570 +vt 0.117227 0.408899 +vt 0.113909 0.418604 +vt 0.969399 0.121921 +vt 0.985093 0.150316 +vt 0.963856 0.145273 +vt 0.954806 0.131882 +vt 0.933651 0.165519 +vt 0.926945 0.147733 +vt 0.949851 0.153489 +vt 0.050734 0.680150 +vt 0.045372 0.681010 +vt 0.169434 0.029098 +vt 0.171178 0.021810 +vt 0.162961 0.029697 +vt 0.166595 0.031030 +vt 0.158693 0.029147 +vt 0.161422 0.031370 +vt 
0.162284 0.034425 +vt 0.173899 0.029395 +vt 0.177193 0.026641 +vt 0.182715 0.015733 +vt 0.180480 0.024573 +vt 0.154411 0.027094 +vt 0.157137 0.033249 +vt 0.166527 0.004162 +vt 0.120723 0.101613 +vt 0.121352 0.091544 +vt 0.088522 0.109923 +vt 0.119926 0.110279 +vt 0.117518 0.117794 +vt 0.112964 0.130402 +vt 0.117202 0.133771 +vt 0.124281 0.133116 +vt 0.088521 0.144672 +vt 0.098107 0.153797 +vt 0.104561 0.150429 +vt 0.110280 0.143875 +vt 0.108884 0.151085 +vt 0.169418 0.031468 +vt 0.173170 0.031873 +vt 0.183421 0.021473 +vt 0.188478 0.018161 +vt 0.190387 0.015100 +vt 0.187010 0.025558 +vt 0.184881 0.026002 +vt 0.189348 0.031058 +vt 0.189413 0.024219 +vt 0.007727 0.693761 +vt 0.011824 0.695177 +vt 0.014338 0.694156 +vt 0.191617 0.019139 +vt 0.189798 0.016323 +vt 0.007853 0.701254 +vt 0.141614 0.621305 +vt 0.453841 0.562562 +vt 0.450614 0.557497 +vt 0.621085 0.381592 +vt 0.590073 0.367024 +vt 0.585612 0.466350 +vt 0.914778 0.282088 +vt 0.573474 0.472118 +vt 0.738006 0.122773 +vt 0.722229 0.118069 +vt 0.613580 0.196059 +vt 0.831268 0.289641 +vt 0.897766 0.269814 +vt 0.896833 0.280105 +vt 0.900920 0.187920 +vt 0.906056 0.186371 +vt 0.916957 0.188492 +vt 0.905532 0.247256 +vt 0.530184 0.414840 +vt 0.285778 0.504615 +vt 0.301502 0.575332 +vt 0.306646 0.548190 +vt 0.369133 0.637724 +vt 0.357264 0.631616 +vt 0.409802 0.590410 +vt 0.423248 0.610302 +vt 0.412854 0.587928 +vt 0.329437 0.551151 +vt 0.342749 0.551631 +vt 0.343381 0.525224 +vt 0.382017 0.551575 +vt 0.445754 0.567114 +vt 0.415764 0.563157 +vt 0.470179 0.558823 +vt 0.464470 0.562073 +vt 0.473458 0.505345 +vt 0.462416 0.510180 +vt 0.383139 0.421000 +vt 0.376005 0.392871 +vt 0.465680 0.424766 +vt 0.430928 0.411594 +vt 0.543963 0.369411 +vt 0.371370 0.377443 +vt 0.381932 0.320509 +vt 0.387518 0.316003 +vt 0.377311 0.380325 +vt 0.368020 0.337240 +vt 0.347461 0.410948 +vt 0.216835 0.424973 +vt 0.017886 0.310026 +vt 0.020924 0.287431 +vt 0.016643 0.337034 +vt 0.022990 0.381746 +vt 0.016630 0.229463 +vt 0.095460 0.195951 +vt 0.094881 0.189120 +vt 0.008962 0.084698 +vt 0.553840 0.314243 +vt 0.525563 0.316220 +vt 0.542568 0.296789 +vt 0.472414 0.221297 +vt 0.515177 0.195782 +vt 0.528092 0.217185 +vt 0.590871 0.268291 +vt 0.614857 0.260851 +vt 0.585640 0.223240 +vt 0.615889 0.232563 +vt 0.080210 0.699607 +vt 0.019410 0.757766 +vt 0.028066 0.755801 +vt 0.023192 0.769627 +vt 0.125566 0.087542 +vt 0.120009 0.098522 +vt 0.070718 0.699889 +vt 0.007811 0.652123 +vt 0.004478 0.687363 +vt 0.071622 0.671216 +vt 0.067577 0.673260 +vt 0.595676 0.884246 +vt 0.772105 0.875556 +vt 0.179970 0.450805 +vt 0.782635 0.877361 +vt 0.796305 0.880482 +vt 0.844945 0.888627 +vt 0.991719 0.893228 +vt 0.990709 0.852185 +vt 0.191791 0.400496 +vt 0.198616 0.407289 +vt 0.202670 0.418230 +vt 0.194914 0.425142 +vt 0.340147 0.644939 +vt 0.342741 0.649501 +vt 0.336677 0.649727 +vt 0.348728 0.646121 +vt 0.308220 0.689771 +vt 0.985594 0.402537 +vt 0.998959 0.462043 +vt 0.413343 0.748533 +vt 0.434892 0.720176 +vt 0.509516 0.685935 +vt 0.480357 0.657609 +vt 0.458234 0.679142 +vt 0.532821 0.707178 +vt 0.403303 0.801287 +vt 0.278317 0.782802 +vt 0.296544 0.822190 +vt 0.248635 0.783505 +vt 0.228933 0.803256 +vt 0.221283 0.820980 +vt 0.236231 0.808547 +vt 0.053321 0.870526 +vt 0.023851 0.843663 +vt 0.252278 0.048442 +vt 0.297321 0.033082 +vt 0.250697 0.057497 +vt 0.903902 0.971792 +vt 0.894857 0.974368 +vt 0.758541 0.993429 +vt 0.771862 0.993429 +vt 0.756747 0.967482 +vt 0.745405 0.993429 +vt 0.575881 0.486073 +vt 0.322487 0.814939 +vt 0.880494 0.007238 +vt 0.985119 0.059500 +vt 0.959755 
[Wavefront OBJ mesh data: several thousand "vt u v" texture-coordinate records followed by "f v/vt/vn v/vt/vn v/vt/vn" triangle faces, with smoothing group "s 1" and material "usemtl _GodzillaGodzilla"; the full coordinate and index listing is omitted.]
1274/478/1330 +f 1274/478/1330 1580/482/1334 1279/481/1333 +f 1281/771/1335 1277/479/1331 1280/920/1336 +f 1282/780/1337 1281/921/1335 1280/779/1336 +f 1280/779/1336 1575/777/1338 1282/780/1337 +f 1283/770/1339 1281/771/1335 1282/922/1337 +f 1282/922/1337 1571/923/1340 1283/770/1339 +f 1283/770/1339 1570/773/1341 1284/772/1342 +f 1284/772/1342 1281/771/1335 1283/770/1339 +f 1284/772/1342 1308/508/1343 1277/479/1331 +f 1277/479/1331 1281/771/1335 1284/772/1342 +f 1285/483/1344 1276/476/1323 1277/479/1331 +f 1277/479/1331 1308/508/1343 1285/483/1344 +f 1285/483/1344 1269/475/1324 1276/476/1323 +f 1286/484/1345 1342/544/1325 1269/475/1324 +f 1269/475/1324 1285/483/1344 1286/484/1345 +f 1287/485/1346 1288/486/1347 1286/484/1345 +f 1286/484/1345 1285/483/1344 1287/485/1346 +f 1289/487/1348 1288/486/1347 1287/485/1346 +f 1287/485/1346 1292/488/1349 1289/487/1348 +f 1247/449/1298 1240/441/1292 1288/486/1347 +f 1288/486/1347 1289/487/1348 1247/449/1298 +f 1290/489/1350 1247/449/1298 1289/487/1348 +f 1289/487/1348 1291/490/1351 1290/489/1350 +f 1291/490/1351 1289/487/1348 1292/488/1349 +f 1292/488/1349 1294/491/1352 1291/490/1351 +f 1298/495/1353 1297/498/1354 1296/497/1355 +f 1296/497/1355 1299/496/1356 1298/495/1353 +f 1299/496/1356 1296/497/1355 1294/491/1352 +f 1294/491/1352 1292/488/1349 1299/496/1356 +f 1299/496/1356 1292/488/1349 1300/499/1357 +f 1299/496/1356 1300/499/1357 1301/501/1358 +f 1301/501/1358 1303/500/1359 1299/496/1356 +f 1301/501/1358 1300/499/1357 1302/503/1360 +f 1302/503/1360 1304/502/1361 1301/501/1358 +f 1298/495/1353 1299/496/1356 1303/500/1359 +f 1305/504/1362 1301/501/1358 1304/502/1361 +f 1306/506/1363 1318/505/1364 1301/501/1358 +f 1301/501/1358 1305/504/1362 1306/506/1363 +f 1304/502/1361 1308/508/1343 1307/507/1365 +f 1307/507/1365 1305/504/1362 1304/502/1361 +f 1284/772/1366 1570/773/1367 1307/507/1365 +f 1307/507/1365 1308/508/1343 1284/772/1366 +f 1306/506/1363 1305/504/1362 1309/510/1368 +f 1309/510/1368 1313/509/1369 1306/506/1363 +f 1307/507/1365 1572/511/1370 1309/510/1368 +f 1309/510/1368 1305/504/1362 1307/507/1365 +f 1297/498/1354 1298/495/1353 1311/513/1371 +f 1311/513/1371 1314/512/1372 1297/498/1354 +f 1311/513/1371 1298/495/1353 1303/500/1359 +f 1303/500/1359 1312/514/1373 1311/513/1371 +f 1309/510/1368 1572/511/1370 1310/924/1374 +f 1310/924/1374 1313/509/1369 1309/510/1368 +f 1314/512/1372 1311/513/1371 1312/514/1373 +f 1312/514/1373 1585/517/1375 1314/512/1372 +f 1316/925/1376 1315/926/1377 1314/512/1372 +f 1314/512/1372 1585/517/1375 1316/925/1376 +f 1318/927/1364 1306/928/1363 1313/929/1369 +f 1313/929/1369 1573/930/1378 1318/927/1364 +f 1319/524/1379 469/931/1380 1321/932/1381 +f 1321/932/1381 1320/933/1382 1319/524/1379 +f 987/185/1019 162/186/1383 989/934/1384 +f 989/934/1384 1319/524/1385 987/185/1019 +f 986/320/1017 987/185/1019 1322/522/1386 +f 1322/522/1386 1323/326/1183 986/320/1017 +f 1140/318/1018 986/320/1017 1323/326/1183 +f 1324/529/1387 1325/530/1388 1143/325/1184 +f 1143/325/1184 1323/326/1183 1324/529/1387 +f 1325/530/1388 299/332/1389 1145/328/1186 +f 1145/328/1186 1143/325/1184 1325/530/1388 +f 1329/528/1390 475/529/1391 1328/935/1392 +f 1328/935/1392 1324/529/1393 1329/528/1390 +f 1330/533/1394 1331/535/1395 1332/534/1396 +f 1232/434/1284 1234/435/1283 1333/267/1397 +f 1333/267/1397 1334/536/1398 1232/434/1284 +f 1236/437/1287 1232/434/1284 1334/536/1398 +f 1334/536/1398 1068/537/1399 1236/437/1287 +f 1234/435/1283 1332/534/1396 1331/535/1395 +f 1331/535/1395 1333/267/1397 1234/435/1283 +f 1335/936/1400 1330/533/1394 
1336/543/1401 +f 1336/543/1401 1365/937/1402 1335/936/1400 +f 1332/534/1396 1234/435/1283 1231/432/1282 +f 1231/432/1282 1337/538/1403 1332/534/1396 +f 1231/432/1282 1237/438/1288 1248/448/1299 +f 1248/448/1299 1337/538/1403 1231/432/1282 +f 1338/539/1404 1337/538/1403 1248/448/1299 +f 1339/540/1405 1337/538/1403 1338/539/1404 +f 1338/539/1404 1340/541/1406 1339/540/1405 +f 1338/539/1404 1290/489/1350 1341/542/1407 +f 1341/542/1407 1340/541/1406 1338/539/1404 +f 1248/448/1299 1247/449/1298 1290/489/1350 +f 1290/489/1350 1338/539/1404 1248/448/1299 +f 1290/489/1350 1293/493/1408 1341/542/1407 +f 1332/534/1396 1337/538/1403 1339/540/1405 +f 1330/533/1394 1332/534/1396 1339/540/1405 +f 1339/540/1405 1336/543/1401 1330/533/1394 +f 1288/486/1347 1240/441/1292 1245/446/1297 +f 1245/446/1297 1286/484/1345 1288/486/1347 +f 1286/484/1345 1245/446/1297 1246/447/1296 +f 1246/447/1296 1342/544/1325 1286/484/1345 +f 1259/464/1309 1258/463/1308 1342/544/1325 +f 1342/544/1325 1246/447/1296 1259/464/1309 +f 1349/555/1409 1351/554/1410 1347/550/1411 +f 1347/550/1411 1348/552/1412 1349/555/1409 +f 1350/556/1413 1349/555/1409 1348/552/1412 +f 1351/554/1410 1349/555/1409 1350/556/1413 +f 1350/556/1413 1352/938/1414 1351/554/1410 +f 1353/939/1415 1352/938/1414 1350/556/1413 +f 1350/556/1413 1354/748/1416 1353/939/1415 +f 1354/748/1416 1355/749/1417 1353/939/1415 +f 1359/560/1418 1361/940/1419 1358/559/1420 +f 1358/559/1420 1357/557/1421 1359/560/1418 +f 1360/941/1422 1359/560/1418 1357/557/1421 +f 1357/557/1421 1356/942/1423 1360/941/1422 +f 1361/940/1419 1108/943/1424 1362/562/1425 +f 1362/562/1425 1358/559/1420 1361/940/1419 +f 1358/559/1420 1362/562/1425 1363/561/1426 +f 1362/562/1425 1335/564/1427 1364/563/1428 +f 1364/563/1428 1363/561/1426 1362/562/1425 +f 1364/563/1428 1335/564/1427 1365/565/1429 +f 1365/565/1429 1366/567/1430 1364/563/1428 +f 1366/567/1430 1365/565/1429 1367/569/1431 +f 1367/569/1431 1368/568/1432 1366/567/1430 +f 1367/569/1431 1524/571/1433 1369/570/1434 +f 1369/570/1434 1368/568/1432 1367/569/1431 +f 1372/573/1435 1371/944/1436 1370/572/1437 +f 1374/576/1438 1516/578/1439 1363/561/1426 +f 1363/561/1426 1364/563/1428 1374/576/1438 +f 1347/550/1411 1351/554/1410 1376/584/1440 +f 1376/584/1440 1554/583/1441 1347/550/1411 +f 1351/554/1410 1352/938/1414 1376/584/1440 +f 1379/590/1442 1380/589/1443 1346/549/1444 +f 1346/549/1444 1378/587/1445 1379/590/1442 +f 1346/549/1444 1380/589/1443 1343/547/1446 +f 1343/547/1446 496/551/1447 1346/549/1444 +f 1381/591/1448 1380/589/1443 1379/590/1442 +f 1379/590/1442 1378/587/1445 1381/591/1448 +f 1343/547/1446 1380/589/1443 1381/591/1448 +f 1381/591/1448 1382/592/1449 1343/547/1446 +f 1382/592/1449 1345/546/1450 1343/547/1446 +f 1386/595/1451 1385/945/1452 1383/594/1453 +f 1383/594/1453 1464/946/1454 1386/595/1451 +f 1388/596/1455 538/595/1456 1387/947/1457 +f 1387/947/1457 1386/595/1451 1388/596/1455 +f 1392/948/1458 543/949/1459 1390/950/1460 +f 1390/950/1460 1391/949/1461 1392/948/1458 +f 1391/949/1461 1394/599/1462 1393/600/1463 +f 1393/600/1463 1392/948/1458 1391/949/1461 +f 1397/608/1464 1398/610/1465 1399/609/1466 +f 1402/611/1467 1475/612/1468 1404/619/1469 +f 1404/619/1469 1405/618/1470 1402/611/1467 +f 1399/609/1466 1406/951/1471 1405/618/1470 +f 1405/618/1470 1403/620/1472 1399/609/1466 +f 1405/618/1470 1406/951/1471 1402/611/1467 +f 1407/952/1473 568/622/1474 1405/618/1470 +f 1405/618/1470 1404/619/1469 1407/952/1473 +f 1423/953/1475 1426/954/1476 1424/633/1477 +f 1424/633/1477 1422/955/1478 1423/953/1475 +f 1429/640/1479 980/642/1011 
1430/639/1480 +f 1431/637/1481 1429/640/1479 1430/639/1480 +f 1430/639/1480 1432/638/1482 1431/637/1481 +f 1430/639/1480 980/642/1011 979/182/1003 +f 979/182/1003 1432/638/1482 1430/639/1480 +f 1047/641/1086 1432/638/1482 979/182/1003 +f 979/182/1003 974/178/1002 1047/641/1086 +f 1433/644/1483 1432/638/1482 1047/641/1086 +f 1434/645/1484 1433/644/1483 1047/641/1086 +f 1047/641/1086 1045/236/1085 1434/645/1484 +f 970/173/998 969/174/997 1048/179/1087 +f 1048/179/1087 974/178/1002 970/173/998 +f 1441/655/1485 1444/654/1486 1442/653/1487 +f 1442/653/1487 1440/656/1488 1441/655/1485 +f 1443/956/1489 1444/957/1486 1441/655/1485 +f 1441/655/1485 1440/656/1488 1443/956/1489 +f 1448/958/1490 1445/959/1491 1447/960/1492 +f 1447/960/1492 1449/961/1493 1448/958/1490 +f 1450/112/1494 1451/658/1495 1452/657/1496 +f 1452/657/1496 887/105/1497 1450/112/1494 +f 1454/659/1498 1071/962/1499 1453/457/1306 +f 1453/457/1306 1256/458/1305 1454/659/1498 +f 1454/659/1498 1256/458/1305 1455/459/1307 +f 1455/459/1307 1457/660/1500 1454/659/1498 +f 1241/443/1294 1242/444/1293 1455/459/1307 +f 1455/459/1307 1250/451/1301 1241/443/1294 +f 1455/459/1307 1242/444/1293 1235/436/1286 +f 1455/459/1307 1235/436/1286 1456/661/1501 +f 1456/661/1501 1457/660/1500 1455/459/1307 +f 1235/436/1286 1236/437/1287 1456/661/1501 +f 1366/567/1430 1368/568/1432 1458/664/1502 +f 1458/664/1502 1370/572/1437 1366/567/1430 +f 1459/681/1503 1460/666/1504 1370/572/1437 +f 1370/572/1437 1458/664/1502 1459/681/1503 +f 1373/575/1505 1370/572/1437 1460/666/1504 +f 1460/666/1504 1461/665/1506 1373/575/1505 +f 1373/575/1505 1461/665/1506 1462/963/1507 +f 1462/963/1507 1553/574/1508 1373/575/1505 +f 1463/668/1509 1461/665/1506 1460/666/1504 +f 1463/668/1509 1543/667/1510 1386/595/1451 +f 1386/595/1451 1464/946/1454 1463/668/1509 +f 1465/669/1511 1460/666/1504 1466/671/1512 +f 1466/671/1512 1467/670/1513 1465/669/1511 +f 1473/694/1514 1597/734/1515 1470/743/1516 +f 1470/743/1516 1471/964/1517 1473/694/1514 +f 1472/965/1518 1470/743/1516 1474/682/1519 +f 1474/682/1519 1660/966/1520 1472/965/1518 +f 1473/694/1514 643/697/1521 1480/696/1522 +f 1480/696/1522 1481/695/1523 1473/694/1514 +f 1484/791/1524 1404/967/1525 1475/968/1526 +f 1475/968/1526 1483/790/1527 1484/791/1524 +f 1481/695/1523 1480/696/1522 1486/712/1528 +f 1486/712/1528 1496/720/1529 1481/695/1523 +f 1486/712/1528 1480/696/1522 1487/698/1530 +f 1487/698/1530 1488/713/1531 1486/712/1528 +f 1487/698/1530 1497/969/1532 1489/970/1533 +f 1489/970/1533 1488/713/1531 1487/698/1530 +f 1489/970/1533 670/713/1534 1490/971/1535 +f 1490/971/1535 1488/713/1531 1489/970/1533 +f 1486/712/1528 1488/713/1531 1491/972/1536 +f 1491/972/1536 1492/714/1537 1486/712/1528 +f 1490/971/1535 670/713/1534 1491/972/1536 +f 1491/972/1536 1488/713/1531 1490/971/1535 +f 1491/972/1536 672/714/1538 1493/973/1539 +f 1493/973/1539 1492/714/1537 1491/972/1536 +f 1494/974/1540 673/715/1541 1492/714/1537 +f 1492/714/1537 1493/973/1539 1494/974/1540 +f 1495/716/1542 677/718/1543 1496/720/1529 +f 1496/720/1529 1486/712/1528 1495/716/1542 +f 1487/698/1530 1499/711/1544 1498/975/1545 +f 1498/975/1545 1497/969/1532 1487/698/1530 +f 1500/976/1546 667/711/1547 1498/975/1545 +f 1498/975/1545 1499/711/1544 1500/976/1546 +f 1501/977/1548 1500/976/1546 1499/711/1544 +f 1499/711/1544 1502/710/1549 1501/977/1548 +f 1502/710/1549 1499/711/1544 1487/698/1530 +f 1487/698/1530 1511/699/1550 1502/710/1549 +f 1503/707/1551 1501/977/1548 1502/710/1549 +f 1502/710/1549 1511/699/1550 1504/702/1552 +f 1504/702/1552 1503/707/1551 1502/710/1549 +f 
1504/702/1552 1507/703/1553 1506/978/1554 +f 1506/978/1554 1505/979/1555 1504/702/1552 +f 1508/705/1556 659/703/1557 1506/978/1554 +f 1506/978/1554 1507/703/1553 1508/705/1556 +f 1507/703/1553 658/701/1558 1509/704/1559 +f 1509/704/1559 1508/705/1556 1507/703/1553 +f 1510/980/1560 660/704/1561 1508/705/1556 +f 1508/705/1556 1509/704/1559 1510/980/1560 +f 1507/703/1553 1504/702/1552 1511/699/1550 +f 1511/699/1550 658/701/1558 1507/703/1553 +f 1480/696/1522 655/700/1562 1511/699/1550 +f 1511/699/1550 1487/698/1530 1480/696/1522 +f 1365/565/1429 1336/566/1563 1524/571/1433 +f 1524/571/1433 1367/569/1431 1365/565/1429 +f 1525/981/1564 504/942/1565 1526/982/1566 +f 1526/982/1566 1356/942/1423 1525/981/1564 +f 1526/982/1566 504/942/1565 1360/941/1422 +f 1360/941/1422 1356/942/1423 1526/982/1566 +f 1355/749/1417 1525/981/1564 1356/942/1423 +f 1356/942/1423 1548/983/1567 1355/749/1417 +f 1527/984/1568 718/751/1569 1354/748/1416 +f 1354/748/1416 1528/753/1570 1527/984/1568 +f 1537/985/1571 1535/986/1572 1538/987/1573 +f 1538/987/1573 1539/988/1574 1537/985/1571 +f 1537/985/1571 1539/988/1574 1540/989/1575 +f 1540/989/1575 1534/990/1576 1537/985/1571 +f 1549/991/1577 1545/992/1578 1550/993/1579 +f 1550/993/1579 1548/994/1580 1549/991/1577 +f 1548/995/1580 1550/996/1579 1551/997/1581 +f 1551/997/1581 1547/998/1582 1548/995/1580 +f 1561/999/1583 1562/1000/1584 1563/1001/1585 +f 1563/1001/1585 752/1002/1586 1561/999/1583 +f 1562/1000/1584 751/1001/1587 1564/1003/1588 +f 1564/1003/1588 1563/1001/1585 1562/1000/1584 +f 1563/1001/1585 1564/1003/1588 1565/1004/1589 +f 1565/1004/1589 1566/1005/1590 1563/1001/1585 +f 1568/1006/1591 1396/1007/1592 1567/766/1593 +f 1567/766/1593 1566/767/1594 1568/1006/1591 +f 1285/483/1344 1308/508/1343 1304/502/1361 +f 1304/502/1361 1302/503/1360 1285/483/1344 +f 1302/503/1360 1287/485/1346 1285/483/1344 +f 1300/499/1357 1292/488/1349 1287/485/1346 +f 1287/485/1346 1302/503/1360 1300/499/1357 +f 1580/784/1595 1274/1008/1596 1578/782/1597 +f 1578/782/1597 1579/783/1598 1580/784/1595 +f 1580/784/1595 1579/783/1598 1581/785/1599 +f 1581/785/1599 1279/1009/1600 1580/784/1595 +f 1588/1010/1601 700/1011/1602 1589/1012/1603 +f 1589/1012/1603 1590/1011/1604 1588/1010/1601 +f 1591/742/1605 1592/740/1606 1588/1010/1601 +f 1588/1010/1601 1590/1011/1604 1591/742/1605 +f 1593/1013/1607 697/740/1608 1588/1010/1601 +f 1588/1010/1601 1592/740/1606 1593/1013/1607 +f 1592/740/1606 1591/742/1605 1597/734/1515 +f 1597/734/1515 1594/736/1609 1592/740/1606 +f 1597/734/1515 1596/1014/1610 1594/736/1609 +f 1591/742/1605 1474/682/1519 1470/743/1516 +f 1470/743/1516 1597/734/1515 1591/742/1605 +f 1598/735/1611 699/1015/1612 1596/1014/1610 +f 1596/1014/1610 1597/734/1515 1598/735/1611 +f 1600/1016/1613 690/735/1614 1599/1017/1615 +f 1599/1017/1615 1598/735/1611 1600/1016/1613 +f 1597/734/1515 1473/694/1514 1601/732/1616 +f 1601/732/1616 1598/735/1611 1597/734/1515 +f 1602/733/1617 1600/1016/1613 1598/735/1611 +f 1598/735/1611 1601/732/1616 1602/733/1617 +f 1603/730/1618 1602/733/1617 1601/732/1616 +f 1601/732/1616 1608/726/1619 1603/730/1618 +f 1604/1018/1620 1602/733/1617 1603/730/1618 +f 1603/730/1618 1605/731/1621 1604/1018/1620 +f 1606/729/1622 1605/731/1621 1603/730/1618 +f 1603/730/1618 1608/726/1619 1606/729/1622 +f 1608/726/1619 1601/732/1616 1473/694/1514 +f 1608/726/1619 1473/694/1514 1481/695/1523 +f 1481/695/1523 1609/727/1623 1608/726/1619 +f 1481/695/1523 1610/723/1624 1609/727/1623 +f 1610/723/1624 1611/724/1625 1612/1019/1626 +f 1612/1019/1626 1609/727/1623 1610/723/1624 +f 
1618/801/1627 1619/1020/1628 1620/805/1629 +f 1619/416/1628 1225/415/1630 1621/417/1631 +f 1621/417/1631 1620/418/1629 1619/416/1628 +f 1621/417/1631 1225/415/1630 1622/420/1632 +f 1622/420/1632 1620/421/1629 1621/417/1631 +f 1623/811/1633 1620/805/1629 1622/1021/1632 +f 1622/1021/1632 1624/1022/1634 1623/811/1633 +f 1623/811/1633 1624/1022/1634 1625/1023/1635 +f 1625/1023/1635 1228/429/1636 1623/811/1633 +f 1618/801/1627 1620/805/1637 1627/804/1638 +f 1627/804/1638 1626/803/1639 1618/801/1627 +f 1617/799/1640 1224/802/1641 1618/801/1627 +f 1618/801/1627 1628/800/1642 1617/799/1640 +f 1617/799/1640 1628/800/1642 1629/807/1643 +f 1629/807/1643 1652/806/1644 1617/799/1640 +f 1630/808/1645 1651/809/1646 1629/807/1643 +f 1629/807/1643 1628/800/1642 1630/808/1645 +f 1630/808/1645 1628/800/1642 1631/812/1647 +f 1631/812/1647 1632/1024/1648 1630/808/1645 +f 1628/800/1642 1618/801/1627 1633/810/1649 +f 1633/810/1649 1631/812/1647 1628/800/1642 +f 1626/803/1639 1623/811/1633 1633/810/1649 +f 1633/810/1649 1618/801/1627 1626/803/1639 +f 1634/1025/1650 1636/1026/1651 1635/1027/1652 +f 1635/1027/1652 1639/1028/1653 1634/1025/1650 +f 1638/1029/1654 1684/1030/1655 1635/1027/1652 +f 1635/1027/1652 1637/1031/1656 1638/1029/1654 +f 1642/818/1657 1641/817/1658 1643/820/1659 +f 1643/820/1659 1184/819/1660 1642/818/1657 +f 1615/413/1661 1649/1032/1662 1650/1033/1663 +f 1650/1033/1663 1220/410/1664 1615/413/1661 +f 1662/407/1269 1661/837/1665 1213/403/1265 +f 1663/1034/1666 1215/1035/1666 1664/1036/1666 +f 1664/1036/1666 1654/1037/1667 1663/1034/1666 +f 1216/404/1266 1219/409/1270 1665/1038/1668 +f 1665/1038/1668 1217/408/1268 1216/404/1266 +f 1217/408/1268 1665/1038/1668 1666/1039/1669 +f 1669/828/1670 1665/829/1671 1668/1040/1672 +f 1668/1040/1672 1667/1041/1673 1669/828/1670 +f 1160/349/1204 1162/350/1203 1670/351/1674 +f 1670/351/1674 1671/347/1675 1160/349/1204 +f 1673/314/1178 1198/387/1244 1200/389/1246 +f 1200/389/1246 1207/395/1254 1673/314/1178 +f 1673/314/1178 1137/315/1177 1674/841/1676 +f 1673/314/1178 1674/841/1676 1197/386/1243 +f 1197/386/1243 1198/387/1244 1673/314/1178 +f 1138/316/1179 1197/386/1243 1674/841/1676 +f 1674/841/1676 1137/315/1177 1138/316/1179 +f 1676/692/1677 1477/686/1678 1474/682/1519 +f 1474/682/1519 1591/742/1605 1676/692/1677 +f 1591/742/1605 1590/1011/1604 1676/692/1677 +f 74/1042/1679 1/1/1 58/1043/1680 +f 58/1043/1680 75/1044/1681 74/1042/1679 +f 1/1/1 74/1042/1679 39/844/1682 +f 58/1043/1680 1/1/1 2/4/4 +f 2/4/4 48/1045/1683 58/1043/1680 +f 39/844/1682 5/9/9 4/2/2 +f 4/2/2 1/1/1 39/844/1682 +f 57/66/6 48/64/1683 2/1046/4 +f 2/1046/4 3/1047/3 57/66/6 +f 129/845/1684 10/16/16 7/7/7 +f 7/7/7 5/9/9 129/845/1684 +f 129/845/1684 13/17/17 11/13/13 +f 11/13/13 10/16/16 129/845/1684 +f 135/1048/1685 14/18/18 13/17/17 +f 13/17/17 133/1049/1686 135/1048/1685 +f 14/18/18 135/1048/1685 15/19/19 +f 16/21/21 32/40/37 57/6/6 +f 57/6/6 6/5/5 16/21/21 +f 127/154/1687 23/27/27 19/23/23 +f 19/23/23 9/12/12 127/154/1687 +f 138/38/35 139/20/20 17/10/10 +f 17/10/10 20/24/24 138/38/35 +f 73/89/33 71/88/1688 28/858/34 +f 28/858/34 21/1050/26 73/89/33 +f 73/89/33 27/1051/30 26/33/32 +f 26/33/32 72/32/31 73/89/33 +f 29/1052/42 30/43/40 139/20/20 +f 139/20/20 138/38/1689 29/1052/42 +f 31/39/36 16/21/21 139/20/20 +f 139/20/20 30/43/40 31/39/36 +f 140/168/1690 69/47/1691 32/854/37 +f 32/854/37 31/1053/36 140/168/1690 +f 31/39/36 33/42/39 140/846/1690 +f 36/49/1692 141/51/45 140/168/1690 +f 140/168/1690 33/847/39 36/49/1692 +f 33/42/39 34/41/1693 36/848/1692 +f 5/1054/1694 39/54/48 38/53/47 +f 
38/53/47 42/60/54 5/1054/1694 +f 75/92/1695 58/852/1696 48/64/58 +f 48/64/58 109/93/126 75/92/1695 +f 59/74/68 57/66/60 32/854/1697 +f 32/854/1697 60/853/1698 59/74/68 +f 123/79/73 63/78/72 62/77/71 +f 121/855/1699 119/76/70 61/75/69 +f 61/75/69 124/856/1700 121/855/1699 +f 124/856/1700 61/75/69 64/81/75 +f 64/81/75 126/857/1701 124/856/1700 +f 64/81/75 66/82/76 68/85/1702 +f 68/85/1702 126/857/1701 64/81/75 +f 69/47/1703 29/46/1704 60/853/1698 +f 60/853/1698 32/854/1697 69/47/1703 +f 29/46/1704 52/71/65 59/74/68 +f 59/74/68 60/853/1698 29/46/1704 +f 138/1055/1705 28/858/1706 52/71/65 +f 52/71/65 29/46/1704 138/1055/1705 +f 28/858/1706 70/86/80 53/72/66 +f 53/72/66 52/71/65 28/858/1706 +f 71/88/82 23/87/81 70/86/80 +f 70/86/80 28/858/1706 71/88/82 +f 127/1056/1707 123/79/73 54/68/62 +f 54/68/62 23/87/81 127/1056/1707 +f 72/32/84 24/31/1708 23/87/81 +f 23/87/81 71/88/82 72/32/84 +f 78/98/90 108/97/89 105/849/1709 +f 105/849/1709 80/101/93 78/98/90 +f 81/104/96 80/101/93 105/849/1709 +f 105/849/1709 104/850/1710 81/104/96 +f 104/850/1710 620/657/1711 82/105/97 +f 82/105/97 81/104/96 104/850/1710 +f 217/143/1712 620/131/120 103/129/118 +f 103/129/118 110/136/127 217/143/1712 +f 605/179/136 212/235/1713 117/237/133 +f 117/237/133 116/889/132 605/179/136 +f 115/140/131 605/145/136 116/141/132 +f 119/146/137 9/12/12 136/11/11 +f 122/150/141 125/153/144 67/859/1714 +f 67/859/1714 123/151/142 122/150/141 +f 68/1057/1715 125/153/144 126/152/143 +f 130/157/148 129/156/147 5/1054/1694 +f 5/1054/1694 42/60/54 130/157/148 +f 128/155/146 132/161/152 13/160/151 +f 13/160/151 129/156/147 128/155/146 +f 134/162/153 137/167/158 15/164/155 +f 964/1058/1716 142/1059/1717 91/1060/1718 +f 91/1060/1718 90/113/1719 964/1058/1716 +f 142/1059/1717 965/1061/1720 100/1062/1721 +f 100/1062/1721 91/1060/1718 142/1059/1717 +f 965/1061/1720 143/171/165 100/1062/1721 +f 144/172/166 143/171/165 965/1061/1720 +f 965/1061/1720 966/171/1722 144/172/166 +f 146/170/164 118/174/168 100/1062/1721 +f 100/1062/1721 143/171/165 146/170/164 +f 150/177/171 149/176/170 287/322/1723 +f 287/322/1723 478/532/1724 150/177/171 +f 287/322/1723 149/176/170 152/181/175 +f 154/182/176 479/183/177 155/860/1725 +f 155/860/1725 156/642/637 154/182/176 +f 479/183/177 156/642/637 155/860/1725 +f 595/1063/1726 156/642/637 479/183/177 +f 479/183/177 597/862/1727 595/1063/1726 +f 597/862/1727 479/183/177 157/531/520 +f 594/861/1728 597/862/1727 157/531/520 +f 157/531/520 158/321/317 594/861/1728 +f 594/861/1728 158/321/317 159/320/316 +f 159/320/316 160/185/179 594/861/1728 +f 161/184/178 593/190/185 594/861/1728 +f 594/861/1728 160/185/179 161/184/178 +f 989/934/1729 162/186/180 160/185/179 +f 160/185/179 467/524/513 989/934/1729 +f 991/864/1730 163/187/181 164/188/182 +f 164/188/182 990/187/183 991/864/1730 +f 161/184/178 163/187/181 991/864/1730 +f 991/864/1730 992/863/1731 161/184/178 +f 165/1064/1732 993/648/643 161/184/178 +f 161/184/178 992/863/1731 165/1064/1732 +f 167/191/186 996/192/1733 997/195/190 +f 997/195/190 168/192/187 167/191/186 +f 999/866/1734 1000/865/1735 170/194/189 +f 170/194/189 169/193/188 999/866/1734 +f 1001/867/1736 171/196/191 170/194/189 +f 170/194/189 1000/865/1735 1001/867/1736 +f 1002/871/1737 166/189/184 171/196/191 +f 171/196/191 995/189/1738 1002/871/1737 +f 172/869/1739 173/868/1740 593/190/185 +f 593/190/185 166/189/184 172/869/1739 +f 172/869/1739 166/189/184 586/870/1741 +f 586/870/1741 173/868/1740 172/869/1739 +f 174/200/195 586/870/1741 166/189/184 +f 166/189/184 1002/871/1737 174/200/195 +f 175/197/192 
174/200/195 1002/871/1737 +f 1002/871/1737 1006/200/1742 175/197/192 +f 175/197/192 1008/198/1743 1009/1065/1744 +f 1009/1065/1744 177/198/193 175/197/192 +f 177/198/193 1009/1065/1744 178/1066/1745 +f 178/1066/1745 1010/873/1746 177/198/193 +f 1011/872/1747 179/201/196 177/198/193 +f 177/198/193 1010/873/1746 1011/872/1747 +f 176/199/194 1015/875/1748 1014/874/1749 +f 1014/874/1749 1013/203/198 176/199/194 +f 182/843/857 1007/199/1750 1015/875/1748 +f 1015/875/1748 176/199/194 182/843/857 +f 1013/203/198 184/206/201 183/209/204 +f 183/209/204 180/202/197 1013/203/198 +f 185/877/1751 184/206/201 1013/203/198 +f 1013/203/198 1017/206/1752 185/877/1751 +f 1018/876/1753 1019/207/202 184/206/201 +f 184/206/201 185/877/1751 1018/876/1753 +f 186/208/203 1019/207/202 187/1067/1754 +f 187/1067/1754 1021/1068/1755 186/208/203 +f 188/210/205 186/208/203 1021/1068/1755 +f 1021/1068/1755 1020/208/1756 188/210/205 +f 189/211/206 592/879/1757 579/878/1758 +f 579/878/1758 183/209/204 189/211/206 +f 1025/880/1759 1026/881/1760 189/211/206 +f 189/211/206 191/213/208 1025/880/1759 +f 1026/881/1760 193/217/212 192/216/211 +f 192/216/211 189/211/206 1026/881/1760 +f 192/216/211 592/879/1757 189/211/206 +f 195/215/210 1028/218/1761 1029/219/214 +f 1029/219/214 196/218/213 195/215/210 +f 1030/882/1762 1031/224/219 196/218/213 +f 196/218/213 198/221/216 1030/882/1762 +f 576/223/218 575/883/1763 592/879/1757 +f 592/879/1757 192/216/211 576/223/218 +f 199/222/217 196/218/213 1031/224/219 +f 202/885/1764 573/884/1765 576/223/218 +f 576/223/218 199/222/217 202/885/1764 +f 199/222/217 613/1069/1766 202/885/1764 +f 613/1069/1766 203/227/222 572/887/1767 +f 572/887/1767 202/885/1764 613/1069/1766 +f 203/227/222 204/230/225 575/886/1768 +f 575/886/1768 572/887/1767 203/227/222 +f 204/230/225 589/888/1769 592/879/1770 +f 592/879/1770 575/886/1768 204/230/225 +f 612/1070/1771 207/231/226 203/227/222 +f 203/227/222 613/1069/1766 612/1070/1771 +f 205/229/224 217/244/239 117/237/232 +f 117/237/232 204/230/225 205/229/224 +f 211/236/231 604/645/640 589/888/1769 +f 589/888/1769 204/230/225 211/236/231 +f 612/1070/1771 247/1071/1772 213/238/233 +f 213/238/233 207/231/226 612/1070/1771 +f 218/245/240 619/248/244 621/243/238 +f 621/243/238 216/241/236 218/245/240 +f 89/115/1773 619/248/244 221/116/243 +f 223/250/246 90/113/1719 221/116/243 +f 1059/890/1774 1060/892/1775 222/249/245 +f 222/249/245 225/252/248 1059/890/1774 +f 1061/893/1776 223/250/246 222/249/245 +f 222/249/245 1060/892/1775 1061/893/1776 +f 1061/893/1776 964/1058/1716 90/113/1719 +f 90/113/1719 223/250/246 1061/893/1776 +f 225/252/248 1062/891/1777 1059/890/1774 +f 226/254/250 1065/894/1778 1062/891/1777 +f 1062/891/1777 225/252/248 226/254/250 +f 226/254/250 627/895/1779 228/256/252 +f 228/256/252 1065/894/1778 226/254/250 +f 1065/894/1778 228/256/252 229/259/255 +f 229/259/255 1066/256/1780 1065/894/1778 +f 627/895/1779 226/254/250 227/253/249 +f 227/253/249 230/255/251 627/895/1779 +f 627/895/1779 625/896/1781 231/257/253 +f 231/257/253 228/256/252 627/895/1779 +f 1072/897/1782 233/260/256 232/258/254 +f 232/258/254 234/262/259 1072/897/1782 +f 235/1072/1783 1071/262/1784 1072/897/1782 +f 1072/897/1782 234/262/259 235/1072/1783 +f 235/1072/1783 234/262/259 622/1073/1785 +f 622/1073/1785 517/1074/1786 235/1072/1783 +f 617/284/278 238/268/263 210/233/228 +f 210/233/228 214/239/234 617/284/278 +f 240/270/264 617/284/278 618/271/265 +f 1083/274/268 1082/275/1124 244/899/1787 +f 244/899/1787 243/275/269 1083/274/268 +f 1084/898/1788 1085/900/1789 243/275/269 +f 
243/275/269 244/899/1787 1084/898/1788 +f 245/276/270 243/275/269 1085/900/1789 +f 241/272/266 1086/278/272 246/1075/1790 +f 246/1075/1790 1083/274/268 241/272/266 +f 199/222/217 241/272/266 615/1076/1791 +f 615/1076/1791 613/1069/1766 199/222/217 +f 241/272/266 247/1071/1772 615/1076/1791 +f 618/271/265 213/238/233 247/1071/1772 +f 247/1071/1772 241/272/266 618/271/265 +f 1088/282/276 248/277/271 250/280/274 +f 250/280/274 1087/277/1129 1088/282/276 +f 1088/282/276 242/273/267 248/277/271 +f 1088/282/276 1091/901/1792 252/285/279 +f 252/285/279 251/281/275 1088/282/276 +f 252/285/279 1091/901/1792 1092/903/1793 +f 1092/903/1793 253/902/1794 252/285/279 +f 1093/287/281 252/285/279 253/902/1794 +f 253/902/1794 1090/285/1795 1093/287/281 +f 1096/290/284 1095/291/1796 257/1077/1797 +f 257/1077/1797 256/291/285 1096/290/284 +f 1097/1078/1798 258/293/287 256/291/285 +f 256/291/285 257/1077/1797 1097/1078/1798 +f 1100/1079/1799 263/1080/1800 261/295/289 +f 261/295/289 262/297/291 1100/1079/1799 +f 1101/1081/1801 264/1082/1802 261/295/289 +f 261/295/289 263/1080/1800 1101/1081/1801 +f 508/301/296 261/295/289 264/1082/1802 +f 508/1083/296 507/560/1803 269/940/300 +f 269/940/300 268/1084/295 508/1083/296 +f 267/298/293 265/296/290 270/302/1804 +f 270/302/1804 272/303/298 267/298/293 +f 271/304/299 509/1085/1805 512/1086/1806 +f 512/1086/1806 274/307/302 271/304/299 +f 1161/354/350 1149/335/1807 296/334/330 +f 296/334/330 298/335/331 1161/354/350 +f 312/350/346 313/351/347 1161/354/350 +f 1161/354/350 298/335/331 312/350/346 +f 1165/356/352 316/355/351 317/359/355 +f 317/359/355 318/362/358 1165/356/352 +f 838/911/1808 321/361/357 322/360/356 +f 332/909/1809 838/911/1808 322/360/356 +f 322/360/356 814/910/1810 332/909/1809 +f 323/363/359 814/910/1810 322/360/356 +f 838/911/1808 333/1087/1811 1181/1088/1812 +f 1181/1088/1812 321/361/357 838/911/1808 +f 815/1089/1813 814/910/1810 323/363/359 +f 323/363/359 813/373/369 815/1089/1813 +f 809/1090/1814 810/375/371 330/366/362 +f 330/366/362 807/1091/1815 809/1090/1814 +f 807/1091/1815 330/366/362 325/365/361 +f 325/365/361 329/368/364 807/1091/1815 +f 329/368/364 336/377/373 812/1092/1816 +f 812/1092/1816 807/1091/1815 329/368/364 +f 337/914/1817 806/371/367 328/370/366 +f 328/370/366 801/908/1818 337/914/1817 +f 327/369/365 801/908/1818 328/370/366 +f 338/1026/376 800/1027/1819 801/814/1818 +f 801/814/1818 327/1093/365 338/1026/376 +f 341/381/377 327/369/365 833/348/344 +f 833/348/344 304/341/337 342/342/338 +f 342/342/338 341/381/377 833/348/344 +f 1206/396/392 355/395/391 356/397/393 +f 356/397/393 357/398/394 1206/396/392 +f 1205/394/390 1210/915/1820 358/400/396 +f 358/400/396 354/393/389 1205/394/390 +f 1210/915/1820 360/405/401 359/401/397 +f 359/401/397 358/400/396 1210/915/1820 +f 362/403/399 360/405/401 1214/916/1821 +f 1214/916/1821 363/835/845 362/403/399 +f 366/406/402 367/409/405 824/1094/1822 +f 824/1094/1822 823/1095/1823 366/406/402 +f 784/1096/1824 786/412/408 369/411/407 +f 369/411/407 371/414/410 784/1096/1824 +f 785/413/409 368/410/406 372/1033/1825 +f 372/1033/1825 839/1032/1826 785/413/409 +f 823/1095/1827 368/410/406 366/406/402 +f 823/1095/1828 839/1097/1829 372/1033/1825 +f 372/1033/1825 368/410/406 823/1095/1828 +f 370/402/398 788/797/1830 373/416/412 +f 795/918/1831 382/917/1832 351/391/387 +f 351/391/387 379/425/419 795/918/1831 +f 404/453/445 405/455/447 491/1098/1833 +f 491/1098/1833 407/456/448 404/453/445 +f 406/454/446 442/461/453 405/455/447 +f 421/474/466 424/776/1834 762/472/464 +f 762/472/464 419/468/460 421/474/466 
+f 420/473/465 762/472/464 423/775/1835 +f 423/775/1835 773/919/1836 420/473/465 +f 425/1099/1837 775/1100/1838 423/775/1839 +f 423/775/1839 761/774/1840 425/1099/1837 +f 773/919/1836 426/1101/1841 418/471/463 +f 418/471/463 420/473/465 773/919/1836 +f 767/1102/1842 424/776/1834 421/474/466 +f 421/474/466 768/1103/1843 767/1102/1842 +f 427/478/470 770/482/474 768/1103/1843 +f 768/1103/1843 421/474/466 427/478/470 +f 431/480/472 432/481/1844 428/477/469 +f 771/1104/1845 770/482/474 432/481/473 +f 769/1105/1846 431/480/472 430/479/471 +f 430/479/471 765/920/1847 769/1105/1846 +f 769/1105/1846 771/1104/1848 432/481/1849 +f 432/481/1849 431/480/472 769/1105/1846 +f 433/771/777 764/922/1850 765/920/1847 +f 765/920/1847 430/479/471 433/771/777 +f 757/770/1851 759/923/1852 764/922/1850 +f 764/922/1850 433/771/777 757/770/1851 +f 443/491/483 409/494/486 414/1106/1853 +f 414/1106/1853 444/497/489 443/491/483 +f 414/1106/1853 416/1107/1854 445/498/490 +f 445/498/490 444/497/489 414/1106/1853 +f 455/507/499 756/773/775 758/772/1855 +f 758/772/1855 456/508/500 455/507/499 +f 458/924/1856 462/511/503 457/510/502 +f 457/510/502 776/509/501 458/924/1856 +f 776/929/501 760/930/1857 777/1108/1858 +f 777/1108/1858 458/1109/1856 776/929/501 +f 462/511/1859 458/924/1856 777/1110/1858 +f 777/1110/1858 760/1111/1860 462/511/1859 +f 416/1107/1854 418/926/1861 463/512/504 +f 463/512/504 445/498/490 416/1107/1854 +f 426/925/1862 464/517/509 463/512/504 +f 463/512/504 418/926/1861 426/925/1862 +f 466/927/497 760/930/1857 776/929/501 +f 776/929/501 454/928/498 466/927/497 +f 468/523/512 1319/524/1863 1320/933/1864 +f 1320/933/1864 467/524/513 468/523/512 +f 467/524/513 1320/933/1864 1321/932/1865 +f 1321/932/1865 469/931/1866 467/524/513 +f 469/931/1866 1319/524/1867 989/934/1729 +f 989/934/1729 467/524/513 469/931/1866 +f 474/326/322 475/529/518 1329/528/1868 +f 1329/528/1868 470/522/511 474/326/322 +f 1326/1112/1869 476/530/519 299/332/328 +f 299/332/328 1325/530/1388 1326/1112/1869 +f 1326/1112/1869 1325/530/1388 477/1113/1870 +f 477/1113/1870 476/530/519 1326/1112/1869 +f 480/1114/1871 475/529/518 476/530/519 +f 476/530/519 477/1113/1872 480/1114/1871 +f 480/1114/1873 1324/529/1874 1328/935/1875 +f 1328/935/1875 475/529/1876 480/1114/1873 +f 274/1115/1877 512/936/1878 481/533/524 +f 481/533/524 483/535/526 274/1115/1877 +f 483/535/526 484/267/527 237/266/1879 +f 237/266/1879 274/1115/1877 483/535/526 +f 236/264/1880 484/267/527 485/536/528 +f 485/536/528 486/1116/1881 236/264/1880 +f 486/1116/1881 485/536/528 230/537/529 +f 512/936/1878 715/543/535 481/533/524 +f 489/540/532 715/543/535 716/1117/1882 +f 716/1117/1882 490/541/533 489/540/532 +f 439/489/481 491/542/534 405/493/485 +f 1344/1118/1883 494/548/540 493/547/539 +f 493/547/539 496/551/543 1344/1118/1883 +f 502/554/546 525/584/576 742/938/1884 +f 742/938/1884 501/556/548 502/554/546 +f 741/939/1885 503/748/750 501/556/548 +f 501/556/548 742/938/1884 741/939/1885 +f 503/748/750 741/939/1885 1355/749/751 +f 741/939/1885 738/1119/1886 739/983/1887 +f 739/983/1887 1355/749/751 741/939/1885 +f 1355/749/751 739/983/1887 504/942/1888 +f 504/942/1888 1525/981/1889 1355/749/751 +f 504/942/1888 739/983/1887 737/558/550 +f 737/558/550 505/557/549 504/942/1888 +f 1360/941/1890 505/557/549 507/560/552 +f 507/560/552 508/1083/1891 1360/941/1890 +f 504/942/1888 505/557/549 1360/941/1890 +f 507/560/552 506/559/551 269/940/1892 +f 269/940/1892 506/559/551 509/562/554 +f 509/562/554 271/943/1893 269/940/1892 +f 705/577/569 704/1120/1894 514/567/559 +f 514/567/559 
511/563/555 705/577/569 +f 704/1120/1894 703/944/1895 518/572/564 +f 518/572/564 514/567/559 704/1120/1894 +f 702/573/565 518/572/564 703/944/1895 +f 525/584/576 708/1121/1896 704/582/574 +f 704/582/574 524/581/573 525/584/576 +f 702/573/565 526/583/575 519/574/566 +f 708/1121/1896 525/584/576 526/583/575 +f 526/583/575 702/573/565 708/1121/1896 +f 527/586/578 741/939/1885 742/938/1884 +f 742/938/1884 522/578/570 527/586/578 +f 737/558/550 528/585/577 506/559/551 +f 533/591/583 534/592/584 732/1122/1897 +f 732/1122/1897 532/588/580 533/591/583 +f 534/592/584 493/547/539 1345/546/538 +f 536/594/586 734/946/1898 732/1122/1897 +f 732/1122/1897 534/592/584 536/594/586 +f 1384/1123/1899 537/1124/1900 536/594/586 +f 536/594/586 535/593/585 1384/1123/1899 +f 1385/945/1901 536/594/586 537/1124/1900 +f 537/1124/1900 1383/594/1902 1385/945/1901 +f 538/595/587 734/946/1898 536/594/586 +f 536/594/586 1385/945/1901 538/595/587 +f 538/595/587 1385/945/1901 539/1125/1903 +f 539/1125/1903 1387/947/1904 538/595/587 +f 540/597/589 1389/598/1905 1390/950/1906 +f 1390/950/1906 541/598/590 540/597/589 +f 542/1126/1907 541/598/590 1390/950/1906 +f 1390/950/1906 543/949/1908 542/1126/1907 +f 543/949/1908 1392/948/1909 1393/600/592 +f 1393/600/592 544/599/591 543/949/1908 +f 754/604/596 547/603/595 550/607/599 +f 550/607/599 552/1127/1910 754/604/596 +f 637/1128/1911 542/1126/1907 543/949/1908 +f 543/949/1908 544/599/591 637/1128/1911 +f 754/604/596 551/1129/1912 637/1128/1911 +f 637/1128/1911 544/599/591 754/604/596 +f 551/1129/1912 754/604/596 750/1130/1913 +f 750/1130/1913 713/1131/1914 551/1129/1912 +f 713/1132/1914 710/1133/1915 553/1134/1916 +f 553/1134/1916 551/1135/1912 713/1132/1914 +f 712/1136/1917 638/1137/1918 713/1131/1914 +f 713/1131/1914 750/1130/1913 712/1136/1917 +f 750/1130/1913 749/1138/1919 712/1136/1917 +f 554/608/601 556/610/603 712/1136/1917 +f 712/1136/1917 749/1138/1919 554/608/601 +f 555/617/611 554/608/601 749/1138/1919 +f 749/1138/1919 752/1002/1920 555/617/611 +f 557/609/602 747/951/1921 746/678/676 +f 746/678/676 556/610/603 557/609/602 +f 556/610/603 746/678/676 558/677/675 +f 560/611/604 559/614/607 746/678/676 +f 1403/620/615 557/609/602 562/616/609 +f 562/616/609 1399/609/1922 1403/620/615 +f 564/968/605 565/967/614 1484/791/1923 +f 1484/791/1923 778/790/1924 564/968/605 +f 566/618/613 747/951/1921 557/609/602 +f 557/609/602 1403/620/615 566/618/613 +f 566/618/613 560/611/604 747/951/1921 +f 1407/952/1925 565/619/614 566/618/613 +f 566/618/613 568/622/617 1407/952/1925 +f 565/619/614 1408/1139/1926 569/1140/1927 +f 569/1140/1927 1484/1141/1923 565/619/614 +f 565/619/614 1407/952/1925 570/1142/1928 +f 570/1142/1928 1408/1139/1929 565/619/614 +f 577/626/621 576/1143/1930 571/1144/1931 +f 571/1144/1931 572/623/618 577/626/621 +f 572/623/618 571/1144/1931 578/1145/1932 +f 578/1145/1932 573/1146/1933 572/623/618 +f 202/1147/1934 572/623/618 573/1146/1933 +f 576/1148/1935 577/1149/1936 574/1150/1937 +f 574/1150/1937 575/1151/1938 576/1148/1935 +f 576/1148/1939 573/1152/1940 578/1153/1941 +f 578/1153/1941 571/1154/1942 576/1148/1939 +f 579/628/623 592/1155/1943 591/1156/1944 +f 591/1156/1944 580/1157/1945 579/628/623 +f 579/628/623 580/1157/1945 590/1158/1946 +f 590/1158/1946 581/629/624 579/628/623 +f 609/652/1947 608/651/1948 585/1159/1949 +f 585/1159/1949 586/1160/1950 609/652/1947 +f 586/1161/1950 587/1162/1951 610/1163/1952 +f 610/1163/1952 609/1164/1947 586/1161/1950 +f 586/870/1950 585/1165/1949 595/1063/1953 +f 595/1063/1953 173/868/1954 586/870/1950 +f 587/1166/1955 
181/1167/1956 588/1168/1957 +f 588/1168/1957 601/1169/1958 587/1166/1955 +f 611/1170/1959 601/1169/1958 588/1168/1957 +f 588/1168/1957 181/1171/1956 611/1170/1959 +f 589/633/1960 584/954/1961 583/953/1962 +f 583/953/1962 582/955/1963 589/633/1960 +f 590/1172/1964 580/634/629 589/633/628 +f 589/633/628 581/1173/1965 590/1172/1964 +f 596/1174/1966 597/1175/1967 594/1176/1968 +f 594/1176/1968 593/1177/1969 596/1174/1966 +f 595/1178/1970 596/1174/1966 593/1177/1969 +f 593/1177/1969 173/1179/1971 595/1178/1970 +f 595/1180/1972 597/1181/1973 596/1182/1974 +f 174/200/195 587/205/200 586/870/1741 +f 608/640/635 156/642/637 595/1063/1726 +f 595/1063/1726 585/1183/1975 608/640/635 +f 587/205/1976 601/643/638 599/637/632 +f 599/637/632 608/640/635 587/205/1976 +f 611/1184/1977 604/645/640 603/644/639 +f 603/644/639 601/643/638 611/1184/1977 +f 180/202/197 584/1185/1978 611/1184/1977 +f 611/1184/1977 181/204/199 180/202/197 +f 183/209/204 579/878/1758 584/1185/1978 +f 584/1185/1978 180/202/197 183/209/204 +f 584/1185/1978 589/888/1769 604/645/640 +f 604/645/640 611/1184/1977 584/1185/1978 +f 118/174/168 147/173/167 605/179/173 +f 1435/1186/1979 167/191/186 161/184/178 +f 161/184/178 607/647/642 1435/1186/1979 +f 247/956/1980 612/656/651 614/655/650 +f 614/655/650 615/957/649 247/956/1980 +f 213/1187/1981 618/1188/1982 616/1189/1983 +f 616/1189/1983 214/1190/1984 213/1187/1981 +f 617/1191/1985 214/1190/1984 616/1189/1983 +f 616/1189/1983 618/1192/1982 617/1191/1985 +f 716/1193/1986 517/1194/1987 622/457/449 +f 622/457/449 490/1195/1988 716/1193/1986 +f 407/456/448 491/1098/1833 490/1195/1988 +f 490/1195/1988 622/457/449 407/456/448 +f 625/661/658 626/660/657 231/1196/1989 +f 625/661/658 627/1197/1990 230/537/529 +f 230/537/529 388/437/429 625/661/658 +f 623/659/656 622/457/449 234/962/1991 +f 234/962/1991 626/660/657 623/659/656 +f 516/568/560 628/663/661 235/1198/1992 +f 235/1198/1992 517/570/562 516/568/560 +f 1459/681/679 1458/664/1993 629/662/659 +f 629/662/659 630/664/662 1459/681/679 +f 1459/681/679 630/664/662 518/572/564 +f 518/572/564 631/666/664 1459/681/679 +f 520/575/567 519/574/566 732/963/1994 +f 732/963/1994 731/665/663 520/575/567 +f 735/668/666 631/666/664 731/665/663 +f 735/668/666 734/946/1898 538/595/587 +f 538/595/587 632/667/665 735/668/666 +f 709/1199/1995 559/614/607 636/673/671 +f 636/673/671 553/675/673 709/1199/1995 +f 559/614/607 709/1199/1995 638/676/674 +f 1469/1200/1996 639/679/677 559/614/607 +f 559/614/607 561/613/606 1469/1200/1996 +f 641/743/745 1471/964/1997 1469/1201/1998 +f 1469/1201/1998 561/965/1999 641/743/745 +f 1471/964/1997 641/743/745 642/694/693 +f 642/694/693 643/697/696 1471/964/1997 +f 561/965/1999 755/966/2000 644/682/680 +f 644/682/680 641/743/745 561/965/1999 +f 561/965/1999 564/968/2001 755/966/2000 +f 755/966/2000 780/793/799 645/683/681 +f 645/683/681 644/682/680 755/966/2000 +f 1478/688/687 648/686/685 649/687/686 +f 1479/1202/2002 652/693/692 648/686/685 +f 648/686/685 651/690/689 1479/1202/2002 +f 1512/1203/2003 1480/696/2004 643/697/696 +f 643/697/696 654/696/695 1512/1203/2003 +f 655/700/699 1480/696/2004 1512/1203/2003 +f 1512/1203/2003 654/696/695 655/700/699 +f 661/706/706 660/704/704 1510/980/2005 +f 1510/980/2005 1509/704/707 661/706/706 +f 662/702/702 1505/979/2006 1506/978/2007 +f 1506/978/2007 659/703/703 662/702/702 +f 663/709/710 1504/702/2008 1505/979/2006 +f 1505/979/2006 662/702/702 663/709/710 +f 1501/977/2009 1503/707/2010 665/708/709 +f 665/708/709 664/707/708 1501/977/2009 +f 664/707/708 666/710/711 1501/977/2009 +f 
1501/977/2009 666/710/711 667/711/712 +f 667/711/712 1500/976/2011 1501/977/2009 +f 657/698/697 668/969/2012 1498/975/2013 +f 1498/975/2013 667/711/712 657/698/697 +f 669/1204/2014 1497/969/2015 1498/975/2013 +f 1498/975/2013 668/969/2012 669/1204/2014 +f 1489/970/2016 1497/969/2015 669/1204/2014 +f 669/1204/2014 668/969/2012 1489/970/2016 +f 657/698/697 670/713/714 1489/970/2016 +f 1489/970/2016 668/969/2012 657/698/697 +f 671/712/713 672/714/715 1491/972/2017 +f 1491/972/2017 670/713/714 671/712/713 +f 672/714/715 673/715/716 1494/974/2018 +f 1494/974/2018 1493/973/2019 672/714/715 +f 682/723/725 1609/727/729 1612/1019/2020 +f 1612/1019/2020 1611/724/726 682/723/725 +f 681/695/694 1609/727/729 682/723/725 +f 688/1205/2021 1607/1206/2022 686/729/731 +f 686/729/731 1605/731/733 688/1205/2021 +f 687/730/732 1602/733/735 1604/1018/2023 +f 1604/1018/2023 1605/731/733 687/730/732 +f 1607/1206/2022 1606/729/2024 685/728/730 +f 685/728/730 686/729/731 1607/1206/2022 +f 1602/733/735 689/732/734 690/735/737 +f 690/735/737 1600/1016/2025 1602/733/735 +f 690/735/737 691/734/736 1596/1014/2026 +f 1596/1014/2026 699/1015/2027 690/735/737 +f 691/734/736 692/736/738 1596/1014/2026 +f 1596/1014/2026 692/736/738 693/739/741 +f 693/739/741 1594/736/2028 1596/1014/2026 +f 692/736/738 696/741/743 1595/1207/2029 +f 1595/1207/2029 695/737/739 692/736/738 +f 696/741/743 697/740/742 1593/1013/2030 +f 1593/1013/2030 1592/740/2031 696/741/743 +f 698/742/744 700/1011/2032 1588/1010/2033 +f 1588/1010/2033 697/740/742 698/742/744 +f 1599/1017/2034 690/735/737 699/1015/2027 +f 699/1015/2027 1598/735/2035 1599/1017/2034 +f 1676/692/691 698/742/744 644/682/680 +f 644/682/680 648/686/685 1676/692/691 +f 698/742/744 1676/692/691 700/1011/2032 +f 700/1011/2032 1676/692/691 701/1208/2036 +f 701/1208/2036 1589/1012/2037 700/1011/2032 +f 702/1209/2038 703/1210/2039 707/1211/2040 +f 707/1211/2040 708/1212/2041 702/1209/2038 +f 708/1212/2041 707/1211/2040 703/1213/2039 +f 703/1213/2039 704/1214/2042 708/1212/2041 +f 705/745/747 521/1215/2043 706/746/748 +f 742/1216/2044 743/1217/2045 524/1218/2046 +f 524/1218/2046 522/1219/2047 742/1216/2044 +f 525/1220/2048 524/1221/2046 743/1217/2045 +f 743/1217/2045 742/1216/2044 525/1220/2048 +f 709/1222/2049 710/1223/2050 825/1224/2051 +f 825/1224/2051 638/1225/2052 709/1222/2049 +f 553/1226/2053 710/1223/2050 709/1222/2049 +f 712/1227/2054 556/1228/2055 558/1229/2056 +f 558/1229/2056 711/1230/2057 712/1227/2054 +f 711/1231/2057 558/1232/2056 638/1233/2058 +f 638/1233/2058 712/1234/2059 711/1231/2057 +f 508/301/296 264/1082/1802 714/1235/2060 +f 714/1235/2060 1523/1236/2061 508/301/296 +f 1360/1237/2062 508/301/296 1523/1236/2061 +f 1527/984/2063 1528/753/755 503/748/750 +f 503/748/750 718/751/753 1527/984/2063 +f 720/1238/2064 1530/755/757 499/552/544 +f 499/552/544 1529/754/756 720/1238/2064 +f 1531/553/545 499/552/544 722/757/759 +f 722/757/759 1348/552/2065 1531/553/545 +f 551/1239/2066 553/1240/2067 723/1241/2068 +f 553/1242/2069 637/1243/2070 551/1244/2071 +f 633/1245/2072 632/1246/2073 730/1247/2074 +f 730/1247/2074 724/1248/2075 633/1245/2072 +f 725/761/763 726/760/762 633/1245/2072 +f 633/1245/2072 724/1248/2075 725/761/763 +f 729/1249/2076 635/1250/2077 726/760/2078 +f 726/760/2078 728/759/2079 729/1249/2076 +f 729/1249/2076 637/1251/2080 635/1250/2077 +f 542/985/2081 728/988/2082 727/987/2083 +f 727/987/2083 725/986/2084 542/985/2081 +f 542/985/2081 637/990/2085 729/989/2086 +f 729/989/2086 728/988/2082 542/985/2081 +f 541/1252/2087 542/985/2081 725/986/2084 +f 725/986/2084 
724/1253/2088 541/1252/2087 +f 541/1252/2087 724/1253/2088 730/1254/2089 +f 730/1254/2089 632/1255/2090 541/1252/2087 +f 732/1256/2091 734/1257/2092 733/765/2093 +f 733/765/2093 731/764/2094 732/1256/2091 +f 736/993/2095 528/992/2096 737/991/2097 +f 737/991/2097 739/994/2098 736/993/2095 +f 736/993/2095 740/1258/2099 528/992/2096 +f 740/1258/2099 738/1259/2100 527/1260/2101 +f 527/1260/2101 528/992/2096 740/1258/2099 +f 741/1261/2102 527/1260/2101 738/1259/2100 +f 739/995/2103 738/998/2104 740/997/2105 +f 740/997/2105 736/996/2106 739/995/2103 +f 526/1262/2107 532/1263/2108 744/1264/2109 +f 744/1264/2109 519/1265/2110 526/1262/2107 +f 744/1264/2109 532/1266/2108 732/1267/2111 +f 732/1267/2111 519/1265/2110 744/1264/2109 +f 745/1268/2112 747/1269/2113 560/1270/2114 +f 560/1270/2114 746/1271/2115 745/1268/2112 +f 746/1272/2116 747/1273/2117 745/1274/2118 +f 754/1006/2119 753/767/769 748/766/768 +f 748/766/768 750/1007/771 754/1006/2119 +f 1561/999/2120 752/1002/1920 751/1001/2121 +f 751/1001/2121 1562/1000/2122 1561/999/2120 +f 751/1001/2121 753/1005/2123 1565/1004/2124 +f 1565/1004/2124 1564/1003/2125 751/1001/2121 +f 749/1138/1919 753/1005/2123 751/1001/2121 +f 751/1001/2121 752/1002/1920 749/1138/1919 +f 754/604/596 552/1127/1910 1565/1004/2124 +f 1565/1004/2124 753/1005/2123 754/604/596 +f 826/1275/2126 778/1276/2127 755/1277/2128 +f 755/1277/2128 564/1278/2129 826/1275/2126 +f 757/770/772 756/773/775 763/1279/2130 +f 763/1279/2130 759/923/2131 757/770/772 +f 756/773/775 455/507/499 766/1280/2132 +f 766/1280/2132 763/1279/2130 756/773/775 +f 760/1111/2133 766/1280/2132 455/507/499 +f 455/507/499 462/511/2134 760/1111/2133 +f 424/776/781 774/1281/2135 425/1099/2136 +f 425/1099/2136 761/774/778 424/776/781 +f 767/1102/1842 774/1281/2137 424/776/1834 +f 763/777/782 764/780/785 759/1282/2138 +f 760/930/2139 772/521/791 767/781/786 +f 767/781/786 766/778/783 760/930/2139 +f 425/1283/2140 774/787/793 775/789/795 +f 773/786/792 772/521/791 464/520/2141 +f 464/520/2141 426/1284/2142 773/786/792 +f 466/927/2143 772/521/791 760/930/2139 +f 466/927/2143 461/1285/2144 465/518/2145 +f 465/518/2145 772/521/791 466/927/2143 +f 1485/1286/2146 781/796/802 780/793/799 +f 780/793/799 779/792/798 1485/1286/2146 +f 780/793/799 755/966/2000 778/790/796 +f 817/827/2147 786/826/2148 784/1287/2149 +f 784/1287/2149 371/1288/2150 817/827/2147 +f 839/1289/2151 797/807/2152 817/827/2153 +f 817/827/2153 371/1288/2154 839/1289/2151 +f 371/1288/2154 785/1290/2155 839/1289/2151 +f 789/801/807 790/805/414 373/1020/2156 +f 373/1020/2156 788/802/803 789/801/807 +f 791/811/816 792/1022/2157 377/1021/2158 +f 377/1021/2158 790/805/414 791/811/816 +f 791/811/816 795/429/818 379/1023/2159 +f 379/1023/2159 792/1022/2157 791/811/816 +f 794/804/2160 793/803/808 791/811/816 +f 791/811/816 790/805/414 794/804/2160 +f 798/808/813 1632/1024/2161 1631/812/819 +f 1631/812/819 796/800/806 798/808/813 +f 1631/812/819 1633/810/2162 840/1291/2163 +f 840/1291/2163 799/810/815 1631/812/819 +f 799/810/815 1680/1292/2164 804/1028/2165 +f 804/1028/2165 381/430/817 799/810/815 +f 381/430/817 804/1028/2165 380/1025/2166 +f 380/1025/2166 804/1028/2165 800/1027/2167 +f 800/1027/2167 338/1026/2168 380/1025/2166 +f 1637/1031/2169 805/815/822 801/814/821 +f 801/814/821 800/1027/2167 1637/1031/2169 +f 800/1027/2167 1677/1030/2170 802/1029/2171 +f 802/1029/2171 1637/1031/2169 800/1027/2167 +f 810/819/826 808/818/825 334/1293/2172 +f 811/821/828 813/824/831 334/1293/2172 +f 334/1293/2172 808/818/825 811/821/828 +f 807/817/824 812/1294/2173 336/816/823 +f 
334/1293/2172 813/824/831 335/1295/2174 +f 836/822/829 838/1296/2175 332/1297/2176 +f 332/1297/2176 814/823/830 836/822/829 +f 333/1298/2177 836/822/829 811/821/828 +f 811/821/828 1181/1299/2178 333/1298/2177 +f 836/822/829 333/1298/2177 837/1300/2179 +f 839/1289/2151 823/832/2180 816/809/2181 +f 816/809/2181 797/807/2152 839/1289/2151 +f 817/827/2182 797/807/2182 818/806/2182 +f 819/828/838 822/1041/2183 832/1040/2184 +f 832/1040/2184 820/829/839 819/828/838 +f 829/830/840 827/1037/2185 1632/1024/2161 +f 1632/1024/2161 798/808/813 829/830/840 +f 1663/1034/2186 1654/1037/2187 1632/1024/2161 +f 1632/1024/2161 827/1037/2185 1663/1034/2186 +f 1663/1034/2186 827/1037/2185 831/1036/2188 +f 831/1036/2188 363/1035/2189 1663/1034/2186 +f 827/1037/2185 829/830/840 828/1301/847 +f 828/1301/847 821/1302/2190 827/1037/2191 +f 819/828/2192 816/809/2193 823/832/2194 +f 823/832/2194 822/831/2195 819/828/2192 +f 713/1132/2196 638/1303/2197 825/1304/2198 +f 825/1304/2198 710/1133/2199 713/1132/2196 +f 778/1305/2200 826/1306/2201 564/1307/2202 +f 217/143/2203 621/1308/2203 620/131/2203 +f 1214/916/1821 1215/835/2204 1663/1309/2205 +f 1663/1309/2205 363/835/845 1214/916/1821 +f 820/1038/2206 367/409/405 364/404/400 +f 364/404/400 365/408/404 820/1038/2206 +f 832/1310/2207 822/1311/2208 367/409/405 +f 367/409/405 820/1038/2206 832/1310/2207 +f 365/408/404 830/1039/2209 820/1038/2206 +f 311/349/345 310/347/2210 313/351/2211 +f 313/351/2211 312/350/346 311/349/345 +f 837/1300/2212 333/1312/2213 838/1296/2212 +f 838/1296/2212 836/822/2212 837/1300/2212 +f 806/813/2214 337/912/2214 801/814/2214 +f 842/1042/2215 921/1044/2216 920/1043/2217 +f 920/1043/2217 843/1/858 842/1042/2215 +f 875/844/865 842/1042/2215 843/1/858 +f 843/1/858 846/2/861 875/844/865 +f 920/1043/2217 910/1045/2218 844/4/859 +f 844/4/859 843/1/858 920/1043/2217 +f 919/66/2219 845/1047/860 844/1046/859 +f 844/1046/859 910/64/2218 919/66/2219 +f 848/5/862 845/3/860 919/6/2219 +f 854/13/871 853/16/870 961/15/2220 +f 961/15/2220 855/14/2221 854/13/871 +f 961/15/2220 853/16/870 849/7/866 +f 849/7/866 960/11/868 961/15/2220 +f 955/17/872 854/13/871 856/18/2222 +f 857/1048/2223 957/1049/2224 955/17/872 +f 955/17/872 856/18/2222 857/1048/2223 +f 854/13/871 855/14/2221 959/19/2225 +f 959/19/2225 856/18/2222 854/13/871 +f 856/18/2222 959/19/2225 857/1048/2223 +f 858/21/875 848/5/862 919/6/2219 +f 919/6/2219 923/40/2226 858/21/875 +f 861/23/877 940/27/2227 951/154/2228 +f 951/154/2228 851/12/867 861/23/877 +f 940/27/2227 861/23/877 864/25/880 +f 864/25/880 943/28/885 940/27/2227 +f 865/29/884 866/35/883 943/28/885 +f 938/38/886 862/24/878 859/10/873 +f 859/10/873 962/20/876 938/38/886 +f 868/858/887 941/88/2229 944/89/882 +f 944/89/882 863/1050/879 868/858/887 +f 963/1052/2230 938/38/2231 962/20/876 +f 962/20/876 869/43/2232 963/1052/2230 +f 870/39/888 869/43/2232 962/20/876 +f 962/20/876 858/21/875 870/39/888 +f 870/39/888 858/21/875 923/40/2226 +f 923/854/2226 935/47/2233 871/168/889 +f 871/168/889 870/1053/888 923/854/2226 +f 874/41/2234 869/43/2232 870/39/888 +f 870/39/888 872/42/890 874/41/2234 +f 936/48/2235 869/43/2232 874/41/2234 +f 936/44/2235 935/47/2236 963/46/2230 +f 963/46/2230 869/45/2232 936/44/2235 +f 873/49/2237 937/51/2238 936/44/2235 +f 936/44/2235 874/50/2234 873/49/2237 +f 876/52/921 877/91/897 921/90/2239 +f 921/90/2239 842/55/930 876/52/921 +f 878/94/896 1130/93/2240 921/92/2239 +f 921/92/2239 877/95/897 878/94/896 +f 1129/97/895 879/98/894 882/849/902 +f 890/109/912 892/119/915 1054/111/2241 +f 1054/111/2241 886/102/906 890/109/912 
+f 1054/111/2241 1450/112/2242 887/105/907 +f 887/105/907 886/102/906 1054/111/2241 +f 903/53/925 875/54/929 847/1054/2243 +f 847/1054/2243 907/60/927 903/53/925 +f 928/78/952 933/80/959 950/83/962 +f 950/83/962 927/79/954 928/78/952 +f 932/85/2244 948/84/2245 950/83/962 +f 950/83/962 934/82/961 932/85/2244 +f 914/71/941 868/858/972 938/1055/2246 +f 938/1055/2246 963/46/963 914/71/941 +f 868/858/972 914/71/941 915/72/943 +f 915/72/943 939/86/969 868/858/972 +f 951/1056/2247 940/87/970 916/68/939 +f 916/68/939 927/79/954 951/1056/2247 +f 866/33/2248 944/89/975 942/32/973 +f 942/32/973 943/31/974 866/33/2248 +f 963/46/963 924/853/948 922/74/942 +f 922/74/942 914/71/941 963/46/963 +f 945/146/976 946/148/977 929/147/2249 +f 929/147/2249 946/148/977 947/150/978 +f 947/150/978 930/149/2250 929/147/2249 +f 947/150/978 946/148/977 927/151/979 +f 930/149/2250 947/150/978 948/153/981 +f 948/153/981 949/152/2251 930/149/2250 +f 932/1057/2252 949/152/2251 948/153/981 +f 953/157/983 907/60/927 847/1054/2243 +f 847/1054/2243 852/156/984 953/157/983 +f 857/163/2253 959/164/2254 958/162/990 +f 958/162/990 957/159/989 857/163/2253 +f 855/167/2255 961/166/992 954/158/986 +f 954/158/986 958/162/990 855/167/2255 +f 958/162/990 959/164/2254 855/167/2255 +f 964/1058/2256 892/113/2257 893/1060/2258 +f 893/1060/2258 142/1059/2259 964/1058/2256 +f 142/1059/2259 893/1060/2258 1120/1062/2260 +f 1120/1062/2260 965/1061/2261 142/1059/2259 +f 965/1061/2261 1120/1062/2260 966/171/993 +f 968/170/994 966/171/993 1120/1062/2260 +f 1120/1062/2260 969/174/997 968/170/994 +f 979/182/1003 981/860/2262 1327/183/1006 +f 1327/183/1006 973/177/1001 979/182/1003 +f 1428/1063/2263 982/862/1016 1327/183/1012 +f 1327/183/1012 980/642/1011 1428/1063/2263 +f 983/531/1015 976/532/2264 1327/183/1012 +f 1327/183/1012 982/862/1016 983/531/1015 +f 988/184/1020 990/187/1024 162/186/1383 +f 162/186/1383 987/185/1019 988/184/1020 +f 165/1064/2265 992/863/1022 988/184/1020 +f 988/184/1020 993/648/2266 165/1064/2265 +f 995/189/1025 996/192/1028 167/191/1026 +f 169/193/1032 997/195/2267 996/192/1028 +f 996/192/1028 998/194/1027 169/193/1032 +f 175/197/2268 1006/200/1037 1007/199/1045 +f 1007/199/1045 1008/198/1041 175/197/2268 +f 1008/198/1041 1010/873/1040 178/1066/2269 +f 178/1066/2269 1009/1065/2270 1008/198/1041 +f 179/201/1042 1008/198/1041 1007/199/1045 +f 1012/202/1043 1006/200/1037 1421/204/2271 +f 1421/204/2271 1439/1184/2272 1012/202/1043 +f 1020/208/1053 1021/1068/2273 187/1067/2274 +f 187/1067/2274 1019/207/1052 1020/208/1053 +f 188/210/2275 1020/208/1053 1016/209/1048 +f 1016/209/1048 1022/211/1054 188/210/2275 +f 190/212/2276 188/210/2275 1022/211/1054 +f 1022/211/1054 191/213/1058 190/212/2276 +f 194/214/2277 193/217/1061 1027/216/1060 +f 1027/216/1060 195/215/2278 194/214/2277 +f 1027/216/1060 1028/218/1064 195/215/2278 +f 197/220/2279 1029/219/2280 1028/218/1064 +f 1028/218/1064 198/221/1063 197/220/2279 +f 200/225/2281 1031/224/1065 1033/222/1068 +f 1033/222/1068 201/226/2282 200/225/2281 +f 1033/222/1068 1034/885/1070 1442/1069/2283 +f 1442/1069/2283 1444/1076/2284 1033/222/1068 +f 1442/1069/2283 1034/885/1070 1409/887/1074 +f 1409/887/1074 1035/227/1073 1442/1069/2283 +f 1440/1070/2285 1442/1069/2283 1035/227/1073 +f 1035/227/1073 1040/231/1080 1440/1070/2285 +f 1045/236/1085 1037/230/1072 1424/888/1076 +f 1445/238/2286 1040/231/1080 1043/233/1082 +f 1043/233/1082 1446/239/2287 1445/238/2286 +f 1440/1070/2285 1040/231/1080 1445/238/2286 +f 1445/238/2286 1443/1071/2288 1440/1070/2285 +f 1132/244/2289 1451/243/2290 1050/241/1090 
+f 1050/241/1090 1038/229/1077 1132/244/2289 +f 1038/229/1077 1044/237/1083 1132/244/2289 +f 1051/245/1091 1050/241/1090 1451/243/2290 +f 1451/243/2290 1450/248/2291 1051/245/1091 +f 1450/248/2291 1054/115/2292 1053/116/1094 +f 1053/116/1094 1051/245/1091 1450/248/2291 +f 1053/116/1094 1054/115/2292 892/113/2257 +f 892/113/2257 1056/250/1095 1053/116/1094 +f 1061/893/1102 1056/250/1095 892/113/2257 +f 892/113/2257 964/1058/2256 1061/893/1102 +f 1070/258/1112 233/260/2293 1072/897/1114 +f 1453/1073/2294 1071/262/2295 235/1072/2296 +f 235/1072/2296 1369/1074/2297 1453/1073/2294 +f 1449/284/1133 1446/239/2287 1043/233/1082 +f 1043/233/1082 1077/268/1119 1449/284/1133 +f 1079/270/1122 1448/271/1121 1449/284/1133 +f 245/276/1130 1085/900/1128 1082/275/1124 +f 1082/275/1124 1081/273/1123 245/276/1130 +f 246/1075/2298 1086/278/2299 1080/272/1120 +f 1080/272/1120 1083/274/1125 246/1075/2298 +f 201/226/2282 1033/222/1068 1080/272/1120 +f 1080/272/1120 1086/278/2299 201/226/2282 +f 1444/1076/2284 1443/1071/2288 1080/272/1120 +f 1080/272/1120 1033/222/1068 1444/1076/2284 +f 1443/1071/2288 1445/238/2286 1448/271/1121 +f 1448/271/1121 1080/272/1120 1443/1071/2288 +f 249/279/2300 245/276/1130 1087/277/1129 +f 1087/277/1129 250/280/2301 249/279/2300 +f 254/288/2302 1093/287/2303 1090/285/1135 +f 1090/285/1135 1094/289/1158 254/288/2302 +f 254/288/2302 1094/289/1158 1095/291/1146 +f 1095/291/1146 1096/290/2304 254/288/2302 +f 1097/1078/2305 257/1077/2306 1095/291/1146 +f 1095/291/1146 258/293/2307 1097/1078/2305 +f 1098/292/1142 260/294/1141 258/293/2307 +f 258/293/2307 1095/291/1146 1098/292/1142 +f 1100/1079/2308 262/297/2309 1099/295/1140 +f 1099/295/1140 263/1080/2310 1100/1079/2308 +f 1101/1081/2311 263/1080/2310 1099/295/1140 +f 1099/295/1140 264/1082/2312 1101/1081/2311 +f 1102/301/1149 264/1082/2312 1099/295/1140 +f 1361/940/1153 1359/560/2313 1102/1083/1149 +f 1102/1083/1149 1106/1084/1148 1361/940/1153 +f 1335/1086/2314 1362/1085/2315 1108/304/1152 +f 1108/304/1152 1111/307/1155 1335/1086/2314 +f 1044/142/2316 1116/141/1160 1117/136/1159 +f 1117/136/1159 1132/143/2317 1044/142/2316 +f 1118/904/1161 1116/141/1160 1048/145/2318 +f 1118/904/1161 1048/145/2318 969/144/1164 +f 969/144/1164 1119/906/1163 1118/904/1161 +f 1124/129/1167 1125/130/1169 884/132/2319 +f 884/132/2319 1452/131/2320 1124/129/1167 +f 884/132/2319 1125/130/1169 1126/127/1168 +f 1126/127/1168 882/133/1172 884/132/2319 +f 1132/143/2321 1117/136/1159 1124/129/1167 +f 1124/129/1167 1452/131/2320 1132/143/2321 +f 1127/134/1171 878/94/1173 1129/135/2322 +f 1129/135/2322 882/133/1172 1127/134/1171 +f 1132/143/2323 1452/131/2323 1451/1308/2323 +f 295/331/2324 1147/330/1187 1145/328/1186 +f 1145/328/1186 299/332/1389 295/331/2324 +f 296/334/1200 1148/333/1191 1147/330/1187 +f 1147/330/1187 295/331/2324 296/334/1200 +f 1156/344/1198 1159/346/1202 1671/347/2325 +f 1671/347/2325 1155/343/1197 1156/344/1198 +f 1671/347/2325 1672/348/1222 1154/341/1195 +f 1154/341/1195 1155/343/1197 1671/347/2325 +f 1159/346/1202 1160/349/1204 1671/347/2325 +f 1670/351/1206 1161/354/1205 314/353/2326 +f 314/353/2326 1163/352/1212 1670/351/1206 +f 314/353/2326 1165/356/1207 1164/355/1210 +f 1164/355/1210 1163/352/1212 314/353/2326 +f 1167/357/1214 1670/351/1206 1163/352/1212 +f 1166/359/1209 318/362/1208 321/361/2327 +f 321/361/2327 1169/360/1215 1166/359/1209 +f 1179/911/1229 1169/360/1215 321/361/2327 +f 1170/363/1216 1169/360/1215 1646/910/1228 +f 1179/911/1229 321/361/2327 1181/1088/2328 +f 1181/1088/2328 1648/1087/2329 1179/911/1229 +f 1182/1089/2330 
1644/373/1226 1170/363/1216 +f 1170/363/1216 1646/910/1228 1182/1089/2330 +f 1645/376/2331 1183/374/1230 1178/372/1225 +f 1178/372/1225 1644/373/1226 1645/376/2331 +f 1643/1090/2332 1641/1091/2333 1177/366/1218 +f 1177/366/1218 1184/375/1231 1643/1090/2332 +f 1641/1091/2333 1176/368/1219 1172/365/1217 +f 1172/365/1217 1177/366/1218 1641/1091/2333 +f 1176/368/1219 1641/1091/2333 1185/1092/2334 +f 1185/1092/2334 1640/377/2335 1176/368/1219 +f 1640/377/2335 1186/371/1232 1173/367/1220 +f 1173/367/1220 1176/368/1219 1640/377/2335 +f 1636/1026/1235 1174/1093/1221 1188/814/1224 +f 1188/814/1224 1635/1027/2336 1636/1026/1235 +f 1191/381/1238 1192/342/1196 1672/348/1222 +f 1672/348/1222 1174/369/1221 1191/381/1238 +f 1154/341/1195 1672/348/1222 1192/342/1196 +f 360/405/1260 1211/401/1259 1213/403/1265 +f 1213/403/1265 1214/916/1264 360/405/1260 +f 1218/406/1267 1223/1095/2337 1656/1094/2338 +f 1656/1094/2338 1219/409/1270 1218/406/1267 +f 1220/410/1271 1221/411/1272 1613/414/2339 +f 1613/414/2339 1615/413/2340 1220/410/1271 +f 1221/411/1272 1616/412/1273 1614/1096/2341 +f 1614/1096/2341 1613/414/2339 1221/411/1272 +f 1223/1095/2342 1218/406/1267 1220/410/1271 +f 1650/1033/2343 1649/1097/2344 1223/1095/2345 +f 1223/1095/2345 1220/410/2346 1650/1033/2343 +f 1616/412/1273 1222/402/1262 1224/797/2347 +f 1224/797/2347 1617/798/2348 1616/412/1273 +f 1619/1020/1275 1618/801/1627 1224/802/2347 +f 1224/802/2347 1222/1313/1262 1619/1020/1275 +f 1622/420/1277 1226/419/1276 1227/423/2349 +f 1227/423/2349 1624/422/2350 1622/420/1277 +f 1226/419/1276 1625/425/1278 1624/424/2350 +f 1624/424/2350 1227/423/2349 1226/419/1276 +f 1228/918/1280 1625/425/1278 1201/391/1247 +f 1201/391/1247 1199/388/1245 1634/427/2351 +f 1634/427/2351 1229/426/1281 1201/391/1247 +f 1196/384/1240 1189/379/1236 1634/427/2351 +f 1634/427/2351 1199/388/1245 1196/384/1240 +f 1634/427/2351 1189/379/1236 1636/380/1235 +f 1252/453/1303 1293/455/2352 1251/452/1302 +f 1252/453/1303 1255/456/1304 1341/1098/2353 +f 1341/1098/2353 1293/455/2352 1252/453/1303 +f 1253/454/1310 1251/452/1302 1293/455/2352 +f 1293/455/2352 1254/461/2354 1253/454/1310 +f 1253/454/1310 1254/461/2354 1295/460/2355 +f 1295/460/2355 1257/462/1311 1253/454/1310 +f 1260/465/1312 1257/462/1311 1295/460/2355 +f 1295/460/2355 1261/467/1316 1260/465/1312 +f 1264/469/1313 1263/470/1317 1315/471/1320 +f 1584/1099/2356 1271/774/1327 1270/775/1326 +f 1270/775/1326 1583/1100/2357 1584/1099/2356 +f 1273/919/1328 1266/473/1318 1315/471/1320 +f 1315/471/1320 1316/1101/2358 1273/919/1328 +f 1273/786/1328 1574/787/2359 1583/789/2357 +f 1583/789/2357 1270/788/1326 1273/786/1328 +f 1577/1102/2360 1578/1103/2361 1268/474/1321 +f 1268/474/1321 1272/776/1322 1577/1102/2360 +f 1274/478/1330 1268/474/1321 1578/1103/2361 +f 1278/480/1332 1275/477/1329 1279/481/2362 +f 1579/1105/2363 1280/920/1336 1277/479/1331 +f 1277/479/1331 1278/480/1332 1579/1105/2363 +f 1579/1105/2363 1278/480/1332 1279/481/2364 +f 1279/481/2364 1581/1104/2365 1579/1105/2363 +f 1254/492/2366 1293/493/1408 1290/489/1350 +f 1290/489/1350 1291/490/1351 1254/492/2366 +f 1294/491/1352 1295/494/2367 1254/492/2366 +f 1254/492/2366 1291/490/1351 1294/491/1352 +f 1294/491/1352 1296/497/1355 1261/1106/2368 +f 1261/1106/2368 1295/494/2367 1294/491/1352 +f 1261/1106/2368 1296/497/1355 1297/498/1354 +f 1297/498/1354 1263/1107/2369 1261/1106/2368 +f 1312/514/1373 1303/500/1359 1587/516/2370 +f 1587/516/2370 1317/515/2371 1312/514/1373 +f 1587/516/2370 1303/500/1359 1301/501/1358 +f 1301/501/1358 1318/505/1364 1587/516/2370 +f 
1586/1108/2372 1573/930/1378 1313/929/1369 +f 1313/929/1369 1310/1109/1374 1586/1108/2372 +f 1572/511/2373 1573/1111/2374 1586/1110/2372 +f 1586/1110/2372 1310/924/1374 1572/511/2373 +f 1263/1107/2369 1297/498/1354 1314/512/1372 +f 1314/512/1372 1315/926/1377 1263/1107/2369 +f 1317/518/2371 1582/521/2375 1585/520/1375 +f 1585/520/1375 1312/519/1373 1317/518/2371 +f 468/523/2376 1322/522/1386 987/185/1019 +f 987/185/1019 1319/524/1385 468/523/2376 +f 1322/522/1386 468/523/2376 471/526/2377 +f 471/526/2377 472/525/2378 1322/522/1386 +f 473/527/2379 1329/528/2380 1322/522/1386 +f 1322/522/1386 472/525/2378 473/527/2379 +f 1323/326/1183 1322/522/1386 1329/528/2381 +f 1329/528/2381 1324/529/1387 1323/326/1183 +f 480/1114/2382 477/1113/2383 1325/530/1388 +f 1325/530/1388 1324/529/1387 480/1114/2382 +f 985/321/1009 975/322/2384 976/532/2264 +f 976/532/2264 983/531/1015 985/321/1009 +f 1111/1115/2385 1331/535/1395 1330/533/1394 +f 1330/533/1394 1335/936/1400 1111/1115/2385 +f 1331/535/1395 1111/1115/2385 1076/266/2386 +f 1076/266/2386 1333/267/1397 1331/535/1395 +f 1075/264/2387 1073/1116/2388 1334/536/1398 +f 1334/536/1398 1333/267/1397 1075/264/2387 +f 1073/1116/2388 1068/537/1399 1334/536/1398 +f 1339/540/1405 1340/541/1406 1524/1117/2389 +f 1524/1117/2389 1336/543/1401 1339/540/1405 +f 1344/1118/2390 496/551/1447 1343/547/1446 +f 1343/547/1446 494/548/2391 1344/1118/2390 +f 495/545/2392 494/548/2391 1343/547/1446 +f 1343/547/1446 1345/546/1450 495/545/2392 +f 1346/549/1444 496/551/1447 1347/550/1411 +f 1347/550/1411 1554/583/1441 1346/549/1444 +f 496/551/1447 1531/553/2393 1348/552/1412 +f 1348/552/1412 1347/550/1411 496/551/1447 +f 1548/983/1567 1547/1119/2394 1353/939/1415 +f 1353/939/1415 1355/749/1417 1548/983/1567 +f 1356/942/1423 1357/557/1421 1549/558/2395 +f 1549/558/2395 1548/983/1567 1356/942/1423 +f 1549/558/2395 1357/557/1421 1358/559/1420 +f 1358/559/1420 1545/585/2396 1549/558/2395 +f 1360/941/1422 1523/1314/2397 1102/1083/2398 +f 1102/1083/2398 1359/560/1418 1360/941/1422 +f 1513/577/2399 1364/563/1428 1366/567/1430 +f 1366/567/1430 1518/1120/2400 1513/577/2399 +f 1518/1120/2400 1366/567/1430 1370/572/1437 +f 1370/572/1437 1371/944/1436 1518/1120/2400 +f 1370/572/1437 1373/575/1505 1553/574/1508 +f 1553/574/1508 1372/573/1435 1370/572/1437 +f 1374/576/1438 1364/563/1428 1513/577/2399 +f 1513/577/2399 1514/1315/2401 1374/576/1438 +f 1515/579/2402 1516/578/1439 1374/576/1438 +f 1374/576/1438 1514/580/2403 1515/579/2402 +f 1515/579/2402 1518/582/2404 1375/581/2405 +f 1375/581/2405 1516/578/1439 1515/579/2402 +f 1518/582/2404 1377/1121/2406 1376/584/1440 +f 1376/584/1440 1375/581/2405 1518/582/2404 +f 1372/573/1435 1553/574/1508 1554/583/1441 +f 1377/1121/2406 1372/573/1435 1554/583/1441 +f 1554/583/1441 1376/584/1440 1377/1121/2406 +f 1546/586/2407 1516/578/1439 1352/938/1414 +f 1352/938/1414 1353/939/1415 1546/586/2407 +f 1545/585/2396 1516/578/1439 1546/586/2407 +f 1358/559/1420 1363/561/1426 1516/578/1439 +f 1516/578/1439 1545/585/2396 1358/559/1420 +f 1378/587/1445 1346/549/1444 1554/583/1441 +f 1554/583/1441 1556/588/2408 1378/587/1445 +f 1381/591/1448 1378/587/1445 1556/588/2408 +f 1381/591/1448 1556/588/2408 1462/1122/2409 +f 1462/1122/2409 1382/592/1449 1381/591/1448 +f 1382/592/1449 1383/594/1453 535/593/2410 +f 535/593/2410 1345/546/1450 1382/592/1449 +f 1383/594/1453 1382/592/1449 1462/1122/2409 +f 1462/1122/2409 1464/946/1454 1383/594/1453 +f 1384/1123/2411 535/593/2410 1383/594/1453 +f 1383/594/1453 537/1124/2412 1384/1123/2411 +f 1386/595/1451 1387/947/1457 
539/1125/2413 +f 539/1125/2413 1385/945/1452 1386/595/1451 +f 540/597/2414 1388/596/1455 1386/595/1451 +f 1386/595/1451 1389/598/2415 540/597/2414 +f 1537/1126/2416 1391/949/1461 1390/950/1460 +f 1390/950/1460 1389/598/2417 1537/1126/2416 +f 1394/599/1462 546/602/2418 545/601/2419 +f 545/601/2419 1393/600/1463 1394/599/1462 +f 1395/603/2420 546/602/2418 1394/599/1462 +f 548/605/2421 546/602/2418 1395/603/2420 +f 1395/603/2420 549/606/2422 548/605/2421 +f 1395/603/2420 1394/599/1462 1568/604/2423 +f 1568/604/2423 550/607/2424 1395/603/2420 +f 1534/1128/2425 1394/599/1462 1391/949/1461 +f 1391/949/1461 1537/1126/2416 1534/1128/2425 +f 1568/604/2423 1394/599/1462 1534/1128/2425 +f 1534/1128/2425 1532/1129/2426 1568/604/2423 +f 552/1127/2427 550/607/2424 1568/604/2423 +f 1532/1129/2426 1522/1131/2428 1396/1130/2429 +f 1396/1130/2429 1568/604/2423 1532/1129/2426 +f 1659/1134/2430 1658/1133/2431 1522/1132/2428 +f 1522/1132/2428 1532/1135/2426 1659/1134/2430 +f 1522/1131/2428 1657/1137/2432 1560/1136/2433 +f 1560/1136/2433 1396/1130/2429 1522/1131/2428 +f 1396/1316/2429 1560/1317/2433 1559/1318/2434 +f 1559/1318/2434 1567/1319/2435 1396/1316/2429 +f 1560/1136/2433 1398/610/1465 1397/608/1464 +f 1397/608/1464 1559/1138/2434 1560/1136/2433 +f 555/617/2436 752/1002/1586 1559/1138/2434 +f 1559/1138/2434 1397/608/1464 555/617/2436 +f 1557/678/2437 1406/951/1471 1399/609/1466 +f 1399/609/1466 1398/610/1465 1557/678/2437 +f 1398/610/1465 1521/1320/2438 1400/677/2439 +f 1400/677/2439 1557/678/2437 1398/610/1465 +f 1400/677/2439 1657/676/2440 1401/614/2441 +f 1401/614/2441 1557/678/2437 1400/677/2439 +f 1402/611/1467 1557/678/2437 1401/614/2441 +f 1401/614/2441 1472/613/2442 1402/611/1467 +f 1397/608/1464 1399/609/1466 562/616/2443 +f 562/616/2443 563/615/2444 1397/608/1464 +f 1475/612/1468 1402/611/1467 1472/613/2442 +f 1472/613/2442 1660/1321/2445 1475/612/1468 +f 567/621/2446 1403/620/1472 1405/618/1470 +f 1405/618/1470 568/622/1474 567/621/2446 +f 1404/619/2447 1484/1141/2448 569/1140/2449 +f 569/1140/2449 1408/1139/2450 1404/619/2447 +f 1404/619/1469 1408/1139/2451 570/1142/2452 +f 570/1142/2452 1407/952/1473 1404/619/1469 +f 1411/626/2453 1409/623/2454 1412/1144/2455 +f 1412/1144/2455 1032/1143/2456 1411/626/2453 +f 1409/623/2454 1414/1146/2457 1413/1145/2458 +f 1413/1145/2458 1412/1144/2455 1409/623/2454 +f 1409/623/2454 1411/626/2453 1410/625/2459 +f 1410/625/2459 1036/624/2460 1409/623/2454 +f 1034/1147/2461 1414/1146/2457 1409/623/2454 +f 1032/1148/2462 1036/1151/2463 1410/1150/2464 +f 1410/1150/2464 1411/1149/2465 1032/1148/2462 +f 1032/1148/2466 1412/1154/2467 1413/1153/2468 +f 1413/1153/2468 1414/1152/2469 1032/1148/2466 +f 1023/628/2470 1416/1157/2471 1415/1156/2472 +f 1415/1156/2472 1024/1155/2473 1023/628/2470 +f 1023/628/2470 1417/629/2474 1425/1158/2475 +f 1425/1158/2475 1416/1157/2471 1023/628/2470 +f 1422/627/2476 1424/630/2477 1417/629/2474 +f 1417/629/2474 1023/628/2470 1422/627/2476 +f 1023/628/2470 1426/632/2478 1423/631/2479 +f 1423/631/2479 1422/627/2476 1023/628/2470 +f 1418/652/2480 1005/1160/2481 1436/1159/2482 +f 1436/1159/2482 1429/651/2483 1418/652/2480 +f 1419/1163/2484 1437/1162/2485 1005/1161/2481 +f 1005/1161/2481 1418/1164/2480 1419/1163/2484 +f 1005/870/2481 1004/868/2486 1428/1063/2487 +f 1428/1063/2487 1436/1165/2482 1005/870/2481 +f 1437/1166/2488 1438/1169/2489 1420/1168/2490 +f 1420/1168/2490 1421/1167/2491 1437/1166/2488 +f 1421/1171/2491 1420/1168/2490 1438/1169/2489 +f 1438/1169/2489 1439/1170/2492 1421/1171/2491 +f 1425/1172/2493 1417/1173/2494 1424/633/2495 
+f 1424/633/2495 1416/634/2496 1425/1172/2493 +f 1415/635/2497 1416/634/2496 1424/633/2495 +f 1424/633/2495 1024/636/2498 1415/635/2497 +f 984/1176/2499 982/1175/2500 1427/1174/2501 +f 1427/1174/2501 994/1177/2502 984/1176/2499 +f 1428/1178/2503 1004/1179/2504 994/1177/2502 +f 994/1177/2502 1427/1174/2501 1428/1178/2503 +f 1428/1180/2505 1427/1182/2506 982/1181/2507 +f 1006/200/1037 1005/870/1036 1437/205/2508 +f 1437/205/2508 1421/204/2271 1006/200/1037 +f 1429/640/1479 1436/1183/2509 1428/1063/2263 +f 1428/1063/2263 980/642/1011 1429/640/1479 +f 1437/205/2510 1429/640/1479 1431/637/1481 +f 1431/637/1481 1438/643/2511 1437/205/2510 +f 1438/643/2511 1431/637/1481 1432/638/1482 +f 1432/638/1482 1433/644/1483 1438/643/2511 +f 1439/1184/2272 1438/643/2511 1433/644/1483 +f 1433/644/1483 1434/645/1484 1439/1184/2272 +f 1439/1184/2272 1434/645/1484 1426/1185/2512 +f 1426/1185/2512 1012/202/1043 1439/1184/2272 +f 1016/209/1048 1012/202/1043 1426/1185/2512 +f 1426/1185/2512 1023/878/1055 1016/209/1048 +f 1424/888/1076 1426/1185/2512 1434/645/1484 +f 1434/645/1484 1045/236/1085 1424/888/1076 +f 988/184/1020 1435/1186/2513 607/647/2514 +f 607/647/2514 606/646/2515 988/184/1020 +f 993/648/2266 988/184/1020 606/646/2515 +f 988/184/1020 167/191/1026 1435/1186/2513 +f 1419/649/2516 1418/652/2517 1429/651/2518 +f 1429/651/2518 1437/650/2519 1419/649/2516 +f 1446/1190/2520 1449/1191/2521 1447/1189/2522 +f 1447/1189/2522 1445/1187/2523 1446/1190/2520 +f 1524/1193/2524 1340/1195/2525 1453/457/1306 +f 1453/457/1306 1369/1194/2526 1524/1193/2524 +f 1255/456/1304 1453/457/1306 1340/1195/2525 +f 1340/1195/2525 1341/1098/2353 1255/456/1304 +f 1456/661/1501 1069/1196/2527 1457/660/1500 +f 1456/661/1501 1236/437/1287 1068/537/1399 +f 1068/537/1399 1067/1197/2528 1456/661/1501 +f 1457/660/1500 1069/1196/2527 1071/962/1499 +f 1071/962/1499 1454/659/1498 1457/660/1500 +f 1368/568/1432 1369/570/1434 235/1198/2529 +f 235/1198/2529 628/663/2530 1368/568/1432 +f 1368/568/1432 629/662/2531 1458/664/1502 +f 1460/666/1504 1465/669/1511 1543/667/1510 +f 1543/667/1510 1463/668/1509 1460/666/1504 +f 1543/667/1510 1542/1322/2532 1389/598/2415 +f 1389/598/2415 1386/595/1451 1543/667/1510 +f 1466/671/1512 1468/673/2533 1536/672/2534 +f 1536/672/2534 1467/670/1513 1466/671/1512 +f 1659/675/2535 1534/674/2536 1536/672/2534 +f 1536/672/2534 1468/673/2533 1659/675/2535 +f 1519/1199/2537 1659/675/2535 1468/673/2533 +f 1468/673/2533 1401/614/2441 1519/1199/2537 +f 1401/614/2441 1657/676/2440 1519/1199/2537 +f 639/679/2538 1401/614/2441 1468/673/2533 +f 1468/673/2533 640/680/2539 639/679/2538 +f 1469/1200/2540 1472/613/2442 1401/614/2441 +f 1401/614/2441 639/679/2538 1469/1200/2540 +f 640/680/2539 1468/673/2533 1466/671/1512 +f 640/680/2539 1466/671/1512 1460/666/1504 +f 1460/666/1504 1459/681/1503 640/680/2539 +f 1470/743/1516 1472/965/1518 1469/1201/2541 +f 1469/1201/2541 1471/964/1517 1470/743/1516 +f 1471/964/1517 643/697/1521 1473/694/1514 +f 1476/683/2542 1482/793/2543 1660/966/1520 +f 1660/966/1520 1474/682/1519 1476/683/2542 +f 1474/682/1519 646/684/2544 1476/683/2542 +f 1477/686/1678 649/687/2545 646/684/2544 +f 646/684/2544 1474/682/1519 1477/686/1678 +f 1478/688/2546 649/687/2545 1477/686/1678 +f 1477/686/1678 650/689/2547 1478/688/2546 +f 651/690/2548 650/689/2547 1477/686/1678 +f 1479/1202/2549 651/690/2548 1477/686/1678 +f 1477/686/1678 652/693/2550 1479/1202/2549 +f 653/691/2551 652/693/2550 1477/686/1678 +f 1477/686/1678 1676/692/1677 653/691/2551 +f 1482/793/2543 1483/790/1527 1660/966/1520 +f 779/792/2552 1484/791/1524 
1483/790/1527 +f 1483/790/1527 1482/793/2543 779/792/2552 +f 1485/1286/2553 779/792/2552 1482/793/2543 +f 1482/793/2543 781/796/2554 1485/1286/2553 +f 782/794/2555 781/796/2554 1482/793/2543 +f 1482/793/2543 783/795/2556 782/794/2555 +f 783/795/2556 1482/793/2543 1476/683/2542 +f 1476/683/2542 647/685/2557 783/795/2556 +f 1492/714/1537 673/715/1541 1486/712/1528 +f 673/715/1541 675/717/2558 1495/716/1542 +f 1495/716/1542 1486/712/1528 673/715/1541 +f 1495/716/1542 675/717/2558 676/719/2559 +f 676/719/2559 677/718/1543 1495/716/1542 +f 1503/707/1551 1504/702/1552 663/709/2560 +f 663/709/2560 665/708/2561 1503/707/1551 +f 1372/1209/2562 1377/1212/2563 1517/1211/2564 +f 1517/1211/2564 1371/1210/2565 1372/1209/2562 +f 1517/1211/2564 1377/1212/2563 1518/1214/2566 +f 1518/1214/2566 1371/1213/2565 1517/1211/2564 +f 1518/744/2567 1515/747/2568 1514/746/2569 +f 1514/746/2569 1513/745/2570 1518/744/2567 +f 1352/1216/2571 1516/1219/2572 1375/1218/2573 +f 1375/1218/2573 1552/1217/2574 1352/1216/2571 +f 1376/1220/2575 1352/1216/2571 1552/1217/2574 +f 1552/1217/2574 1375/1221/2573 1376/1220/2575 +f 1519/1222/2576 1657/1225/2577 1520/1224/2578 +f 1520/1224/2578 1658/1223/2579 1519/1222/2576 +f 1659/1226/2580 1519/1222/2576 1658/1223/2579 +f 1521/1231/2581 1560/1234/2582 1657/1233/2583 +f 1657/1233/2583 1400/1232/2584 1521/1231/2581 +f 714/1235/2585 264/1082/2312 1102/301/1149 +f 1102/301/1149 1523/1236/2586 714/1235/2585 +f 1354/748/1416 718/751/1569 717/750/2587 +f 717/750/2587 1355/749/1417 1354/748/1416 +f 719/752/2588 1528/753/1570 1354/748/1416 +f 1354/748/1416 1350/556/1413 719/752/2588 +f 1348/552/1412 1529/754/2589 719/752/2588 +f 719/752/2588 1350/556/1413 1348/552/1412 +f 720/1238/2590 1529/754/2589 1348/552/1412 +f 1348/552/1412 1530/755/2591 720/1238/2590 +f 721/756/2592 1530/755/2591 1348/552/1412 +f 1348/552/1412 722/757/2593 721/756/2592 +f 1532/1239/2594 1533/1241/2595 1659/1240/2596 +f 1532/1244/2597 1534/1243/2598 1659/1242/2599 +f 1541/1248/2600 1542/1247/2601 1465/1245/2602 +f 1465/1245/2602 1535/761/2603 1541/1248/2600 +f 1535/761/2603 1465/1245/2602 1467/760/2604 +f 1538/758/2605 1535/761/2603 1467/760/2604 +f 1467/760/2604 1539/759/2606 1538/758/2605 +f 1465/1245/2602 1542/1247/2601 1543/1246/2607 +f 1540/1249/2608 1539/759/2609 1467/760/2610 +f 1467/760/2610 1536/1250/2611 1540/1249/2608 +f 1540/1249/2608 1536/1250/2611 1534/1251/2612 +f 1389/1252/2613 1541/1253/2614 1535/986/1572 +f 1535/986/1572 1537/985/1571 1389/1252/2613 +f 1389/1252/2613 1542/1254/2615 1541/1253/2614 +f 1461/764/2616 1463/763/2617 1464/762/2618 +f 1464/762/2618 1544/765/2619 1461/764/2616 +f 1462/1256/2620 1461/764/2616 1544/765/2619 +f 1544/765/2619 1464/1257/2618 1462/1256/2620 +f 1550/993/2621 1545/992/2622 1551/1258/2623 +f 1551/1258/2623 1545/992/2622 1546/1260/2624 +f 1546/1260/2624 1547/1259/2625 1551/1258/2623 +f 1353/1261/2626 1547/1259/2625 1546/1260/2624 +f 1555/1264/2627 1556/1263/2628 1554/1262/2629 +f 1554/1262/2629 1553/1265/2630 1555/1264/2627 +f 1555/1264/2627 1553/1265/2630 1462/1267/2631 +f 1462/1267/2631 1556/1266/2628 1555/1264/2627 +f 1558/1268/2632 1557/1271/2633 1402/1270/2634 +f 1402/1270/2634 1406/1269/2635 1558/1268/2632 +f 1557/1272/2636 1558/1274/2637 1406/1273/2638 +f 1398/1228/2639 1560/1227/2639 1521/1230/2639 +f 1559/1138/2434 752/1002/1586 1563/1001/1585 +f 1559/768/2434 1563/1323/1585 1566/767/1590 +f 1566/767/1590 1567/766/2640 1559/768/2434 +f 1568/604/2423 1566/1005/1590 1565/1004/1589 +f 1565/1004/1589 552/1127/2427 1568/604/2423 +f 1569/1275/2641 1475/1278/2642 
1660/1277/2643 +f 1660/1277/2643 1483/1276/2644 1569/1275/2641 +f 1283/770/2645 1575/1279/2646 1570/773/1367 +f 1571/1282/2647 1282/780/2648 1575/777/2646 +f 1575/777/2646 1283/1324/2645 1571/1282/2647 +f 1575/777/2646 1280/779/2649 1576/778/2650 +f 1576/778/2650 1570/1325/1367 1575/777/2646 +f 1570/773/1367 1576/1280/2650 1307/507/1365 +f 1573/1111/2651 1572/511/2652 1307/507/1365 +f 1307/507/1365 1576/1280/2650 1573/1111/2651 +f 1272/776/2653 1267/472/2654 1271/774/2655 +f 1272/776/2653 1271/774/2655 1584/1099/2656 +f 1584/1099/2656 1574/1281/2657 1272/776/2653 +f 1577/1102/2360 1272/776/1322 1574/1281/2658 +f 1280/779/2659 1578/782/1597 1577/781/2660 +f 1577/781/2660 1576/778/2661 1280/779/2659 +f 1280/779/2659 1579/783/1598 1578/782/1597 +f 1573/930/2662 1576/778/2661 1577/781/2660 +f 1577/781/2660 1582/521/2663 1573/930/2662 +f 1577/781/2660 1574/787/2664 1273/786/2665 +f 1273/786/2665 1582/521/2663 1577/781/2660 +f 1584/1283/2666 1583/789/2667 1574/787/2668 +f 1273/786/2665 1316/1284/2669 1585/520/2670 +f 1585/520/2670 1582/521/2663 1273/786/2665 +f 1318/927/2671 1573/930/2662 1582/521/2663 +f 1318/927/2671 1582/521/2663 1317/518/2672 +f 1317/518/2672 1587/1285/2673 1318/927/2671 +f 1594/736/1609 696/741/2674 1592/740/1606 +f 1594/736/1609 695/737/2675 1595/1207/2676 +f 1595/1207/2676 696/741/2674 1594/736/1609 +f 694/738/2677 695/737/2675 1594/736/1609 +f 1594/736/1609 693/739/2678 694/738/2677 +f 688/1205/2679 1605/731/1621 1606/729/1622 +f 1606/729/1622 1607/1206/2680 688/1205/2679 +f 685/728/2681 1606/729/1622 1608/726/1619 +f 1608/726/1619 1609/727/1623 685/728/2681 +f 1481/695/1523 1496/720/1529 680/722/2682 +f 680/722/2682 1610/723/1624 1481/695/1523 +f 683/725/2683 1611/724/1625 1610/723/1624 +f 1610/723/1624 680/722/2682 683/725/2683 +f 1653/827/2684 1613/1288/2685 1614/1287/2686 +f 1614/1287/2686 1616/826/2687 1653/827/2684 +f 1613/1288/2688 1649/1289/2689 1615/1290/2690 +f 1653/827/2691 1629/807/2692 1649/1289/2689 +f 1649/1289/2689 1613/1288/2688 1653/827/2691 +f 1617/799/1640 1652/806/2693 1653/827/2694 +f 1653/827/2694 1616/826/2695 1617/799/1640 +f 1627/804/2696 1620/805/1629 1623/811/1633 +f 1623/811/1633 1626/803/1639 1627/804/2696 +f 1633/810/1649 1623/811/1633 1228/429/1636 +f 1228/429/1636 1229/430/2697 1633/810/1649 +f 1633/810/1649 1229/430/2697 1639/1028/1653 +f 1639/1028/1653 1687/1292/2698 1633/810/1649 +f 1229/430/2697 1634/1025/1650 1639/1028/1653 +f 1188/814/2699 805/815/2700 1637/1031/1656 +f 1637/1031/1656 1635/1027/1652 1188/814/2699 +f 1186/813/2701 805/815/2700 1188/814/2699 +f 1640/816/2702 1641/817/1658 805/815/2700 +f 805/815/2700 1186/813/2701 1640/816/2702 +f 1641/817/1658 1642/818/1657 805/815/2700 +f 1184/819/1660 1183/1293/2703 1642/818/1657 +f 1642/818/1657 1183/1293/2703 811/821/2704 +f 811/821/2704 805/815/2700 1642/818/1657 +f 1641/817/1658 1640/816/2702 1185/1294/2705 +f 1183/1293/2703 1645/1295/2706 1644/824/2707 +f 1644/824/2707 811/821/2704 1183/1293/2703 +f 811/821/2704 1644/824/2707 1646/823/2708 +f 1646/823/2708 1647/822/2709 811/821/2704 +f 1644/824/2707 1182/825/2710 1646/823/2708 +f 1647/822/2709 1646/823/2708 1180/1297/2711 +f 1180/1297/2711 1179/1296/2712 1647/822/2709 +f 1648/1298/2713 1181/1299/2714 811/821/2704 +f 811/821/2704 1647/822/2709 1648/1298/2713 +f 1647/822/2709 1675/1300/2715 1648/1298/2713 +f 1649/1289/2689 1629/807/2692 1651/809/2716 +f 1653/827/2717 1652/806/2717 1629/807/2717 +f 1630/808/1645 1669/828/2718 1651/809/2719 +f 1665/829/2720 1669/828/2718 1630/808/1645 +f 1630/808/1645 1662/830/2721 1665/829/2720 
+f 1662/830/2721 1217/839/2722 1666/838/2723 +f 1666/838/2723 1665/829/2720 1662/830/2721 +f 1662/830/2721 1630/808/1645 1632/1024/1648 +f 1632/1024/1648 1654/1037/1667 1662/830/2721 +f 1661/1301/2724 1662/830/2721 1654/1037/1667 +f 1654/1037/2187 1655/1302/2725 1661/1301/1665 +f 1667/831/2726 1223/832/2726 1669/828/2726 +f 1667/831/2727 1219/834/2728 1656/833/2729 +f 1656/833/2729 1223/832/2730 1667/831/2727 +f 1522/1132/2731 1658/1133/2732 1520/1304/2733 +f 1520/1304/2733 1657/1303/2734 1522/1132/2731 +f 1483/1305/2735 1475/1307/2736 1569/1306/2737 +f 1213/403/1265 1661/837/1665 1654/836/2738 +f 1654/836/2738 1215/835/1263 1213/403/1265 +f 1654/836/2738 1664/840/2739 1215/835/1263 +f 1219/409/1270 1667/1311/2740 1668/1310/2741 +f 1668/1310/2741 1665/1038/1668 1219/409/1270 +f 1672/348/2742 1671/347/2742 1670/351/2742 +f 1670/351/2742 1167/357/2743 1672/348/2742 +f 1179/1296/2744 1648/1312/2745 1675/1300/2744 +f 1675/1300/2744 1647/822/2746 1179/1296/2744 +f 1651/809/2747 1669/1326/2748 1223/832/2749 +f 1223/832/2749 1649/1289/2749 1651/809/2747 +f 1228/429/1636 1230/428/2750 1229/430/2697 +f 357/398/1255 144/172/996 967/169/995 +f 967/169/995 1135/312/1175 357/398/1255 +f 1007/199/1045 182/843/2751 841/842/2752 +f 841/842/2752 179/201/1042 1007/199/1045 +f 1590/1011/1604 1589/1012/1603 701/1208/2753 +f 701/1208/2753 1676/692/1677 1590/1011/1604 +f 1682/1327/2754 1689/1328/2755 1683/1329/2756 +f 1683/1329/2756 1681/1328/2757 1682/1327/2754 +f 1678/1330/2758 1681/1328/2759 1683/1329/2760 +f 1683/1329/2760 1686/1331/2761 1678/1330/2758 +f 1690/1332/2762 1689/1328/2755 1682/1327/2754 +f 1682/1327/2754 1681/1328/2757 1690/1332/2762 +f 1680/1292/2763 1690/1332/2762 1681/1328/2757 +f 1681/1328/2757 1678/1330/2764 1680/1292/2763 +f 1685/1330/2765 1686/1331/2766 1683/1329/2767 +f 1683/1329/2767 1689/1328/2755 1685/1330/2765 +f 1685/1330/2768 1687/1292/2769 1688/1333/2770 +f 1688/1333/2770 1684/1030/2771 1685/1330/2768 +f 1687/1292/2772 1685/1330/2773 1689/1328/2774 +f 1689/1328/2774 1690/1332/2775 1687/1292/2772 +f 1680/1292/2164 799/810/815 840/1291/2163 +f 840/1291/2163 1690/1332/2776 1680/1292/2164 +f 804/1028/2165 1677/1030/2170 800/1027/2167 +f 1637/1031/2169 802/1029/2171 1679/1334/2777 +f 1679/1334/2777 1638/1029/2778 1637/1031/2169 +f 803/1333/2779 1677/1030/2780 804/1028/2781 +f 804/1028/2781 1680/1292/2782 803/1333/2779 +f 1687/1292/2698 1690/1332/2783 840/1291/2784 +f 840/1291/2784 1633/810/1649 1687/1292/2698 +f 1639/1028/1653 1635/1027/1652 1684/1030/1655 +f 1684/1030/1655 1688/1333/2785 1639/1028/1653 +f 1688/1333/2786 1687/1292/2786 1639/1028/2786 +f 1678/1330/2787 802/1029/2171 1677/1030/2170 +f 1685/1330/2788 1684/1030/1655 1638/1029/1654 +f 802/1029/2171 1678/1330/2758 1686/1331/2761 +f 1686/1331/2761 1679/1334/2777 802/1029/2171 +f 1678/1330/2789 1677/1030/2790 803/1333/2791 +f 803/1333/2791 1680/1292/2792 1678/1330/2789 +f 1686/1331/2766 1685/1330/2765 1638/1029/2793 +f 1638/1029/2793 1679/1334/2794 1686/1331/2766 diff --git a/samples/sample3.obj b/samples/sample3.obj new file mode 100644 index 0000000000000000000000000000000000000000..fe3f7fbb092174af34961d7e828d01634e534e81 --- /dev/null +++ b/samples/sample3.obj @@ -0,0 +1,3754 @@ +# Blender 4.0.0 +# www.blender.org +o Eagle +v -0.347259 0.287335 0.304923 +v -0.337158 0.291280 0.332233 +v -0.350803 0.283293 0.275696 +v -0.320149 0.294766 0.355179 +v -0.349198 0.278392 0.248021 +v -0.222957 0.324401 0.274657 +v -0.215319 0.327584 0.300213 +v -0.230378 0.319464 0.250395 +v -0.298163 0.297740 0.372931 +v -0.404901 0.253648 
0.326166 +v -0.343462 0.273135 0.223087 +v -0.398226 0.256697 0.352639 +v -0.407798 0.250548 0.299900 +v -0.242286 0.310935 0.229457 +v -0.213034 0.326613 0.324951 +v -0.386392 0.259943 0.377361 +v -0.408340 0.246458 0.270930 +v -0.254686 0.302137 0.213959 +v -0.214636 0.323458 0.344963 +v -0.370901 0.263558 0.394483 +v -0.405789 0.242987 0.245828 +v -0.421412 0.238422 0.333099 +v -0.427665 0.230732 0.270637 +v -0.400210 0.246236 0.390527 +v -0.330277 0.265210 0.203228 +v -0.270975 0.296131 0.384988 +v -0.173683 0.328680 0.264480 +v -0.429675 0.229866 0.304809 +v -0.415407 0.237918 0.365194 +v -0.168474 0.331394 0.293612 +v -0.184609 0.322610 0.238613 +v -0.177255 0.327644 0.322858 +v -0.206792 0.311419 0.219108 +v -0.418833 0.227891 0.235446 +v -0.237815 0.297143 0.203886 +v -0.194913 0.320303 0.349748 +v -0.375468 0.251433 0.410865 +v -0.476756 0.205512 0.355587 +v -0.479895 0.203158 0.376050 +v -0.485632 0.199174 0.342020 +v -0.494478 0.192791 0.285495 +v -0.491807 0.193796 0.301081 +v -0.319617 0.256455 0.193680 +v -0.203916 0.298535 0.136863 +v -0.256085 0.289484 0.390736 +v -0.448734 0.214877 0.414372 +v -0.160202 0.321384 0.262240 +v -0.203154 0.236543 0.099216 +v -0.454759 0.211511 0.398700 +v -0.155252 0.324316 0.293911 +v -0.172451 0.314914 0.234557 +v -0.490618 0.191011 0.269820 +v -0.166302 0.320418 0.325079 +v -0.438188 0.212941 0.308414 +v -0.197493 0.303246 0.214702 +v -0.421925 0.221944 0.370750 +v -0.417431 0.217181 0.223407 +v -0.201274 0.266369 0.047756 +v -0.229311 0.289261 0.199320 +v -0.184615 0.313561 0.353029 +v -0.435068 0.217995 0.426425 +v -0.367625 0.243484 0.418726 +v -0.494602 0.191092 0.363749 +v -0.481634 0.195971 0.381082 +v -0.488429 0.191332 0.341512 +v -0.197917 0.271207 0.210323 +v -0.493338 0.186930 0.304650 +v -0.500000 0.183397 0.287983 +v -0.457468 0.204675 0.398422 +v -0.456231 0.205635 0.417997 +v -0.492602 0.184353 0.269419 +v -0.435138 0.212701 0.429084 +v -0.194632 0.193628 0.140081 +v -0.193818 0.332686 0.203719 +v -0.185552 0.320544 0.073513 +v -0.431263 0.201688 0.430950 +v -0.488792 0.171883 0.269953 +v -0.453268 0.192425 0.419751 +v -0.454148 0.191122 0.399060 +v -0.488994 0.173362 0.307110 +v -0.496768 0.169499 0.289737 +v -0.484914 0.176468 0.342037 +v -0.477472 0.181262 0.384387 +v -0.492204 0.174544 0.366221 +v -0.360292 0.228752 0.422063 +v -0.219983 0.273899 0.198011 +v -0.174531 0.298661 0.355319 +v -0.410957 0.199825 0.219752 +v -0.417428 0.204025 0.373183 +v -0.187636 0.287156 0.213593 +v -0.433939 0.194739 0.310309 +v -0.155694 0.304703 0.326960 +v -0.143339 0.308099 0.294999 +v -0.161119 0.298394 0.233590 +v -0.181895 0.290250 0.002244 +v -0.148083 0.304695 0.262134 +v -0.307710 0.238402 0.190136 +v -0.243189 0.272750 0.394004 +v -0.180880 0.213528 -0.018877 +v -0.180808 0.238871 -0.045295 +v -0.180713 0.349336 0.131155 +v -0.179827 0.231850 0.268517 +v -0.179466 0.143195 0.158854 +v -0.176608 0.366673 0.234442 +v -0.176186 0.316661 0.288524 +v -0.425432 0.197477 0.427841 +v -0.478780 0.167813 0.271784 +v -0.442808 0.186801 0.400445 +v -0.434630 0.190631 0.415362 +v -0.478386 0.167440 0.302934 +v -0.171941 0.180030 0.004606 +v -0.480517 0.165426 0.287632 +v -0.472162 0.170414 0.344094 +v -0.466270 0.174004 0.378048 +v -0.169250 0.376939 0.165721 +v -0.458672 0.174207 0.356678 +v -0.169000 0.260644 -0.070464 +v -0.357451 0.212882 0.413551 +v -0.176230 0.280341 0.352492 +v -0.218933 0.256960 0.206254 +v -0.399215 0.187299 0.238477 +v -0.186845 0.268796 0.222024 +v -0.157268 0.284974 0.325895 +v -0.162630 0.173433 0.284031 +v 
-0.394370 0.192262 0.368297 +v -0.163340 0.277232 0.241880 +v -0.147179 0.285997 0.296846 +v -0.407549 0.184202 0.308209 +v -0.151911 0.282222 0.267820 +v -0.378144 0.198824 0.393843 +v -0.249069 0.249160 0.388024 +v -0.307781 0.217588 0.206267 +v -0.404124 0.182764 0.274578 +v -0.396677 0.185965 0.336778 +v -0.156075 0.410950 0.241437 +v -0.378928 0.186802 0.250168 +v -0.344304 0.206676 0.398502 +v -0.154505 0.136821 0.023033 +v -0.187387 0.265233 0.349043 +v -0.227237 0.243559 0.217827 +v -0.153045 0.414336 0.183524 +v -0.152021 0.361819 0.309018 +v -0.378866 0.184597 0.275812 +v -0.357269 0.197599 0.381775 +v -0.183089 0.262711 0.329474 +v -0.212301 0.246970 0.233913 +v -0.376476 0.184236 0.304775 +v -0.367169 0.190158 0.357322 +v -0.311572 0.205519 0.227771 +v -0.373123 0.185893 0.330980 +v -0.266381 0.229680 0.377589 +v -0.198071 0.250619 0.255344 +v -0.183032 0.258724 0.305111 +v -0.189905 0.253948 0.279717 +v -0.145317 0.098222 0.162923 +v -0.142057 0.338380 0.029476 +v -0.312335 0.200001 0.253388 +v -0.283300 0.215928 0.360734 +v -0.140679 0.307797 -0.032423 +v -0.139241 0.365784 0.081771 +v -0.310645 0.197611 0.281689 +v -0.297017 0.205341 0.338288 +v -0.306097 0.199343 0.311125 +v -0.135880 0.277055 -0.095133 +v -0.133505 0.468974 0.240426 +v -0.133167 0.295566 0.374496 +v -0.132592 0.125548 0.280918 +v -0.132264 0.389958 0.117715 +v -0.131748 0.471989 0.194790 +v -0.129917 0.410436 0.300478 +v -0.123193 0.558227 0.246199 +v -0.122443 0.097057 0.032284 +v -0.121378 0.182458 -0.328378 +v -0.121311 0.423380 0.141985 +v -0.121186 0.540277 0.263166 +v -0.121079 0.218385 0.375776 +v -0.120684 0.563658 0.256302 +v -0.120303 0.581168 0.240296 +v -0.119441 0.549800 0.269574 +v -0.118187 0.581403 0.251630 +v -0.118118 0.176794 -0.109455 +v -0.116737 0.532895 0.242578 +v -0.115684 0.234361 -0.136325 +v -0.114823 0.532129 0.286651 +v -0.114791 0.543543 0.287887 +v -0.114666 0.191602 -0.327942 +v -0.113334 0.536796 0.203610 +v -0.113292 0.602954 0.247039 +v -0.112962 0.467170 0.290646 +v -0.112619 0.598277 0.256810 +v -0.112291 0.550855 0.230581 +v -0.109954 0.164591 0.352447 +v -0.109783 0.528412 0.251796 +v -0.109688 0.203667 -0.117799 +v -0.109493 0.191243 -0.238950 +v -0.109236 0.110478 -0.290242 +v -0.109194 0.249094 -0.152950 +v -0.109106 0.578498 0.288917 +v -0.108679 0.579540 0.223200 +v -0.108017 0.577398 0.288547 +v -0.107982 0.546564 0.306331 +v -0.107308 0.137050 -0.090711 +v -0.106989 0.365639 0.370634 +v -0.106637 0.174906 -0.324488 +v -0.106601 0.123736 -0.228188 +v -0.105806 0.535965 0.310361 +v -0.105475 0.609757 0.270451 +v -0.105340 0.008277 0.174540 +v -0.104711 0.104727 -0.320028 +v -0.104683 0.120135 -0.290130 +v -0.104642 0.478193 0.162301 +v -0.104039 0.617747 0.264620 +v -0.103413 0.112726 -0.317819 +v -0.103242 0.142279 -0.135871 +v -0.102398 0.527085 0.290867 +v -0.102086 0.594984 0.249919 +v -0.101826 0.518224 0.281160 +v -0.101170 0.134097 -0.229379 +v -0.100934 0.000000 0.173344 +v -0.100839 0.558048 0.319964 +v -0.100203 0.016789 0.170725 +v -0.099913 0.606780 0.231631 +v -0.099732 0.011168 0.149808 +v -0.098708 0.206135 -0.250563 +v -0.098667 0.612768 0.288895 +v -0.098186 0.215751 -0.135563 +v -0.097707 0.104736 -0.283474 +v -0.097341 0.153542 -0.138764 +v -0.097305 0.100179 -0.311182 +v -0.097183 0.598102 0.212399 +v -0.096553 0.550758 0.327943 +v -0.096430 0.152749 0.128739 +v -0.096240 0.152749 0.106998 +v -0.095275 0.574918 0.325136 +v -0.095167 0.000000 0.150809 +v -0.095167 0.023542 0.150809 +v -0.095022 0.621583 0.288330 +v -0.094891 0.011422 0.125183 
+v -0.094359 0.011443 0.108301 +v -0.094205 0.105604 -0.060691 +v -0.094021 0.606507 0.307200 +v -0.093772 0.116892 -0.223617 +v -0.092780 0.592654 0.320464 +v -0.091706 -0.000000 0.125659 +v -0.091550 -0.000000 0.110211 +v -0.091291 0.000000 0.183158 +v -0.090553 0.523021 0.310805 +v -0.089542 0.572544 0.334686 +v -0.089375 0.016789 0.181743 +v -0.089308 0.036021 0.125861 +v -0.089170 0.036021 0.109999 +v -0.088807 0.023543 0.125659 +v -0.088777 0.133970 -0.132627 +v -0.088746 0.177286 -0.243981 +v -0.088735 0.008714 0.188720 +v -0.088672 0.023543 0.110211 +v -0.088659 0.613435 0.311815 +v -0.088577 0.586951 0.300071 +v -0.088551 0.540444 0.177031 +v -0.088343 0.625276 0.253614 +v -0.087793 0.416910 0.346449 +v -0.086653 0.595485 0.328782 +v -0.084339 0.000000 0.161828 +v -0.084339 0.023542 0.161828 +v -0.082871 0.011035 0.161334 +v -0.081717 0.008493 0.186690 +v -0.081494 0.075202 0.163867 +v -0.081387 0.176977 -0.360585 +v -0.081191 0.152749 0.144245 +v -0.080733 0.152749 0.091760 +v -0.079914 0.011154 0.095017 +v -0.079804 0.011035 0.162972 +v -0.079682 0.117790 -0.325217 +v -0.078983 0.541517 0.332788 +v -0.078830 -0.000000 0.099383 +v -0.078798 0.469114 0.337649 +v -0.078579 0.011017 0.141315 +v -0.078365 0.016789 0.180947 +v -0.078195 0.000000 0.161384 +v -0.078195 0.023542 0.161384 +v -0.078190 0.036021 0.137175 +v -0.078113 0.640724 0.256138 +v -0.077979 0.000000 0.136678 +v -0.077979 0.023543 0.136678 +v -0.077943 0.189063 -0.350680 +v -0.077856 0.036021 0.098881 +v -0.077815 0.130485 -0.295382 +v -0.077653 0.023543 0.099383 +v -0.077565 0.000000 0.182879 +v -0.077265 0.107153 -0.332229 +v -0.077070 0.630072 0.283259 +v -0.076864 0.347970 0.007250 +v -0.076710 0.316886 -0.054483 +v -0.075684 0.285823 -0.116889 +v -0.075497 0.257005 -0.174935 +v -0.075474 0.376934 0.060584 +v -0.074928 0.102781 0.284689 +v -0.074847 0.169514 -0.346888 +v -0.074431 0.523955 0.335575 +v -0.074096 0.598820 0.188336 +v -0.073789 0.213570 -0.272878 +v -0.073192 0.641134 0.219611 +v -0.072172 0.403912 0.099363 +v -0.071865 0.102399 -0.316626 +v -0.071766 0.146832 -0.232843 +v -0.070217 0.568756 0.341219 +v -0.069113 0.619884 0.312623 +v -0.067087 0.632621 0.302114 +v -0.066809 0.105255 -0.284853 +v -0.066604 0.597441 0.333838 +v -0.066392 0.438051 0.126406 +v -0.065007 0.159570 0.370946 +v -0.064735 0.169511 -0.177313 +v -0.064536 0.074381 0.025607 +v -0.064199 0.580770 0.340581 +v -0.064090 0.236462 0.416811 +v -0.063859 0.168226 -0.141393 +v -0.063807 0.000000 0.182999 +v -0.063459 0.315023 0.424014 +v -0.062917 0.016789 0.181082 +v -0.062746 0.000000 0.161518 +v -0.062746 0.023542 0.161518 +v -0.062599 0.194484 -0.178316 +v -0.062531 0.000000 0.136812 +v -0.062531 0.023543 0.136812 +v -0.062329 0.036021 0.137314 +v -0.062205 0.023543 0.099517 +v -0.061994 0.036021 0.099020 +v -0.061979 0.171957 -0.262738 +v -0.061851 0.633912 0.198138 +v -0.061802 0.011017 0.141432 +v -0.060996 -0.000000 0.099517 +v -0.059855 0.011157 0.095181 +v -0.059759 0.117560 -0.223283 +v -0.059667 0.008493 0.186882 +v -0.059451 0.152749 0.144435 +v -0.059392 0.200471 -0.184557 +v -0.058993 0.152749 0.091950 +v -0.058626 0.011034 0.163433 +v -0.058070 0.011035 0.161552 +v -0.057232 0.487752 0.146977 +v -0.055861 0.129688 -0.155702 +v -0.054224 0.380928 0.400748 +v -0.054187 0.000000 0.161963 +v -0.054187 0.023542 0.161963 +v -0.053664 0.121597 -0.317475 +v -0.053382 0.183340 -0.149113 +v -0.053137 0.619562 0.335156 +v -0.051512 0.023543 0.125984 +v -0.051378 0.023543 0.110536 +v -0.051044 0.092933 -0.092260 +v -0.051015 
0.036021 0.126195 +v -0.050876 0.036021 0.110334 +v -0.050502 0.134831 -0.131364 +v -0.049670 0.157272 -0.255833 +v -0.049389 0.116069 -0.318805 +v -0.048651 -0.000000 0.125984 +v -0.048546 0.112021 -0.311572 +v -0.048495 -0.000000 0.110536 +v -0.047917 0.545287 0.162762 +v -0.047896 0.008711 0.188889 +v -0.047579 0.016789 0.181878 +v -0.047306 0.130811 -0.290251 +v -0.045816 0.136656 -0.334777 +v -0.045655 0.011445 0.108704 +v -0.045462 0.011419 0.125376 +v -0.045240 0.000000 0.183303 +v -0.043944 0.152749 0.129196 +v -0.043755 0.152749 0.107456 +v -0.043410 0.433642 0.370457 +v -0.043168 0.000000 0.151135 +v -0.043168 0.023542 0.151135 +v -0.043019 0.576679 0.333685 +v -0.041590 0.118069 -0.284238 +v -0.040586 0.172508 -0.144073 +v -0.039538 0.601595 0.176293 +v -0.039038 0.577586 0.373888 +v -0.038789 0.124592 -0.289583 +v -0.038780 0.477321 0.362129 +v -0.038689 0.011164 0.149798 +v -0.038399 0.147332 -0.251390 +v -0.038277 0.146214 -0.229788 +v -0.038226 0.662821 0.260039 +v -0.037408 0.554523 0.336059 +v -0.037408 0.598835 0.331310 +v -0.037136 0.202814 -0.154000 +v -0.037132 0.128197 -0.331477 +v -0.036782 0.662093 0.222510 +v -0.036681 0.173453 -0.260116 +v -0.036561 0.016789 0.171050 +v -0.036282 0.526370 0.357673 +v -0.036042 0.149308 -0.338598 +v -0.035427 0.000000 0.173659 +v -0.034576 0.642120 0.194637 +v -0.034307 0.118963 -0.406313 +v -0.034302 0.008338 0.174727 +v -0.033946 0.558224 0.373164 +v -0.033946 0.596948 0.374612 +v -0.032445 0.131169 -0.224557 +v -0.032186 0.571247 0.404888 +v -0.032080 0.114416 -0.394862 +v -0.031508 0.128682 -0.405483 +v -0.030522 0.655354 0.301706 +v -0.029119 0.581331 0.360070 +v -0.028351 0.111914 -0.162301 +v -0.028351 0.121316 -0.146071 +v -0.028091 0.139551 -0.227927 +v -0.027988 0.556156 0.401677 +v -0.027988 0.586338 0.408099 +v -0.027444 0.139234 -0.185757 +v -0.027444 0.148682 -0.169469 +v -0.026106 0.167704 -0.139448 +v -0.026064 0.627358 0.346572 +v -0.021200 0.554241 0.420291 +v -0.019708 0.149619 -0.133690 +v -0.019189 0.145263 -0.190858 +v -0.019189 0.154711 -0.174570 +v -0.018881 0.104493 -0.155686 +v -0.018881 0.113941 -0.139399 +v -0.018617 0.545622 0.415845 +v -0.018617 0.563441 0.424631 +v -0.018315 0.641305 0.217899 +v -0.017899 0.609630 0.191507 +v -0.017529 0.649976 0.174580 +v -0.015736 0.623297 0.150651 +v -0.014193 0.650850 0.219850 +v -0.013999 0.631594 0.215887 +v -0.013997 0.160366 -0.135901 +v -0.013907 0.659007 0.177522 +v -0.013670 0.640902 0.171780 +v -0.013391 0.672596 0.133429 +v -0.013073 0.617217 0.194340 +v -0.013014 0.601930 0.188784 +v -0.012767 0.126085 -0.179345 +v -0.012767 0.137266 -0.161034 +v -0.012013 0.121292 -0.175438 +v -0.012013 0.132508 -0.157061 +v -0.011976 0.672587 0.144429 +v -0.011770 0.630922 0.153618 +v -0.011736 0.664828 0.130495 +v -0.011627 0.615513 0.147970 +v -0.011243 0.536266 0.420770 +v -0.011243 0.544943 0.428320 +v -0.010768 0.537661 0.425085 +v -0.010422 0.631009 0.123593 +v -0.009178 0.122923 -0.177683 +v -0.009178 0.134263 -0.159149 +v -0.009158 0.637268 0.128994 +v -0.008905 0.623103 0.123410 +v -0.003242 0.133041 -0.185230 +v -0.003242 0.144222 -0.166919 +v -0.002223 0.114053 -0.169061 +v -0.002223 0.125304 -0.150650 +v 0.000002 0.117734 -0.431961 +v 0.000002 0.111829 -0.399718 +v 0.000002 0.138487 -0.415969 +v 0.000002 0.063894 0.018304 +v 0.000002 0.127034 -0.326957 +v 0.000002 0.084918 -0.106997 +v 0.000002 0.069187 0.164016 +v 0.000002 0.123813 -0.175729 +v 0.000002 0.164840 -0.339778 +v 0.000002 0.145242 -0.246557 +v 0.000002 0.096994 0.287782 +v 0.000002 0.163862 
-0.360382 +v 0.000002 0.172229 -0.374405 +v 0.000002 0.170440 -0.275034 +v 0.000002 0.165551 -0.197842 +v 0.000002 0.189129 -0.261489 +v 0.000002 0.184473 -0.363633 +v 0.000002 0.169779 -0.139227 +v 0.000002 0.191680 -0.197163 +v 0.000002 0.197825 -0.200933 +v 0.000002 0.214038 -0.285782 +v 0.000002 0.156037 0.377155 +v 0.000002 0.217960 -0.155652 +v 0.000002 0.259372 -0.186212 +v 0.000002 0.288267 -0.126515 +v 0.000002 0.237441 0.424954 +v 0.000002 0.319302 -0.062216 +v 0.000002 0.350532 0.001457 +v 0.000002 0.319635 0.431961 +v 0.000002 0.380795 0.056774 +v 0.000002 0.410587 0.097389 +v 0.000002 0.388984 0.407141 +v 0.000002 0.446270 0.125011 +v 0.000002 0.442680 0.376502 +v 0.000002 0.492941 0.143523 +v 0.000002 0.482431 0.367416 +v 0.000002 0.528325 0.363383 +v 0.000002 0.547826 0.158502 +v 0.000002 0.529409 0.426597 +v 0.000002 0.531751 0.420206 +v 0.000002 0.543446 0.337247 +v 0.000002 0.540895 0.413718 +v 0.000002 0.540502 0.430525 +v 0.000002 0.548543 0.372802 +v 0.000002 0.548611 0.400071 +v 0.000002 0.567007 0.426971 +v 0.000002 0.582043 0.363087 +v 0.000002 0.602999 0.172490 +v 0.000002 0.593884 0.409705 +v 0.000002 0.609913 0.330123 +v 0.000002 0.606629 0.374974 +v 0.000002 0.629426 0.349218 +v 0.000002 0.644434 0.193300 +v 0.000002 0.664516 0.224605 +v 0.000002 0.657649 0.305875 +v 0.000002 0.665202 0.264073 +v 0.000053 0.619607 0.195260 +v 0.000055 0.633480 0.154484 +v 0.000058 0.639532 0.126612 +v 0.000058 0.633117 0.116485 +v 0.000059 0.599398 0.187919 +v 0.000061 0.612867 0.146948 +v 0.000064 0.622494 0.119916 +v 0.000112 0.654351 0.220708 +v 0.000113 0.662272 0.178330 +v 0.000114 0.680303 0.140846 +v 0.000116 0.628139 0.215142 +v 0.000116 0.683203 0.122684 +v 0.000120 0.637882 0.170587 +v 0.000121 0.668205 0.124035 +v 0.000560 0.130035 -0.183700 +v 0.000560 0.141375 -0.165166 +v 0.000668 0.115719 -0.171349 +v 0.000668 0.127059 -0.152815 +v 0.004091 0.133155 -0.185258 +v 0.004091 0.144281 -0.167002 +v 0.004117 0.113702 -0.168524 +v 0.004117 0.124893 -0.150190 +v 0.009001 0.623110 0.123408 +v 0.009247 0.637265 0.128988 +v 0.010500 0.631011 0.123589 +v 0.010596 0.122970 -0.177484 +v 0.010596 0.134310 -0.158951 +v 0.010773 0.537661 0.425085 +v 0.011248 0.536266 0.420770 +v 0.011248 0.544943 0.428320 +v 0.011706 0.615520 0.147972 +v 0.011845 0.630916 0.153615 +v 0.011904 0.664826 0.130492 +v 0.012142 0.674352 0.144566 +v 0.013093 0.601936 0.188786 +v 0.013146 0.617213 0.194338 +v 0.013536 0.673325 0.133418 +v 0.013815 0.640904 0.171781 +v 0.013978 0.120904 -0.174618 +v 0.013978 0.132095 -0.156283 +v 0.014002 0.160366 -0.135901 +v 0.014052 0.658992 0.177557 +v 0.014154 0.631602 0.215889 +v 0.014197 0.126148 -0.179074 +v 0.014197 0.137324 -0.160769 +v 0.014349 0.650804 0.220019 +v 0.015771 0.623298 0.150651 +v 0.017622 0.649972 0.174592 +v 0.017933 0.609631 0.191507 +v 0.018414 0.641288 0.217955 +v 0.018482 0.146118 -0.191239 +v 0.018482 0.155520 -0.175009 +v 0.018622 0.545622 0.415845 +v 0.018622 0.563441 0.424631 +v 0.019713 0.149619 -0.133690 +v 0.019909 0.104584 -0.155297 +v 0.019909 0.114033 -0.139009 +v 0.021204 0.554241 0.420291 +v 0.026069 0.627358 0.346572 +v 0.026111 0.167704 -0.139448 +v 0.027993 0.556156 0.401677 +v 0.027993 0.586338 0.408099 +v 0.028095 0.139551 -0.227927 +v 0.028870 0.139367 -0.185191 +v 0.028870 0.148815 -0.168904 +v 0.029082 0.111284 -0.160965 +v 0.029082 0.120732 -0.144677 +v 0.029123 0.581331 0.360070 +v 0.030526 0.655354 0.301706 +v 0.031513 0.128682 -0.405483 +v 0.032085 0.114416 -0.394862 +v 0.032191 0.571247 0.404888 +v 0.032450 0.131169 
-0.224557 +v 0.033950 0.558224 0.373164 +v 0.033950 0.596948 0.374612 +v 0.034306 0.008338 0.174727 +v 0.034312 0.118963 -0.406313 +v 0.034581 0.642120 0.194637 +v 0.035431 0.000000 0.173659 +v 0.036047 0.149308 -0.338598 +v 0.036287 0.526370 0.357673 +v 0.036566 0.016789 0.171050 +v 0.036686 0.173453 -0.260116 +v 0.036787 0.662093 0.222510 +v 0.037136 0.128197 -0.331477 +v 0.037141 0.202814 -0.154000 +v 0.037412 0.554523 0.336059 +v 0.037412 0.598835 0.331310 +v 0.038231 0.662821 0.260039 +v 0.038282 0.146214 -0.229788 +v 0.038404 0.147332 -0.251390 +v 0.038694 0.011164 0.149798 +v 0.038785 0.477321 0.362129 +v 0.038794 0.124592 -0.289583 +v 0.039043 0.577586 0.373888 +v 0.039543 0.601595 0.176293 +v 0.040590 0.172508 -0.144073 +v 0.041595 0.118069 -0.284238 +v 0.043024 0.576679 0.333685 +v 0.043173 0.000000 0.151135 +v 0.043173 0.023542 0.151135 +v 0.043414 0.433642 0.370457 +v 0.043759 0.152749 0.107456 +v 0.043949 0.152749 0.129196 +v 0.045245 0.000000 0.183303 +v 0.045467 0.011419 0.125376 +v 0.045659 0.011445 0.108704 +v 0.045820 0.136656 -0.334777 +v 0.047311 0.130811 -0.290251 +v 0.047584 0.016789 0.181878 +v 0.047900 0.008711 0.188889 +v 0.047921 0.545287 0.162762 +v 0.048500 -0.000000 0.110536 +v 0.048551 0.112021 -0.311572 +v 0.048656 -0.000000 0.125984 +v 0.049394 0.116069 -0.318805 +v 0.049675 0.157272 -0.255833 +v 0.050506 0.134831 -0.131364 +v 0.050881 0.036021 0.110334 +v 0.051019 0.036021 0.126195 +v 0.051049 0.092933 -0.092260 +v 0.051382 0.023543 0.110536 +v 0.051517 0.023543 0.125984 +v 0.053141 0.619562 0.335156 +v 0.053387 0.183340 -0.149113 +v 0.053669 0.121597 -0.317475 +v 0.054192 0.000000 0.161963 +v 0.054192 0.023542 0.161963 +v 0.054229 0.380928 0.400748 +v 0.055866 0.129688 -0.155702 +v 0.057237 0.487752 0.146977 +v 0.058075 0.011035 0.161552 +v 0.058631 0.011034 0.163433 +v 0.058998 0.152749 0.091950 +v 0.059397 0.200471 -0.184557 +v 0.059456 0.152749 0.144435 +v 0.059671 0.008493 0.186882 +v 0.059764 0.117560 -0.223283 +v 0.059860 0.011157 0.095181 +v 0.061000 -0.000000 0.099517 +v 0.061807 0.011017 0.141432 +v 0.061855 0.633912 0.198138 +v 0.061984 0.171957 -0.262738 +v 0.061999 0.036021 0.099020 +v 0.062210 0.023543 0.099517 +v 0.062333 0.036021 0.137314 +v 0.062536 0.000000 0.136812 +v 0.062536 0.023543 0.136812 +v 0.062604 0.194484 -0.178316 +v 0.062751 0.000000 0.161518 +v 0.062751 0.023542 0.161518 +v 0.062922 0.016789 0.181082 +v 0.063464 0.315023 0.424014 +v 0.063812 0.000000 0.182999 +v 0.063864 0.168226 -0.141393 +v 0.064095 0.236462 0.416811 +v 0.064204 0.580770 0.340581 +v 0.064540 0.074381 0.025607 +v 0.064740 0.169511 -0.177313 +v 0.065011 0.159570 0.370946 +v 0.066397 0.438051 0.126406 +v 0.066609 0.597441 0.333838 +v 0.066814 0.105255 -0.284853 +v 0.067092 0.632621 0.302114 +v 0.069118 0.619884 0.312623 +v 0.070222 0.568756 0.341219 +v 0.071771 0.146832 -0.232843 +v 0.071869 0.102399 -0.316626 +v 0.072177 0.403912 0.099363 +v 0.073197 0.641134 0.219611 +v 0.073794 0.213570 -0.272878 +v 0.074101 0.598820 0.188336 +v 0.074435 0.523955 0.335575 +v 0.074852 0.169514 -0.346888 +v 0.074932 0.102781 0.284689 +v 0.075479 0.376934 0.060584 +v 0.075501 0.257005 -0.174935 +v 0.075689 0.285823 -0.116889 +v 0.076714 0.316886 -0.054483 +v 0.076869 0.347970 0.007250 +v 0.077074 0.630072 0.283259 +v 0.077270 0.107153 -0.332229 +v 0.077570 0.000000 0.182879 +v 0.077658 0.023543 0.099383 +v 0.077819 0.130485 -0.295382 +v 0.077861 0.036021 0.098881 +v 0.077948 0.189063 -0.350680 +v 0.077983 0.000000 0.136678 +v 0.077983 0.023543 0.136678 +v 0.078118 0.640724 
0.256138 +v 0.078195 0.036021 0.137175 +v 0.078199 0.000000 0.161384 +v 0.078199 0.023542 0.161384 +v 0.078370 0.016789 0.180947 +v 0.078584 0.011017 0.141315 +v 0.078802 0.469114 0.337649 +v 0.078834 -0.000000 0.099383 +v 0.078988 0.541517 0.332788 +v 0.079687 0.117790 -0.325217 +v 0.079809 0.011035 0.162972 +v 0.079919 0.011154 0.095017 +v 0.080738 0.152749 0.091760 +v 0.081196 0.152749 0.144245 +v 0.081391 0.176977 -0.360585 +v 0.081499 0.075202 0.163867 +v 0.081721 0.008493 0.186690 +v 0.082875 0.011035 0.161334 +v 0.084343 0.000000 0.161828 +v 0.084343 0.023542 0.161828 +v 0.086657 0.595485 0.328782 +v 0.087798 0.416910 0.346449 +v 0.088348 0.625276 0.253614 +v 0.088556 0.540444 0.177031 +v 0.088582 0.586951 0.300071 +v 0.088663 0.613435 0.311815 +v 0.088677 0.023543 0.110211 +v 0.088739 0.008714 0.188720 +v 0.088751 0.177286 -0.243981 +v 0.088782 0.133970 -0.132627 +v 0.088812 0.023543 0.125659 +v 0.089174 0.036021 0.109999 +v 0.089313 0.036021 0.125861 +v 0.089379 0.016789 0.181743 +v 0.089547 0.572544 0.334686 +v 0.090557 0.523021 0.310805 +v 0.091296 0.000000 0.183158 +v 0.091554 -0.000000 0.110211 +v 0.091710 -0.000000 0.125659 +v 0.092785 0.592654 0.320464 +v 0.093777 0.116892 -0.223617 +v 0.094026 0.606507 0.307200 +v 0.094210 0.105604 -0.060691 +v 0.094364 0.011443 0.108301 +v 0.094896 0.011422 0.125183 +v 0.095027 0.621583 0.288330 +v 0.095171 0.000000 0.150809 +v 0.095171 0.023542 0.150809 +v 0.095280 0.574918 0.325136 +v 0.096245 0.152749 0.106998 +v 0.096434 0.152749 0.128739 +v 0.096557 0.550758 0.327943 +v 0.097188 0.598102 0.212399 +v 0.097309 0.100179 -0.311182 +v 0.097346 0.153542 -0.138764 +v 0.097712 0.104736 -0.283474 +v 0.098191 0.215751 -0.135563 +v 0.098672 0.612768 0.288895 +v 0.098713 0.206135 -0.250563 +v 0.099737 0.011168 0.149808 +v 0.099918 0.606780 0.231631 +v 0.100207 0.016789 0.170725 +v 0.100844 0.558048 0.319964 +v 0.100939 0.000000 0.173344 +v 0.101175 0.134097 -0.229379 +v 0.101831 0.518224 0.281160 +v 0.102091 0.594984 0.249919 +v 0.102403 0.527085 0.290867 +v 0.103247 0.142279 -0.135871 +v 0.103418 0.112726 -0.317819 +v 0.104044 0.617747 0.264620 +v 0.104647 0.478193 0.162301 +v 0.104687 0.120135 -0.290130 +v 0.104716 0.104727 -0.320028 +v 0.105344 0.008277 0.174540 +v 0.105479 0.609757 0.270451 +v 0.105811 0.535965 0.310361 +v 0.106606 0.123736 -0.228188 +v 0.106642 0.174906 -0.324488 +v 0.106994 0.365639 0.370634 +v 0.107312 0.137050 -0.090711 +v 0.107987 0.546564 0.306331 +v 0.108022 0.577398 0.288547 +v 0.108683 0.579540 0.223200 +v 0.109111 0.578498 0.288917 +v 0.109199 0.249094 -0.152950 +v 0.109241 0.110478 -0.290242 +v 0.109497 0.191243 -0.238950 +v 0.109693 0.203667 -0.117799 +v 0.109788 0.528412 0.251796 +v 0.109959 0.164591 0.352447 +v 0.112296 0.550855 0.230581 +v 0.112624 0.598277 0.256810 +v 0.112966 0.467170 0.290646 +v 0.113297 0.602954 0.247039 +v 0.113339 0.536796 0.203610 +v 0.114671 0.191602 -0.327942 +v 0.114796 0.543543 0.287887 +v 0.114827 0.532129 0.286651 +v 0.115689 0.234361 -0.136325 +v 0.116742 0.532895 0.242578 +v 0.118123 0.176794 -0.109455 +v 0.118191 0.581403 0.251630 +v 0.119446 0.549800 0.269574 +v 0.120308 0.581168 0.240296 +v 0.120689 0.563658 0.256302 +v 0.121084 0.218385 0.375776 +v 0.121191 0.540277 0.263166 +v 0.121316 0.423380 0.141985 +v 0.121383 0.182458 -0.328378 +v 0.122448 0.097057 0.032284 +v 0.123197 0.558227 0.246199 +v 0.129922 0.410436 0.300478 +v 0.131753 0.471989 0.194790 +v 0.132269 0.389958 0.117715 +v 0.132597 0.125548 0.280918 +v 0.133172 0.295566 0.374496 +v 0.133509 0.468974 0.240426 +v 
0.135885 0.277055 -0.095133 +v 0.306284 0.199232 0.310041 +v 0.297406 0.205207 0.337277 +v 0.310616 0.197527 0.280570 +v 0.139245 0.365784 0.081771 +v 0.140684 0.307797 -0.032423 +v 0.283861 0.215779 0.359834 +v 0.312102 0.199945 0.252261 +v 0.142062 0.338380 0.029476 +v 0.145322 0.098222 0.162923 +v 0.189911 0.253934 0.279539 +v 0.183227 0.258688 0.304989 +v 0.197897 0.250625 0.255104 +v 0.267076 0.229523 0.376828 +v 0.373441 0.185723 0.329395 +v 0.311158 0.205490 0.226656 +v 0.367682 0.189965 0.355785 +v 0.376602 0.184092 0.303164 +v 0.211968 0.246990 0.233566 +v 0.183463 0.262650 0.329356 +v 0.357966 0.197386 0.380319 +v 0.378782 0.184481 0.274185 +v 0.152025 0.361819 0.309018 +v 0.153050 0.414336 0.183524 +v 0.226784 0.243587 0.217367 +v 0.187906 0.265150 0.348897 +v 0.154510 0.136821 0.023033 +v 0.345130 0.206454 0.397151 +v 0.378659 0.186712 0.248543 +v 0.156079 0.410950 0.241437 +v 0.397037 0.185777 0.335022 +v 0.404029 0.182635 0.272766 +v 0.307221 0.217582 0.205194 +v 0.249857 0.249002 0.387412 +v 0.378929 0.198588 0.392237 +v 0.151855 0.282240 0.267952 +v 0.407699 0.184037 0.306373 +v 0.147336 0.285989 0.297016 +v 0.163091 0.277271 0.241923 +v 0.394964 0.192043 0.366565 +v 0.162635 0.173433 0.284031 +v 0.157635 0.284931 0.325991 +v 0.186444 0.268842 0.221887 +v 0.398862 0.187209 0.236706 +v 0.218407 0.257004 0.205871 +v 0.176787 0.280260 0.352444 +v 0.358391 0.212637 0.412111 +v 0.169005 0.260644 -0.070464 +v 0.459164 0.173963 0.354460 +v 0.169255 0.376939 0.165721 +v 0.466918 0.173734 0.375775 +v 0.472560 0.170176 0.341774 +v 0.480500 0.165241 0.285246 +v 0.171946 0.180030 0.004606 +v 0.478482 0.167240 0.300566 +v 0.435563 0.190341 0.413338 +v 0.443629 0.186522 0.398357 +v 0.478651 0.167645 0.269413 +v 0.426462 0.197179 0.425891 +v 0.176190 0.316661 0.288524 +v 0.176613 0.366673 0.234442 +v 0.179471 0.143195 0.158854 +v 0.179832 0.231850 0.268517 +v 0.180718 0.349336 0.131155 +v 0.180813 0.238871 -0.045295 +v 0.180884 0.213528 -0.018877 +v 0.244039 0.272589 0.393463 +v 0.307050 0.238412 0.189089 +v 0.148004 0.304722 0.262320 +v 0.181900 0.290250 0.002244 +v 0.160827 0.298443 0.233675 +v 0.143501 0.308095 0.295224 +v 0.156086 0.304659 0.327091 +v 0.434112 0.194556 0.308295 +v 0.187188 0.287210 0.213473 +v 0.418066 0.203787 0.371299 +v 0.410477 0.199747 0.217911 +v 0.175123 0.298577 0.355306 +v 0.219411 0.273951 0.197641 +v 0.361307 0.228496 0.420621 +v 0.492765 0.174272 0.363761 +v 0.478171 0.180979 0.382042 +v 0.485302 0.176224 0.339632 +v 0.496769 0.169302 0.287238 +v 0.489125 0.173151 0.304673 +v 0.454963 0.190838 0.396894 +v 0.454234 0.192119 0.417594 +v 0.488652 0.171710 0.267515 +v 0.432319 0.201383 0.428963 +v 0.185557 0.320544 0.073513 +v 0.193823 0.332686 0.203719 +v 0.194636 0.193628 0.140081 +v 0.436188 0.212396 0.427082 +v 0.492469 0.184179 0.266969 +v 0.457195 0.205329 0.415835 +v 0.458289 0.204389 0.396249 +v 0.500000 0.183200 0.285478 +v 0.493462 0.186720 0.302198 +v 0.197922 0.271207 0.210323 +v 0.488825 0.191087 0.339100 +v 0.482321 0.195689 0.378724 +v 0.495158 0.190821 0.361292 +v 0.368627 0.243228 0.417250 +v 0.436103 0.217693 0.424430 +v 0.185202 0.313474 0.352961 +v 0.228760 0.289306 0.198901 +v 0.201279 0.266369 0.047756 +v 0.416992 0.217095 0.221540 +v 0.422559 0.221705 0.368856 +v 0.197067 0.303293 0.214530 +v 0.438362 0.212757 0.306392 +v 0.166692 0.320369 0.325152 +v 0.490492 0.190838 0.267392 +v 0.172179 0.314955 0.234580 +v 0.155420 0.324305 0.294069 +v 0.455587 0.211226 0.396555 +v 0.203159 0.236543 0.099216 +v 0.160136 0.321404 0.262359 +v 0.449679 
0.214579 0.412275 +v 0.256925 0.289318 0.390122 +v 0.203920 0.298535 0.136863 +v 0.318997 0.256455 0.192568 +v 0.491911 0.193590 0.298648 +v 0.494468 0.192600 0.283041 +v 0.486037 0.198930 0.339638 +v 0.480551 0.202882 0.373714 +v 0.477265 0.205258 0.353277 +v 0.376420 0.251180 0.409342 +v 0.195481 0.320213 0.349614 +v 0.237304 0.297178 0.203415 +v 0.418491 0.227792 0.233582 +v 0.206404 0.311456 0.218878 +v 0.177635 0.327591 0.322861 +v 0.184373 0.322639 0.238557 +v 0.168644 0.331376 0.293683 +v 0.416015 0.237688 0.363366 +v 0.429837 0.229691 0.302869 +v 0.173639 0.328689 0.264510 +v 0.271778 0.295963 0.384274 +v 0.329733 0.265194 0.202050 +v 0.401009 0.245990 0.388818 +v 0.427580 0.230593 0.268712 +v 0.421787 0.238222 0.331228 +v 0.405534 0.242886 0.244077 +v 0.371743 0.263324 0.393007 +v 0.215172 0.323361 0.344691 +v 0.254251 0.302152 0.213372 +v 0.408270 0.246329 0.269164 +v 0.387107 0.259717 0.375769 +v 0.213427 0.326539 0.324694 +v 0.241971 0.310941 0.228970 +v 0.407942 0.250390 0.298143 +v 0.398759 0.256490 0.350957 +v 0.343068 0.273091 0.221822 +v 0.405238 0.253464 0.324433 +v 0.298879 0.297568 0.372023 +v 0.230223 0.319455 0.250004 +v 0.215534 0.327533 0.299940 +v 0.222983 0.324372 0.274326 +v 0.348989 0.278319 0.246721 +v 0.320733 0.294600 0.354109 +v 0.350800 0.283191 0.274390 +v 0.337572 0.291128 0.331035 +v 0.347471 0.287205 0.303648 +vn 0.0239 -0.8619 0.5066 +vn 0.0235 -0.9969 0.0752 +vn 0.8582 -0.3191 -0.4021 +vn 0.9920 -0.1118 0.0587 +vn 0.6744 -0.6321 0.3817 +vn 0.3732 -0.9080 -0.1902 +vn 0.0058 -0.9078 -0.4194 +vn -0.0055 -0.8532 0.5216 +vn -0.5954 -0.0794 -0.7995 +vn -0.8520 -0.2914 -0.4349 +vn -0.9969 -0.0706 0.0348 +vn -0.6748 0.6319 -0.3812 +vn -0.0364 -0.8613 0.5068 +vn -0.0430 -0.9961 0.0771 +vn 0.0073 0.3862 -0.9224 +vn -0.4264 0.0488 -0.9032 +vn -0.6980 0.6132 -0.3697 +vn -0.0115 0.8624 -0.5061 +vn 0.7070 0.6055 -0.3655 +vn 0.6230 -0.0935 -0.7767 +vn 0.4479 -0.6034 -0.6598 +vn 0.9973 0.0621 -0.0381 +vn -0.0568 0.3847 -0.9213 +vn -0.8546 -0.2615 -0.4486 +vn -0.9983 -0.0466 0.0337 +vn -0.0477 0.8610 -0.5064 +vn -0.5459 -0.7773 -0.3127 +vn -0.4336 -0.6051 -0.6677 +vn -0.9956 0.0797 -0.0489 +vn -0.7015 -0.6101 0.3684 +vn 0.5532 -0.7732 -0.3101 +vn 0.6985 -0.6126 0.3698 +vn -0.3706 -0.9083 -0.1941 +vn -0.6831 -0.6253 0.3774 +vn -0.0233 0.8532 -0.5210 +vn -0.0015 -0.1669 -0.9860 +vn 0.4018 0.0777 -0.9124 +vn 0.6491 0.6512 -0.3931 +vn 0.8604 -0.2389 -0.4502 +vn 0.9992 -0.0315 0.0253 +vn 0.9185 -0.0657 -0.3899 +vn 0.9191 -0.0546 -0.3901 +vn 0.3741 -0.0546 -0.9258 +vn 0.3738 -0.0657 -0.9252 +vn 0.9252 -0.0657 0.3738 +vn 0.9258 -0.0546 0.3741 +vn -0.6702 0.0186 0.7419 +vn 0.6826 0.0191 0.7306 +vn 0.4746 0.7353 0.4839 +vn -0.4664 0.7353 0.4918 +vn -0.7308 0.6528 0.1994 +vn -0.9441 0.0723 0.3215 +vn 0.2003 0.0395 0.9789 +vn 0.1414 0.7506 0.6454 +vn -0.2005 0.0395 0.9789 +vn 0.9440 0.0720 0.3220 +vn 0.7309 0.6528 0.1990 +vn -0.1416 0.7505 0.6455 +vn -0.9252 -0.0657 -0.3738 +vn -0.9258 -0.0546 -0.3741 +vn -0.9191 -0.0546 0.3901 +vn -0.9185 -0.0657 0.3899 +vn -0.3902 -0.0546 -0.9191 +vn -0.3899 -0.0657 -0.9185 +vn -0.3736 0.0096 -0.9276 +vn -0.2440 -0.7664 -0.5943 +vn -0.6247 -0.7351 -0.2636 +vn -0.9157 0.0688 -0.3960 +vn 0.2337 -0.7668 -0.5978 +vn 0.0012 -1.0000 0.0094 +vn -0.0044 -0.9996 0.0273 +vn 0.9916 0.0727 -0.1066 +vn 0.5761 -0.8155 -0.0549 +vn 0.6191 -0.7356 -0.2750 +vn 0.9083 0.0682 -0.4128 +vn -0.3740 -0.0546 0.9258 +vn -0.3738 -0.0657 0.9252 +vn 0.3579 0.0088 -0.9337 +vn -0.5747 -0.8072 0.1348 +vn -0.1181 -0.7849 0.6083 +vn -0.2636 0.0319 0.9641 +vn -0.9690 -0.0100 
0.2469 +vn -0.5608 -0.8250 -0.0698 +vn -0.9881 0.0723 -0.1355 +vn 0.9998 -0.0027 -0.0188 +vn 0.5708 0.8149 0.1012 +vn -0.6888 -0.7061 0.1645 +vn -0.5550 -0.8192 -0.1445 +vn 0.5590 -0.8102 0.1766 +vn 0.0498 -0.7563 0.6523 +vn 0.3899 -0.0657 0.9185 +vn 0.3901 -0.0546 0.9191 +vn 0.9481 -0.0132 0.3176 +vn 0.4633 0.8348 0.2974 +vn 0.6070 -0.7733 0.1832 +vn 0.9453 0.0355 0.3243 +vn 0.5344 -0.8371 -0.1166 +vn -0.9698 0.0285 -0.2421 +vn 0.6665 0.7093 0.2294 +vn 0.9807 0.0302 -0.1930 +vn 0.5558 0.8303 -0.0418 +vn -0.5833 -0.8121 -0.0148 +vn -0.4221 -0.7853 0.4530 +vn -1.0000 -0.0031 0.0018 +vn 0.4318 -0.7847 0.4448 +vn 0.5848 -0.8107 -0.0268 +vn 0.9204 0.3685 0.1308 +vn -0.1885 0.7694 0.6103 +vn -0.4713 0.8399 0.2691 +vn 0.2311 0.8002 0.5534 +vn 0.1165 -0.4426 0.8891 +vn -0.5673 0.8158 0.1125 +vn -0.2248 0.7838 0.5788 +vn -0.1531 -0.1975 0.9683 +vn -0.5704 0.8182 -0.0719 +vn -0.9147 0.3831 0.1285 +vn -0.3740 -0.0546 -0.9258 +vn -0.9191 -0.0546 -0.3901 +vn -0.9185 -0.0657 -0.3899 +vn -0.3738 -0.0657 -0.9252 +vn -0.9258 -0.0546 0.3740 +vn -0.9252 -0.0657 0.3738 +vn -0.6665 0.7093 0.2294 +vn -0.9455 0.0354 0.3237 +vn 0.2635 0.0318 0.9641 +vn 0.1885 0.7694 0.6103 +vn -0.4750 0.7353 0.4835 +vn -0.6827 0.0190 0.7305 +vn 0.6704 0.0187 0.7417 +vn 0.4664 0.7353 0.4918 +vn 0.9185 -0.0657 0.3899 +vn 0.9191 -0.0546 0.3901 +vn 0.9258 -0.0546 -0.3741 +vn 0.9252 -0.0657 -0.3738 +vn 0.3902 -0.0546 -0.9191 +vn 0.3899 -0.0657 -0.9185 +vn 0.6246 -0.7350 -0.2637 +vn 0.2439 -0.7664 -0.5943 +vn 0.3733 0.0098 -0.9277 +vn 0.9157 0.0688 -0.3959 +vn -0.0012 -1.0000 0.0094 +vn -0.2337 -0.7668 -0.5979 +vn 0.0044 -0.9996 0.0273 +vn -0.6192 -0.7356 -0.2748 +vn -0.5762 -0.8155 -0.0549 +vn -0.9916 0.0722 -0.1075 +vn -0.9083 0.0682 -0.4128 +vn 0.3738 -0.0657 0.9252 +vn 0.3740 -0.0546 0.9258 +vn -0.3583 0.0086 -0.9336 +vn 0.1180 -0.7849 0.6083 +vn 0.5749 -0.8069 0.1356 +vn 0.9689 -0.0099 0.2471 +vn 0.9882 0.0728 -0.1345 +vn 0.5608 -0.8250 -0.0698 +vn -0.9998 -0.0021 -0.0179 +vn -0.5710 0.8147 0.1010 +vn -0.5589 -0.8103 0.1763 +vn 0.5550 -0.8192 -0.1448 +vn 0.6888 -0.7061 0.1645 +vn -0.0502 -0.7564 0.6522 +vn -0.3901 -0.0546 0.9191 +vn -0.3899 -0.0657 0.9185 +vn -0.9479 -0.0126 0.3184 +vn -0.4637 0.8347 0.2972 +vn -0.6068 -0.7734 0.1834 +vn -0.5345 -0.8371 -0.1165 +vn 0.9697 0.0284 -0.2427 +vn -0.9806 0.0299 -0.1936 +vn -0.5557 0.8304 -0.0414 +vn 0.4221 -0.7853 0.4529 +vn 0.5836 -0.8119 -0.0141 +vn 1.0000 -0.0029 0.0021 +vn -0.4318 -0.7846 0.4449 +vn -0.5848 -0.8107 -0.0268 +vn -0.9204 0.3685 0.1308 +vn 0.4715 0.8398 0.2690 +vn -0.2310 0.8002 0.5534 +vn -0.1168 -0.4426 0.8891 +vn 0.5672 0.8158 0.1125 +vn 0.2249 0.7838 0.5788 +vn 0.1561 -0.1975 0.9678 +vn 0.5704 0.8182 -0.0720 +vn 0.9147 0.3832 0.1287 +vn -0.8999 0.2160 -0.3788 +vn 0.3581 0.1808 -0.9160 +vn 0.8929 0.2159 -0.3950 +vn -0.3736 0.1808 -0.9098 +vn 0.8999 0.2159 -0.3788 +vn -0.3578 0.1802 -0.9162 +vn -0.8929 0.2159 -0.3951 +vn 0.3737 0.1811 -0.9097 +vn 0.9344 0.2484 0.2553 +vn 0.9217 0.2710 0.2774 +vn 0.9278 0.2112 0.3074 +vn 0.9100 0.2774 0.3082 +vn 0.9028 0.2653 0.3385 +vn 0.9020 0.2389 0.3596 +vn 0.9072 0.2060 0.3668 +vn 0.9170 0.1742 0.3589 +vn 0.9293 0.1511 0.3370 +vn 0.9410 0.1440 0.3061 +vn 0.9486 0.1560 0.2753 +vn 0.9498 0.1830 0.2539 +vn 0.9446 0.2165 0.2469 +vn -0.9344 0.2484 0.2553 +vn -0.9446 0.2165 0.2469 +vn -0.9278 0.2112 0.3074 +vn -0.9100 0.2774 0.3082 +vn -0.9028 0.2653 0.3385 +vn -0.9217 0.2710 0.2774 +vn 0.2586 0.4705 0.8436 +vn 0.3102 0.0545 0.9491 +vn 0.6228 0.1202 0.7731 +vn 0.5887 0.4029 0.7008 +vn -0.9498 0.1830 0.2539 +vn -0.9486 0.1560 0.2753 +vn 
-0.9410 0.1440 0.3061 +vn -0.9293 0.1511 0.3370 +vn -0.9170 0.1742 0.3589 +vn -0.9072 0.2060 0.3668 +vn -0.9020 0.2389 0.3596 +vn 0.6697 0.6874 0.2810 +vn 0.7613 0.6469 0.0445 +vn 0.5155 0.8419 -0.1593 +vn 0.3709 0.9061 0.2037 +vn 0.8910 -0.3749 0.2561 +vn 0.6888 -0.7070 0.1600 +vn 0.7979 -0.5646 -0.2111 +vn 0.9579 -0.2865 0.0161 +vn 0.4174 -0.3550 0.8365 +vn 0.7024 -0.1542 0.6948 +vn 0.2793 0.7757 0.5660 +vn 0.6077 0.6039 0.5158 +vn 0.8456 -0.2357 -0.4790 +vn 0.9831 -0.0760 -0.1667 +vn 0.9498 0.2114 -0.2306 +vn 0.8029 0.1920 -0.5643 +vn 0.6774 0.5878 -0.4423 +vn 0.8648 0.4801 -0.1469 +vn 0.5529 -0.6329 0.5421 +vn 0.8002 -0.3305 0.5004 +vn -0.8910 -0.3749 0.2561 +vn -0.6888 -0.7070 0.1600 +vn -0.5529 -0.6329 0.5421 +vn -0.8002 -0.3305 0.5004 +vn -0.6228 0.1202 0.7731 +vn -0.3102 0.0545 0.9491 +vn -0.2586 0.4705 0.8436 +vn -0.5887 0.4029 0.7008 +vn -0.9263 0.1988 0.3200 +vn -0.8648 0.4801 -0.1469 +vn -0.9498 0.2114 -0.2306 +vn -0.5155 0.8419 -0.1593 +vn -0.7613 0.6469 0.0445 +vn -0.6697 0.6874 0.2810 +vn -0.3709 0.9061 0.2037 +vn -0.7979 -0.5646 -0.2111 +vn -0.9579 -0.2865 0.0161 +vn -0.7024 -0.1542 0.6948 +vn -0.4174 -0.3550 0.8365 +vn -0.6077 0.6039 0.5158 +vn -0.2793 0.7757 0.5660 +vn -0.8456 -0.2357 -0.4790 +vn -0.9831 -0.0760 -0.1667 +vn -0.6774 0.5878 -0.4423 +vn -0.8029 0.1920 -0.5643 +vn 0.9263 0.1988 0.3200 +vn 0.3529 0.6288 -0.6929 +vn 0.5556 0.6541 -0.5133 +vn 0.4769 0.2484 -0.8431 +vn 0.2130 0.3154 -0.9248 +vn -0.4251 -0.7902 0.4414 +vn -0.4490 -0.5002 0.7404 +vn -0.6810 -0.4269 0.5950 +vn -0.5541 -0.7352 0.3904 +vn 0.2303 -0.4233 -0.8762 +vn -0.0325 -0.2608 -0.9648 +vn 0.3825 0.9225 0.0516 +vn 0.5723 0.8122 0.1132 +vn 0.5988 0.8007 0.0194 +vn 0.4107 0.9111 -0.0362 +vn 0.5909 0.7919 -0.1541 +vn 0.5845 0.8016 -0.1253 +vn 0.5547 0.7613 -0.3358 +vn -0.5689 -0.7937 0.2154 +vn -0.8252 -0.4311 0.3648 +vn -0.4075 -0.8393 -0.3599 +vn -0.4720 -0.8480 -0.2410 +vn -0.5996 -0.5774 -0.5541 +vn -0.4353 -0.6136 -0.6588 +vn -0.1965 -0.7524 -0.6287 +vn -0.2636 -0.9131 -0.3112 +vn -0.0608 -0.8967 -0.4385 +vn -0.1341 -0.9631 -0.2332 +vn -0.2927 -0.8292 0.4762 +vn -0.2839 -0.6501 0.7048 +vn -0.3196 -0.1018 -0.9421 +vn -0.1978 -0.0635 0.9782 +vn -0.3677 0.0281 0.9295 +vn -0.6488 0.1567 0.7446 +vn -0.1323 0.8586 0.4953 +vn 0.0651 0.7608 0.6458 +vn 0.2407 0.9019 0.3585 +vn 0.1140 0.9461 0.3032 +vn -0.1073 -0.8012 0.5887 +vn 0.2646 -0.3823 0.8853 +vn 0.4706 -0.8122 0.3448 +vn -0.1103 -0.9761 0.1874 +vn 0.1914 -0.7532 -0.6293 +vn 0.0562 -0.8973 -0.4377 +vn -0.2373 -0.4240 -0.8740 +vn 0.0251 -0.2622 -0.9647 +vn 0.0105 0.4336 0.9010 +vn -0.1355 0.5127 0.8478 +vn -0.5137 -0.8579 -0.0116 +vn -0.5421 -0.8355 0.0903 +vn -0.8782 -0.4665 0.1053 +vn -0.8344 -0.5296 -0.1527 +vn -0.4036 0.6158 0.6767 +vn -0.5868 0.6927 0.4193 +vn -0.2283 0.9348 0.2720 +vn -0.2416 0.9700 0.0285 +vn -0.6655 0.7428 0.0734 +vn -0.1715 -0.9405 0.2933 +vn -0.1892 -0.9335 0.3046 +vn -0.8491 0.2290 0.4760 +vn -0.9647 0.2427 0.1019 +vn -0.4966 -0.8595 -0.1206 +vn -0.7433 -0.5528 -0.3767 +vn -0.6240 0.7242 -0.2934 +vn -0.9353 0.1920 -0.2972 +vn 0.6023 0.7955 -0.0661 +vn 0.6230 0.7817 -0.0289 +vn -0.1861 0.9605 -0.2068 +vn -0.4955 0.6250 -0.6032 +vn -0.7740 0.1067 -0.6242 +vn 0.1321 0.9829 0.1284 +vn 0.1732 0.9849 0.0038 +vn -0.1297 0.8974 -0.4217 +vn -0.3065 0.4849 -0.8191 +vn -0.5367 -0.0027 -0.8438 +vn -0.0384 0.7992 -0.5998 +vn -0.0976 0.3878 -0.9166 +vn 0.2036 0.9742 -0.0977 +vn 0.2124 0.9549 -0.2073 +vn 0.0947 0.6974 -0.7104 +vn 0.2175 0.9183 -0.3309 +vn 0.5973 0.7941 0.1121 +vn 0.4714 0.7359 0.4860 +vn 0.5882 0.8087 0.0069 +vn 0.6039 
0.7969 0.0195 +vn -0.1098 -0.9892 0.0975 +vn -0.1384 -0.9719 0.1904 +vn -0.1142 -0.9886 0.0983 +vn 0.3385 0.9301 0.1425 +vn 0.5094 0.8302 0.2265 +vn 0.2558 0.8488 -0.4626 +vn -0.3209 -0.9458 0.0491 +vn -0.3459 -0.9282 0.1370 +vn -0.3800 -0.8962 0.2291 +vn -0.2963 -0.9474 -0.1212 +vn -0.3050 -0.9516 -0.0371 +vn 0.3658 0.8414 0.3977 +vn 0.4239 0.8973 -0.1235 +vn 0.4241 0.8809 -0.2100 +vn 0.2681 0.7402 0.6166 +vn 0.5261 0.8222 0.2172 +vn -0.5071 0.8308 0.2294 +vn -0.3621 0.8422 0.3995 +vn -0.2375 0.9025 0.3593 +vn -0.3369 0.9305 0.1440 +vn 0.5440 -0.7965 0.2641 +vn 0.5856 -0.6233 -0.5182 +vn 0.1104 -0.9501 -0.2917 +vn -0.0883 -0.9948 0.0503 +vn 0.9107 0.0384 -0.4114 +vn 0.6863 -0.6254 -0.3714 +vn 0.8631 0.4562 0.2168 +vn -0.1969 -0.7279 0.6568 +vn 0.0915 -0.3060 0.9476 +vn -0.1311 -0.9907 -0.0355 +vn -0.1109 -0.9937 0.0145 +vn -0.1188 -0.9906 -0.0672 +vn 0.4774 0.3605 0.8014 +vn 0.0587 -0.9983 0.0026 +vn 0.0115 -0.8324 0.5540 +vn 0.8194 0.1236 -0.5598 +vn 0.9103 0.3881 0.1440 +vn 0.1651 -0.9644 -0.2066 +vn -0.0643 -0.9406 0.3334 +vn 0.4486 -0.4201 0.7888 +vn 0.5487 -0.6046 -0.5774 +vn 0.0344 -0.9435 -0.3297 +vn -0.6193 0.6386 0.4568 +vn -0.6534 0.1867 0.7336 +vn -0.9086 0.3893 0.1512 +vn -0.6227 0.7820 -0.0253 +vn 0.2049 -0.0624 0.9768 +vn -0.0804 -0.3035 0.9494 +vn 0.1991 -0.7283 0.6557 +vn 0.2885 -0.6496 0.7034 +vn 0.6092 -0.5856 0.5347 +vn 0.3804 0.7097 0.5930 +vn 0.6268 0.7238 0.2886 +vn -0.1184 -0.9887 0.0921 +vn 0.6589 0.1854 0.7290 +vn 0.6234 0.6374 0.4529 +vn -0.0591 0.7617 0.6452 +vn 0.1407 0.5132 0.8466 +vn 0.4087 0.6163 0.6732 +vn 0.1363 0.8591 0.4934 +vn 0.7121 0.5840 -0.3896 +vn 0.5421 -0.8358 0.0872 +vn 0.3467 -0.9281 0.1355 +vn 0.3206 -0.9460 0.0478 +vn 0.5132 -0.8581 -0.0147 +vn 0.7524 0.6521 -0.0929 +vn 0.6764 0.6003 -0.4267 +vn 0.7959 0.0916 -0.5985 +vn 0.4184 0.8058 -0.4190 +vn 0.7533 -0.6561 0.0446 +vn -0.2624 0.7414 0.6176 +vn -0.0040 0.4346 0.9006 +vn 0.7202 -0.6599 0.2141 +vn -0.0365 -0.9434 -0.3296 +vn -0.5536 -0.6049 -0.5724 +vn 0.3186 0.3370 0.8859 +vn -0.2193 0.3142 -0.9237 +vn 0.0927 0.3874 -0.9172 +vn 0.3127 -0.1031 -0.9442 +vn 0.3743 0.0288 0.9269 +vn 0.6544 0.1571 0.7397 +vn 0.7905 0.5431 -0.2832 +vn -0.6041 -0.5851 0.5410 +vn -0.3083 0.3400 0.8885 +vn -0.8558 0.1935 0.4798 +vn -0.4789 0.2503 -0.8414 +vn -0.5619 0.6520 -0.5091 +vn -0.3575 0.6282 -0.6910 +vn 0.8527 0.2290 0.4696 +vn 0.6844 -0.4271 0.5910 +vn 0.8277 -0.4311 0.3592 +vn 0.2608 -0.9135 -0.3121 +vn 0.1317 -0.9635 -0.2331 +vn 0.9656 0.2421 0.0947 +vn 0.8786 -0.4670 0.0995 +vn 0.8329 -0.5303 -0.1581 +vn 0.9333 0.1909 -0.3042 +vn 0.7402 -0.5537 -0.3815 +vn 0.7695 0.1053 -0.6299 +vn -0.8611 0.4571 0.2225 +vn -0.9137 0.0382 -0.4046 +vn -0.7916 0.5442 -0.2778 +vn -0.5963 0.7944 0.1153 +vn 0.1111 -0.9760 0.1875 +vn 0.1126 -0.8002 0.5891 +vn 0.1185 -0.9887 0.0923 +vn 0.1100 -0.9891 0.0977 +vn -0.4608 -0.8071 0.3692 +vn -0.6899 -0.6250 -0.3652 +vn -0.5444 -0.7968 0.2622 +vn -0.4425 -0.4198 0.7924 +vn -0.0075 -0.8316 0.5554 +vn 0.0881 -0.9948 0.0507 +vn -0.5896 -0.6236 -0.5134 +vn -0.8233 0.1237 -0.5540 +vn -0.3749 0.7109 0.5950 +vn -0.6236 0.7249 0.2926 +vn -0.7984 0.0926 -0.5950 +vn -0.6790 0.6004 -0.4225 +vn -0.7524 0.6528 -0.0877 +vn -0.5848 0.8020 -0.1217 +vn -0.7536 -0.6554 0.0507 +vn -0.9977 0.0668 -0.0085 +vn -0.7183 -0.6599 0.2202 +vn -0.2533 -0.3812 0.8891 +vn -0.4684 0.3623 0.8058 +vn -0.9581 0.2518 0.1363 +vn -0.4681 0.7361 0.4889 +vn -0.5874 0.8092 0.0106 +vn -0.2587 0.8485 -0.4616 +vn -0.2191 0.9181 -0.3302 +vn 0.0348 0.7986 -0.6009 +vn -0.1012 0.6960 -0.7109 +vn 0.4544 -0.4995 0.7376 +vn 0.0661 
-0.9403 0.3339 +vn 0.1303 -0.9908 -0.0354 +vn -0.0593 -0.9982 0.0040 +vn -0.7145 0.5841 -0.3851 +vn 0.4277 -0.7901 0.4391 +vn 0.5564 -0.7352 0.3871 +vn -0.5913 0.7922 -0.1510 +vn -0.5570 0.7610 -0.3326 +vn -0.5982 0.8010 0.0228 +vn -0.5709 0.8127 0.1164 +vn -0.3814 0.9229 0.0534 +vn -0.4105 0.9112 -0.0342 +vn 0.4046 -0.8398 -0.3619 +vn 0.4301 -0.6147 -0.6612 +vn 0.2970 -0.8284 0.4749 +vn -0.1106 0.9467 0.3024 +vn 0.5699 -0.7939 0.2119 +vn 0.2424 0.9698 0.0257 +vn 0.2308 0.9350 0.2694 +vn 0.5903 0.6927 0.4144 +vn 0.6665 0.7424 0.0678 +vn 0.4953 -0.8599 -0.1232 +vn 0.4700 -0.8484 -0.2435 +vn 0.5951 -0.5785 -0.5578 +vn 0.1852 0.9602 -0.2093 +vn 0.6223 0.7235 -0.2988 +vn -0.2133 0.9549 -0.2068 +vn 0.1271 0.8969 -0.4237 +vn -0.1306 0.9831 0.1284 +vn 0.4914 0.6240 -0.6076 +vn 0.3006 0.4837 -0.8220 +vn -0.1726 0.9850 0.0041 +vn -0.2039 0.9742 -0.0972 +vn 0.5306 -0.0039 -0.8476 +vn 0.1731 -0.9403 0.2930 +vn -0.1658 -0.9648 -0.2040 +vn 0.3811 -0.8962 0.2272 +vn 0.1911 -0.9332 0.3042 +vn 0.1392 -0.9718 0.1904 +vn -0.6022 0.7959 -0.0625 +vn -0.4242 0.8974 -0.1214 +vn -0.4249 0.8811 -0.2078 +vn 0.3041 -0.9519 -0.0384 +vn 0.1144 -0.9885 0.0986 +vn 0.2950 -0.9476 -0.1223 +vn 0.1104 -0.9938 0.0147 +vn 0.1178 -0.9908 -0.0670 +vn -0.5239 0.8228 0.2201 +vn -0.6032 0.7973 0.0230 +vn -0.1170 -0.9492 -0.2921 +vn -0.4208 0.8057 -0.4168 +vn -0.6834 0.7047 0.1907 +vn -0.9988 -0.0076 -0.0490 +vn -0.9999 -0.0031 -0.0152 +vn -0.6523 0.7456 0.1361 +vn 0.6512 -0.7409 -0.1645 +vn 0.6300 -0.7166 -0.2994 +vn 0.9988 -0.0085 -0.0485 +vn 0.9999 -0.0010 -0.0149 +vn -0.6281 -0.7180 -0.3000 +vn 0.0006 -0.9271 -0.3748 +vn 0.0008 -0.9775 -0.2112 +vn -0.6481 -0.7435 -0.1649 +vn -0.0065 0.9544 0.2985 +vn 0.6905 0.6946 0.2017 +vn 0.6696 0.7097 0.2191 +vn -0.0553 0.9741 0.2194 +vn 0.6549 0.7434 0.1356 +vn 0.0019 0.9825 0.1861 +vn 0.6376 -0.4652 -0.6140 +vn 0.8678 0.3711 -0.3304 +vn 0.0011 -0.3783 -0.9257 +vn -0.6337 -0.4619 -0.6205 +vn -0.8687 0.3877 -0.3084 +vn -0.6722 0.7057 0.2241 +vn 0.8594 0.1923 0.4738 +vn 0.9598 0.2496 0.1286 +vn 0.9977 0.0665 -0.0151 +vn -0.0142 0.7323 -0.6809 +vn 0.6402 -0.5615 0.5242 +vn -0.0000 -0.8641 0.5033 +vn -0.0039 -0.8976 -0.4409 +vn 0.5987 -0.7413 -0.3035 +vn 0.5114 0.1924 0.8375 +vn 0.8368 -0.2375 0.4934 +vn 0.9342 -0.0496 0.3534 +vn 0.6229 0.5507 0.5556 +vn 0.9874 0.0077 0.1578 +vn 0.9947 0.0180 0.1009 +vn 0.7008 0.7077 0.0893 +vn 0.6926 0.6914 0.2057 +vn 0.4519 -0.1153 0.8846 +vn 0.7081 -0.6989 -0.1005 +vn 0.7449 -0.6633 0.0720 +vn 0.7275 -0.6757 0.1191 +vn 0.7293 -0.6707 0.1352 +vn -0.0000 -0.9979 0.0649 +vn -0.0000 -0.9911 0.1332 +vn -0.0000 -0.9670 -0.2548 +vn -0.0000 0.9976 0.0696 +vn -0.0000 0.9787 0.2055 +vn -0.0000 0.7927 0.6096 +vn -0.0000 -0.7459 -0.6660 +vn -0.0000 0.3599 0.9330 +vn -0.0000 -0.1720 0.9851 +vn -0.6403 -0.5609 0.5249 +vn -0.5947 -0.7434 -0.3062 +vn -0.4519 -0.1153 0.8846 +vn -0.7450 -0.6632 0.0719 +vn -0.7084 -0.6987 -0.0998 +vn -0.8367 -0.2371 0.4937 +vn -0.9342 -0.0496 0.3533 +vn -0.5114 0.1924 0.8375 +vn -0.9947 0.0178 0.1009 +vn -0.7293 -0.6707 0.1352 +vn -0.7274 -0.6758 0.1190 +vn -0.9874 0.0077 0.1578 +vn -0.6231 0.5509 0.5552 +vn -0.6926 0.6914 0.2057 +vn -0.7008 0.7077 0.0893 +vn 0.6093 -0.7457 -0.2696 +vn 0.6346 -0.7154 -0.2925 +vn 0.9954 0.0329 -0.0896 +vn 0.9992 0.0193 -0.0353 +vn -0.6326 -0.7169 -0.2930 +vn 0.0006 -0.9465 -0.3228 +vn 0.0006 -0.9484 -0.3172 +vn -0.6073 -0.7472 -0.2700 +vn 0.6243 0.7622 0.1709 +vn 0.6123 0.7807 -0.1252 +vn 0.0005 0.9672 -0.2541 +vn 0.0005 0.9628 0.2701 +vn 0.6000 0.7650 0.2342 +vn 0.0004 0.9484 0.3170 +vn 0.6204 -0.5613 
-0.5478 +vn 0.8038 0.2053 -0.5583 +vn -0.6186 -0.5623 -0.5488 +vn 0.0008 -0.6320 -0.7750 +vn -0.6129 0.7801 -0.1256 +vn -0.8028 0.2060 -0.5595 +vn -0.9954 0.0330 -0.0902 +vn -0.6227 0.7635 0.1711 +vn -0.5983 0.7661 0.2347 +vn -0.9992 0.0191 -0.0354 +vn 0.0004 0.3307 -0.9437 +vn 0.3753 0.3730 0.8485 +vn 0.6479 0.3913 0.6536 +vn 0.6736 0.1836 0.7160 +vn 0.3502 0.1562 0.9236 +vn -0.0002 0.3538 0.9353 +vn -0.0000 0.1497 0.9887 +vn 0.3375 0.0496 0.9400 +vn -0.0000 0.0396 0.9992 +vn 0.6968 0.0881 0.7118 +vn 0.8087 0.5549 -0.1951 +vn 0.8035 0.5432 -0.2433 +vn 0.4643 0.7935 -0.3935 +vn 0.4920 0.7723 -0.4019 +vn 0.8113 0.4959 -0.3096 +vn 0.4577 0.7889 -0.4101 +vn 0.9817 0.1889 -0.0232 +vn 0.9843 0.0874 -0.1536 +vn 0.9566 0.2763 0.0922 +vn 0.2867 0.5732 0.7677 +vn 0.5906 0.4794 0.6491 +vn 0.6839 0.6116 0.3977 +vn 0.2862 0.8867 0.3632 +vn -0.0000 0.5998 0.8002 +vn -0.0000 0.9245 0.3813 +vn 0.3040 0.9477 0.0967 +vn -0.0000 0.9959 0.0899 +vn 0.7292 0.6760 0.1067 +vn 0.9106 0.3780 0.1672 +vn 0.8309 0.5226 -0.1911 +vn 0.9057 0.3846 0.1782 +vn 0.8636 0.4428 -0.2411 +vn 0.5444 0.6845 -0.4849 +vn 0.5791 0.5216 -0.6265 +vn 0.9396 0.2985 0.1672 +vn 0.8862 0.3520 -0.3011 +vn 0.8867 0.2942 -0.3567 +vn 0.9652 0.2287 0.1269 +vn 0.5696 0.3745 -0.7316 +vn 0.5499 0.3009 -0.7792 +vn 0.9531 -0.2935 0.0745 +vn 0.9349 -0.1491 -0.3220 +vn 0.5446 0.7700 -0.3324 +vn 0.6641 0.7177 -0.2094 +vn 0.2624 -0.9375 0.2286 +vn 0.3529 -0.9344 -0.0493 +vn 0.3158 -0.2704 0.9095 +vn 0.6707 -0.2718 0.6901 +vn 0.7614 0.0760 0.6439 +vn 0.3682 0.1317 0.9204 +vn 0.2352 -0.6568 0.7164 +vn 0.5271 -0.5774 0.6235 +vn -0.0000 -0.2988 0.9543 +vn -0.0000 -0.6853 0.7283 +vn -0.0000 0.1387 0.9903 +vn 0.2332 -0.9665 0.1076 +vn 0.2591 -0.9610 -0.0971 +vn 0.5812 -0.7979 -0.1599 +vn 0.6077 -0.7903 0.0781 +vn 0.3609 -0.8737 -0.3261 +vn 0.5677 -0.7706 -0.2895 +vn -0.0000 -0.9964 -0.0848 +vn -0.0000 -0.9348 -0.3552 +vn -0.0000 -0.9928 0.1196 +vn 0.1924 0.8432 -0.5020 +vn 0.1751 0.8750 -0.4513 +vn -0.0000 0.8873 -0.4612 +vn -0.0000 0.8485 -0.5292 +vn 0.1752 0.8805 -0.4405 +vn -0.0000 0.8966 -0.4428 +vn 0.9092 -0.2781 -0.3098 +vn 0.8025 -0.2021 -0.5614 +vn 0.7815 -0.5465 -0.3011 +vn 0.9058 -0.1573 -0.3934 +vn 0.8960 -0.3609 -0.2586 +vn 0.8186 -0.2807 -0.5011 +vn 0.9654 -0.2590 0.0293 +vn 0.9865 -0.1617 -0.0253 +vn 0.2150 -0.8947 0.3916 +vn 0.5863 -0.7140 0.3827 +vn -0.0000 -0.9185 0.3955 +vn 0.8503 0.1759 -0.4960 +vn 0.5689 0.6352 -0.5224 +vn 0.8860 -0.2155 -0.4105 +vn 0.6814 0.5898 -0.4334 +vn 0.9835 -0.0379 0.1768 +vn 0.9393 -0.1287 0.3180 +vn 0.9247 0.1321 0.3570 +vn 0.8513 0.3808 -0.3609 +vn 0.5261 0.3528 -0.7738 +vn 0.9359 0.3372 0.1024 +vn 0.6519 0.6910 -0.3123 +vn 0.4577 0.5576 -0.6925 +vn 0.2023 0.3351 -0.9202 +vn 0.1946 0.6429 -0.7408 +vn 0.0001 0.3429 -0.9394 +vn -0.0000 0.6574 -0.7535 +vn 0.2211 0.2719 -0.9366 +vn 0.0001 0.2583 -0.9661 +vn 0.2216 0.7296 -0.6469 +vn -0.0001 0.7223 -0.6916 +vn 0.2270 0.5206 -0.8230 +vn -0.0001 0.4984 -0.8669 +vn 0.8755 0.2561 0.4099 +vn 0.8904 0.1530 0.4287 +vn 0.6711 0.1766 0.7200 +vn 0.2982 0.1397 0.9442 +vn -0.0000 0.1402 0.9901 +vn 0.8901 -0.4527 0.0533 +vn 0.8642 -0.3459 0.3654 +vn 0.7205 -0.5603 -0.4087 +vn 0.4790 -0.6388 -0.6020 +vn 0.5700 -0.2240 -0.7905 +vn -0.0000 -0.6916 -0.7223 +vn 0.0003 -0.2512 -0.9679 +vn 0.8261 -0.5184 -0.2210 +vn 0.3726 0.4253 0.8248 +vn 0.6629 0.3850 0.6421 +vn -0.0000 0.4336 0.9011 +vn 0.8249 0.3543 0.4404 +vn 0.7999 0.3799 0.4646 +vn 0.2191 0.3455 -0.9125 +vn 0.0003 0.3241 -0.9460 +vn 0.6225 -0.1387 -0.7702 +vn 0.2135 -0.9612 -0.1747 +vn -0.0000 -0.3161 -0.9487 +vn -0.0000 -0.9806 
-0.1959 +vn 0.8497 0.2391 0.4699 +vn 0.2496 0.9229 -0.2933 +vn -0.0000 0.9569 -0.2905 +vn 0.4061 -0.1982 -0.8921 +vn 0.2114 0.8030 -0.5573 +vn -0.0000 -0.1968 -0.9805 +vn -0.0001 0.8127 -0.5827 +vn 0.1824 -0.9619 -0.2036 +vn -0.0000 -0.9708 -0.2399 +vn -0.0000 0.9308 -0.3655 +vn 0.1845 0.9280 -0.3237 +vn 0.0348 -0.9826 0.1823 +vn -0.0000 -0.9829 0.1844 +vn -0.0000 0.9055 -0.4243 +vn 0.2715 0.8623 -0.4274 +vn 0.2072 0.8670 -0.4532 +vn -0.0000 0.8978 -0.4404 +vn -0.3375 0.0495 0.9400 +vn -0.3502 0.1562 0.9236 +vn -0.3754 0.3731 0.8484 +vn -0.6732 0.1831 0.7164 +vn -0.6480 0.3916 0.6533 +vn -0.6966 0.0877 0.7120 +vn -0.8115 0.4959 -0.3092 +vn -0.9841 0.0873 -0.1547 +vn -0.9817 0.1889 -0.0237 +vn -0.8037 0.5433 -0.2428 +vn -0.4578 0.7889 -0.4100 +vn -0.4644 0.7935 -0.3934 +vn -0.8086 0.5549 -0.1954 +vn -0.4920 0.7724 -0.4017 +vn -0.9568 0.2762 0.0911 +vn -0.3040 0.9477 0.0967 +vn -0.2862 0.8866 0.3633 +vn -0.2866 0.5730 0.7678 +vn -0.6839 0.6117 0.3977 +vn -0.5909 0.4795 0.6488 +vn -0.7291 0.6761 0.1067 +vn -0.5444 0.6845 -0.4849 +vn -0.8309 0.5227 -0.1907 +vn -0.8636 0.4429 -0.2407 +vn -0.5791 0.5215 -0.6266 +vn -0.9108 0.3780 0.1660 +vn -0.9055 0.3850 0.1782 +vn -0.8861 0.3524 -0.3010 +vn -0.5697 0.3746 -0.7315 +vn -0.8867 0.2942 -0.3565 +vn -0.5499 0.3009 -0.7792 +vn -0.9397 0.2980 0.1676 +vn -0.9652 0.2287 0.1270 +vn -0.9354 -0.1499 -0.3203 +vn -0.3529 -0.9344 -0.0491 +vn -0.2623 -0.9375 0.2285 +vn -0.9531 -0.2935 0.0745 +vn -0.5440 0.7706 -0.3320 +vn -0.6644 0.7179 -0.2080 +vn -0.2352 -0.6568 0.7165 +vn -0.3159 -0.2705 0.9094 +vn -0.5271 -0.5776 0.6233 +vn -0.6709 -0.2720 0.6899 +vn -0.7614 0.0757 0.6438 +vn -0.3682 0.1318 0.9203 +vn -0.2590 -0.9610 -0.0971 +vn -0.3609 -0.8737 -0.3262 +vn -0.5811 -0.7980 -0.1599 +vn -0.5676 -0.7707 -0.2896 +vn -0.2332 -0.9665 0.1075 +vn -0.6077 -0.7903 0.0782 +vn -0.1752 0.8805 -0.4405 +vn -0.1751 0.8750 -0.4513 +vn -0.1924 0.8432 -0.5020 +vn -0.9082 -0.2779 -0.3131 +vn -0.8962 -0.3610 -0.2578 +vn -0.9654 -0.2590 0.0301 +vn -0.9864 -0.1618 -0.0285 +vn -0.8026 -0.2021 -0.5612 +vn -0.8157 -0.2796 -0.5064 +vn -0.7817 -0.5469 -0.2997 +vn -0.9064 -0.1572 -0.3920 +vn -0.2152 -0.8947 0.3914 +vn -0.5864 -0.7139 0.3827 +vn -0.8910 -0.2132 -0.4009 +vn -0.8479 0.1755 -0.5003 +vn -0.6763 0.5880 -0.4437 +vn -0.5676 0.6349 -0.5241 +vn -0.9834 -0.0375 0.1774 +vn -0.9394 -0.1288 0.3177 +vn -0.9237 0.1324 0.3596 +vn -0.6515 0.6913 -0.3125 +vn -0.8513 0.3809 -0.3608 +vn -0.9358 0.3372 0.1025 +vn -0.5260 0.3529 -0.7738 +vn -0.4564 0.5576 -0.6934 +vn -0.2211 0.2719 -0.9366 +vn -0.2024 0.3352 -0.9202 +vn -0.1948 0.6426 -0.7410 +vn -0.2215 0.7296 -0.6470 +vn -0.2270 0.5206 -0.8231 +vn -0.8755 0.2560 0.4099 +vn -0.6712 0.1766 0.7199 +vn -0.8904 0.1530 0.4287 +vn -0.2982 0.1396 0.9442 +vn -0.8642 -0.3459 0.3654 +vn -0.8901 -0.4527 0.0534 +vn -0.4790 -0.6389 -0.6020 +vn -0.5701 -0.2241 -0.7904 +vn -0.7209 -0.5605 -0.4075 +vn -0.8261 -0.5185 -0.2205 +vn -0.3723 0.4249 0.8251 +vn -0.6629 0.3851 0.6421 +vn -0.8259 0.3538 0.4391 +vn -0.8000 0.3806 0.4638 +vn -0.2191 0.3455 -0.9125 +vn -0.6235 -0.1387 -0.7694 +vn -0.2135 -0.9612 -0.1747 +vn -0.8497 0.2391 0.4699 +vn -0.2501 0.9228 -0.2929 +vn -0.1823 -0.9619 -0.2036 +vn -0.4061 -0.1982 -0.8921 +vn -0.2109 0.8028 -0.5577 +vn -0.1845 0.9280 -0.3237 +vn -0.0348 -0.9826 0.1823 +vn -0.2715 0.8623 -0.4274 +vn -0.2071 0.8670 -0.4533 +vn -0.7023 -0.7104 0.0462 +vn -0.2389 -0.9578 0.1600 +vn -0.2517 -0.9556 0.1532 +vn -0.7114 -0.7014 0.0434 +vn -0.3124 0.9110 -0.2691 +vn -0.9560 0.2116 -0.2030 +vn -0.9696 0.1608 -0.1843 +vn -0.3420 0.9031 
-0.2597 +vn 0.6844 0.7183 -0.1248 +vn 0.6855 0.7169 -0.1274 +vn 0.1883 0.9574 -0.2191 +vn 0.1764 0.9614 -0.2112 +vn 0.2391 -0.9518 0.1919 +vn 0.2599 -0.9449 0.1988 +vn 0.9615 -0.2582 0.0937 +vn 0.9487 -0.3019 0.0943 +vn -0.2833 0.7678 -0.5747 +vn -0.7150 -0.0453 -0.6976 +vn -0.9359 0.1985 -0.2909 +vn -0.3280 0.8829 -0.3361 +vn 0.6964 0.6930 -0.1863 +vn 0.1790 0.9408 -0.2878 +vn -0.6745 -0.7171 -0.1754 +vn -0.2398 -0.9526 -0.1872 +vn -0.2408 -0.9626 0.1244 +vn -0.7148 -0.6993 -0.0018 +vn -0.7182 -0.3851 -0.5796 +vn -0.2065 -0.9776 -0.0406 +vn -0.2606 -0.9507 0.1683 +vn -0.9569 -0.2902 0.0142 +vn 0.1460 0.7204 -0.6780 +vn -0.0520 -0.3101 -0.9493 +vn 0.2607 -0.9507 0.1682 +vn 0.2066 -0.9776 -0.0397 +vn 0.7183 -0.3852 -0.5793 +vn 0.9569 -0.2902 0.0140 +vn 0.2398 -0.9526 -0.1872 +vn 0.2408 -0.9626 0.1245 +vn 0.9360 0.1985 -0.2906 +vn 0.7150 -0.0449 -0.6977 +vn 0.2850 0.7678 -0.5738 +vn 0.3286 0.8828 -0.3358 +vn -0.2599 -0.9449 0.1989 +vn -0.9615 -0.2582 0.0937 +vn 0.7148 -0.6993 -0.0018 +vn 0.6757 -0.7168 -0.1721 +vn -0.1460 0.7204 -0.6780 +vn 0.0523 -0.3101 -0.9493 +vn -0.6448 0.5918 -0.4838 +vn 0.3123 0.9111 -0.2691 +vn -0.1883 0.9574 -0.2191 +vn -0.1764 0.9614 -0.2112 +vn 0.3420 0.9031 -0.2597 +vn 0.6448 0.5918 -0.4838 +vn 0.9560 0.2116 -0.2030 +vn 0.2517 -0.9556 0.1532 +vn 0.2389 -0.9578 0.1600 +vn 0.7023 -0.7104 0.0462 +vn 0.7114 -0.7014 0.0434 +vn -0.1792 0.9408 -0.2876 +vn -0.6965 0.6930 -0.1861 +vn -0.6855 0.7169 -0.1274 +vn -0.9487 -0.3019 0.0943 +vn -0.6845 0.7183 -0.1248 +vn 0.9696 0.1608 -0.1843 +vn -0.2392 -0.9518 0.1920 +vn -0.4069 -0.8913 0.2001 +vn -0.0000 -0.9743 0.2254 +vn -0.0000 -0.9744 0.2248 +vn -0.3839 -0.9010 0.2020 +vn -0.6063 0.7621 -0.2272 +vn -0.9968 -0.0771 -0.0186 +vn -0.9929 -0.1190 -0.0031 +vn -0.5970 0.7727 -0.2158 +vn -0.0006 0.7757 -0.6311 +vn -0.6208 0.6243 -0.4742 +vn -0.6361 0.7274 -0.2575 +vn -0.0000 0.9509 -0.3095 +vn -0.7858 -0.3325 -0.5216 +vn -0.9939 -0.0870 -0.0671 +vn -0.0001 -0.3186 -0.9479 +vn -0.4362 -0.8994 0.0271 +vn -0.0000 -0.9996 -0.0296 +vn -0.0000 -0.9761 0.2172 +vn -0.4413 -0.8809 0.1708 +vn -0.0000 0.9599 -0.2804 +vn -0.0000 0.9640 -0.2657 +vn 0.4069 -0.8913 0.2001 +vn 0.3840 -0.9010 0.2019 +vn 0.9929 -0.1190 -0.0031 +vn 0.9968 -0.0772 -0.0189 +vn 0.6062 0.7621 -0.2271 +vn 0.5970 0.7727 -0.2158 +vn 0.6361 0.7274 -0.2576 +vn 0.6175 0.6258 -0.4765 +vn 0.9940 -0.0870 -0.0670 +vn 0.7851 -0.3303 -0.5239 +vn 0.4353 -0.8999 0.0255 +vn 0.4413 -0.8809 0.1708 +s 1 +f 566//1 565//2 575//3 +f 566//1 575//3 576//4 +f 453//5 452//6 526//7 +f 453//5 526//7 527//8 +f 436//9 405//10 406//11 +f 437//12 436//9 406//11 +f 419//13 418//14 452//6 +f 419//13 452//6 453//5 +f 560//15 528//16 529//17 +f 561//18 560//15 529//17 +f 549//19 548//20 535//21 +f 549//19 535//21 536//22 +f 416//23 410//24 411//25 +f 417//26 416//23 411//25 +f 434//27 446//28 447//29 +f 435//30 434//27 447//29 +f 536//22 535//21 553//31 +f 536//22 553//31 554//32 +f 530//33 565//2 566//1 +f 531//34 530//33 566//1 +f 452//6 418//14 405//10 +f 452//6 405//10 436//9 +f 525//35 524//36 450//37 +f 525//35 450//37 451//38 +f 410//24 434//27 435//30 +f 411//25 410//24 435//30 +f 451//38 450//37 416//23 +f 451//38 416//23 417//26 +f 554//32 553//31 573//39 +f 554//32 573//39 574//40 +f 548//20 530//33 526//7 +f 548//20 526//7 535//21 +f 576//4 575//3 548//20 +f 576//4 548//20 549//19 +f 528//16 524//36 525//35 +f 529//17 528//16 525//35 +f 446//28 436//9 437//12 +f 447//29 446//28 437//12 +f 526//7 530//33 531//34 +f 527//8 526//7 531//34 +f 574//40 573//39 560//15 +f 574//40 560//15 561//18 +f 
535//21 526//7 446//28 +f 535//21 446//28 524//36 +f 524//36 446//28 434//27 +f 524//36 434//27 450//37 +f 450//37 434//27 410//24 +f 416//23 450//37 410//24 +f 575//3 565//2 530//33 +f 548//20 575//3 530//33 +f 405//10 418//14 419//13 +f 406//11 405//10 419//13 +f 553//31 535//21 524//36 +f 528//16 553//31 524//36 +f 526//7 452//6 436//9 +f 446//28 526//7 436//9 +f 573//39 553//31 528//16 +f 573//39 528//16 560//15 +f 748//41 730//42 695//43 +f 748//41 695//43 711//44 +f 749//45 731//46 730//42 +f 749//45 730//42 748//41 +f 646//47 715//48 703//49 +f 661//50 646//47 703//49 +f 591//51 585//52 620//53 +f 591//51 620//53 619//54 +f 361//55 396//56 390//57 +f 362//58 361//55 390//57 +f 612//59 628//60 629//61 +f 612//59 629//61 613//62 +f 653//63 628//60 612//59 +f 643//64 653//63 612//59 +f 711//44 695//43 653//63 +f 711//44 653//63 643//64 +f 648//65 649//66 622//67 +f 648//65 622//67 616//68 +f 649//66 706//69 697//70 +f 656//71 649//66 697//70 +f 743//72 737//73 736//74 +f 743//72 736//74 742//75 +f 613//62 629//61 655//76 +f 613//62 655//76 645//77 +f 710//78 706//69 649//66 +f 710//78 649//66 648//65 +f 736//74 706//69 710//78 +f 742//75 736//74 710//78 +f 717//79 735//80 726//81 +f 709//82 717//79 726//81 +f 622//67 624//83 615//84 +f 616//68 622//67 615//84 +f 703//49 715//48 716//85 +f 703//49 716//85 702//86 +f 588//87 609//88 636//89 +f 588//87 636//89 614//90 +f 712//91 700//92 731//46 +f 712//91 731//46 749//45 +f 619//54 620//53 642//93 +f 619//54 642//93 637//94 +f 735//80 762//95 773//96 +f 726//81 735//80 773//96 +f 588//87 614//90 620//53 +f 585//52 588//87 620//53 +f 717//79 745//97 762//95 +f 735//80 717//79 762//95 +f 601//98 609//88 588//87 +f 601//98 588//87 585//52 +f 760//99 773//96 758//100 +f 760//99 758//100 746//101 +f 655//76 700//92 712//91 +f 645//77 655//76 712//91 +f 659//102 663//103 646//47 +f 641//104 659//102 646//47 +f 663//103 692//105 715//48 +f 646//47 663//103 715//48 +f 659//102 701//106 692//105 +f 663//103 659//102 692//105 +f 746//101 758//100 743//72 +f 746//101 743//72 729//107 +f 732//108 760//99 746//101 +f 718//109 732//108 746//101 +f 746//101 729//107 698//110 +f 746//101 698//110 718//109 +f 697//70 717//79 709//82 +f 704//111 697//70 709//82 +f 660//112 661//50 703//49 +f 660//112 703//49 702//86 +f 702//86 698//110 657//113 +f 702//86 657//113 660//112 +f 656//71 659//102 641//104 +f 650//114 656//71 641//104 +f 702//86 716//85 704//111 +f 702//86 704//111 698//110 +f 624//83 609//88 601//98 +f 615//84 624//83 601//98 +f 591//51 619//54 637//94 +f 610//115 591//51 637//94 +f 637//94 642//93 650//114 +f 637//94 650//114 657//113 +f 657//113 632//116 610//115 +f 637//94 657//113 610//115 +f 286//117 251//118 233//119 +f 270//120 286//117 233//119 +f 233//119 251//118 250//121 +f 233//119 250//121 232//122 +f 221//123 208//124 255//125 +f 221//123 255//125 249//126 +f 278//127 266//128 335//129 +f 278//127 335//129 320//130 +f 368//131 352//132 353//133 +f 368//131 353//133 369//134 +f 369//134 353//133 328//135 +f 369//134 328//135 338//136 +f 328//135 286//117 270//120 +f 338//136 328//135 270//120 +f 359//137 332//138 333//139 +f 365//140 359//137 333//139 +f 283//141 275//142 332//138 +f 283//141 332//138 324//143 +f 245//144 244//145 238//146 +f 239//147 245//144 238//146 +f 336//148 326//149 352//132 +f 336//148 352//132 368//131 +f 332//138 275//142 271//150 +f 333//139 332//138 271//150 +f 271//150 275//142 245//144 +f 271//150 245//144 239//147 +f 255//125 246//151 263//152 +f 255//125 263//152 272//153 +f 366//154 357//155 
359//137 +f 366//154 359//137 365//140 +f 265//156 266//128 278//127 +f 280//157 265//156 278//127 +f 344//158 371//159 393//160 +f 367//161 344//158 393//160 +f 232//122 250//121 281//162 +f 232//122 281//162 269//163 +f 339//164 361//55 362//58 +f 345//165 339//164 362//58 +f 208//124 219//166 246//151 +f 208//124 246//151 255//125 +f 361//55 367//161 393//160 +f 361//55 393//160 396//56 +f 219//166 235//167 263//152 +f 219//166 263//152 246//151 +f 393//160 371//159 380//168 +f 396//56 393//160 380//168 +f 223//169 208//124 221//123 +f 236//170 223//169 221//123 +f 269//163 281//162 326//149 +f 269//163 326//149 336//148 +f 335//129 318//171 321//172 +f 335//129 321//172 340//173 +f 266//128 289//174 318//171 +f 266//128 318//171 335//129 +f 289//174 279//175 321//172 +f 289//174 321//172 318//171 +f 238//146 223//169 236//170 +f 252//176 238//146 236//170 +f 236//170 221//123 249//126 +f 236//170 249//126 264//177 +f 284//178 252//176 236//170 +f 264//177 284//178 236//170 +f 272//153 263//152 283//141 +f 272//153 283//141 277//179 +f 278//127 320//130 322//180 +f 280//157 278//127 322//180 +f 325//181 284//178 280//157 +f 322//180 325//181 280//157 +f 340//173 321//172 324//143 +f 340//173 324//143 331//182 +f 277//179 265//156 280//157 +f 284//178 277//179 280//157 +f 380//168 371//159 357//155 +f 380//168 357//155 366//154 +f 345//165 362//58 390//57 +f 345//165 390//57 372//183 +f 331//182 339//164 345//165 +f 325//181 331//182 345//165 +f 372//183 349//184 325//181 +f 372//183 325//181 345//165 +f 642//93 636//89 656//71 +f 642//93 656//71 650//114 +f 615//84 601//98 610//115 +f 632//116 615//84 610//115 +f 716//85 701//106 697//70 +f 716//85 697//70 704//111 +f 650//114 641//104 660//112 +f 657//113 650//114 660//112 +f 704//111 709//82 718//109 +f 698//110 704//111 718//109 +f 758//100 745//97 737//73 +f 758//100 737//73 743//72 +f 641//104 646//47 661//50 +f 660//112 641//104 661//50 +f 656//71 697//70 701//106 +f 659//102 656//71 701//106 +f 773//96 762//95 745//97 +f 773//96 745//97 758//100 +f 601//98 585//52 591//51 +f 610//115 601//98 591//51 +f 620//53 614//90 636//89 +f 620//53 636//89 642//93 +f 697//70 737//73 745//97 +f 717//79 697//70 745//97 +f 609//88 624//83 656//71 +f 609//88 656//71 636//89 +f 715//48 692//105 701//106 +f 715//48 701//106 716//85 +f 616//68 615//84 632//116 +f 631//185 616//68 632//116 +f 709//82 726//81 732//108 +f 718//109 709//82 732//108 +f 742//75 710//78 693//186 +f 725//187 742//75 693//186 +f 693//186 710//78 648//65 +f 693//186 648//65 654//188 +f 729//107 743//72 742//75 +f 729//107 742//75 725//187 +f 654//188 648//65 616//68 +f 654//188 616//68 631//185 +f 324//143 344//158 339//164 +f 331//182 324//143 339//164 +f 372//183 380//168 366//154 +f 372//183 366//154 349//184 +f 283//141 279//175 265//156 +f 277//179 283//141 265//156 +f 322//180 340//173 331//182 +f 322//180 331//182 325//181 +f 264//177 272//153 277//179 +f 264//177 277//179 284//178 +f 244//145 235//167 223//169 +f 238//146 244//145 223//169 +f 320//130 335//129 340//173 +f 320//130 340//173 322//180 +f 279//175 283//141 324//143 +f 279//175 324//143 321//172 +f 235//167 219//166 208//124 +f 223//169 235//167 208//124 +f 390//57 396//56 380//168 +f 390//57 380//168 372//183 +f 344//158 367//161 361//55 +f 339//164 344//158 361//55 +f 235//167 244//145 283//141 +f 235//167 283//141 263//152 +f 324//143 357//155 371//159 +f 344//158 324//143 371//159 +f 279//175 289//174 266//128 +f 265//156 279//175 266//128 +f 349//184 366//154 365//140 +f 349//184 365//140 350//189 +f 
249//126 255//125 272//153 +f 249//126 272//153 264//177 +f 288//190 271//150 239//147 +f 288//190 239//147 256//191 +f 333//139 271//150 288//190 +f 327//192 333//139 288//190 +f 239//147 238//146 252//176 +f 256//191 239//147 252//176 +f 365//140 333//139 327//192 +f 350//189 365//140 327//192 +f 726//81 773//96 760//99 +f 732//108 726//81 760//99 +f 655//76 657//113 698//110 +f 655//76 698//110 700//92 +f 700//92 698//110 729//107 +f 700//92 729//107 731//46 +f 629//61 632//116 657//113 +f 629//61 657//113 655//76 +f 695//43 693//186 654//188 +f 695//43 654//188 653//63 +f 653//63 654//188 631//185 +f 653//63 631//185 628//60 +f 628//60 631//185 632//116 +f 628//60 632//116 629//61 +f 731//46 729//107 725//187 +f 731//46 725//187 730//42 +f 730//42 725//187 693//186 +f 730//42 693//186 695//43 +f 281//162 284//178 325//181 +f 281//162 325//181 326//149 +f 250//121 252//176 284//178 +f 250//121 284//178 281//162 +f 326//149 325//181 349//184 +f 326//149 349//184 352//132 +f 328//135 327//192 288//190 +f 328//135 288//190 286//117 +f 353//133 350//189 327//192 +f 353//133 327//192 328//135 +f 352//132 349//184 350//189 +f 352//132 350//189 353//133 +f 251//118 256//191 252//176 +f 251//118 252//176 250//121 +f 286//117 288//190 256//191 +f 286//117 256//191 251//118 +f 622//67 649//66 656//71 +f 624//83 622//67 656//71 +f 697//70 706//69 736//74 +f 697//70 736//74 737//73 +f 245//144 275//142 283//141 +f 244//145 245//144 283//141 +f 324//143 332//138 359//137 +f 324//143 359//137 357//155 +f 791//193 774//194 783//195 +f 774//194 756//196 783//195 +f 756//196 740//197 783//195 +f 740//197 738//198 783//195 +f 738//198 747//199 783//195 +f 747//199 761//200 783//195 +f 761//200 780//201 783//195 +f 780//201 796//202 783//195 +f 796//202 802//203 783//195 +f 802//203 804//204 783//195 +f 804//204 801//205 783//195 +f 190//206 180//207 198//208 +f 225//209 198//208 241//210 +f 198//208 225//209 207//211 +f 190//206 198//208 207//211 +f 671//212 675//213 733//214 +f 671//212 733//214 719//215 +f 177//216 198//208 180//207 +f 179//217 198//208 177//216 +f 185//218 198//208 179//217 +f 201//219 198//208 185//218 +f 220//220 198//208 201//219 +f 234//221 198//208 220//220 +f 243//222 198//208 234//221 +f 241//210 198//208 243//222 +f 744//223 769//224 721//225 +f 690//226 744//223 721//225 +f 797//227 764//228 788//229 +f 797//227 788//229 806//230 +f 675//213 707//231 750//232 +f 675//213 750//232 733//214 +f 674//233 724//234 744//223 +f 674//233 744//223 690//226 +f 806//230 788//229 790//235 +f 810//236 806//230 790//235 +f 803//237 782//238 759//239 +f 793//240 803//237 759//239 +f 707//231 734//241 775//242 +f 707//231 775//242 750//232 +f 671//212 719//215 724//234 +f 674//233 671//212 724//234 +f 810//236 790//235 782//238 +f 803//237 810//236 782//238 +f 184//243 217//244 247//245 +f 184//243 247//245 206//246 +f 248//247 306//248 310//249 +f 262//250 248//247 310//249 +f 200//251 188//252 178//253 +f 260//254 212//255 237//256 +f 260//254 237//256 291//257 +f 193//258 217//244 184//243 +f 175//259 193//258 184//243 +f 231//260 274//261 306//248 +f 248//247 231//260 306//248 +f 237//256 257//262 307//263 +f 291//257 237//256 307//263 +f 191//264 193//258 175//259 +f 191//264 175//259 171//265 +f 222//266 199//267 178//253 +f 222//266 178//253 188//252 +f 206//246 247//245 274//261 +f 231//260 206//246 274//261 +f 257//262 262//250 310//249 +f 257//262 310//249 307//263 +f 199//267 191//264 171//265 +f 199//267 171//265 178//253 +f 769//224 793//240 759//239 +f 769//224 759//239 
721//225 +f 222//266 188//252 212//255 +f 260//254 222//266 212//255 +f 781//268 793//240 769//224 +f 781//268 769//224 744//223 +f 781//268 744//223 724//234 +f 781//268 724//234 719//215 +f 781//268 719//215 733//214 +f 781//268 733//214 750//232 +f 781//268 750//232 775//242 +f 781//268 775//242 797//227 +f 781//268 797//227 806//230 +f 781//268 806//230 810//236 +f 781//268 810//236 803//237 +f 781//268 803//237 793//240 +f 200//251 178//253 171//265 +f 200//251 171//265 175//259 +f 200//251 175//259 184//243 +f 200//251 184//243 206//246 +f 200//251 206//246 231//260 +f 200//251 231//260 248//247 +f 200//251 248//247 262//250 +f 200//251 262//250 257//262 +f 200//251 257//262 237//256 +f 200//251 237//256 212//255 +f 200//251 212//255 188//252 +f 791//193 783//195 801//205 +f 734//241 764//228 797//227 +f 775//242 734//241 797//227 +f 956//269 947//270 924//271 +f 956//269 924//271 938//272 +f 830//273 850//274 862//275 +f 842//276 830//273 862//275 +f 938//272 924//271 893//277 +f 938//272 893//277 884//278 +f 979//279 969//280 971//281 +f 979//279 971//281 980//282 +f 964//283 958//284 947//270 +f 960//285 964//283 947//270 +f 836//286 842//276 862//275 +f 836//286 862//275 858//287 +f 841//288 835//289 859//290 +f 861//291 841//288 859//290 +f 849//292 832//293 841//288 +f 861//291 849//292 841//288 +f 860//294 845//295 832//293 +f 849//292 860//294 832//293 +f 844//296 863//297 850//274 +f 830//273 844//296 850//274 +f 884//278 849//292 861//291 +f 895//298 884//278 861//291 +f 863//297 896//299 883//300 +f 850//274 863//297 883//300 +f 850//274 883//300 894//301 +f 862//275 850//274 894//301 +f 945//302 955//303 972//304 +f 945//302 972//304 962//305 +f 867//306 898//307 892//308 +f 856//309 867//306 892//308 +f 132//310 121//311 88//312 +f 132//310 88//312 97//313 +f 883//300 896//299 919//314 +f 883//300 919//314 936//315 +f 827//316 828//317 854//318 +f 827//316 854//318 852//319 +f 921//320 936//315 955//303 +f 921//320 955//303 945//302 +f 928//321 921//320 945//302 +f 928//321 945//302 949//322 +f 928//321 949//322 951//323 +f 931//324 928//321 951//323 +f 851//325 863//297 844//296 +f 837//326 851//325 844//296 +f 889//327 928//321 931//324 +f 888//328 889//327 931//324 +f 835//289 829//329 855//330 +f 859//290 835//289 855//330 +f 828//317 836//286 858//287 +f 828//317 858//287 854//318 +f 888//328 931//324 934//331 +f 885//332 888//328 934//331 +f 968//333 953//334 958//284 +f 964//283 968//333 958//284 +f 931//324 951//323 954//335 +f 934//331 931//324 954//335 +f 885//332 934//331 930//336 +f 887//337 885//332 930//336 +f 949//322 966//338 974//339 +f 951//323 949//322 974//339 +f 934//331 954//335 950//340 +f 930//336 934//331 950//340 +f 887//337 930//336 926//341 +f 891//342 887//337 926//341 +f 930//336 950//340 948//343 +f 926//341 930//336 948//343 +f 891//342 926//341 922//344 +f 895//298 891//342 922//344 +f 954//335 975//345 973//346 +f 954//335 973//346 950//340 +f 926//341 948//343 946//347 +f 926//341 946//347 922//344 +f 950//340 973//346 967//348 +f 950//340 967//348 948//343 +f 952//349 942//350 943//351 +f 952//349 943//351 959//352 +f 847//353 856//309 833//354 +f 847//353 833//354 831//355 +f 977//356 965//357 969//280 +f 977//356 969//280 979//279 +f 948//343 967//348 963//358 +f 948//343 963//358 946//347 +f 818//359 819//360 828//317 +f 827//316 818//359 828//317 +f 833//354 837//326 823//361 +f 819//360 833//354 823//361 +f 824//362 820//363 829//329 +f 835//289 824//362 829//329 +f 972//304 961//364 965//357 +f 972//304 965//357 977//356 +f 
823//361 830//273 842//276 +f 836//286 823//361 842//276 +f 975//345 980//282 978//365 +f 975//345 978//365 973//346 +f 962//305 972//304 977//356 +f 962//305 977//356 966//338 +f 974//339 979//279 980//282 +f 974//339 980//282 975//345 +f 894//301 883//300 936//315 +f 894//301 936//315 921//320 +f 973//346 978//365 976//366 +f 973//346 976//366 967//348 +f 961//364 944//367 957//368 +f 961//364 957//368 965//357 +f 965//357 957//368 952//349 +f 969//280 965//357 952//349 +f 969//280 952//349 959//352 +f 971//281 969//280 959//352 +f 971//281 959//352 953//334 +f 968//333 971//281 953//334 +f 16//369 20//370 9//371 +f 4//372 16//369 9//371 +f 856//309 851//325 837//326 +f 856//309 837//326 833//354 +f 890//373 899//374 868//375 +f 890//373 868//375 853//376 +f 912//377 902//378 892//308 +f 925//379 912//377 892//308 +f 875//380 905//381 896//299 +f 863//297 875//380 896//299 +f 848//382 853//376 834//383 +f 848//382 834//383 838//384 +f 892//308 898//307 917//385 +f 892//308 917//385 925//379 +f 869//386 871//387 853//376 +f 848//382 869//386 853//376 +f 916//388 899//374 890//373 +f 927//389 916//388 890//373 +f 873//390 872//391 851//325 +f 856//309 873//390 851//325 +f 871//387 901//392 890//373 +f 853//376 871//387 890//373 +f 853//376 847//353 831//355 +f 853//376 831//355 834//383 +f 893//277 904//393 874//394 +f 893//277 874//394 860//294 +f 42//395 67//396 54//397 +f 28//398 42//395 54//397 +f 85//399 76//400 106//401 +f 85//399 106//401 118//402 +f 925//379 917//385 942//350 +f 925//379 942//350 952//349 +f 902//378 903//403 872//391 +f 902//378 872//391 873//390 +f 944//367 920//404 935//405 +f 944//367 935//405 957//368 +f 868//375 865//406 847//353 +f 853//376 868//375 847//353 +f 927//389 914//407 939//408 +f 927//389 939//408 953//334 +f 890//373 901//392 914//407 +f 890//373 914//407 927//389 +f 860//294 848//382 838//384 +f 860//294 838//384 845//295 +f 26//409 45//410 60//411 +f 36//412 26//409 60//411 +f 941//413 916//388 927//389 +f 953//334 941//413 927//389 +f 874//394 869//386 848//382 +f 860//294 874//394 848//382 +f 153//414 162//415 163//416 +f 153//414 163//416 154//417 +f 958//284 940//418 929//419 +f 958//284 929//419 947//270 +f 924//271 910//420 904//393 +f 924//271 904//393 893//277 +f 865//406 867//306 856//309 +f 847//353 865//406 856//309 +f 819//360 823//361 836//286 +f 828//317 819//360 836//286 +f 967//348 976//366 970//421 +f 967//348 970//421 963//358 +f 837//326 844//296 830//273 +f 823//361 837//326 830//273 +f 831//355 833//354 819//360 +f 818//359 831//355 819//360 +f 838//384 834//383 820//363 +f 824//362 838//384 820//363 +f 904//393 900//422 869//386 +f 904//393 869//386 874//394 +f 37//423 62//424 45//410 +f 26//409 37//423 45//410 +f 899//374 897//425 865//406 +f 899//374 865//406 868//375 +f 107//426 77//427 88//312 +f 121//311 107//426 88//312 +f 896//299 905//381 909//428 +f 896//299 909//428 919//314 +f 97//313 43//429 59//430 +f 86//431 97//313 59//430 +f 45//410 98//432 87//433 +f 60//411 45//410 87//433 +f 897//425 898//307 867//306 +f 897//425 867//306 865//406 +f 957//368 935//405 932//434 +f 957//368 932//434 952//349 +f 78//435 76//400 72//436 +f 78//435 72//436 70//437 +f 57//438 34//439 25//440 +f 43//429 57//438 25//440 +f 92//441 87//433 119//442 +f 92//441 119//442 123//443 +f 149//444 136//445 121//311 +f 149//444 121//311 132//310 +f 93//446 127//447 129//448 +f 96//449 93//446 129//448 +f 96//449 129//448 126//450 +f 94//451 96//449 126//450 +f 92//441 123//443 127//447 +f 93//446 92//441 127//447 +f 56//452 69//453 49//454 
+f 56//452 49//454 29//455 +f 72//436 76//400 85//399 +f 62//424 72//436 85//399 +f 125//456 114//457 116//458 +f 125//456 116//458 134//459 +f 89//460 79//461 69//453 +f 89//460 69//453 56//452 +f 91//462 80//463 110//464 +f 91//462 110//464 128//465 +f 91//462 82//466 65//467 +f 91//462 65//467 54//397 +f 70//437 72//436 61//468 +f 70//437 61//468 46//469 +f 77//427 71//470 57//438 +f 88//312 77//427 57//438 +f 872//391 875//380 863//297 +f 851//325 872//391 863//297 +f 919//314 909//428 920//404 +f 919//314 920//404 944//367 +f 71//470 52//471 34//439 +f 57//438 71//470 34//439 +f 41//472 42//395 28//398 +f 23//473 41//472 28//398 +f 77//427 81//474 68//475 +f 77//427 68//475 71//470 +f 68//475 67//396 42//395 +f 68//475 42//395 41//472 +f 71//470 68//475 41//472 +f 71//470 41//472 52//471 +f 81//474 80//463 67//396 +f 81//474 67//396 68//475 +f 959//352 943//351 941//413 +f 959//352 941//413 953//334 +f 953//334 939//408 940//418 +f 953//334 940//418 958//284 +f 84//476 83//477 64//478 +f 84//476 64//478 63//479 +f 63//479 64//478 39//480 +f 63//479 39//480 38//481 +f 82//466 84//476 63//479 +f 82//466 63//479 65//467 +f 18//482 14//483 33//484 +f 35//485 18//482 33//484 +f 87//433 98//432 131//486 +f 87//433 131//486 119//442 +f 88//312 57//438 43//429 +f 97//313 88//312 43//429 +f 106//401 76//400 78//435 +f 106//401 78//435 109//487 +f 89//460 83//477 114//457 +f 89//460 114//457 125//456 +f 64//478 83//477 89//460 +f 56//452 64//478 89//460 +f 133//488 112//489 107//426 +f 133//488 107//426 121//311 +f 65//467 63//479 38//481 +f 65//467 38//481 40//490 +f 43//429 25//440 35//485 +f 59//430 43//429 35//485 +f 119//442 131//486 151//491 +f 119//442 151//491 139//492 +f 34//439 23//473 17//493 +f 34//439 17//493 21//494 +f 10//495 12//496 2//497 +f 1//498 10//495 2//497 +f 140//499 149//444 132//310 +f 140//499 132//310 120//500 +f 131//486 118//402 137//501 +f 131//486 137//501 151//491 +f 98//432 85//399 118//402 +f 98//432 118//402 131//486 +f 20//370 37//423 26//409 +f 9//371 20//370 26//409 +f 9//371 26//409 36//412 +f 19//502 9//371 36//412 +f 39//480 64//478 56//452 +f 29//455 39//480 56//452 +f 893//277 860//294 849//292 +f 884//278 893//277 849//292 +f 119//442 139//492 145//503 +f 123//443 119//442 145//503 +f 30//504 32//505 53//506 +f 30//504 53//506 50//507 +f 60//411 87//433 92//441 +f 53//506 60//411 92//441 +f 50//507 53//506 92//441 +f 50//507 92//441 93//446 +f 36//412 60//411 53//506 +f 32//505 36//412 53//506 +f 126//450 152//508 146//509 +f 126//450 146//509 122//510 +f 123//443 145//503 153//414 +f 127//447 123//443 153//414 +f 127//447 153//414 154//417 +f 129//448 127//447 154//417 +f 129//448 154//417 152//508 +f 129//448 152//508 126//450 +f 27//511 30//504 50//507 +f 27//511 50//507 47//512 +f 14//483 8//513 31//514 +f 33//484 14//483 31//514 +f 15//515 19//502 36//412 +f 15//515 36//412 32//505 +f 51//516 47//512 96//449 +f 51//516 96//449 94//451 +f 47//512 50//507 93//446 +f 47//512 93//446 96//449 +f 31//514 27//511 47//512 +f 31//514 47//512 51//516 +f 33//484 31//514 51//516 +f 33//484 51//516 55//517 +f 7//518 15//515 32//505 +f 7//518 32//505 30//504 +f 6//519 7//518 30//504 +f 6//519 30//504 27//511 +f 59//430 55//517 90//520 +f 59//430 90//520 86//431 +f 55//517 51//516 94//451 +f 55//517 94//451 90//520 +f 35//485 33//484 55//517 +f 59//430 35//485 55//517 +f 130//521 109//487 108//522 +f 130//521 108//522 125//456 +f 8//513 6//519 27//511 +f 31//514 8//513 27//511 +f 158//523 144//524 148//525 +f 158//523 148//525 162//415 +f 137//501 118//402 
130//521 +f 137//501 130//521 144//524 +f 61//468 72//436 62//424 +f 37//423 61//468 62//424 +f 110//464 80//463 81//474 +f 112//489 110//464 81//474 +f 17//493 13//526 3//527 +f 5//528 17//493 3//527 +f 139//492 151//491 158//523 +f 139//492 158//523 145//503 +f 154//417 163//416 161//529 +f 154//417 161//529 152//508 +f 145//503 158//523 162//415 +f 145//503 162//415 153//414 +f 162//415 148//525 150//530 +f 162//415 150//530 163//416 +f 146//509 157//531 149//444 +f 146//509 149//444 140//499 +f 161//529 147//532 143//533 +f 161//529 143//533 157//531 +f 24//534 37//423 20//370 +f 16//369 24//534 20//370 +f 29//455 24//534 16//369 +f 29//455 16//369 12//496 +f 22//535 29//455 12//496 +f 22//535 12//496 10//495 +f 28//398 22//535 10//495 +f 28//398 10//495 13//526 +f 2//497 4//372 15//515 +f 7//518 2//497 15//515 +f 23//473 28//398 13//526 +f 23//473 13//526 17//493 +f 109//487 78//435 79//461 +f 108//522 109//487 79//461 +f 148//525 125//456 134//459 +f 150//530 148//525 134//459 +f 144//524 130//521 125//456 +f 148//525 144//524 125//456 +f 128//465 110//464 112//489 +f 128//465 112//489 133//488 +f 67//396 80//463 91//462 +f 54//397 67//396 91//462 +f 112//489 81//474 77//427 +f 107//426 112//489 77//427 +f 147//532 128//465 133//488 +f 143//533 147//532 133//488 +f 134//459 116//458 113//536 +f 134//459 113//536 128//465 +f 150//530 134//459 128//465 +f 147//532 150//530 128//465 +f 113//536 82//466 91//462 +f 128//465 113//536 91//462 +f 46//469 61//468 37//423 +f 24//534 46//469 37//423 +f 116//458 84//476 82//466 +f 113//536 116//458 82//466 +f 54//397 65//467 40//490 +f 54//397 40//490 28//398 +f 108//522 79//461 89//460 +f 125//456 108//522 89//460 +f 52//471 41//472 23//473 +f 34//439 52//471 23//473 +f 49//454 46//469 24//534 +f 29//455 49//454 24//534 +f 40//490 38//481 22//535 +f 28//398 40//490 22//535 +f 152//508 161//529 157//531 +f 152//508 157//531 146//509 +f 21//494 17//493 5//528 +f 11//537 21//494 5//528 +f 429//538 424//539 422//540 +f 429//538 422//540 426//541 +f 12//496 16//369 4//372 +f 2//497 12//496 4//372 +f 13//526 10//495 1//498 +f 3//527 13//526 1//498 +f 143//533 133//488 121//311 +f 136//445 143//533 121//311 +f 114//457 83//477 84//476 +f 116//458 114//457 84//476 +f 38//481 39//480 29//455 +f 22//535 38//481 29//455 +f 118//402 106//401 109//487 +f 118//402 109//487 130//521 +f 79//461 78//435 70//437 +f 79//461 70//437 69//453 +f 947//270 929//419 910//420 +f 947//270 910//420 924//271 +f 90//520 122//510 120//500 +f 90//520 120//500 86//431 +f 552//542 547//543 557//544 +f 559//545 552//542 557//544 +f 430//546 522//547 520//548 +f 427//549 430//546 520//548 +f 518//550 551//551 543//552 +f 518//550 543//552 519//553 +f 555//554 551//551 518//550 +f 555//554 518//550 517//555 +f 557//544 547//543 542//556 +f 557//544 542//556 546//557 +f 523//558 522//547 430//546 +f 523//558 430//546 440//559 +f 431//560 424//539 429//538 +f 438//561 431//560 429//538 +f 547//543 522//547 523//558 +f 542//556 547//543 523//558 +f 520//548 522//547 547//543 +f 520//548 547//543 552//542 +f 903//403 905//381 875//380 +f 872//391 903//403 875//380 +f 911//562 903//403 902//378 +f 912//377 911//562 902//378 +f 935//405 911//562 912//377 +f 932//434 935//405 912//377 +f 94//451 126//450 122//510 +f 90//520 94//451 122//510 +f 917//385 898//307 897//425 +f 918//563 917//385 897//425 +f 943//351 918//563 916//388 +f 941//413 943//351 916//388 +f 942//350 917//385 918//563 +f 943//351 942//350 918//563 +f 918//563 897//425 899//374 +f 916//388 918//563 899//374 +f 900//422 
901//392 871//387 +f 900//422 871//387 869//386 +f 914//407 901//392 900//422 +f 913//564 914//407 900//422 +f 940//418 913//564 910//420 +f 929//419 940//418 910//420 +f 909//428 905//381 903//403 +f 911//562 909//428 903//403 +f 913//564 900//422 904//393 +f 910//420 913//564 904//393 +f 920//404 909//428 911//562 +f 935//405 920//404 911//562 +f 939//408 914//407 913//564 +f 940//418 939//408 913//564 +f 861//291 859//290 891//342 +f 895//298 861//291 891//342 +f 859//290 855//330 887//337 +f 859//290 887//337 891//342 +f 855//330 852//319 885//332 +f 855//330 885//332 887//337 +f 852//319 854//318 888//328 +f 852//319 888//328 885//332 +f 854//318 858//287 889//327 +f 854//318 889//327 888//328 +f 932//434 912//377 925//379 +f 952//349 932//434 925//379 +f 834//383 831//355 818//359 +f 820//363 834//383 818//359 +f 862//275 894//301 889//327 +f 858//287 862//275 889//327 +f 438//561 429//538 518//550 +f 519//553 438//561 518//550 +f 518//550 429//538 426//541 +f 517//555 518//550 426//541 +f 440//559 430//546 424//539 +f 431//560 440//559 424//539 +f 424//539 430//546 427//549 +f 424//539 427//549 422//540 +f 551//551 557//544 546//557 +f 551//551 546//557 543//552 +f 559//545 557//544 551//551 +f 555//554 559//545 551//551 +f 546//557 542//556 523//558 +f 546//557 523//558 521//565 +f 543//552 546//557 521//565 +f 519//553 543//552 521//565 +f 523//558 440//559 431//560 +f 521//565 523//558 431//560 +f 521//565 431//560 438//561 +f 521//565 438//561 519//553 +f 829//329 827//316 852//319 +f 855//330 829//329 852//319 +f 951//323 974//339 975//345 +f 954//335 951//323 975//345 +f 945//302 962//305 966//338 +f 949//322 945//302 966//338 +f 889//327 894//301 921//320 +f 889//327 921//320 928//321 +f 122//510 146//509 140//499 +f 122//510 140//499 120//500 +f 69//453 70//437 46//469 +f 69//453 46//469 49//454 +f 936//315 919//314 944//367 +f 936//315 944//367 955//303 +f 832//293 824//362 835//289 +f 841//288 832//293 835//289 +f 820//363 818//359 827//316 +f 829//329 820//363 827//316 +f 966//338 977//356 979//279 +f 966//338 979//279 974//339 +f 976//366 964//283 960//285 +f 976//366 960//285 970//421 +f 978//365 968//333 964//283 +f 978//365 964//283 976//366 +f 980//282 971//281 968//333 +f 980//282 968//333 978//365 +f 845//295 838//384 824//362 +f 832//293 845//295 824//362 +f 922//344 938//272 884//278 +f 922//344 884//278 895//298 +f 955//303 944//367 961//364 +f 955//303 961//364 972//304 +f 963//358 970//421 956//269 +f 963//358 956//269 946//347 +f 946//347 956//269 938//272 +f 946//347 938//272 922//344 +f 157//531 143//533 136//445 +f 157//531 136//445 149//444 +f 163//416 150//530 147//532 +f 163//416 147//532 161//529 +f 11//537 5//528 14//483 +f 18//482 11//537 14//483 +f 3//527 1//498 6//519 +f 8//513 3//527 6//519 +f 5//528 3//527 8//513 +f 14//483 5//528 8//513 +f 1//498 2//497 7//518 +f 6//519 1//498 7//518 +f 25//440 11//537 18//482 +f 35//485 25//440 18//482 +f 4//372 9//371 19//502 +f 15//515 4//372 19//502 +f 120//500 132//310 97//313 +f 120//500 97//313 86//431 +f 62//424 85//399 98//432 +f 45//410 62//424 98//432 +f 34//439 21//494 11//537 +f 25//440 34//439 11//537 +f 151//491 137//501 144//524 +f 151//491 144//524 158//523 +f 970//421 960//285 947//270 +f 970//421 947//270 956//269 +f 892//308 902//378 873//390 +f 892//308 873//390 856//309 +f 537//566 492//567 493//568 +f 537//566 493//568 538//569 +f 563//570 567//571 581//572 +f 571//573 563//570 581//572 +f 604//574 608//575 597//576 +f 584//577 604//574 597//576 +f 539//578 537//566 567//571 +f 563//570 
539//578 567//571 +f 567//571 562//579 570//580 +f 581//572 567//571 570//580 +f 537//566 538//569 562//579 +f 567//571 537//566 562//579 +f 571//573 581//572 604//574 +f 571//573 604//574 584//577 +f 583//581 596//582 608//575 +f 604//574 583//581 608//575 +f 570//580 583//581 604//574 +f 581//572 570//580 604//574 +f 497//583 494//584 596//582 +f 583//581 497//583 596//582 +f 498//585 497//583 583//581 +f 570//580 498//585 583//581 +f 584//577 597//576 503//586 +f 504//587 584//577 503//586 +f 502//588 571//573 584//577 +f 502//588 584//577 504//587 +f 562//579 495//589 498//585 +f 570//580 562//579 498//585 +f 538//569 493//568 495//589 +f 562//579 538//569 495//589 +f 499//590 563//570 571//573 +f 502//588 499//590 571//573 +f 499//590 496//591 539//578 +f 499//590 539//578 563//570 +f 493//568 492//567 444//592 +f 442//593 493//568 444//592 +f 444//592 492//567 496//591 +f 443//594 444//592 496//591 +f 408//595 420//596 414//597 +f 408//595 414//597 400//598 +f 420//596 442//593 444//592 +f 420//596 444//592 414//597 +f 414//597 444//592 443//594 +f 414//597 443//594 421//599 +f 373//600 384//601 397//602 +f 373//600 397//602 377//603 +f 400//598 414//597 421//599 +f 400//598 421//599 409//604 +f 377//603 397//602 408//595 +f 377//603 408//595 400//598 +f 377//603 400//598 409//604 +f 398//605 377//603 409//604 +f 385//606 373//600 377//603 +f 385//606 377//603 398//605 +f 397//602 497//583 498//585 +f 397//602 498//585 408//595 +f 384//601 494//584 497//583 +f 384//601 497//583 397//602 +f 398//605 409//604 502//588 +f 504//587 398//605 502//588 +f 503//586 385//606 398//605 +f 503//586 398//605 504//587 +f 495//589 493//568 442//593 +f 495//589 442//593 420//596 +f 498//585 495//589 420//596 +f 498//585 420//596 408//595 +f 443//594 496//591 499//590 +f 421//599 443//594 499//590 +f 409//604 421//599 499//590 +f 409//604 499//590 502//588 +f 496//591 492//567 537//566 +f 496//591 537//566 539//578 +f 544//607 540//608 556//609 +f 544//607 556//609 558//610 +f 441//611 515//612 514//613 +f 433//614 441//611 514//613 +f 541//615 533//616 512//617 +f 541//615 512//617 511//618 +f 545//619 541//615 511//618 +f 545//619 511//618 510//620 +f 540//608 532//621 534//622 +f 540//608 534//622 556//609 +f 449//623 516//624 515//612 +f 441//611 449//623 515//612 +f 448//625 445//626 425//627 +f 439//628 448//625 425//627 +f 515//612 516//624 532//621 +f 515//612 532//621 540//608 +f 514//613 515//612 540//608 +f 514//613 540//608 544//607 +f 512//617 448//625 439//628 +f 511//618 512//617 439//628 +f 511//618 439//628 432//629 +f 510//620 511//618 432//629 +f 445//626 449//623 441//611 +f 425//627 445//626 441//611 +f 425//627 441//611 433//614 +f 423//630 425//627 433//614 +f 556//609 534//622 533//616 +f 556//609 533//616 541//615 +f 558//610 556//609 541//615 +f 558//610 541//615 545//619 +f 534//622 513//631 512//617 +f 533//616 534//622 512//617 +f 513//631 516//624 449//623 +f 513//631 449//623 445//626 +f 512//617 513//631 445//626 +f 512//617 445//626 448//625 +f 532//621 516//624 513//631 +f 534//622 532//621 513//631 +f 611//632 720//633 705//634 +f 602//635 611//632 705//634 +f 487//636 611//632 602//635 +f 489//637 487//636 602//635 +f 489//637 602//635 590//638 +f 490//639 489//637 590//638 +f 602//635 705//634 682//640 +f 590//638 602//635 682//640 +f 880//641 906//642 825//643 +f 880//641 825//643 821//644 +f 906//642 886//645 822//646 +f 906//642 822//646 825//643 +f 937//647 923//648 886//645 +f 937//647 886//645 906//642 +f 907//649 937//647 906//642 +f 907//649 906//642 
880//641 +f 568//650 633//651 673//652 +f 578//653 568//650 673//652 +f 505//654 568//650 578//653 +f 508//655 505//654 578//653 +f 508//655 578//653 598//656 +f 509//657 508//655 598//656 +f 578//653 673//652 699//658 +f 578//653 699//658 598//656 +f 877//659 907//649 880//641 +f 877//659 880//641 866//660 +f 846//661 877//659 866//660 +f 846//661 866//660 840//662 +f 840//662 866//660 813//663 +f 840//662 813//663 807//664 +f 866//660 880//641 821//644 +f 866//660 821//644 813//663 +f 816//665 812//666 794//667 +f 799//668 816//665 794//667 +f 794//667 812//666 770//669 +f 794//667 770//669 722//670 +f 812//666 840//662 807//664 +f 812//666 807//664 770//669 +f 846//661 840//662 812//666 +f 816//665 846//661 812//666 +f 786//671 808//672 795//673 +f 786//671 795//673 757//674 +f 727//675 777//676 808//672 +f 727//675 808//672 786//671 +f 665//677 805//678 815//679 +f 662//680 665//677 815//679 +f 665//677 669//681 789//682 +f 665//677 789//682 805//678 +f 479//683 475//684 669//681 +f 479//683 669//681 665//677 +f 479//683 665//677 662//680 +f 482//685 479//683 662//680 +f 714//686 667//687 809//688 +f 826//689 714//686 809//688 +f 809//688 667//687 630//690 +f 809//688 630//690 741//691 +f 667//687 457//692 459//693 +f 667//687 459//693 630//690 +f 460//694 457//692 667//687 +f 460//694 667//687 714//686 +f 685//695 689//696 481//697 +f 685//695 481//697 483//698 +f 689//696 688//699 480//700 +f 689//696 480//700 481//697 +f 825//643 822//646 688//699 +f 825//643 688//699 689//696 +f 821//644 825//643 689//696 +f 821//644 689//696 685//695 +f 882//701 787//702 755//703 +f 882//701 755//703 881//704 +f 870//705 800//706 787//702 +f 882//701 870//705 787//702 +f 908//707 870//705 882//701 +f 933//708 908//707 882//701 +f 933//708 882//701 881//704 +f 933//708 881//704 923//648 +f 669//681 684//709 814//710 +f 669//681 814//710 789//682 +f 684//709 714//686 826//689 +f 684//709 826//689 814//710 +f 464//711 460//694 714//686 +f 464//711 714//686 684//709 +f 475//684 464//711 684//709 +f 475//684 684//709 669//681 +f 886//645 864//712 817//713 +f 886//645 817//713 822//646 +f 864//712 798//714 784//715 +f 864//712 784//715 817//713 +f 881//704 755//703 798//714 +f 881//704 798//714 864//712 +f 923//648 881//704 864//712 +f 923//648 864//712 886//645 +f 915//716 933//708 923//648 +f 915//716 923//648 937//647 +f 879//717 908//707 933//708 +f 879//717 933//708 915//716 +f 815//679 879//717 915//716 +f 815//679 915//716 876//718 +f 876//718 915//716 937//647 +f 876//718 937//647 907//649 +f 751//719 794//667 722//670 +f 751//719 722//670 681//720 +f 765//721 799//668 794//667 +f 765//721 794//667 751//719 +f 765//721 751//719 679//722 +f 699//658 765//721 679//722 +f 751//719 681//720 651//723 +f 679//722 751//719 651//723 +f 651//723 681//720 605//724 +f 651//723 605//724 587//725 +f 587//725 605//724 501//726 +f 587//725 501//726 506//727 +f 605//724 621//728 491//729 +f 605//724 491//729 501//726 +f 681//720 722//670 621//728 +f 681//720 621//728 605//724 +f 678//730 685//695 483//698 +f 678//730 483//698 484//731 +f 813//663 821//644 685//695 +f 813//663 685//695 678//730 +f 807//664 813//663 678//730 +f 807//664 678//730 670//732 +f 670//732 678//730 484//731 +f 670//732 484//731 486//733 +f 723//734 766//735 799//668 +f 723//734 799//668 765//721 +f 682//640 766//735 723//734 +f 666//736 682//640 723//734 +f 633//651 666//736 723//734 +f 633//651 723//734 673//652 +f 673//652 723//734 765//721 +f 673//652 765//721 699//658 +f 590//638 682//640 666//736 +f 577//737 590//638 666//736 +f 
490//639 590//638 577//737 +f 500//738 490//639 577//737 +f 500//738 577//737 568//650 +f 505//654 500//738 568//650 +f 577//737 666//736 633//651 +f 568//650 577//737 633//651 +f 814//710 826//689 878//739 +f 857//740 814//710 878//739 +f 789//682 814//710 857//740 +f 805//678 789//682 857//740 +f 805//678 857//740 879//717 +f 815//679 805//678 879//717 +f 857//740 878//739 908//707 +f 879//717 857//740 908//707 +f 779//741 639//742 668//743 +f 800//706 779//741 668//743 +f 639//742 461//744 468//745 +f 668//743 639//742 468//745 +f 630//690 459//693 461//744 +f 639//742 630//690 461//744 +f 741//691 630//690 639//742 +f 779//741 741//691 639//742 +f 843//746 809//688 741//691 +f 843//746 741//691 779//741 +f 826//689 809//688 843//746 +f 878//739 826//689 843//746 +f 878//739 843//746 870//705 +f 908//707 878//739 870//705 +f 843//746 779//741 800//706 +f 870//705 843//746 800//706 +f 638//747 778//748 720//633 +f 611//632 638//747 720//633 +f 662//680 815//679 778//748 +f 638//747 662//680 778//748 +f 482//685 662//680 638//747 +f 485//749 482//685 638//747 +f 485//749 638//747 611//632 +f 487//636 485//749 611//632 +f 778//748 815//679 876//718 +f 778//748 876//718 839//750 +f 778//748 839//750 811//751 +f 720//633 778//748 811//751 +f 839//750 877//659 846//661 +f 811//751 839//750 846//661 +f 839//750 876//718 907//649 +f 839//750 907//649 877//659 +f 621//728 640//752 488//753 +f 621//728 488//753 491//729 +f 640//752 670//732 486//733 +f 640//752 486//733 488//753 +f 770//669 807//664 670//732 +f 770//669 670//732 640//752 +f 722//670 770//669 640//752 +f 722//670 640//752 621//728 +f 787//702 658//754 644//755 +f 787//702 644//755 755//703 +f 658//754 472//756 473//757 +f 644//755 658//754 473//757 +f 668//743 468//745 472//756 +f 658//754 668//743 472//756 +f 800//706 668//743 658//754 +f 787//702 800//706 658//754 +f 705//634 792//758 766//735 +f 682//640 705//634 766//735 +f 792//758 816//665 799//668 +f 766//735 792//758 799//668 +f 811//751 846//661 816//665 +f 792//758 811//751 816//665 +f 720//633 811//751 792//758 +f 705//634 720//633 792//758 +f 699//658 679//722 593//759 +f 598//656 699//658 593//759 +f 598//656 593//759 507//760 +f 598//656 507//760 509//657 +f 593//759 587//725 506//727 +f 593//759 506//727 507//760 +f 679//722 651//723 587//725 +f 679//722 587//725 593//759 +f 808//672 713//761 696//762 +f 795//673 808//672 696//762 +f 713//761 466//763 470//764 +f 696//762 713//761 470//764 +f 683//765 465//766 466//763 +f 713//761 683//765 466//763 +f 777//676 683//765 713//761 +f 777//676 713//761 808//672 +f 696//762 470//764 474//767 +f 680//768 696//762 474//767 +f 757//674 795//673 696//762 +f 757//674 696//762 680//768 +f 652//769 683//765 777//676 +f 652//769 777//676 727//675 +f 467//770 465//766 683//765 +f 467//770 683//765 652//769 +f 644//755 652//769 727//675 +f 644//755 727//675 755//703 +f 473//757 467//770 652//769 +f 473//757 652//769 644//755 +f 680//768 474//767 477//771 +f 686//772 680//768 477//771 +f 784//715 757//674 680//768 +f 784//715 680//768 686//772 +f 798//714 786//671 757//674 +f 798//714 757//674 784//715 +f 755//703 727//675 786//671 +f 755//703 786//671 798//714 +f 688//699 687//773 478//774 +f 688//699 478//774 480//700 +f 687//773 686//772 477//771 +f 687//773 477//771 478//774 +f 817//713 784//715 686//772 +f 817//713 686//772 687//773 +f 822//646 817//713 687//773 +f 822//646 687//773 688//699 +f 391//775 379//776 489//637 +f 391//775 489//637 490//639 +f 379//776 370//777 487//636 +f 379//776 487//636 489//637 +f 276//778 
261//779 370//777 +f 276//778 370//777 379//776 +f 299//780 276//778 379//776 +f 299//780 379//776 391//775 +f 95//781 58//782 44//783 +f 75//784 95//781 44//783 +f 159//785 95//781 75//784 +f 156//786 159//785 75//784 +f 156//786 75//784 101//787 +f 160//788 156//786 101//787 +f 75//784 44//783 74//789 +f 101//787 75//784 74//789 +f 383//790 403//791 508//655 +f 383//790 508//655 509//657 +f 403//791 413//792 505//654 +f 403//791 505//654 508//655 +f 308//793 348//794 413//792 +f 308//793 413//792 403//791 +f 282//795 308//793 403//791 +f 383//790 282//795 403//791 +f 168//796 115//797 141//798 +f 174//799 168//796 141//798 +f 115//797 104//800 135//801 +f 141//798 115//797 135//801 +f 101//787 74//789 104//800 +f 115//797 101//787 104//800 +f 160//788 101//787 115//797 +f 168//796 160//788 115//797 +f 174//799 141//798 169//802 +f 211//803 174//799 169//802 +f 211//803 169//802 187//804 +f 259//805 211//803 187//804 +f 187//804 169//802 165//806 +f 187//804 165//806 182//807 +f 169//802 141//798 135//801 +f 169//802 135//801 165//806 +f 173//808 204//809 254//810 +f 195//811 173//808 254//810 +f 186//812 173//808 195//811 +f 224//813 186//812 195//811 +f 312//814 475//684 479//683 +f 316//815 312//814 479//683 +f 192//816 312//814 316//815 +f 176//817 192//816 316//815 +f 166//818 176//817 316//815 +f 166//818 316//815 319//819 +f 319//819 316//815 479//683 +f 319//819 479//683 482//685 +f 459//693 457//692 314//820 +f 351//821 459//693 314//820 +f 351//821 314//820 172//822 +f 240//823 351//821 172//822 +f 172//822 314//820 267//824 +f 172//822 267//824 155//825 +f 314//820 457//692 460//694 +f 267//824 314//820 460//694 +f 293//826 159//785 156//786 +f 292//827 293//826 156//786 +f 480//700 293//826 292//827 +f 481//697 480//700 292//827 +f 481//697 292//827 296//828 +f 483//698 481//697 296//828 +f 292//827 156//786 160//788 +f 296//828 292//827 160//788 +f 99//829 111//830 73//831 +f 99//829 73//831 48//832 +f 194//833 181//834 111//830 +f 194//833 111//830 99//829 +f 226//835 194//833 99//829 +f 100//836 226//835 99//829 +f 100//836 99//829 48//832 +f 58//782 100//836 48//832 +f 267//824 460//694 464//711 +f 297//837 267//824 464//711 +f 155//825 267//824 297//837 +f 167//838 155//825 297//837 +f 167//838 297//837 312//814 +f 192//816 167//838 312//814 +f 297//837 464//711 475//684 +f 312//814 297//837 475//684 +f 183//839 226//835 100//836 +f 117//840 183//839 100//836 +f 197//841 183//839 117//840 +f 164//842 197//841 117//840 +f 164//842 117//840 95//781 +f 159//785 164//842 95//781 +f 117//840 100//836 58//782 +f 95//781 117//840 58//782 +f 66//843 102//844 166//818 +f 105//845 66//843 166//818 +f 48//832 73//831 102//844 +f 66//843 48//832 102//844 +f 58//782 48//832 66//843 +f 44//783 58//782 66//843 +f 44//783 66//843 105//845 +f 74//789 44//783 105//845 +f 302//846 230//847 216//848 +f 302//846 216//848 282//795 +f 187//804 182//807 216//848 +f 230//847 187//804 216//848 +f 259//805 187//804 230//847 +f 300//849 259//805 230//847 +f 330//850 300//849 230//847 +f 330//850 230//847 302//846 +f 491//729 360//851 376//852 +f 501//726 491//729 376//852 +f 501//726 376//852 394//853 +f 506//727 501//726 394//853 +f 376//852 300//849 330//850 +f 394//853 376//852 330//850 +f 360//851 259//805 300//849 +f 376//852 360//851 300//849 +f 303//854 168//796 174//799 +f 311//855 303//854 174//799 +f 296//828 160//788 168//796 +f 303//854 296//828 168//796 +f 483//698 296//828 303//854 +f 484//731 483//698 303//854 +f 484//731 303//854 311//855 +f 486//733 484//731 311//855 +f 258//856 
315//857 348//794 +f 308//793 258//856 348//794 +f 258//856 215//858 299//780 +f 258//856 299//780 315//857 +f 182//807 215//858 258//856 +f 216//848 182//807 258//856 +f 216//848 258//856 308//793 +f 282//795 216//848 308//793 +f 413//792 404//859 500//738 +f 413//792 500//738 505//654 +f 404//859 391//775 490//639 +f 404//859 490//639 500//738 +f 315//857 299//780 391//775 +f 315//857 391//775 404//859 +f 348//794 315//857 404//859 +f 348//794 404//859 413//792 +f 102//844 124//860 176//817 +f 102//844 176//817 166//818 +f 124//860 167//838 192//816 +f 124//860 192//816 176//817 +f 103//861 155//825 167//838 +f 103//861 167//838 124//860 +f 73//831 103//861 124//860 +f 73//831 124//860 102//844 +f 461//744 459//693 351//821 +f 461//744 351//821 342//862 +f 468//745 461//744 342//862 +f 468//745 342//862 313//863 +f 313//863 342//862 202//864 +f 313//863 202//864 181//834 +f 342//862 351//821 240//823 +f 342//862 240//823 202//864 +f 111//830 138//865 103//861 +f 111//830 103//861 73//831 +f 138//865 172//822 155//825 +f 138//865 155//825 103//861 +f 240//823 172//822 138//865 +f 202//864 240//823 138//865 +f 181//834 202//864 138//865 +f 181//834 138//865 111//830 +f 343//866 319//819 482//685 +f 343//866 482//685 485//749 +f 203//867 166//818 319//819 +f 203//867 319//819 343//866 +f 261//779 203//867 343//866 +f 261//779 343//866 370//777 +f 370//777 343//866 485//749 +f 370//777 485//749 487//636 +f 135//801 104//800 142//868 +f 135//801 142//868 170//869 +f 170//869 142//868 203//867 +f 170//869 203//867 261//779 +f 105//845 166//818 203//867 +f 142//868 105//845 203//867 +f 74//789 105//845 142//868 +f 104//800 74//789 142//868 +f 311//855 174//799 211//803 +f 341//870 311//855 211//803 +f 486//733 311//855 341//870 +f 488//753 486//733 341//870 +f 488//753 341//870 360//851 +f 491//729 488//753 360//851 +f 341//870 211//803 259//805 +f 360//851 341//870 259//805 +f 472//756 468//745 313//863 +f 472//756 313//863 323//871 +f 473//757 472//756 323//871 +f 473//757 323//871 337//872 +f 337//872 323//871 194//833 +f 226//835 337//872 194//833 +f 323//871 313//863 181//834 +f 323//871 181//834 194//833 +f 165//806 135//801 170//869 +f 165//806 170//869 189//873 +f 182//807 165//806 189//873 +f 182//807 189//873 215//858 +f 215//858 189//873 276//778 +f 215//858 276//778 299//780 +f 189//873 170//869 261//779 +f 189//873 261//779 276//778 +f 506//727 394//853 388//874 +f 507//760 506//727 388//874 +f 507//760 388//874 383//790 +f 509//657 507//760 383//790 +f 388//874 302//846 282//795 +f 388//874 282//795 383//790 +f 394//853 330//850 302//846 +f 388//874 394//853 302//846 +f 466//763 465//766 298//875 +f 466//763 298//875 268//876 +f 470//764 466//763 268//876 +f 470//764 268//876 285//877 +f 285//877 268//876 173//808 +f 285//877 173//808 186//812 +f 268//876 298//875 204//809 +f 173//808 268//876 204//809 +f 285//877 186//812 224//813 +f 301//878 285//877 224//813 +f 474//767 470//764 285//877 +f 474//767 285//877 301//878 +f 298//875 465//766 467//770 +f 329//879 298//875 467//770 +f 204//809 298//875 329//879 +f 254//810 204//809 329//879 +f 329//879 467//770 473//757 +f 337//872 329//879 473//757 +f 254//810 329//879 337//872 +f 226//835 254//810 337//872 +f 301//878 224//813 197//841 +f 295//880 301//878 197//841 +f 477//771 474//767 301//878 +f 477//771 301//878 295//880 +f 195//811 254//810 226//835 +f 183//839 195//811 226//835 +f 224//813 195//811 183//839 +f 197//841 224//813 183//839 +f 295//880 197//841 164//842 +f 294//881 295//880 164//842 +f 477//771 295//880 294//881 
+f 478//774 477//771 294//881 +f 478//774 294//881 293//826 +f 480//700 478//774 293//826 +f 294//881 164//842 159//785 +f 293//826 294//881 159//785 +f 439//628 425//627 423//630 +f 432//629 439//628 423//630 +f 582//882 647//883 627//884 +f 564//885 582//882 627//884 +f 599//886 572//887 550//888 +f 569//889 599//886 550//888 +f 753//890 763//891 676//892 +f 753//890 676//892 664//893 +f 728//894 739//895 776//896 +f 728//894 776//896 767//897 +f 635//898 625//899 603//900 +f 618//901 635//898 603//900 +f 763//891 771//902 694//903 +f 763//891 694//903 676//892 +f 623//904 677//905 672//906 +f 623//904 672//906 607//907 +f 209//908 229//909 227//910 +f 196//911 209//908 227//910 +f 708//912 691//913 625//899 +f 708//912 625//899 635//898 +f 754//914 752//915 772//916 +f 754//914 772//916 785//917 +f 739//895 754//914 785//917 +f 739//895 785//917 776//896 +f 607//907 672//906 647//883 +f 582//882 607//907 647//883 +f 618//901 603//900 572//887 +f 599//886 618//901 572//887 +f 776//896 785//917 771//902 +f 776//896 771//902 763//891 +f 672//906 754//914 739//895 +f 647//883 672//906 739//895 +f 229//909 304//918 309//919 +f 227//910 229//909 309//919 +f 378//920 356//921 346//922 +f 378//920 346//922 363//923 +f 196//911 227//910 242//924 +f 205//925 196//911 242//924 +f 676//892 694//903 618//901 +f 676//892 618//901 599//886 +f 374//926 358//927 356//921 +f 374//926 356//921 378//920 +f 273//928 290//929 209//908 +f 273//928 209//908 213//930 +f 382//931 305//932 317//933 +f 412//934 382//931 317//933 +f 771//902 768//935 708//912 +f 771//902 708//912 694//903 +f 407//936 378//920 363//923 +f 407//936 363//923 382//931 +f 354//937 334//938 399//939 +f 354//937 399//939 415//940 +f 346//922 273//928 287//941 +f 363//923 346//922 287//941 +f 603//900 607//907 582//882 +f 572//887 603//900 582//882 +f 210//942 196//911 205//925 +f 218//943 210//942 205//925 +f 218//943 205//925 214//944 +f 228//945 218//943 214//944 +f 242//924 227//910 309//919 +f 242//924 309//919 334//938 +f 213//930 209//908 196//911 +f 210//942 213//930 196//911 +f 358//927 304//918 290//929 +f 358//927 290//929 356//921 +f 428//946 407//936 382//931 +f 428//946 382//931 412//934 +f 356//921 290//929 273//928 +f 346//922 356//921 273//928 +f 415//940 399//939 407//936 +f 415//940 407//936 428//946 +f 399//939 374//926 378//920 +f 399//939 378//920 407//936 +f 334//938 309//919 374//926 +f 334//938 374//926 399//939 +f 290//929 304//918 229//909 +f 290//929 229//909 209//908 +f 273//928 213//930 210//942 +f 287//941 273//928 210//942 +f 287//941 210//942 218//943 +f 305//932 287//941 218//943 +f 309//919 304//918 358//927 +f 374//926 309//919 358//927 +f 691//913 677//905 623//904 +f 625//899 691//913 623//904 +f 253//947 242//924 334//938 +f 253//947 334//938 354//937 +f 205//925 242//924 253//947 +f 214//944 205//925 253//947 +f 305//932 218//943 228//945 +f 317//933 305//932 228//945 +f 363//923 287//941 305//932 +f 382//931 363//923 305//932 +f 772//916 691//913 708//912 +f 768//935 772//916 708//912 +f 625//899 623//904 607//907 +f 603//900 625//899 607//907 +f 572//887 582//882 564//885 +f 550//888 572//887 564//885 +f 785//917 772//916 768//935 +f 785//917 768//935 771//902 +f 767//897 776//896 763//891 +f 767//897 763//891 753//890 +f 694//903 708//912 635//898 +f 694//903 635//898 618//901 +f 672//906 677//905 752//915 +f 672//906 752//915 754//914 +f 647//883 739//895 728//894 +f 627//884 647//883 728//894 +f 664//893 676//892 599//886 +f 664//893 599//886 569//889 +f 752//915 677//905 691//913 +f 772//916 
752//915 691//913 +f 381//948 463//949 471//950 +f 375//951 381//948 471//950 +f 389//952 355//953 347//954 +f 386//955 389//952 347//954 +f 456//956 402//957 392//958 +f 462//959 456//956 392//958 +f 402//957 395//960 364//961 +f 392//958 402//957 364//961 +f 456//956 454//962 395//960 +f 456//956 395//960 402//957 +f 401//963 455//964 458//965 +f 387//966 401//963 458//965 +f 387//966 458//965 463//949 +f 381//948 387//966 463//949 +f 392//958 364//961 355//953 +f 389//952 392//958 355//953 +f 462//959 392//958 389//952 +f 469//967 462//959 389//952 +f 469//967 389//952 386//955 +f 476//968 469//967 386//955 +f 395//960 401//963 387//966 +f 364//961 395//960 387//966 +f 364//961 387//966 381//948 +f 355//953 364//961 381//948 +f 395//960 454//962 455//964 +f 395//960 455//964 401//963 +f 355//953 381//948 375//951 +f 347//954 355//953 375//951 +f 471//950 463//949 600//969 +f 471//950 600//969 606//970 +f 634//971 626//972 592//973 +f 634//971 592//973 595//974 +f 589//975 579//976 456//956 +f 589//975 456//956 462//959 +f 617//977 586//978 579//976 +f 617//977 579//976 589//975 +f 586//978 454//962 456//956 +f 579//976 586//978 456//956 +f 458//965 455//964 580//979 +f 458//965 580//979 594//980 +f 463//949 458//965 594//980 +f 463//949 594//980 600//969 +f 626//972 617//977 589//975 +f 626//972 589//975 592//973 +f 592//973 589//975 462//959 +f 592//973 462//959 469//967 +f 595//974 592//973 469//967 +f 595//974 469//967 476//968 +f 594//980 580//979 586//978 +f 594//980 586//978 617//977 +f 600//969 594//980 617//977 +f 600//969 617//977 626//972 +f 455//964 454//962 586//978 +f 580//979 455//964 586//978 +f 606//970 600//969 626//972 +f 606//970 626//972 634//971 diff --git a/samples/sample4.obj b/samples/sample4.obj new file mode 100644 index 0000000000000000000000000000000000000000..aeb6866470ad05f793e3d392f0b1b6c3b55e3730 --- /dev/null +++ b/samples/sample4.obj @@ -0,0 +1,7057 @@ +# Blender 4.0.0 +# www.blender.org +o pm0489_00_Skin +v 0.012495 0.159868 0.397648 +v 0.017089 0.149577 0.400404 +v 0.018376 0.151966 0.407019 +v 0.018743 0.136898 0.404263 +v 0.020764 0.138552 0.410327 +v 0.018927 0.123117 0.405366 +v 0.013230 0.162992 0.403896 +v 0.005880 0.172363 0.400221 +v 0.011760 0.161705 0.408490 +v 0.005329 0.167034 0.395627 +v -0.001654 0.169607 0.395075 +v 0.020948 0.124587 0.411613 +v 0.018743 0.112091 0.412165 +v 0.018192 0.126240 0.416024 +v 0.016722 0.110070 0.405733 +v 0.012863 0.099228 0.405549 +v 0.018008 0.139655 0.414186 +v 0.015987 0.152334 0.411062 +v 0.011944 0.152517 0.414737 +v 0.013230 0.140757 0.417861 +v -0.001838 0.175303 0.399669 +v -0.009372 0.172179 0.399853 +v -0.001838 0.171444 0.406836 +v -0.008637 0.166850 0.395443 +v -0.015435 0.159500 0.397097 +v 0.005329 0.169056 0.407019 +v 0.004961 0.163543 0.412716 +v 0.009555 0.158765 0.413267 +v 0.006064 0.141676 0.421720 +v 0.005696 0.153253 0.418045 +v -0.001838 0.153436 0.418964 +v -0.001838 0.165564 0.412348 +v -0.008820 0.163359 0.412532 +v -0.009004 0.168872 0.406836 +v -0.015252 0.161522 0.407938 +v -0.016722 0.162624 0.403344 +v -0.021683 0.151415 0.406284 +v -0.019846 0.149210 0.399853 +v -0.021499 0.136347 0.403528 +v -0.023704 0.138001 0.409408 +v -0.021499 0.122565 0.404447 +v -0.013230 0.158581 0.412716 +v -0.015435 0.152150 0.414370 +v -0.019294 0.151782 0.410327 +v -0.021132 0.139103 0.413451 +v -0.021132 0.125873 0.415289 +v -0.023704 0.124035 0.410695 +v -0.021316 0.111724 0.411430 +v -0.019111 0.109519 0.405182 +v -0.014884 0.098677 0.404998 +v -0.009372 0.153069 0.417861 +v -0.016722 0.140390 
0.417310 +v -0.009555 0.141492 0.421352 +v -0.001838 0.141860 0.422639 +v -0.001654 0.129732 0.424660 +v -0.009555 0.129364 0.423374 +v -0.001470 0.116134 0.424109 +v 0.006064 0.129548 0.423558 +v -0.016722 0.127710 0.419331 +v -0.015068 0.114847 0.419882 +v -0.008820 0.115766 0.423190 +v -0.007718 0.103638 0.420434 +v 0.013414 0.128078 0.419882 +v 0.012128 0.115215 0.420434 +v -0.018927 0.113377 0.416024 +v -0.014517 0.102352 0.416024 +v -0.012128 0.107130 0.419515 +v 0.005696 0.115950 0.423374 +v 0.005145 0.103822 0.420801 +v 0.016354 0.113745 0.416575 +v 0.012128 0.102720 0.416575 +v 0.009555 0.107314 0.419882 +v -0.016171 0.098493 0.411062 +v -0.008820 0.089489 0.410695 +v -0.008085 0.090959 0.403896 +v -0.000919 0.088019 0.403712 +v -0.001286 0.102536 0.421169 +v -0.001103 0.093348 0.417126 +v -0.008453 0.095921 0.416759 +v -0.000919 0.086365 0.410695 +v 0.006983 0.089673 0.411062 +v 0.006248 0.091327 0.404263 +v 0.006248 0.096104 0.417126 +v 0.013965 0.098861 0.411613 +v 0.065417 0.420434 0.480338 +v 0.120911 0.418964 0.458104 +v 0.067990 0.385153 0.488791 +v -0.057516 0.423741 0.479603 +v -0.062293 0.388644 0.487872 +v -0.112826 0.425395 0.456817 +v 0.057883 0.168688 0.343624 +v 0.062477 0.162073 0.343440 +v 0.056046 0.168321 0.359794 +v 0.055494 0.172179 0.352076 +v 0.051084 0.178427 0.357222 +v 0.048512 0.178978 0.363469 +v 0.042815 0.190371 0.357405 +v 0.039140 0.191474 0.361999 +v 0.035465 0.200110 0.352628 +v 0.031606 0.201764 0.353914 +v 0.031422 0.204153 0.344359 +v 0.029952 0.204153 0.344359 +v 0.035832 0.200662 0.336090 +v 0.031422 0.204153 0.344359 +v 0.029952 0.204153 0.344359 +v 0.031974 0.202499 0.334803 +v 0.042448 0.192209 0.331312 +v 0.040243 0.193495 0.327637 +v 0.051635 0.179530 0.330577 +v 0.049798 0.180265 0.325064 +v 0.055862 0.172731 0.335355 +v 0.057148 0.169239 0.328188 +v 0.057883 0.168688 0.343624 +v 0.062477 0.162073 0.343440 +v 0.031974 0.202499 0.334803 +v 0.029952 0.204153 0.344359 +v 0.028850 0.204520 0.344359 +v 0.026828 0.204888 0.332966 +v 0.037486 0.193863 0.323043 +v 0.040243 0.193495 0.327637 +v 0.026645 0.217935 0.333150 +v 0.028850 0.217567 0.344726 +v 0.047042 0.181183 0.319184 +v 0.049798 0.180265 0.325064 +v 0.056964 0.166115 0.319919 +v 0.057148 0.169239 0.328188 +v 0.062477 0.162073 0.343440 +v 0.067071 0.153253 0.343256 +v 0.026828 0.230981 0.333333 +v 0.029033 0.230798 0.344726 +v 0.065417 0.143146 0.315693 +v 0.070379 0.142227 0.342889 +v 0.074237 0.119441 0.341970 +v 0.027196 0.244028 0.333333 +v 0.029401 0.243844 0.344726 +v 0.027747 0.257075 0.332966 +v 0.029952 0.256891 0.344542 +v 0.021499 0.257258 0.323227 +v 0.020764 0.244212 0.323411 +v 0.011760 0.257626 0.316795 +v 0.020581 0.231165 0.323594 +v 0.011209 0.244579 0.316979 +v 0.000368 0.257810 0.314406 +v -0.000184 0.244763 0.314590 +v 0.010842 0.231349 0.316979 +v -0.000551 0.231533 0.314590 +v 0.020397 0.218118 0.323411 +v 0.010842 0.218302 0.316795 +v -0.000735 0.218486 0.314406 +v 0.020397 0.205072 0.323043 +v 0.010842 0.205255 0.316428 +v -0.000551 0.205255 0.314039 +v 0.029768 0.195149 0.314223 +v 0.015803 0.195884 0.305954 +v -0.000184 0.196068 0.302830 +v 0.037670 0.182653 0.306689 +v 0.020764 0.183940 0.296950 +v 0.000184 0.184124 0.292723 +v 0.044101 0.166850 0.299890 +v 0.024440 0.167218 0.287946 +v 0.000551 0.167218 0.282800 +v 0.050533 0.143881 0.293275 +v 0.027931 0.144065 0.280228 +v 0.000919 0.143881 0.274348 +v 0.069092 0.120360 0.313488 +v 0.072584 0.096472 0.341235 +v 0.053473 0.120911 0.289599 +v 0.029585 0.121095 0.276553 +v 0.001286 0.120911 0.270489 +v 
0.067622 0.097391 0.313304 +v 0.066152 0.076259 0.340316 +v 0.052370 0.097758 0.290151 +v 0.029033 0.097758 0.278023 +v 0.001470 0.097391 0.271959 +v 0.061558 0.077178 0.315325 +v 0.053657 0.057148 0.339581 +v 0.026828 0.077361 0.283719 +v 0.001838 0.076810 0.278207 +v 0.047777 0.077545 0.294193 +v 0.050165 0.057699 0.319368 +v 0.039324 0.042264 0.338846 +v 0.022234 0.057699 0.294745 +v 0.002389 0.057148 0.290334 +v 0.039140 0.058067 0.303381 +v 0.036751 0.042631 0.324513 +v 0.030136 0.034546 0.338111 +v 0.016906 0.042631 0.305954 +v 0.002756 0.042080 0.302830 +v 0.029033 0.042815 0.312569 +v 0.028115 0.035097 0.327637 +v 0.024440 0.024256 0.336825 +v 0.013598 0.035281 0.312569 +v 0.003124 0.034914 0.310180 +v 0.022418 0.035465 0.318633 +v 0.022970 0.024991 0.328739 +v 0.018008 0.013965 0.335171 +v 0.011760 0.025358 0.317163 +v 0.003675 0.024991 0.315325 +v 0.018559 0.025358 0.321757 +v 0.017089 0.014517 0.329842 +v 0.010842 0.003491 0.333517 +v 0.009739 0.014701 0.322308 +v 0.004410 0.014517 0.321205 +v 0.014149 0.014701 0.325432 +v 0.010474 0.003675 0.331312 +v 0.007350 0.003859 0.328188 +v 0.005145 0.003675 0.327637 +v 0.005329 0.001286 0.329842 +v 0.009188 0.003859 0.329474 +v 0.008269 0.001286 0.331863 +v 0.008453 0.001103 0.333150 +v 0.006983 0.000368 0.332782 +v 0.006431 0.001286 0.330026 +v 0.007534 0.001286 0.330761 +v 0.006983 0.000735 0.331312 +v 0.005329 0.000000 0.332782 +v 0.005329 0.000368 0.331128 +v 0.005329 0.000000 0.334436 +v 0.006983 0.000368 0.332782 +v 0.005329 0.000000 0.332782 +v 0.006983 0.000368 0.334436 +v 0.008453 0.001103 0.333150 +v 0.006431 0.000551 0.336090 +v 0.005145 0.000368 0.336273 +v 0.004961 0.002205 0.339397 +v 0.008269 0.000919 0.334252 +v 0.010474 0.003124 0.335722 +v 0.010842 0.003491 0.333517 +v 0.018008 0.013965 0.335171 +v 0.007534 0.000735 0.335355 +v 0.007167 0.002573 0.338846 +v 0.016906 0.013230 0.340500 +v 0.024440 0.024256 0.336825 +v 0.009004 0.002756 0.337743 +v 0.009372 0.011760 0.347850 +v 0.004043 0.011393 0.348769 +v 0.013965 0.012495 0.344910 +v 0.022786 0.023337 0.344726 +v 0.030136 0.034546 0.338111 +v 0.011209 0.021499 0.355935 +v 0.003124 0.020764 0.357222 +v 0.018008 0.022418 0.351525 +v 0.027747 0.033811 0.348585 +v 0.039324 0.042264 0.338846 +v 0.012679 0.032157 0.363102 +v 0.002205 0.031422 0.364939 +v 0.021683 0.032892 0.357405 +v 0.036384 0.041529 0.352995 +v 0.053657 0.057148 0.339581 +v 0.015803 0.039691 0.372841 +v 0.001470 0.038956 0.375230 +v 0.028115 0.040610 0.364939 +v 0.049430 0.056413 0.359427 +v 0.066152 0.076259 0.340316 +v 0.020397 0.054576 0.387358 +v 0.000368 0.053841 0.390849 +v 0.037670 0.055494 0.376332 +v 0.060639 0.075340 0.365307 +v 0.072584 0.096472 0.341235 +v 0.024623 0.073135 0.400037 +v -0.000551 0.072400 0.404447 +v 0.046123 0.074237 0.386255 +v 0.066703 0.095369 0.368798 +v 0.074237 0.119441 0.341970 +v 0.026828 0.092981 0.407203 +v -0.001103 0.092429 0.412165 +v 0.050533 0.094083 0.391952 +v 0.067990 0.118339 0.370452 +v 0.070379 0.142227 0.342889 +v 0.027196 0.116134 0.409776 +v -0.001470 0.115583 0.414921 +v 0.051452 0.117236 0.394157 +v 0.064498 0.141125 0.369717 +v 0.067071 0.153253 0.343256 +v 0.056046 0.164278 0.366961 +v 0.062477 0.162073 0.343440 +v 0.056046 0.168321 0.359794 +v 0.048512 0.178978 0.363469 +v 0.048695 0.140206 0.392319 +v 0.025542 0.139287 0.407203 +v -0.001654 0.138736 0.412165 +v 0.045204 0.179346 0.369166 +v 0.039140 0.191474 0.361999 +v 0.042264 0.163910 0.386806 +v 0.022051 0.163175 0.400404 +v -0.001654 0.162624 0.404263 +v 0.035649 0.192576 0.366226 +v 0.031606 
0.201764 0.353914 +v 0.026277 0.204153 0.355752 +v 0.029952 0.204153 0.344359 +v 0.028850 0.204520 0.344359 +v 0.028850 0.217567 0.344726 +v 0.035832 0.179346 0.380926 +v 0.018743 0.179530 0.392870 +v -0.001654 0.179897 0.394157 +v 0.027931 0.193311 0.373392 +v 0.013965 0.194598 0.381294 +v -0.001654 0.194598 0.382764 +v 0.026277 0.217383 0.356119 +v 0.029033 0.230798 0.344726 +v 0.019662 0.203785 0.365307 +v 0.009923 0.203418 0.371738 +v -0.001654 0.203234 0.373760 +v 0.026645 0.230614 0.356119 +v 0.029401 0.243844 0.344726 +v 0.019662 0.217016 0.365674 +v 0.009923 0.216832 0.371922 +v -0.001470 0.216832 0.374127 +v 0.027012 0.243660 0.356119 +v 0.029952 0.256891 0.344542 +v 0.027747 0.256891 0.355935 +v 0.020029 0.230430 0.365858 +v 0.010290 0.230246 0.372106 +v -0.001286 0.230430 0.374311 +v 0.020397 0.243660 0.365858 +v 0.021132 0.257075 0.365491 +v 0.010658 0.243844 0.372106 +v -0.000735 0.244028 0.374311 +v 0.011393 0.257258 0.371922 +v 0.000000 0.257442 0.374127 +v 0.035097 0.595002 0.247887 +v 0.029401 0.586733 0.236126 +v 0.028850 0.587100 0.247887 +v 0.035832 0.595186 0.236310 +v 0.030320 0.587284 0.212054 +v 0.029952 0.602536 0.247703 +v 0.037854 0.595186 0.212054 +v 0.032341 0.586549 0.185410 +v 0.030504 0.602903 0.236126 +v 0.039875 0.595002 0.185593 +v 0.034730 0.585998 0.157663 +v 0.020581 0.605108 0.242007 +v 0.014700 0.606946 0.247703 +v 0.014700 0.606946 0.244212 +v 0.022970 0.605108 0.235943 +v 0.014884 0.607130 0.235759 +v 0.031239 0.602352 0.211871 +v 0.024072 0.605843 0.211687 +v 0.015252 0.607865 0.211503 +v 0.042631 0.594634 0.157846 +v 0.038405 0.585263 0.125138 +v 0.033076 0.602536 0.185410 +v 0.025358 0.606211 0.185042 +v 0.015619 0.608416 0.184675 +v 0.046307 0.594267 0.125505 +v 0.042631 0.584344 0.090775 +v 0.035649 0.602720 0.157479 +v 0.027196 0.606395 0.157111 +v 0.016538 0.608783 0.156560 +v 0.050900 0.593716 0.091327 +v 0.047593 0.583241 0.056046 +v 0.039324 0.602536 0.124954 +v 0.029952 0.606578 0.124403 +v 0.018376 0.608967 0.123852 +v 0.056229 0.593164 0.056597 +v 0.053289 0.582323 0.022051 +v 0.043734 0.602536 0.090592 +v 0.033444 0.606578 0.089857 +v 0.020581 0.609151 0.088938 +v 0.062110 0.592613 0.022786 +v 0.059904 0.581036 -0.014517 +v 0.048695 0.602352 0.055862 +v 0.037486 0.606578 0.054759 +v 0.023521 0.609151 0.053657 +v 0.068909 0.591878 -0.013598 +v 0.066520 0.580118 -0.049247 +v 0.054392 0.602168 0.021683 +v 0.042264 0.606578 0.020397 +v 0.027196 0.609335 0.018927 +v 0.075708 0.591143 -0.048512 +v 0.073135 0.579015 -0.082690 +v 0.061191 0.601801 -0.014701 +v 0.047960 0.606578 -0.016354 +v 0.031606 0.609335 -0.018192 +v 0.082690 0.590408 -0.081955 +v 0.080485 0.577913 -0.118890 +v 0.067806 0.601617 -0.049798 +v 0.053657 0.606578 -0.051452 +v 0.036200 0.609519 -0.053657 +v 0.090408 0.589673 -0.118523 +v 0.088203 0.573686 -0.157479 +v 0.074421 0.601249 -0.083425 +v 0.059353 0.606395 -0.085447 +v 0.040794 0.609335 -0.088019 +v 0.098309 0.585998 -0.157479 +v 0.094634 0.568173 -0.191290 +v 0.081955 0.600882 -0.120360 +v 0.065968 0.606211 -0.122933 +v 0.045939 0.609335 -0.125689 +v 0.105292 0.580669 -0.192025 +v 0.101985 0.560456 -0.228592 +v 0.089857 0.597574 -0.159868 +v 0.072584 0.603087 -0.162624 +v 0.051084 0.606395 -0.165748 +v 0.113010 0.573319 -0.230246 +v 0.108967 0.549063 -0.266630 +v 0.096656 0.592613 -0.194965 +v 0.078280 0.598310 -0.198273 +v 0.055678 0.601801 -0.201580 +v 0.120544 0.561926 -0.269386 +v 0.115399 0.534178 -0.302279 +v 0.104006 0.585446 -0.234105 +v 0.084712 0.591327 -0.237780 +v 0.060639 0.594818 -0.241272 +v 0.127343 
0.547225 -0.306138 +v 0.121279 0.517457 -0.336273 +v 0.111540 0.574421 -0.274164 +v 0.090959 0.580301 -0.278207 +v 0.065233 0.584160 -0.282065 +v 0.133591 0.530504 -0.341051 +v 0.127343 0.498346 -0.371555 +v 0.118155 0.559721 -0.311834 +v 0.096472 0.565785 -0.316428 +v 0.069276 0.569643 -0.320838 +v 0.140206 0.511577 -0.377067 +v 0.132672 0.478684 -0.402977 +v 0.124403 0.543366 -0.347850 +v 0.101433 0.549614 -0.352811 +v 0.072767 0.553105 -0.357222 +v 0.145902 0.491731 -0.409408 +v 0.136714 0.462698 -0.426865 +v 0.130651 0.524623 -0.384601 +v 0.106578 0.531055 -0.390114 +v 0.076259 0.534179 -0.394524 +v 0.150129 0.475928 -0.434032 +v 0.138736 0.453326 -0.440279 +v 0.136163 0.504961 -0.417861 +v 0.110805 0.511393 -0.423741 +v 0.079015 0.513230 -0.427600 +v 0.140390 0.488975 -0.442852 +v 0.114112 0.495406 -0.449100 +v 0.152150 0.466189 -0.447813 +v 0.142411 0.479052 -0.457001 +v 0.143330 0.467291 -0.471334 +v 0.115766 0.485667 -0.463433 +v 0.117236 0.473723 -0.478133 +v 0.117971 0.463065 -0.487321 +v 0.152518 0.455531 -0.462330 +v 0.139838 0.443587 -0.454980 +v 0.137266 0.437339 -0.466740 +v 0.148475 0.447997 -0.472988 +v 0.129548 0.434215 -0.475744 +v 0.137082 0.442301 -0.482359 +v 0.116134 0.431275 -0.478133 +v 0.117788 0.440830 -0.488423 +v 0.152518 0.455531 -0.462330 +v 0.148475 0.447997 -0.472988 +v 0.140206 0.457920 -0.481257 +v 0.118706 0.451672 -0.492834 +v 0.137082 0.442301 -0.482359 +v 0.131937 0.451121 -0.487872 +v 0.117788 0.440830 -0.488423 +v 0.098677 0.443771 -0.486402 +v 0.116134 0.431275 -0.478133 +v 0.117788 0.440830 -0.488423 +v 0.101433 0.436237 -0.478868 +v 0.094267 0.439728 -0.471334 +v 0.087652 0.450386 -0.479236 +v 0.089489 0.445424 -0.459941 +v 0.083058 0.458104 -0.469313 +v 0.114847 0.433480 -0.466189 +v 0.113194 0.438809 -0.454061 +v 0.111356 0.448916 -0.439361 +v 0.079015 0.457552 -0.450386 +v 0.080301 0.465086 -0.459941 +v 0.078648 0.456450 -0.446894 +v 0.082874 0.454245 -0.445057 +v 0.077178 0.461779 -0.430724 +v 0.109702 0.458287 -0.426130 +v 0.106578 0.473907 -0.402426 +v 0.075156 0.474458 -0.405366 +v 0.072400 0.492650 -0.373576 +v 0.106578 0.473907 -0.402426 +v 0.075156 0.474458 -0.405366 +v 0.102720 0.493752 -0.371371 +v 0.097942 0.512679 -0.336457 +v 0.069276 0.510841 -0.338479 +v 0.093164 0.529401 -0.302830 +v 0.065968 0.527196 -0.304851 +v 0.088019 0.544285 -0.267549 +v 0.062477 0.542080 -0.269754 +v 0.082323 0.555862 -0.229879 +v 0.058251 0.553473 -0.232084 +v 0.076259 0.563580 -0.192760 +v 0.053657 0.561191 -0.195149 +v 0.070746 0.569276 -0.159133 +v 0.049430 0.566887 -0.161521 +v 0.064315 0.573502 -0.120728 +v 0.044469 0.571297 -0.123116 +v 0.058067 0.574789 -0.084344 +v 0.039324 0.572584 -0.086549 +v 0.052370 0.576075 -0.050900 +v 0.034914 0.573870 -0.052922 +v 0.046674 0.577178 -0.015803 +v 0.030504 0.575156 -0.017641 +v 0.041161 0.578464 0.020948 +v 0.026277 0.576442 0.019478 +v 0.036384 0.579566 0.055127 +v 0.022602 0.577729 0.054024 +v 0.032341 0.580853 0.090224 +v 0.019662 0.579015 0.089305 +v 0.028850 0.581955 0.124770 +v 0.017273 0.580118 0.124035 +v 0.026093 0.582874 0.157295 +v 0.015619 0.581220 0.156928 +v 0.024440 0.583609 0.185226 +v 0.014701 0.581955 0.184858 +v 0.023153 0.584344 0.211871 +v 0.014149 0.582690 0.211687 +v 0.021867 0.585263 0.235943 +v 0.013782 0.583793 0.235759 +v 0.013782 0.583793 0.242007 +v 0.042631 0.584344 0.090775 +v 0.047593 0.583241 0.056046 +v 0.038405 0.585263 0.125138 +v 0.034730 0.585998 0.157663 +v 0.053289 0.582323 0.022051 +v 0.032341 0.586549 0.185410 +v 0.059904 0.581036 -0.014517 +v 0.030320 0.587284 
0.212054 +v 0.066520 0.580118 -0.049247 +v 0.029401 0.586733 0.236126 +v 0.073135 0.579015 -0.082690 +v 0.080485 0.577913 -0.118890 +v 0.088203 0.573686 -0.157479 +v 0.094634 0.568173 -0.191290 +v 0.101985 0.560456 -0.228592 +v 0.108967 0.549063 -0.266630 +v 0.115399 0.534178 -0.302279 +v 0.121279 0.517457 -0.336273 +v 0.127343 0.498346 -0.371555 +v 0.132672 0.478684 -0.402977 +v 0.109702 0.458287 -0.426130 +v 0.136714 0.462698 -0.426865 +v 0.138736 0.453326 -0.440279 +v 0.111356 0.448916 -0.439361 +v 0.139838 0.443587 -0.454980 +v 0.113194 0.438809 -0.454061 +v 0.137266 0.437339 -0.466740 +v 0.114847 0.433480 -0.466189 +v 0.129548 0.434215 -0.475744 +v 0.116134 0.431275 -0.478133 +v 0.028850 0.587100 0.247887 +v 0.013414 0.583976 0.248071 +v 0.019478 0.585446 0.240537 +v 0.081036 0.476112 -0.463249 +v 0.080301 0.465086 -0.459941 +v 0.083058 0.458104 -0.469313 +v 0.093348 0.471334 -0.477949 +v 0.086917 0.483462 -0.464535 +v 0.081220 0.480154 -0.462697 +v 0.080669 0.494304 -0.451488 +v 0.114112 0.495406 -0.449100 +v 0.079015 0.513230 -0.427600 +v 0.110805 0.511393 -0.423741 +v 0.115766 0.485667 -0.463433 +v 0.117236 0.473723 -0.478133 +v 0.097207 0.460676 -0.486402 +v 0.087652 0.450386 -0.479236 +v 0.117971 0.463065 -0.487321 +v 0.103271 0.451856 -0.490077 +v 0.098677 0.443771 -0.486402 +v 0.118706 0.451672 -0.492834 +v 0.117788 0.440830 -0.488423 +v 0.023153 0.546306 0.305586 +v 0.009004 0.548695 0.295847 +v 0.008637 0.546858 0.302830 +v 0.025910 0.548144 0.298971 +v 0.035649 0.545939 0.313120 +v 0.009372 0.550717 0.288681 +v 0.040610 0.547593 0.307791 +v 0.045020 0.545571 0.324513 +v 0.028666 0.549981 0.292539 +v 0.010658 0.557699 0.273245 +v 0.051268 0.547042 0.321205 +v 0.049982 0.545204 0.338295 +v 0.035097 0.556413 0.278207 +v 0.012128 0.568725 0.258177 +v 0.045388 0.549063 0.302646 +v 0.056964 0.546674 0.337376 +v 0.049798 0.545204 0.352995 +v 0.041713 0.567255 0.264425 +v 0.013230 0.581771 0.249173 +v 0.045939 0.581036 0.256156 +v 0.056413 0.555310 0.291253 +v 0.057699 0.548512 0.317898 +v 0.056597 0.546674 0.354649 +v 0.044469 0.545388 0.366777 +v 0.067255 0.565784 0.280412 +v 0.072032 0.554392 0.310915 +v 0.063947 0.548144 0.336457 +v 0.050533 0.546858 0.370636 +v 0.034914 0.545571 0.377986 +v 0.074237 0.579199 0.273796 +v 0.047960 0.596288 0.253216 +v 0.063580 0.548144 0.356119 +v 0.079934 0.553840 0.334620 +v 0.085998 0.564682 0.304116 +v 0.039324 0.547225 0.383499 +v 0.022051 0.545939 0.385153 +v 0.077361 0.594450 0.271408 +v 0.047409 0.610988 0.256891 +v 0.075707 0.609151 0.274348 +v 0.056597 0.548328 0.374495 +v 0.079199 0.553657 0.359794 +v 0.095369 0.563947 0.332966 +v 0.094818 0.577912 0.300073 +v 0.098677 0.592980 0.298603 +v 0.096104 0.607865 0.300625 +v 0.105292 0.577177 0.331863 +v 0.109335 0.592245 0.331496 +v 0.106395 0.607130 0.332231 +v 0.094451 0.563763 0.363102 +v 0.070011 0.554024 0.382947 +v 0.104190 0.576994 0.365307 +v 0.108232 0.592062 0.366042 +v 0.105292 0.606946 0.365307 +v 0.083241 0.564131 0.391216 +v 0.043550 0.548695 0.389195 +v 0.091694 0.577545 0.396362 +v 0.095369 0.592613 0.398199 +v 0.092981 0.607313 0.396178 +v 0.053289 0.554759 0.401691 +v 0.024256 0.547777 0.391951 +v 0.007534 0.546490 0.387541 +v 0.007350 0.548328 0.394524 +v 0.026461 0.549430 0.398567 +v 0.007167 0.550165 0.401691 +v 0.031422 0.555678 0.413635 +v 0.006615 0.556964 0.417310 +v 0.063028 0.565050 0.413818 +v 0.036384 0.566519 0.428151 +v 0.006431 0.568173 0.432378 +v 0.069460 0.578464 0.421169 +v 0.039875 0.580117 0.436972 +v 0.006983 0.581955 0.441749 +v 0.072216 0.593715 
0.423925 +v 0.070746 0.608416 0.420985 +v 0.041713 0.595185 0.440096 +v 0.007718 0.597207 0.445057 +v 0.041529 0.610070 0.436604 +v 0.008637 0.611907 0.441382 +v 0.045939 0.581036 0.256156 +v 0.013414 0.582506 0.248806 +v 0.013230 0.581771 0.249173 +v 0.032709 0.585630 0.249724 +v 0.013414 0.583976 0.248071 +v 0.028850 0.587100 0.247887 +v 0.047409 0.610988 0.256891 +v 0.015436 0.627527 0.259463 +v 0.015068 0.612826 0.249908 +v 0.044836 0.625873 0.265527 +v 0.075707 0.609151 0.274348 +v 0.015803 0.641124 0.274715 +v 0.070195 0.624219 0.281514 +v 0.096104 0.607865 0.300625 +v 0.039875 0.639654 0.279860 +v 0.015619 0.651047 0.295112 +v 0.088754 0.623116 0.305035 +v 0.106395 0.607130 0.332231 +v 0.060823 0.638368 0.292907 +v 0.032892 0.649945 0.298787 +v 0.015252 0.656560 0.315325 +v 0.097942 0.622381 0.333517 +v 0.105292 0.606946 0.365307 +v 0.076075 0.637449 0.312385 +v 0.047960 0.649026 0.307975 +v 0.027196 0.655457 0.316979 +v 0.014884 0.657846 0.328188 +v 0.097023 0.622198 0.363469 +v 0.092981 0.607313 0.396178 +v 0.083793 0.636714 0.335722 +v 0.058802 0.648291 0.321940 +v 0.027931 0.656560 0.328923 +v 0.014517 0.658398 0.340132 +v 0.036751 0.654171 0.320470 +v 0.085814 0.622565 0.391400 +v 0.070746 0.608416 0.420985 +v 0.082874 0.636714 0.360529 +v 0.064315 0.647923 0.338662 +v 0.040794 0.654171 0.329658 +v 0.065785 0.623668 0.413635 +v 0.041529 0.610070 0.436604 +v 0.073686 0.637082 0.383499 +v 0.043550 0.653987 0.340867 +v 0.063580 0.647923 0.356487 +v 0.039324 0.624954 0.427784 +v 0.008637 0.611907 0.441382 +v 0.009739 0.626608 0.432010 +v 0.057148 0.637817 0.401874 +v 0.035465 0.638919 0.413451 +v 0.011025 0.640389 0.416942 +v 0.057148 0.648107 0.372841 +v 0.029768 0.649577 0.394340 +v 0.012312 0.650496 0.396729 +v 0.045204 0.648658 0.385888 +v 0.042999 0.653987 0.352811 +v 0.025175 0.655274 0.375781 +v 0.013230 0.656192 0.376883 +v 0.034914 0.653804 0.372657 +v 0.039691 0.653987 0.363837 +v 0.026645 0.656376 0.363653 +v 0.013782 0.657663 0.363653 +v 0.014149 0.658214 0.351525 +v 0.028115 0.656744 0.352076 +v 0.028482 0.656744 0.340500 +v 0.047960 0.596288 0.253216 +v 0.032709 0.585630 0.249724 +v 0.045939 0.581036 0.256156 +v 0.039324 0.595369 0.249724 +v 0.028850 0.587100 0.247887 +v 0.047409 0.610988 0.256891 +v 0.035097 0.595002 0.247887 +v 0.034179 0.604557 0.249908 +v 0.015068 0.612826 0.249908 +v 0.029952 0.602536 0.247703 +v 0.014884 0.609886 0.248438 +v 0.014700 0.606946 0.247703 +v -0.000367 0.213524 0.269203 +v -0.001103 0.202499 0.317163 +v 0.065233 0.202499 0.318265 +v 0.062845 0.214443 0.270305 +v 0.000368 0.233921 0.234473 +v 0.124770 0.213892 0.318082 +v 0.062477 0.232819 0.239250 +v 0.001103 0.257442 0.211871 +v 0.120544 0.221793 0.276920 +v 0.165380 0.236861 0.316060 +v 0.065417 0.255788 0.220323 +v 0.002021 0.285189 0.195516 +v 0.150864 0.242742 0.276185 +v 0.192209 0.267916 0.314223 +v 0.110437 0.236861 0.247152 +v 0.068909 0.283168 0.204520 +v 0.002940 0.317714 0.188901 +v 0.119625 0.252113 0.238883 +v 0.169239 0.272510 0.267732 +v 0.070930 0.315877 0.198273 +v 0.003859 0.353547 0.188350 +v 0.127527 0.279309 0.228409 +v 0.178978 0.309996 0.261485 +v 0.205807 0.307975 0.312753 +v 0.131202 0.313304 0.221977 +v 0.071114 0.351709 0.197721 +v 0.004778 0.388093 0.191474 +v 0.180081 0.348401 0.260198 +v 0.207461 0.347483 0.312018 +v 0.131202 0.350055 0.221058 +v 0.069827 0.386255 0.200662 +v 0.005696 0.423190 0.200478 +v 0.174568 0.383131 0.260933 +v 0.201948 0.382212 0.312569 +v 0.188534 0.417677 0.315325 +v 0.128629 0.384601 0.223999 +v 0.067071 0.421536 0.209298 +v 
0.006615 0.459757 0.217016 +v 0.163910 0.418596 0.266630 +v 0.122381 0.419882 0.232084 +v 0.062477 0.458287 0.225285 +v 0.007350 0.491547 0.238331 +v 0.147740 0.455715 0.277472 +v 0.166115 0.454980 0.318633 +v 0.112275 0.456817 0.246417 +v 0.054759 0.490077 0.245865 +v 0.007718 0.515987 0.259096 +v 0.095553 0.488975 0.263690 +v 0.124954 0.488056 0.289783 +v 0.137817 0.487505 0.324881 +v 0.046674 0.514884 0.265711 +v 0.008269 0.536567 0.284270 +v 0.033627 0.535832 0.288864 +v 0.079934 0.513965 0.280779 +v 0.056413 0.535097 0.300257 +v 0.070195 0.534546 0.315693 +v 0.102352 0.513230 0.302095 +v 0.108784 0.513047 0.330026 +v 0.077545 0.534362 0.334252 +v 0.045020 0.545571 0.324513 +v 0.059169 0.541529 0.337008 +v 0.049982 0.545204 0.338295 +v 0.053473 0.541896 0.321573 +v 0.035649 0.545939 0.313120 +v 0.068357 0.538037 0.335722 +v 0.042631 0.542264 0.308710 +v 0.023153 0.546306 0.305586 +v 0.061742 0.538221 0.318633 +v 0.077545 0.534362 0.334252 +v 0.070195 0.534546 0.315693 +v 0.049430 0.538589 0.304484 +v 0.056413 0.535097 0.300257 +v 0.026645 0.542815 0.300073 +v 0.008637 0.546858 0.302830 +v 0.008453 0.543366 0.296582 +v 0.030136 0.539324 0.294561 +v 0.033627 0.535832 0.288864 +v 0.008453 0.539875 0.290518 +v 0.008269 0.536567 0.284270 +v 0.000000 0.283903 0.490445 +v 0.067071 0.282065 0.482176 +v -0.000551 0.256340 0.475560 +v 0.063763 0.255053 0.467843 +v -0.001103 0.232084 0.451121 +v 0.060823 0.233554 0.447630 +v -0.001470 0.212973 0.418045 +v 0.061926 0.213892 0.417677 +v 0.065233 0.202499 0.318265 +v -0.001103 0.202499 0.317163 +v -0.001470 0.202499 0.371003 +v 0.065050 0.202132 0.370452 +v -0.001470 0.212973 0.418045 +v 0.124770 0.213892 0.318082 +v 0.061926 0.213892 0.417677 +v 0.124403 0.214259 0.371003 +v 0.165380 0.236861 0.316060 +v 0.164278 0.236494 0.373025 +v 0.192209 0.267916 0.314223 +v 0.115031 0.224182 0.417310 +v 0.192576 0.267916 0.376332 +v 0.205807 0.307975 0.312753 +v 0.108416 0.238515 0.439361 +v 0.060823 0.233554 0.447630 +v 0.063763 0.255053 0.467843 +v 0.117788 0.252113 0.448732 +v 0.146270 0.244212 0.422455 +v 0.165564 0.272510 0.428335 +v 0.176222 0.309445 0.432194 +v 0.206174 0.307791 0.379089 +v 0.207461 0.347483 0.312018 +v 0.177508 0.347850 0.432929 +v 0.207093 0.347299 0.380007 +v 0.201948 0.382212 0.312569 +v 0.172914 0.382396 0.430724 +v 0.201397 0.381845 0.379089 +v 0.188534 0.417677 0.315325 +v 0.187982 0.417494 0.376148 +v 0.166115 0.454980 0.318633 +v 0.162808 0.418045 0.424293 +v 0.165748 0.454796 0.372657 +v 0.137817 0.487505 0.324881 +v 0.146821 0.455164 0.413451 +v 0.137633 0.487505 0.366042 +v 0.108784 0.513047 0.330026 +v 0.111172 0.456082 0.444138 +v 0.120911 0.418964 0.458104 +v 0.065417 0.420434 0.480338 +v 0.124219 0.487688 0.401139 +v 0.108784 0.512863 0.361080 +v 0.077545 0.534362 0.334252 +v 0.077361 0.534179 0.356670 +v 0.101801 0.513047 0.388644 +v 0.069644 0.534362 0.375046 +v 0.079199 0.513414 0.409776 +v 0.055678 0.534730 0.390298 +v 0.032892 0.535281 0.401323 +v 0.094634 0.488423 0.426865 +v 0.045572 0.514333 0.424476 +v 0.007350 0.536016 0.405549 +v 0.006799 0.515435 0.430540 +v 0.053473 0.489342 0.444322 +v 0.005880 0.490628 0.450937 +v 0.060823 0.457369 0.464719 +v 0.004961 0.458839 0.472069 +v 0.003859 0.422087 0.488423 +v 0.002756 0.386806 0.497244 +v 0.067990 0.385153 0.488791 +v 0.069092 0.350606 0.491363 +v 0.001838 0.352260 0.500000 +v 0.069092 0.314774 0.490628 +v 0.000919 0.316612 0.499081 +v 0.067071 0.282065 0.482176 +v 0.000000 0.283903 0.490445 +v 0.022051 0.545939 0.385153 +v 0.007534 0.542999 0.393605 +v 0.007534 
0.546490 0.387541 +v 0.025726 0.542448 0.390481 +v 0.034914 0.545571 0.377986 +v 0.007350 0.539508 0.399485 +v 0.041896 0.541896 0.382212 +v 0.044469 0.545388 0.366777 +v 0.029217 0.538956 0.395994 +v 0.007350 0.536016 0.405549 +v 0.032892 0.535281 0.401323 +v 0.048695 0.538405 0.386255 +v 0.055678 0.534730 0.390298 +v 0.052922 0.541713 0.369533 +v 0.049798 0.545204 0.352995 +v 0.061191 0.538037 0.372290 +v 0.069644 0.534362 0.375046 +v 0.058986 0.541529 0.354282 +v 0.049982 0.545204 0.338295 +v 0.059169 0.541529 0.337008 +v 0.068173 0.537854 0.355568 +v 0.077361 0.534179 0.356670 +v 0.068357 0.538037 0.335722 +v 0.077545 0.534362 0.334252 +v -0.059169 0.167402 0.341419 +v -0.057883 0.166850 0.357773 +v -0.063396 0.160603 0.341051 +v -0.057148 0.170709 0.349871 +v -0.052922 0.177325 0.355200 +v -0.050717 0.177876 0.361632 +v -0.045204 0.189269 0.355752 +v -0.041713 0.190555 0.360529 +v -0.037854 0.199191 0.351341 +v -0.033995 0.201029 0.352628 +v -0.033444 0.203418 0.343256 +v -0.031974 0.203418 0.343256 +v -0.037486 0.199927 0.334620 +v -0.031974 0.203418 0.343256 +v -0.033444 0.203418 0.343256 +v -0.033627 0.201764 0.333517 +v -0.043918 0.191290 0.329658 +v -0.041345 0.192576 0.325983 +v -0.052554 0.178243 0.328556 +v -0.050533 0.178978 0.323227 +v -0.056781 0.171444 0.333333 +v -0.057699 0.167769 0.325983 +v -0.059169 0.167402 0.341419 +v -0.063396 0.160603 0.341051 +v -0.033627 0.201764 0.333517 +v -0.031055 0.203969 0.343256 +v -0.031974 0.203418 0.343256 +v -0.028482 0.204520 0.331863 +v -0.038589 0.192944 0.321573 +v -0.041345 0.192576 0.325983 +v -0.047593 0.179897 0.317347 +v -0.050533 0.178978 0.323227 +v -0.057332 0.164829 0.317714 +v -0.057699 0.167769 0.325983 +v -0.063396 0.160603 0.341051 +v -0.067806 0.151782 0.340684 +v -0.030503 0.194598 0.312936 +v -0.028482 0.217935 0.332415 +v -0.031055 0.217567 0.343807 +v -0.030871 0.231165 0.343991 +v -0.028298 0.231349 0.332598 +v -0.030320 0.244763 0.344175 +v -0.021867 0.218118 0.322859 +v -0.027931 0.244947 0.332782 +v -0.029768 0.258361 0.343991 +v -0.027380 0.258361 0.332598 +v -0.021867 0.231533 0.323043 +v -0.021499 0.244947 0.323043 +v -0.020764 0.258361 0.323043 +v -0.021867 0.204888 0.322308 +v -0.012128 0.218302 0.316428 +v -0.011944 0.231533 0.316612 +v -0.011577 0.244947 0.316612 +v -0.011025 0.258177 0.316612 +v -0.000184 0.244763 0.314590 +v 0.000368 0.257810 0.314406 +v -0.000551 0.231533 0.314590 +v -0.000735 0.218486 0.314406 +v -0.012128 0.205072 0.316060 +v -0.000551 0.205255 0.314039 +v -0.000184 0.196068 0.302830 +v -0.016354 0.195700 0.305402 +v -0.020581 0.183388 0.296215 +v 0.000184 0.184124 0.292723 +v -0.037670 0.181735 0.305402 +v -0.023521 0.166667 0.287027 +v 0.000551 0.167218 0.282800 +v -0.043734 0.165932 0.298052 +v -0.026093 0.143330 0.279125 +v 0.000919 0.143881 0.274348 +v -0.065050 0.141492 0.313304 +v -0.070930 0.140573 0.340132 +v -0.074421 0.117236 0.339214 +v -0.049247 0.142595 0.291437 +v -0.068173 0.118339 0.310915 +v -0.072216 0.093532 0.338479 +v -0.027380 0.120176 0.275450 +v 0.001286 0.120911 0.270489 +v -0.051635 0.119441 0.287762 +v -0.066152 0.094634 0.310731 +v -0.064682 0.073135 0.337743 +v -0.026277 0.096656 0.276920 +v 0.001470 0.097391 0.271959 +v -0.049982 0.095737 0.288313 +v -0.059169 0.074054 0.312936 +v -0.050900 0.054024 0.337560 +v -0.023337 0.076075 0.282617 +v 0.001838 0.076810 0.278207 +v -0.044653 0.075156 0.292356 +v -0.046674 0.054759 0.317530 +v -0.035097 0.039140 0.337376 +v -0.017824 0.056413 0.293826 +v 0.002389 0.057148 0.290334 +v -0.034914 0.055678 0.301911 
+v -0.031974 0.039875 0.323043 +v -0.024807 0.031790 0.337008 +v -0.011577 0.041529 0.305402 +v 0.002756 0.042080 0.302830 +v -0.023704 0.040610 0.311466 +v -0.022602 0.032525 0.326534 +v -0.017641 0.021499 0.335906 +v -0.007534 0.034179 0.312018 +v 0.003124 0.034914 0.310180 +v -0.016538 0.033444 0.317898 +v -0.015987 0.022418 0.328004 +v -0.009739 0.011944 0.334620 +v -0.004410 0.024440 0.316795 +v 0.003675 0.024991 0.315325 +v -0.011209 0.023521 0.321205 +v -0.008637 0.012679 0.329474 +v -0.000735 0.002573 0.333333 +v -0.000919 0.013965 0.322124 +v 0.004410 0.014517 0.321205 +v -0.005513 0.013414 0.325064 +v -0.000367 0.002940 0.331128 +v 0.002940 0.003491 0.328004 +v 0.005145 0.003675 0.327637 +v 0.005329 0.001286 0.329842 +v 0.000919 0.003124 0.329291 +v 0.002205 0.000735 0.331863 +v 0.002021 0.000551 0.332966 +v 0.003675 0.000000 0.332782 +v 0.004043 0.001103 0.330026 +v 0.002940 0.000919 0.330577 +v 0.003859 0.000551 0.331312 +v 0.005329 0.000000 0.332782 +v 0.005329 0.000368 0.331128 +v 0.005329 0.000000 0.334436 +v 0.005329 0.000000 0.332782 +v 0.003675 0.000000 0.332782 +v 0.003675 0.000000 0.334252 +v 0.002021 0.000551 0.332966 +v 0.004043 0.000368 0.335906 +v 0.005145 0.000368 0.336273 +v 0.004961 0.002205 0.339397 +v 0.002205 0.000551 0.334068 +v -0.000367 0.002389 0.335538 +v -0.000735 0.002573 0.333333 +v -0.009739 0.011944 0.334620 +v 0.002940 0.000368 0.335355 +v 0.002756 0.002205 0.338846 +v -0.008637 0.011393 0.339949 +v -0.017641 0.021499 0.335906 +v 0.000919 0.002205 0.337560 +v -0.001286 0.011025 0.347666 +v 0.004043 0.011393 0.348769 +v -0.005696 0.011025 0.344542 +v -0.016171 0.020764 0.343991 +v -0.024807 0.031790 0.337008 +v -0.004961 0.020397 0.355568 +v 0.003124 0.020764 0.357222 +v -0.011760 0.020397 0.350790 +v -0.022969 0.031239 0.347483 +v -0.035097 0.039140 0.337376 +v -0.008269 0.031055 0.362734 +v 0.002205 0.031422 0.364939 +v -0.017089 0.030871 0.356487 +v -0.032525 0.038589 0.351525 +v -0.050900 0.054024 0.337560 +v -0.012863 0.038405 0.372106 +v 0.001470 0.038956 0.375230 +v -0.024807 0.038405 0.363837 +v -0.047409 0.053473 0.357589 +v -0.064682 0.073135 0.337743 +v -0.019662 0.053289 0.386439 +v 0.000368 0.053841 0.390849 +v -0.036384 0.053289 0.374862 +v -0.060272 0.072400 0.362918 +v -0.072216 0.093532 0.338479 +v -0.025542 0.072032 0.398934 +v -0.000551 0.072400 0.404447 +v -0.046490 0.072032 0.384418 +v -0.067071 0.092613 0.366226 +v -0.074421 0.117236 0.339214 +v -0.028666 0.092062 0.406101 +v -0.001103 0.092429 0.412165 +v -0.051819 0.092245 0.389930 +v -0.069276 0.116134 0.367696 +v -0.070930 0.140573 0.340132 +v -0.029768 0.115399 0.408673 +v -0.001470 0.115583 0.414921 +v -0.053657 0.115583 0.392135 +v -0.065968 0.139655 0.367328 +v -0.067806 0.151782 0.340684 +v -0.057883 0.162992 0.364939 +v -0.063396 0.160603 0.341051 +v -0.057883 0.166850 0.357773 +v -0.050717 0.177876 0.361632 +v -0.051084 0.138920 0.390481 +v -0.028482 0.138552 0.406101 +v -0.001654 0.138736 0.412165 +v -0.047593 0.178243 0.367328 +v -0.041713 0.190555 0.360529 +v -0.044836 0.162808 0.385153 +v -0.025358 0.162624 0.399485 +v -0.001654 0.162624 0.404263 +v -0.038221 0.191658 0.364756 +v -0.033995 0.201029 0.352628 +v -0.038772 0.178427 0.379456 +v -0.022234 0.178978 0.391952 +v -0.001654 0.179897 0.394157 +v -0.030871 0.192760 0.372473 +v -0.017273 0.194230 0.380742 +v -0.001654 0.194598 0.382764 +v -0.001654 0.203234 0.373760 +v -0.028850 0.203602 0.354833 +v -0.031974 0.203418 0.343256 +v -0.031055 0.203969 0.343256 +v -0.031055 0.217567 0.343807 +v -0.013047 0.203234 
0.371187 +v -0.022602 0.203418 0.364572 +v -0.028850 0.217383 0.355200 +v -0.012863 0.216832 0.371738 +v -0.001470 0.216832 0.374127 +v -0.001286 0.230430 0.374311 +v -0.022602 0.217016 0.365123 +v -0.028666 0.230981 0.355568 +v -0.030871 0.231165 0.343991 +v -0.012679 0.230614 0.371922 +v -0.000735 0.244028 0.374311 +v -0.022234 0.230798 0.365307 +v -0.012128 0.244212 0.371922 +v 0.000000 0.257442 0.374127 +v -0.011577 0.257810 0.371738 +v -0.021867 0.244396 0.365307 +v -0.021132 0.257993 0.365307 +v -0.028298 0.244579 0.355568 +v -0.030320 0.244763 0.344175 +v -0.027563 0.258361 0.355568 +v -0.029768 0.258361 0.343991 +v -0.006799 0.597207 0.246600 +v -0.001470 0.588570 0.246968 +v -0.001470 0.588203 0.235391 +v -0.007166 0.597023 0.235208 +v -0.001470 0.588387 0.211136 +v -0.000919 0.604190 0.246784 +v -0.008269 0.597023 0.210952 +v -0.002389 0.588019 0.184307 +v -0.000919 0.604190 0.235391 +v -0.009372 0.596839 0.184123 +v -0.003124 0.587468 0.156009 +v 0.009004 0.605660 0.241823 +v 0.014700 0.606946 0.247703 +v 0.014700 0.606946 0.244212 +v 0.006615 0.605843 0.235575 +v 0.014884 0.607130 0.235759 +v -0.001103 0.603638 0.211136 +v 0.006248 0.606578 0.211319 +v 0.015252 0.607865 0.211503 +v -0.010290 0.596656 0.155641 +v -0.003308 0.586733 0.122933 +v -0.002205 0.604006 0.184307 +v 0.005880 0.606946 0.184491 +v 0.015619 0.608416 0.184675 +v -0.010658 0.596288 0.122381 +v -0.002940 0.585814 0.087652 +v -0.002940 0.604006 0.155825 +v 0.005880 0.607130 0.156193 +v 0.016538 0.608783 0.156560 +v -0.010658 0.595737 0.086917 +v -0.002205 0.584895 0.051819 +v -0.003124 0.604006 0.122749 +v 0.006615 0.607314 0.123116 +v 0.018376 0.608967 0.123852 +v -0.009923 0.595369 0.051084 +v -0.000551 0.583977 0.016538 +v -0.002756 0.604006 0.087468 +v 0.007718 0.607497 0.088019 +v 0.020581 0.609151 0.088938 +v -0.008636 0.594818 0.015619 +v 0.001654 0.582874 -0.020948 +v -0.001838 0.604006 0.051635 +v 0.009555 0.607497 0.052554 +v 0.023521 0.609151 0.053657 +v -0.006615 0.594267 -0.022051 +v 0.003859 0.581955 -0.056781 +v -0.000367 0.603822 0.016170 +v 0.012128 0.607497 0.017457 +v 0.027196 0.609335 0.018927 +v -0.004778 0.593716 -0.058067 +v 0.006248 0.581036 -0.091143 +v 0.001838 0.603638 -0.021316 +v 0.015252 0.607497 -0.020029 +v 0.031606 0.609335 -0.018192 +v -0.002573 0.593164 -0.092797 +v 0.009004 0.580118 -0.128262 +v 0.004043 0.603455 -0.057332 +v 0.018376 0.607497 -0.055678 +v 0.036200 0.609519 -0.053657 +v 0.000000 0.592429 -0.130283 +v 0.011393 0.576075 -0.167402 +v 0.006431 0.603271 -0.092062 +v 0.021867 0.607497 -0.090224 +v 0.040794 0.609335 -0.088019 +v 0.002205 0.588938 -0.169974 +v 0.013414 0.570746 -0.201580 +v 0.009188 0.603087 -0.129915 +v 0.025726 0.607497 -0.128078 +v 0.045939 0.609335 -0.125689 +v 0.004043 0.583793 -0.204888 +v 0.015436 0.563212 -0.239434 +v 0.011760 0.599963 -0.169974 +v 0.029585 0.604373 -0.168137 +v 0.051084 0.606395 -0.165748 +v 0.005880 0.576626 -0.243477 +v 0.016906 0.552003 -0.277839 +v 0.013965 0.595186 -0.205623 +v 0.032709 0.599596 -0.203969 +v 0.055678 0.601801 -0.201580 +v 0.007350 0.565601 -0.282984 +v 0.018008 0.537486 -0.313671 +v 0.016171 0.588203 -0.244947 +v 0.036200 0.592797 -0.243661 +v 0.060639 0.594818 -0.241272 +v 0.008085 0.551084 -0.320103 +v 0.018559 0.521132 -0.348034 +v 0.018008 0.577545 -0.285373 +v 0.039140 0.582139 -0.284454 +v 0.065233 0.584160 -0.282065 +v 0.008453 0.534914 -0.355200 +v 0.018927 0.502389 -0.383499 +v 0.019111 0.563028 -0.323411 +v 0.041713 0.567622 -0.322859 +v 0.069276 0.569643 -0.320838 +v 0.008637 0.516354 
-0.391584 +v 0.018743 0.482911 -0.415105 +v 0.019846 0.547042 -0.359610 +v 0.043734 0.551635 -0.359427 +v 0.072767 0.553105 -0.357222 +v 0.008269 0.497060 -0.424109 +v 0.018559 0.467291 -0.439361 +v 0.020397 0.528666 -0.396729 +v 0.045572 0.533260 -0.396913 +v 0.076259 0.534179 -0.394524 +v 0.007902 0.481441 -0.448916 +v 0.018376 0.458104 -0.452775 +v 0.020397 0.509188 -0.430173 +v 0.046858 0.513782 -0.430540 +v 0.079015 0.513230 -0.427600 +v 0.020213 0.493752 -0.455531 +v 0.047777 0.497979 -0.456082 +v 0.007902 0.471885 -0.462697 +v 0.020213 0.484013 -0.469680 +v 0.021316 0.472253 -0.483829 +v 0.048144 0.488240 -0.470415 +v 0.048695 0.476479 -0.485299 +v 0.048879 0.465821 -0.494304 +v 0.009555 0.461228 -0.477030 +v 0.019478 0.448548 -0.467475 +v 0.023888 0.442117 -0.478317 +v 0.015068 0.453510 -0.486770 +v 0.032892 0.438258 -0.485483 +v 0.027563 0.446894 -0.493569 +v 0.046307 0.434215 -0.485299 +v 0.047409 0.443587 -0.495590 +v 0.009555 0.461228 -0.477030 +v 0.015068 0.453510 -0.486770 +v 0.025542 0.462697 -0.492834 +v 0.048328 0.454612 -0.500000 +v 0.027563 0.446894 -0.493569 +v 0.034362 0.455163 -0.497795 +v 0.047409 0.443587 -0.495590 +v 0.046307 0.434215 -0.485299 +v 0.065968 0.445241 -0.489710 +v 0.047409 0.443587 -0.495590 +v 0.061191 0.437890 -0.483094 +v 0.045204 0.436237 -0.473355 +v 0.067071 0.440830 -0.474090 +v 0.075891 0.450753 -0.480522 +v 0.069827 0.446343 -0.461962 +v 0.045020 0.441749 -0.461044 +v 0.079015 0.458287 -0.469680 +v 0.044653 0.451488 -0.446343 +v 0.079015 0.457552 -0.450386 +v 0.080301 0.465086 -0.459941 +v 0.078648 0.456450 -0.446894 +v 0.073870 0.454612 -0.445976 +v 0.044469 0.460860 -0.432929 +v 0.077178 0.461779 -0.430724 +v 0.043734 0.476296 -0.409225 +v 0.075156 0.474458 -0.405366 +v 0.032892 0.438258 -0.485483 +v 0.045204 0.436237 -0.473355 +v 0.046307 0.434215 -0.485299 +v 0.023888 0.442117 -0.478317 +v 0.045020 0.441749 -0.461044 +v 0.019478 0.448548 -0.467475 +v 0.044653 0.451488 -0.446343 +v 0.018376 0.458104 -0.452775 +v 0.044469 0.460860 -0.432929 +v 0.018559 0.467291 -0.439361 +v 0.018743 0.482911 -0.415105 +v 0.043734 0.476296 -0.409225 +v 0.018927 0.502389 -0.383499 +v 0.042631 0.495957 -0.377986 +v 0.075156 0.474458 -0.405366 +v 0.018559 0.521132 -0.348034 +v 0.072400 0.492650 -0.373576 +v 0.041161 0.514701 -0.342889 +v 0.018008 0.537486 -0.313671 +v 0.069276 0.510841 -0.338479 +v 0.039324 0.531238 -0.309078 +v 0.016906 0.552003 -0.277839 +v 0.065968 0.527196 -0.304851 +v 0.037302 0.545939 -0.273796 +v 0.015436 0.563212 -0.239434 +v 0.062477 0.542080 -0.269754 +v 0.034546 0.557332 -0.235759 +v 0.013414 0.570746 -0.201580 +v 0.058251 0.553473 -0.232084 +v 0.031239 0.565050 -0.198456 +v 0.011393 0.576075 -0.167402 +v 0.053657 0.561191 -0.195149 +v 0.028298 0.570562 -0.164645 +v 0.009004 0.580118 -0.128262 +v 0.049430 0.566887 -0.161521 +v 0.024807 0.574789 -0.125873 +v 0.006248 0.581036 -0.091143 +v 0.044469 0.571297 -0.123116 +v 0.020948 0.575891 -0.089122 +v 0.003859 0.581955 -0.056781 +v 0.039324 0.572584 -0.086549 +v 0.017641 0.576994 -0.054943 +v 0.001654 0.582874 -0.020948 +v 0.034914 0.573870 -0.052922 +v 0.014517 0.578096 -0.019478 +v -0.000551 0.583977 0.016538 +v 0.030504 0.575156 -0.017641 +v 0.011393 0.579383 0.017824 +v -0.002205 0.584895 0.051819 +v 0.026277 0.576442 0.019478 +v 0.008820 0.580485 0.052738 +v -0.002940 0.585814 0.087652 +v 0.022602 0.577729 0.054024 +v 0.006983 0.581588 0.088387 +v -0.003308 0.586733 0.122933 +v 0.019662 0.579015 0.089305 +v 0.005880 0.582690 0.123484 +v -0.003124 0.587468 0.156009 +v 0.017273 
0.580118 0.124035 +v 0.005145 0.583609 0.156376 +v -0.002389 0.588019 0.184307 +v 0.015619 0.581220 0.156928 +v 0.005145 0.584344 0.184675 +v -0.001470 0.588387 0.211136 +v 0.014701 0.581955 0.184858 +v 0.005513 0.585079 0.211319 +v -0.001470 0.588203 0.235391 +v 0.014149 0.582690 0.211687 +v 0.005880 0.585998 0.235575 +v 0.013782 0.583793 0.235759 +v -0.001470 0.588570 0.246968 +v 0.008085 0.585998 0.240169 +v 0.013782 0.583793 0.242007 +v 0.013414 0.583976 0.248071 +v 0.081036 0.476112 -0.463249 +v 0.079015 0.458287 -0.469680 +v 0.080301 0.465086 -0.459941 +v 0.071849 0.472253 -0.480154 +v 0.076259 0.483829 -0.465638 +v 0.081220 0.480154 -0.462697 +v 0.080669 0.494304 -0.451488 +v 0.068725 0.461962 -0.489342 +v 0.075891 0.450753 -0.480522 +v 0.062845 0.453510 -0.494304 +v 0.065968 0.445241 -0.489710 +v 0.047409 0.443587 -0.495590 +v 0.048328 0.454612 -0.500000 +v 0.048879 0.465821 -0.494304 +v 0.048695 0.476479 -0.485299 +v 0.048144 0.488240 -0.470415 +v 0.047777 0.497979 -0.456082 +v 0.079015 0.513230 -0.427600 +v 0.046858 0.513782 -0.430540 +v -0.005880 0.547225 0.305219 +v 0.008637 0.546858 0.302830 +v 0.009004 0.548695 0.295847 +v -0.007901 0.549430 0.298420 +v -0.018743 0.547777 0.312385 +v 0.009372 0.550717 0.288681 +v -0.022969 0.549982 0.306872 +v -0.028298 0.547960 0.323594 +v -0.009923 0.551635 0.291804 +v 0.010658 0.557699 0.273245 +v -0.027012 0.552186 0.301176 +v -0.014149 0.558802 0.276920 +v 0.012128 0.568725 0.258177 +v -0.034179 0.550349 0.319735 +v -0.033444 0.548144 0.337376 +v -0.040059 0.552738 0.316060 +v -0.036016 0.559904 0.288864 +v -0.017824 0.570562 0.262587 +v 0.013230 0.581771 0.249173 +v -0.019846 0.584711 0.253951 +v -0.040243 0.550533 0.335906 +v -0.033627 0.547960 0.352076 +v -0.047042 0.552922 0.334252 +v -0.052738 0.560639 0.307424 +v -0.044469 0.571849 0.276920 +v -0.040610 0.550349 0.352995 +v -0.028850 0.547777 0.365858 +v -0.047409 0.552738 0.353914 +v -0.049247 0.586181 0.269754 +v -0.019846 0.599963 0.251011 +v -0.061926 0.561007 0.330761 +v -0.064682 0.572951 0.299338 +v -0.034914 0.550165 0.369166 +v -0.019478 0.547409 0.377251 +v -0.050349 0.601617 0.267181 +v -0.017824 0.614664 0.254686 +v -0.047225 0.616134 0.270305 +v -0.041161 0.552370 0.372473 +v -0.062661 0.560823 0.355752 +v -0.075891 0.573318 0.327453 +v -0.071481 0.587284 0.294561 +v -0.073319 0.602719 0.292907 +v -0.069276 0.617236 0.295112 +v -0.083976 0.587835 0.325616 +v -0.086182 0.603271 0.325064 +v -0.081771 0.617787 0.325983 +v -0.076810 0.573135 0.357773 +v -0.054759 0.560272 0.379640 +v -0.085079 0.587651 0.359059 +v -0.087468 0.603087 0.359610 +v -0.082874 0.617604 0.359059 +v -0.067438 0.572400 0.386439 +v -0.028850 0.551819 0.387725 +v -0.074789 0.586733 0.390849 +v -0.076626 0.602168 0.392503 +v -0.072584 0.616685 0.390665 +v -0.039140 0.559353 0.399118 +v -0.048695 0.571297 0.410327 +v -0.054024 0.585446 0.417126 +v -0.055494 0.600882 0.419515 +v -0.052003 0.615399 0.416942 +v -0.026093 0.599044 0.437890 +v -0.023888 0.613745 0.434399 +v -0.025726 0.583793 0.434767 +v 0.007718 0.597207 0.445057 +v 0.008637 0.611907 0.441382 +v 0.006983 0.581955 0.441749 +v 0.006431 0.568173 0.432378 +v -0.023153 0.569827 0.426314 +v 0.006615 0.556964 0.417310 +v -0.017824 0.558251 0.412348 +v 0.007167 0.550165 0.401691 +v -0.012128 0.551084 0.397832 +v 0.007350 0.548328 0.394524 +v -0.009555 0.549063 0.391400 +v 0.007534 0.546490 0.387541 +v -0.006799 0.547042 0.384785 +v -0.024256 0.549614 0.382580 +v -0.019846 0.584711 0.253951 +v 0.013230 0.581771 0.249173 +v 0.013414 0.582506 
0.248806 +v -0.005513 0.587835 0.248438 +v 0.013414 0.583976 0.248071 +v -0.001470 0.588570 0.246968 +v -0.017824 0.614664 0.254686 +v 0.015068 0.612826 0.249908 +v 0.015436 0.627527 0.259463 +v -0.014149 0.629180 0.263690 +v -0.047225 0.616134 0.270305 +v 0.015803 0.641124 0.274715 +v -0.040610 0.630467 0.277839 +v -0.069276 0.617236 0.295112 +v -0.008637 0.642411 0.278206 +v 0.015619 0.651047 0.295112 +v -0.060639 0.631569 0.300073 +v -0.081771 0.617787 0.325983 +v -0.030503 0.643513 0.289783 +v -0.001838 0.651966 0.297501 +v 0.015252 0.656560 0.315325 +v -0.071849 0.631937 0.328004 +v -0.082874 0.617604 0.359059 +v -0.047042 0.644248 0.308159 +v -0.017457 0.652701 0.305954 +v 0.003308 0.656927 0.316244 +v 0.014884 0.657846 0.328188 +v -0.072767 0.631753 0.357957 +v -0.072584 0.616685 0.390665 +v -0.056229 0.644616 0.331128 +v -0.029217 0.653252 0.319000 +v -0.006615 0.656560 0.319000 +v 0.001838 0.658030 0.328004 +v 0.014517 0.658398 0.340132 +v -0.063396 0.631018 0.386439 +v -0.052003 0.615399 0.416942 +v -0.056964 0.644616 0.355935 +v -0.035832 0.653620 0.335355 +v -0.011209 0.657111 0.328004 +v 0.000551 0.658398 0.339581 +v 0.014149 0.658214 0.351525 +v -0.014700 0.657295 0.339030 +v 0.000184 0.658398 0.351158 +v 0.013782 0.657663 0.363653 +v -0.036384 0.653436 0.352995 +v -0.049430 0.644065 0.379456 +v -0.014884 0.657295 0.350790 +v 0.000735 0.657846 0.362918 +v 0.013230 0.656192 0.376883 +v -0.030871 0.653069 0.369901 +v -0.045020 0.629915 0.409960 +v -0.023888 0.613745 0.434399 +v -0.012312 0.656927 0.361999 +v 0.001286 0.656560 0.375046 +v 0.012312 0.650496 0.396729 +v -0.034179 0.642962 0.398750 +v -0.019662 0.628261 0.425946 +v 0.008637 0.611907 0.441382 +v 0.009739 0.626608 0.432010 +v -0.013230 0.641676 0.411981 +v 0.011025 0.640389 0.416942 +v -0.004961 0.651415 0.393054 +v -0.020029 0.652334 0.383682 +v -0.008269 0.656376 0.371371 +v -0.019846 0.599963 0.251011 +v -0.019846 0.584711 0.253951 +v -0.005513 0.587835 0.248438 +v -0.011025 0.598126 0.248071 +v -0.001470 0.588570 0.246968 +v -0.017824 0.614664 0.254686 +v -0.006799 0.597207 0.246600 +v -0.004961 0.606762 0.248622 +v 0.015068 0.612826 0.249908 +v -0.000919 0.604190 0.246784 +v 0.014884 0.609886 0.248438 +v 0.014700 0.606946 0.247703 +v -0.067255 0.206174 0.317530 +v -0.001103 0.202499 0.317163 +v -0.000367 0.213524 0.269203 +v -0.063580 0.217935 0.269570 +v -0.126057 0.220691 0.316612 +v 0.000368 0.233921 0.234473 +v -0.120911 0.228225 0.275450 +v -0.165380 0.245682 0.314039 +v -0.061926 0.236126 0.238515 +v 0.001103 0.257442 0.211871 +v -0.149945 0.251011 0.274348 +v -0.190555 0.278390 0.311834 +v -0.063396 0.259280 0.219588 +v 0.002021 0.285189 0.195516 +v -0.109702 0.242742 0.245682 +v -0.065050 0.286843 0.203785 +v 0.002940 0.317714 0.188901 +v -0.117971 0.258545 0.237413 +v -0.166667 0.281514 0.265711 +v -0.065233 0.319552 0.197354 +v 0.003859 0.353547 0.188350 +v -0.124219 0.286108 0.226755 +v -0.174384 0.319552 0.259280 +v -0.201948 0.319000 0.310180 +v -0.126057 0.320287 0.220323 +v -0.063396 0.355384 0.196803 +v 0.004778 0.388093 0.191474 +v -0.173282 0.358140 0.257993 +v -0.201396 0.358692 0.309445 +v -0.123852 0.356854 0.219405 +v -0.060456 0.389746 0.199926 +v 0.005696 0.423190 0.200478 +v -0.165932 0.392319 0.258728 +v -0.194046 0.392870 0.310180 +v -0.178611 0.427784 0.313120 +v -0.119441 0.391217 0.222528 +v -0.055862 0.424844 0.208379 +v 0.006615 0.459757 0.217016 +v -0.153436 0.427233 0.264609 +v -0.111540 0.426314 0.230614 +v -0.049430 0.461227 0.224550 +v 0.007350 0.491547 0.238331 +v -0.135428 
0.463433 0.275634 +v -0.154355 0.463800 0.316795 +v -0.099596 0.462514 0.244947 +v -0.040243 0.492650 0.245130 +v 0.007718 0.515987 0.259096 +v -0.081404 0.493752 0.262587 +v -0.110989 0.494487 0.288313 +v -0.124219 0.494671 0.323227 +v -0.031055 0.517089 0.265160 +v 0.008269 0.536567 0.284270 +v -0.017273 0.537302 0.288497 +v -0.064682 0.517824 0.279860 +v -0.040059 0.537854 0.299522 +v -0.054024 0.538221 0.314774 +v -0.087284 0.518376 0.300992 +v -0.094083 0.518559 0.328923 +v -0.061742 0.538405 0.333333 +v 0.000000 0.283903 0.490445 +v -0.000551 0.256340 0.475560 +v -0.066887 0.285741 0.481441 +v -0.064866 0.258545 0.466924 +v -0.001103 0.232084 0.451121 +v -0.062845 0.236861 0.446711 +v -0.001470 0.212973 0.418045 +v -0.064682 0.217200 0.416942 +v -0.018192 0.536751 0.400772 +v 0.007350 0.536016 0.405549 +v 0.006799 0.515435 0.430540 +v -0.032157 0.516354 0.423925 +v 0.005880 0.490628 0.450937 +v -0.065417 0.517457 0.408857 +v -0.040794 0.537486 0.389563 +v -0.054575 0.537854 0.374127 +v -0.041529 0.491915 0.443587 +v 0.004961 0.458839 0.472069 +v -0.082323 0.493201 0.425763 +v -0.050900 0.460309 0.463984 +v 0.003859 0.422087 0.488423 +v -0.087835 0.518192 0.387541 +v -0.061926 0.538221 0.355568 +v -0.057516 0.423741 0.479603 +v 0.002756 0.386806 0.497244 +v -0.062293 0.388644 0.487872 +v -0.065417 0.354282 0.490628 +v 0.001838 0.352260 0.500000 +v -0.067071 0.318449 0.489893 +v 0.000919 0.316612 0.499081 +v -0.066887 0.285741 0.481441 +v 0.000000 0.283903 0.490445 +v -0.100882 0.461779 0.442852 +v -0.112826 0.425395 0.456817 +v -0.154355 0.426681 0.422271 +v -0.136347 0.462881 0.411797 +v -0.111724 0.494120 0.399669 +v -0.178795 0.427416 0.373943 +v -0.166483 0.391768 0.428519 +v -0.154723 0.463433 0.370636 +v -0.124770 0.494487 0.364388 +v -0.094451 0.518376 0.359794 +v -0.061742 0.538405 0.333333 +v -0.094083 0.518559 0.328923 +v -0.124219 0.494671 0.323227 +v -0.154355 0.463800 0.316795 +v -0.178611 0.427784 0.313120 +v -0.194230 0.392687 0.376700 +v -0.194046 0.392870 0.310180 +v -0.201764 0.358324 0.377435 +v -0.172914 0.357405 0.430724 +v -0.201396 0.358692 0.309445 +v -0.203050 0.318817 0.376516 +v -0.173649 0.319000 0.429989 +v -0.201948 0.319000 0.310180 +v -0.191474 0.278207 0.373943 +v -0.165013 0.281330 0.426314 +v -0.190555 0.278390 0.311834 +v -0.147189 0.252297 0.420617 +v -0.165013 0.245314 0.371003 +v -0.165380 0.245682 0.314039 +v -0.126240 0.221058 0.369350 +v -0.126057 0.220691 0.316612 +v -0.117053 0.230614 0.415840 +v -0.067806 0.205807 0.369533 +v -0.067255 0.206174 0.317530 +v -0.001103 0.202499 0.317163 +v -0.001470 0.202499 0.371003 +v -0.001470 0.212973 0.418045 +v -0.064682 0.217200 0.416942 +v -0.109886 0.244396 0.438074 +v -0.118523 0.258545 0.447262 +v -0.062845 0.236861 0.446711 +v -0.064866 0.258545 0.466924 +v -0.033627 0.547960 0.352076 +v -0.042999 0.544836 0.335906 +v -0.033444 0.548144 0.337376 +v -0.043183 0.544836 0.353179 +v -0.028850 0.547777 0.365858 +v -0.052370 0.541529 0.334620 +v -0.037486 0.544469 0.368614 +v -0.019478 0.547409 0.377251 +v -0.052554 0.541529 0.354465 +v -0.061742 0.538405 0.333333 +v -0.061926 0.538221 0.355568 +v -0.045939 0.541161 0.371371 +v -0.054575 0.537854 0.374127 +v -0.026645 0.544101 0.381477 +v -0.006799 0.547042 0.384785 +v -0.033811 0.540794 0.385520 +v -0.040794 0.537486 0.389563 +v -0.018192 0.536751 0.400772 +v -0.010658 0.543550 0.390114 +v -0.014333 0.540243 0.395443 +v 0.007350 0.536016 0.405549 +v 0.007350 0.539508 0.399485 +v 0.007534 0.542999 0.393605 +v 0.007534 0.546490 0.387541 +v -0.017273 
0.537302 0.288497 +v 0.008269 0.536567 0.284270 +v 0.008453 0.539875 0.290518 +v -0.013414 0.540610 0.294010 +v 0.008453 0.543366 0.296582 +v -0.032892 0.541161 0.303749 +v -0.040059 0.537854 0.299522 +v -0.009739 0.543918 0.299706 +v 0.008637 0.546858 0.302830 +v -0.005880 0.547225 0.305219 +v -0.018743 0.547777 0.312385 +v -0.025910 0.544469 0.307975 +v -0.028298 0.547960 0.323594 +v -0.036935 0.544653 0.320654 +v -0.033444 0.548144 0.337376 +v -0.042999 0.544836 0.335906 +v -0.045388 0.541529 0.317714 +v -0.054024 0.538221 0.314774 +v -0.052370 0.541529 0.334620 +v -0.061742 0.538405 0.333333 +v 0.055862 0.172731 0.335355 +v 0.057883 0.168688 0.343624 +v 0.058986 0.162257 0.343440 +v 0.057699 0.166667 0.334987 +v 0.051635 0.179530 0.330577 +v 0.058986 0.154906 0.343256 +v 0.054392 0.175671 0.330026 +v 0.042448 0.192209 0.331312 +v 0.058434 0.159500 0.334436 +v 0.065233 0.137266 0.342889 +v 0.047777 0.190371 0.330945 +v 0.035832 0.200662 0.336090 +v 0.056413 0.170893 0.329291 +v 0.067255 0.142043 0.332782 +v 0.042631 0.199927 0.335906 +v 0.031422 0.204153 0.344359 +v 0.039508 0.203602 0.344542 +v 0.052738 0.187247 0.330393 +v 0.049247 0.198273 0.335722 +v 0.047409 0.202132 0.344726 +v 0.071481 0.154171 0.326902 +v 0.080118 0.119993 0.330761 +v 0.076626 0.113561 0.342337 +v 0.077729 0.172363 0.328004 +v 0.081220 0.184307 0.335171 +v 0.082323 0.188350 0.345829 +v 0.106395 0.176957 0.346380 +v 0.104373 0.171444 0.334068 +v 0.130099 0.162992 0.347299 +v 0.087284 0.134693 0.324329 +v 0.098126 0.157295 0.325799 +v 0.127527 0.157111 0.333150 +v 0.153436 0.151782 0.348401 +v 0.119441 0.140206 0.323411 +v 0.150864 0.144800 0.332598 +v 0.178795 0.141676 0.350423 +v 0.106211 0.114112 0.321389 +v 0.176038 0.133958 0.333150 +v 0.205072 0.132304 0.352811 +v 0.141676 0.125322 0.321573 +v 0.097023 0.096656 0.328556 +v 0.092797 0.089673 0.341786 +v 0.202867 0.123668 0.334436 +v 0.230246 0.124403 0.356303 +v 0.166115 0.111907 0.321022 +v 0.126975 0.096104 0.319184 +v 0.116318 0.075707 0.327086 +v 0.111356 0.067438 0.341786 +v 0.150864 0.080485 0.318449 +v 0.139103 0.057699 0.326718 +v 0.133407 0.048328 0.342337 +v 0.193311 0.099412 0.321205 +v 0.228960 0.115583 0.337376 +v 0.168137 0.040426 0.326534 +v 0.162440 0.029952 0.343256 +v 0.179713 0.065968 0.318082 +v 0.201213 0.029401 0.328372 +v 0.195884 0.018192 0.345094 +v 0.232451 0.014701 0.348769 +v 0.221610 0.090224 0.323778 +v 0.211136 0.056781 0.320103 +v 0.256523 0.110989 0.342705 +v 0.256156 0.119441 0.361080 +v 0.281330 0.114664 0.365307 +v 0.237229 0.026461 0.332598 +v 0.245131 0.054024 0.325064 +v 0.253032 0.086733 0.329107 +v 0.282249 0.107681 0.349320 +v 0.300625 0.108416 0.368063 +v 0.270489 0.032525 0.339214 +v 0.266630 0.021316 0.353730 +v 0.280779 0.086365 0.337008 +v 0.301911 0.102720 0.355384 +v 0.313671 0.099228 0.369533 +v 0.276553 0.058434 0.333150 +v 0.296215 0.046307 0.347850 +v 0.293458 0.037486 0.358875 +v 0.314407 0.095553 0.362734 +v 0.317163 0.086182 0.368431 +v 0.301360 0.087284 0.345829 +v 0.299339 0.066336 0.343072 +v 0.314590 0.087468 0.357773 +v 0.316795 0.075524 0.366961 +v 0.314039 0.075524 0.356119 +v 0.312753 0.063580 0.358508 +v 0.311466 0.058434 0.364021 +v 0.035465 0.200110 0.352628 +v 0.031422 0.204153 0.344359 +v 0.039508 0.203602 0.344542 +v 0.042264 0.199375 0.353179 +v 0.042815 0.190371 0.357405 +v 0.047409 0.202132 0.344726 +v 0.047593 0.188534 0.358140 +v 0.051084 0.178427 0.357222 +v 0.048695 0.197721 0.353730 +v 0.082323 0.188350 0.345829 +v 0.053473 0.174568 0.357773 +v 0.055494 0.172179 0.352076 +v 0.052003 
0.185410 0.358692 +v 0.057148 0.165932 0.352260 +v 0.057883 0.168688 0.343624 +v 0.058986 0.162257 0.343440 +v 0.055311 0.169791 0.358324 +v 0.057699 0.158765 0.352444 +v 0.058986 0.154906 0.343256 +v 0.076259 0.171261 0.361816 +v 0.080301 0.183388 0.355935 +v 0.070011 0.152885 0.360713 +v 0.066336 0.141308 0.353730 +v 0.065233 0.137266 0.342889 +v 0.076626 0.113561 0.342337 +v 0.078831 0.118890 0.354833 +v 0.092797 0.089673 0.341786 +v 0.103271 0.170893 0.358140 +v 0.106395 0.176957 0.346380 +v 0.085447 0.133223 0.363102 +v 0.095186 0.095553 0.356119 +v 0.111356 0.067438 0.341786 +v 0.096104 0.155825 0.364572 +v 0.125873 0.155825 0.360529 +v 0.130099 0.162992 0.347299 +v 0.113929 0.074421 0.357589 +v 0.133407 0.048328 0.342337 +v 0.103455 0.112275 0.365674 +v 0.116501 0.138368 0.367696 +v 0.148475 0.143513 0.363102 +v 0.153436 0.151782 0.348401 +v 0.136163 0.056229 0.359610 +v 0.162440 0.029952 0.343256 +v 0.123117 0.093899 0.368614 +v 0.137817 0.123117 0.371003 +v 0.173098 0.132488 0.366042 +v 0.178795 0.141676 0.350423 +v 0.164645 0.038589 0.361632 +v 0.195884 0.018192 0.345094 +v 0.146270 0.078096 0.371738 +v 0.161338 0.109519 0.374495 +v 0.199375 0.121830 0.369533 +v 0.205072 0.132304 0.352811 +v 0.173833 0.062845 0.374862 +v 0.187615 0.096472 0.378170 +v 0.196986 0.027012 0.364204 +v 0.224734 0.113194 0.373208 +v 0.230246 0.124403 0.356303 +v 0.256156 0.119441 0.361080 +v 0.203969 0.052738 0.377986 +v 0.214627 0.086365 0.381477 +v 0.232451 0.023521 0.367512 +v 0.232451 0.014701 0.348769 +v 0.266630 0.021316 0.353730 +v 0.251746 0.107681 0.377067 +v 0.245131 0.081404 0.384785 +v 0.237413 0.049247 0.381110 +v 0.265895 0.029401 0.370452 +v 0.293458 0.037486 0.358875 +v 0.277839 0.104006 0.379272 +v 0.281330 0.114664 0.365307 +v 0.269203 0.053473 0.382764 +v 0.292356 0.043550 0.371922 +v 0.311466 0.058434 0.364021 +v 0.273429 0.081588 0.386071 +v 0.298236 0.099963 0.379089 +v 0.300625 0.108416 0.368063 +v 0.295480 0.083058 0.384234 +v 0.310731 0.062110 0.370636 +v 0.316795 0.075524 0.366961 +v 0.293275 0.061926 0.381661 +v 0.310731 0.073135 0.375965 +v 0.317163 0.086182 0.368431 +v 0.311466 0.085079 0.377619 +v 0.312385 0.094083 0.375046 +v 0.313671 0.099228 0.369533 +v -0.037854 0.199191 0.351341 +v -0.041713 0.202683 0.343072 +v -0.033444 0.203418 0.343256 +v -0.044469 0.198273 0.351525 +v -0.045204 0.189269 0.355752 +v -0.049430 0.201029 0.342889 +v -0.049798 0.187247 0.356303 +v -0.052922 0.177325 0.355200 +v -0.050900 0.196435 0.351709 +v -0.083976 0.186329 0.342705 +v -0.055311 0.173282 0.355752 +v -0.057148 0.170709 0.349871 +v -0.054024 0.184124 0.356670 +v -0.058618 0.164645 0.350055 +v -0.059169 0.167402 0.341419 +v -0.060088 0.160970 0.341235 +v -0.057148 0.168321 0.356303 +v -0.058986 0.157479 0.350239 +v -0.059904 0.153436 0.341051 +v -0.078280 0.169423 0.358875 +v -0.082323 0.181551 0.352995 +v -0.067255 0.139655 0.351158 +v -0.065601 0.135612 0.340500 +v -0.076442 0.111724 0.339397 +v -0.071481 0.151231 0.358140 +v -0.079199 0.117053 0.351709 +v -0.092062 0.087284 0.338295 +v -0.104925 0.168504 0.354098 +v -0.107865 0.174384 0.342337 +v -0.086549 0.131202 0.359978 +v -0.095186 0.093348 0.352444 +v -0.109886 0.064866 0.337560 +v -0.097758 0.153436 0.360897 +v -0.127343 0.152885 0.355752 +v -0.131202 0.159868 0.342337 +v -0.113377 0.071665 0.353179 +v -0.131569 0.045204 0.337376 +v -0.104006 0.109886 0.361816 +v -0.117971 0.135612 0.363286 +v -0.149761 0.140022 0.357405 +v -0.154355 0.148107 0.342705 +v -0.135244 0.052922 0.354465 +v -0.160235 0.026093 0.337192 +v -0.123484 
0.090959 0.363837 +v -0.138920 0.119809 0.365674 +v -0.174201 0.128262 0.359610 +v -0.179530 0.137449 0.343624 +v -0.163175 0.034546 0.355568 +v -0.193311 0.013414 0.337743 +v -0.146270 0.074605 0.366226 +v -0.162257 0.105660 0.368247 +v -0.200110 0.117053 0.361999 +v -0.205623 0.127343 0.345094 +v -0.173466 0.058802 0.368247 +v -0.188166 0.091878 0.371003 +v -0.195333 0.022235 0.356670 +v -0.225469 0.107681 0.364572 +v -0.230614 0.118890 0.347666 +v -0.256523 0.113194 0.351341 +v -0.203602 0.047777 0.370268 +v -0.215178 0.081220 0.373392 +v -0.230981 0.018008 0.358692 +v -0.229879 0.009188 0.340132 +v -0.264425 0.014884 0.343624 +v -0.252481 0.101617 0.367512 +v -0.245682 0.075524 0.375413 +v -0.237045 0.043550 0.372106 +v -0.264609 0.022970 0.360529 +v -0.291805 0.030320 0.347850 +v -0.278574 0.097391 0.368798 +v -0.281698 0.107865 0.354649 +v -0.268835 0.046858 0.372473 +v -0.291437 0.036567 0.360897 +v -0.310548 0.050900 0.352260 +v -0.273980 0.074972 0.375597 +v -0.298787 0.092797 0.367696 +v -0.301176 0.101250 0.356670 +v -0.295847 0.075891 0.373025 +v -0.310180 0.054576 0.358875 +v -0.316244 0.067990 0.355017 +v -0.293091 0.054943 0.370636 +v -0.310548 0.065601 0.364204 +v -0.317163 0.078464 0.356303 +v -0.311650 0.077545 0.365858 +v -0.312753 0.086549 0.363102 +v -0.314039 0.091694 0.357589 +v -0.056781 0.171444 0.333333 +v -0.060088 0.160970 0.341235 +v -0.059169 0.167402 0.341419 +v -0.058434 0.165197 0.332782 +v -0.052554 0.178243 0.328556 +v -0.059904 0.153436 0.341051 +v -0.055127 0.174384 0.327821 +v -0.043918 0.191290 0.329658 +v -0.058986 0.158030 0.332231 +v -0.065601 0.135612 0.340500 +v -0.049063 0.189269 0.329107 +v -0.037486 0.199927 0.334620 +v -0.057148 0.169423 0.327269 +v -0.067438 0.140573 0.330209 +v -0.044285 0.199008 0.334252 +v -0.033444 0.203418 0.343256 +v -0.041713 0.202683 0.343072 +v -0.053840 0.185961 0.328372 +v -0.050900 0.197170 0.333885 +v -0.049430 0.201029 0.342889 +v -0.071665 0.152517 0.324146 +v -0.079750 0.117971 0.327637 +v -0.076442 0.111724 0.339397 +v -0.082506 0.182286 0.332047 +v -0.083976 0.186329 0.342705 +v -0.107865 0.174384 0.342337 +v -0.078464 0.170526 0.325064 +v -0.105292 0.168872 0.330026 +v -0.131202 0.159868 0.342337 +v -0.086917 0.132488 0.321022 +v -0.098309 0.154906 0.321940 +v -0.128078 0.153987 0.328188 +v -0.154355 0.148107 0.342705 +v -0.119074 0.137449 0.318817 +v -0.150864 0.141308 0.326902 +v -0.179530 0.137449 0.343624 +v -0.105292 0.111724 0.317347 +v -0.175854 0.129732 0.326534 +v -0.205623 0.127343 0.345094 +v -0.140757 0.122014 0.316244 +v -0.095921 0.094451 0.324881 +v -0.092062 0.087284 0.338295 +v -0.202499 0.118706 0.326718 +v -0.230614 0.118890 0.347666 +v -0.165013 0.107865 0.314774 +v -0.125505 0.093164 0.314406 +v -0.114664 0.072951 0.322676 +v -0.109886 0.064866 0.337560 +v -0.148842 0.076994 0.312753 +v -0.136898 0.054392 0.321389 +v -0.131569 0.045204 0.337376 +v -0.191841 0.094818 0.314039 +v -0.228592 0.110070 0.328739 +v -0.165564 0.036384 0.320287 +v -0.160235 0.026093 0.337192 +v -0.177325 0.061742 0.311283 +v -0.198456 0.024623 0.320838 +v -0.193311 0.013414 0.337743 +v -0.229879 0.009188 0.340132 +v -0.219956 0.084895 0.315325 +v -0.208563 0.051635 0.312201 +v -0.256156 0.104741 0.332966 +v -0.256523 0.113194 0.351341 +v -0.281698 0.107865 0.354649 +v -0.234473 0.020764 0.323594 +v -0.242558 0.048144 0.315877 +v -0.251562 0.080669 0.319552 +v -0.282065 0.100882 0.338479 +v -0.301176 0.101250 0.356670 +v -0.268100 0.025910 0.329107 +v -0.264425 0.014884 0.343624 +v -0.279493 0.079566 0.326351 +v 
-0.301727 0.095369 0.343991 +v -0.314039 0.091694 0.357589 +v -0.274531 0.051819 0.322675 +v -0.294377 0.039140 0.336641 +v -0.291805 0.030320 0.347850 +v -0.314407 0.088019 0.350790 +v -0.317163 0.078464 0.356303 +v -0.300441 0.080118 0.334436 +v -0.297685 0.059169 0.331680 +v -0.314039 0.079934 0.345829 +v -0.316244 0.067990 0.355017 +v -0.313120 0.067990 0.344359 +v -0.311650 0.056046 0.346564 +v -0.310548 0.050900 0.352260 +v -0.065417 0.354282 0.490628 +v -0.120911 0.390298 0.464535 +v -0.062293 0.388644 0.487872 +v -0.112826 0.425395 0.456817 +v -0.154355 0.426681 0.422271 +v -0.166483 0.391768 0.428519 +v -0.125505 0.355935 0.467291 +v -0.067071 0.318449 0.489893 +v -0.172914 0.357405 0.430724 +v -0.127527 0.319368 0.466005 +v -0.066887 0.285741 0.481441 +v -0.173649 0.319000 0.429989 +v -0.125505 0.285189 0.459390 +v -0.064866 0.258545 0.466924 +v -0.165013 0.281330 0.426314 +v -0.118523 0.258545 0.447262 +v -0.147189 0.252297 0.420617 +v 0.069092 0.350606 0.491363 +v 0.067990 0.385153 0.488791 +v 0.126975 0.383499 0.466189 +v 0.120911 0.418964 0.458104 +v 0.162808 0.418045 0.424293 +v 0.172914 0.382396 0.430724 +v 0.129732 0.348953 0.468945 +v 0.069092 0.314774 0.490628 +v 0.177508 0.347850 0.432929 +v 0.129732 0.312385 0.467659 +v 0.067071 0.282065 0.482176 +v 0.176222 0.309445 0.432194 +v 0.126057 0.278390 0.460860 +v 0.063763 0.255053 0.467843 +v 0.165564 0.272510 0.428335 +v 0.117788 0.252113 0.448732 +v 0.146270 0.244212 0.422455 +vn 0.9088 0.3695 0.1937 +vn 0.9403 0.3402 -0.0068 +vn 0.8073 0.5830 -0.0919 +vn 0.9909 0.1226 0.0563 +vn 0.9572 0.1518 0.2464 +vn 0.9912 -0.0876 0.0990 +vn 0.7825 0.6080 0.1339 +vn 0.4979 0.8633 0.0826 +vn 0.6623 0.5863 0.4665 +vn 0.5114 0.8384 -0.1885 +vn -0.0069 0.9692 -0.2463 +vn 0.9554 -0.0606 0.2889 +vn 0.9017 -0.2805 0.3289 +vn 0.8078 -0.0128 0.5893 +vn 0.9404 -0.3117 0.1357 +vn 0.8060 -0.5651 0.1760 +vn 0.8127 0.1908 0.5505 +vn 0.7701 0.3912 0.5038 +vn 0.5556 0.3929 0.7327 +vn 0.5723 0.2166 0.7909 +vn -0.0130 0.9985 0.0536 +vn -0.5212 0.8511 0.0633 +vn -0.0199 0.8661 0.4995 +vn -0.5238 0.8260 -0.2081 +vn -0.8170 0.5635 -0.1227 +vn 0.4111 0.7717 0.4852 +vn 0.3189 0.5985 0.7349 +vn 0.5263 0.5267 0.6675 +vn 0.2907 0.2350 0.9275 +vn 0.2959 0.4195 0.8582 +vn -0.0223 0.4334 0.9009 +vn -0.0222 0.6386 0.7692 +vn -0.3608 0.5904 0.7220 +vn -0.4476 0.7614 0.4689 +vn -0.6934 0.5700 0.4408 +vn -0.8014 0.5890 0.1039 +vn -0.9241 0.3475 0.1590 +vn -0.9473 0.3176 -0.0426 +vn -0.9948 0.1004 0.0179 +vn -0.9691 0.1292 0.2102 +vn -0.9918 -0.1085 0.0676 +vn -0.5637 0.5136 0.6469 +vn -0.5923 0.3791 0.7110 +vn -0.7978 0.3724 0.4741 +vn -0.8374 0.1710 0.5192 +vn -0.8289 -0.0320 0.5584 +vn -0.9635 -0.0815 0.2552 +vn -0.9048 -0.3002 0.3021 +vn -0.9365 -0.3319 0.1129 +vn -0.7949 -0.5846 0.1621 +vn -0.3381 0.4119 0.8462 +vn -0.6069 0.2024 0.7686 +vn -0.3312 0.2275 0.9157 +vn -0.0213 0.2404 0.9704 +vn -0.0195 0.0476 0.9987 +vn -0.3232 0.0379 0.9456 +vn -0.0169 -0.1504 0.9885 +vn 0.2861 0.0452 0.9571 +vn -0.5970 0.0133 0.8021 +vn -0.5635 -0.1674 0.8090 +vn -0.3144 -0.1530 0.9369 +vn -0.3231 -0.3454 0.8811 +vn 0.5657 0.0273 0.8242 +vn 0.5363 -0.1542 0.8298 +vn -0.7705 -0.2320 0.5938 +vn -0.6477 -0.4165 0.6380 +vn -0.5217 -0.3061 0.7964 +vn 0.2822 -0.1459 0.9482 +vn 0.2976 -0.3379 0.8929 +vn 0.7546 -0.2144 0.6202 +vn 0.6343 -0.4014 0.6607 +vn 0.4983 -0.2938 0.8157 +vn -0.7637 -0.5351 0.3612 +vn -0.4730 -0.7644 0.4381 +vn -0.4920 -0.8435 0.2155 +vn 0.0086 -0.9698 0.2436 +vn -0.0133 -0.3657 0.9306 +vn -0.0071 -0.6262 0.7796 +vn -0.3974 -0.5621 0.7253 +vn 0.0019 -0.8745 0.4851 
+vn 0.4780 -0.7538 0.4509 +vn 0.5110 -0.8307 0.2211 +vn 0.3833 -0.5531 0.7397 +vn 0.7661 -0.5176 0.3810 +vn 0.3115 0.0845 0.9465 +vn 0.5476 0.3899 0.7404 +vn 0.2932 0.3877 0.8739 +vn -0.5348 0.4192 0.7336 +vn -0.3182 0.1016 0.9426 +vn -0.2825 0.4034 0.8703 +vn 0.8559 0.3202 0.4061 +vn 0.9636 0.2673 0.0108 +vn 0.9624 -0.2308 0.1431 +vn 0.4876 -0.2536 0.8354 +vn 0.2982 0.0851 0.9507 +vn 0.7596 0.3894 0.5210 +vn 0.3962 0.4366 0.8077 +vn 0.6034 0.5553 0.5723 +vn 0.5644 0.6237 0.5408 +vn 0.6652 0.5768 0.4741 +vn 0.7097 0.7044 0.0071 +vn 0.7201 0.6939 0.0040 +vn 0.7327 0.6789 -0.0469 +vn 0.6892 0.7181 -0.0964 +vn 0.5360 0.7154 -0.4483 +vn 0.7165 0.5950 -0.3641 +vn 0.3405 0.6250 -0.7024 +vn 0.6321 0.6018 -0.4881 +vn 0.2815 0.1540 -0.9471 +vn 0.7384 0.4745 -0.4792 +vn 0.6569 -0.4511 -0.6042 +vn 0.8822 0.3539 -0.3105 +vn 0.9430 -0.2463 0.2240 +vn 0.9573 0.2605 0.1252 +vn 0.7333 0.6795 -0.0248 +vn 0.5356 0.8381 -0.1035 +vn 0.6903 0.6241 -0.3660 +vn 0.7371 0.5622 -0.3750 +vn 0.6519 0.5786 -0.4901 +vn 0.6132 0.6109 -0.5009 +vn 0.8831 0.2829 -0.3742 +vn 0.9579 0.2867 0.0165 +vn 0.7026 0.5163 -0.4897 +vn 0.7171 0.4689 -0.5156 +vn 0.8361 0.4009 -0.3745 +vn 0.8753 0.3492 -0.3344 +vn 0.9629 0.2456 0.1122 +vn 0.9387 0.3411 0.0492 +vn 0.9234 0.0807 -0.3752 +vn 0.9967 0.0807 0.0098 +vn 0.8940 0.2472 -0.3736 +vn 0.9757 0.2181 0.0225 +vn 0.9998 0.0154 0.0127 +vn 0.9269 -0.0158 -0.3750 +vn 0.9999 -0.0147 0.0071 +vn 0.9248 -0.0458 -0.3778 +vn 0.9990 -0.0441 0.0069 +vn 0.7112 -0.0399 -0.7018 +vn 0.7133 -0.0114 -0.7008 +vn 0.3901 -0.0272 -0.9204 +vn 0.7111 0.0786 -0.6987 +vn 0.3910 -0.0011 -0.9204 +vn 0.0082 -0.0115 -0.9999 +vn 0.0115 0.0124 -0.9999 +vn 0.3908 0.0808 -0.9169 +vn 0.0151 0.0880 -0.9960 +vn 0.6820 0.2562 -0.6850 +vn 0.3771 0.2437 -0.8935 +vn 0.0167 0.2421 -0.9701 +vn 0.6026 0.4733 -0.6425 +vn 0.3420 0.4458 -0.8272 +vn 0.0126 0.4369 -0.8994 +vn 0.5401 0.5629 -0.6256 +vn 0.3107 0.5578 -0.7696 +vn 0.0094 0.5531 -0.8330 +vn 0.5532 0.5416 -0.6330 +vn 0.3113 0.5515 -0.7739 +vn 0.0091 0.5507 -0.8347 +vn 0.6193 0.4387 -0.6511 +vn 0.3378 0.4506 -0.8263 +vn 0.0115 0.4489 -0.8935 +vn 0.6740 0.2702 -0.6876 +vn 0.3644 0.2742 -0.8900 +vn 0.0149 0.2708 -0.9625 +vn 0.9225 0.0308 -0.3847 +vn 0.9724 -0.2335 0.0020 +vn 0.6965 0.0422 -0.7163 +vn 0.3755 0.0443 -0.9258 +vn 0.0186 0.0414 -0.9990 +vn 0.8968 -0.2214 -0.3830 +vn 0.8889 -0.4581 -0.0076 +vn 0.6769 -0.2106 -0.7053 +vn 0.3661 -0.2040 -0.9079 +vn 0.0233 -0.2049 -0.9785 +vn 0.8202 -0.4463 -0.3578 +vn 0.7971 -0.6037 -0.0136 +vn 0.3405 -0.4217 -0.8404 +vn 0.0294 -0.4209 -0.9066 +vn 0.6216 -0.4330 -0.6528 +vn 0.7363 -0.5936 -0.3248 +vn 0.7452 -0.6666 -0.0189 +vn 0.3146 -0.5692 -0.7597 +vn 0.0353 -0.5689 -0.8216 +vn 0.5625 -0.5808 -0.5885 +vn 0.6870 -0.6589 -0.3065 +vn 0.7398 -0.6722 -0.0276 +vn 0.3013 -0.6413 -0.7057 +vn 0.0389 -0.6436 -0.7644 +vn 0.5280 -0.6496 -0.5470 +vn 0.6898 -0.6547 -0.3091 +vn 0.7550 -0.6535 -0.0548 +vn 0.3115 -0.6378 -0.7044 +vn 0.0461 -0.6443 -0.7634 +vn 0.5357 -0.6449 -0.5451 +vn 0.7047 -0.6245 -0.3368 +vn 0.7366 -0.6731 -0.0665 +vn 0.3410 -0.6005 -0.7233 +vn 0.0530 -0.6114 -0.7895 +vn 0.5660 -0.6070 -0.5578 +vn 0.6907 -0.6371 -0.3421 +vn 0.6570 -0.7490 -0.0853 +vn 0.3385 -0.6123 -0.7145 +vn 0.0549 -0.6283 -0.7760 +vn 0.5561 -0.6145 -0.5596 +vn 0.6231 -0.7050 -0.3387 +vn 0.3262 -0.6805 -0.6561 +vn 0.0587 -0.7071 -0.7047 +vn 0.0636 -0.8245 -0.5623 +vn 0.5162 -0.6736 -0.5289 +vn 0.4881 -0.8049 -0.3375 +vn 0.5017 -0.8589 -0.1031 +vn 0.2948 -0.9497 -0.1053 +vn 0.3055 -0.7899 -0.5318 +vn 0.4790 -0.7212 -0.5004 +vn 0.3045 -0.8788 -0.3674 +vn 0.0443 
-0.9938 -0.1022 +vn 0.0597 -0.9320 -0.3576 +vn 0.0304 -0.9824 -0.1842 +vn 0.2948 -0.9436 -0.1507 +vn 0.0461 -0.9964 0.0713 +vn 0.2787 -0.9573 0.0774 +vn 0.4947 -0.8583 -0.1362 +vn 0.2892 -0.9199 0.2650 +vn 0.0469 -0.9626 0.2670 +vn 0.0586 -0.8821 0.4674 +vn 0.4612 -0.8803 0.1115 +vn 0.6118 -0.7776 0.1449 +vn 0.6581 -0.7454 -0.1064 +vn 0.7382 -0.6696 -0.0818 +vn 0.4529 -0.8419 0.2934 +vn 0.3011 -0.8447 0.4426 +vn 0.6820 -0.7058 0.1918 +vn 0.7595 -0.6468 -0.0697 +vn 0.4860 -0.8065 0.3366 +vn 0.3090 -0.7776 0.5476 +vn 0.0350 -0.8059 0.5910 +vn 0.5314 -0.7408 0.4109 +vn 0.7027 -0.6791 0.2120 +vn 0.7507 -0.6594 -0.0410 +vn 0.3081 -0.7404 0.5974 +vn 0.0295 -0.7646 0.6438 +vn 0.5426 -0.7108 0.4476 +vn 0.6912 -0.6802 0.2440 +vn 0.7482 -0.6629 -0.0283 +vn 0.2991 -0.7158 0.6310 +vn 0.0191 -0.7334 0.6795 +vn 0.5342 -0.6979 0.4771 +vn 0.6937 -0.6719 0.2594 +vn 0.7981 -0.6023 -0.0149 +vn 0.2975 -0.6919 0.6578 +vn 0.0150 -0.7028 0.7112 +vn 0.5371 -0.6812 0.4975 +vn 0.7360 -0.6119 0.2897 +vn 0.8889 -0.4581 -0.0078 +vn 0.3056 -0.6338 0.7106 +vn 0.0048 -0.6433 0.7656 +vn 0.5632 -0.6224 0.5435 +vn 0.8173 -0.4699 0.3334 +vn 0.9723 -0.2336 0.0020 +vn 0.3328 -0.4962 0.8019 +vn -0.0057 -0.5050 0.8631 +vn 0.6217 -0.4834 0.6163 +vn 0.8930 -0.2457 0.3770 +vn 0.9998 0.0153 0.0129 +vn 0.3592 -0.2708 0.8931 +vn -0.0137 -0.2786 0.9603 +vn 0.6774 -0.2587 0.6886 +vn 0.9158 0.0057 0.4017 +vn 0.9757 0.2180 0.0226 +vn 0.3655 -0.0109 0.9308 +vn -0.0187 -0.0176 0.9997 +vn 0.6931 -0.0019 0.7208 +vn 0.8843 0.2217 0.4109 +vn 0.9394 0.3425 0.0123 +vn 0.8220 0.3754 0.4283 +vn 0.9676 0.2518 0.0199 +vn 0.8480 0.3223 0.4206 +vn 0.7423 0.3984 0.5388 +vn 0.6646 0.2299 0.7109 +vn 0.3492 0.2291 0.9086 +vn -0.0212 0.2248 0.9742 +vn 0.6831 0.4792 0.5511 +vn 0.5893 0.5683 0.5742 +vn 0.6038 0.4007 0.6891 +vn 0.3181 0.4115 0.8541 +vn -0.0222 0.4105 0.9116 +vn 0.6334 0.5397 0.5546 +vn 0.6608 0.5901 0.4638 +vn 0.7340 0.4951 0.4649 +vn 0.6148 0.7872 -0.0486 +vn 0.7135 0.7000 0.0302 +vn 0.9586 0.2842 0.0192 +vn 0.5336 0.5015 0.6810 +vn 0.2883 0.5121 0.8091 +vn -0.0224 0.5125 0.8584 +vn 0.5200 0.5197 0.6779 +vn 0.2858 0.5111 0.8106 +vn -0.0209 0.5059 0.8624 +vn 0.8706 0.2670 0.4133 +vn 0.9968 0.0788 0.0136 +vn 0.5812 0.4316 0.6899 +vn 0.3156 0.3964 0.8621 +vn -0.0186 0.3878 0.9216 +vn 0.9134 0.0755 0.4001 +vn 0.9998 -0.0160 0.0106 +vn 0.6612 0.2301 0.7141 +vn 0.3508 0.2125 0.9120 +vn -0.0104 0.2111 0.9774 +vn 0.9193 -0.0104 0.3934 +vn 0.9991 -0.0421 0.0082 +vn 0.9218 -0.0352 0.3860 +vn 0.6928 0.0694 0.7178 +vn 0.3679 0.0694 0.9273 +vn -0.0096 0.0769 0.9970 +vn 0.6987 -0.0019 0.7154 +vn 0.7043 -0.0211 0.7096 +vn 0.3726 0.0106 0.9279 +vn -0.0085 0.0255 0.9996 +vn 0.3793 -0.0038 0.9253 +vn -0.0068 0.0133 0.9999 +vn 0.4067 -0.7382 -0.5383 +vn 0.5166 -0.8535 -0.0686 +vn 0.6212 -0.1595 -0.7672 +vn 0.9856 -0.0229 -0.1678 +vn 0.6733 -0.7383 0.0394 +vn 0.4342 0.4365 -0.7880 +vn 0.9991 -0.0173 0.0380 +vn 0.6819 -0.7280 0.0708 +vn 0.5375 0.8398 -0.0763 +vn 0.9960 -0.0136 0.0888 +vn 0.6807 -0.7274 0.0869 +vn 0.2413 0.9436 -0.2268 +vn 0.0024 0.7055 -0.7087 +vn 0.0427 0.9211 -0.3871 +vn 0.3207 0.9472 -0.0067 +vn 0.0440 0.9990 0.0085 +vn 0.7103 0.7032 0.0308 +vn 0.3336 0.9421 0.0356 +vn 0.0389 0.9989 0.0277 +vn 0.9938 -0.0124 0.1105 +vn 0.6759 -0.7300 0.1012 +vn 0.7127 0.6989 0.0604 +vn 0.3363 0.9411 0.0348 +vn 0.0367 0.9991 0.0226 +vn 0.9916 -0.0127 0.1289 +vn 0.6708 -0.7330 0.1132 +vn 0.7098 0.7007 0.0720 +vn 0.3331 0.9422 0.0360 +vn 0.0350 0.9992 0.0176 +vn 0.9893 -0.0134 0.1455 +vn 0.6654 -0.7356 0.1273 +vn 0.7054 0.7040 0.0827 +vn 0.3272 0.9442 0.0379 +vn 
0.0337 0.9993 0.0140 +vn 0.9860 -0.0137 0.1662 +vn 0.6601 -0.7380 0.1400 +vn 0.7003 0.7079 0.0926 +vn 0.3203 0.9464 0.0408 +vn 0.0325 0.9994 0.0125 +vn 0.9829 -0.0137 0.1837 +vn 0.6557 -0.7407 0.1463 +vn 0.6949 0.7113 0.1059 +vn 0.3138 0.9484 0.0458 +vn 0.0314 0.9994 0.0122 +vn 0.9810 -0.0153 0.1933 +vn 0.6521 -0.7427 0.1520 +vn 0.6908 0.7135 0.1174 +vn 0.3084 0.9500 0.0493 +vn 0.0303 0.9995 0.0110 +vn 0.9794 -0.0149 0.2016 +vn 0.6485 -0.7449 0.1569 +vn 0.6876 0.7153 0.1244 +vn 0.3039 0.9513 0.0517 +vn 0.0297 0.9995 0.0110 +vn 0.9778 -0.0167 0.2087 +vn 0.6429 -0.7438 0.1827 +vn 0.6845 0.7176 0.1285 +vn 0.2999 0.9525 0.0524 +vn 0.0291 0.9995 0.0101 +vn 0.9782 0.0001 0.2077 +vn 0.6335 -0.7398 0.2267 +vn 0.6820 0.7192 0.1328 +vn 0.2965 0.9536 0.0525 +vn 0.0290 0.9995 0.0086 +vn 0.9792 -0.0025 0.2030 +vn 0.6270 -0.7347 0.2589 +vn 0.6836 0.7227 0.1021 +vn 0.2990 0.9541 0.0165 +vn 0.0313 0.9991 -0.0281 +vn 0.9796 -0.0027 0.2010 +vn 0.6198 -0.7239 0.3030 +vn 0.6895 0.7223 0.0537 +vn 0.3044 0.9515 -0.0455 +vn 0.0402 0.9950 -0.0911 +vn 0.9808 0.0093 0.1947 +vn 0.6095 -0.7031 0.3663 +vn 0.6927 0.7210 0.0179 +vn 0.3078 0.9470 -0.0919 +vn 0.0463 0.9894 -0.1378 +vn 0.9825 0.0141 0.1856 +vn 0.6015 -0.6809 0.4179 +vn 0.6972 0.7160 -0.0340 +vn 0.3137 0.9369 -0.1546 +vn 0.0527 0.9781 -0.2014 +vn 0.9833 0.0094 0.1820 +vn 0.5969 -0.6656 0.4481 +vn 0.7049 0.7013 -0.1063 +vn 0.3218 0.9150 -0.2432 +vn 0.0628 0.9545 -0.2914 +vn 0.9837 0.0075 0.1797 +vn 0.5904 -0.6499 0.4786 +vn 0.7113 0.6835 -0.1639 +vn 0.3266 0.8900 -0.3182 +vn 0.0721 0.9275 -0.3668 +vn 0.9851 0.0119 0.1718 +vn 0.5820 -0.6421 0.4990 +vn 0.7143 0.6704 -0.2008 +vn 0.3268 0.8708 -0.3672 +vn 0.0781 0.9060 -0.4161 +vn 0.9862 0.0019 0.1654 +vn 0.5754 -0.6519 0.4939 +vn 0.7154 0.6546 -0.2443 +vn 0.3253 0.8466 -0.4212 +vn 0.0851 0.8768 -0.4732 +vn 0.9911 -0.0138 0.1322 +vn 0.5599 -0.6992 0.4446 +vn 0.7152 0.6383 -0.2847 +vn 0.3176 0.8210 -0.4745 +vn 0.0911 0.8416 -0.5323 +vn 0.7131 0.6134 -0.3395 +vn 0.2970 0.7800 -0.5508 +vn 0.9960 -0.0808 0.0392 +vn 0.7023 0.5573 -0.4429 +vn 0.6755 0.4151 -0.6095 +vn 0.1983 0.7329 -0.6508 +vn 0.1177 0.5982 -0.7926 +vn 0.1045 0.3355 -0.9362 +vn 0.9588 -0.2302 -0.1664 +vn 0.5357 -0.7989 0.2735 +vn 0.4187 -0.9079 0.0213 +vn 0.7805 -0.4212 -0.4619 +vn 0.2731 -0.9558 -0.1089 +vn 0.4771 -0.5449 -0.6895 +vn -0.0181 -0.9917 -0.1270 +vn 0.0462 -0.5794 -0.8137 +vn 0.9585 -0.2296 -0.1691 +vn 0.7796 -0.4195 -0.4650 +vn 0.5527 0.2204 -0.8037 +vn 0.1130 0.0245 -0.9933 +vn 0.4743 -0.5434 -0.6926 +vn 0.4335 -0.0197 -0.9009 +vn 0.0461 -0.5789 -0.8141 +vn 0.0459 -0.5793 -0.8138 +vn -0.0199 -0.9925 -0.1208 +vn -0.3955 -0.4988 -0.7712 +vn -0.2294 -0.9728 0.0320 +vn -0.4142 -0.9100 0.0180 +vn -0.6903 -0.3630 -0.6258 +vn -0.3883 -0.8843 0.2592 +vn -0.8439 -0.2379 -0.4809 +vn -0.0511 -0.9825 0.1789 +vn -0.0696 -0.9420 0.3284 +vn 0.0161 -0.8788 0.4769 +vn -0.7705 -0.6355 -0.0490 +vn -0.8861 -0.1923 -0.4216 +vn -0.4771 -0.8468 0.2351 +vn -0.2564 -0.8926 0.3708 +vn -0.0792 -0.9097 0.4076 +vn 0.1203 -0.8509 0.5114 +vn 0.1460 -0.8271 0.5427 +vn -0.0886 -0.8608 0.5011 +vn -0.0868 -0.8739 0.4783 +vn 0.1472 -0.8368 0.5274 +vn -0.0862 -0.8696 0.4862 +vn 0.1611 -0.8436 0.5122 +vn 0.1742 -0.8633 0.4736 +vn -0.0822 -0.8885 0.4515 +vn 0.1827 -0.8818 0.4347 +vn -0.0784 -0.9077 0.4123 +vn 0.1929 -0.9111 0.3642 +vn -0.0697 -0.9382 0.3391 +vn 0.2059 -0.9384 0.2775 +vn -0.0594 -0.9673 0.2467 +vn 0.2163 -0.9516 0.2185 +vn -0.0530 -0.9816 0.1834 +vn 0.2246 -0.9584 0.1759 +vn -0.0469 -0.9892 0.1387 +vn 0.2354 -0.9650 0.1158 +vn -0.0377 -0.9965 0.0745 +vn 
0.2437 -0.9664 0.0820 +vn -0.0354 -0.9987 0.0378 +vn 0.2474 -0.9655 0.0809 +vn -0.0350 -0.9987 0.0382 +vn 0.2517 -0.9646 0.0788 +vn -0.0353 -0.9987 0.0378 +vn 0.2561 -0.9636 0.0770 +vn -0.0353 -0.9986 0.0382 +vn 0.2611 -0.9627 0.0717 +vn -0.0355 -0.9987 0.0374 +vn 0.2669 -0.9615 0.0661 +vn -0.0359 -0.9987 0.0374 +vn 0.2730 -0.9601 0.0606 +vn -0.0365 -0.9987 0.0365 +vn 0.2779 -0.9591 0.0538 +vn -0.0371 -0.9987 0.0352 +vn 0.2789 -0.9591 0.0477 +vn -0.0384 -0.9986 0.0359 +vn 0.2713 -0.9614 0.0465 +vn -0.0413 -0.9984 0.0396 +vn 0.2177 -0.9757 -0.0234 +vn 0.0323 -0.9994 -0.0093 +vn 0.0370 -0.9841 -0.1735 +vn 0.6701 -0.7335 0.1135 +vn 0.6645 -0.7364 0.1275 +vn 0.6753 -0.7306 0.1015 +vn 0.6800 -0.7280 0.0874 +vn 0.6593 -0.7387 0.1402 +vn 0.6814 -0.7284 0.0715 +vn 0.6555 -0.7409 0.1462 +vn 0.6737 -0.7379 0.0405 +vn 0.6511 -0.7436 0.1520 +vn 0.5180 -0.8528 -0.0669 +vn 0.6479 -0.7454 0.1566 +vn 0.6356 -0.7498 0.1839 +vn 0.6294 -0.7429 0.2278 +vn 0.6239 -0.7370 0.2600 +vn 0.6122 -0.7293 0.3055 +vn 0.6020 -0.7079 0.3695 +vn 0.5986 -0.6819 0.4203 +vn 0.5952 -0.6658 0.4500 +vn 0.5861 -0.6518 0.4813 +vn 0.5821 -0.6409 0.5004 +vn 0.1222 -0.8511 0.5105 +vn 0.5752 -0.6515 0.4947 +vn 0.5594 -0.6991 0.4454 +vn 0.0167 -0.8792 0.4762 +vn 0.5341 -0.7990 0.2762 +vn -0.0701 -0.9419 0.3285 +vn 0.4131 -0.9103 0.0279 +vn -0.0505 -0.9825 0.1793 +vn 0.2685 -0.9583 -0.0974 +vn -0.0199 -0.9931 -0.1152 +vn -0.0268 -0.8777 -0.4785 +vn 0.4101 -0.7380 -0.5359 +vn 0.1641 -0.9786 -0.1241 +vn -0.8450 -0.2352 -0.4802 +vn -0.8861 -0.1919 -0.4219 +vn -0.6834 0.2859 -0.6717 +vn -0.2069 0.5720 -0.7937 +vn -0.1845 0.6545 -0.7332 +vn -0.3355 0.5895 -0.7348 +vn 0.1022 0.7442 -0.6601 +vn 0.3003 0.7785 -0.5512 +vn 0.0928 0.8266 -0.5550 +vn 0.3209 0.8107 -0.4897 +vn 0.1980 0.7330 -0.6507 +vn 0.1179 0.5982 -0.7926 +vn -0.1925 0.3917 -0.8997 +vn -0.6891 -0.3639 -0.6267 +vn 0.1056 0.3361 -0.9359 +vn -0.1133 0.2207 -0.9687 +vn -0.3930 -0.4997 -0.7719 +vn 0.1140 0.0260 -0.9931 +vn 0.0460 -0.5789 -0.8141 +vn 0.0362 -0.0981 -0.9945 +vn 0.0003 -0.7292 -0.6843 +vn 0.3294 -0.1115 -0.9376 +vn 0.2173 -0.7440 -0.6319 +vn 0.6236 -0.1162 -0.7731 +vn -0.0177 -0.8497 -0.5270 +vn 0.4104 -0.7582 -0.5067 +vn 0.8538 -0.1154 -0.5077 +vn 0.1545 -0.8603 -0.4859 +vn -0.0245 -0.8312 -0.5555 +vn 0.5516 -0.7694 -0.3221 +vn 0.9788 -0.1152 -0.1695 +vn 0.1658 -0.8391 -0.5181 +vn -0.0195 -0.7307 -0.6824 +vn 0.3023 -0.8709 -0.3875 +vn 0.6227 -0.7762 -0.0993 +vn 0.9739 -0.1188 0.1935 +vn 0.2332 -0.7231 -0.6502 +vn -0.0077 -0.5957 -0.8032 +vn 0.3434 -0.4914 -0.8004 +vn 0.3291 -0.8465 -0.4185 +vn 0.4075 -0.8796 -0.2456 +vn 0.6140 -0.7777 0.1346 +vn 0.8398 -0.1258 0.5281 +vn 0.4474 -0.7171 -0.5344 +vn 0.4463 -0.8539 -0.2678 +vn 0.4591 -0.8850 -0.0771 +vn 0.5270 -0.7740 0.3511 +vn 0.6029 -0.1321 0.7868 +vn 0.5995 -0.4498 -0.6620 +vn 0.5115 -0.0374 -0.8585 +vn 0.4526 -0.8863 0.0986 +vn 0.5047 -0.8591 -0.0851 +vn 0.6018 -0.7204 -0.3447 +vn 0.3736 -0.7650 0.5246 +vn 0.3051 -0.1309 0.9433 +vn 0.7004 -0.0403 -0.7127 +vn 0.3960 0.4225 -0.8153 +vn 0.6488 0.3666 -0.6668 +vn 0.3889 -0.8831 0.2625 +vn 0.4986 -0.8602 0.1071 +vn 0.6802 -0.7251 -0.1078 +vn 0.7871 -0.4439 -0.4282 +vn 0.8857 -0.0461 -0.4620 +vn 0.8352 0.3467 -0.4268 +vn 0.8848 -0.4472 -0.1312 +vn 0.9889 -0.0516 -0.1394 +vn 0.9323 0.3388 -0.1265 +vn 0.6723 -0.7262 0.1438 +vn 0.4288 -0.8567 0.2866 +vn 0.8744 -0.4480 0.1864 +vn 0.9771 -0.0527 0.2063 +vn 0.9215 0.3372 0.1926 +vn 0.5794 -0.7219 0.3784 +vn 0.2796 -0.8759 0.3932 +vn 0.7559 -0.4430 0.4821 +vn 0.8477 -0.0475 0.5284 +vn 0.8024 0.3418 0.4893 +vn 0.3041 -0.8490 0.4321 
+vn 0.1848 -0.7523 0.6324 +vn -0.0430 -0.1147 0.9925 +vn -0.0552 -0.7370 0.6736 +vn 0.1221 -0.8652 0.4864 +vn -0.0560 -0.8527 0.5193 +vn 0.1353 -0.8378 0.5290 +vn -0.0603 -0.8246 0.5626 +vn 0.4124 -0.7125 0.5677 +vn 0.1905 -0.6990 0.6893 +vn -0.0609 -0.6831 0.7278 +vn 0.5437 -0.4320 0.7196 +vn 0.2629 -0.4164 0.8703 +vn -0.0525 -0.3981 0.9158 +vn 0.6160 -0.0362 0.7869 +vn 0.5891 0.3519 0.7274 +vn 0.3101 -0.0200 0.9505 +vn -0.0332 -0.0008 0.9994 +vn 0.3074 0.3667 0.8781 +vn -0.0089 0.3841 0.9232 +vn -0.0121 -0.5946 -0.8039 +vn -0.0175 -0.6252 -0.7803 +vn 0.3437 -0.4903 -0.8009 +vn 0.3206 -0.5110 -0.7976 +vn -0.0420 -0.8720 -0.4877 +vn 0.4043 -0.7428 -0.5336 +vn 0.0590 0.5586 -0.8274 +vn 0.0633 0.7071 -0.7043 +vn 0.3959 0.4223 -0.8155 +vn 0.3192 0.6727 -0.6675 +vn 0.6488 0.3665 -0.6668 +vn 0.0653 0.8455 -0.5299 +vn 0.5359 0.6464 -0.5431 +vn 0.2533 0.8310 -0.4952 +vn 0.0645 0.9318 -0.3572 +vn 0.6929 0.6334 -0.3446 +vn 0.4135 0.8180 -0.3998 +vn 0.1979 0.9216 -0.3340 +vn 0.0622 0.9741 -0.2172 +vn 0.7724 0.6272 -0.1001 +vn 0.9215 0.3373 0.1926 +vn 0.5272 0.8115 -0.2519 +vn 0.3075 0.9112 -0.2743 +vn 0.1629 0.9644 -0.2084 +vn 0.0597 0.9919 -0.1121 +vn 0.7637 0.6259 0.1580 +vn 0.8024 0.3417 0.4893 +vn 0.5840 0.8086 -0.0715 +vn 0.3766 0.9102 -0.1726 +vn 0.1570 0.9813 -0.1114 +vn 0.0573 0.9979 -0.0300 +vn 0.2399 0.9509 -0.1956 +vn 0.6679 0.6290 0.3978 +vn 0.5776 0.8077 0.1184 +vn 0.4104 0.9107 -0.0475 +vn 0.2577 0.9584 -0.1228 +vn 0.4963 0.6366 0.5902 +vn 0.5090 0.8087 0.2950 +vn 0.2696 0.9624 -0.0326 +vn 0.4059 0.9101 0.0838 +vn 0.2689 0.6487 0.7120 +vn 0.0126 0.6630 0.7485 +vn 0.3856 0.8130 0.4363 +vn 0.2190 0.8228 0.5245 +vn 0.0289 0.8341 0.5508 +vn 0.3637 0.9083 0.2064 +vn 0.1747 0.9177 0.3567 +vn 0.0401 0.9274 0.3718 +vn 0.2881 0.9082 0.3036 +vn 0.2664 0.9620 0.0601 +vn 0.1481 0.9622 0.2286 +vn 0.0472 0.9719 0.2308 +vn 0.2258 0.9489 0.2207 +vn 0.2484 0.9571 0.1493 +vn 0.1487 0.9801 0.1314 +vn 0.0518 0.9907 0.1258 +vn 0.0549 0.9975 0.0437 +vn 0.1542 0.9868 0.0488 +vn 0.1569 0.9872 -0.0285 +vn 0.3435 -0.4908 -0.8007 +vn 0.3199 -0.5125 -0.7969 +vn 0.5124 -0.0375 -0.8580 +vn 0.5056 -0.0410 -0.8618 +vn 0.4039 -0.7459 -0.5296 +vn 0.3965 0.4232 -0.8146 +vn 0.6362 -0.1562 -0.7556 +vn 0.3892 0.3991 -0.8302 +vn 0.0584 0.5596 -0.8267 +vn 0.4397 0.4508 -0.7768 +vn 0.0600 0.5947 -0.8017 +vn 0.0211 0.7085 -0.7054 +vn 0.2121 -0.9680 -0.1341 +vn -0.0264 -0.9896 -0.1413 +vn -0.0244 -0.9272 -0.3739 +vn 0.1973 -0.9099 -0.3650 +vn -0.0167 -0.8391 -0.5438 +vn 0.4149 -0.9005 -0.1299 +vn 0.2011 -0.8302 -0.5200 +vn -0.0101 -0.7286 -0.6849 +vn 0.3974 -0.8581 -0.3252 +vn 0.5755 -0.8053 -0.1428 +vn 0.2232 -0.7349 -0.6403 +vn -0.0193 -0.5909 -0.8065 +vn 0.5271 -0.7782 -0.3413 +vn 0.7236 -0.6708 -0.1625 +vn 0.3713 -0.7974 -0.4757 +vn 0.2519 -0.6016 -0.7580 +vn -0.0195 -0.4171 -0.9086 +vn 0.4174 -0.7365 -0.5323 +vn 0.6375 -0.6521 -0.4103 +vn 0.2845 -0.4300 -0.8568 +vn -0.0190 -0.1942 -0.9808 +vn 0.4767 -0.6182 -0.6250 +vn 0.7402 -0.4632 -0.4874 +vn 0.8596 -0.4746 -0.1894 +vn 0.5359 -0.4446 -0.7177 +vn 0.3116 -0.2069 -0.9274 +vn 0.0315 0.0971 -0.9948 +vn 0.8184 -0.1906 -0.5421 +vn 0.9570 -0.1994 -0.2106 +vn 0.5875 -0.1783 -0.7893 +vn 0.3222 0.0913 -0.9423 +vn -0.0037 0.3940 -0.9191 +vn 0.8254 0.0814 -0.5586 +vn 0.9735 0.0778 -0.2152 +vn 0.8935 0.4046 -0.1947 +vn 0.5966 0.0861 -0.7979 +vn 0.3042 0.3923 -0.8681 +vn 0.0090 0.6495 -0.7603 +vn 0.7603 0.3989 -0.5127 +vn 0.5564 0.3951 -0.7309 +vn 0.2693 0.6241 -0.7335 +vn 0.0181 0.8027 -0.5961 +vn 0.6468 0.6378 -0.4182 +vn 0.7425 0.6505 -0.1598 +vn 0.4835 0.6291 -0.6087 +vn 0.2141 
0.8020 -0.5576 +vn 0.0246 0.8928 -0.4498 +vn 0.3759 0.8050 -0.4590 +vn 0.4975 0.8106 -0.3089 +vn 0.5602 0.8211 -0.1093 +vn 0.1674 0.8919 -0.4201 +vn 0.0275 0.9300 -0.3666 +vn 0.1373 0.9293 -0.3428 +vn 0.2896 0.8938 -0.3424 +vn 0.2328 0.9321 -0.2775 +vn 0.3028 0.9360 -0.1796 +vn 0.3782 0.8978 -0.2255 +vn 0.4200 0.9044 -0.0753 +vn 0.3397 0.9387 -0.0584 +vn 0.9815 -0.0755 -0.1762 +vn 0.6000 0.7934 -0.1024 +vn 0.8574 -0.0805 -0.5083 +vn 0.5331 0.7848 -0.3162 +vn 0.6264 -0.0908 -0.7742 +vn 0.3029 0.9518 -0.0485 +vn 0.4051 0.7687 -0.4950 +vn 0.3310 -0.0993 -0.9384 +vn 0.2717 0.9492 -0.1586 +vn 0.3346 0.9408 -0.0549 +vn 0.2985 0.9379 -0.1767 +vn 0.2115 0.9449 -0.2498 +vn 0.2303 0.9336 -0.2746 +vn 0.2300 0.7532 -0.6163 +vn -0.0007 -0.0991 -0.9951 +vn 0.0279 0.7491 -0.6619 +vn 0.1286 0.9410 -0.3131 +vn 0.1373 0.9300 -0.3411 +vn 0.0318 0.9402 -0.3391 +vn 0.0299 0.9293 -0.3682 +vn -0.0186 -0.7358 0.6769 +vn 0.2444 -0.6138 0.7507 +vn -0.0121 -0.6039 0.7970 +vn 0.2156 -0.7417 0.6351 +vn -0.0236 -0.8502 0.5259 +vn 0.1950 -0.8362 0.5126 +vn -0.0259 -0.9314 0.3632 +vn 0.1928 -0.9139 0.3573 +vn -0.0273 -0.9904 0.1353 +vn -0.0264 -0.9896 -0.1414 +vn 0.2120 -0.9680 -0.1341 +vn 0.2105 -0.9690 0.1292 +vn -0.0259 -0.9313 0.3632 +vn 0.4136 -0.9017 0.1262 +vn 0.5714 -0.8089 0.1382 +vn 0.3812 -0.8561 0.3490 +vn 0.7235 -0.6703 0.1651 +vn 0.3670 -0.8040 0.4678 +vn 0.1931 -0.8365 0.5127 +vn 0.4115 -0.7435 0.5271 +vn 0.5095 -0.7697 0.3847 +vn 0.6199 -0.6491 0.4408 +vn 0.7256 -0.4653 0.5069 +vn 0.8582 -0.4736 0.1982 +vn 0.8048 -0.1945 0.5608 +vn 0.9539 -0.2010 0.2230 +vn 0.8174 0.0771 0.5709 +vn 0.9707 0.0761 0.2280 +vn 0.8910 0.4033 0.2084 +vn 0.7545 0.3946 0.5244 +vn 0.7404 0.6487 0.1758 +vn 0.6401 0.6323 0.4364 +vn 0.5586 0.8200 0.1249 +vn 0.4200 0.9044 -0.0756 +vn 0.4714 0.6203 0.6269 +vn 0.4906 0.8067 0.3294 +vn 0.4187 0.9037 0.0894 +vn 0.3388 0.9390 -0.0593 +vn 0.3380 0.9385 0.0702 +vn 0.3733 0.8958 0.2412 +vn 0.2994 0.9347 0.1918 +vn 0.2806 0.8899 0.3596 +vn 0.2272 0.9295 0.2905 +vn 0.1316 0.9261 0.3535 +vn 0.3625 0.7978 0.4817 +vn 0.1581 0.8873 0.4333 +vn 0.0236 0.9269 0.3746 +vn 0.0231 0.8881 0.4591 +vn 0.1996 0.7947 0.5733 +vn 0.0250 0.7958 0.6050 +vn 0.2487 0.6424 0.7249 +vn 0.0262 0.6442 0.7644 +vn -0.0152 0.3920 0.9198 +vn 0.0192 0.0906 0.9957 +vn 0.3114 0.0845 0.9465 +vn 0.3009 -0.2154 0.9290 +vn 0.0085 -0.2041 0.9789 +vn 0.2745 -0.4384 0.8558 +vn -0.0314 -0.4259 0.9042 +vn -0.0122 -0.6040 0.7969 +vn -0.0054 -0.1118 0.9937 +vn 0.0193 0.7402 0.6721 +vn 0.3065 -0.1119 0.9453 +vn 0.2207 0.7447 0.6298 +vn 0.6058 -0.1016 0.7891 +vn 0.0235 0.9381 0.3456 +vn 0.3982 0.7618 0.5110 +vn 0.8436 -0.0873 0.5299 +vn 0.1213 0.9386 0.3229 +vn 0.0228 0.9271 0.3740 +vn 0.1314 0.9276 0.3496 +vn 0.2072 0.9428 0.2612 +vn 0.2246 0.9316 0.2857 +vn 0.5290 0.7806 0.3330 +vn 0.9779 -0.0769 0.1945 +vn 0.2687 0.9479 0.1710 +vn 0.2947 0.9368 0.1883 +vn 0.5983 0.7922 0.1203 +vn 0.9813 -0.0754 -0.1770 +vn 0.5992 0.7936 -0.1055 +vn 0.3015 0.9514 0.0629 +vn 0.3328 0.9405 0.0688 +vn 0.3020 0.9519 -0.0525 +vn 0.3336 0.9408 -0.0592 +vn -0.9661 0.2497 -0.0659 +vn -0.8986 0.3204 0.2997 +vn -0.9835 -0.1403 -0.1143 +vn -0.7416 -0.0088 0.6708 +vn -0.5104 0.2402 0.8257 +vn -0.7413 0.4013 0.5381 +vn -0.4765 0.4864 0.7323 +vn -0.6788 0.5066 0.5316 +vn -0.6103 0.6373 0.4705 +vn -0.7185 0.5582 0.4149 +vn -0.7444 0.6591 0.1072 +vn -0.7760 0.6165 0.1331 +vn -0.7284 0.6851 0.0033 +vn -0.7857 0.6128 0.0843 +vn -0.6189 0.5815 -0.5280 +vn -0.7002 0.5175 -0.4918 +vn -0.3855 0.5496 -0.7412 +vn -0.6275 0.5696 -0.5309 +vn -0.5122 0.2759 -0.8133 +vn -0.7308 
0.4569 -0.5071 +vn -0.7202 0.0120 -0.6936 +vn -0.8820 0.3380 -0.3284 +vn -0.9897 -0.1417 -0.0214 +vn -0.9680 0.2459 0.0491 +vn -0.7765 0.6247 0.0821 +vn -0.8340 0.5516 0.0138 +vn -0.6986 0.5250 -0.4861 +vn -0.7662 0.4779 -0.4296 +vn -0.6484 0.5619 -0.5137 +vn -0.6084 0.5790 -0.5428 +vn -0.6959 0.4995 -0.5160 +vn -0.7081 0.4518 -0.5427 +vn -0.8308 0.3809 -0.4058 +vn -0.8744 0.3338 -0.3521 +vn -0.9725 0.2303 0.0359 +vn -0.9476 0.3193 0.0135 +vn -0.5312 0.5465 -0.6474 +vn -0.8704 0.2963 -0.3933 +vn -0.9536 0.3009 -0.0058 +vn -0.9921 0.1252 -0.0097 +vn -0.9107 0.1230 -0.3942 +vn -0.9981 0.0615 -0.0086 +vn -0.6627 0.2667 -0.6998 +vn -0.9187 0.0566 -0.3909 +vn -0.9988 0.0483 -0.0077 +vn -0.9190 0.0417 -0.3921 +vn -0.6914 0.1111 -0.7139 +vn -0.6996 0.0442 -0.7132 +vn -0.6990 0.0272 -0.7146 +vn -0.5897 0.4618 -0.6626 +vn -0.3527 0.2497 -0.9018 +vn -0.3684 0.0985 -0.9244 +vn -0.3741 0.0289 -0.9269 +vn -0.3728 0.0090 -0.9279 +vn 0.0092 0.0134 -0.9999 +vn 0.0070 -0.0080 -0.9999 +vn 0.0128 0.0885 -0.9960 +vn 0.0153 0.2424 -0.9701 +vn -0.3212 0.4391 -0.8390 +vn 0.0123 0.4383 -0.8987 +vn 0.0109 0.5522 -0.8336 +vn -0.2936 0.5479 -0.7834 +vn -0.2946 0.5435 -0.7860 +vn 0.0094 0.5506 -0.8347 +vn -0.5418 0.5272 -0.6546 +vn -0.3169 0.4428 -0.8387 +vn -0.6044 0.4242 -0.6743 +vn -0.3367 0.2666 -0.9031 +vn 0.0149 0.2709 -0.9625 +vn -0.8844 0.2283 -0.4071 +vn -0.9801 0.1978 -0.0144 +vn -0.9997 -0.0063 -0.0249 +vn -0.6533 0.2559 -0.7125 +vn -0.9078 0.0110 -0.4192 +vn -0.9646 -0.2615 -0.0349 +vn -0.3415 0.0369 -0.9392 +vn 0.0185 0.0422 -0.9989 +vn -0.6701 0.0275 -0.7418 +vn -0.8747 -0.2467 -0.4171 +vn -0.8690 -0.4931 -0.0413 +vn -0.3238 -0.2131 -0.9218 +vn 0.0233 -0.2036 -0.9788 +vn -0.6427 -0.2291 -0.7311 +vn -0.7870 -0.4785 -0.3896 +vn -0.7641 -0.6436 -0.0445 +vn -0.2873 -0.4333 -0.8542 +vn 0.0293 -0.4194 -0.9073 +vn -0.5767 -0.4563 -0.6776 +vn -0.6913 -0.6298 -0.3542 +vn -0.7022 -0.7103 -0.0492 +vn -0.2484 -0.5833 -0.7733 +vn 0.0369 -0.5675 -0.8225 +vn -0.5058 -0.6077 -0.6123 +vn -0.6325 -0.6981 -0.3356 +vn -0.6789 -0.7321 -0.0550 +vn -0.2221 -0.6568 -0.7206 +vn 0.0479 -0.6412 -0.7659 +vn -0.4600 -0.6789 -0.5723 +vn -0.6183 -0.7099 -0.3373 +vn -0.6655 -0.7419 -0.0819 +vn -0.2178 -0.6596 -0.7194 +vn 0.0543 -0.6397 -0.7667 +vn -0.4525 -0.6858 -0.5700 +vn -0.6086 -0.7065 -0.3611 +vn -0.6282 -0.7724 -0.0933 +vn -0.2145 -0.6352 -0.7420 +vn 0.0628 -0.6099 -0.7900 +vn -0.4480 -0.6690 -0.5931 +vn -0.5775 -0.7300 -0.3655 +vn -0.5262 -0.8435 -0.1080 +vn -0.2078 -0.6525 -0.7288 +vn 0.0677 -0.6274 -0.7758 +vn -0.4310 -0.6860 -0.5863 +vn -0.4915 -0.7950 -0.3555 +vn -0.1850 -0.7224 -0.6662 +vn 0.0793 -0.7051 -0.7047 +vn 0.0912 -0.8240 -0.5591 +vn -0.3809 -0.7471 -0.5447 +vn -0.3329 -0.8735 -0.3552 +vn -0.3460 -0.9309 -0.1169 +vn -0.1332 -0.9845 -0.1139 +vn -0.1559 -0.8303 -0.5351 +vn -0.3474 -0.7904 -0.5046 +vn -0.1439 -0.9231 -0.3565 +vn 0.1115 -0.9886 -0.1009 +vn 0.0982 -0.9306 -0.3527 +vn -0.1346 -0.9778 -0.1603 +vn 0.1281 -0.9748 -0.1827 +vn 0.1019 -0.9922 0.0713 +vn -0.1225 -0.9906 0.0607 +vn -0.3378 -0.9280 -0.1573 +vn -0.1537 -0.9567 0.2473 +vn 0.0929 -0.9596 0.2656 +vn 0.0543 -0.8821 0.4679 +vn -0.3156 -0.9442 0.0943 +vn -0.4901 -0.8641 0.1145 +vn -0.5270 -0.8393 -0.1336 +vn -0.6297 -0.7689 -0.1108 +vn -0.3320 -0.9037 0.2703 +vn -0.1914 -0.8826 0.4293 +vn -0.5796 -0.7970 0.1700 +vn -0.6701 -0.7359 -0.0974 +vn -0.3652 -0.8744 0.3195 +vn -0.2214 -0.8151 0.5353 +vn 0.0529 -0.8039 0.5924 +vn -0.4391 -0.8099 0.3888 +vn -0.6185 -0.7615 0.1940 +vn -0.6888 -0.7219 -0.0667 +vn -0.2382 -0.7734 0.5874 +vn 0.0420 -0.7617 0.6466 
+vn -0.4697 -0.7727 0.4270 +vn -0.6365 -0.7384 0.2226 +vn -0.7045 -0.7076 -0.0549 +vn -0.2526 -0.7402 0.6232 +vn 0.0239 -0.7283 0.6849 +vn -0.4848 -0.7432 0.4611 +vn -0.6591 -0.7139 0.2364 +vn -0.7654 -0.6421 -0.0438 +vn -0.2698 -0.7091 0.6514 +vn 0.0086 -0.7008 0.7133 +vn -0.5068 -0.7136 0.4837 +vn -0.7141 -0.6483 0.2643 +vn -0.8688 -0.4935 -0.0408 +vn -0.2971 -0.6484 0.7009 +vn 0.0004 -0.6426 0.7662 +vn -0.5493 -0.6499 0.5252 +vn -0.8094 -0.5025 0.3039 +vn -0.9645 -0.2618 -0.0347 +vn -0.3412 -0.5083 0.7907 +vn -0.0058 -0.5043 0.8635 +vn -0.6233 -0.5075 0.5950 +vn -0.8986 -0.2718 0.3443 +vn -0.9997 -0.0064 -0.0252 +vn -0.3833 -0.2808 0.8799 +vn -0.0137 -0.2782 0.9604 +vn -0.6939 -0.2783 0.6642 +vn -0.9304 -0.0144 0.3663 +vn -0.9801 0.1978 -0.0145 +vn -0.4001 -0.0195 0.9163 +vn -0.0188 -0.0173 0.9997 +vn -0.7197 -0.0178 0.6941 +vn -0.9038 0.2028 0.3768 +vn -0.9469 0.3207 -0.0235 +vn -0.8464 0.3555 0.3965 +vn -0.9705 0.2345 -0.0567 +vn -0.8924 0.3232 0.3149 +vn -0.7234 0.4096 0.5558 +vn -0.6959 0.2154 0.6850 +vn -0.3887 0.2210 0.8945 +vn -0.0212 0.2250 0.9741 +vn -0.7143 0.4630 0.5247 +vn -0.6658 0.5207 0.5344 +vn -0.6389 0.3860 0.6655 +vn -0.3599 0.4035 0.8412 +vn -0.0222 0.4106 0.9115 +vn -0.7021 0.4978 0.5092 +vn -0.7334 0.5566 0.3903 +vn -0.5716 0.4873 0.6601 +vn -0.3307 0.5043 0.7977 +vn -0.5600 0.5027 0.6585 +vn -0.3264 0.5009 0.8016 +vn -0.0228 0.5050 0.8628 +vn -0.0194 0.3829 0.9236 +vn -0.7877 0.4748 0.3926 +vn -0.7953 0.5966 0.1075 +vn -0.8169 0.5759 0.0323 +vn -0.9537 0.3006 -0.0075 +vn -0.3521 0.3891 0.8512 +vn -0.6142 0.4203 0.6679 +vn -0.8786 0.2796 0.3871 +vn -0.3783 0.2186 0.8995 +vn -0.0102 0.2093 0.9778 +vn -0.0097 0.0753 0.9971 +vn -0.6788 0.2401 0.6940 +vn -0.9188 0.1173 0.3770 +vn -0.9920 0.1257 -0.0114 +vn -0.3893 0.0863 0.9171 +vn -0.0086 0.0244 0.9997 +vn -0.7061 0.1010 0.7009 +vn -0.3904 0.0392 0.9198 +vn -0.0079 0.0156 0.9998 +vn -0.3869 0.0310 0.9216 +vn -0.7112 0.0515 0.7011 +vn -0.7104 0.0439 0.7024 +vn -0.9249 0.0606 0.3753 +vn -0.9980 0.0626 -0.0100 +vn -0.9255 0.0507 0.3753 +vn -0.9987 0.0508 -0.0070 +vn -0.5215 -0.8492 -0.0826 +vn -0.4301 -0.7571 -0.4917 +vn -0.6425 0.0002 -0.7663 +vn -0.9776 0.0589 -0.2022 +vn -0.7322 -0.6808 0.0196 +vn -0.4502 0.4993 -0.7403 +vn -0.9981 0.0618 0.0053 +vn -0.7375 -0.6743 0.0364 +vn -0.4021 0.9097 -0.1034 +vn -0.9974 0.0609 0.0375 +vn -0.7357 -0.6768 0.0271 +vn -0.0295 0.9702 -0.2404 +vn 0.0017 0.7033 -0.7109 +vn 0.0416 0.9206 -0.3882 +vn -0.2344 0.9720 -0.0179 +vn 0.0444 0.9990 0.0069 +vn -0.6530 0.7573 0.0053 +vn -0.2588 0.9656 0.0246 +vn 0.0391 0.9989 0.0270 +vn -0.9980 0.0586 0.0241 +vn -0.7314 -0.6818 0.0172 +vn -0.6623 0.7488 0.0246 +vn -0.2672 0.9635 0.0190 +vn 0.0368 0.9991 0.0221 +vn -0.9984 0.0561 0.0084 +vn -0.7267 -0.6869 0.0077 +vn -0.6636 0.7480 0.0136 +vn -0.2678 0.9634 0.0104 +vn 0.0352 0.9992 0.0172 +vn -0.9986 0.0530 -0.0065 +vn -0.7221 -0.6917 -0.0051 +vn -0.6613 0.7501 0.0011 +vn -0.2641 0.9645 0.0025 +vn 0.0338 0.9993 0.0136 +vn -0.9984 0.0505 -0.0252 +vn -0.7182 -0.6956 -0.0154 +vn -0.6595 0.7517 -0.0097 +vn -0.2600 0.9656 -0.0029 +vn 0.0325 0.9994 0.0121 +vn -0.9980 0.0485 -0.0410 +vn -0.7148 -0.6990 -0.0210 +vn -0.6576 0.7530 -0.0219 +vn -0.2562 0.9666 -0.0081 +vn 0.0313 0.9994 0.0119 +vn -0.9978 0.0457 -0.0489 +vn -0.7119 -0.7019 -0.0250 +vn -0.6550 0.7549 -0.0330 +vn -0.2523 0.9676 -0.0136 +vn 0.0304 0.9995 0.0108 +vn -0.9974 0.0454 -0.0556 +vn -0.7091 -0.7045 -0.0287 +vn -0.6530 0.7564 -0.0380 +vn -0.2489 0.9684 -0.0156 +vn 0.0296 0.9995 0.0109 +vn -0.9972 0.0429 -0.0613 +vn -0.7108 -0.7034 -0.0014 
+vn -0.6512 0.7577 -0.0422 +vn -0.2458 0.9692 -0.0179 +vn 0.0296 0.9995 0.0100 +vn -0.9964 0.0595 -0.0604 +vn -0.7132 -0.6992 0.0499 +vn -0.6491 0.7594 -0.0451 +vn -0.2424 0.9700 -0.0204 +vn 0.0295 0.9995 0.0087 +vn -0.9968 0.0571 -0.0562 +vn -0.7147 -0.6943 0.0849 +vn -0.6415 0.7637 -0.0723 +vn -0.2352 0.9703 -0.0558 +vn 0.0365 0.9990 -0.0277 +vn -0.9969 0.0570 -0.0546 +vn -0.7185 -0.6824 0.1345 +vn -0.6355 0.7628 -0.1194 +vn -0.2251 0.9676 -0.1148 +vn 0.0440 0.9949 -0.0906 +vn -0.9961 0.0707 -0.0534 +vn -0.7226 -0.6593 0.2080 +vn -0.6302 0.7615 -0.1517 +vn -0.2169 0.9632 -0.1587 +vn 0.0498 0.9893 -0.1373 +vn -0.9957 0.0788 -0.0497 +vn -0.7250 -0.6350 0.2666 +vn -0.6211 0.7585 -0.1971 +vn -0.2072 0.9534 -0.2195 +vn 0.0590 0.9779 -0.2007 +vn -0.9960 0.0770 -0.0443 +vn -0.7261 -0.6180 0.3013 +vn -0.6126 0.7448 -0.2646 +vn -0.1952 0.9322 -0.3048 +vn 0.0695 0.9543 -0.2906 +vn -0.9961 0.0779 -0.0404 +vn -0.7254 -0.6000 0.3374 +vn -0.6073 0.7279 -0.3183 +vn -0.1845 0.9080 -0.3762 +vn 0.0761 0.9273 -0.3664 +vn -0.9954 0.0862 -0.0411 +vn -0.7207 -0.5905 0.3632 +vn -0.6031 0.7167 -0.3501 +vn -0.1754 0.8893 -0.4224 +vn 0.0796 0.9059 -0.4159 +vn -0.9959 0.0798 -0.0417 +vn -0.7141 -0.5993 0.3618 +vn -0.5957 0.7031 -0.3884 +vn -0.1640 0.8652 -0.4739 +vn 0.0831 0.8769 -0.4735 +vn -0.9952 0.0668 -0.0719 +vn -0.6930 -0.6471 0.3179 +vn -0.5887 0.6881 -0.4242 +vn -0.1478 0.8391 -0.5235 +vn 0.0869 0.8418 -0.5327 +vn -0.5778 0.6659 -0.4720 +vn -0.1156 0.7965 -0.5935 +vn -0.9868 0.0008 -0.1618 +vn -0.5518 0.6092 -0.5695 +vn -0.5019 0.4633 -0.7304 +vn -0.0030 0.7409 -0.6716 +vn 0.0933 0.5989 -0.7954 +vn 0.1136 0.3344 -0.9356 +vn -0.9213 -0.1529 -0.3576 +vn -0.6429 -0.7503 0.1541 +vn -0.5127 -0.8472 -0.1391 +vn -0.7025 -0.3607 -0.6136 +vn -0.3448 -0.8452 -0.4083 +vn -0.3706 -0.5100 -0.7763 +vn 0.0167 -0.9330 -0.3595 +vn 0.0716 -0.5804 -0.8112 +vn -0.9203 -0.1525 -0.3602 +vn -0.7008 -0.3592 -0.6163 +vn -0.3292 0.2591 -0.9080 +vn 0.1068 -0.0519 -0.9929 +vn -0.3673 -0.5092 -0.7784 +vn -0.2108 0.0054 -0.9775 +vn 0.0718 -0.5802 -0.8113 +vn 0.0718 -0.5801 -0.8114 +vn 0.5002 -0.5353 -0.6806 +vn 0.0164 -0.9348 -0.3548 +vn 0.2921 -0.9387 -0.1833 +vn -0.0261 -0.9986 0.0459 +vn 0.3121 -0.9446 0.1015 +vn 0.7698 -0.4228 -0.4782 +vn 0.3444 -0.8949 0.2837 +vn -0.0749 -0.9418 0.3277 +vn 0.9013 -0.3089 -0.3036 +vn -0.1844 -0.8708 0.4558 +vn 0.7147 -0.6924 0.0985 +vn 0.9345 -0.2665 -0.2358 +vn 0.3517 -0.8803 0.3184 +vn 0.0896 -0.9062 0.4132 +vn -0.2901 -0.8336 0.4701 +vn -0.0769 -0.9094 0.4087 +vn -0.3188 -0.8089 0.4941 +vn -0.0839 -0.8610 0.5017 +vn 0.0151 -0.9371 -0.3486 +vn -0.0263 -0.9986 0.0466 +vn -0.3443 -0.8508 -0.3969 +vn -0.5092 -0.8505 -0.1316 +vn -0.0745 -0.9417 0.3280 +vn -0.6420 -0.7505 0.1567 +vn -0.1839 -0.8708 0.4560 +vn -0.6928 -0.6470 0.3186 +vn -0.2915 -0.8342 0.4682 +vn -0.7141 -0.5987 0.3627 +vn -0.7211 -0.5892 0.3644 +vn -0.3179 -0.8181 0.4792 +vn -0.7218 -0.6023 0.3409 +vn -0.3284 -0.8250 0.4600 +vn -0.0828 -0.8738 0.4793 +vn -0.7252 -0.6182 0.3032 +vn -0.0848 -0.8698 0.4860 +vn -0.3348 -0.8442 0.4186 +vn -0.7232 -0.6360 0.2694 +vn -0.0826 -0.8884 0.4516 +vn -0.3360 -0.8630 0.3773 +vn -0.7164 -0.6645 0.2127 +vn -0.0794 -0.9076 0.4123 +vn -0.3317 -0.8930 0.3041 +vn -0.7119 -0.6884 0.1387 +vn -0.0733 -0.9381 0.3386 +vn -0.3250 -0.9215 0.2125 +vn -0.7124 -0.6964 0.0866 +vn -0.0631 -0.9671 0.2463 +vn -0.3213 -0.9350 0.1500 +vn -0.7096 -0.7026 0.0519 +vn -0.0542 -0.9816 0.1833 +vn -0.3187 -0.9419 0.1059 +vn -0.7041 -0.7101 0.0014 +vn -0.0492 -0.9892 0.1384 +vn -0.3137 -0.9486 0.0423 +vn -0.7091 -0.7045 -0.0289 
+vn -0.0422 -0.9964 0.0741 +vn -0.3135 -0.9496 0.0064 +vn -0.7117 -0.7020 -0.0250 +vn -0.0350 -0.9987 0.0378 +vn -0.3173 -0.9483 0.0082 +vn -0.7151 -0.6987 -0.0212 +vn -0.0353 -0.9987 0.0381 +vn -0.3213 -0.9469 0.0095 +vn -0.7185 -0.6954 -0.0156 +vn -0.0352 -0.9987 0.0379 +vn -0.3261 -0.9453 0.0120 +vn -0.7229 -0.6909 -0.0055 +vn -0.0355 -0.9986 0.0382 +vn -0.3317 -0.9432 0.0155 +vn -0.7272 -0.6863 0.0074 +vn -0.0356 -0.9987 0.0374 +vn -0.3377 -0.9410 0.0207 +vn -0.7316 -0.6815 0.0170 +vn -0.0361 -0.9986 0.0375 +vn -0.3443 -0.9386 0.0240 +vn -0.7361 -0.6764 0.0267 +vn -0.0367 -0.9987 0.0367 +vn -0.3505 -0.9362 0.0274 +vn -0.7376 -0.6742 0.0358 +vn -0.0374 -0.9987 0.0355 +vn -0.3533 -0.9350 0.0324 +vn -0.7316 -0.6815 0.0185 +vn -0.0388 -0.9986 0.0364 +vn -0.3123 -0.9493 0.0358 +vn -0.5191 -0.8506 -0.0842 +vn -0.0416 -0.9983 0.0411 +vn -0.2103 -0.9775 -0.0143 +vn 0.0302 -0.9995 -0.0083 +vn -0.4256 -0.7579 -0.4943 +vn -0.1651 -0.9845 -0.0589 +vn 0.0357 -0.9845 -0.1717 +vn -0.0273 -0.8775 -0.4788 +vn 0.9346 -0.2664 -0.2358 +vn 0.9024 -0.3075 -0.3019 +vn 0.8262 0.2219 -0.5179 +vn 0.5715 0.4441 -0.6901 +vn 0.3828 0.6349 -0.6710 +vn 0.5244 0.5527 -0.6477 +vn 0.0958 0.7449 -0.6602 +vn 0.5173 0.2873 -0.8061 +vn 0.7686 -0.4245 -0.4785 +vn 0.4425 0.0444 -0.8957 +vn 0.4978 -0.5368 -0.6811 +vn 0.0717 -0.5802 -0.8113 +vn 0.1056 -0.0508 -0.9931 +vn 0.1123 0.3346 -0.9356 +vn 0.0936 0.5993 -0.7951 +vn -0.0028 0.7398 -0.6729 +vn -0.1195 0.7958 -0.5937 +vn 0.0887 0.8265 -0.5558 +vn -0.1486 0.8290 -0.5391 +vn -0.0005 -0.7283 -0.6853 +vn 0.0360 -0.0981 -0.9945 +vn -0.3148 -0.0697 -0.9466 +vn -0.2486 -0.7115 -0.6572 +vn -0.6128 -0.0345 -0.7895 +vn -0.0185 -0.8472 -0.5309 +vn -0.4604 -0.6969 -0.5499 +vn -0.8486 -0.0027 -0.5290 +vn -0.2089 -0.8341 -0.5106 +vn -0.0250 -0.8283 -0.5598 +vn -0.3758 -0.8214 -0.4290 +vn -0.2234 -0.8111 -0.5406 +vn -0.0197 -0.7286 -0.6847 +vn -0.6257 -0.6870 -0.3696 +vn -0.9811 0.0150 -0.1930 +vn -0.5055 -0.8125 -0.2904 +vn -0.4016 -0.7941 -0.4562 +vn -0.2714 -0.6899 -0.6711 +vn -0.0077 -0.5953 -0.8034 +vn -0.3428 -0.4498 -0.8247 +vn -0.7175 -0.6826 -0.1386 +vn -0.9852 0.0124 0.1708 +vn -0.5778 -0.8085 -0.1118 +vn -0.5385 -0.7834 -0.3103 +vn -0.4938 -0.6556 -0.5713 +vn -0.7204 -0.6847 0.1108 +vn -0.8604 -0.0102 0.5095 +vn -0.5814 -0.8095 0.0817 +vn -0.6024 -0.3782 -0.7029 +vn -0.4546 0.0177 -0.8905 +vn -0.6149 -0.7791 -0.1223 +vn -0.6639 -0.6377 -0.3906 +vn -0.6342 -0.6929 0.3430 +vn -0.6307 -0.0460 0.7747 +vn -0.6517 0.0367 -0.7576 +vn -0.2911 0.4608 -0.8384 +vn -0.5580 0.4347 -0.7069 +vn -0.5158 -0.8154 0.2628 +vn -0.6199 -0.7804 0.0817 +vn -0.7599 -0.6312 -0.1552 +vn -0.8037 -0.3492 -0.4817 +vn -0.8527 0.0528 -0.5197 +vn -0.7609 0.4369 -0.4797 +vn -0.9209 -0.3398 -0.1910 +vn -0.9770 0.0604 -0.2047 +vn -0.8779 0.4410 -0.1865 +vn -0.7680 -0.6324 0.1013 +vn -0.5531 -0.7868 0.2738 +vn -0.9315 -0.3406 0.1274 +vn -0.9882 0.0592 0.1411 +vn -0.8884 0.4395 0.1326 +vn -0.6869 -0.6397 0.3449 +vn -0.3922 -0.8255 0.4059 +vn -0.8323 -0.3486 0.4310 +vn -0.8806 0.0509 0.4711 +vn -0.7894 0.4316 0.4365 +vn -0.4244 -0.7973 0.4292 +vn -0.5274 -0.6517 0.5452 +vn -0.6353 -0.3622 0.6821 +vn -0.6668 0.0368 0.7443 +vn -0.5926 0.4186 0.6882 +vn -0.3725 0.0188 0.9278 +vn -0.3216 0.4022 0.8572 +vn -0.3644 -0.3795 0.8504 +vn -0.0332 -0.0009 0.9994 +vn -0.0525 -0.3984 0.9157 +vn -0.0607 -0.6835 0.7274 +vn -0.3094 -0.6670 0.6778 +vn -0.0598 -0.8241 0.5633 +vn -0.2536 -0.8103 0.5283 +vn -0.0552 -0.8510 0.5223 +vn -0.2352 -0.8380 0.4924 +vn -0.0547 -0.7346 0.6763 +vn -0.2676 -0.7196 0.6407 +vn -0.0555 -0.1112 0.9923 
+... (remaining OBJ mesh data omitted for brevity: several thousand vertex-normal `vn` records, a smoothing-group line `s 1`, and face definitions in `f v//vn` form; the raw asset data carries no further document content)
1355//1334 +f 1356//1335 1351//1332 1355//1334 +f 1356//1335 1355//1334 1357//1336 +f 1358//1337 1354//1333 1352//1331 +f 1359//1338 1358//1337 1352//1331 +f 1360//1339 1358//1337 1359//1338 +f 1361//1340 1360//1339 1359//1338 +f 1360//1339 1361//1340 1362//1341 +f 1363//1342 1360//1339 1362//1341 +f 1358//1337 1360//1339 1363//1342 +f 1364//1343 1358//1337 1363//1342 +f 1354//1333 1358//1337 1364//1343 +f 1365//1344 1354//1333 1364//1343 +f 1355//1334 1354//1333 1365//1344 +f 1366//1345 1355//1334 1365//1344 +f 1357//1336 1355//1334 1366//1345 +f 1367//1346 1357//1336 1366//1345 +f 1368//1347 1357//1336 1367//1346 +f 1369//1348 1368//1347 1367//1346 +f 1372//1349 1371//1350 1370//1351 +f 1373//1352 1372//1349 1370//1351 +f 1373//1352 1370//1351 1374//1353 +f 1375//1354 1372//1349 1373//1352 +f 1376//1355 1373//1352 1374//1353 +f 1376//1355 1374//1353 1377//1356 +f 1378//1357 1375//1354 1373//1352 +f 1379//1358 1375//1354 1378//1357 +f 1380//1359 1378//1357 1373//1352 +f 1376//1355 1380//1359 1373//1352 +f 1381//1360 1379//1358 1378//1357 +f 1381//1360 1378//1357 1380//1359 +f 1382//1361 1379//1358 1381//1360 +f 1383//1362 1376//1355 1377//1356 +f 1383//1362 1377//1356 1384//1363 +f 1385//1364 1380//1359 1376//1355 +f 1383//1362 1385//1364 1376//1355 +f 1386//1365 1381//1360 1380//1359 +f 1386//1365 1380//1359 1385//1364 +f 1387//1366 1382//1361 1381//1360 +f 1387//1366 1381//1360 1386//1365 +f 1388//1367 1382//1361 1387//1366 +f 1389//1368 1388//1367 1387//1366 +f 1390//1369 1383//1362 1384//1363 +f 1390//1369 1384//1363 1391//1370 +f 1392//1371 1385//1364 1383//1362 +f 1390//1369 1392//1371 1383//1362 +f 1393//1372 1386//1365 1385//1364 +f 1393//1372 1385//1364 1392//1371 +f 1394//1373 1387//1366 1386//1365 +f 1389//1368 1387//1366 1394//1373 +f 1394//1373 1386//1365 1393//1372 +f 1395//1374 1390//1369 1391//1370 +f 1395//1374 1391//1370 1396//1375 +f 1397//1376 1392//1371 1390//1369 +f 1395//1374 1397//1376 1390//1369 +f 1398//1377 1389//1368 1394//1373 +f 1399//1378 1389//1368 1398//1377 +f 1400//1379 1393//1372 1392//1371 +f 1400//1379 1392//1371 1397//1376 +f 1401//1380 1394//1373 1393//1372 +f 1398//1377 1394//1373 1401//1380 +f 1401//1380 1393//1372 1400//1379 +f 1402//1381 1395//1374 1396//1375 +f 1402//1381 1396//1375 1403//1382 +f 1404//1383 1399//1378 1398//1377 +f 1405//1384 1399//1378 1404//1383 +f 1406//1385 1405//1384 1404//1383 +f 1407//1386 1397//1376 1395//1374 +f 1402//1381 1407//1386 1395//1374 +f 1408//1387 1400//1379 1397//1376 +f 1408//1387 1397//1376 1407//1386 +f 1409//1388 1401//1380 1400//1379 +f 1409//1388 1400//1379 1408//1387 +f 1410//1389 1398//1377 1401//1380 +f 1404//1383 1398//1377 1410//1389 +f 1410//1389 1401//1380 1409//1388 +f 1406//1385 1404//1383 1411//1390 +f 1411//1390 1404//1383 1410//1389 +f 1412//1391 1406//1385 1411//1390 +f 1413//1392 1410//1389 1409//1388 +f 1411//1390 1410//1389 1413//1392 +f 1412//1391 1411//1390 1414//1393 +f 1414//1393 1411//1390 1413//1392 +f 1415//1394 1412//1391 1414//1393 +f 1416//1395 1409//1388 1408//1387 +f 1413//1392 1409//1388 1416//1395 +f 1417//1396 1408//1387 1407//1386 +f 1416//1395 1408//1387 1417//1396 +f 1414//1393 1413//1392 1418//1397 +f 1418//1397 1413//1392 1416//1395 +f 1415//1394 1414//1393 1419//1398 +f 1419//1398 1414//1393 1418//1397 +f 1420//1399 1415//1394 1419//1398 +f 1418//1397 1416//1395 1421//1400 +f 1421//1400 1416//1395 1417//1396 +f 1417//1396 1407//1386 1422//1401 +f 1422//1401 1407//1386 1402//1381 +f 1419//1398 1418//1397 1423//1402 +f 1423//1402 1418//1397 1421//1400 +f 1420//1399 
1419//1398 1424//1403 +f 1424//1403 1419//1398 1423//1402 +f 1425//1404 1420//1399 1424//1403 +f 1421//1400 1417//1396 1426//1405 +f 1426//1405 1417//1396 1422//1401 +f 1423//1402 1421//1400 1427//1406 +f 1427//1406 1421//1400 1426//1405 +f 1424//1403 1423//1402 1428//1407 +f 1428//1407 1423//1402 1427//1406 +f 1425//1404 1424//1403 1429//1408 +f 1429//1408 1424//1403 1428//1407 +f 1430//1409 1425//1404 1429//1408 +f 1430//1409 1429//1408 1431//1410 +f 1432//1411 1430//1409 1431//1410 +f 1429//1408 1428//1407 1433//1412 +f 1431//1410 1429//1408 1433//1412 +f 1431//1410 1434//1413 1432//1411 +f 1434//1413 1435//642 1432//1411 +f 1436//1414 1434//1413 1431//1410 +f 1433//1412 1436//1414 1431//1410 +f 1437//1415 1436//1414 1433//1412 +f 1433//1412 1428//1407 1438//1416 +f 1438//1416 1437//1415 1433//1412 +f 1428//1407 1427//1406 1438//1416 +f 1439//1417 1437//1415 1438//1416 +f 1438//1416 1427//1406 1440//1418 +f 1440//1418 1439//1417 1438//1416 +f 1427//1406 1426//1405 1440//1418 +f 1441//1419 1439//1417 1440//1418 +f 1440//1418 1426//1405 1442//1420 +f 1442//1420 1441//1419 1440//1418 +f 1443//1421 1441//1419 1442//1420 +f 1426//1405 1422//1401 1442//1420 +f 1444//1422 1443//1421 1442//1420 +f 1442//1420 1422//1401 1444//1422 +f 1445//1423 1443//1421 1444//1422 +f 1446//1424 1445//1423 1444//1422 +f 1422//1401 1447//1425 1444//1422 +f 1444//1422 1447//1425 1446//1424 +f 1447//1425 1422//1401 1402//1381 +f 1447//1425 1403//1382 1446//1424 +f 1447//1425 1402//1381 1403//1382 +f 1450//1426 1449//1427 1448//1428 +f 1451//1429 1450//1426 1448//1428 +f 1452//1430 1450//1426 1451//1429 +f 1453//1431 1452//1430 1451//1429 +f 1456//650 1455//649 1454//1432 +f 1457//1433 1456//650 1454//1432 +f 1457//1433 1454//1432 1458//1385 +f 1459//654 1456//650 1457//1433 +f 1460//1434 1457//1433 1458//1385 +f 1460//1434 1458//1385 1461//1435 +f 1462//1436 1459//654 1457//1433 +f 1462//1436 1457//1433 1460//1434 +f 1463//657 1459//654 1462//1436 +f 1464//1437 1460//1434 1461//1435 +f 1464//1437 1461//1435 1465//1394 +f 1466//1438 1462//1436 1460//1434 +f 1466//1438 1460//1434 1464//1437 +f 1467//1439 1463//657 1462//1436 +f 1467//1439 1462//1436 1466//1438 +f 1468//661 1463//657 1467//1439 +f 1469//1440 1464//1437 1465//1394 +f 1469//1440 1465//1394 1470//1399 +f 1471//1441 1466//1438 1464//1437 +f 1471//1441 1464//1437 1469//1440 +f 1472//1442 1467//1439 1466//1438 +f 1472//1442 1466//1438 1471//1441 +f 1473//1443 1468//661 1467//1439 +f 1473//1443 1467//1439 1472//1442 +f 1474//667 1468//661 1473//1443 +f 1475//1444 1469//1440 1470//1399 +f 1475//1444 1470//1399 1476//1404 +f 1477//1445 1471//1441 1469//1440 +f 1477//1445 1469//1440 1475//1444 +f 1478//1446 1472//1442 1471//1441 +f 1478//1446 1471//1441 1477//1445 +f 1479//1447 1473//1443 1472//1442 +f 1479//1447 1472//1442 1478//1446 +f 1480//1448 1474//667 1473//1443 +f 1473//1443 1479//1447 1480//1448 +f 1481//1449 1474//667 1480//1448 +f 1482//1450 1475//1444 1476//1404 +f 1482//1450 1476//1404 1483//1409 +f 1484//1451 1477//1445 1475//1444 +f 1484//1451 1475//1444 1482//1450 +f 1485//1452 1478//1446 1477//1445 +f 1485//1452 1477//1445 1484//1451 +f 1486//1453 1479//1447 1478//1446 +f 1479//1447 1486//1453 1480//1448 +f 1486//1453 1478//1446 1485//1452 +f 1487//1454 1481//1449 1480//1448 +f 1488//699 1481//1449 1487//1454 +f 1486//1453 1489//1455 1480//1448 +f 1489//1455 1486//1453 1485//1452 +f 1489//1455 1487//1454 1480//1448 +f 1490//1456 1488//699 1487//1454 +f 1487//1454 1489//1455 1490//1456 +f 1491//698 1488//699 1490//1456 +f 1492//1457 1485//1452 
1484//1451 +f 1489//1455 1485//1452 1492//1457 +f 1493//1458 1484//1451 1482//1450 +f 1492//1457 1484//1451 1493//1458 +f 1489//1455 1494//1459 1490//1456 +f 1494//1459 1489//1455 1492//1457 +f 1495//1460 1491//698 1490//1456 +f 1490//1456 1494//1459 1495//1460 +f 1496//694 1491//698 1495//1460 +f 1494//1459 1492//1457 1497//1461 +f 1497//1461 1492//1457 1493//1458 +f 1493//1458 1482//1450 1498//1462 +f 1498//1462 1482//1450 1483//1409 +f 1498//1462 1483//1409 1499//1411 +f 1494//1459 1500//1463 1495//1460 +f 1500//1463 1494//1459 1497//1461 +f 1501//1464 1496//694 1495//1460 +f 1501//1464 1495//1460 1500//1463 +f 1502//1465 1496//694 1501//1464 +f 1503//1466 1493//1458 1498//1462 +f 1497//1461 1493//1458 1503//1466 +f 1504//1467 1498//1462 1499//1411 +f 1503//1466 1498//1462 1504//1467 +f 1499//1411 1505//642 1504//1467 +f 1505//642 1506//684 1504//1467 +f 1504//1467 1506//684 1507//1468 +f 1507//1468 1503//1466 1504//1467 +f 1506//684 1508//1469 1507//1468 +f 1508//1469 1502//1465 1509//1470 +f 1509//1470 1502//1465 1501//1464 +f 1507//1468 1508//1469 1509//1470 +f 1510//1471 1497//1461 1503//1466 +f 1510//1471 1503//1466 1507//1468 +f 1509//1470 1510//1471 1507//1468 +f 1500//1463 1497//1461 1510//1471 +f 1501//1464 1511//1472 1509//1470 +f 1511//1472 1510//1471 1509//1470 +f 1511//1472 1500//1463 1510//1471 +f 1511//1472 1501//1464 1500//1463 +f 1514//1473 1513//1474 1512//1475 +f 1515//1476 1514//1473 1512//1475 +f 1516//1477 1514//1473 1515//1476 +f 1515//1476 1512//1475 1517//1478 +f 1518//1479 1516//1477 1515//1476 +f 1519//1480 1515//1476 1517//1478 +f 1518//1479 1515//1476 1519//1480 +f 1517//1478 1520//1481 1519//1480 +f 1521//1482 1518//1479 1519//1480 +f 1520//1481 1522//1483 1519//1480 +f 1519//1480 1522//1483 1521//1482 +f 1522//1483 1523//1484 1521//1482 +f 1526//716 1525//1485 1524//1486 +f 1527//1487 1526//716 1524//1486 +f 1524//1486 1528//1488 1527//1487 +f 1529//1489 1526//716 1527//1487 +f 1528//1488 1530//1490 1527//1487 +f 1528//1488 1531//1491 1530//1490 +f 1532//1492 1529//1489 1527//1487 +f 1527//1487 1530//1490 1532//1492 +f 1533//721 1529//1489 1532//1492 +f 1531//1491 1534//1493 1530//1490 +f 1531//1491 1535//1494 1534//1493 +f 1536//1495 1533//721 1532//1492 +f 1537//1496 1533//721 1536//1495 +f 1530//1490 1538//1497 1532//1492 +f 1530//1490 1534//1493 1538//1497 +f 1536//1495 1532//1492 1538//1497 +f 1539//1498 1537//1496 1536//1495 +f 1540//730 1537//1496 1539//1498 +f 1541//1499 1536//1495 1538//1497 +f 1534//1493 1541//1499 1538//1497 +f 1536//1495 1541//1499 1539//1498 +f 1535//1494 1542//1500 1534//1493 +f 1534//1493 1542//1500 1541//1499 +f 1543//1501 1540//730 1539//1498 +f 1544//734 1540//730 1543//1501 +f 1541//1499 1545//1502 1539//1498 +f 1542//1500 1545//1502 1541//1499 +f 1543//1501 1539//1498 1545//1502 +f 1546//1503 1542//1500 1535//1494 +f 1547//1504 1546//1503 1535//1494 +f 1548//1505 1545//1502 1542//1500 +f 1548//1505 1543//1501 1545//1502 +f 1546//1503 1548//1505 1542//1500 +f 1549//1506 1544//734 1543//1501 +f 1549//1506 1543//1501 1548//1505 +f 1550//740 1544//734 1549//1506 +f 1551//1507 1546//1503 1547//1504 +f 1552//1508 1551//1507 1547//1504 +f 1553//1509 1548//1505 1546//1503 +f 1553//1509 1549//1506 1548//1505 +f 1551//1507 1553//1509 1546//1503 +f 1554//1510 1550//740 1549//1506 +f 1554//1510 1549//1506 1553//1509 +f 1555//745 1550//740 1554//1510 +f 1556//1511 1551//1507 1552//1508 +f 1557//1512 1556//1511 1552//1508 +f 1557//1512 1558//1513 1556//1511 +f 1559//1514 1553//1509 1551//1507 +f 1559//1514 1554//1510 1553//1509 +f 
1556//1511 1559//1514 1551//1507 +f 1560//1515 1555//745 1554//1510 +f 1554//1510 1559//1514 1560//1515 +f 1561//751 1555//745 1560//1515 +f 1558//1513 1562//1516 1556//1511 +f 1556//1511 1562//1516 1559//1514 +f 1559//1514 1563//1517 1560//1515 +f 1562//1516 1563//1517 1559//1514 +f 1564//1518 1561//751 1560//1515 +f 1564//1518 1560//1515 1563//1517 +f 1565//755 1561//751 1564//1518 +f 1566//1519 1562//1516 1558//1513 +f 1567//1520 1566//1519 1558//1513 +f 1568//1521 1563//1517 1562//1516 +f 1568//1521 1564//1518 1563//1517 +f 1566//1519 1568//1521 1562//1516 +f 1569//1522 1565//755 1564//1518 +f 1570//760 1565//755 1569//1522 +f 1568//1521 1571//1523 1564//1518 +f 1571//1523 1568//1521 1566//1519 +f 1571//1523 1569//1522 1564//1518 +f 1572//1524 1566//1519 1567//1520 +f 1572//1524 1571//1523 1566//1519 +f 1573//1525 1572//1524 1567//1520 +f 1574//1526 1570//760 1569//1522 +f 1569//1522 1571//1523 1574//1526 +f 1575//1527 1570//760 1574//1526 +f 1576//1528 1575//1527 1574//1526 +f 1571//1523 1577//1529 1574//1526 +f 1576//1528 1574//1526 1577//1529 +f 1577//1529 1571//1523 1572//1524 +f 1578//1530 1576//1528 1577//1529 +f 1579//1531 1578//1530 1577//1529 +f 1580//1532 1572//1524 1573//1525 +f 1580//1532 1577//1529 1572//1524 +f 1580//1532 1579//1531 1577//1529 +f 1581//1533 1580//1532 1573//1525 +f 1582//1534 1579//1531 1580//1532 +f 1581//1533 1582//1534 1580//1532 +f 1585//1535 1584//793 1583//1536 +f 1586//1537 1584//793 1585//1535 +f 1584//793 1586//1537 1587//1538 +f 1587//1538 1586//1537 1588//1539 +f 1588//1539 1589//805 1587//1538 +f 1590//1540 1589//805 1588//1539 +f 1593//1541 1592//1542 1591//1543 +f 1594//1544 1593//1541 1591//1543 +f 1595//842 1593//1541 1594//1544 +f 1594//1544 1591//1543 1596//1545 +f 1591//1543 1597//1546 1596//1545 +f 1597//1546 1598//1547 1596//1545 +f 1599//1548 1595//842 1594//1544 +f 1600//844 1595//842 1599//1548 +f 1601//1549 1599//1548 1594//1544 +f 1596//1545 1601//1549 1594//1544 +f 1602//1550 1600//844 1599//1548 +f 1599//1548 1601//1549 1602//1550 +f 1600//844 1602//1550 1603//1551 +f 1598//1547 1604//1552 1596//1545 +f 1598//1547 1605//1553 1604//1552 +f 1603//1551 1602//1550 1606//90 +f 1606//90 1607//846 1603//1551 +f 1608//89 1607//846 1606//90 +f 1607//846 1608//89 1609//1554 +f 1610//1555 1607//846 1609//1554 +f 1610//1555 1609//1554 1611//1556 +f 1612//851 1610//1555 1611//1556 +f 1612//851 1611//1556 1613//1535 +f 1614//1536 1612//851 1613//1535 +f 1602//1550 1615//1557 1606//90 +f 1606//90 1615//1557 1616//88 +f 1601//1549 1615//1557 1602//1550 +f 1616//88 1615//1557 1617//1558 +f 1615//1557 1601//1549 1618//1559 +f 1615//1557 1618//1559 1617//1558 +f 1601//1549 1619//1560 1618//1559 +f 1601//1549 1596//1545 1619//1560 +f 1596//1545 1604//1552 1619//1560 +f 1617//1558 1618//1559 1620//1561 +f 1617//1558 1620//1561 1621//1562 +f 1618//1559 1619//1560 1622//1563 +f 1618//1559 1622//1563 1620//1561 +f 1619//1560 1604//1552 1623//1564 +f 1619//1560 1623//1564 1622//1563 +f 1604//1552 1624//1565 1623//1564 +f 1605//1553 1624//1565 1604//1552 +f 1624//1565 1605//1553 1625//1566 +f 1626//1567 1624//1565 1625//1566 +f 1623//1564 1624//1565 1626//1567 +f 1627//1525 1623//1564 1626//1567 +f 1622//1563 1623//1564 1627//1525 +f 1628//1568 1622//1563 1627//1525 +f 1620//1561 1622//1563 1628//1568 +f 1629//1513 1620//1561 1628//1568 +f 1630//1569 1620//1561 1629//1513 +f 1620//1561 1630//1569 1621//1562 +f 1631//1570 1630//1569 1629//1513 +f 1621//1562 1630//1569 1632//1571 +f 1632//1571 1630//1569 1631//1570 +f 1633//1572 1621//1562 1632//1571 +f 
1634//1573 1632//1571 1631//1570 +f 1633//1572 1632//1571 1635//1574 +f 1635//1574 1632//1571 1634//1573 +f 1636//1575 1633//1572 1635//1574 +f 1637//1576 1635//1574 1634//1573 +f 1636//1575 1635//1574 1638//1577 +f 1638//1577 1635//1574 1637//1576 +f 1639//1578 1636//1575 1638//1577 +f 1640//1494 1638//1577 1637//1576 +f 1639//1578 1638//1577 1641//1579 +f 1642//1580 1638//1577 1640//1494 +f 1638//1577 1642//1580 1641//1579 +f 1643//1581 1642//1580 1640//1494 +f 1644//1582 1642//1580 1643//1581 +f 1645//1488 1644//1582 1643//1581 +f 1641//1579 1642//1580 1646//1583 +f 1642//1580 1644//1582 1646//1583 +f 1647//1584 1644//1582 1645//1488 +f 1648//1585 1647//1584 1645//1488 +f 1648//1585 1649//715 1647//1584 +f 1649//715 1650//1586 1647//1584 +f 1647//1584 1650//1586 1651//805 +f 1652//1587 1647//1584 1651//805 +f 1644//1582 1647//1584 1652//1587 +f 1646//1583 1644//1582 1652//1587 +f 1641//1579 1646//1583 1653//1588 +f 1646//1583 1652//1587 1653//1588 +f 1654//1589 1641//1579 1653//1588 +f 1653//1588 1652//1587 1655//1590 +f 1656//1537 1654//1589 1653//1588 +f 1655//1590 1656//1537 1653//1588 +f 1659//1591 1658//1592 1657//1593 +f 1658//1592 1660//1594 1657//1593 +f 1657//1593 1660//1594 1661//1595 +f 1658//1592 1662//1596 1660//1594 +f 1660//1594 1663//1597 1661//1595 +f 1661//1595 1663//1597 1664//1598 +f 1662//1596 1665//1599 1660//1594 +f 1660//1594 1665//1599 1663//1597 +f 1662//1596 1666//1600 1665//1599 +f 1666//1600 1667//1601 1665//1599 +f 1665//1599 1667//1601 1668//1602 +f 1665//1599 1668//1602 1663//1597 +f 1667//1601 1669//1603 1668//1602 +f 1663//1597 1670//1604 1664//1598 +f 1663//1597 1668//1602 1670//1604 +f 1671//1605 1664//1598 1670//1604 +f 1668//1602 1669//1603 1672//1606 +f 1668//1602 1672//1606 1670//1604 +f 1669//1603 1673//1607 1672//1606 +f 1672//1606 1673//1607 1674//1608 +f 1675//1609 1671//1605 1670//1604 +f 1675//1609 1670//1604 1672//1606 +f 1676//1610 1672//1606 1674//1608 +f 1676//1610 1675//1609 1672//1606 +f 1674//1608 1677//1611 1676//1610 +f 1677//1611 1678//1612 1676//1610 +f 1676//1610 1678//1612 1675//1609 +f 1678//1612 1679//1613 1675//1609 +f 1675//1609 1679//1613 1671//1605 +f 1679//1613 1680//853 1671//1605 +f 1683//1614 1682//1615 1681//1616 +f 1684//1617 1683//1614 1681//1616 +f 1685//1618 1683//1614 1684//1617 +f 1684//1617 1681//1616 1686//1619 +f 1681//1616 1687//1620 1686//1619 +f 1688//1621 1685//1618 1684//1617 +f 1689//1622 1685//1618 1688//1621 +f 1690//1623 1689//1622 1688//1621 +f 1690//1623 1688//1621 1691//1624 +f 1688//1621 1684//1617 1692//1625 +f 1688//1621 1692//1625 1691//1624 +f 1684//1617 1686//1619 1692//1625 +f 1691//1624 1692//1625 1693//1626 +f 1692//1625 1694//1627 1693//1626 +f 1692//1625 1686//1619 1694//1627 +f 1693//1626 1694//1627 1695//1628 +f 1694//1627 1696//1629 1695//1628 +f 1686//1619 1687//1620 1697//1630 +f 1686//1619 1697//1630 1694//1627 +f 1694//1627 1697//1630 1696//1629 +f 1687//1620 1698//1631 1697//1630 +f 1697//1630 1699//1632 1696//1629 +f 1697//1630 1698//1631 1699//1632 +f 1698//1631 1700//1633 1699//1632 +f 1703//1634 1702//1635 1701//1636 +f 1704//1637 1703//1634 1701//1636 +f 1704//1637 1701//1636 1705//1638 +f 1706//1639 1703//1634 1704//1637 +f 1707//1640 1704//1637 1705//1638 +f 1707//1640 1705//1638 1708//1641 +f 1709//1642 1706//1639 1704//1637 +f 1709//1642 1704//1637 1707//1640 +f 1710//1643 1706//1639 1709//1642 +f 1711//1644 1707//1640 1708//1641 +f 1711//1644 1708//1641 1712//1645 +f 1713//1646 1709//1642 1707//1640 +f 1713//1646 1707//1640 1711//1644 +f 1714//1647 1710//1643 
1709//1642 +f 1714//1647 1709//1642 1713//1646 +f 1715//1648 1711//1644 1712//1645 +f 1715//1648 1712//1645 1716//1649 +f 1717//1650 1715//1648 1716//1649 +f 1718//1651 1713//1646 1711//1644 +f 1718//1651 1711//1644 1715//1648 +f 1719//1652 1715//1648 1717//1650 +f 1719//1652 1718//1651 1715//1648 +f 1720//1653 1719//1652 1717//1650 +f 1721//1654 1713//1646 1718//1651 +f 1721//1654 1714//1647 1713//1646 +f 1710//1643 1714//1647 1722//1655 +f 1723//1656 1710//1643 1722//1655 +f 1724//1657 1718//1651 1719//1652 +f 1724//1657 1721//1654 1718//1651 +f 1725//1658 1719//1652 1720//1653 +f 1725//1658 1724//1657 1719//1652 +f 1726//1659 1725//1658 1720//1653 +f 1725//1658 1726//1659 1727//1660 +f 1728//1661 1725//1658 1727//1660 +f 1724//1657 1725//1658 1728//1661 +f 1728//1661 1727//1660 1729//1662 +f 1714//1647 1721//1654 1730//1663 +f 1722//1655 1714//1647 1730//1663 +f 1721//1654 1724//1657 1731//1664 +f 1731//1664 1724//1657 1728//1661 +f 1730//1663 1721//1654 1731//1664 +f 1732//1665 1728//1661 1729//1662 +f 1731//1664 1728//1661 1732//1665 +f 1732//1665 1729//1662 1733//1666 +f 1730//1663 1731//1664 1734//1667 +f 1734//1667 1731//1664 1732//1665 +f 1735//1668 1732//1665 1733//1666 +f 1734//1667 1732//1665 1735//1668 +f 1735//1668 1733//1666 1736//1669 +f 1722//1655 1730//1663 1737//1670 +f 1737//1670 1730//1663 1734//1667 +f 1738//1671 1735//1668 1736//1669 +f 1738//1671 1736//1669 1739//1672 +f 1740//1673 1734//1667 1735//1668 +f 1737//1670 1734//1667 1740//1673 +f 1740//1673 1735//1668 1738//1671 +f 1741//1674 1722//1655 1737//1670 +f 1723//1656 1722//1655 1741//1674 +f 1742//1675 1723//1656 1741//1674 +f 1743//1676 1738//1671 1739//1672 +f 1743//1676 1739//1672 1744//1677 +f 1745//1678 1740//1673 1738//1671 +f 1745//1678 1738//1671 1743//1676 +f 1746//1679 1737//1670 1740//1673 +f 1741//1674 1737//1670 1746//1679 +f 1746//1679 1740//1673 1745//1678 +f 1742//1675 1741//1674 1747//1680 +f 1747//1680 1741//1674 1746//1679 +f 1748//1681 1742//1675 1747//1680 +f 1749//1682 1746//1679 1745//1678 +f 1747//1680 1746//1679 1749//1682 +f 1748//1681 1747//1680 1750//1683 +f 1750//1683 1747//1680 1749//1682 +f 1751//1684 1748//1681 1750//1683 +f 1752//1685 1745//1678 1743//1676 +f 1749//1682 1745//1678 1752//1685 +f 1753//1686 1743//1676 1744//1677 +f 1752//1685 1743//1676 1753//1686 +f 1751//1684 1750//1683 1754//1687 +f 1755//1688 1751//1684 1754//1687 +f 1750//1683 1749//1682 1756//1689 +f 1756//1689 1749//1682 1752//1685 +f 1754//1687 1750//1683 1756//1689 +f 1755//1688 1754//1687 1757//1690 +f 1758//1691 1755//1688 1757//1690 +f 1759//1692 1758//1691 1757//1690 +f 1756//1689 1752//1685 1760//1693 +f 1760//1693 1752//1685 1753//1686 +f 1754//1687 1756//1689 1761//1694 +f 1757//1690 1754//1687 1761//1694 +f 1761//1694 1756//1689 1760//1693 +f 1762//1695 1753//1686 1744//1677 +f 1763//1696 1762//1695 1744//1677 +f 1763//1696 1764//1697 1762//1695 +f 1765//1698 1759//1692 1757//1690 +f 1765//1698 1757//1690 1761//1694 +f 1766//1699 1761//1694 1760//1693 +f 1766//1699 1765//1698 1761//1694 +f 1767//1700 1760//1693 1753//1686 +f 1762//1695 1767//1700 1753//1686 +f 1767//1700 1766//1699 1760//1693 +f 1764//1697 1768//1701 1762//1695 +f 1762//1695 1768//1701 1767//1700 +f 1764//1697 1769//1702 1768//1701 +f 1765//1698 1770//1703 1759//1692 +f 1770//1703 1771//1704 1759//1692 +f 1767//1700 1772//1705 1766//1699 +f 1768//1701 1772//1705 1767//1700 +f 1769//1702 1773//1706 1768//1701 +f 1768//1701 1773//1706 1772//1705 +f 1769//1702 1774//1707 1773//1706 +f 1766//1699 1775//1708 1765//1698 +f 1772//1705 
1775//1708 1766//1699 +f 1775//1708 1770//1703 1765//1698 +f 1770//1703 1776//1709 1771//1704 +f 1776//1709 1777//1710 1771//1704 +f 1774//1707 1778//1711 1773//1706 +f 1778//1711 1774//1707 1779//1712 +f 1773//1706 1780//1713 1772//1705 +f 1772//1705 1780//1713 1775//1708 +f 1773//1706 1778//1711 1780//1713 +f 1775//1708 1781//1714 1770//1703 +f 1780//1713 1781//1714 1775//1708 +f 1781//1714 1776//1709 1770//1703 +f 1782//1715 1778//1711 1779//1712 +f 1778//1711 1782//1715 1780//1713 +f 1780//1713 1782//1715 1781//1714 +f 1782//1715 1779//1712 1783//1716 +f 1782//1715 1784//1717 1781//1714 +f 1781//1714 1784//1717 1776//1709 +f 1784//1717 1782//1715 1783//1716 +f 1776//1709 1785//1718 1777//1710 +f 1784//1717 1785//1718 1776//1709 +f 1784//1717 1783//1716 1785//1718 +f 1785//1718 1786//1719 1777//1710 +f 1783//1716 1786//1719 1785//1718 +f 1789//1720 1788//1721 1787//1722 +f 1790//1723 1789//1720 1787//1722 +f 1790//1723 1787//1722 1791//1724 +f 1792//1725 1789//1720 1790//1723 +f 1793//1726 1790//1723 1791//1724 +f 1793//1726 1791//1724 1794//1727 +f 1795//1728 1792//1725 1790//1723 +f 1796//1729 1792//1725 1795//1728 +f 1797//1730 1793//1726 1794//1727 +f 1797//1730 1794//1727 1798//1731 +f 1793//1726 1799//1732 1790//1723 +f 1799//1732 1795//1728 1790//1723 +f 1799//1732 1793//1726 1797//1730 +f 1800//1733 1797//1730 1798//1731 +f 1800//1733 1798//1731 1801//1734 +f 1802//1735 1800//1733 1801//1734 +f 1803//1736 1799//1732 1797//1730 +f 1803//1736 1797//1730 1800//1733 +f 1804//1737 1800//1733 1802//1735 +f 1804//1737 1803//1736 1800//1733 +f 1805//1738 1804//1737 1802//1735 +f 1799//1732 1806//1739 1795//1728 +f 1806//1739 1799//1732 1803//1736 +f 1807//1740 1796//1729 1795//1728 +f 1806//1739 1807//1740 1795//1728 +f 1808//1741 1803//1736 1804//1737 +f 1808//1741 1806//1739 1803//1736 +f 1809//1742 1804//1737 1805//1738 +f 1809//1742 1808//1741 1804//1737 +f 1810//1743 1809//1742 1805//1738 +f 1809//1742 1810//1743 1811//1744 +f 1812//1745 1809//1742 1811//1744 +f 1808//1741 1809//1742 1812//1745 +f 1812//1745 1811//1744 1813//1746 +f 1796//1729 1807//1740 1814//1747 +f 1815//1748 1796//1729 1814//1747 +f 1806//1739 1808//1741 1816//1749 +f 1816//1749 1808//1741 1812//1745 +f 1817//1750 1812//1745 1813//1746 +f 1816//1749 1812//1745 1817//1750 +f 1817//1750 1813//1746 1818//1751 +f 1807//1740 1806//1739 1819//1752 +f 1819//1752 1806//1739 1816//1749 +f 1814//1747 1807//1740 1819//1752 +f 1815//1748 1814//1747 1820//1753 +f 1821//1754 1815//1748 1820//1753 +f 1822//1755 1817//1750 1818//1751 +f 1822//1755 1818//1751 1823//1756 +f 1824//1757 1816//1749 1817//1750 +f 1819//1752 1816//1749 1824//1757 +f 1824//1757 1817//1750 1822//1755 +f 1814//1747 1819//1752 1825//1758 +f 1825//1758 1819//1752 1824//1757 +f 1820//1753 1814//1747 1825//1758 +f 1821//1754 1820//1753 1826//1759 +f 1827//1760 1821//1754 1826//1759 +f 1828//1761 1822//1755 1823//1756 +f 1828//1761 1823//1756 1829//1762 +f 1830//1763 1824//1757 1822//1755 +f 1825//1758 1824//1757 1830//1763 +f 1830//1763 1822//1755 1828//1761 +f 1820//1753 1825//1758 1831//1764 +f 1831//1764 1825//1758 1830//1763 +f 1826//1759 1820//1753 1831//1764 +f 1827//1760 1826//1759 1832//1765 +f 1833//1766 1827//1760 1832//1765 +f 1834//1767 1828//1761 1829//1762 +f 1834//1767 1829//1762 1835//1768 +f 1836//1769 1830//1763 1828//1761 +f 1831//1764 1830//1763 1836//1769 +f 1836//1769 1828//1761 1834//1767 +f 1826//1759 1831//1764 1837//1770 +f 1837//1770 1831//1764 1836//1769 +f 1832//1765 1826//1759 1837//1770 +f 1833//1766 1832//1765 1838//1771 +f 
1839//1772 1833//1766 1838//1771 +f 1840//1773 1836//1769 1834//1767 +f 1837//1770 1836//1769 1840//1773 +f 1832//1765 1837//1770 1841//1774 +f 1838//1771 1832//1765 1841//1774 +f 1841//1774 1837//1770 1840//1773 +f 1842//1775 1834//1767 1835//1768 +f 1840//1773 1834//1767 1842//1775 +f 1839//1772 1838//1771 1843//1776 +f 1844//1777 1839//1772 1843//1776 +f 1845//1778 1844//1777 1843//1776 +f 1841//1774 1840//1773 1846//1779 +f 1846//1779 1840//1773 1842//1775 +f 1838//1771 1841//1774 1847//1780 +f 1843//1776 1838//1771 1847//1780 +f 1847//1780 1841//1774 1846//1779 +f 1848//1781 1842//1775 1835//1768 +f 1849//1782 1848//1781 1835//1768 +f 1849//1782 1850//1783 1848//1781 +f 1851//1784 1845//1778 1843//1776 +f 1851//1784 1843//1776 1847//1780 +f 1852//1785 1847//1780 1846//1779 +f 1852//1785 1851//1784 1847//1780 +f 1853//1786 1846//1779 1842//1775 +f 1848//1781 1853//1786 1842//1775 +f 1853//1786 1852//1785 1846//1779 +f 1850//1783 1854//1787 1848//1781 +f 1848//1781 1854//1787 1853//1786 +f 1850//1783 1855//1788 1854//1787 +f 1851//1784 1856//1789 1845//1778 +f 1856//1789 1857//1790 1845//1778 +f 1853//1786 1858//1791 1852//1785 +f 1854//1787 1858//1791 1853//1786 +f 1855//1788 1859//1792 1854//1787 +f 1854//1787 1859//1792 1858//1791 +f 1855//1788 1860//1793 1859//1792 +f 1852//1785 1861//1794 1851//1784 +f 1861//1794 1856//1789 1851//1784 +f 1858//1791 1861//1794 1852//1785 +f 1856//1789 1862//1795 1857//1790 +f 1862//1795 1863//1796 1857//1790 +f 1861//1794 1864//1797 1856//1789 +f 1861//1794 1858//1791 1864//1797 +f 1864//1797 1862//1795 1856//1789 +f 1860//1793 1865//1798 1859//1792 +f 1865//1798 1860//1793 1866//1799 +f 1859//1792 1867//1800 1858//1791 +f 1859//1792 1865//1798 1867//1800 +f 1858//1791 1867//1800 1864//1797 +f 1868//1801 1865//1798 1866//1799 +f 1865//1798 1868//1801 1867//1800 +f 1867//1800 1868//1801 1864//1797 +f 1868//1801 1866//1799 1869//1802 +f 1868//1801 1870//1803 1864//1797 +f 1864//1797 1870//1803 1862//1795 +f 1870//1803 1868//1801 1869//1802 +f 1862//1795 1871//1804 1863//1796 +f 1870//1803 1871//1804 1862//1795 +f 1870//1803 1869//1802 1871//1804 +f 1871//1804 1872//1805 1863//1796 +f 1869//1802 1872//1805 1871//1804 +f 1875//1806 1874//1807 1873//1808 +f 1874//1807 1876//1809 1873//1808 +f 1873//1808 1876//1809 1877//1810 +f 1874//1807 1878//1811 1876//1809 +f 1876//1809 1879//1812 1877//1810 +f 1877//1810 1879//1812 1880//1813 +f 1878//1811 1881//1814 1876//1809 +f 1876//1809 1881//1814 1879//1812 +f 1878//1811 1882//1815 1881//1814 +f 1879//1812 1883//1816 1880//1813 +f 1880//1813 1883//1816 1884//1817 +f 1881//1814 1885//1818 1879//1812 +f 1879//1812 1885//1818 1883//1816 +f 1883//1816 1886//1819 1884//1817 +f 1884//1817 1886//1819 1887//1820 +f 1886//1819 1888//1821 1887//1820 +f 1885//1818 1889//1822 1883//1816 +f 1883//1816 1889//1822 1886//1819 +f 1886//1819 1890//1823 1888//1821 +f 1889//1822 1890//1823 1886//1819 +f 1890//1823 1891//1824 1888//1821 +f 1892//1825 1885//1818 1881//1814 +f 1885//1818 1892//1825 1889//1822 +f 1882//1815 1893//1826 1881//1814 +f 1893//1826 1892//1825 1881//1814 +f 1890//1823 1894//1827 1891//1824 +f 1894//1827 1895//1828 1891//1824 +f 1895//1828 1894//1827 1896//1829 +f 1889//1822 1897//1830 1890//1823 +f 1897//1830 1894//1827 1890//1823 +f 1892//1825 1897//1830 1889//1822 +f 1894//1827 1898//1831 1896//1829 +f 1894//1827 1897//1830 1898//1831 +f 1896//1829 1898//1831 1899//1832 +f 1893//1826 1882//1815 1900//1833 +f 1882//1815 1901//1834 1900//1833 +f 1897//1830 1892//1825 1902//1835 +f 1897//1830 1902//1835 
1898//1831 +f 1898//1831 1903//1836 1899//1832 +f 1898//1831 1902//1835 1903//1836 +f 1899//1832 1903//1836 1904//1837 +f 1892//1825 1893//1826 1905//1838 +f 1892//1825 1905//1838 1902//1835 +f 1893//1826 1900//1833 1905//1838 +f 1900//1833 1901//1834 1906//1839 +f 1901//1834 1907//1840 1906//1839 +f 1903//1836 1908//1841 1904//1837 +f 1904//1837 1908//1841 1909//1842 +f 1902//1835 1910//1843 1903//1836 +f 1902//1835 1905//1838 1910//1843 +f 1903//1836 1910//1843 1908//1841 +f 1905//1838 1900//1833 1911//1844 +f 1905//1838 1911//1844 1910//1843 +f 1900//1833 1906//1839 1911//1844 +f 1906//1839 1907//1840 1912//1845 +f 1907//1840 1913//1846 1912//1845 +f 1908//1841 1914//1847 1909//1842 +f 1909//1842 1914//1847 1915//1848 +f 1910//1843 1916//1849 1908//1841 +f 1910//1843 1911//1844 1916//1849 +f 1908//1841 1916//1849 1914//1847 +f 1911//1844 1906//1839 1917//1850 +f 1911//1844 1917//1850 1916//1849 +f 1906//1839 1912//1845 1917//1850 +f 1912//1845 1913//1846 1918//1851 +f 1913//1846 1919//1852 1918//1851 +f 1914//1847 1920//1853 1915//1848 +f 1915//1848 1920//1853 1921//1854 +f 1916//1849 1922//1855 1914//1847 +f 1916//1849 1917//1850 1922//1855 +f 1914//1847 1922//1855 1920//1853 +f 1917//1850 1912//1845 1923//1856 +f 1917//1850 1923//1856 1922//1855 +f 1912//1845 1918//1851 1923//1856 +f 1918//1851 1919//1852 1924//1857 +f 1919//1852 1925//1858 1924//1857 +f 1922//1855 1926//1859 1920//1853 +f 1922//1855 1923//1856 1926//1859 +f 1923//1856 1918//1851 1927//1860 +f 1918//1851 1924//1857 1927//1860 +f 1923//1856 1927//1860 1926//1859 +f 1920//1853 1928//1861 1921//1854 +f 1920//1853 1926//1859 1928//1861 +f 1924//1857 1925//1858 1929//1862 +f 1925//1858 1930//1863 1929//1862 +f 1930//1863 1931//1864 1929//1862 +f 1926//1859 1927//1860 1932//1865 +f 1926//1859 1932//1865 1928//1861 +f 1927//1860 1924//1857 1933//1866 +f 1924//1857 1929//1862 1933//1866 +f 1927//1860 1933//1866 1932//1865 +f 1928//1861 1934//1867 1921//1854 +f 1934//1867 1935//1868 1921//1854 +f 1936//1869 1935//1868 1934//1867 +f 1931//1864 1937//1870 1929//1862 +f 1929//1862 1937//1870 1933//1866 +f 1933//1866 1938//1871 1932//1865 +f 1937//1870 1938//1871 1933//1866 +f 1932//1865 1939//1872 1928//1861 +f 1939//1872 1934//1867 1928//1861 +f 1938//1871 1939//1872 1932//1865 +f 1940//1873 1936//1869 1934//1867 +f 1940//1873 1934//1867 1939//1872 +f 1941//1874 1936//1869 1940//1873 +f 1942//1875 1937//1870 1931//1864 +f 1943//1876 1942//1875 1931//1864 +f 1944//1877 1939//1872 1938//1871 +f 1944//1877 1940//1873 1939//1872 +f 1945//1878 1941//1874 1940//1873 +f 1945//1878 1940//1873 1944//1877 +f 1946//1879 1941//1874 1945//1878 +f 1947//1880 1938//1871 1937//1870 +f 1942//1875 1947//1880 1937//1870 +f 1947//1880 1944//1877 1938//1871 +f 1948//1881 1942//1875 1943//1876 +f 1949//1882 1948//1881 1943//1876 +f 1950//1883 1947//1880 1942//1875 +f 1944//1877 1947//1880 1950//1883 +f 1948//1881 1950//1883 1942//1875 +f 1951//1884 1946//1879 1945//1878 +f 1946//1879 1951//1884 1952//1885 +f 1953//1886 1945//1878 1944//1877 +f 1951//1884 1945//1878 1953//1886 +f 1953//1886 1944//1877 1950//1883 +f 1951//1884 1954//1887 1952//1885 +f 1954//1887 1951//1884 1953//1886 +f 1954//1887 1953//1886 1950//1883 +f 1952//1885 1954//1887 1955//1888 +f 1956//1889 1954//1887 1950//1883 +f 1956//1889 1950//1883 1948//1881 +f 1954//1887 1956//1889 1955//1888 +f 1957//1890 1948//1881 1949//1882 +f 1957//1890 1956//1889 1948//1881 +f 1955//1888 1956//1889 1957//1890 +f 1958//1891 1957//1890 1949//1882 +f 1958//1891 1955//1888 1957//1890 +f 1961//1892 
1960//1893 1959//1894 +f 1960//1893 1962//1895 1959//1894 +f 1959//1894 1962//1895 1963//1896 +f 1960//1893 1964//1897 1962//1895 +f 1962//1895 1965//1898 1963//1896 +f 1963//1896 1965//1898 1966//1899 +f 1964//1897 1967//1900 1962//1895 +f 1962//1895 1967//1900 1965//1898 +f 1964//1897 1968//1901 1967//1900 +f 1965//1898 1969//1902 1966//1899 +f 1966//1899 1969//1902 1970//1903 +f 1967//1900 1971//1904 1965//1898 +f 1965//1898 1971//1904 1969//1902 +f 1968//1901 1972//1905 1967//1900 +f 1967//1900 1972//1905 1971//1904 +f 1969//1902 1973//1906 1970//1903 +f 1970//1903 1973//1906 1974//1907 +f 1973//1906 1975//1908 1974//1907 +f 1971//1904 1976//1909 1969//1902 +f 1969//1902 1976//1909 1973//1906 +f 1973//1906 1977//1910 1975//1908 +f 1976//1909 1977//1910 1973//1906 +f 1977//1910 1978//1911 1975//1908 +f 1971//1904 1979//1912 1976//1909 +f 1972//1905 1979//1912 1971//1904 +f 1972//1905 1968//1901 1980//1913 +f 1968//1901 1981//1914 1980//1913 +f 1977//1910 1982//1915 1978//1911 +f 1982//1915 1983//1916 1978//1911 +f 1983//1916 1982//1915 1984//1917 +f 1976//1909 1985//1918 1977//1910 +f 1985//1918 1982//1915 1977//1910 +f 1979//1912 1985//1918 1976//1909 +f 1982//1915 1986//1919 1984//1917 +f 1982//1915 1985//1918 1986//1919 +f 1984//1917 1986//1919 1987//1920 +f 1979//1912 1972//1905 1988//1921 +f 1972//1905 1980//1913 1988//1921 +f 1985//1918 1979//1912 1989//1922 +f 1985//1918 1989//1922 1986//1919 +f 1979//1912 1988//1921 1989//1922 +f 1986//1919 1990//1923 1987//1920 +f 1986//1919 1989//1922 1990//1923 +f 1987//1920 1990//1923 1991//1924 +f 1989//1922 1988//1921 1992//1925 +f 1989//1922 1992//1925 1990//1923 +f 1990//1923 1993//1926 1991//1924 +f 1990//1923 1992//1925 1993//1926 +f 1991//1924 1993//1926 1994//1927 +f 1988//1921 1980//1913 1995//1928 +f 1988//1921 1995//1928 1992//1925 +f 1993//1926 1996//1929 1994//1927 +f 1994//1927 1996//1929 1997//1930 +f 1992//1925 1998//1931 1993//1926 +f 1992//1925 1995//1928 1998//1931 +f 1993//1926 1998//1931 1996//1929 +f 1980//1913 1999//1932 1995//1928 +f 1980//1913 1981//1914 1999//1932 +f 1981//1914 2000//1933 1999//1932 +f 1996//1929 2001//1934 1997//1930 +f 1997//1930 2001//1934 2002//1935 +f 1998//1931 2003//1936 1996//1929 +f 1996//1929 2003//1936 2001//1934 +f 1995//1928 2004//1937 1998//1931 +f 1995//1928 1999//1932 2004//1937 +f 1998//1931 2004//1937 2003//1936 +f 1999//1932 2000//1933 2005//1938 +f 1999//1932 2005//1938 2004//1937 +f 2000//1933 2006//1939 2005//1938 +f 2004//1937 2007//1940 2003//1936 +f 2004//1937 2005//1938 2007//1940 +f 2005//1938 2006//1939 2008//1941 +f 2005//1938 2008//1941 2007//1940 +f 2006//1939 2009//1942 2008//1941 +f 2003//1936 2010//1943 2001//1934 +f 2003//1936 2007//1940 2010//1943 +f 2001//1934 2011//1944 2002//1935 +f 2001//1934 2010//1943 2011//1944 +f 2008//1941 2009//1942 2012//1945 +f 2009//1942 2013//1946 2012//1945 +f 2007//1940 2008//1941 2014//1947 +f 2007//1940 2014//1947 2010//1943 +f 2008//1941 2012//1945 2014//1947 +f 2012//1945 2013//1946 2015//1948 +f 2013//1946 2016//1949 2015//1948 +f 2016//1949 2017//1950 2015//1948 +f 2010//1943 2014//1947 2018//1951 +f 2010//1943 2018//1951 2011//1944 +f 2014//1947 2012//1945 2019//1952 +f 2012//1945 2015//1948 2019//1952 +f 2014//1947 2019//1952 2018//1951 +f 2011//1944 2020//1953 2002//1935 +f 2020//1953 2021//1954 2002//1935 +f 2022//1955 2021//1954 2020//1953 +f 2017//1950 2023//1956 2015//1948 +f 2015//1948 2023//1956 2019//1952 +f 2019//1952 2024//1957 2018//1951 +f 2023//1956 2024//1957 2019//1952 +f 2018//1951 2025//1958 2011//1944 +f 
2025//1958 2020//1953 2011//1944 +f 2024//1957 2025//1958 2018//1951 +f 2026//1959 2022//1955 2020//1953 +f 2026//1959 2020//1953 2025//1958 +f 2027//1960 2022//1955 2026//1959 +f 2028//1961 2023//1956 2017//1950 +f 2029//1962 2028//1961 2017//1950 +f 2030//1963 2025//1958 2024//1957 +f 2030//1963 2026//1959 2025//1958 +f 2031//1964 2027//1960 2026//1959 +f 2031//1964 2026//1959 2030//1963 +f 2032//1965 2027//1960 2031//1964 +f 2033//1966 2024//1957 2023//1956 +f 2033//1966 2030//1963 2024//1957 +f 2028//1961 2033//1966 2023//1956 +f 2034//1967 2028//1961 2029//1962 +f 2035//1968 2034//1967 2029//1962 +f 2036//1969 2032//1965 2031//1964 +f 2032//1965 2036//1969 2037//1970 +f 2038//1971 2031//1964 2030//1963 +f 2038//1971 2030//1963 2033//1966 +f 2036//1969 2031//1964 2038//1971 +f 2039//1972 2033//1966 2028//1961 +f 2039//1972 2038//1971 2033//1966 +f 2034//1967 2039//1972 2028//1961 +f 2036//1969 2040//1973 2037//1970 +f 2040//1973 2036//1969 2038//1971 +f 2040//1973 2038//1971 2039//1972 +f 2037//1970 2040//1973 2041//1974 +f 2042//1975 2040//1973 2039//1972 +f 2042//1975 2039//1972 2034//1967 +f 2040//1973 2042//1975 2041//1974 +f 2043//1976 2034//1967 2035//1968 +f 2043//1976 2042//1975 2034//1967 +f 2041//1974 2042//1975 2043//1976 +f 2044//1977 2043//1976 2035//1968 +f 2044//1977 2041//1974 2043//1976 +f 2047//89 2046//1978 2045//1554 +f 2046//1978 2047//89 2048//88 +f 2048//88 2049//1558 2046//1978 +f 2046//1978 2049//1558 2050//1562 +f 2045//1554 2046//1978 2051//1979 +f 2046//1978 2050//1562 2051//1979 +f 2045//1554 2051//1979 2052//1556 +f 2051//1979 2050//1562 2053//1572 +f 2052//1556 2051//1979 2054//1980 +f 2051//1979 2053//1572 2054//1980 +f 2052//1556 2054//1980 2055//1981 +f 2054//1980 2053//1572 2056//1575 +f 2055//1981 2054//1980 2057//1982 +f 2054//1980 2056//1575 2057//1982 +f 2057//1982 2058//1537 2055//1981 +f 2057//1982 2056//1575 2059//1578 +f 2060//1589 2058//1537 2057//1982 +f 2059//1578 2061//1579 2057//1982 +f 2057//1982 2061//1579 2060//1589 +f 2064//1983 2063//85 2062//1984 +f 2063//85 2064//1983 2065//86 +f 2066//1985 2065//86 2064//1983 +f 2066//1985 2064//1983 2067//1986 +f 2064//1983 2062//1984 2068//1987 +f 2067//1986 2064//1983 2068//1987 +f 2068//1987 2062//1984 2069//850 +f 2067//1986 2068//1987 2070//1988 +f 2068//1987 2069//850 2071//1989 +f 2070//1988 2068//1987 2071//1989 +f 2071//1989 2069//850 2072//794 +f 2070//1988 2071//1989 2073//815 +f 2071//1989 2072//794 2074//1990 +f 2073//815 2071//1989 2074//1990 +f 2075//796 2074//1990 2072//794 +f 2073//815 2074//1990 2076//1991 +f 2075//796 2077//812 2074//1990 +f 2078//813 2076//1991 2074//1990 +f 2078//813 2074//1990 2077//812 diff --git a/samples/sample5.obj b/samples/sample5.obj new file mode 100644 index 0000000000000000000000000000000000000000..4828516c90add3cdca5c33f0a4f1f147e7448648 --- /dev/null +++ b/samples/sample5.obj @@ -0,0 +1,815 @@ +# Blender 4.0.0 +# www.blender.org +o SimpleTreeMesh +v 0.023939 0.616908 -0.040523 +v 0.056181 0.986068 -0.053929 +v 0.131819 0.868087 -0.116792 +v 0.118476 0.715170 -0.111236 +v 0.086487 0.971189 0.049299 +v 0.174668 0.847062 0.029201 +v 0.148760 0.700311 -0.008008 +v 0.036104 0.940336 0.140447 +v 0.103428 0.803414 0.158084 +v 0.098377 0.669438 0.083140 +v -0.065420 0.911567 0.166102 +v -0.040165 0.762733 0.194366 +v -0.003146 0.640668 0.108795 +v -0.064136 1.000000 0.040523 +v -0.158652 0.901738 0.111236 +v -0.172016 0.748821 0.116792 +v -0.096378 0.630840 0.053929 +v -0.188936 0.916618 0.008008 +v -0.214843 0.769867 -0.029201 +v -0.126663 0.645719 
-0.049299 +v -0.138574 0.947470 -0.083119 +v -0.143604 0.813515 -0.158084 +v -0.076301 0.676593 -0.140426 +v -0.037030 0.976240 -0.108774 +v -0.000011 0.854196 -0.194366 +v 0.025244 0.705362 -0.166081 +v -0.176267 0.498043 -0.025097 +v -0.320282 0.702584 -0.065841 +v -0.233132 0.663313 -0.109258 +v -0.173468 0.578584 -0.092358 +v -0.291807 0.722241 -0.000179 +v -0.192872 0.691115 -0.016363 +v -0.145014 0.598241 -0.026696 +v -0.299468 0.698249 0.069650 +v -0.203689 0.657189 0.082383 +v -0.152654 0.574249 0.043154 +v -0.338760 0.644646 0.102734 +v -0.259271 0.581383 0.129167 +v -0.191967 0.520646 0.076216 +v -0.383881 0.673394 0.012406 +v -0.386680 0.592853 0.079689 +v -0.327016 0.508124 0.096568 +v -0.239867 0.468853 0.053172 +v -0.415155 0.573196 0.014006 +v -0.367297 0.480322 0.003694 +v -0.268341 0.449196 -0.012512 +v -0.407494 0.597188 -0.055823 +v -0.356459 0.514248 -0.095073 +v -0.260681 0.473188 -0.082341 +v -0.368202 0.650770 -0.088907 +v -0.300899 0.590054 -0.141857 +v -0.221389 0.526791 -0.115424 +v 0.169175 0.540471 -0.014279 +v 0.415155 0.632882 -0.010176 +v 0.367929 0.541249 -0.045153 +v 0.266026 0.502967 -0.046858 +v 0.405874 0.622148 0.065483 +v 0.354796 0.526097 0.061842 +v 0.256745 0.492255 0.028801 +v 0.363383 0.655442 0.120370 +v 0.294711 0.573154 0.139437 +v 0.214254 0.525528 0.083667 +v 0.312558 0.713212 0.122327 +v 0.222841 0.654874 0.142215 +v 0.163450 0.583319 0.085624 +v 0.380051 0.724177 0.037619 +v 0.283199 0.761659 0.070197 +v 0.181297 0.723377 0.068514 +v 0.134071 0.631745 0.033515 +v 0.292480 0.772371 -0.005461 +v 0.194429 0.738530 -0.038482 +v 0.143352 0.642478 -0.042144 +v 0.334971 0.739098 -0.060327 +v 0.254514 0.691472 -0.116098 +v 0.185843 0.609184 -0.097030 +v 0.385775 0.681307 -0.062284 +v 0.326385 0.609753 -0.118855 +v 0.236668 0.551414 -0.098967 +v -0.001736 0.467779 -0.009081 +v 0.093916 0.539502 -0.074785 +v 0.088191 0.552382 -0.021351 +v -0.001147 0.700564 -0.021098 +v 0.017541 0.000000 -0.053066 +v -0.064136 0.445702 -0.046016 +v -0.042501 0.475229 -0.086571 +v 0.014153 0.458014 -0.102292 +v 0.041197 0.272077 -0.052519 +v -0.012575 0.271361 -0.001294 +v -0.041702 0.410914 -0.055024 +v -0.000284 0.611142 -0.022024 +v -0.001189 0.620296 -0.062411 +v 0.041365 0.625053 -0.062158 +v 0.043743 0.618634 -0.023097 +v 0.008534 0.687095 -0.003420 +v -0.028948 0.013764 -0.054013 +v 0.015037 0.019720 -0.099219 +v 0.059990 0.023403 -0.052203 +v 0.014616 0.018604 -0.008387 +v -0.063316 0.263553 -0.055045 +v -0.009986 0.270414 -0.106269 +v 0.065820 0.513217 -0.057907 +v 0.036461 0.519993 -0.019478 +v -0.009292 0.519867 -0.051825 +v 0.023287 0.521025 -0.095242 +v -0.170269 0.471273 -0.026317 +v -0.167281 0.487120 -0.044375 +v -0.156800 0.502778 -0.029053 +v -0.158273 0.486173 -0.011565 +v 0.140405 0.517068 -0.018026 +v 0.097746 0.560464 -0.041007 +v 0.151833 0.503662 -0.028822 +v 0.144888 0.533526 -0.031347 +v 0.152338 0.517468 -0.044438 +v 0.020572 0.604197 -0.082025 +v 0.093663 0.558738 -0.065210 +v -0.028654 0.423226 -0.093537 +v -0.076490 0.456962 -0.065588 +v -0.030884 0.413377 -0.018383 +v -0.021161 0.025192 -0.088254 +v 0.051046 0.034157 -0.088507 +v 0.049404 0.034178 -0.016152 +v 0.061927 0.516836 -0.030926 +v 0.156463 0.508481 -0.038713 +v -0.022529 0.026370 -0.017773 +v -0.016952 0.685937 -0.017899 +v -0.005440 0.695008 -0.041702 +v 0.019120 0.698039 -0.028675 +v -0.258681 0.556171 0.002094 +v -0.252410 0.559243 0.013943 +v 0.252683 0.607101 0.032231 +v 0.256472 0.611478 0.019562 +v -0.057381 0.452100 -0.020825 +v -0.164334 0.474430 -0.014637 +v -0.025728 
0.505851 -0.080636 +v -0.049720 0.270562 -0.094147 +v 0.029095 0.271193 -0.092169 +v 0.027180 0.271572 -0.013501 +v 0.053508 0.520351 -0.092001 +v 0.039976 0.412198 -0.016742 +v 0.021961 0.605859 -0.008429 +v -0.011964 0.615624 -0.039513 +v 0.054666 0.616908 -0.042354 +v -0.051635 0.271130 -0.015311 +v -0.013101 0.504735 -0.019036 +v 0.003967 0.527338 -0.024213 +v -0.005945 0.528664 -0.082698 +v -0.171658 0.475271 -0.039176 +v -0.163261 0.500168 -0.040271 +v -0.155179 0.498611 -0.016447 +v 0.092127 0.516479 -0.064915 +v 0.145814 0.506798 -0.019709 +v 0.141584 0.529843 -0.021245 +v 0.149034 0.529085 -0.041575 +vn 0.5116 -0.0758 -0.8559 +vn 0.5115 -0.0757 -0.8559 +vn -0.0105 0.9872 -0.1590 +vn 0.3898 -0.7547 -0.5277 +vn 0.3547 0.6066 -0.7115 +vn 0.3548 0.6066 -0.7115 +vn 0.5827 -0.7626 -0.2807 +vn 0.7947 0.5886 -0.1485 +vn 0.9516 -0.0937 -0.2927 +vn 0.1822 0.9793 0.0877 +vn 0.1509 0.9080 0.3908 +vn 0.5514 -0.8339 0.0223 +vn 0.7232 0.4257 0.5439 +vn 0.8801 -0.2567 0.3995 +vn 0.3390 -0.4690 0.8156 +vn 0.3390 -0.4689 0.8156 +vn -0.0861 0.8150 0.5730 +vn 0.3144 -0.9270 0.2045 +vn 0.1821 0.2132 0.9599 +vn -0.3547 -0.6065 0.7116 +vn -0.3548 -0.6065 0.7115 +vn -0.3900 0.7547 0.5275 +vn 0.0105 -0.9872 0.1590 +vn -0.5116 0.0758 0.8559 +vn -0.5116 0.0757 0.8559 +vn -0.7948 -0.5885 0.1483 +vn -0.7947 -0.5885 0.1483 +vn -0.5827 0.7626 0.2809 +vn -0.1822 -0.9793 -0.0877 +vn -0.9515 0.0936 0.2929 +vn -0.9516 0.0938 0.2927 +vn -0.7232 -0.4256 -0.5439 +vn -0.5514 0.8340 -0.0224 +vn -0.1509 -0.9079 -0.3910 +vn -0.8801 0.2566 -0.3995 +vn -0.8801 0.2567 -0.3995 +vn -0.1821 -0.2133 -0.9599 +vn -0.3143 0.9270 -0.2046 +vn 0.0861 -0.8149 -0.5732 +vn -0.3390 0.4691 -0.8155 +vn -0.3389 0.4691 -0.8155 +vn 0.3669 0.0734 -0.9274 +vn 0.3668 0.0734 -0.9274 +vn 0.3670 0.0735 -0.9273 +vn -0.6484 0.7155 -0.2601 +vn 0.7327 -0.4510 -0.5096 +vn -0.1743 0.5304 -0.8296 +vn -0.1742 0.5304 -0.8296 +vn 0.9072 -0.2879 -0.3069 +vn 0.2239 0.9028 -0.3673 +vn 0.2239 0.9028 -0.3672 +vn 0.7649 0.4459 -0.4649 +vn -0.4740 0.8787 -0.0575 +vn -0.4265 0.8688 0.2517 +vn 0.9546 -0.2977 0.0021 +vn 0.3323 0.8802 0.3388 +vn 0.8733 0.4233 0.2410 +vn 0.8733 0.4234 0.2409 +vn 0.6285 0.0188 0.7775 +vn 0.6285 0.0189 0.7775 +vn 0.6287 0.0189 0.7774 +vn -0.5337 0.6916 0.4867 +vn 0.8475 -0.4748 0.2372 +vn 0.0877 0.4758 0.8751 +vn 0.0877 0.4758 0.8752 +vn 0.1742 -0.5303 0.8297 +vn -0.7327 0.4512 0.5096 +vn 0.6485 -0.7154 0.2601 +vn -0.3667 -0.0734 0.9274 +vn -0.3668 -0.0734 0.9274 +vn -0.2239 -0.9028 0.3673 +vn -0.2239 -0.9028 0.3672 +vn -0.9071 0.2880 0.3070 +vn 0.4740 -0.8787 0.0575 +vn -0.7648 -0.4458 0.4651 +vn -0.7648 -0.4459 0.4652 +vn -0.3323 -0.8802 -0.3388 +vn -0.3323 -0.8802 -0.3389 +vn -0.9546 0.2979 -0.0024 +vn 0.4265 -0.8688 -0.2517 +vn -0.8733 -0.4232 -0.2412 +vn -0.0877 -0.4759 -0.8751 +vn -0.8474 0.4750 -0.2372 +vn 0.5337 -0.6916 -0.4867 +vn -0.6287 -0.0190 -0.7774 +vn 0.2506 -0.6346 -0.7311 +vn 0.2506 -0.6345 -0.7311 +vn 0.9023 0.4125 -0.1253 +vn -0.4506 -0.7663 -0.4581 +vn 0.7805 -0.1729 -0.6007 +vn 0.7805 -0.1730 -0.6007 +vn -0.4066 -0.8964 -0.1768 +vn 0.8815 -0.4703 0.0415 +vn 0.8816 -0.4702 0.0416 +vn 0.3516 -0.9319 -0.0888 +vn 0.9466 0.2822 0.1561 +vn 0.8327 0.3318 0.4433 +vn -0.5205 -0.8467 0.1104 +vn 0.6213 -0.3569 0.6975 +vn 0.6213 -0.3569 0.6976 +vn 0.0914 -0.8185 0.5672 +vn -0.3776 -0.3609 0.8527 +vn -0.3774 -0.3609 0.8528 +vn 0.6271 0.5324 0.5686 +vn -0.7259 -0.6462 0.2355 +vn 0.1524 0.1007 0.9832 +vn 0.1524 0.1006 0.9832 +vn -0.7806 0.1728 0.6007 +vn 0.4506 0.7661 0.4582 +vn -0.9023 -0.4126 0.1253 +vn -0.2506 0.6347 0.7310 +vn 
-0.2506 0.6346 0.7311 +vn -0.8815 0.4703 -0.0415 +vn -0.8816 0.4702 -0.0414 +vn 0.4064 0.8964 0.1768 +vn -0.9465 -0.2823 -0.1562 +vn -0.3516 0.9319 0.0888 +vn -0.6214 0.3570 -0.6975 +vn -0.6213 0.3570 -0.6975 +vn 0.5204 0.8468 -0.1105 +vn -0.8327 -0.3319 -0.4433 +vn -0.0915 0.8185 -0.5671 +vn -0.0915 0.8186 -0.5671 +vn -0.1523 -0.1009 -0.9832 +vn -0.1524 -0.1007 -0.9832 +vn 0.7259 0.6461 -0.2357 +vn -0.6273 -0.5322 -0.5686 +vn 0.3776 0.3608 -0.8528 +vn 0.1039 -0.1096 0.9885 +vn -0.9568 0.1403 0.2545 +vn -0.9429 0.1383 -0.3030 +vn -0.3607 0.1349 -0.9229 +vn -0.5666 0.2060 -0.7978 +vn -0.0920 0.9802 -0.1754 +vn 0.2159 0.0280 0.9760 +vn 0.2221 0.0292 0.9746 +vn 0.2216 0.0320 0.9746 +vn -0.9937 -0.0134 -0.1115 +vn -0.8895 -0.0826 0.4495 +vn 0.5076 -0.3725 -0.7769 +vn -0.0350 0.9474 0.3182 +vn -0.0413 0.9090 0.4147 +vn -0.1616 0.9205 -0.3557 +vn -0.6037 -0.7936 -0.0758 +vn -0.8114 -0.4181 -0.4084 +vn 0.6538 -0.6247 0.4268 +vn 0.7847 0.5947 -0.1748 +vn 0.4140 0.3766 -0.8287 +vn 0.8403 0.3028 0.4498 +vn 0.5428 0.0380 0.8390 +vn -0.4827 -0.1551 0.8620 +vn -0.7216 -0.0546 0.6901 +vn -0.6739 -0.0531 0.7370 +vn -0.5293 -0.0490 0.8470 +vn 0.0193 -0.9317 -0.3628 +vn 0.3016 -0.3958 -0.8674 +vn 0.5141 0.8431 -0.1577 +vn 0.5149 0.8441 -0.1493 +vn 0.5144 0.8434 -0.1549 +vn -0.1764 -0.1548 0.9721 +vn 0.3167 -0.4582 0.8305 +vn 0.1088 0.1595 -0.9812 +vn -0.9068 0.1354 -0.3991 +vn -0.2760 0.3660 -0.8887 +vn -0.0655 0.2407 0.9684 +vn 0.0037 0.3477 0.9376 +vn 0.0151 0.2697 0.9628 +vn 0.0012 0.3651 0.9310 +vn 0.0609 -0.9724 -0.2255 +vn -0.0625 -0.4293 0.9010 +vn -0.0052 -0.2761 0.9611 +vn 0.0031 -0.2533 0.9674 +vn -0.3100 -0.2982 -0.9028 +vn -0.3275 -0.3761 -0.8668 +vn -0.3071 -0.2861 -0.9076 +vn -0.1645 -0.9307 0.3267 +vn -0.1785 -0.9400 0.2907 +vn -0.1798 -0.9408 0.2875 +vn -0.2748 0.3256 -0.9047 +vn -0.2776 -0.9004 -0.3349 +vn -0.2562 -0.9270 -0.2739 +vn -0.2520 -0.9315 -0.2624 +vn -0.2740 -0.8933 0.3564 +vn -0.2765 -0.8935 0.3538 +vn 0.2830 0.8311 0.4787 +vn 0.0214 0.9641 -0.2647 +vn -0.6838 0.7277 -0.0541 +vn -0.4156 0.5994 0.6841 +vn -0.2527 -0.8965 -0.3639 +vn -0.2581 -0.8973 -0.3580 +vn 0.4477 -0.8168 -0.3639 +vn 0.4478 -0.8168 -0.3639 +vn 0.4388 -0.8175 -0.3731 +vn 0.4446 -0.8153 0.3711 +vn 0.4420 -0.8155 0.3735 +vn -0.7807 -0.5511 0.2948 +vn -0.7826 -0.5396 0.3105 +vn -0.7766 -0.5704 0.2674 +vn 0.6250 0.5155 0.5862 +vn 0.5602 0.5048 0.6567 +vn 0.5364 0.5000 0.6799 +vn -0.2506 0.2181 -0.9432 +vn -0.2673 0.2036 -0.9419 +vn -0.1768 0.2798 -0.9436 +vn -0.5163 -0.0228 -0.8561 +vn -0.2489 0.0636 -0.9664 +vn 0.5031 0.4312 -0.7490 +vn 0.7326 0.5931 -0.3338 +vn 0.7529 0.6057 -0.2572 +vn 0.1082 0.0950 -0.9896 +vn 0.3348 -0.0243 -0.9420 +vn 0.2982 -0.0278 -0.9541 +vn 0.2945 -0.0282 -0.9552 +vn 0.2899 0.2246 -0.9303 +vn 0.9696 -0.1809 0.1648 +vn 0.4134 0.0369 0.9098 +vn -0.1880 -0.0668 0.9799 +vn -0.9509 0.1323 -0.2799 +vn -0.9516 0.1447 0.2712 +vn -0.2456 0.0506 -0.9680 +vn -0.2836 0.0452 -0.9579 +vn -0.2914 0.0440 -0.9556 +vn 0.9363 -0.0792 0.3421 +vn 0.9268 -0.0853 0.3658 +vn 0.9386 -0.0777 0.3362 +vn 0.9327 -0.0922 -0.3487 +vn 0.9501 -0.1024 -0.2948 +vn 0.9304 -0.0909 -0.3550 +vn -0.3452 0.0681 0.9361 +vn -0.3942 0.0595 0.9171 +vn -0.3373 0.0695 0.9388 +vn 0.2817 0.0081 0.9595 +vn 0.2935 0.0060 0.9559 +vn -0.9077 0.0632 -0.4148 +vn -0.9079 0.0632 -0.4145 +vn -0.9077 0.0632 -0.4149 +vn -0.5400 -0.1406 0.8298 +vn -0.8368 -0.2143 0.5039 +vn -0.5776 0.1874 -0.7946 +vn 0.3574 0.4742 -0.8046 +vn 0.3328 0.4629 -0.8216 +vn 0.3271 0.4602 -0.8254 +vn 0.8228 0.4243 -0.3781 +vn -0.9489 -0.1171 0.2930 +vn -0.9656 -0.1307 
0.2249 +vn -0.9468 -0.1156 0.3004 +vn 0.8335 0.2854 0.4731 +vn -0.9541 -0.1312 -0.2693 +vn -0.9338 -0.1181 -0.3377 +vn -0.9314 -0.1167 -0.3448 +vn -0.2921 -0.0568 -0.9547 +vn -0.2967 -0.0565 -0.9533 +vn -0.2915 -0.0569 -0.9549 +vn 0.2897 0.0029 -0.9571 +vn 0.3333 0.0152 -0.9427 +vn 0.3393 0.0169 -0.9405 +vn 0.9548 0.0827 -0.2856 +vn 0.9715 0.0742 -0.2250 +vn 0.9524 0.0837 -0.2930 +vn 0.9606 0.0741 0.2680 +vn 0.9402 0.0829 0.3304 +vn 0.9374 0.0840 0.3378 +vn 0.2262 -0.0019 0.9741 +vn -0.2630 -0.0543 0.9633 +vn -0.3296 -0.0500 0.9428 +vn -0.3374 -0.0496 0.9401 +vn -0.5316 -0.7084 0.4643 +vn -0.5328 -0.7092 0.4616 +vn -0.5315 -0.7083 0.4645 +vn -0.0766 0.2769 0.9578 +vn -0.7464 0.6569 0.1067 +vn -0.7603 0.6189 0.1972 +vn -0.3382 0.2103 -0.9173 +vn -0.7088 -0.6916 -0.1391 +vn 0.3285 0.8576 -0.3957 +vn 0.3307 0.8590 -0.3909 +vn 0.3309 0.8591 -0.3904 +vn -0.2001 0.3509 -0.9148 +vn -0.5422 -0.1626 -0.8244 +vn 0.5620 0.8042 0.1936 +vn -0.1714 -0.9653 0.1970 +vn -0.1370 -0.9831 0.1217 +vn -0.1785 -0.9608 0.2123 +vn 0.0474 -0.2743 0.9605 +vn 0.4220 0.2384 0.8747 +vn 0.5312 0.3474 -0.7727 +vn 0.5290 0.3795 -0.7590 +vn 0.5287 0.3838 -0.7571 +vn -0.1571 -0.2398 0.9580 +vn -0.1602 -0.1451 0.9764 +vn 0.5150 0.8445 -0.1471 +vn 0.3718 0.8169 0.4411 +vn 0.3428 0.8042 0.4856 +vn 0.3373 0.8013 0.4941 +vn 0.0565 -0.9077 -0.4158 +vn 0.0969 0.2343 0.9673 +vn 0.7232 -0.6903 -0.0212 +vn 0.7175 -0.6966 -0.0048 +vn 0.7235 -0.6900 -0.0221 +vn 0.2996 -0.4110 -0.8610 +vn 0.4270 -0.7307 0.5327 +vn -0.5990 0.7950 0.0958 +vn -0.5986 0.7955 0.0943 +vn -0.5985 0.7956 0.0940 +vn -0.5560 0.2510 0.7924 +vn -0.2093 -0.2645 0.9414 +vn -0.3401 0.8047 -0.4867 +vn 0.6321 -0.1886 -0.7516 +vn 0.2845 0.3038 -0.9093 +vn 0.3547 0.6065 -0.7115 +vn 0.9516 -0.0938 -0.2928 +vn 0.7232 0.4257 0.5438 +vn 0.3389 -0.4690 0.8156 +vn -0.3549 -0.6066 0.7114 +vn -0.5116 0.0758 0.8558 +vn 0.2239 0.9027 -0.3673 +vn 0.3323 0.8802 0.3389 +vn 0.8734 0.4232 0.2411 +vn 0.6286 0.0190 0.7775 +vn 0.0877 0.4759 0.8751 +vn -0.3666 -0.0734 0.9275 +vn -0.2238 -0.9028 0.3673 +vn -0.7649 -0.4458 0.4650 +vn -0.0877 -0.4758 -0.8751 +vn 0.8815 -0.4703 0.0414 +vn 0.6214 -0.3569 0.6975 +vn -0.3775 -0.3610 0.8528 +vn 0.1524 0.1008 0.9832 +vn -0.2507 0.6344 0.7312 +vn -0.8815 0.4703 -0.0416 +vn -0.0915 0.8185 -0.5672 +vn -0.1523 -0.1007 -0.9832 +vn -0.8368 -0.0564 0.5445 +vn 0.0158 0.2646 0.9642 +vn -0.0657 -0.4379 0.8966 +vn -0.3285 -0.3812 -0.8641 +vn -0.1622 -0.9290 0.3327 +vn -0.2798 -0.8979 -0.3398 +vn -0.2717 -0.8930 0.3588 +vn -0.2473 -0.8958 -0.3693 +vn 0.4570 -0.8159 -0.3543 +vn 0.4471 -0.8150 0.3686 +vn -0.7855 -0.5159 0.3417 +vn 0.6645 0.5200 0.5368 +vn -0.3251 0.1516 -0.9335 +vn 0.4613 0.4002 -0.7918 +vn 0.3397 -0.0238 -0.9402 +vn -0.2396 0.0515 -0.9695 +vn 0.9253 -0.0862 0.3693 +vn 0.9519 -0.1036 -0.2882 +vn -0.4007 0.0584 0.9143 +vn -0.9079 0.0632 -0.4144 +vn 0.3649 0.4778 -0.7991 +vn -0.9671 -0.1322 0.2173 +vn -0.9560 -0.1326 -0.2617 +vn -0.2972 -0.0565 -0.9531 +vn 0.2843 0.0013 -0.9587 +vn 0.9732 0.0733 -0.2181 +vn 0.9626 0.0731 0.2608 +vn 0.2195 -0.0038 0.9756 +vn -0.2553 -0.0546 0.9653 +vn -0.5329 -0.7093 0.4614 +vn 0.3283 0.8575 -0.3961 +vn -0.1331 -0.9846 0.1132 +vn 0.5316 0.3409 -0.7753 +vn 0.3822 0.8209 0.4243 +vn 0.7172 -0.6968 -0.0042 +vn -0.5991 0.7949 0.0959 +s 1 +f 26//1 3//1 4//2 +f 24//3 14//3 2//3 +f 1//4 26//4 4//4 +f 24//5 3//5 25//6 +f 1//7 4//7 7//7 +f 3//8 5//8 6//8 +f 3//9 7//9 4//9 +f 2//10 14//10 5//10 +f 5//11 14//11 8//11 +f 1//12 7//12 10//12 +f 5//13 9//13 6//13 +f 6//14 10//14 7//14 +f 10//15 12//15 13//16 +f 8//17 14//17 11//17 
+f 1//18 10//18 13//18 +f 8//19 12//19 9//19 +f 13//20 16//20 17//21 +f 11//22 14//22 15//22 +f 1//23 13//23 17//23 +f 11//24 16//24 12//25 +f 16//26 20//26 17//27 +f 15//28 14//28 18//28 +f 1//29 17//29 20//29 +f 15//30 19//31 16//31 +f 19//32 23//32 20//32 +f 18//33 14//33 21//33 +f 1//34 20//34 23//34 +f 18//35 22//35 19//36 +f 22//37 26//37 23//37 +f 21//38 14//38 24//38 +f 1//39 23//39 26//39 +f 22//40 24//41 25//40 +f 52//42 29//43 30//44 +f 50//45 40//45 28//45 +f 27//46 52//46 30//46 +f 51//47 28//48 29//47 +f 27//49 30//49 33//49 +f 28//50 32//50 29//51 +f 30//52 32//52 33//52 +f 28//53 40//53 31//53 +f 31//54 40//54 34//54 +f 27//55 33//55 36//55 +f 32//56 34//56 35//56 +f 33//57 35//57 36//58 +f 36//59 38//60 39//61 +f 34//62 40//62 37//62 +f 27//63 36//63 39//63 +f 34//64 38//64 35//65 +f 39//66 42//66 43//66 +f 37//67 40//67 41//67 +f 27//68 39//68 43//68 +f 37//69 42//70 38//70 +f 42//71 46//71 43//72 +f 41//73 40//73 44//73 +f 27//74 43//74 46//74 +f 41//75 45//75 42//76 +f 46//77 48//77 49//78 +f 44//79 40//79 47//79 +f 27//80 46//80 49//80 +f 45//81 47//81 48//81 +f 48//82 52//82 49//82 +f 47//83 40//83 50//83 +f 27//84 49//84 52//84 +f 47//85 51//85 48//85 +f 78//86 55//86 56//87 +f 76//88 66//88 54//88 +f 53//89 78//89 56//89 +f 77//90 54//90 55//91 +f 53//92 56//92 59//92 +f 54//93 58//93 55//94 +f 56//95 58//95 59//95 +f 54//96 66//96 57//96 +f 57//97 66//97 60//97 +f 53//98 59//98 62//98 +f 57//99 61//99 58//100 +f 58//101 62//101 59//101 +f 62//102 64//102 65//103 +f 60//104 66//104 63//104 +f 53//105 62//105 65//105 +f 60//106 64//106 61//107 +f 65//108 68//108 69//108 +f 63//109 66//109 67//109 +f 53//110 65//110 69//110 +f 63//111 68//111 64//112 +f 68//113 72//113 69//114 +f 67//115 66//115 70//115 +f 53//116 69//116 72//116 +f 67//117 71//117 68//117 +f 71//118 75//119 72//119 +f 70//120 66//120 73//120 +f 53//121 72//121 75//121 +f 71//122 73//122 74//123 +f 75//124 77//124 78//125 +f 73//126 66//126 76//126 +f 53//127 75//127 78//127 +f 73//128 77//128 74//128 +f 81//129 140//129 102//129 +f 143//130 89//130 99//130 +f 89//131 135//131 99//131 +f 104//132 146//132 114//132 +f 114//133 146//133 91//133 +f 107//134 134//134 148//134 +f 79//135 139//136 102//137 +f 141//138 146//138 103//138 +f 103//139 145//139 141//139 +f 80//140 150//140 138//140 +f 107//141 149//141 144//141 +f 144//142 103//142 107//142 +f 103//143 134//143 107//143 +f 84//144 117//144 116//144 +f 116//145 89//145 84//145 +f 93//146 140//146 142//146 +f 110//147 115//147 142//147 +f 80//148 114//148 92//148 +f 93//149 142//149 94//149 +f 93//150 94//150 140//150 +f 90//151 94//151 125//151 +f 90//152 145//153 140//154 +f 101//155 150//155 111//155 +f 113//156 150//156 80//156 +f 112//157 153//158 110//159 +f 102//160 140//160 145//160 +f 102//161 122//161 81//161 +f 114//162 138//162 104//162 +f 91//163 146//163 141//163 +f 148//164 134//164 106//164 +f 102//165 144//165 79//165 +f 108//166 144//167 149//168 +f 101//169 138//169 150//169 +f 132//170 108//171 133//172 +f 106//173 117//174 147//175 +f 133//176 84//177 132//178 +f 85//179 106//179 134//179 +f 117//180 105//181 147//182 +f 124//183 83//183 98//184 +f 82//185 94//185 127//185 +f 82//186 127//186 126//186 +f 82//187 126//187 125//187 +f 82//188 125//188 94//188 +f 119//189 83//189 95//190 +f 120//191 83//192 96//193 +f 121//194 83//194 97//195 +f 118//196 84//197 89//198 +f 81//199 142//200 140//201 +f 134//202 86//203 85//204 +f 85//205 116//205 117//205 +f 86//206 116//206 85//206 +f 115//207 92//208 142//209 +f 86//210 104//210 
138//210 +f 136//211 86//212 138//213 +f 80//214 138//214 114//214 +f 122//215 139//215 101//215 +f 102//216 139//216 122//216 +f 132//217 118//217 79//217 +f 116//218 135//218 89//218 +f 89//219 143//219 118//219 +f 116//220 100//221 135//222 +f 87//223 139//224 137//225 +f 101//226 136//227 138//228 +f 88//229 118//230 143//231 +f 139//136 88//232 137//233 +f 126//234 141//235 125//236 +f 90//237 140//237 94//237 +f 125//238 141//238 90//238 +f 126//239 114//239 91//239 +f 126//240 92//241 114//242 +f 127//243 142//243 92//243 +f 99//244 124//245 143//246 +f 94//247 142//247 127//247 +f 119//248 99//249 135//250 +f 100//251 119//252 135//253 +f 120//254 100//255 136//256 +f 87//257 120//258 136//259 +f 121//260 87//261 137//262 +f 88//232 121//263 137//233 +f 124//264 88//265 143//266 +f 128//267 133//268 129//269 +f 145//270 144//270 102//270 +f 146//271 134//271 103//271 +f 103//272 144//272 145//272 +f 104//273 134//273 146//273 +f 128//274 147//274 105//274 +f 128//275 107//276 148//277 +f 128//278 148//278 106//278 +f 106//279 147//279 128//279 +f 129//280 149//280 107//280 +f 151//281 101//282 111//283 +f 129//284 133//284 108//284 +f 108//285 149//285 129//285 +f 153//286 80//287 115//288 +f 122//289 151//289 109//289 +f 109//290 81//290 122//290 +f 110//159 153//158 115//291 +f 152//292 110//293 81//294 +f 111//295 150//295 123//295 +f 152//296 81//296 109//296 +f 130//297 123//298 131//299 +f 113//300 123//300 150//300 +f 130//301 151//301 111//301 +f 130//302 112//303 152//304 +f 130//305 152//305 109//305 +f 109//306 151//306 130//306 +f 131//307 153//307 112//307 +f 131//308 123//308 113//308 +f 113//309 153//309 131//309 +f 26//1 25//1 3//1 +f 24//5 2//310 3//5 +f 3//8 2//8 5//8 +f 3//9 6//311 7//9 +f 5//13 8//312 9//13 +f 6//14 9//14 10//14 +f 10//15 9//313 12//15 +f 8//19 11//19 12//19 +f 13//20 12//314 16//20 +f 11//24 15//315 16//24 +f 16//26 19//26 20//26 +f 15//30 18//31 19//31 +f 19//32 22//32 23//32 +f 18//35 21//35 22//35 +f 22//37 25//37 26//37 +f 22//40 21//41 24//41 +f 52//42 51//43 29//43 +f 51//47 50//48 28//48 +f 28//50 31//316 32//50 +f 30//52 29//52 32//52 +f 32//56 31//317 34//56 +f 33//57 32//318 35//57 +f 36//59 35//319 38//60 +f 34//64 37//320 38//64 +f 39//66 38//66 42//66 +f 37//69 41//321 42//70 +f 42//71 45//322 46//71 +f 41//75 44//323 45//75 +f 46//77 45//77 48//77 +f 45//81 44//81 47//81 +f 48//82 51//324 52//82 +f 47//85 50//85 51//85 +f 78//86 77//86 55//86 +f 77//90 76//90 54//90 +f 54//93 57//325 58//93 +f 56//95 55//95 58//95 +f 57//99 60//326 61//99 +f 58//101 61//101 62//101 +f 62//102 61//327 64//102 +f 60//106 63//328 64//106 +f 65//108 64//108 68//108 +f 63//111 67//329 68//111 +f 68//113 71//330 72//113 +f 67//117 70//117 71//117 +f 71//118 74//118 75//119 +f 71//122 70//331 73//122 +f 75//124 74//332 77//124 +f 73//128 76//128 77//128 +f 90//152 141//333 145//153 +f 108//166 79//334 144//167 +f 132//170 79//335 108//171 +f 106//173 85//336 117//174 +f 133//176 105//337 84//177 +f 117//180 84//338 105//181 +f 124//183 95//339 83//183 +f 119//189 96//340 83//189 +f 120//191 97//341 83//192 +f 121//194 98//342 83//194 +f 118//196 132//343 84//197 +f 81//199 110//344 142//200 +f 134//202 104//345 86//203 +f 115//207 80//346 92//208 +f 136//211 100//347 86//212 +f 116//220 86//348 100//221 +f 87//223 101//349 139//224 +f 101//226 87//350 136//227 +f 88//229 79//351 118//230 +f 139//136 79//135 88//232 +f 126//234 91//352 141//235 +f 126//240 127//353 92//241 +f 99//244 95//354 124//245 +f 119//248 95//355 99//249 +f 100//251 96//356 
119//252 +f 120//254 96//357 100//255 +f 87//257 97//358 120//258 +f 121//260 97//359 87//261 +f 88//232 98//360 121//263 +f 124//264 98//361 88//265 +f 128//267 105//362 133//268 +f 128//275 129//363 107//276 +f 151//281 122//364 101//282 +f 153//286 113//365 80//287 +f 152//292 112//366 110//293 +f 130//297 111//367 123//298 +f 130//302 131//368 112//303 diff --git a/test_model.ipynb b/test_model.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..472506a2d76529a9b281969ac9298303018bd82d --- /dev/null +++ b/test_model.ipynb @@ -0,0 +1,162 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from ThirdParty.PointLLM.pointllm.model.pointllm import PointLLMLlamaForCausalLM" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/ydengbd/anaconda3/envs/mich/lib/python3.10/site-packages/huggingface_hub/file_download.py:1142: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.\n", + " warnings.warn(\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading PointBERT config from /home/ydengbd/Anymate/space/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml.\n" + ] + } + ], + "source": [ + "from transformers import AutoConfig\n", + "\n", + "config = AutoConfig.from_pretrained('RunsenXu/PointLLM_7B_v1.2')\n", + "model = PointLLMLlamaForCausalLM(config)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/ydengbd/anaconda3/envs/mich/lib/python3.10/site-packages/huggingface_hub/file_download.py:1142: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.\n", + " warnings.warn(\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading PointBERT config from /home/ydengbd/Anymate/space/ThirdParty/PointLLM/pointllm/model/pointbert/PointTransformer_8192point_2layer.yaml.\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "e7003df263064096b0c3838e9deb1cd3", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Loading checkpoint shards: 0%| | 0/3 [00:00