diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..6f7efde7263935620f4810989e3710afb4686f77
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,200 @@
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Secret
+.idea/**/deployment.xml
+.idea/**/remote-mappings.xml
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android Studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+checkpoint/*
+outputs/*
+data/data_3d_h36m.npz
+data/own2DFiles/*
+
+
+
+__pycache__/
+*.py[cod]
+
+
+*.pyc
+.ipynb_checkpoints/
+*.gif
+*.jpg
+*.png
+*.npz
+*.zip
+*.json
+*.mp4
+*.tar
+*.pth
+*.weights
+*.avi
+*.caffemodel
+*.npy
+
+
+/st_gcn
+/outputs
+/nohub*
+/VideoSave
+/ActionRecognition
+/work_dir
+
+
+*$py.class
+*.so
+
+eggs/
+.eggs/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# dotenv
+.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+# Self-defined files
+/local_test/
+/lab_processing/
+
+
+inputs/
+*.miframes
\ No newline at end of file
diff --git a/HPE2keyframes.py b/HPE2keyframes.py
new file mode 100644
index 0000000000000000000000000000000000000000..f681dabc7d8dcad25cd4e69ff72ae4ff22ed6702
--- /dev/null
+++ b/HPE2keyframes.py
@@ -0,0 +1,323 @@
+import numpy as np
+import torch
+import torch.nn.functional as F
+from scipy.spatial.transform import Rotation
+from scipy.ndimage import binary_erosion, binary_dilation
+
+import os
+import json
+
+
+
+def euler_angles_smooth(XYZ_euler_angles):
+    '''
+    Unwrap a sequence of Euler angles along the time axis: whenever two
+    adjacent frames differ by more than 180 degrees, shift all subsequent
+    frames by +/-360 degrees so the curve stays continuous.
+    '''
+    if XYZ_euler_angles.ndim == 1:
+        XYZ_euler_angles = XYZ_euler_angles[:, np.newaxis]
+
+ for i in range(XYZ_euler_angles.shape[0]-1):
+ for j in range(XYZ_euler_angles.shape[1]):
+ # smooth
+ if XYZ_euler_angles[i+1, j] - XYZ_euler_angles[i, j] > 180:
+ XYZ_euler_angles[i+1:, j] = XYZ_euler_angles[i+1:, j] - 360
+ elif XYZ_euler_angles[i+1, j] - XYZ_euler_angles[i, j] < -180:
+ XYZ_euler_angles[i+1:, j] = XYZ_euler_angles[i+1:, j] + 360
+
+ return np.squeeze(XYZ_euler_angles)
+
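+# A minimal sketch of the unwrapping behaviour (hypothetical values):
+# >>> euler_angles_smooth(np.array([179.0, -179.0, -177.0]))
+# array([179., 181., 183.])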
+
+
+def xyz2euler_body(xyz, xyz_body_frame, X_dir=1.0, Y_dir=1.0):
+    '''
+    Convert torso joint coordinates to absolute Euler angles.
+    xyz: coordinates from 3D human pose estimation, shape (frame, 3 joints, xyz).
+    xyz_body_frame: hip and shoulder coordinates, used to compute the body's
+        rotation about the Y axis.
+    X_dir: -1.0 for the arms and the body.
+    Y_dir: -1.0 for the body and the head.
+    Returns the Euler angles, the bend angle, and the upper-body rotation
+    matrices that later serve as the parent frame for the limbs and head.
+    '''
+
+    # swap y and z (and mirror x) to align with Mine-imator's coordinate system
+ xyz[:, :, [1, 2]] = xyz[:, :, [2, 1]]
+ xyz[:, :, 0] = -xyz[:, :, 0]
+ xyz_body_frame[:, :, [1, 2]] = xyz_body_frame[:, :, [2, 1]]
+ xyz_body_frame[:, :, 0] = -xyz_body_frame[:, :, 0]
+
+ p0, p1, p2 = torch.unbind(xyz, dim=1)
+ p1_, p4_, p14_, p11_ = torch.unbind(xyz_body_frame, dim=1)
+
+ # solve the cosine pose matrix
+ Y = (p0 - p1) * Y_dir
+ arm = p2 - p1
+
+ Y = F.normalize(Y, dim=1)
+ X = F.normalize(p11_ + p4_ - p1_ - p14_, dim=1)
+ # X = F.normalize(torch.cross(X_dir*arm, Y), dim=1) # TODO smooth
+    Z = F.normalize(torch.cross(X, Y, dim=1), dim=1)
+
+ cos_pose_matrix = torch.stack([X, Y, Z], dim=2)
+ r = Rotation.from_matrix(cos_pose_matrix)
+ YXZ_euler_angles = r.as_euler("YXZ", degrees=True)
+
+ # bend
+    bend = -(Y * F.normalize(arm, dim=1)).sum(dim=1) * Y_dir
+    # clamp to avoid NaNs from acos when the dot product drifts past +/-1
+    bend = torch.rad2deg(torch.acos(torch.clamp(bend, -1.0, 1.0)))
+
+ # swap xyz
+ YXZ_euler_angles[:, [0, 1, 2]] = YXZ_euler_angles[:, [1, 0, 2]]
+ XYZ_euler_angles = YXZ_euler_angles
+
+ # arm cos_pose_matrix
+ Y_arm = F.normalize(arm, dim=1)
+ X_arm = X
+    Z_arm = F.normalize(torch.cross(X_arm, Y_arm, dim=1), dim=1)
+ cos_pose_matrix_arm = torch.stack([X_arm, Y_arm, Z_arm], dim=2)
+
+ # avoid abrupt changes in angle
+ XYZ_euler_angles = euler_angles_smooth(XYZ_euler_angles)
+ bend = euler_angles_smooth(bend.numpy())
+
+ return XYZ_euler_angles, bend, cos_pose_matrix_arm
+
+
+def xyz2euler_relative(xyz, cos_body, X_dir=1.0, Y_dir=1.0, head=False, leg=False, euler_body=None):
+    '''
+    Convert limb joint coordinates to Euler angles relative to the body frame.
+    xyz: coordinates from 3D human pose estimation, shape (frame, 3 joints, xyz).
+    cos_body: rotation matrices of the parent (body) frame.
+    X_dir: -1.0 for the arms and the body.
+    Y_dir: -1.0 for the body and the head.
+    head: use the limb direction itself as the Y axis (for the head).
+    leg: rotate relative to the body's Y rotation only; requires euler_body.
+    '''
+
+    # swap y and z (and mirror x) to align with Mine-imator's coordinate system
+ xyz[:, :, [1, 2]] = xyz[:, :, [2, 1]]
+ xyz[:, :, 0] = -xyz[:, :, 0]
+ p0, p1, p2 = torch.unbind(xyz, dim=1)
+
+ # solve the cosine pose matrix
+ Y = (p0 - p1) * Y_dir
+ arm = p2 - p1
+
+ Y = F.normalize(Y, dim=1)
+    X = F.normalize(torch.cross(X_dir*arm, Y, dim=1), dim=1)  # TODO smooth
+    Z = F.normalize(torch.cross(X, Y, dim=1), dim=1)
+
+ cos_pose_matrix = torch.stack([X, Y, Z], dim=2)
+
+ if head == True:
+ Y_arm = F.normalize(arm, dim=1)
+ X_arm = X
+        Z_arm = F.normalize(torch.cross(X_arm, Y_arm, dim=1), dim=1)
+ cos_pose_matrix = torch.stack([X_arm, Y_arm, Z_arm], dim=2)
+
+ # relative to the body rotation Y
+ if leg == True:
+ euler_body_Y = euler_body * 0
+ euler_body_Y[:, 0:1] = euler_body[:, 1:2]
+ r_body_Y = Rotation.from_euler("YXZ", euler_body_Y, degrees=True)
+ cos_body_Y = torch.from_numpy(r_body_Y.as_matrix())
+
+ # relative to the body
+ cos_relative = cos_body if leg == False else cos_body_Y.float()
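+    # rotation matrices are orthogonal, so permute(0, 2, 1) is the transpose
+    # (inverse) of the parent frame: R_rel = R_parent^T @ R_child expresses
+    # the limb pose in the parent's coordinate frame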
+ cos_pose_matrix = cos_relative.permute(0, 2, 1) @ cos_pose_matrix
+ r = Rotation.from_matrix(cos_pose_matrix)
+ YXZ_euler_angles = r.as_euler("YXZ", degrees=True)
+
+ # bend
+    bend = -(Y * F.normalize(arm, dim=1)).sum(dim=1) * Y_dir
+    # clamp to avoid NaNs from acos when the dot product drifts past +/-1
+    bend = torch.rad2deg(torch.acos(torch.clamp(bend, -1.0, 1.0)))
+ # if head == True:
+ # bend = bend * 0.5
+
+ # swap xyz
+ YXZ_euler_angles[:, [0, 1, 2]] = YXZ_euler_angles[:, [1, 0, 2]]
+ XYZ_euler_angles = YXZ_euler_angles
+
+ # avoid abrupt changes in angle
+ XYZ_euler_angles = euler_angles_smooth(XYZ_euler_angles)
+ bend = euler_angles_smooth(bend.numpy())
+
+ return XYZ_euler_angles, bend
+
+
+def calculate_body_offset(euler_body, euler_right_leg, bend_right_leg, euler_left_leg, bend_left_leg, length_leg=[6, 6], prior=False):
+    '''
+    Calculate the body offset that makes the movement more realistic.
+    First, determine the foot positions of both legs from the actual effect
+    of the Euler-angle rotations in Mine-imator. Then, determine which leg is
+    currently touching the ground and pin that grounded leg, from which the
+    body offset follows. A small worked example is given after this function.
+    length_leg: lengths of the upper and lower leg segments.
+    prior: also use the bend angles when deciding which leg is grounded.
+    '''
+
+ def calculate_leg_coordinates(r_body_Y, euler_leg, bend_leg, length_leg, right=True):
+ YXZ_euler_leg = euler_leg[:, [1, 0, 2]]
+ r1 = Rotation.from_euler("YXZ", YXZ_euler_leg, degrees=True)
+ m1 = r1.as_matrix()
+ X1 = m1[:, :, 0] # direction
+ Y1 = m1[:, :, 1] # vector to be rotated
+ r2 = Rotation.from_rotvec(X1*bend_leg[:, np.newaxis], degrees=True)
+ Y2 = r2.apply(Y1) # reconstruct the arm vector
+ coordinates = -(Y1 * length_leg[0] + Y2 * length_leg[1])
+ coordinates[:, 0] = coordinates[:, 0] - 2
+ coordinates = r_body_Y.apply(coordinates)
+ return coordinates
+
+ # calculate the endpoint coordinates of two legs
+ euler_body_Y = euler_body * 0
+ euler_body_Y[:, 0:1] = euler_body[:, 1:2]
+ r_body_Y = Rotation.from_euler("YXZ", euler_body_Y, degrees=True)
+ right_coordinates = calculate_leg_coordinates(r_body_Y, euler_right_leg, bend_right_leg, length_leg)
+ left_coordinates = calculate_leg_coordinates(r_body_Y, euler_left_leg, bend_left_leg, length_leg)
+ # stack, 0: right, 1: left
+ coordinates = np.stack([right_coordinates, left_coordinates], axis=1)
+
+    # determine which leg is grounded, 0: right, 1: left
+ grounded_flag = (right_coordinates[:, 1] > left_coordinates[:, 1])*1
+    # prior knowledge: the leg that is bent much more is assumed not to be grounded
+ if prior == True:
+ grounded_flag_left = (bend_right_leg - bend_left_leg) > 30
+ grounded_flag_right = (bend_left_leg - bend_right_leg) > 30
+ grounded_flag += grounded_flag_left*1
+ grounded_flag *= (1 - grounded_flag_right)*1
+ # smoothing
+ grounded_flag = binary_erosion(grounded_flag, structure=np.ones(7))*1
+ grounded_flag = binary_dilation(grounded_flag, structure=np.ones(7))*1
+
+ body_POS = np.zeros_like(right_coordinates)
+
+ # POS_Y
+ ind = np.array(range(right_coordinates.shape[0]))
+ body_POS[:, 1] = -coordinates[ind, grounded_flag, 1]
+
+    # extract the X, Z coordinates of the grounded leg at time t_1
+ X_t1 = coordinates[ind[:-1], grounded_flag[:-1], 0]
+ Z_t1 = coordinates[ind[:-1], grounded_flag[:-1], 2]
+    # extract the X, Z coordinates of the grounded leg at time t_2
+    # (grounded_flag from t_1 is reused, so both samples track the same leg)
+ X_t2 = coordinates[ind[1:], grounded_flag[:-1], 0]
+ Z_t2 = coordinates[ind[1:], grounded_flag[:-1], 2]
+
+ # calculate the relative displacement between two frames
+ X_relative = X_t2 - X_t1
+ Z_relative = Z_t2 - Z_t1
+
+ # calculate the absolute displacement
+ X_abs = np.cumsum(X_relative)
+ Z_abs = np.cumsum(Z_relative)
+
+ body_POS[1:, 0] = -X_abs
+ body_POS[1:, 2] = -Z_abs
+
+ return body_POS
+
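+# A small worked example of the cumulative-sum logic above, with hypothetical
+# foot positions: if the grounded foot's X is [1.0, 1.5, 1.5, 2.0] over four
+# frames, the per-frame slips are [0.5, 0.0, 0.5], so the body X offset
+# becomes [0.0, -0.5, -0.5, -1.0] -- each frame cancels the grounded foot's
+# drift, keeping it pinned in world space.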
+
+def add_keyframes(data, length, part_name, euler, bend, not_body=True, not_head=True, body_steve=False, body_POS=None):
+ for i in range(length):
+ if not_head:
+ keyframes_dict = {
+ "position": i,
+ "part_name": part_name,
+ "values": {
+ "ROT_X": float(euler[i][0]),
+                    "ROT_Y": float(euler[i][2]),  # the Y and Z rotation values in Mine-imator .miframes files are swapped; possibly a Mine-imator bug
+ "ROT_Z": float(euler[i][1]*not_body),
+ "BEND_ANGLE_X": float(bend[i])
+ }
+ }
+ else: # no bend
+ keyframes_dict = {
+ "position": i,
+ "part_name": part_name,
+ "values": {
+ "ROT_X": float(euler[i][0]),
+ "ROT_Y": float(euler[i][2]),
+ "ROT_Z": float(euler[i][1]),
+ }
+ }
+ if body_steve == True:
+ keyframes_dict = {
+ "position": i,
+ "values": {
+ "POS_X": float(body_POS[i][0]),
+ "POS_Y": float(body_POS[i][2]),
+ "POS_Z": float(body_POS[i][1]),
+ "ROT_Z": float(euler[i][1])
+ }
+ }
+ data["keyframes"].append(keyframes_dict)
+
+    print(f"add_keyframes: {part_name}")
+
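+# For reference, each appended keyframe is a plain dict; a body-part entry
+# looks roughly like this (hypothetical values):
+#   {"position": 0, "part_name": "left_arm",
+#    "values": {"ROT_X": 12.3, "ROT_Y": -4.5, "ROT_Z": 0.8, "BEND_ANGLE_X": 20.1}}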
+
+def hpe2keyframes(HPE_filename, FPS_mine_imator, keyframes_filename, prior=True):
+
+ # read data
+ with open(HPE_filename, 'rb') as file:
+ data = np.load(file)
+ print(f"open file: {HPE_filename}")
+ xyz = data.copy()
+ length = xyz.shape[0]
+
+ # extract data from each body part
+ xyz_right_leg = torch.from_numpy(xyz[:, 1:4, :])
+ xyz_right_arm = torch.from_numpy(xyz[:, 14:17, :])
+ xyz_left_leg = torch.from_numpy(xyz[:, 4:7, :])
+ xyz_left_arm = torch.from_numpy(xyz[:, 11:14, :])
+ xyz_body = torch.from_numpy(xyz[:, [0, 7, 8], :])
+ xyz_body_frame = torch.from_numpy(xyz[:, [1, 4, 14, 11], :])
+ xyz_head = torch.from_numpy(xyz[:, [8, 9, 10], :])
+
+ # calculate the absolute euler angles of body
+ euler_body, bend_body, cos_pos_matrix = xyz2euler_body(xyz_body, xyz_body_frame, X_dir=-1, Y_dir=-1)
+
+    # calculate the relative euler angles of the legs with respect to the body ROT_Y
+ euler_right_leg, bend_right_leg = xyz2euler_relative(xyz_right_leg, cos_pos_matrix, leg=True, euler_body=euler_body)
+ euler_left_leg, bend_left_leg = xyz2euler_relative(xyz_left_leg, cos_pos_matrix, leg=True, euler_body=euler_body)
+
+ # calculate the relative euler angles of arm and head with respect to the upper body
+ euler_right_arm, bend_right_arm = xyz2euler_relative(xyz_right_arm, cos_pos_matrix, X_dir=-1)
+ euler_left_arm, bend_left_arm = xyz2euler_relative(xyz_left_arm, cos_pos_matrix, X_dir=-1)
+ euler_head, bend_head = xyz2euler_relative(xyz_head, cos_pos_matrix, Y_dir=-1, head=True)
+
+ # create json format data
+ data = {
+ "format": 34,
+ "created_in": "2.0.0", # mine-imator version
+ "is_model": True,
+ "tempo": FPS_mine_imator, # FPS
+ "length": length, # keyframes length
+ "keyframes": [
+ ],
+ "templates": [],
+ "timelines": [],
+ "resources": []
+ }
+
+    # a relative body offset makes the animation more realistic;
+    # calculate it from the Euler angles and bending angles of the legs
+ body_POS = calculate_body_offset(euler_body, euler_right_leg, bend_right_leg, euler_left_leg, bend_left_leg, prior=prior)
+
+
+ add_keyframes(data, length, "left_leg", euler_left_leg, bend_left_leg)
+ add_keyframes(data, length, "right_leg", euler_right_leg, bend_right_leg)
+ add_keyframes(data, length, "left_arm", euler_left_arm, bend_left_arm)
+ add_keyframes(data, length, "right_arm", euler_right_arm, bend_right_arm)
+ add_keyframes(data, length, "body", euler_body, bend_body, not_body=False)
+ add_keyframes(data, length, "head", euler_head, bend_head, not_head=False)
+ add_keyframes(data, length, "abc", euler_body, bend_body, body_steve=True, body_POS=body_POS) # TODO
+
+ # save json
+ with open(keyframes_filename, "w") as file:
+ json.dump(data, file, indent=4)
+
+    print(f"keyframes file saved successfully, file path: {os.path.abspath(keyframes_filename)}")
+
+
+
+if __name__ == '__main__':
+ # config
+ HPE_filename = "outputs/test_3d_output_malaoshi_2-00_2-30_postprocess.npy"
+ FPS_mine_imator = 30
+ keyframes_filename = "steve_malaoshi2.miframes"
+ prior = True
+ hpe2keyframes(HPE_filename, FPS_mine_imator, keyframes_filename, prior=prior)
+
+    print("Done!")
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/common/arguments.py b/common/arguments.py
new file mode 100644
index 0000000000000000000000000000000000000000..8481672d8d0dd49a50f8f3b06b7040c62e62529d
--- /dev/null
+++ b/common/arguments.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description='Training script')
+
+ # General arguments
+ parser.add_argument('-d', '--dataset', default='h36m', type=str, metavar='NAME', help='target dataset') # h36m or humaneva
+ parser.add_argument('-k', '--keypoints', default='cpn_ft_h36m_dbb', type=str, metavar='NAME', help='2D detections to use')
+ parser.add_argument('-str', '--subjects-train', default='S1,S5,S6,S7,S8', type=str, metavar='LIST',
+ help='training subjects separated by comma')
+ parser.add_argument('-ste', '--subjects-test', default='S9,S11', type=str, metavar='LIST', help='test subjects separated by comma')
+ parser.add_argument('-sun', '--subjects-unlabeled', default='', type=str, metavar='LIST',
+ help='unlabeled subjects separated by comma for self-supervision')
+ parser.add_argument('-a', '--actions', default='*', type=str, metavar='LIST',
+ help='actions to train/test on, separated by comma, or * for all')
+ parser.add_argument('-c', '--checkpoint', default='checkpoint', type=str, metavar='PATH',
+ help='checkpoint directory')
+ parser.add_argument('--checkpoint-frequency', default=10, type=int, metavar='N',
+ help='create a checkpoint every N epochs')
+ parser.add_argument('-r', '--resume', default='', type=str, metavar='FILENAME',
+ help='checkpoint to resume (file name)')
+ parser.add_argument('--evaluate', default='pretrained_h36m_detectron_coco.bin', type=str, metavar='FILENAME', help='checkpoint to evaluate (file name)')
+ parser.add_argument('--render', action='store_true', help='visualize a particular video')
+ parser.add_argument('--by-subject', action='store_true', help='break down error by subject (on evaluation)')
+ parser.add_argument('--export-training-curves', action='store_true', help='save training curves as .png images')
+
+ # Model arguments
+ parser.add_argument('-s', '--stride', default=1, type=int, metavar='N', help='chunk size to use during training')
+ parser.add_argument('-e', '--epochs', default=60, type=int, metavar='N', help='number of training epochs')
+ parser.add_argument('-b', '--batch-size', default=1024, type=int, metavar='N', help='batch size in terms of predicted frames')
+ parser.add_argument('-drop', '--dropout', default=0.25, type=float, metavar='P', help='dropout probability')
+ parser.add_argument('-lr', '--learning-rate', default=0.001, type=float, metavar='LR', help='initial learning rate')
+ parser.add_argument('-lrd', '--lr-decay', default=0.95, type=float, metavar='LR', help='learning rate decay per epoch')
+ parser.add_argument('-no-da', '--no-data-augmentation', dest='data_augmentation', action='store_false',
+ help='disable train-time flipping')
+ parser.add_argument('-no-tta', '--no-test-time-augmentation', dest='test_time_augmentation', action='store_false',
+ help='disable test-time flipping')
+ parser.add_argument('-arc', '--architecture', default='3,3,3,3,3', type=str, metavar='LAYERS', help='filter widths separated by comma')
+ parser.add_argument('--causal', action='store_true', help='use causal convolutions for real-time processing')
+ parser.add_argument('-ch', '--channels', default=1024, type=int, metavar='N', help='number of channels in convolution layers')
+
+ # Experimental
+ parser.add_argument('--subset', default=1, type=float, metavar='FRACTION', help='reduce dataset size by fraction')
+ parser.add_argument('--downsample', default=1, type=int, metavar='FACTOR', help='downsample frame rate by factor (semi-supervised)')
+ parser.add_argument('--warmup', default=1, type=int, metavar='N', help='warm-up epochs for semi-supervision')
+ parser.add_argument('--no-eval', action='store_true', help='disable epoch evaluation while training (small speed-up)')
+ parser.add_argument('--dense', action='store_true', help='use dense convolutions instead of dilated convolutions')
+ parser.add_argument('--disable-optimizations', action='store_true', help='disable optimized model for single-frame predictions')
+ parser.add_argument('--linear-projection', action='store_true', help='use only linear coefficients for semi-supervised projection')
+ parser.add_argument('--no-bone-length', action='store_false', dest='bone_length_term',
+ help='disable bone length term in semi-supervised settings')
+ parser.add_argument('--no-proj', action='store_true', help='disable projection for semi-supervised setting')
+
+ # Visualization
+ parser.add_argument('--viz-subject', type=str, metavar='STR', help='subject to render')
+ parser.add_argument('--viz-action', type=str, metavar='STR', help='action to render')
+ parser.add_argument('--viz-camera', type=int, default=0, metavar='N', help='camera to render')
+ parser.add_argument('--viz-video', type=str, metavar='PATH', help='path to input video')
+ parser.add_argument('--viz-skip', type=int, default=0, metavar='N', help='skip first N frames of input video')
+ parser.add_argument('--viz-output', type=str, metavar='PATH', help='output file name (.gif or .mp4)')
+ parser.add_argument('--viz-bitrate', type=int, default=30000, metavar='N', help='bitrate for mp4 videos')
+ parser.add_argument('--viz-no-ground-truth', action='store_true', help='do not show ground-truth poses')
+ parser.add_argument('--viz-limit', type=int, default=-1, metavar='N', help='only render first N frames')
+ parser.add_argument('--viz-downsample', type=int, default=1, metavar='N', help='downsample FPS by a factor N')
+ parser.add_argument('--viz-size', type=int, default=5, metavar='N', help='image size')
+ # self add
+    parser.add_argument('-in2d', '--input_npz', type=str, default='', help='input 2d numpy file')
+ parser.add_argument('--video', dest='input_video', type=str, default='', help='input video name')
+
+ parser.add_argument('--layers', default=3, type=int)
+ parser.add_argument('--channel', default=256, type=int)
+ parser.add_argument('--d_hid', default=512, type=int)
+ parser.add_argument('-f', '--frames', type=int, default=243)
+ parser.add_argument('--n_joints', type=int, default=17)
+ parser.add_argument('--out_joints', type=int, default=17)
+ parser.add_argument('--in_channels', type=int, default=2)
+ parser.add_argument('--out_channels', type=int, default=3)
+    parser.add_argument('--stride_num', nargs='+', type=int, default=[3, 3, 3, 3, 3])  # note: type=list would split a command-line string into characters
+
+ parser.set_defaults(bone_length_term=True)
+ parser.set_defaults(data_augmentation=True)
+ parser.set_defaults(test_time_augmentation=True)
+
+ args = parser.parse_args()
+ # Check invalid configuration
+ if args.resume and args.evaluate:
+ print('Invalid flags: --resume and --evaluate cannot be set at the same time')
+ exit()
+
+ if args.export_training_curves and args.no_eval:
+ print('Invalid flags: --export-training-curves and --no-eval cannot be set at the same time')
+ exit()
+
+ return args
diff --git a/common/camera.py b/common/camera.py
new file mode 100644
index 0000000000000000000000000000000000000000..d147a36d7024007ef2461883dd6c1907582ee08e
--- /dev/null
+++ b/common/camera.py
@@ -0,0 +1,107 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import numpy as np
+import torch
+
+from common.quaternion import qrot, qinverse
+from common.utils import wrap
+
+
+def normalize_screen_coordinates(X, w, h):
+ assert X.shape[-1] == 2
+
+ # Normalize so that [0, w] is mapped to [-1, 1], while preserving the aspect ratio
+ return X / w * 2 - [1, h / w]
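+
+# Example: with w=1000, h=1002, the image center (500, 501) maps to (0, 0);
+# x spans [-1, 1] and y roughly [-1.002, 1.002], preserving the aspect ratio.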
+
+
+def normalize_screen_coordinates_new(X, w, h):
+ assert X.shape[-1] == 2
+
+ return (X - (w / 2, h / 2)) / (w / 2, h / 2)
+
+
+def image_coordinates_new(X, w, h):
+ assert X.shape[-1] == 2
+
+ # Reverse camera frame normalization
+ return (X * (w / 2, h / 2)) + (w / 2, h / 2)
+
+
+def image_coordinates(X, w, h):
+ assert X.shape[-1] == 2
+
+ # Reverse camera frame normalization
+ return (X + [1, h / w]) * w / 2
+
+
+def world_to_camera(X, R, t):
+ Rt = wrap(qinverse, R) # Invert rotation
+ return wrap(qrot, np.tile(Rt, (*X.shape[:-1], 1)), X - t) # Rotate and translate
+
+
+def camera_to_world(X, R, t):
+ return wrap(qrot, np.tile(R, (*X.shape[:-1], 1)), X) + t
+
+
+def project_to_2d(X, camera_params):
+ """
+ Project 3D points to 2D using the Human3.6M camera projection function.
+ This is a differentiable and batched reimplementation of the original MATLAB script.
+
+ Arguments:
+ X -- 3D points in *camera space* to transform (N, *, 3)
+    camera_params -- intrinsic parameters (N, 2+2+3+2=9)
+ focal length / principal point / radial_distortion / tangential_distortion
+ """
+ assert X.shape[-1] == 3
+ assert len(camera_params.shape) == 2
+ assert camera_params.shape[-1] == 9
+ assert X.shape[0] == camera_params.shape[0]
+
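+    # Broadcast the per-sequence camera parameters across the pose dimensions of X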
+ while len(camera_params.shape) < len(X.shape):
+ camera_params = camera_params.unsqueeze(1)
+
+    f = camera_params[..., :2]  # focal length
+    c = camera_params[..., 2:4]  # principal point
+    k = camera_params[..., 4:7]  # radial distortion coefficients
+    p = camera_params[..., 7:]  # tangential distortion coefficients
+
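+    # Perspective division (x/z, y/z), clamped to [-1, 1] for numerical stability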
+ XX = torch.clamp(X[..., :2] / X[..., 2:], min=-1, max=1)
+ r2 = torch.sum(XX[..., :2] ** 2, dim=len(XX.shape) - 1, keepdim=True)
+
+ radial = 1 + torch.sum(k * torch.cat((r2, r2 ** 2, r2 ** 3), dim=len(r2.shape) - 1), dim=len(r2.shape) - 1, keepdim=True)
+ tan = torch.sum(p * XX, dim=len(XX.shape) - 1, keepdim=True)
+
+ XXX = XX * (radial + tan) + p * r2
+
+ return f * XXX + c
+
+
+def project_to_2d_linear(X, camera_params):
+ """
+    Using only the linear parameters gives slightly different results than the full non-linear model.
+ Project 3D points to 2D using only linear parameters (focal length and principal point).
+
+ Arguments:
+ X -- 3D points in *camera space* to transform (N, *, 3)
+    camera_params -- intrinsic parameters (N, 2+2+3+2=9)
+ """
+ assert X.shape[-1] == 3
+ assert len(camera_params.shape) == 2
+ assert camera_params.shape[-1] == 9
+ assert X.shape[0] == camera_params.shape[0]
+
+ while len(camera_params.shape) < len(X.shape):
+ camera_params = camera_params.unsqueeze(1)
+
+ f = camera_params[..., :2]
+ c = camera_params[..., 2:4]
+
+ XX = torch.clamp(X[..., :2] / X[..., 2:], min=-1, max=1)
+
+ return f * XX + c
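+
+
+# Usage sketch (shapes are illustrative): for X of shape (N, J, 3) in camera
+# space and intrinsics of shape (N, 9), both projection functions return an
+# (N, J, 2) tensor, e.g. X2d = project_to_2d(X, intrinsics).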
diff --git a/common/generators.py b/common/generators.py
new file mode 100644
index 0000000000000000000000000000000000000000..f41dfb77fecc4f09bb5a4778ab9b6c6657c48de7
--- /dev/null
+++ b/common/generators.py
@@ -0,0 +1,425 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+from itertools import zip_longest
+
+import numpy as np
+
+
+class ChunkedGenerator:
+ """
+ Batched data generator, used for training.
+ The sequences are split into equal-length chunks and padded as necessary.
+
+ Arguments:
+ batch_size -- the batch size to use for training
+ cameras -- list of cameras, one element for each video (optional, used for semi-supervised training)
+ poses_3d -- list of ground-truth 3D poses, one element for each video (optional, used for supervised training)
+ poses_2d -- list of input 2D keypoints, one element for each video
+ chunk_length -- number of output frames to predict for each training example (usually 1)
+ pad -- 2D input padding to compensate for valid convolutions, per side (depends on the receptive field)
+ causal_shift -- asymmetric padding offset when causal convolutions are used (usually 0 or "pad")
+ shuffle -- randomly shuffle the dataset before each epoch
+ random_seed -- initial seed to use for the random generator
+ augment -- augment the dataset by flipping poses horizontally
+ kps_left and kps_right -- list of left/right 2D keypoints if flipping is enabled
+ joints_left and joints_right -- list of left/right 3D joints if flipping is enabled
+ """
+
+ def __init__(self, batch_size, cameras, poses_3d, poses_2d,
+ chunk_length, pad=0, causal_shift=0,
+ shuffle=True, random_seed=1234,
+ augment=False, kps_left=None, kps_right=None, joints_left=None, joints_right=None,
+ endless=False):
+ assert poses_3d is None or len(poses_3d) == len(poses_2d), (len(poses_3d), len(poses_2d))
+ assert cameras is None or len(cameras) == len(poses_2d)
+
+ # Build lineage info
+ pairs = [] # (seq_idx, start_frame, end_frame, flip) tuples
+ for i in range(len(poses_2d)):
+            assert poses_3d is None or poses_3d[i].shape[0] == poses_2d[i].shape[0]
+ n_chunks = (poses_2d[i].shape[0] + chunk_length - 1) // chunk_length
+ offset = (n_chunks * chunk_length - poses_2d[i].shape[0]) // 2
+ bounds = np.arange(n_chunks + 1) * chunk_length - offset
+            augment_vector = np.full(len(bounds) - 1, False, dtype=bool)
+            pairs += zip(np.repeat(i, len(bounds) - 1), bounds[:-1], bounds[1:], augment_vector)
+            if augment:
+                pairs += zip(np.repeat(i, len(bounds) - 1), bounds[:-1], bounds[1:], ~augment_vector)
+
+ # Initialize buffers
+ if cameras is not None:
+ self.batch_cam = np.empty((batch_size, cameras[0].shape[-1]))
+ if poses_3d is not None:
+ self.batch_3d = np.empty((batch_size, chunk_length, poses_3d[0].shape[-2], poses_3d[0].shape[-1]))
+ self.batch_2d = np.empty((batch_size, chunk_length + 2 * pad, poses_2d[0].shape[-2], poses_2d[0].shape[-1]))
+
+ self.num_batches = (len(pairs) + batch_size - 1) // batch_size
+ self.batch_size = batch_size
+ self.random = np.random.RandomState(random_seed)
+ self.pairs = pairs
+ self.shuffle = shuffle
+ self.pad = pad
+ self.causal_shift = causal_shift
+ self.endless = endless
+ self.state = None
+
+ self.cameras = cameras
+ self.poses_3d = poses_3d
+ self.poses_2d = poses_2d
+
+ self.augment = augment
+ self.kps_left = kps_left
+ self.kps_right = kps_right
+ self.joints_left = joints_left
+ self.joints_right = joints_right
+
+ def num_frames(self):
+ return self.num_batches * self.batch_size
+
+ def random_state(self):
+ return self.random
+
+ def set_random_state(self, random):
+ self.random = random
+
+ def augment_enabled(self):
+ return self.augment
+
+ def next_pairs(self):
+ if self.state is None:
+ if self.shuffle:
+ pairs = self.random.permutation(self.pairs)
+ else:
+ pairs = self.pairs
+ return 0, pairs
+ else:
+ return self.state
+
+ def next_epoch(self):
+ enabled = True
+ while enabled:
+ start_idx, pairs = self.next_pairs()
+ for b_i in range(start_idx, self.num_batches):
+ chunks = pairs[b_i * self.batch_size: (b_i + 1) * self.batch_size]
+ for i, (seq_i, start_3d, end_3d, flip) in enumerate(chunks):
+ start_2d = start_3d - self.pad - self.causal_shift
+ end_2d = end_3d + self.pad - self.causal_shift
+
+ # 2D poses
+ seq_2d = self.poses_2d[seq_i]
+ low_2d = max(start_2d, 0)
+ high_2d = min(end_2d, seq_2d.shape[0])
+ pad_left_2d = low_2d - start_2d
+ pad_right_2d = end_2d - high_2d
+ if pad_left_2d != 0 or pad_right_2d != 0:
+ self.batch_2d[i] = np.pad(seq_2d[low_2d:high_2d], ((pad_left_2d, pad_right_2d), (0, 0), (0, 0)), 'edge')
+ else:
+ self.batch_2d[i] = seq_2d[low_2d:high_2d]
+
+ if flip:
+ # Flip 2D keypoints
+ self.batch_2d[i, :, :, 0] *= -1
+ self.batch_2d[i, :, self.kps_left + self.kps_right] = self.batch_2d[i, :, self.kps_right + self.kps_left]
+
+ # 3D poses
+ if self.poses_3d is not None:
+ seq_3d = self.poses_3d[seq_i]
+ low_3d = max(start_3d, 0)
+ high_3d = min(end_3d, seq_3d.shape[0])
+ pad_left_3d = low_3d - start_3d
+ pad_right_3d = end_3d - high_3d
+ if pad_left_3d != 0 or pad_right_3d != 0:
+ self.batch_3d[i] = np.pad(seq_3d[low_3d:high_3d], ((pad_left_3d, pad_right_3d), (0, 0), (0, 0)), 'edge')
+ else:
+ self.batch_3d[i] = seq_3d[low_3d:high_3d]
+
+ if flip:
+ # Flip 3D joints
+ self.batch_3d[i, :, :, 0] *= -1
+ self.batch_3d[i, :, self.joints_left + self.joints_right] = \
+ self.batch_3d[i, :, self.joints_right + self.joints_left]
+
+ # Cameras
+ if self.cameras is not None:
+ self.batch_cam[i] = self.cameras[seq_i]
+ if flip:
+ # Flip horizontal distortion coefficients
+ self.batch_cam[i, 2] *= -1
+ self.batch_cam[i, 7] *= -1
+
+ if self.endless:
+ self.state = (b_i + 1, pairs)
+ if self.poses_3d is None and self.cameras is None:
+ yield None, None, self.batch_2d[:len(chunks)]
+ elif self.poses_3d is not None and self.cameras is None:
+ yield None, self.batch_3d[:len(chunks)], self.batch_2d[:len(chunks)]
+ elif self.poses_3d is None:
+ yield self.batch_cam[:len(chunks)], None, self.batch_2d[:len(chunks)]
+ else:
+ yield self.batch_cam[:len(chunks)], self.batch_3d[:len(chunks)], self.batch_2d[:len(chunks)]
+
+ if self.endless:
+ self.state = None
+ else:
+ enabled = False
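+
+
+# Usage sketch (argument values are illustrative):
+#   gen = ChunkedGenerator(1024, cameras, poses_3d, poses_2d, chunk_length=1,
+#                          pad=121, shuffle=True, augment=True,
+#                          kps_left=kps_left, kps_right=kps_right,
+#                          joints_left=joints_left, joints_right=joints_right)
+#   for cam, batch_3d, batch_2d in gen.next_epoch():
+#       ...  # one training batch per iteration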
+
+
+class UnchunkedGenerator:
+ """
+ Non-batched data generator, used for testing.
+ Sequences are returned one at a time (i.e. batch size = 1), without chunking.
+
+ If data augmentation is enabled, the batches contain two sequences (i.e. batch size = 2),
+ the second of which is a mirrored version of the first.
+
+ Arguments:
+ cameras -- list of cameras, one element for each video (optional, used for semi-supervised training)
+ poses_3d -- list of ground-truth 3D poses, one element for each video (optional, used for supervised training)
+ poses_2d -- list of input 2D keypoints, one element for each video
+ pad -- 2D input padding to compensate for valid convolutions, per side (depends on the receptive field)
+ causal_shift -- asymmetric padding offset when causal convolutions are used (usually 0 or "pad")
+ augment -- augment the dataset by flipping poses horizontally
+ kps_left and kps_right -- list of left/right 2D keypoints if flipping is enabled
+ joints_left and joints_right -- list of left/right 3D joints if flipping is enabled
+ """
+
+ def __init__(self, cameras, poses_3d, poses_2d, pad=0, causal_shift=0,
+ augment=False, kps_left=None, kps_right=None, joints_left=None, joints_right=None):
+ assert poses_3d is None or len(poses_3d) == len(poses_2d)
+ assert cameras is None or len(cameras) == len(poses_2d)
+
+ self.augment = augment
+ self.kps_left = kps_left
+ self.kps_right = kps_right
+ self.joints_left = joints_left
+ self.joints_right = joints_right
+
+ self.pad = pad
+ self.causal_shift = causal_shift
+ self.cameras = [] if cameras is None else cameras
+ self.poses_3d = [] if poses_3d is None else poses_3d
+ self.poses_2d = poses_2d
+
+ def num_frames(self):
+ count = 0
+ for p in self.poses_2d:
+ count += p.shape[0]
+ return count
+
+ def augment_enabled(self):
+ return self.augment
+
+ def set_augment(self, augment):
+ self.augment = augment
+
+ def next_epoch(self):
+ for seq_cam, seq_3d, seq_2d in zip_longest(self.cameras, self.poses_3d, self.poses_2d):
+ batch_cam = None if seq_cam is None else np.expand_dims(seq_cam, axis=0)
+ batch_3d = None if seq_3d is None else np.expand_dims(seq_3d, axis=0)
+ # 2D input padding to compensate for valid convolutions, per side (depends on the receptive field)
+ batch_2d = np.expand_dims(np.pad(seq_2d,
+ ((self.pad + self.causal_shift, self.pad - self.causal_shift), (0, 0), (0, 0)),
+ 'edge'), axis=0)
+ if self.augment:
+ # Append flipped version
+ if batch_cam is not None:
+ batch_cam = np.concatenate((batch_cam, batch_cam), axis=0)
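+                    # Index 2 is the horizontal principal point and index 7 the
+                    # first tangential distortion coefficient; both flip sign.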
+ batch_cam[1, 2] *= -1
+ batch_cam[1, 7] *= -1
+
+ if batch_3d is not None:
+ batch_3d = np.concatenate((batch_3d, batch_3d), axis=0)
+ batch_3d[1, :, :, 0] *= -1
+ batch_3d[1, :, self.joints_left + self.joints_right] = batch_3d[1, :, self.joints_right + self.joints_left]
+
+ batch_2d = np.concatenate((batch_2d, batch_2d), axis=0)
+ batch_2d[1, :, :, 0] *= -1
+ batch_2d[1, :, self.kps_left + self.kps_right] = batch_2d[1, :, self.kps_right + self.kps_left]
+
+ yield batch_cam, batch_3d, batch_2d
+
+class Evaluate_Generator:
+ """
+    Batched data generator used for evaluation, with optional test-time flip augmentation.
+ The sequences are split into equal-length chunks and padded as necessary.
+ Arguments:
+ batch_size -- the batch size to use for training
+ cameras -- list of cameras, one element for each video (optional, used for semi-supervised training)
+ poses_3d -- list of ground-truth 3D poses, one element for each video (optional, used for supervised training)
+ poses_2d -- list of input 2D keypoints, one element for each video
+ chunk_length -- number of output frames to predict for each training example (usually 1)
+ pad -- 2D input padding to compensate for valid convolutions, per side (depends on the receptive field)
+ causal_shift -- asymmetric padding offset when causal convolutions are used (usually 0 or "pad")
+ shuffle -- randomly shuffle the dataset before each epoch
+ random_seed -- initial seed to use for the random generator
+ augment -- augment the dataset by flipping poses horizontally
+ kps_left and kps_right -- list of left/right 2D keypoints if flipping is enabled
+ joints_left and joints_right -- list of left/right 3D joints if flipping is enabled
+ """
+
+ def __init__(self, batch_size, cameras, poses_3d, poses_2d,
+ chunk_length, pad=0, causal_shift=0,
+ shuffle=True, random_seed=1234,
+ augment=False, kps_left=None, kps_right=None, joints_left=None, joints_right=None,
+ endless=False):
+ assert poses_3d is None or len(poses_3d) == len(poses_2d), (len(poses_3d), len(poses_2d))
+ assert cameras is None or len(cameras) == len(poses_2d)
+
+ # Build lineage info
+ pairs = [] # (seq_idx, start_frame, end_frame, flip) tuples
+ for i in range(len(poses_2d)):
+            assert poses_3d is None or poses_3d[i].shape[0] == poses_2d[i].shape[0]
+ n_chunks = (poses_2d[i].shape[0] + chunk_length - 1) // chunk_length
+ offset = (n_chunks * chunk_length - poses_2d[i].shape[0]) // 2
+ bounds = np.arange(n_chunks + 1) * chunk_length - offset
+            augment_vector = np.full(len(bounds) - 1, False, dtype=bool)
+            pairs += zip(np.repeat(i, len(bounds) - 1), bounds[:-1], bounds[1:], augment_vector)
+
+ # Initialize buffers
+ if cameras is not None:
+ self.batch_cam = np.empty((batch_size, cameras[0].shape[-1]))
+ if poses_3d is not None:
+ self.batch_3d = np.empty((batch_size, chunk_length, poses_3d[0].shape[-2], poses_3d[0].shape[-1]))
+
+ if augment:
+ self.batch_2d_flip = np.empty(
+ (batch_size, chunk_length + 2 * pad, poses_2d[0].shape[-2], poses_2d[0].shape[-1]))
+ self.batch_2d = np.empty((batch_size, chunk_length + 2 * pad, poses_2d[0].shape[-2], poses_2d[0].shape[-1]))
+ else:
+ self.batch_2d = np.empty((batch_size, chunk_length + 2 * pad, poses_2d[0].shape[-2], poses_2d[0].shape[-1]))
+
+ self.num_batches = (len(pairs) + batch_size - 1) // batch_size
+ self.batch_size = batch_size
+ self.random = np.random.RandomState(random_seed)
+ self.pairs = pairs
+ self.shuffle = shuffle
+ self.pad = pad
+ self.causal_shift = causal_shift
+ self.endless = endless
+ self.state = None
+
+ self.cameras = cameras
+ self.poses_3d = poses_3d
+ self.poses_2d = poses_2d
+
+ self.augment = augment
+ self.kps_left = kps_left
+ self.kps_right = kps_right
+ self.joints_left = joints_left
+ self.joints_right = joints_right
+
+ def num_frames(self):
+ return self.num_batches * self.batch_size
+
+ def random_state(self):
+ return self.random
+
+ def set_random_state(self, random):
+ self.random = random
+
+ def augment_enabled(self):
+ return self.augment
+
+ def next_pairs(self):
+ if self.state is None:
+ if self.shuffle:
+ pairs = self.random.permutation(self.pairs)
+ else:
+ pairs = self.pairs
+ return 0, pairs
+ else:
+ return self.state
+
+ def next_epoch(self):
+ enabled = True
+ while enabled:
+ start_idx, pairs = self.next_pairs()
+ for b_i in range(start_idx, self.num_batches):
+ chunks = pairs[b_i * self.batch_size: (b_i + 1) * self.batch_size]
+ for i, (seq_i, start_3d, end_3d, flip) in enumerate(chunks):
+ start_2d = start_3d - self.pad - self.causal_shift
+ end_2d = end_3d + self.pad - self.causal_shift
+
+ # 2D poses
+ seq_2d = self.poses_2d[seq_i]
+ low_2d = max(start_2d, 0)
+ high_2d = min(end_2d, seq_2d.shape[0])
+ pad_left_2d = low_2d - start_2d
+ pad_right_2d = end_2d - high_2d
+ if pad_left_2d != 0 or pad_right_2d != 0:
+ self.batch_2d[i] = np.pad(seq_2d[low_2d:high_2d], ((pad_left_2d, pad_right_2d), (0, 0), (0, 0)),
+ 'edge')
+ if self.augment:
+ self.batch_2d_flip[i] = np.pad(seq_2d[low_2d:high_2d],
+ ((pad_left_2d, pad_right_2d), (0, 0), (0, 0)),
+ 'edge')
+
+ else:
+ self.batch_2d[i] = seq_2d[low_2d:high_2d]
+ if self.augment:
+ self.batch_2d_flip[i] = seq_2d[low_2d:high_2d]
+
+ if self.augment:
+ self.batch_2d_flip[i, :, :, 0] *= -1
+                        self.batch_2d_flip[i, :, self.kps_left + self.kps_right] = \
+                            self.batch_2d_flip[i, :, self.kps_right + self.kps_left]
+
+ # 3D poses
+ if self.poses_3d is not None:
+ seq_3d = self.poses_3d[seq_i]
+ low_3d = max(start_3d, 0)
+ high_3d = min(end_3d, seq_3d.shape[0])
+ pad_left_3d = low_3d - start_3d
+ pad_right_3d = end_3d - high_3d
+ if pad_left_3d != 0 or pad_right_3d != 0:
+ self.batch_3d[i] = np.pad(seq_3d[low_3d:high_3d],
+ ((pad_left_3d, pad_right_3d), (0, 0), (0, 0)), 'edge')
+ else:
+ self.batch_3d[i] = seq_3d[low_3d:high_3d]
+
+ if flip:
+ self.batch_3d[i, :, :, 0] *= -1
+ self.batch_3d[i, :, self.joints_left + self.joints_right] = \
+ self.batch_3d[i, :, self.joints_right + self.joints_left]
+
+ # Cameras
+ if self.cameras is not None:
+ self.batch_cam[i] = self.cameras[seq_i]
+ if flip:
+ # Flip horizontal distortion coefficients
+ self.batch_cam[i, 2] *= -1
+ self.batch_cam[i, 7] *= -1
+
+ if self.endless:
+ self.state = (b_i + 1, pairs)
+
+ if self.augment:
+                    if self.poses_3d is None and self.cameras is None:
+                        yield None, None, self.batch_2d[:len(chunks)], self.batch_2d_flip[:len(chunks)]
+                    elif self.poses_3d is not None and self.cameras is None:
+                        yield None, self.batch_3d[:len(chunks)], self.batch_2d[:len(chunks)], \
+                            self.batch_2d_flip[:len(chunks)]
+                    elif self.poses_3d is None:
+                        yield self.batch_cam[:len(chunks)], None, self.batch_2d[:len(chunks)], \
+                            self.batch_2d_flip[:len(chunks)]
+                    else:
+                        yield self.batch_cam[:len(chunks)], self.batch_3d[:len(chunks)], \
+                            self.batch_2d[:len(chunks)], self.batch_2d_flip[:len(chunks)]
+ else:
+ if self.poses_3d is None and self.cameras is None:
+ yield None, None, self.batch_2d[:len(chunks)]
+ elif self.poses_3d is not None and self.cameras is None:
+ yield None, self.batch_3d[:len(chunks)], self.batch_2d[:len(chunks)]
+ elif self.poses_3d is None:
+ yield self.batch_cam[:len(chunks)], None, self.batch_2d[:len(chunks)]
+ else:
+ yield self.batch_cam[:len(chunks)], self.batch_3d[:len(chunks)], self.batch_2d[:len(chunks)]
+
+ if self.endless:
+ self.state = None
+ else:
+ enabled = False
\ No newline at end of file
diff --git a/common/h36m_dataset.py b/common/h36m_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d05c760588f7b7fa936dfde239641bf78f2d01b
--- /dev/null
+++ b/common/h36m_dataset.py
@@ -0,0 +1,258 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import copy
+
+import numpy as np
+
+from common.camera import normalize_screen_coordinates
+from common.mocap_dataset import MocapDataset
+from common.skeleton import Skeleton
+
+h36m_skeleton = Skeleton(parents=[-1, 0, 1, 2, 3, 4, 0, 6, 7, 8, 9, 0, 11, 12, 13, 14, 12,
+ 16, 17, 18, 19, 20, 19, 22, 12, 24, 25, 26, 27, 28, 27, 30],
+ joints_left=[6, 7, 8, 9, 10, 16, 17, 18, 19, 20, 21, 22, 23],
+ joints_right=[1, 2, 3, 4, 5, 24, 25, 26, 27, 28, 29, 30, 31])
+
+h36m_cameras_intrinsic_params = [
+ {
+ 'id': '54138969',
+ 'center': [512.54150390625, 515.4514770507812],
+ 'focal_length': [1145.0494384765625, 1143.7811279296875],
+ 'radial_distortion': [-0.20709891617298126, 0.24777518212795258, -0.0030751503072679043],
+ 'tangential_distortion': [-0.0009756988729350269, -0.00142447161488235],
+ 'res_w': 1000,
+ 'res_h': 1002,
+ 'azimuth': 70, # Only used for visualization
+ },
+ {
+ 'id': '55011271',
+ 'center': [508.8486328125, 508.0649108886719],
+ 'focal_length': [1149.6756591796875, 1147.5916748046875],
+ 'radial_distortion': [-0.1942136287689209, 0.2404085397720337, 0.006819975562393665],
+ 'tangential_distortion': [-0.0016190266469493508, -0.0027408944442868233],
+ 'res_w': 1000,
+ 'res_h': 1000,
+ 'azimuth': -70, # Only used for visualization
+ },
+ {
+ 'id': '58860488',
+ 'center': [519.8158569335938, 501.40264892578125],
+ 'focal_length': [1149.1407470703125, 1148.7989501953125],
+ 'radial_distortion': [-0.2083381861448288, 0.25548800826072693, -0.0024604974314570427],
+ 'tangential_distortion': [0.0014843869721516967, -0.0007599993259645998],
+ 'res_w': 1000,
+ 'res_h': 1000,
+ 'azimuth': 110, # Only used for visualization
+ },
+ {
+ 'id': '60457274',
+ 'center': [514.9682006835938, 501.88201904296875],
+ 'focal_length': [1145.5113525390625, 1144.77392578125],
+ 'radial_distortion': [-0.198384091258049, 0.21832367777824402, -0.008947807364165783],
+ 'tangential_distortion': [-0.0005872055771760643, -0.0018133620033040643],
+ 'res_w': 1000,
+ 'res_h': 1002,
+ 'azimuth': -110, # Only used for visualization
+ },
+]
+
+h36m_cameras_extrinsic_params = {
+ 'S1': [
+ {
+ 'orientation': [0.1407056450843811, -0.1500701755285263, -0.755240797996521, 0.6223280429840088],
+ 'translation': [1841.1070556640625, 4955.28466796875, 1563.4454345703125],
+ },
+ {
+ 'orientation': [0.6157187819480896, -0.764836311340332, -0.14833825826644897, 0.11794740706682205],
+ 'translation': [1761.278564453125, -5078.0068359375, 1606.2650146484375],
+ },
+ {
+ 'orientation': [0.14651472866535187, -0.14647851884365082, 0.7653023600578308, -0.6094175577163696],
+ 'translation': [-1846.7777099609375, 5215.04638671875, 1491.972412109375],
+ },
+ {
+ 'orientation': [0.5834008455276489, -0.7853162288665771, 0.14548823237419128, -0.14749594032764435],
+ 'translation': [-1794.7896728515625, -3722.698974609375, 1574.8927001953125],
+ },
+ ],
+ 'S2': [
+ {},
+ {},
+ {},
+ {},
+ ],
+ 'S3': [
+ {},
+ {},
+ {},
+ {},
+ ],
+ 'S4': [
+ {},
+ {},
+ {},
+ {},
+ ],
+ 'S5': [
+ {
+ 'orientation': [0.1467377245426178, -0.162370964884758, -0.7551892995834351, 0.6178938746452332],
+ 'translation': [2097.3916015625, 4880.94482421875, 1605.732421875],
+ },
+ {
+ 'orientation': [0.6159758567810059, -0.7626792192459106, -0.15728192031383514, 0.1189815029501915],
+ 'translation': [2031.7008056640625, -5167.93310546875, 1612.923095703125],
+ },
+ {
+ 'orientation': [0.14291371405124664, -0.12907841801643372, 0.7678384780883789, -0.6110143065452576],
+ 'translation': [-1620.5948486328125, 5171.65869140625, 1496.43701171875],
+ },
+ {
+ 'orientation': [0.5920479893684387, -0.7814217805862427, 0.1274748593568802, -0.15036417543888092],
+ 'translation': [-1637.1737060546875, -3867.3173828125, 1547.033203125],
+ },
+ ],
+ 'S6': [
+ {
+ 'orientation': [0.1337897777557373, -0.15692396461963654, -0.7571090459823608, 0.6198879480361938],
+ 'translation': [1935.4517822265625, 4950.24560546875, 1618.0838623046875],
+ },
+ {
+ 'orientation': [0.6147197484970093, -0.7628812789916992, -0.16174767911434174, 0.11819244921207428],
+ 'translation': [1969.803955078125, -5128.73876953125, 1632.77880859375],
+ },
+ {
+ 'orientation': [0.1529948115348816, -0.13529130816459656, 0.7646096348762512, -0.6112781167030334],
+ 'translation': [-1769.596435546875, 5185.361328125, 1476.993408203125],
+ },
+ {
+ 'orientation': [0.5916101336479187, -0.7804774045944214, 0.12832270562648773, -0.1561593860387802],
+ 'translation': [-1721.668701171875, -3884.13134765625, 1540.4879150390625],
+ },
+ ],
+ 'S7': [
+ {
+ 'orientation': [0.1435241848230362, -0.1631336808204651, -0.7548328638076782, 0.6188824772834778],
+ 'translation': [1974.512939453125, 4926.3544921875, 1597.8326416015625],
+ },
+ {
+ 'orientation': [0.6141672730445862, -0.7638262510299683, -0.1596645563840866, 0.1177929937839508],
+ 'translation': [1937.0584716796875, -5119.7900390625, 1631.5665283203125],
+ },
+ {
+ 'orientation': [0.14550060033798218, -0.12874816358089447, 0.7660516500473022, -0.6127139329910278],
+ 'translation': [-1741.8111572265625, 5208.24951171875, 1464.8245849609375],
+ },
+ {
+ 'orientation': [0.5912848114967346, -0.7821764349937439, 0.12445473670959473, -0.15196487307548523],
+ 'translation': [-1734.7105712890625, -3832.42138671875, 1548.5830078125],
+ },
+ ],
+ 'S8': [
+ {
+ 'orientation': [0.14110587537288666, -0.15589867532253265, -0.7561917304992676, 0.619644045829773],
+ 'translation': [2150.65185546875, 4896.1611328125, 1611.9046630859375],
+ },
+ {
+ 'orientation': [0.6169601678848267, -0.7647668123245239, -0.14846350252628326, 0.11158157885074615],
+ 'translation': [2219.965576171875, -5148.453125, 1613.0440673828125],
+ },
+ {
+ 'orientation': [0.1471444070339203, -0.13377119600772858, 0.7670128345489502, -0.6100369691848755],
+ 'translation': [-1571.2215576171875, 5137.0185546875, 1498.1761474609375],
+ },
+ {
+ 'orientation': [0.5927824378013611, -0.7825870513916016, 0.12147816270589828, -0.14631995558738708],
+ 'translation': [-1476.913330078125, -3896.7412109375, 1547.97216796875],
+ },
+ ],
+ 'S9': [
+ {
+ 'orientation': [0.15540587902069092, -0.15548215806484222, -0.7532095313072205, 0.6199594736099243],
+ 'translation': [2044.45849609375, 4935.1171875, 1481.2275390625],
+ },
+ {
+ 'orientation': [0.618784487247467, -0.7634735107421875, -0.14132238924503326, 0.11933968216180801],
+ 'translation': [1990.959716796875, -5123.810546875, 1568.8048095703125],
+ },
+ {
+ 'orientation': [0.13357827067375183, -0.1367100477218628, 0.7689454555511475, -0.6100738644599915],
+ 'translation': [-1670.9921875, 5211.98583984375, 1528.387939453125],
+ },
+ {
+ 'orientation': [0.5879399180412292, -0.7823407053947449, 0.1427614390850067, -0.14794869720935822],
+ 'translation': [-1696.04345703125, -3827.099853515625, 1591.4127197265625],
+ },
+ ],
+ 'S11': [
+ {
+ 'orientation': [0.15232472121715546, -0.15442320704460144, -0.7547563314437866, 0.6191070079803467],
+ 'translation': [2098.440185546875, 4926.5546875, 1500.278564453125],
+ },
+ {
+ 'orientation': [0.6189449429512024, -0.7600917220115662, -0.15300633013248444, 0.1255258321762085],
+ 'translation': [2083.182373046875, -4912.1728515625, 1561.07861328125],
+ },
+ {
+ 'orientation': [0.14943228662014008, -0.15650227665901184, 0.7681233882904053, -0.6026304364204407],
+ 'translation': [-1609.8153076171875, 5177.3359375, 1537.896728515625],
+ },
+ {
+ 'orientation': [0.5894251465797424, -0.7818877100944519, 0.13991211354732513, -0.14715361595153809],
+ 'translation': [-1590.738037109375, -3854.1689453125, 1578.017578125],
+ },
+ ],
+}
+
+
+class Human36mDataset(MocapDataset):
+ def __init__(self, path, remove_static_joints=True):
+ super().__init__(fps=50, skeleton=h36m_skeleton)
+
+ self._cameras = copy.deepcopy(h36m_cameras_extrinsic_params)
+ for cameras in self._cameras.values():
+ for i, cam in enumerate(cameras):
+ cam.update(h36m_cameras_intrinsic_params[i])
+ for k, v in cam.items():
+ if k not in ['id', 'res_w', 'res_h']:
+ cam[k] = np.array(v, dtype='float32')
+
+ # Normalize camera frame
+ cam['center'] = normalize_screen_coordinates(cam['center'], w=cam['res_w'], h=cam['res_h']).astype('float32')
+ cam['focal_length'] = cam['focal_length'] / cam['res_w'] * 2
+ if 'translation' in cam:
+ cam['translation'] = cam['translation'] / 1000 # mm to meters
+
+ # Add intrinsic parameters vector
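+                # The resulting 9-D vector (focal, center, radial, tangential)
+                # matches the layout expected by project_to_2d in common/camera.py.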
+ cam['intrinsic'] = np.concatenate((cam['focal_length'],
+ cam['center'],
+ cam['radial_distortion'],
+ cam['tangential_distortion']))
+
+ # Load serialized dataset
+        data = np.load(path, allow_pickle=True)['positions_3d'].item()
+
+ self._data = {}
+ for subject, actions in data.items():
+ self._data[subject] = {}
+ for action_name, positions in actions.items():
+ self._data[subject][action_name] = {
+ 'positions': positions,
+ 'cameras': self._cameras[subject],
+ }
+
+ if remove_static_joints:
+ # Bring the skeleton to 17 joints instead of the original 32
+ self.remove_joints([4, 5, 9, 10, 11, 16, 20, 21, 22, 23, 24, 28, 29, 30, 31])
+
+ # Rewire shoulders to the correct parents
+ self._skeleton._parents[11] = 8
+ self._skeleton._parents[14] = 8
+
+ def supports_semi_supervised(self):
+ return True
diff --git a/common/humaneva_dataset.py b/common/humaneva_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..5dbfe023e75af62a2326a4de5af6675776379ed3
--- /dev/null
+++ b/common/humaneva_dataset.py
@@ -0,0 +1,122 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import copy
+
+import numpy as np
+
+from common.mocap_dataset import MocapDataset
+from common.skeleton import Skeleton
+
+humaneva_skeleton = Skeleton(parents=[-1, 0, 1, 2, 3, 1, 5, 6, 0, 8, 9, 0, 11, 12, 1],
+ joints_left=[2, 3, 4, 8, 9, 10],
+ joints_right=[5, 6, 7, 11, 12, 13])
+
+humaneva_cameras_intrinsic_params = [
+ {
+ 'id': 'C1',
+ 'res_w': 640,
+ 'res_h': 480,
+ 'azimuth': 0, # Only used for visualization
+ },
+ {
+ 'id': 'C2',
+ 'res_w': 640,
+ 'res_h': 480,
+ 'azimuth': -90, # Only used for visualization
+ },
+ {
+ 'id': 'C3',
+ 'res_w': 640,
+ 'res_h': 480,
+ 'azimuth': 90, # Only used for visualization
+ },
+]
+
+humaneva_cameras_extrinsic_params = {
+ 'S1': [
+ {
+ 'orientation': [0.424207, -0.4983646, -0.5802981, 0.4847012],
+ 'translation': [4062.227, 663.2477, 1528.397],
+ },
+ {
+ 'orientation': [0.6503354, -0.7481602, -0.0919284, 0.0941766],
+ 'translation': [844.8131, -3805.2092, 1504.9929],
+ },
+ {
+ 'orientation': [0.0664734, -0.0690535, 0.7416416, -0.6639132],
+ 'translation': [-797.67377, 3916.3174, 1433.6602],
+ },
+ ],
+ 'S2': [
+ {
+ 'orientation': [0.4214752, -0.4961493, -0.5838273, 0.4851187],
+ 'translation': [4112.9121, 626.4929, 1545.2988],
+ },
+ {
+ 'orientation': [0.6501393, -0.7476588, -0.0954617, 0.0959808],
+ 'translation': [923.5740, -3877.9243, 1504.5518],
+ },
+ {
+ 'orientation': [0.0699353, -0.0712403, 0.7421637, -0.662742],
+ 'translation': [-781.4915, 3838.8853, 1444.9929],
+ },
+ ],
+ 'S3': [
+ {
+ 'orientation': [0.424207, -0.4983646, -0.5802981, 0.4847012],
+ 'translation': [4062.2271, 663.2477, 1528.3970],
+ },
+ {
+ 'orientation': [0.6503354, -0.7481602, -0.0919284, 0.0941766],
+ 'translation': [844.8131, -3805.2092, 1504.9929],
+ },
+ {
+ 'orientation': [0.0664734, -0.0690535, 0.7416416, -0.6639132],
+ 'translation': [-797.6738, 3916.3174, 1433.6602],
+ },
+ ],
+ 'S4': [
+ {},
+ {},
+ {},
+ ],
+
+}
+
+
+class HumanEvaDataset(MocapDataset):
+ def __init__(self, path):
+ super().__init__(fps=60, skeleton=humaneva_skeleton)
+
+ self._cameras = copy.deepcopy(humaneva_cameras_extrinsic_params)
+ for cameras in self._cameras.values():
+ for i, cam in enumerate(cameras):
+ cam.update(humaneva_cameras_intrinsic_params[i])
+ for k, v in cam.items():
+ if k not in ['id', 'res_w', 'res_h']:
+ cam[k] = np.array(v, dtype='float32')
+ if 'translation' in cam:
+ cam['translation'] = cam['translation'] / 1000 # mm to meters
+
+ for subject in list(self._cameras.keys()):
+ data = self._cameras[subject]
+ del self._cameras[subject]
+ for prefix in ['Train/', 'Validate/', 'Unlabeled/Train/', 'Unlabeled/Validate/', 'Unlabeled/']:
+ self._cameras[prefix + subject] = data
+
+ # Load serialized dataset
+        data = np.load(path, allow_pickle=True)['positions_3d'].item()
+
+ self._data = {}
+ for subject, actions in data.items():
+ self._data[subject] = {}
+ for action_name, positions in actions.items():
+ self._data[subject][action_name] = {
+ 'positions': positions,
+ 'cameras': self._cameras[subject],
+ }
diff --git a/common/inference_3d.py b/common/inference_3d.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9a8da8e74306322a675b8c3b71e308816c6daee
--- /dev/null
+++ b/common/inference_3d.py
@@ -0,0 +1,107 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+import torch
+from torch.autograd import Variable
+
+def get_variable(target):
+ num = len(target)
+ var = []
+
+ for i in range(num):
+ temp = Variable(target[i]).contiguous().cuda().type(torch.cuda.FloatTensor)
+ var.append(temp)
+
+ return var
+
+
+def input_augmentation(input_2D, input_2D_flip, model_trans, joints_left, joints_right):
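+    """
+    Test-time flip augmentation: run the model on both the original input and a
+    horizontally flipped copy, un-flip the second prediction (negate x and swap
+    left/right joints), then average the two outputs.
+    """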
+ B, T, J, C = input_2D.shape
+
+ input_2D_flip = input_2D_flip.view(B, T, J, C, 1).permute(0, 3, 1, 2, 4)
+ input_2D_non_flip = input_2D.view(B, T, J, C, 1).permute(0, 3, 1, 2, 4)
+
+ output_3D_flip, output_3D_flip_VTE = model_trans(input_2D_flip)
+
+ output_3D_flip_VTE[:, 0] *= -1
+ output_3D_flip[:, 0] *= -1
+
+ output_3D_flip_VTE[:, :, :, joints_left + joints_right] = output_3D_flip_VTE[:, :, :, joints_right + joints_left]
+ output_3D_flip[:, :, :, joints_left + joints_right] = output_3D_flip[:, :, :, joints_right + joints_left]
+
+ output_3D_non_flip, output_3D_non_flip_VTE = model_trans(input_2D_non_flip)
+
+ output_3D_VTE = (output_3D_non_flip_VTE + output_3D_flip_VTE) / 2
+ output_3D = (output_3D_non_flip + output_3D_flip) / 2
+
+ input_2D = input_2D_non_flip
+
+ return input_2D, output_3D, output_3D_VTE
+
+def step(opt, dataLoader, model, optimizer=None, epoch=None):
+ model_trans = model['trans']
+
+ model_trans.eval()
+
+ joints_left = [4, 5, 6, 11, 12, 13]
+ joints_right = [1, 2, 3, 14, 15, 16]
+    epoch_cnt = 0
+ out = []
+ for _, batch, batch_2d, batch_2d_flip in dataLoader.next_epoch():
+        # [gt_3D, input_2D] = get_variable([batch, batch_2d])
+ #input_2D = Variable(batch_2d).contiguous().cuda().type(torch.cuda.FloatTensor)
+ input_2D = torch.from_numpy(batch_2d.astype('float32'))
+ input_2D_flip = torch.from_numpy(batch_2d_flip.astype('float32'))
+ if torch.cuda.is_available():
+ input_2D = input_2D.cuda()
+ input_2D_flip = input_2D_flip.cuda()
+
+ N = input_2D.size(0)
+
+ input_2D, output_3D, output_3D_VTE = input_augmentation(input_2D, input_2D_flip, model_trans, joints_left, joints_right)
+
+ output_3D_VTE = output_3D_VTE.permute(0, 2, 3, 4, 1).contiguous().view(N, -1, opt.out_joints, opt.out_channels)
+ output_3D = output_3D.permute(0, 2, 3, 4, 1).contiguous().view(N, -1, opt.out_joints, opt.out_channels)
+
+ output_3D_single = output_3D
+
+ pred_out = output_3D_single
+
+ input_2D = input_2D.permute(0, 2, 3, 1, 4).view(N, -1, opt.n_joints, 2)
+
+ pred_out[:, :, 0, :] = 0
+
+ if epoch_cnt == 0:
+ out = pred_out.squeeze(1).cpu()
+ else:
+ out = torch.cat((out, pred_out.squeeze(1).cpu()), dim=0)
+        epoch_cnt += 1
+ return out.numpy()
+
+def val(opt, val_loader, model):
+ with torch.no_grad():
+ return step(opt, val_loader, model)
\ No newline at end of file
diff --git a/common/jpt_arguments.py b/common/jpt_arguments.py
new file mode 100644
index 0000000000000000000000000000000000000000..f56b9b3907062693da5835c5047e3cbebb39542a
--- /dev/null
+++ b/common/jpt_arguments.py
@@ -0,0 +1,92 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description='Training script')
+
+ # General arguments
+ parser.add_argument('-d', '--dataset', default='h36m', type=str, metavar='NAME', help='target dataset') # h36m or humaneva
+ parser.add_argument('-k', '--keypoints', default='cpn_ft_h36m_dbb', type=str, metavar='NAME', help='2D detections to use')
+ parser.add_argument('-str', '--subjects-train', default='S1,S5,S6,S7,S8', type=str, metavar='LIST',
+ help='training subjects separated by comma')
+ parser.add_argument('-ste', '--subjects-test', default='S9,S11', type=str, metavar='LIST', help='test subjects separated by comma')
+ parser.add_argument('-sun', '--subjects-unlabeled', default='', type=str, metavar='LIST',
+ help='unlabeled subjects separated by comma for self-supervision')
+ parser.add_argument('-a', '--actions', default='*', type=str, metavar='LIST',
+ help='actions to train/test on, separated by comma, or * for all')
+ parser.add_argument('-c', '--checkpoint', default='checkpoint', type=str, metavar='PATH',
+ help='checkpoint directory')
+ parser.add_argument('--checkpoint-frequency', default=10, type=int, metavar='N',
+ help='create a checkpoint every N epochs')
+ parser.add_argument('-r', '--resume', default='', type=str, metavar='FILENAME',
+ help='checkpoint to resume (file name)')
+ parser.add_argument('--evaluate', default='pretrained_h36m_detectron_coco.bin', type=str, metavar='FILENAME', help='checkpoint to evaluate (file name)')
+ parser.add_argument('--render', action='store_true', help='visualize a particular video')
+ parser.add_argument('--by-subject', action='store_true', help='break down error by subject (on evaluation)')
+ parser.add_argument('--export-training-curves', action='store_true', help='save training curves as .png images')
+
+ # Model arguments
+ parser.add_argument('-s', '--stride', default=1, type=int, metavar='N', help='chunk size to use during training')
+ parser.add_argument('-e', '--epochs', default=60, type=int, metavar='N', help='number of training epochs')
+ parser.add_argument('-b', '--batch-size', default=1024, type=int, metavar='N', help='batch size in terms of predicted frames')
+ parser.add_argument('-drop', '--dropout', default=0.25, type=float, metavar='P', help='dropout probability')
+ parser.add_argument('-lr', '--learning-rate', default=0.001, type=float, metavar='LR', help='initial learning rate')
+ parser.add_argument('-lrd', '--lr-decay', default=0.95, type=float, metavar='LR', help='learning rate decay per epoch')
+ parser.add_argument('-no-da', '--no-data-augmentation', dest='data_augmentation', action='store_false',
+ help='disable train-time flipping')
+ parser.add_argument('-no-tta', '--no-test-time-augmentation', dest='test_time_augmentation', action='store_false',
+ help='disable test-time flipping')
+ parser.add_argument('-arc', '--architecture', default='3,3,3,3,3', type=str, metavar='LAYERS', help='filter widths separated by comma')
+ parser.add_argument('--causal', action='store_true', help='use causal convolutions for real-time processing')
+ parser.add_argument('-ch', '--channels', default=1024, type=int, metavar='N', help='number of channels in convolution layers')
+
+ # Experimental
+ parser.add_argument('--subset', default=1, type=float, metavar='FRACTION', help='reduce dataset size by fraction')
+ parser.add_argument('--downsample', default=1, type=int, metavar='FACTOR', help='downsample frame rate by factor (semi-supervised)')
+ parser.add_argument('--warmup', default=1, type=int, metavar='N', help='warm-up epochs for semi-supervision')
+ parser.add_argument('--no-eval', action='store_true', help='disable epoch evaluation while training (small speed-up)')
+ parser.add_argument('--dense', action='store_true', help='use dense convolutions instead of dilated convolutions')
+ parser.add_argument('--disable-optimizations', action='store_true', help='disable optimized model for single-frame predictions')
+ parser.add_argument('--linear-projection', action='store_true', help='use only linear coefficients for semi-supervised projection')
+ parser.add_argument('--no-bone-length', action='store_false', dest='bone_length_term',
+ help='disable bone length term in semi-supervised settings')
+ parser.add_argument('--no-proj', action='store_true', help='disable projection for semi-supervised setting')
+
+ # Visualization
+ parser.add_argument('--viz-subject', type=str, metavar='STR', help='subject to render')
+ parser.add_argument('--viz-action', type=str, metavar='STR', help='action to render')
+ parser.add_argument('--viz-camera', type=int, default=0, metavar='N', help='camera to render')
+ parser.add_argument('--viz-video', type=str, metavar='PATH', help='path to input video')
+ parser.add_argument('--viz-skip', type=int, default=0, metavar='N', help='skip first N frames of input video')
+ parser.add_argument('--viz-output', type=str, metavar='PATH', help='output file name (.gif or .mp4)')
+ parser.add_argument('--viz-bitrate', type=int, default=30000, metavar='N', help='bitrate for mp4 videos')
+ parser.add_argument('--viz-no-ground-truth', action='store_true', help='do not show ground-truth poses')
+ parser.add_argument('--viz-limit', type=int, default=-1, metavar='N', help='only render first N frames')
+ parser.add_argument('--viz-downsample', type=int, default=1, metavar='N', help='downsample FPS by a factor N')
+ parser.add_argument('--viz-size', type=int, default=5, metavar='N', help='image size')
+    # self-added arguments
+ parser.add_argument('--input-npz', dest='input_npz', type=str, default='', help='input 2d numpy file')
+
+ parser.set_defaults(bone_length_term=True)
+ parser.set_defaults(data_augmentation=True)
+ parser.set_defaults(test_time_augmentation=True)
+
+ args = parser.parse_args(args=[])
+ # Check invalid configuration
+ if args.resume and args.evaluate:
+ print('Invalid flags: --resume and --evaluate cannot be set at the same time')
+ exit()
+
+ if args.export_training_curves and args.no_eval:
+ print('Invalid flags: --export-training-curves and --no-eval cannot be set at the same time')
+ exit()
+
+ return args
diff --git a/common/loss.py b/common/loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..12e5f437f89a137eff7580f12e37677c7caf797d
--- /dev/null
+++ b/common/loss.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import numpy as np
+import torch
+
+
+def mpjpe(predicted, target):
+ """
+ Mean per-joint position error (i.e. mean Euclidean distance),
+ often referred to as "Protocol #1" in many papers.
+ """
+ assert predicted.shape == target.shape
+ return torch.mean(torch.norm(predicted - target, dim=len(target.shape) - 1))
+
+
+def weighted_mpjpe(predicted, target, w):
+ """
+ Weighted mean per-joint position error (i.e. mean Euclidean distance)
+ """
+ assert predicted.shape == target.shape
+ assert w.shape[0] == predicted.shape[0]
+ return torch.mean(w * torch.norm(predicted - target, dim=len(target.shape) - 1))
+
+
+def p_mpjpe(predicted, target):
+ """
+ Pose error: MPJPE after rigid alignment (scale, rotation, and translation),
+ often referred to as "Protocol #2" in many papers.
+ """
+ assert predicted.shape == target.shape
+
+ muX = np.mean(target, axis=1, keepdims=True)
+ muY = np.mean(predicted, axis=1, keepdims=True)
+
+ X0 = target - muX
+ Y0 = predicted - muY
+
+ normX = np.sqrt(np.sum(X0 ** 2, axis=(1, 2), keepdims=True))
+ normY = np.sqrt(np.sum(Y0 ** 2, axis=(1, 2), keepdims=True))
+
+ X0 /= normX
+ Y0 /= normY
+
+ H = np.matmul(X0.transpose(0, 2, 1), Y0)
+ U, s, Vt = np.linalg.svd(H)
+ V = Vt.transpose(0, 2, 1)
+ R = np.matmul(V, U.transpose(0, 2, 1))
+
+ # Avoid improper rotations (reflections), i.e. rotations with det(R) = -1
+ sign_detR = np.sign(np.expand_dims(np.linalg.det(R), axis=1))
+ V[:, :, -1] *= sign_detR
+ s[:, -1] *= sign_detR.flatten()
+ R = np.matmul(V, U.transpose(0, 2, 1)) # Rotation
+
+ tr = np.expand_dims(np.sum(s, axis=1, keepdims=True), axis=2)
+
+ a = tr * normX / normY # Scale
+ t = muX - a * np.matmul(muY, R) # Translation
+
+ # Perform rigid transformation on the input
+ predicted_aligned = a * np.matmul(predicted, R) + t
+
+ # Return MPJPE
+ return np.mean(np.linalg.norm(predicted_aligned - target, axis=len(target.shape) - 1))
+
+
+def n_mpjpe(predicted, target):
+ """
+ Normalized MPJPE (scale only), adapted from:
+ https://github.com/hrhodin/UnsupervisedGeometryAwareRepresentationLearning/blob/master/losses/poses.py
+ """
+ assert predicted.shape == target.shape
+
+ norm_predicted = torch.mean(torch.sum(predicted ** 2, dim=3, keepdim=True), dim=2, keepdim=True)
+ norm_target = torch.mean(torch.sum(target * predicted, dim=3, keepdim=True), dim=2, keepdim=True)
+ scale = norm_target / norm_predicted
+ return mpjpe(scale * predicted, target)
+
+
+def mean_velocity_error(predicted, target):
+ """
+ Mean per-joint velocity error (i.e. mean Euclidean distance of the 1st derivative)
+ """
+ assert predicted.shape == target.shape
+
+ velocity_predicted = np.diff(predicted, axis=0)
+ velocity_target = np.diff(target, axis=0)
+
+ return np.mean(np.linalg.norm(velocity_predicted - velocity_target, axis=len(target.shape) - 1))
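+
+
+# Usage sketch (shapes are illustrative): predicted and target are (N, J, 3).
+# mpjpe and n_mpjpe expect torch tensors, while p_mpjpe and mean_velocity_error
+# expect NumPy arrays, e.g. err = mpjpe(torch.from_numpy(pred), torch.from_numpy(gt)).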
diff --git a/common/mocap_dataset.py b/common/mocap_dataset.py
new file mode 100644
index 0000000000000000000000000000000000000000..1697e5b9659639b813d58fa4435fe957ead721cc
--- /dev/null
+++ b/common/mocap_dataset.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+
+class MocapDataset:
+ def __init__(self, fps, skeleton):
+ self._skeleton = skeleton
+ self._fps = fps
+ self._data = None # Must be filled by subclass
+ self._cameras = None # Must be filled by subclass
+
+ def remove_joints(self, joints_to_remove):
+ kept_joints = self._skeleton.remove_joints(joints_to_remove)
+ for subject in self._data.keys():
+ for action in self._data[subject].keys():
+ s = self._data[subject][action]
+ s['positions'] = s['positions'][:, kept_joints]
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def subjects(self):
+ return self._data.keys()
+
+ def fps(self):
+ return self._fps
+
+ def skeleton(self):
+ return self._skeleton
+
+ def cameras(self):
+ return self._cameras
+
+ def supports_semi_supervised(self):
+ # This method can be overridden
+ return False
diff --git a/common/model.py b/common/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..8be5a30e9847cdf7a93403c1a5f8a4e497946829
--- /dev/null
+++ b/common/model.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import torch.nn as nn
+
+
+class TemporalModelBase(nn.Module):
+ """
+ Do not instantiate this class.
+ """
+
+ def __init__(self, num_joints_in, in_features, num_joints_out,
+ filter_widths, causal, dropout, channels):
+ super().__init__()
+
+ # Validate input
+ for fw in filter_widths:
+ assert fw % 2 != 0, 'Only odd filter widths are supported'
+
+ self.num_joints_in = num_joints_in
+ self.in_features = in_features
+ self.num_joints_out = num_joints_out
+ self.filter_widths = filter_widths
+
+ self.drop = nn.Dropout(dropout)
+ self.relu = nn.ReLU(inplace=True)
+
+ self.pad = [filter_widths[0] // 2]
+ self.expand_bn = nn.BatchNorm1d(channels, momentum=0.1)
+ self.shrink = nn.Conv1d(channels, num_joints_out * 3, 1)
+
+ def set_bn_momentum(self, momentum):
+ self.expand_bn.momentum = momentum
+ for bn in self.layers_bn:
+ bn.momentum = momentum
+
+ def receptive_field(self):
+ """
+ Return the total receptive field of this model as # of frames.
+ """
+ frames = 0
+ for f in self.pad:
+ frames += f
+ return 1 + 2 * frames
+
+ def total_causal_shift(self):
+ """
+ Return the asymmetric offset for sequence padding.
+ The returned value is typically 0 if causal convolutions are disabled,
+ otherwise it is half the receptive field.
+ """
+ frames = self.causal_shift[0]
+ next_dilation = self.filter_widths[0]
+ for i in range(1, len(self.filter_widths)):
+ frames += self.causal_shift[i] * next_dilation
+ next_dilation *= self.filter_widths[i]
+ return frames
+
+ def forward(self, x):
+ assert len(x.shape) == 4
+ assert x.shape[-2] == self.num_joints_in
+ assert x.shape[-1] == self.in_features
+
+ sz = x.shape[:3]
+ x = x.view(x.shape[0], x.shape[1], -1)
+ x = x.permute(0, 2, 1)
+
+ x = self._forward_blocks(x)
+
+ x = x.permute(0, 2, 1)
+ x = x.view(sz[0], -1, self.num_joints_out, 3)
+
+ return x
+
+
+class TemporalModel(TemporalModelBase):
+ """
+ Reference 3D pose estimation model with temporal convolutions.
+ This implementation can be used for all use-cases.
+ """
+
+ def __init__(self, num_joints_in, in_features, num_joints_out,
+ filter_widths, causal=False, dropout=0.25, channels=1024, dense=False):
+ """
+ Initialize this model.
+
+ Arguments:
+ num_joints_in -- number of input joints (e.g. 17 for Human3.6M)
+ in_features -- number of input features for each joint (typically 2 for 2D input)
+        num_joints_out -- number of output joints (can differ from the number of input joints)
+ filter_widths -- list of convolution widths, which also determines the # of blocks and receptive field
+ causal -- use causal convolutions instead of symmetric convolutions (for real-time applications)
+ dropout -- dropout probability
+ channels -- number of convolution channels
+ dense -- use regular dense convolutions instead of dilated convolutions (ablation experiment)
+ """
+ super().__init__(num_joints_in, in_features, num_joints_out, filter_widths, causal, dropout, channels)
+
+ self.expand_conv = nn.Conv1d(num_joints_in * in_features, channels, filter_widths[0], bias=False)
+
+ layers_conv = []
+ layers_bn = []
+
+ self.causal_shift = [(filter_widths[0]) // 2 if causal else 0]
+ next_dilation = filter_widths[0]
+ for i in range(1, len(filter_widths)):
+ self.pad.append((filter_widths[i] - 1) * next_dilation // 2)
+ self.causal_shift.append((filter_widths[i] // 2 * next_dilation) if causal else 0)
+
+ layers_conv.append(nn.Conv1d(channels, channels,
+ filter_widths[i] if not dense else (2 * self.pad[-1] + 1),
+ dilation=next_dilation if not dense else 1,
+ bias=False))
+ layers_bn.append(nn.BatchNorm1d(channels, momentum=0.1))
+ layers_conv.append(nn.Conv1d(channels, channels, 1, dilation=1, bias=False))
+ layers_bn.append(nn.BatchNorm1d(channels, momentum=0.1))
+
+ next_dilation *= filter_widths[i]
+
+ self.layers_conv = nn.ModuleList(layers_conv)
+ self.layers_bn = nn.ModuleList(layers_bn)
+
+ def _forward_blocks(self, x):
+ x = self.drop(self.relu(self.expand_bn(self.expand_conv(x))))
+
+ for i in range(len(self.pad) - 1):
+ pad = self.pad[i + 1]
+ shift = self.causal_shift[i + 1]
+ # clip
+ res = x[:, :, pad + shift: x.shape[2] - pad + shift]
+
+ x = self.drop(self.relu(self.layers_bn[2 * i](self.layers_conv[2 * i](x))))
+ x = res + self.drop(self.relu(self.layers_bn[2 * i + 1](self.layers_conv[2 * i + 1](x))))
+
+ x = self.shrink(x)
+ return x
+
+
+class TemporalModelOptimized1f(TemporalModelBase):
+ """
+ 3D pose estimation model optimized for single-frame batching, i.e.
+ where batches have input length = receptive field, and output length = 1.
+ This scenario is only used for training when stride == 1.
+
+ This implementation replaces dilated convolutions with strided convolutions
+ to avoid generating unused intermediate results. The weights are interchangeable
+ with the reference implementation.
+ """
+
+ def __init__(self, num_joints_in, in_features, num_joints_out,
+ filter_widths, causal=False, dropout=0.25, channels=1024):
+ """
+ Initialize this model.
+
+ Arguments:
+ num_joints_in -- number of input joints (e.g. 17 for Human3.6M)
+ in_features -- number of input features for each joint (typically 2 for 2D input)
+        num_joints_out -- number of output joints (can differ from the number of input joints)
+ filter_widths -- list of convolution widths, which also determines the # of blocks and receptive field
+ causal -- use causal convolutions instead of symmetric convolutions (for real-time applications)
+ dropout -- dropout probability
+ channels -- number of convolution channels
+ """
+ super().__init__(num_joints_in, in_features, num_joints_out, filter_widths, causal, dropout, channels)
+
+ self.expand_conv = nn.Conv1d(num_joints_in * in_features, channels, filter_widths[0], stride=filter_widths[0], bias=False)
+
+ layers_conv = []
+ layers_bn = []
+
+ self.causal_shift = [(filter_widths[0] // 2) if causal else 0]
+ next_dilation = filter_widths[0]
+ for i in range(1, len(filter_widths)):
+ self.pad.append((filter_widths[i] - 1) * next_dilation // 2)
+ self.causal_shift.append((filter_widths[i] // 2) if causal else 0)
+
+ layers_conv.append(nn.Conv1d(channels, channels, filter_widths[i], stride=filter_widths[i], bias=False))
+ layers_bn.append(nn.BatchNorm1d(channels, momentum=0.1))
+ layers_conv.append(nn.Conv1d(channels, channels, 1, dilation=1, bias=False))
+ layers_bn.append(nn.BatchNorm1d(channels, momentum=0.1))
+ next_dilation *= filter_widths[i]
+
+ self.layers_conv = nn.ModuleList(layers_conv)
+ self.layers_bn = nn.ModuleList(layers_bn)
+
+ def _forward_blocks(self, x):
+ x = self.drop(self.relu(self.expand_bn(self.expand_conv(x))))
+
+ for i in range(len(self.pad) - 1):
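+            # Strided residual: keep every filter_widths[i+1]-th frame (offset by
+            # the causal shift) so the skip connection matches the strided conv output.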
+ res = x[:, :, self.causal_shift[i + 1] + self.filter_widths[i + 1] // 2:: self.filter_widths[i + 1]]
+
+ x = self.drop(self.relu(self.layers_bn[2 * i](self.layers_conv[2 * i](x))))
+ x = res + self.drop(self.relu(self.layers_bn[2 * i + 1](self.layers_conv[2 * i + 1](x))))
+
+ x = self.shrink(x)
+ return x
diff --git a/common/quaternion.py b/common/quaternion.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8d700f358bfaf2d453a2a403a611e88d117595e
--- /dev/null
+++ b/common/quaternion.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import torch
+
+
+def qrot(q, v):
+ """
+    Rotate vector(s) v about the rotation described by quaternion(s) q.
+ Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3) for v,
+ where * denotes any number of dimensions.
+ Returns a tensor of shape (*, 3).
+ """
+ assert q.shape[-1] == 4
+ assert v.shape[-1] == 3
+ assert q.shape[:-1] == v.shape[:-1]
+
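+    # Rotate via the identity v' = v + 2*w*(qvec x v) + 2*qvec x (qvec x v),
+    # which avoids building an explicit rotation matrix.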
+ qvec = q[..., 1:]
+ uv = torch.cross(qvec, v, dim=len(q.shape) - 1)
+ uuv = torch.cross(qvec, uv, dim=len(q.shape) - 1)
+ return (v + 2 * (q[..., :1] * uv + uuv))
+
+
+def qinverse(q, inplace=False):
+ # We assume the quaternion to be normalized
+ if inplace:
+ q[..., 1:] *= -1
+ return q
+ else:
+ w = q[..., :1]
+ xyz = q[..., 1:]
+ return torch.cat((w, -xyz), dim=len(q.shape) - 1)
diff --git a/common/skeleton.py b/common/skeleton.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8e33f2dfb3b12a6969b457b410c6e4637c9a4ec
--- /dev/null
+++ b/common/skeleton.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import numpy as np
+
+
+class Skeleton:
+ def __init__(self, parents, joints_left, joints_right):
+ assert len(joints_left) == len(joints_right)
+
+ self._parents = np.array(parents)
+ self._joints_left = joints_left
+ self._joints_right = joints_right
+ self._compute_metadata()
+
+ def num_joints(self):
+ return len(self._parents)
+
+ def parents(self):
+ return self._parents
+
+ def has_children(self):
+ return self._has_children
+
+ def children(self):
+ return self._children
+
+ def remove_joints(self, joints_to_remove):
+ """
+ Remove the joints specified in 'joints_to_remove'.
+ """
+ valid_joints = []
+ for joint in range(len(self._parents)):
+ if joint not in joints_to_remove:
+ valid_joints.append(joint)
+
+ for i in range(len(self._parents)):
+ while self._parents[i] in joints_to_remove:
+ self._parents[i] = self._parents[self._parents[i]]
+
+ index_offsets = np.zeros(len(self._parents), dtype=int)
+ new_parents = []
+ for i, parent in enumerate(self._parents):
+ if i not in joints_to_remove:
+ new_parents.append(parent - index_offsets[parent])
+ else:
+ index_offsets[i:] += 1
+ self._parents = np.array(new_parents)
+
+ if self._joints_left is not None:
+ new_joints_left = []
+ for joint in self._joints_left:
+ if joint in valid_joints:
+ new_joints_left.append(joint - index_offsets[joint])
+ self._joints_left = new_joints_left
+ if self._joints_right is not None:
+ new_joints_right = []
+ for joint in self._joints_right:
+ if joint in valid_joints:
+ new_joints_right.append(joint - index_offsets[joint])
+ self._joints_right = new_joints_right
+
+ self._compute_metadata()
+
+ return valid_joints
+
+ def joints_left(self):
+ return self._joints_left
+
+ def joints_right(self):
+ return self._joints_right
+
+ def _compute_metadata(self):
+ self._has_children = np.zeros(len(self._parents)).astype(bool)
+ for i, parent in enumerate(self._parents):
+ if parent != -1:
+ self._has_children[parent] = True
+
+ self._children = []
+ for i, parent in enumerate(self._parents):
+ self._children.append([])
+ for i, parent in enumerate(self._parents):
+ if parent != -1:
+ self._children[parent].append(i)
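+
+
+if __name__ == '__main__':
+    # Editor's sketch on a hypothetical 7-joint layout: 0 is the root,
+    # 1-2-3 form a left chain and 4-5-6 a right chain.
+    sk = Skeleton(parents=[-1, 0, 1, 2, 0, 4, 5], joints_left=[1, 2, 3], joints_right=[4, 5, 6])
+    sk.remove_joints([3])  # drop the left end joint; later indices shift down by one
+    print(sk.parents())                          # [-1  0  1  0  3  4]
+    print(sk.joints_left(), sk.joints_right())   # [1, 2] [3, 4, 5]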
diff --git a/common/utils.py b/common/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..e32f0669b8127a5632c13d649b856096bf028de3
--- /dev/null
+++ b/common/utils.py
@@ -0,0 +1,202 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+import hashlib
+import os
+import pathlib
+import shutil
+import sys
+import time
+
+import cv2
+import numpy as np
+import torch
+
+
+def add_path():
+ Alphapose_path = os.path.abspath('joints_detectors/Alphapose')
+ hrnet_path = os.path.abspath('joints_detectors/hrnet')
+ trackers_path = os.path.abspath('pose_trackers')
+ paths = filter(lambda p: p not in sys.path, [Alphapose_path, hrnet_path, trackers_path])
+
+ sys.path.extend(paths)
+
+
+def wrap(func, *args, unsqueeze=False):
+ """
+ Wrap a torch function so it can be called with NumPy arrays.
+ Input and return types are seamlessly converted.
+ """
+
+ # Convert input types where applicable
+ args = list(args)
+ for i, arg in enumerate(args):
+ if type(arg) == np.ndarray:
+ args[i] = torch.from_numpy(arg)
+ if unsqueeze:
+ args[i] = args[i].unsqueeze(0)
+
+ result = func(*args)
+
+ # Convert output types where applicable
+ if isinstance(result, tuple):
+ result = list(result)
+ for i, res in enumerate(result):
+ if type(res) == torch.Tensor:
+ if unsqueeze:
+ res = res.squeeze(0)
+ result[i] = res.numpy()
+ return tuple(result)
+ elif type(result) == torch.Tensor:
+ if unsqueeze:
+ result = result.squeeze(0)
+ return result.numpy()
+ else:
+ return result
+
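+# Editor's note: `wrap` is how the NumPy pipeline calls torch functions, e.g.
+# in prepare_data_h36m.py below:
+#   pos_2d = wrap(project_to_2d, pos_3d, cam['intrinsic'], unsqueeze=True)
+# arrays are converted to tensors, an optional batch dim is added and removed
+# around the call, and NumPy arrays come back out.
+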
+
+def deterministic_random(min_value, max_value, data):
+ digest = hashlib.sha256(data.encode()).digest()
+ raw_value = int.from_bytes(digest[:4], byteorder='little', signed=False)
+ return int(raw_value / (2 ** 32 - 1) * (max_value - min_value)) + min_value
+
+
+def alpha_map(prediction):
+ p_min, p_max = prediction.min(), prediction.max()
+
+ k = 1.6 / (p_max - p_min)
+ b = 0.8 - k * p_max
+
+ prediction = k * prediction + b
+
+ return prediction
+
+
+def change_score(prediction, detectron_detection_path):
+ detectron_predictions = np.load(detectron_detection_path, allow_pickle=True)['positions_2d'].item()
+ pose = detectron_predictions['S1']['Directions 1']
+ prediction[..., 2] = pose[..., 2]
+
+ return prediction
+
+
+class Timer:
+ def __init__(self, message, show=True):
+ self.message = message
+ self.elapsed = 0
+ self.show = show
+
+ def __enter__(self):
+ self.start = time.perf_counter()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if self.show:
+ print(f'{self.message} --- elapsed time: {time.perf_counter() - self.start} s')
+
+
+def calculate_area(data):
+ """
+    Get the area of the axis-aligned bounding rectangle of the keypoints.
+    :param data: AlphaPose JSON keypoint format ([x, y, score, ..., x, y, score]) or AlphaPose result keypoint format ([[x, y], ..., [x, y]])
+    :return: rectangle area
+ """
+ data = np.array(data)
+
+ if len(data.shape) == 1:
+ data = np.reshape(data, (-1, 3))
+
+ width = min(data[:, 0]) - max(data[:, 0])
+ height = min(data[:, 1]) - max(data[:, 1])
+
+ return np.abs(width * height)
+
+
+def read_video(filename, fps=None, skip=0, limit=-1):
+ stream = cv2.VideoCapture(filename)
+
+ i = 0
+ while True:
+ grabbed, frame = stream.read()
+ # if the `grabbed` boolean is `False`, then we have
+ # reached the end of the video file
+ if not grabbed:
+            print('===========================> This video has ' + str(i) + ' frames in total.')
+ sys.stdout.flush()
+ break
+
+ i += 1
+ if i > skip:
+ frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
+ yield np.array(frame)
+ if i == limit:
+ break
+
+
+def split_video(video_path):
+ stream = cv2.VideoCapture(video_path)
+
+ output_dir = os.path.dirname(video_path)
+ video_name = os.path.basename(video_path)
+ video_name = video_name[:video_name.rfind('.')]
+
+ save_folder = pathlib.Path(f'./{output_dir}/alpha_pose_{video_name}/split_image/')
+ shutil.rmtree(str(save_folder), ignore_errors=True)
+ save_folder.mkdir(parents=True, exist_ok=True)
+
+ total_frames = int(stream.get(cv2.CAP_PROP_FRAME_COUNT))
+ length = len(str(total_frames)) + 1
+
+ i = 1
+ while True:
+ grabbed, frame = stream.read()
+
+ if not grabbed:
+            print(f'Split {i - 1} images from the video in total.')
+ break
+
+ save_path = f'{save_folder}/output{str(i).zfill(length)}.png'
+ cv2.imwrite(save_path, frame)
+
+ i += 1
+
+ saved_path = os.path.dirname(save_path)
+ print(f'Split images saved in {saved_path}')
+
+ return saved_path
+
+
+def evaluate(test_generator, model_pos, action=None, return_predictions=False):
+ """
+    Infer 3D positions from 2D keypoints.
+    :type test_generator: UnchunkedGenerator
+    :param test_generator: generator yielding batches of 2D keypoints
+    :param model_pos: 3D pose model
+    :param action: unused here; kept for API compatibility
+    :param return_predictions: if True, return the predicted 3D positions
+    :return: predicted 3D positions when return_predictions is True
+ """
+    joints_left, joints_right = [4, 5, 6, 11, 12, 13], [1, 2, 3, 14, 15, 16]
+ with torch.no_grad():
+ model_pos.eval()
+ N = 0
+ for _, batch, batch_2d in test_generator.next_epoch():
+ inputs_2d = torch.from_numpy(batch_2d.astype('float32'))
+ if torch.cuda.is_available():
+ inputs_2d = inputs_2d.cuda()
+ # Positional model
+ predicted_3d_pos = model_pos(inputs_2d)
+ if test_generator.augment_enabled():
+ # Undo flipping and take average with non-flipped version
+ predicted_3d_pos[1, :, :, 0] *= -1
+ predicted_3d_pos[1, :, joints_left + joints_right] = predicted_3d_pos[1, :, joints_right + joints_left]
+ predicted_3d_pos = torch.mean(predicted_3d_pos, dim=0, keepdim=True)
+ if return_predictions:
+ return predicted_3d_pos.squeeze(0).cpu().numpy()
+
+
+if __name__ == '__main__':
+ os.chdir('..')
+
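+    # Editor's sketch (assumed values): deterministic_random derives a repeatable
+    # integer in [min_value, max_value) from a string key, useful for stable splits.
+    print(deterministic_random(0, 100, 'S1/Walking'))
+
+    # calculate_area accepts a flat [x, y, score, ...] list or an (N, 2)/(N, 3) array.
+    print(calculate_area([0, 0, 1.0, 10, 20, 1.0]))  # -> 200.0
+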
+ split_video('outputs/kobe.mp4')
diff --git a/common/visualization.py b/common/visualization.py
new file mode 100644
index 0000000000000000000000000000000000000000..301fa090f87167f1ef8c3d4f84f47ba17a4ab2b0
--- /dev/null
+++ b/common/visualization.py
@@ -0,0 +1,251 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import time
+
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+from matplotlib.animation import FuncAnimation, writers
+from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
+from mpl_toolkits.mplot3d import Axes3D
+from tqdm import tqdm
+
+from common.utils import read_video
+
+
+def ckpt_time(ckpt=None, display=0, desc=''):
+ if not ckpt:
+ return time.time()
+ else:
+ if display:
+ print(desc + ' consume time {:0.4f}'.format(time.time() - float(ckpt)))
+ return time.time() - float(ckpt), time.time()
+
+
+def set_equal_aspect(ax, data):
+ """
+    Create a white cubic bounding box to keep the 3D axes at an equal aspect ratio.
+    :param ax: 3D axis
+    :param data: array of shape (frames, 3), generated from BVH using convert_bvh2dataset.py
+ """
+ X, Y, Z = data[..., 0], data[..., 1], data[..., 2]
+
+ # Create cubic bounding box to simulate equal aspect ratio
+ max_range = np.array([X.max() - X.min(), Y.max() - Y.min(), Z.max() - Z.min()]).max()
+ Xb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][0].flatten() + 0.5 * (X.max() + X.min())
+ Yb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][1].flatten() + 0.5 * (Y.max() + Y.min())
+ Zb = 0.5 * max_range * np.mgrid[-1:2:2, -1:2:2, -1:2:2][2].flatten() + 0.5 * (Z.max() + Z.min())
+
+ for xb, yb, zb in zip(Xb, Yb, Zb):
+ ax.plot([xb], [yb], [zb], 'w')
+
+
+def downsample_tensor(X, factor):
+ length = X.shape[0] // factor * factor
+ return np.mean(X[:length].reshape(-1, factor, *X.shape[1:]), axis=1)
+
+
+def render_animation(keypoints, poses, skeleton, fps, bitrate, azim, output, viewport,
+ limit=-1, downsample=1, size=6, input_video_path=None, input_video_skip=0):
+ """
+ TODO
+ Render an animation. The supported output modes are:
+ -- 'interactive': display an interactive figure
+ (also works on notebooks if associated with %matplotlib inline)
+ -- 'html': render the animation as HTML5 video. Can be displayed in a notebook using HTML(...).
+ -- 'filename.mp4': render and export the animation as an h264 video (requires ffmpeg).
+ -- 'filename.gif': render and export the animation a gif file (requires imagemagick).
+ """
+ plt.ioff()
+ fig = plt.figure(figsize=(size * (1 + len(poses)), size))
+ ax_in = fig.add_subplot(1, 1 + len(poses), 1)
+ ax_in.get_xaxis().set_visible(False)
+ ax_in.get_yaxis().set_visible(False)
+ ax_in.set_axis_off()
+ ax_in.set_title('Input')
+
+    # touch Axes3D so the import is not flagged as unused (importing it registers the '3d' projection); prevents a weird error
+ _ = Axes3D.__class__.__name__
+
+ ax_3d = []
+ lines_3d = []
+ trajectories = []
+ radius = 1.7
+ for index, (title, data) in enumerate(poses.items()):
+ ax = fig.add_subplot(1, 1 + len(poses), index + 2, projection='3d')
+ ax.view_init(elev=15., azim=azim)
+ ax.set_xlim3d([-radius / 2, radius / 2])
+ ax.set_zlim3d([0, radius])
+ ax.set_ylim3d([-radius / 2, radius / 2])
+ # ax.set_aspect('equal')
+ ax.set_xticklabels([])
+ ax.set_yticklabels([])
+ ax.set_zticklabels([])
+ ax.dist = 12.5
+ ax.set_title(title) # , pad=35
+ ax_3d.append(ax)
+ lines_3d.append([])
+ trajectories.append(data[:, 0, [0, 1]])
+ poses = list(poses.values())
+
+ # Decode video
+ if input_video_path is None:
+ # Black background
+ all_frames = np.zeros((keypoints.shape[0], viewport[1], viewport[0]), dtype='uint8')
+ else:
+        # Decode the video with OpenCV (via read_video)
+ all_frames = []
+ for f in read_video(input_video_path, fps=None, skip=input_video_skip):
+ all_frames.append(f)
+
+ effective_length = min(keypoints.shape[0], len(all_frames))
+ all_frames = all_frames[:effective_length]
+
+ if downsample > 1:
+ keypoints = downsample_tensor(keypoints, downsample)
+ all_frames = downsample_tensor(np.array(all_frames), downsample).astype('uint8')
+ for idx in range(len(poses)):
+ poses[idx] = downsample_tensor(poses[idx], downsample)
+ trajectories[idx] = downsample_tensor(trajectories[idx], downsample)
+ fps /= downsample
+
+ initialized = False
+ image = None
+ lines = []
+ points = None
+
+ if limit < 1:
+ limit = len(all_frames)
+ else:
+ limit = min(limit, len(all_frames))
+
+ parents = skeleton.parents()
+ pbar = tqdm(total=limit)
+
+ def update_video(i):
+ nonlocal initialized, image, lines, points
+
+ for n, ax in enumerate(ax_3d):
+ ax.set_xlim3d([-radius / 2 + trajectories[n][i, 0], radius / 2 + trajectories[n][i, 0]])
+ ax.set_ylim3d([-radius / 2 + trajectories[n][i, 1], radius / 2 + trajectories[n][i, 1]])
+
+ # Update 2D poses
+ if not initialized:
+ image = ax_in.imshow(all_frames[i], aspect='equal')
+
+ for j, j_parent in enumerate(parents):
+ if j_parent == -1:
+ continue
+
+ # if len(parents) == keypoints.shape[1] and 1 == 2:
+ # # Draw skeleton only if keypoints match (otherwise we don't have the parents definition)
+ # lines.append(ax_in.plot([keypoints[i, j, 0], keypoints[i, j_parent, 0]],
+ # [keypoints[i, j, 1], keypoints[i, j_parent, 1]], color='pink'))
+
+ col = 'red' if j in skeleton.joints_right() else 'black'
+ for n, ax in enumerate(ax_3d):
+ pos = poses[n][i]
+ lines_3d[n].append(ax.plot([pos[j, 0], pos[j_parent, 0]],
+ [pos[j, 1], pos[j_parent, 1]],
+ [pos[j, 2], pos[j_parent, 2]], zdir='z', c=col))
+
+ points = ax_in.scatter(*keypoints[i].T, 5, color='red', edgecolors='white', zorder=10)
+
+ initialized = True
+ else:
+ image.set_data(all_frames[i])
+
+ for j, j_parent in enumerate(parents):
+ if j_parent == -1:
+ continue
+
+ # if len(parents) == keypoints.shape[1] and 1 == 2:
+ # lines[j - 1][0].set_data([keypoints[i, j, 0], keypoints[i, j_parent, 0]],
+ # [keypoints[i, j, 1], keypoints[i, j_parent, 1]])
+
+ for n, ax in enumerate(ax_3d):
+ pos = poses[n][i]
+ lines_3d[n][j - 1][0].set_xdata(np.array([pos[j, 0], pos[j_parent, 0]])) # Hotfix matplotlib's bug. https://github.com/matplotlib/matplotlib/pull/20555
+ lines_3d[n][j - 1][0].set_ydata([pos[j, 1], pos[j_parent, 1]])
+ lines_3d[n][j - 1][0].set_3d_properties([pos[j, 2], pos[j_parent, 2]], zdir='z')
+
+ points.set_offsets(keypoints[i])
+
+ pbar.update()
+
+ fig.tight_layout()
+
+ anim = FuncAnimation(fig, update_video, frames=limit, interval=1000.0 / fps, repeat=False)
+ if output.endswith('.mp4'):
+ Writer = writers['ffmpeg']
+ writer = Writer(fps=fps, metadata={}, bitrate=bitrate)
+ anim.save(output, writer=writer)
+ elif output.endswith('.gif'):
+ anim.save(output, dpi=60, writer='imagemagick')
+ else:
+ raise ValueError('Unsupported output format (only .mp4 and .gif are supported)')
+ pbar.close()
+ plt.close()
+
+
+def render_animation_test(keypoints, poses, skeleton, fps, bitrate, azim, output, viewport, limit=-1, downsample=1, size=6, input_video_frame=None,
+ input_video_skip=0, num=None):
+ t0 = ckpt_time()
+ fig = plt.figure(figsize=(12, 6))
+ canvas = FigureCanvas(fig)
+ fig.add_subplot(121)
+ plt.imshow(input_video_frame)
+ # 3D
+ ax = fig.add_subplot(122, projection='3d')
+ ax.view_init(elev=15., azim=azim)
+    # set axis display range
+ radius = 1.7
+ ax.set_xlim3d([-radius / 2, radius / 2])
+ ax.set_zlim3d([0, radius])
+ ax.set_ylim3d([-radius / 2, radius / 2])
+ ax.set_aspect('equal')
+    # hide axis tick labels
+ ax.set_xticklabels([])
+ ax.set_yticklabels([])
+ ax.set_zticklabels([])
+ ax.dist = 7.5
+
+ # lxy add
+ ax.set_xlabel('X Label')
+ ax.set_ylabel('Y Label')
+ ax.set_zlabel('Z Label')
+
+ # array([-1, 0, 1, 2, 0, 4, 5, 0, 7, 8, 9, 8, 11, 12, 8, 14, 15])
+ parents = skeleton.parents()
+
+ pos = poses['Reconstruction'][-1]
+ _, t1 = ckpt_time(t0, desc='1 ')
+ for j, j_parent in enumerate(parents):
+ if j_parent == -1:
+ continue
+
+ if len(parents) == keypoints.shape[1]:
+ color_pink = 'pink'
+ if j == 1 or j == 2:
+ color_pink = 'black'
+
+ col = 'red' if j in skeleton.joints_right() else 'black'
+        # draw the 3D skeleton
+ ax.plot([pos[j, 0], pos[j_parent, 0]],
+ [pos[j, 1], pos[j_parent, 1]],
+ [pos[j, 2], pos[j_parent, 2]], zdir='z', c=col)
+
+ # plt.savefig('test/3Dimage_{}.png'.format(1000+num))
+ width, height = fig.get_size_inches() * fig.get_dpi()
+ _, t2 = ckpt_time(t1, desc='2 ')
+ canvas.draw() # draw the canvas, cache the renderer
+    image = np.frombuffer(canvas.tostring_rgb(), dtype='uint8').reshape(int(height), int(width), 3)  # np.fromstring is deprecated for binary data
+ cv2.imshow('im', image)
+ cv2.waitKey(5)
+ _, t3 = ckpt_time(t2, desc='3 ')
+ return image
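+
+
+# Editor's note (hypothetical argument values): render_animation is typically
+# called with the 2D keypoints, a dict mapping titles to 3D pose arrays, and
+# the dataset skeleton, e.g.
+#   render_animation(keypoints, {'Reconstruction': prediction}, dataset.skeleton(),
+#                    fps=25, bitrate=3000, azim=70, output='outputs/demo.mp4',
+#                    viewport=(1920, 1080), input_video_path='inputs/demo.mp4')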
diff --git a/data/data_utils.py b/data/data_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..24945af2d6ed0d954c48187b67bad20ee5883498
--- /dev/null
+++ b/data/data_utils.py
@@ -0,0 +1,110 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import h5py
+import numpy as np
+
+mpii_metadata = {
+ 'layout_name': 'mpii',
+ 'num_joints': 16,
+ 'keypoints_symmetry': [
+ [3, 4, 5, 13, 14, 15],
+ [0, 1, 2, 10, 11, 12],
+ ]
+}
+
+coco_metadata = {
+ 'layout_name': 'coco',
+ 'num_joints': 17,
+ 'keypoints_symmetry': [
+ [1, 3, 5, 7, 9, 11, 13, 15],
+ [2, 4, 6, 8, 10, 12, 14, 16],
+ ]
+}
+
+h36m_metadata = {
+ 'layout_name': 'h36m',
+ 'num_joints': 17,
+ 'keypoints_symmetry': [
+ [4, 5, 6, 11, 12, 13],
+ [1, 2, 3, 14, 15, 16],
+ ]
+}
+
+humaneva15_metadata = {
+ 'layout_name': 'humaneva15',
+ 'num_joints': 15,
+ 'keypoints_symmetry': [
+ [2, 3, 4, 8, 9, 10],
+ [5, 6, 7, 11, 12, 13]
+ ]
+}
+
+humaneva20_metadata = {
+ 'layout_name': 'humaneva20',
+ 'num_joints': 20,
+ 'keypoints_symmetry': [
+ [3, 4, 5, 6, 11, 12, 13, 14],
+ [7, 8, 9, 10, 15, 16, 17, 18]
+ ]
+}
+
+
+def suggest_metadata(name):
+ names = []
+ for metadata in [mpii_metadata, coco_metadata, h36m_metadata, humaneva15_metadata, humaneva20_metadata]:
+ if metadata['layout_name'] in name:
+ return metadata
+ names.append(metadata['layout_name'])
+ raise KeyError('Cannot infer keypoint layout from name "{}". Tried {}.'.format(name, names))
+
+
+def import_detectron_poses(path):
+ # Latin1 encoding because Detectron runs on Python 2.7
+ data = np.load(path, encoding='latin1')
+ kp = data['keypoints']
+ bb = data['boxes']
+ results = []
+ for i in range(len(bb)):
+ if len(bb[i][1]) == 0:
+ assert i > 0
+ # Use last pose in case of detection failure
+ results.append(results[-1])
+ continue
+ best_match = np.argmax(bb[i][1][:, 4])
+ # import ipdb;ipdb.set_trace()
+ keypoints = kp[i][1][best_match].T.copy()
+ results.append(keypoints)
+ results = np.array(results)
+ # return results[:, :, 4:6] # Soft-argmax
+    return results[:, :, [0, 1, 3]]  # Argmax + score (Detectron rows: x, y, logit, prob)
+
+
+def my_pose(path):
+    # Latin1 encoding because the source data was pickled under Python 2
+    data = np.load(path, encoding='latin1')
+    return data
+
+
+def import_cpn_poses(path):
+ data = np.load(path)
+ kp = data['keypoints']
+ return kp[:, :, :2]
+
+
+def import_sh_poses(path):
+    with h5py.File(path, 'r') as hf:
+        positions = hf['poses'][()]  # .value was removed in h5py 3.x
+ return positions.astype('float32')
+
+
+def suggest_pose_importer(name):
+ if 'detectron' in name:
+ return import_detectron_poses
+ if 'cpn' in name:
+ return import_cpn_poses
+ if 'sh' in name:
+ return import_sh_poses
+ raise KeyError('Cannot infer keypoint format from name "{}". Tried detectron, cpn, sh.'.format(name))
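+
+
+if __name__ == '__main__':
+    # Editor's sketch: layout lookup is substring-based, so any name containing a
+    # known layout name (e.g. 'data_2d_h36m_gt') resolves to that metadata.
+    meta = suggest_metadata('data_2d_h36m_gt')
+    print(meta['layout_name'], meta['num_joints'])  # h36m 17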
diff --git a/data/prepare_2d_kpt.py b/data/prepare_2d_kpt.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6a46947c5cc4399049ac3a69d57fe92f7b1fdf5
--- /dev/null
+++ b/data/prepare_2d_kpt.py
@@ -0,0 +1,45 @@
+import argparse
+import os
+import sys
+
+import numpy as np
+from data_utils import suggest_metadata, suggest_pose_importer
+
+sys.path.append('../')
+
+output_prefix_2d = 'data_2d_h36m_'
+cam_map = {
+ '54138969': 0,
+ '55011271': 1,
+ '58860488': 2,
+ '60457274': 3,
+}
+
+if __name__ == '__main__':
+ if os.path.basename(os.getcwd()) != 'data':
+ print('This script must be launched from the "data" directory')
+ exit(0)
+
+ parser = argparse.ArgumentParser(description='Human3.6M dataset converter')
+
+ parser.add_argument('-i', '--input', default='', type=str, metavar='PATH', help='input path to 2D detections')
+ parser.add_argument('-o', '--output', default='detectron_pt_coco', type=str, metavar='PATH',
+ help='output suffix for 2D detections (e.g. detectron_pt_coco)')
+
+ args = parser.parse_args()
+
+ if not args.input:
+ print('Please specify the input directory')
+ exit(0)
+
+    # pick the importer and metadata matching the output name; we use the Detectron format here
+ import_func = suggest_pose_importer('detectron_pt_coco')
+ metadata = suggest_metadata('detectron_pt_coco')
+
+ print('Parsing 2D detections from', args.input)
+ keypoints = import_func(args.input)
+
+ output = keypoints.astype(np.float32)
+    # the generated 2D data feeds the subsequent 3D estimation step
+ np.savez_compressed(output_prefix_2d + 'test' + args.output, positions_2d=output, metadata=metadata)
+    print('Saved npz as', output_prefix_2d + 'test' + args.output)
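+
+# Editor's sketch of a typical invocation (hypothetical input path), run from
+# the data/ directory:
+#   python prepare_2d_kpt.py -i /path/to/detections.npz -o detectron_pt_coco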
diff --git a/data/prepare_data_2d_h36m_generic.py b/data/prepare_data_2d_h36m_generic.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e766d31dfaf5857ca03a4cbd993a5b7dce50c45
--- /dev/null
+++ b/data/prepare_data_2d_h36m_generic.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+import os
+import re
+import sys
+from glob import glob
+
+import numpy as np
+from data_utils import suggest_metadata, suggest_pose_importer
+
+sys.path.append('../')
+
+output_prefix_2d = 'data_2d_h36m_'
+cam_map = {
+ '54138969': 0,
+ '55011271': 1,
+ '58860488': 2,
+ '60457274': 3,
+}
+
+if __name__ == '__main__':
+ if os.path.basename(os.getcwd()) != 'data':
+ print('This script must be launched from the "data" directory')
+ exit(0)
+
+ parser = argparse.ArgumentParser(description='Human3.6M dataset converter')
+
+ parser.add_argument('-i', '--input', default='', type=str, metavar='PATH', help='input path to 2D detections')
+ parser.add_argument('-o', '--output', default='', type=str, metavar='PATH', help='output suffix for 2D detections (e.g. detectron_pt_coco)')
+
+ args = parser.parse_args()
+
+ if not args.input:
+ print('Please specify the input directory')
+ exit(0)
+
+ if not args.output:
+ print('Please specify an output suffix (e.g. detectron_pt_coco)')
+ exit(0)
+
+ import_func = suggest_pose_importer(args.output)
+ metadata = suggest_metadata(args.output)
+
+ print('Parsing 2D detections from', args.input)
+
+ output = {}
+
+ # lxy add
+ keypoints = import_func(args.input)
+ output['S1'] = {}
+ output['S1']['Walking'] = [None, None, None, None]
+ output['S1']['Walking'][0] = keypoints.astype(np.float32)
+ np.savez_compressed(output_prefix_2d + '00' + args.output, positions_2d=output, metadata=metadata)
+ data = np.load('data_2d_h36m_detectron_pt_coco.npz')
+ data1 = np.load('data_2d_h36m_00detectron_pt_coco.npz')
+ actions = data['positions_2d'].item()
+ actions1 = data1['positions_2d'].item()
+ meta = data['metadata']
+
+ actions['S1']['Walking'][0] = actions1['S1']['Walking'][0][:, :, :]
+ np.savez_compressed('data_2d_h36m_lxy_cpn_ft_h36m_dbb.npz', positions_2d=actions, metadata=meta)
+
+    sys.exit()  # stop here (os.exit does not exist); the generic conversion below is kept for reference
+
+    # match all files of the form S*/<action>.<camera>.mp4.npz
+ file_list = glob(args.input + '/S*/*.mp4.npz')
+ for f in file_list:
+ path, fname = os.path.split(f)
+ subject = os.path.basename(path)
+ assert subject.startswith('S'), subject + ' does not look like a subject directory'
+
+ if '_ALL' in fname:
+ continue
+
+ m = re.search('(.*)\\.([0-9]+)\\.mp4\\.npz', fname)
+    # first capture group: action name
+ action = m.group(1)
+    # second capture group: camera id
+ camera = m.group(2)
+ camera_idx = cam_map[camera]
+
+ if subject == 'S11' and action == 'Directions':
+ continue # Discard corrupted video
+
+ # Use consistent naming convention
+ canonical_name = action.replace('TakingPhoto', 'Photo') \
+ .replace('WalkingDog', 'WalkDog')
+
+ keypoints = import_func(f)
+ assert keypoints.shape[1] == metadata['num_joints']
+
+ if subject not in output:
+ output[subject] = {}
+ if canonical_name not in output[subject]:
+ output[subject][canonical_name] = [None, None, None, None]
+ output[subject][canonical_name][camera_idx] = keypoints.astype('float32')
+
+ print('Saving...')
+ np.savez_compressed(output_prefix_2d + args.output, positions_2d=output, metadata=metadata)
+ print('Done.')
diff --git a/data/prepare_data_2d_h36m_sh.py b/data/prepare_data_2d_h36m_sh.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0fa4ea3d6aa3a7489e2a724212a40ab1cd2b3ba
--- /dev/null
+++ b/data/prepare_data_2d_h36m_sh.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+import os
+import sys
+import tarfile
+import zipfile
+from glob import glob
+from shutil import rmtree
+
+import h5py
+import numpy as np
+
+sys.path.append('../')
+
+output_filename_pt = 'data_2d_h36m_sh_pt_mpii'
+output_filename_ft = 'data_2d_h36m_sh_ft_h36m'
+subjects = ['S1', 'S5', 'S6', 'S7', 'S8', 'S9', 'S11']
+cam_map = {
+ '54138969': 0,
+ '55011271': 1,
+ '58860488': 2,
+ '60457274': 3,
+}
+
+metadata = {
+ 'num_joints': 16,
+ 'keypoints_symmetry': [
+ [3, 4, 5, 13, 14, 15],
+ [0, 1, 2, 10, 11, 12],
+ ]
+}
+
+
+def process_subject(subject, file_list, output):
+ if subject == 'S11':
+ assert len(file_list) == 119, "Expected 119 files for subject " + subject + ", got " + str(len(file_list))
+ else:
+ assert len(file_list) == 120, "Expected 120 files for subject " + subject + ", got " + str(len(file_list))
+
+ for f in file_list:
+ action, cam = os.path.splitext(os.path.basename(f))[0].replace('_', ' ').split('.')
+
+ if subject == 'S11' and action == 'Directions':
+ continue # Discard corrupted video
+
+ if action not in output[subject]:
+ output[subject][action] = [None, None, None, None]
+
+        with h5py.File(f, 'r') as hf:
+            positions = hf['poses'][()]  # .value was removed in h5py 3.x
+ output[subject][action][cam_map[cam]] = positions.astype('float32')
+
+
+if __name__ == '__main__':
+ if os.path.basename(os.getcwd()) != 'data':
+ print('This script must be launched from the "data" directory')
+ exit(0)
+
+ parser = argparse.ArgumentParser(description='Human3.6M dataset downloader/converter')
+
+ parser.add_argument('-pt', '--pretrained', default='', type=str, metavar='PATH', help='convert pretrained dataset')
+ parser.add_argument('-ft', '--fine-tuned', default='', type=str, metavar='PATH', help='convert fine-tuned dataset')
+
+ args = parser.parse_args()
+
+ if args.pretrained:
+ print('Converting pretrained dataset from', args.pretrained)
+ print('Extracting...')
+ with zipfile.ZipFile(args.pretrained, 'r') as archive:
+ archive.extractall('sh_pt')
+
+ print('Converting...')
+ output = {}
+ for subject in subjects:
+ output[subject] = {}
+ file_list = glob('sh_pt/h36m/' + subject + '/StackedHourglass/*.h5')
+ process_subject(subject, file_list, output)
+
+ print('Saving...')
+ np.savez_compressed(output_filename_pt, positions_2d=output, metadata=metadata)
+
+ print('Cleaning up...')
+ rmtree('sh_pt')
+
+ print('Done.')
+
+ if args.fine_tuned:
+ print('Converting fine-tuned dataset from', args.fine_tuned)
+ print('Extracting...')
+ with tarfile.open(args.fine_tuned, 'r:gz') as archive:
+ archive.extractall('sh_ft')
+
+ print('Converting...')
+ output = {}
+ for subject in subjects:
+ output[subject] = {}
+ file_list = glob('sh_ft/' + subject + '/StackedHourglassFineTuned240/*.h5')
+ process_subject(subject, file_list, output)
+
+ print('Saving...')
+ np.savez_compressed(output_filename_ft, positions_2d=output, metadata=metadata)
+
+ print('Cleaning up...')
+ rmtree('sh_ft')
+
+ print('Done.')
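+
+# Editor's sketch of typical invocations (hypothetical archive names), run from
+# the data/ directory:
+#   python prepare_data_2d_h36m_sh.py -pt sh_pretrained.zip
+#   python prepare_data_2d_h36m_sh.py -ft sh_fine_tuned_240.tar.gz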
diff --git a/data/prepare_data_h36m.py b/data/prepare_data_h36m.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb58cb911abdc8ffd7dda1acf45d3068be228a08
--- /dev/null
+++ b/data/prepare_data_h36m.py
@@ -0,0 +1,142 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+import os
+import sys
+import zipfile
+from glob import glob
+from shutil import rmtree
+
+import h5py
+import numpy as np
+
+sys.path.append('../')
+from common.h36m_dataset import Human36mDataset
+from common.camera import world_to_camera, project_to_2d, image_coordinates
+from common.utils import wrap
+
+output_filename = 'data_3d_h36m'
+output_filename_2d = 'data_2d_h36m_gt'
+subjects = ['S1', 'S5', 'S6', 'S7', 'S8', 'S9', 'S11']
+
+if __name__ == '__main__':
+ if os.path.basename(os.getcwd()) != 'data':
+ print('This script must be launched from the "data" directory')
+ exit(0)
+
+ parser = argparse.ArgumentParser(description='Human3.6M dataset downloader/converter')
+
+ # Default: convert dataset preprocessed by Martinez et al. in https://github.com/una-dinosauria/3d-pose-baseline
+ parser.add_argument('--from-archive', default='', type=str, metavar='PATH', help='convert preprocessed dataset')
+
+ # Alternatively, convert dataset from original source (the Human3.6M dataset path must be specified manually)
+ parser.add_argument('--from-source', default='', type=str, metavar='PATH', help='convert original dataset')
+
+ args = parser.parse_args()
+
+ if args.from_archive and args.from_source:
+ print('Please specify only one argument')
+ exit(0)
+
+ if os.path.exists(output_filename + '.npz'):
+ print('The dataset already exists at', output_filename + '.npz')
+ exit(0)
+
+ if args.from_archive:
+ print('Extracting Human3.6M dataset from', args.from_archive)
+ with zipfile.ZipFile(args.from_archive, 'r') as archive:
+ archive.extractall()
+
+ print('Converting...')
+ output = {}
+ for subject in subjects:
+ output[subject] = {}
+ file_list = glob('h36m/' + subject + '/MyPoses/3D_positions/*.h5')
+ assert len(file_list) == 30, "Expected 30 files for subject " + subject + ", got " + str(len(file_list))
+ for f in file_list:
+ action = os.path.splitext(os.path.basename(f))[0]
+
+ if subject == 'S11' and action == 'Directions':
+ continue # Discard corrupted video
+
+                with h5py.File(f, 'r') as hf:
+                    positions = hf['3D_positions'][()].reshape(32, 3, -1).transpose(2, 0, 1)
+ positions /= 1000 # Meters instead of millimeters
+ output[subject][action] = positions.astype('float32')
+
+ print('Saving...')
+ np.savez_compressed(output_filename, positions_3d=output)
+
+ print('Cleaning up...')
+ rmtree('h36m')
+
+ print('Done.')
+
+ elif args.from_source:
+ print('Converting original Human3.6M dataset from', args.from_source)
+ output = {}
+
+ from scipy.io import loadmat
+
+ for subject in subjects:
+ output[subject] = {}
+ file_list = glob(args.from_source + '/' + subject + '/MyPoseFeatures/D3_Positions/*.cdf.mat')
+ assert len(file_list) == 30, "Expected 30 files for subject " + subject + ", got " + str(len(file_list))
+ for f in file_list:
+ action = os.path.splitext(os.path.splitext(os.path.basename(f))[0])[0]
+
+ if subject == 'S11' and action == 'Directions':
+ continue # Discard corrupted video
+
+ # Use consistent naming convention
+ canonical_name = action.replace('TakingPhoto', 'Photo') \
+ .replace('WalkingDog', 'WalkDog')
+
+ hf = loadmat(f)
+ positions = hf['data'][0, 0].reshape(-1, 32, 3)
+ positions /= 1000 # Meters instead of millimeters
+ output[subject][canonical_name] = positions.astype('float32')
+
+ print('Saving...')
+ np.savez_compressed(output_filename, positions_3d=output)
+
+ print('Done.')
+
+ else:
+ print('Please specify the dataset source')
+ exit(0)
+
+ # Create 2D pose file
+ print('')
+ print('Computing ground-truth 2D poses...')
+ dataset = Human36mDataset(output_filename + '.npz')
+ output_2d_poses = {}
+ for subject in dataset.subjects():
+ output_2d_poses[subject] = {}
+ for action in dataset[subject].keys():
+ anim = dataset[subject][action]
+
+ positions_2d = []
+ for cam in anim['cameras']:
+ pos_3d = world_to_camera(anim['positions'], R=cam['orientation'], t=cam['translation'])
+ pos_2d = wrap(project_to_2d, pos_3d, cam['intrinsic'], unsqueeze=True)
+ pos_2d_pixel_space = image_coordinates(pos_2d, w=cam['res_w'], h=cam['res_h'])
+ positions_2d.append(pos_2d_pixel_space.astype('float32'))
+ output_2d_poses[subject][action] = positions_2d
+
+ print('Saving...')
+ metadata = {
+ 'num_joints': dataset.skeleton().num_joints(),
+ 'keypoints_symmetry': [dataset.skeleton().joints_left(), dataset.skeleton().joints_right()]
+ }
+ np.savez_compressed(output_filename_2d, positions_2d=output_2d_poses, metadata=metadata)
+
+ print('Done.')
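+
+# Editor's sketch of typical invocations (hypothetical paths), run from the
+# data/ directory:
+#   python prepare_data_h36m.py --from-archive h36m.zip
+#   python prepare_data_h36m.py --from-source /path/to/Human3.6M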
diff --git a/data/prepare_data_humaneva.py b/data/prepare_data_humaneva.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ed83c2b3e3d40a64faef1d2e35ba9ea83249848
--- /dev/null
+++ b/data/prepare_data_humaneva.py
@@ -0,0 +1,242 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import argparse
+import os
+import re
+import sys
+from glob import glob
+
+import numpy as np
+from data_utils import suggest_metadata, suggest_pose_importer
+
+sys.path.append('../')
+from itertools import groupby
+
+subjects = ['Train/S1', 'Train/S2', 'Train/S3', 'Validate/S1', 'Validate/S2', 'Validate/S3']
+
+cam_map = {
+ 'C1': 0,
+ 'C2': 1,
+ 'C3': 2,
+}
+
+# Frame numbers for train/test split
+# format: [start_frame, end_frame) (inclusive, exclusive)
+index = {
+ 'Train/S1': {
+ 'Walking 1': (590, 1203),
+ 'Jog 1': (367, 740),
+ 'ThrowCatch 1': (473, 945),
+ 'Gestures 1': (395, 801),
+ 'Box 1': (385, 789),
+ },
+ 'Train/S2': {
+ 'Walking 1': (438, 876),
+ 'Jog 1': (398, 795),
+ 'ThrowCatch 1': (550, 1128),
+ 'Gestures 1': (500, 901),
+ 'Box 1': (382, 734),
+ },
+ 'Train/S3': {
+ 'Walking 1': (448, 939),
+ 'Jog 1': (401, 842),
+ 'ThrowCatch 1': (493, 1027),
+ 'Gestures 1': (533, 1102),
+ 'Box 1': (512, 1021),
+ },
+ 'Validate/S1': {
+ 'Walking 1': (5, 590),
+ 'Jog 1': (5, 367),
+ 'ThrowCatch 1': (5, 473),
+ 'Gestures 1': (5, 395),
+ 'Box 1': (5, 385),
+ },
+ 'Validate/S2': {
+ 'Walking 1': (5, 438),
+ 'Jog 1': (5, 398),
+ 'ThrowCatch 1': (5, 550),
+ 'Gestures 1': (5, 500),
+ 'Box 1': (5, 382),
+ },
+ 'Validate/S3': {
+ 'Walking 1': (5, 448),
+ 'Jog 1': (5, 401),
+ 'ThrowCatch 1': (5, 493),
+ 'Gestures 1': (5, 533),
+ 'Box 1': (5, 512),
+ },
+}
+
+# Frames to skip for each video (synchronization)
+sync_data = {
+ 'S1': {
+ 'Walking 1': (82, 81, 82),
+ 'Jog 1': (51, 51, 50),
+ 'ThrowCatch 1': (61, 61, 60),
+ 'Gestures 1': (45, 45, 44),
+ 'Box 1': (57, 57, 56),
+ },
+ 'S2': {
+ 'Walking 1': (115, 115, 114),
+ 'Jog 1': (100, 100, 99),
+ 'ThrowCatch 1': (127, 127, 127),
+ 'Gestures 1': (122, 122, 121),
+ 'Box 1': (119, 119, 117),
+ },
+ 'S3': {
+ 'Walking 1': (80, 80, 80),
+ 'Jog 1': (65, 65, 65),
+ 'ThrowCatch 1': (79, 79, 79),
+ 'Gestures 1': (83, 83, 82),
+ 'Box 1': (1, 1, 1),
+ },
+ 'S4': {}
+}
+
+if __name__ == '__main__':
+ if os.path.basename(os.getcwd()) != 'data':
+ print('This script must be launched from the "data" directory')
+ exit(0)
+
+ parser = argparse.ArgumentParser(description='HumanEva dataset converter')
+
+ parser.add_argument('-p', '--path', default='', type=str, metavar='PATH', help='path to the processed HumanEva dataset')
+ parser.add_argument('--convert-3d', action='store_true', help='convert 3D mocap data')
+ parser.add_argument('--convert-2d', default='', type=str, metavar='PATH', help='convert user-supplied 2D detections')
+ parser.add_argument('-o', '--output', default='', type=str, metavar='PATH', help='output suffix for 2D detections (e.g. detectron_pt_coco)')
+
+ args = parser.parse_args()
+
+ if not args.convert_2d and not args.convert_3d:
+ print('Please specify one conversion mode')
+ exit(0)
+
+ if args.path:
+ print('Parsing HumanEva dataset from', args.path)
+ output = {}
+ output_2d = {}
+ frame_mapping = {}
+
+ from scipy.io import loadmat
+
+ num_joints = None
+
+ for subject in subjects:
+ output[subject] = {}
+ output_2d[subject] = {}
+ split, subject_name = subject.split('/')
+ if subject_name not in frame_mapping:
+ frame_mapping[subject_name] = {}
+
+ file_list = glob(args.path + '/' + subject + '/*.mat')
+ for f in file_list:
+ action = os.path.splitext(os.path.basename(f))[0]
+
+ # Use consistent naming convention
+ canonical_name = action.replace('_', ' ')
+
+ hf = loadmat(f)
+ positions = hf['poses_3d']
+ positions_2d = hf['poses_2d'].transpose(1, 0, 2, 3) # Ground-truth 2D poses
+ assert positions.shape[0] == positions_2d.shape[0] and positions.shape[1] == positions_2d.shape[2]
+ assert num_joints is None or num_joints == positions.shape[1], "Joint number inconsistency among files"
+ num_joints = positions.shape[1]
+
+ # Sanity check for the sequence length
+ assert positions.shape[0] == index[subject][canonical_name][1] - index[subject][canonical_name][0]
+
+ # Split corrupted motion capture streams into contiguous chunks
+ # e.g. 012XX567X9 is split into "012", "567", and "9".
+ all_chunks = [list(v) for k, v in groupby(positions, lambda x: np.isfinite(x).all())]
+ all_chunks_2d = [list(v) for k, v in groupby(positions_2d, lambda x: np.isfinite(x).all())]
+ assert len(all_chunks) == len(all_chunks_2d)
+ current_index = index[subject][canonical_name][0]
+ chunk_indices = []
+ for i, chunk in enumerate(all_chunks):
+ next_index = current_index + len(chunk)
+ name = canonical_name + ' chunk' + str(i)
+ if np.isfinite(chunk).all():
+ output[subject][name] = np.array(chunk, dtype='float32') / 1000
+ output_2d[subject][name] = list(np.array(all_chunks_2d[i], dtype='float32').transpose(1, 0, 2, 3))
+ chunk_indices.append((current_index, next_index, np.isfinite(chunk).all(), split, name))
+ current_index = next_index
+ assert current_index == index[subject][canonical_name][1]
+ if canonical_name not in frame_mapping[subject_name]:
+ frame_mapping[subject_name][canonical_name] = []
+ frame_mapping[subject_name][canonical_name] += chunk_indices
+
+ metadata = suggest_metadata('humaneva' + str(num_joints))
+ output_filename = 'data_3d_' + metadata['layout_name']
+ output_prefix_2d = 'data_2d_' + metadata['layout_name'] + '_'
+
+ if args.convert_3d:
+ print('Saving...')
+ np.savez_compressed(output_filename, positions_3d=output)
+ np.savez_compressed(output_prefix_2d + 'gt', positions_2d=output_2d, metadata=metadata)
+ print('Done.')
+
+ else:
+ print('Please specify the dataset source')
+ exit(0)
+
+ if args.convert_2d:
+ if not args.output:
+ print('Please specify an output suffix (e.g. detectron_pt_coco)')
+ exit(0)
+
+ import_func = suggest_pose_importer(args.output)
+ metadata = suggest_metadata(args.output)
+
+ print('Parsing 2D detections from', args.convert_2d)
+
+ output = {}
+ file_list = glob(args.convert_2d + '/S*/*.avi.npz')
+ for f in file_list:
+ path, fname = os.path.split(f)
+ subject = os.path.basename(path)
+ assert subject.startswith('S'), subject + ' does not look like a subject directory'
+
+ m = re.search('(.*) \\((.*)\\)', fname.replace('_', ' '))
+ action = m.group(1)
+ camera = m.group(2)
+ camera_idx = cam_map[camera]
+
+ keypoints = import_func(f)
+ assert keypoints.shape[1] == metadata['num_joints']
+
+ if action in sync_data[subject]:
+ sync_offset = sync_data[subject][action][camera_idx] - 1
+ else:
+ sync_offset = 0
+
+ if subject in frame_mapping and action in frame_mapping[subject]:
+ chunks = frame_mapping[subject][action]
+ for (start_idx, end_idx, labeled, split, name) in chunks:
+ canonical_subject = split + '/' + subject
+ if not labeled:
+ canonical_subject = 'Unlabeled/' + canonical_subject
+ if canonical_subject not in output:
+ output[canonical_subject] = {}
+ kps = keypoints[start_idx + sync_offset:end_idx + sync_offset]
+ assert len(kps) == end_idx - start_idx, "Got len {}, expected {}".format(len(kps), end_idx - start_idx)
+
+ if name not in output[canonical_subject]:
+ output[canonical_subject][name] = [None, None, None]
+
+ output[canonical_subject][name][camera_idx] = kps.astype('float32')
+ else:
+ canonical_subject = 'Unlabeled/' + subject
+ if canonical_subject not in output:
+ output[canonical_subject] = {}
+ if action not in output[canonical_subject]:
+ output[canonical_subject][action] = [None, None, None]
+ output[canonical_subject][action][camera_idx] = keypoints.astype('float32')
+
+ print('Saving...')
+ np.savez_compressed(output_prefix_2d + args.output, positions_2d=output, metadata=metadata)
+ print('Done.')
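+
+# Editor's sketch of typical invocations (hypothetical paths), run from the
+# data/ directory:
+#   python prepare_data_humaneva.py -p /path/to/converted_humaneva --convert-3d
+#   python prepare_data_humaneva.py -p /path/to/converted_humaneva --convert-2d /path/to/detections -o detectron_pt_coco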
diff --git a/joints_detectors/Alphapose/.gitignore b/joints_detectors/Alphapose/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..1f50207d6d02402f8d659415ee77cae16068759d
--- /dev/null
+++ b/joints_detectors/Alphapose/.gitignore
@@ -0,0 +1,29 @@
+human_detection/output
+examples/results
+examples/res
+PoseFlow/__pycache__
+PoseFlow/*.npy
+PoseFlow/alpha-pose-results-test.json
+PoseFlow/alpha-pose-results-val.json
+PoseFlow/test-predict
+PoseFlow/val-predict
+train_sppe/coco-minival500_images.txt
+train_sppe/person_keypoints_val2014.json
+
+ssd/examples
+images
+
+*.npy
+*.so
+*.pyc
+.ipynb_checkpoints
+*/.ipynb_checkpoints/
+*/.tensorboard/*
+*/exp
+
+*.pth
+*.h5
+*.zip
+*.weights
+
+coco-minival/
diff --git a/joints_detectors/Alphapose/LICENSE b/joints_detectors/Alphapose/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..c038374200bf87e2e3c5d636b1e01dff536a474b
--- /dev/null
+++ b/joints_detectors/Alphapose/LICENSE
@@ -0,0 +1,515 @@
+ALPHAPOSE: MULTIPERSON KEYPOINT DETECTION
+SOFTWARE LICENSE AGREEMENT
+ACADEMIC OR NON-PROFIT ORGANIZATION NONCOMMERCIAL RESEARCH USE ONLY
+
+BY USING OR DOWNLOADING THE SOFTWARE, YOU ARE AGREEING TO THE TERMS OF THIS LICENSE AGREEMENT. IF YOU DO NOT AGREE WITH THESE TERMS, YOU MAY NOT USE OR DOWNLOAD THE SOFTWARE.
+
+This is a license agreement ("Agreement") between your academic institution or non-profit organization or self (called "Licensee" or "You" in this Agreement) and Shanghai Jiao Tong University (called "Licensor" in this Agreement). All rights not specifically granted to you in this Agreement are reserved for Licensor.
+
+RESERVATION OF OWNERSHIP AND GRANT OF LICENSE:
+Licensor retains exclusive ownership of any copy of the Software (as defined below) licensed under this Agreement and hereby grants to Licensee a personal, non-exclusive,
+non-transferable license to use the Software for noncommercial research purposes, without the right to sublicense, pursuant to the terms and conditions of this Agreement. As used in this Agreement, the term "Software" means (i) the actual copy of all or any portion of code for program routines made accessible to Licensee by Licensor pursuant to this Agreement, inclusive of backups, updates, and/or merged copies permitted hereunder or subsequently supplied by Licensor, including all or any file structures, programming instructions, user interfaces and screen formats and sequences as well as any and all documentation and instructions related to it, and (ii) all or any derivatives and/or modifications created or made by You to any of the items specified in (i).
+
+CONFIDENTIALITY: Licensee acknowledges that the Software is proprietary to Licensor, and as such, Licensee agrees to receive all such materials in confidence and use the Software only in accordance with the terms of this Agreement. Licensee agrees to use reasonable effort to protect the Software from unauthorized use, reproduction, distribution, or publication.
+
+PERMITTED USES: The Software may be used for your own noncommercial internal research purposes. You understand and agree that Licensor is not obligated to implement any suggestions and/or feedback you might provide regarding the Software, but to the extent Licensor does so, you are not entitled to any compensation related thereto.
+
+DERIVATIVES: You may create derivatives of or make modifications to the Software, however, You agree that all and any such derivatives and modifications will be owned by Licensor and become a part of the Software licensed to You under this Agreement. You may only use such derivatives and modifications for your own noncommercial internal research purposes, and you may not otherwise use, distribute or copy such derivatives and modifications in violation of this Agreement.
+
+BACKUPS: If Licensee is an organization, it may make that number of copies of the Software necessary for internal noncommercial use at a single site within its organization provided that all information appearing in or on the original labels, including the copyright and trademark notices are copied onto the labels of the copies.
+
+USES NOT PERMITTED: You may not distribute, copy or use the Software except as explicitly permitted herein. Licensee has not been granted any trademark license as part of this Agreement and may not use the name or mark “AlphaPose", "Shanghai Jiao Tong" or any renditions thereof without the prior written permission of Licensor.
+
+You may not sell, rent, lease, sublicense, lend, time-share or transfer, in whole or in part, or provide third parties access to prior or present versions (or any parts thereof) of the Software.
+
+ASSIGNMENT: You may not assign this Agreement or your rights hereunder without the prior written consent of Licensor. Any attempted assignment without such consent shall be null and void.
+
+TERM: The term of the license granted by this Agreement is from Licensee's acceptance of this Agreement by downloading the Software or by using the Software until terminated as provided below.
+
+The Agreement automatically terminates without notice if you fail to comply with any provision of this Agreement. Licensee may terminate this Agreement by ceasing using the Software. Upon any termination of this Agreement, Licensee will delete any and all copies of the Software. You agree that all provisions which operate to protect the proprietary rights of Licensor shall remain in force should breach occur and that the obligation of confidentiality described in this Agreement is binding in perpetuity and, as such, survives the term of the Agreement.
+
+FEE: Provided Licensee abides completely by the terms and conditions of this Agreement, there is no fee due to Licensor for Licensee's use of the Software in accordance with this Agreement.
+
+DISCLAIMER OF WARRANTIES: THE SOFTWARE IS PROVIDED "AS-IS" WITHOUT WARRANTY OF ANY KIND INCLUDING ANY WARRANTIES OF PERFORMANCE OR MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE OR PURPOSE OR OF NON-INFRINGEMENT. LICENSEE BEARS ALL RISK RELATING TO QUALITY AND PERFORMANCE OF THE SOFTWARE AND RELATED MATERIALS.
+
+SUPPORT AND MAINTENANCE: No Software support or training by the Licensor is provided as part of this Agreement.
+
+EXCLUSIVE REMEDY AND LIMITATION OF LIABILITY: To the maximum extent permitted under applicable law, Licensor shall not be liable for direct, indirect, special, incidental, or consequential damages or lost profits related to Licensee's use of and/or inability to use the Software, even if Licensor is advised of the possibility of such damage.
+
+EXPORT REGULATION: Licensee agrees to comply with any and all applicable
+U.S. export control laws, regulations, and/or other laws related to embargoes and sanction programs administered by the Office of Foreign Assets Control.
+
+SEVERABILITY: If any provision(s) of this Agreement shall be held to be invalid, illegal, or unenforceable by a court or other tribunal of competent jurisdiction, the validity, legality and enforceability of the remaining provisions shall not in any way be affected or impaired thereby.
+
+NO IMPLIED WAIVERS: No failure or delay by Licensor in enforcing any right or remedy under this Agreement shall be construed as a waiver of any future or other exercise of such right or remedy by Licensor.
+
+ENTIRE AGREEMENT AND AMENDMENTS: This Agreement constitutes the sole and entire agreement between Licensee and Licensor as to the matter set forth herein and supersedes any previous agreements, understandings, and arrangements between the parties relating hereto.
+
+
+
+************************************************************************
+
+THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
+
+This project incorporates material from the project(s) listed below (collectively, "Third Party Code"). This Third Party Code is licensed to you under their original license terms set forth below. We reserves all other rights not expressly granted, whether by implication, estoppel or otherwise.
+
+1. Torch, (https://github.com/torch/distro)
+
+Copyright (c) 2016, Soumith Chintala, Ronan Collobert, Koray Kavukcuoglu, Clement Farabet All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+Neither the name of distro nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+2. TensorFlow (https://github.com/tensorflow/tensorflow)
+Copyright 2018 The TensorFlow Authors. All rights reserved.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2017, The TensorFlow Authors.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+3. tf-faster-rcnn (https://github.com/endernewton/tf-faster-rcnn)
+MIT License
+
+Copyright (c) 2017 Xinlei Chen
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+4. PyraNet (https://github.com/bearpaw/PyraNet)
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+5. pose-hg-demo (https://github.com/umich-vl/pose-hg-demo)
+Copyright (c) 2016, University of Michigan
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+************END OF THIRD-PARTY SOFTWARE NOTICES AND INFORMATION**********
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/README.md b/joints_detectors/Alphapose/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1fe8075797d362f2c868b8b67202f4d9a559e671
--- /dev/null
+++ b/joints_detectors/Alphapose/README.md
@@ -0,0 +1,112 @@
+
+
+
+
+
+
+## News!
+- Apr 2019: [**MXNet** version](https://github.com/MVIG-SJTU/AlphaPose/tree/mxnet) of AlphaPose is released! It runs at **23 fps** on COCO validation set.
+- Feb 2019: [CrowdPose](https://github.com/MVIG-SJTU/AlphaPose/blob/pytorch/doc/CrowdPose.md) is now integrated into AlphaPose!
+- Dec 2018: [General version](https://github.com/MVIG-SJTU/AlphaPose/tree/pytorch/PoseFlow) of PoseFlow is released! 3X faster, with support for visualizing pose-tracking results!
+- Sep 2018: [**PyTorch** version](https://github.com/MVIG-SJTU/AlphaPose/tree/pytorch) of AlphaPose is released! It runs at **20 fps** on COCO validation set (4.6 people per image on average) and achieves 71 mAP!
+
+## AlphaPose
+[Alpha Pose](http://www.mvig.org/research/alphapose.html) is an accurate multi-person pose estimator, which is the **first open-source system that achieves 70+ mAP (72.3 mAP) on the COCO dataset and 80+ mAP (82.1 mAP) on the MPII dataset.**
+To match poses that correspond to the same person across frames, we also provide an efficient online pose tracker called Pose Flow. It is the **first open-source online pose tracker that achieves both 60+ mAP (66.5 mAP) and 50+ MOTA (58.3 MOTA) on the PoseTrack Challenge dataset.**
+
+AlphaPose supports both Linux and **Windows!**
+
+
+
+
+
+
+## Installation
+**Windows version**: please check out [doc/win_install.md](doc/win_install.md)
+
+1. Get the code.
+ ```Shell
+ git clone -b pytorch https://github.com/MVIG-SJTU/AlphaPose.git
+ ```
+
+2. Install [pytorch 0.4.0](https://github.com/pytorch/pytorch) and other dependencies.
+ ```Shell
+ pip install -r requirements.txt
+ ```
+
+3. Download the models manually: **duc_se.pth** (2018/08/30) ([Google Drive]( https://drive.google.com/open?id=1OPORTWB2cwd5YTVBX-NE8fsauZJWsrtW) | [Baidu pan](https://pan.baidu.com/s/15jbRNKuslzm5wRSgUVytrA)), **yolov3-spp.weights**([Google Drive](https://drive.google.com/open?id=1D47msNOOiJKvPOXlnpyzdKA3k6E97NTC) | [Baidu pan](https://pan.baidu.com/s/1Zb2REEIk8tcahDa8KacPNA)). Place them into `./models/sppe` and `./models/yolo` respectively.
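+
+   As a quick sanity check (a minimal sketch; the paths are assumed from the step above, not part of the official docs), verify that the files landed where the demo expects them:
+   ```Shell
+   ls models/sppe/duc_se.pth models/yolo/yolov3-spp.weights
+   ```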
+
+
+## Quick Start
+- **Input dir**: Run AlphaPose for all images in a folder with:
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res
+```
+- **Video**: Run AlphaPose for a video and save the rendered video with:
+```
+python3 video_demo.py --video ${path to video} --outdir examples/res --save_video
+```
+- **Webcam**: Run AlphaPose using webcam and visualize the results with:
+```
+python3 webcam_demo.py --webcam 0 --outdir examples/res --vis
+```
+- **Input list**: Run AlphaPose for images in a list and save the rendered images with:
+```
+python3 demo.py --list examples/list-coco-demo.txt --indir ${img_directory} --outdir examples/res --save_img
+```
+- **Note**: If you run into an OOM (out of memory) problem, decrease the pose estimation batch size until the program can run on your computer:
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --posebatch 30
+```
+- **Getting more accurate**: You can enable flip testing to get more accurate results by disabling fast_inference, e.g.:
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --fast_inference False
+```
+- **Speeding up**: Check out [speed_up.md](doc/speed_up.md) for more details.
+- **Output format**: Check out [output.md](doc/output.md) for more details.
+- **For more**: Check out [run.md](doc/run.md) for more options.
+
+## Pose Tracking
+
+
+
+
+
+
+Please read [PoseFlow/README.md](PoseFlow/) for details.
+
+### CrowdPose
+
+
+
+
+Please read [doc/CrowdPose.md](doc/CrowdPose.md) for details.
+
+
+## FAQ
+Check out [faq.md](doc/faq.md) for frequently asked questions.
+
+## Contributors
+The PyTorch version of AlphaPose is developed and maintained by [Jiefeng Li](http://jeff-leaf.site/), [Hao-Shu Fang](https://fang-haoshu.github.io/), [Yuliang Xiu](http://xiuyuliang.cn) and [Cewu Lu](http://www.mvig.org/).
+
+## Citation
+Please cite these papers in your publications if they help your research:
+
+ @inproceedings{fang2017rmpe,
+ title={{RMPE}: Regional Multi-person Pose Estimation},
+ author={Fang, Hao-Shu and Xie, Shuqin and Tai, Yu-Wing and Lu, Cewu},
+ booktitle={ICCV},
+ year={2017}
+ }
+
+ @inproceedings{xiu2018poseflow,
+ author = {Xiu, Yuliang and Li, Jiefeng and Wang, Haoyu and Fang, Yinghong and Lu, Cewu},
+ title = {{Pose Flow}: Efficient Online Pose Tracking},
+ booktitle={BMVC},
+ year = {2018}
+ }
+
+
+
+## License
+AlphaPose is freely available for non-commercial use, and may be redistributed under these conditions. For commercial queries, please drop an e-mail to mvig.alphapose[at]gmail[dot]com and cc lucewu[at]sjtu[dot]edu[dot]cn. We will send the detailed agreement to you.
diff --git a/joints_detectors/Alphapose/SPPE/.gitattributes b/joints_detectors/Alphapose/SPPE/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..dfe0770424b2a19faf507a501ebfc23be8f54e7b
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/.gitattributes
@@ -0,0 +1,2 @@
+# Auto detect text files and perform LF normalization
+* text=auto
diff --git a/joints_detectors/Alphapose/SPPE/.gitignore b/joints_detectors/Alphapose/SPPE/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..8c72637c230fa6602bea9f5185ee430299436702
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/.gitignore
@@ -0,0 +1,114 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+
+.vscode/
+*.pkl
+exp
+exp/*
+data
+data/*
+model
+model/*
+*/images
+*/images/*
+
+*.h5
+*.pth
+
diff --git a/joints_detectors/Alphapose/SPPE/LICENSE b/joints_detectors/Alphapose/SPPE/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..fedbdfd08ffbfa2c2bc457bc7e3988a474304c78
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Jeff-sjtu
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/SPPE/README.md b/joints_detectors/Alphapose/SPPE/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..2b54b7312219ea6a186a2731891ec0c9a7016962
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/README.md
@@ -0,0 +1 @@
+# pytorch-AlphaPose
diff --git a/joints_detectors/Alphapose/SPPE/__init__.py b/joints_detectors/Alphapose/SPPE/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/SPPE/src/__init__.py b/joints_detectors/Alphapose/SPPE/src/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/SPPE/src/main_fast_inference.py b/joints_detectors/Alphapose/SPPE/src/main_fast_inference.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff782469885215e83fa820d0bd9d0bbe886050a2
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/main_fast_inference.py
@@ -0,0 +1,67 @@
+import sys
+
+import torch
+import torch._utils
+import torch.nn as nn
+import torch.utils.data
+import torch.utils.data.distributed
+
+from SPPE.src.models.FastPose import createModel
+from SPPE.src.utils.img import flip, shuffleLR
+
+try:
+ torch._utils._rebuild_tensor_v2
+except AttributeError:
+ def _rebuild_tensor_v2(storage, storage_offset, size, stride, requires_grad, backward_hooks):
+ tensor = torch._utils._rebuild_tensor(storage, storage_offset, size, stride)
+ tensor.requires_grad = requires_grad
+ tensor._backward_hooks = backward_hooks
+ return tensor
+ torch._utils._rebuild_tensor_v2 = _rebuild_tensor_v2
+
+
+class InferenNet(nn.Module):
+ def __init__(self, kernel_size, dataset):
+ super(InferenNet, self).__init__()
+
+ model = createModel().cuda()
+ print('Loading pose model from {}'.format('joints_detectors/Alphapose/models/sppe/duc_se.pth'))
+ sys.stdout.flush()
+ model.load_state_dict(torch.load('joints_detectors/Alphapose/models/sppe/duc_se.pth'))
+ model.eval()
+ self.pyranet = model
+
+ self.dataset = dataset
+
+ def forward(self, x):
+ out = self.pyranet(x)
+ out = out.narrow(1, 0, 17)
+
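+        # Flip testing: run the horizontally flipped input through the same
+        # network, swap the left/right joint channels back with shuffleLR, and
+        # average the two heatmaps for a more robust prediction.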
+ flip_out = self.pyranet(flip(x))
+ flip_out = flip_out.narrow(1, 0, 17)
+
+ flip_out = flip(shuffleLR(
+ flip_out, self.dataset))
+
+ out = (flip_out + out) / 2
+
+ return out
+
+
+class InferenNet_fast(nn.Module):
+ def __init__(self, kernel_size, dataset):
+ super(InferenNet_fast, self).__init__()
+
+ model = createModel().cuda()
+ print('Loading pose model from {}'.format('models/sppe/duc_se.pth'))
+ model.load_state_dict(torch.load('models/sppe/duc_se.pth'))
+ model.eval()
+ self.pyranet = model
+
+ self.dataset = dataset
+
+ def forward(self, x):
+ out = self.pyranet(x)
+ out = out.narrow(1, 0, 17)
+
+ return out
diff --git a/joints_detectors/Alphapose/SPPE/src/models/FastPose.py b/joints_detectors/Alphapose/SPPE/src/models/FastPose.py
new file mode 100644
index 0000000000000000000000000000000000000000..d9e660819e0cd872930f44d81af09cedc92d4f07
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/FastPose.py
@@ -0,0 +1,35 @@
+import torch.nn as nn
+from torch.autograd import Variable
+
+from .layers.SE_Resnet import SEResnet
+from .layers.DUC import DUC
+from opt import opt
+
+
+def createModel():
+ return FastPose()
+
+
+class FastPose(nn.Module):
+ DIM = 128
+
+ def __init__(self):
+ super(FastPose, self).__init__()
+
+ self.preact = SEResnet('resnet101')
+
+ self.suffle1 = nn.PixelShuffle(2)
+ self.duc1 = DUC(512, 1024, upscale_factor=2)
+ self.duc2 = DUC(256, 512, upscale_factor=2)
+
+ self.conv_out = nn.Conv2d(
+ self.DIM, opt.nClasses, kernel_size=3, stride=1, padding=1)
+
+ def forward(self, x: Variable):
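+        # SE-ResNet backbone, then three upsampling stages (PixelShuffle plus
+        # two DUC blocks) before the final 3x3 conv that emits the keypoint heatmaps.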
+ out = self.preact(x)
+ out = self.suffle1(out)
+ out = self.duc1(out)
+ out = self.duc2(out)
+
+ out = self.conv_out(out)
+ return out
diff --git a/joints_detectors/Alphapose/SPPE/src/models/__init__.py b/joints_detectors/Alphapose/SPPE/src/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9742821a6f164200bc145e7a847382f08778303
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/__init__.py
@@ -0,0 +1 @@
+from . import *
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/SPPE/src/models/hg-prm.py b/joints_detectors/Alphapose/SPPE/src/models/hg-prm.py
new file mode 100644
index 0000000000000000000000000000000000000000..6de6a345292a5a21dce863905f193373dbe32416
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/hg-prm.py
@@ -0,0 +1,126 @@
+import torch.nn as nn
+from .layers.PRM import Residual as ResidualPyramid
+from .layers.Residual import Residual as Residual
+from torch.autograd import Variable
+from opt import opt
+from collections import defaultdict
+
+
+class Hourglass(nn.Module):
+ def __init__(self, n, nFeats, nModules, inputResH, inputResW, net_type, B, C):
+ super(Hourglass, self).__init__()
+
+ self.ResidualUp = ResidualPyramid if n >= 2 else Residual
+ self.ResidualDown = ResidualPyramid if n >= 3 else Residual
+
+ self.depth = n
+ self.nModules = nModules
+ self.nFeats = nFeats
+ self.net_type = net_type
+ self.B = B
+ self.C = C
+ self.inputResH = inputResH
+ self.inputResW = inputResW
+
+ self.up1 = self._make_residual(self.ResidualUp, False, inputResH, inputResW)
+ self.low1 = nn.Sequential(
+ nn.MaxPool2d(2),
+ self._make_residual(self.ResidualDown, False, inputResH / 2, inputResW / 2)
+ )
+ if n > 1:
+ self.low2 = Hourglass(n - 1, nFeats, nModules, inputResH / 2, inputResW / 2, net_type, B, C)
+ else:
+ self.low2 = self._make_residual(self.ResidualDown, False, inputResH / 2, inputResW / 2)
+
+ self.low3 = self._make_residual(self.ResidualDown, True, inputResH / 2, inputResW / 2)
+ self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
+
+ self.upperBranch = self.up1
+ self.lowerBranch = nn.Sequential(
+ self.low1,
+ self.low2,
+ self.low3,
+ self.up2
+ )
+
+ def _make_residual(self, resBlock, useConv, inputResH, inputResW):
+ layer_list = []
+ for i in range(self.nModules):
+ layer_list.append(resBlock(self.nFeats, self.nFeats, inputResH, inputResW,
+ stride=1, net_type=self.net_type, useConv=useConv,
+ baseWidth=self.B, cardinality=self.C))
+ return nn.Sequential(*layer_list)
+
+ def forward(self, x: Variable):
+ up1 = self.upperBranch(x)
+ up2 = self.lowerBranch(x)
+ out = up1 + up2
+ return out
+
+
+class PyraNet(nn.Module):
+ def __init__(self):
+ super(PyraNet, self).__init__()
+
+ B, C = opt.baseWidth, opt.cardinality
+ self.inputResH = opt.inputResH / 4
+ self.inputResW = opt.inputResW / 4
+ self.nStack = opt.nStack
+
+ self.cnv1 = nn.Sequential(
+ nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3),
+ nn.BatchNorm2d(64),
+ nn.ReLU(True)
+ )
+ self.r1 = nn.Sequential(
+ ResidualPyramid(64, 128, opt.inputResH / 2, opt.inputResW / 2,
+ stride=1, net_type='no_preact', useConv=False, baseWidth=B, cardinality=C),
+ nn.MaxPool2d(2)
+ )
+ self.r4 = ResidualPyramid(128, 128, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ self.r5 = ResidualPyramid(128, opt.nFeats, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ self.preact = nn.Sequential(
+ self.cnv1,
+ self.r1,
+ self.r4,
+ self.r5
+ )
+ self.stack_layers = defaultdict(list)
+ for i in range(self.nStack):
+ hg = Hourglass(4, opt.nFeats, opt.nResidual, self.inputResH, self.inputResW, 'preact', B, C)
+ lin = nn.Sequential(
+ hg,
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True),
+ nn.Conv2d(opt.nFeats, opt.nFeats, kernel_size=1, stride=1, padding=0),
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True)
+ )
+ tmpOut = nn.Conv2d(opt.nFeats, opt.nClasses, kernel_size=1, stride=1, padding=0)
+ self.stack_layers['lin'].append(lin)
+ self.stack_layers['out'].append(tmpOut)
+ if i < self.nStack - 1:
+ lin_ = nn.Conv2d(opt.nFeats, opt.nFeats, kernel_size=1, stride=1, padding=0)
+ tmpOut_ = nn.Conv2d(opt.nClasses, opt.nFeats, kernel_size=1, stride=1, padding=0)
+ self.stack_layers['lin_'].append(lin_)
+ self.stack_layers['out_'].append(tmpOut_)
+
+ def forward(self, x: Variable):
+ out = []
+ inter = self.preact(x)
+ for i in range(self.nStack):
+ lin = self.stack_layers['lin'][i](inter)
+ tmpOut = self.stack_layers['out'][i](lin)
+ out.append(tmpOut)
+ if i < self.nStack - 1:
+ lin_ = self.stack_layers['lin_'][i](lin)
+ tmpOut_ = self.stack_layers['out_'][i](tmpOut)
+ inter = inter + lin_ + tmpOut_
+ return out
+
+
+def createModel(**kw):
+ model = PyraNet()
+ return model
diff --git a/joints_detectors/Alphapose/SPPE/src/models/hgPRM.py b/joints_detectors/Alphapose/SPPE/src/models/hgPRM.py
new file mode 100644
index 0000000000000000000000000000000000000000..d115071735f81cfd3bbdc34d43e1b475b0b8fc8d
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/hgPRM.py
@@ -0,0 +1,236 @@
+import torch.nn as nn
+from .layers.PRM import Residual as ResidualPyramid
+from .layers.Residual import Residual as Residual
+from torch.autograd import Variable
+import torch
+from opt import opt
+import math
+
+
+class Hourglass(nn.Module):
+ def __init__(self, n, nFeats, nModules, inputResH, inputResW, net_type, B, C):
+ super(Hourglass, self).__init__()
+
+ self.ResidualUp = ResidualPyramid if n >= 2 else Residual
+ self.ResidualDown = ResidualPyramid if n >= 3 else Residual
+
+ self.depth = n
+ self.nModules = nModules
+ self.nFeats = nFeats
+ self.net_type = net_type
+ self.B = B
+ self.C = C
+ self.inputResH = inputResH
+ self.inputResW = inputResW
+
+ up1 = self._make_residual(self.ResidualUp, False, inputResH, inputResW)
+ low1 = nn.Sequential(
+ nn.MaxPool2d(2),
+ self._make_residual(self.ResidualDown, False, inputResH / 2, inputResW / 2)
+ )
+ if n > 1:
+ low2 = Hourglass(n - 1, nFeats, nModules, inputResH / 2, inputResW / 2, net_type, B, C)
+ else:
+ low2 = self._make_residual(self.ResidualDown, False, inputResH / 2, inputResW / 2)
+
+ low3 = self._make_residual(self.ResidualDown, True, inputResH / 2, inputResW / 2)
+ up2 = nn.UpsamplingNearest2d(scale_factor=2)
+
+ self.upperBranch = up1
+ self.lowerBranch = nn.Sequential(
+ low1,
+ low2,
+ low3,
+ up2
+ )
+
+ def _make_residual(self, resBlock, useConv, inputResH, inputResW):
+ layer_list = []
+ for i in range(self.nModules):
+ layer_list.append(resBlock(self.nFeats, self.nFeats, inputResH, inputResW,
+ stride=1, net_type=self.net_type, useConv=useConv,
+ baseWidth=self.B, cardinality=self.C))
+ return nn.Sequential(*layer_list)
+
+ def forward(self, x: Variable):
+ up1 = self.upperBranch(x)
+ up2 = self.lowerBranch(x)
+ # out = up1 + up2
+ out = torch.add(up1, up2)
+ return out
+
+
+class PyraNet(nn.Module):
+ def __init__(self):
+ super(PyraNet, self).__init__()
+
+ B, C = opt.baseWidth, opt.cardinality
+ self.inputResH = opt.inputResH / 4
+ self.inputResW = opt.inputResW / 4
+ self.nStack = opt.nStack
+
+ conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 3))
+
+ cnv1 = nn.Sequential(
+ conv1,
+ nn.BatchNorm2d(64),
+ nn.ReLU(True)
+ )
+
+ r1 = nn.Sequential(
+ ResidualPyramid(64, 128, opt.inputResH / 2, opt.inputResW / 2,
+ stride=1, net_type='no_preact', useConv=False, baseWidth=B, cardinality=C),
+ nn.MaxPool2d(2)
+ )
+ r4 = ResidualPyramid(128, 128, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ r5 = ResidualPyramid(128, opt.nFeats, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ self.preact = nn.Sequential(
+ cnv1,
+ r1,
+ r4,
+ r5
+ )
+
+ self.stack_lin = nn.ModuleList()
+ self.stack_out = nn.ModuleList()
+ self.stack_lin_ = nn.ModuleList()
+ self.stack_out_ = nn.ModuleList()
+
+ for i in range(self.nStack):
+ hg = Hourglass(4, opt.nFeats, opt.nResidual, self.inputResH, self.inputResW, 'preact', B, C)
+ conv1 = nn.Conv2d(opt.nFeats, opt.nFeats, kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ lin = nn.Sequential(
+ hg,
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True),
+ conv1,
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True)
+ )
+ tmpOut = nn.Conv2d(opt.nFeats, opt.nClasses, kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(tmpOut.weight)
+ self.stack_lin.append(lin)
+ self.stack_out.append(tmpOut)
+ if i < self.nStack - 1:
+ lin_ = nn.Conv2d(opt.nFeats, opt.nFeats, kernel_size=1, stride=1, padding=0)
+ tmpOut_ = nn.Conv2d(opt.nClasses, opt.nFeats, kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(lin_.weight)
+ nn.init.xavier_normal(tmpOut_.weight)
+ self.stack_lin_.append(lin_)
+ self.stack_out_.append(tmpOut_)
+
+ def forward(self, x: Variable):
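+        # Stacked hourglass with intermediate supervision: every stack emits a
+        # heatmap prediction; the features and the remapped prediction are summed
+        # back into the input of the next stack.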
+ out = []
+ inter = self.preact(x)
+ for i in range(self.nStack):
+ lin = self.stack_lin[i](inter)
+ tmpOut = self.stack_out[i](lin)
+ out.append(tmpOut)
+ if i < self.nStack - 1:
+ lin_ = self.stack_lin_[i](lin)
+ tmpOut_ = self.stack_out_[i](tmpOut)
+ inter = inter + lin_ + tmpOut_
+ return out
+
+
+class PyraNet_Inference(nn.Module):
+ def __init__(self):
+ super(PyraNet_Inference, self).__init__()
+
+ B, C = opt.baseWidth, opt.cardinality
+ self.inputResH = opt.inputResH / 4
+ self.inputResW = opt.inputResW / 4
+ self.nStack = opt.nStack
+
+ conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 3))
+
+ cnv1 = nn.Sequential(
+ conv1,
+ nn.BatchNorm2d(64),
+ nn.ReLU(True)
+ )
+
+ r1 = nn.Sequential(
+ ResidualPyramid(64, 128, opt.inputResH / 2, opt.inputResW / 2,
+ stride=1, net_type='no_preact', useConv=False, baseWidth=B, cardinality=C),
+ nn.MaxPool2d(2)
+ )
+ r4 = ResidualPyramid(128, 128, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ r5 = ResidualPyramid(128, opt.nFeats, self.inputResH, self.inputResW,
+ stride=1, net_type='preact', useConv=False, baseWidth=B, cardinality=C)
+ self.preact = nn.Sequential(
+ cnv1,
+ r1,
+ r4,
+ r5
+ )
+
+ self.stack_lin = nn.ModuleList()
+ self.stack_out = nn.ModuleList()
+ self.stack_lin_ = nn.ModuleList()
+ self.stack_out_ = nn.ModuleList()
+
+ for i in range(self.nStack):
+ hg = Hourglass(4, opt.nFeats, opt.nResidual,
+ self.inputResH, self.inputResW, 'preact', B, C)
+ conv1 = nn.Conv2d(opt.nFeats, opt.nFeats,
+ kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ lin = nn.Sequential(
+ hg,
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True),
+ conv1,
+ nn.BatchNorm2d(opt.nFeats),
+ nn.ReLU(True)
+ )
+ tmpOut = nn.Conv2d(opt.nFeats, opt.nClasses,
+ kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(tmpOut.weight)
+ self.stack_lin.append(lin)
+ self.stack_out.append(tmpOut)
+ if i < self.nStack - 1:
+ lin_ = nn.Conv2d(opt.nFeats, opt.nFeats,
+ kernel_size=1, stride=1, padding=0)
+ tmpOut_ = nn.Conv2d(opt.nClasses, opt.nFeats,
+ kernel_size=1, stride=1, padding=0)
+ if opt.init:
+ nn.init.xavier_normal(lin_.weight)
+ nn.init.xavier_normal(tmpOut_.weight)
+ self.stack_lin_.append(lin_)
+ self.stack_out_.append(tmpOut_)
+
+ def forward(self, x: Variable):
+ inter = self.preact(x)
+ for i in range(self.nStack):
+ lin = self.stack_lin[i](inter)
+ tmpOut = self.stack_out[i](lin)
+ out = tmpOut
+ if i < self.nStack - 1:
+ lin_ = self.stack_lin_[i](lin)
+ tmpOut_ = self.stack_out_[i](tmpOut)
+ inter = inter + lin_ + tmpOut_
+ return out
+
+
+def createModel(**kw):
+ model = PyraNet()
+ return model
+
+
+def createModel_Inference(**kw):
+ model = PyraNet_Inference()
+ return model
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/DUC.py b/joints_detectors/Alphapose/SPPE/src/models/layers/DUC.py
new file mode 100644
index 0000000000000000000000000000000000000000..3592661fc875dbaa7fa70e01ef49befa89569ebd
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/DUC.py
@@ -0,0 +1,23 @@
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class DUC(nn.Module):
+    '''
+    Dense Upsampling Convolution.
+    INPUT:  inplanes x ht x wd
+    OUTPUT: (planes // upscale_factor^2) x (ht * upscale_factor) x (wd * upscale_factor)
+    '''
+ def __init__(self, inplanes, planes, upscale_factor=2):
+ super(DUC, self).__init__()
+ self.conv = nn.Conv2d(inplanes, planes, kernel_size=3, padding=1, bias=False)
+ self.bn = nn.BatchNorm2d(planes)
+ self.relu = nn.ReLU()
+
+ self.pixel_shuffle = nn.PixelShuffle(upscale_factor)
+
+ def forward(self, x):
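+        # conv/BN/ReLU raises the channel count, then PixelShuffle trades
+        # channels for spatial resolution (upscale_factor x upsampling).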
+ x = self.conv(x)
+ x = self.bn(x)
+ x = self.relu(x)
+ x = self.pixel_shuffle(x)
+ return x
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/PRM.py b/joints_detectors/Alphapose/SPPE/src/models/layers/PRM.py
new file mode 100644
index 0000000000000000000000000000000000000000..375bea4e45362ee240632c94ab6bfbf72f324e26
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/PRM.py
@@ -0,0 +1,135 @@
+import torch.nn as nn
+from .util_models import ConcatTable, CaddTable, Identity
+import math
+from opt import opt
+
+
+class Residual(nn.Module):
+ def __init__(self, numIn, numOut, inputResH, inputResW, stride=1,
+ net_type='preact', useConv=False, baseWidth=9, cardinality=4):
+ super(Residual, self).__init__()
+
+ self.con = ConcatTable([convBlock(numIn, numOut, inputResH,
+ inputResW, net_type, baseWidth, cardinality, stride),
+ skipLayer(numIn, numOut, stride, useConv)])
+ self.cadd = CaddTable(True)
+
+ def forward(self, x):
+ out = self.con(x)
+ out = self.cadd(out)
+ return out
+
+
+def convBlock(numIn, numOut, inputResH, inputResW, net_type, baseWidth, cardinality, stride):
+ numIn = int(numIn)
+ numOut = int(numOut)
+
+ addTable = ConcatTable()
+ s_list = []
+ if net_type != 'no_preact':
+ s_list.append(nn.BatchNorm2d(numIn))
+ s_list.append(nn.ReLU(True))
+
+ conv1 = nn.Conv2d(numIn, numOut // 2, kernel_size=1)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ s_list.append(conv1)
+
+ s_list.append(nn.BatchNorm2d(numOut // 2))
+ s_list.append(nn.ReLU(True))
+
+ conv2 = nn.Conv2d(numOut // 2, numOut // 2,
+ kernel_size=3, stride=stride, padding=1)
+ if opt.init:
+ nn.init.xavier_normal(conv2.weight)
+ s_list.append(conv2)
+
+ s = nn.Sequential(*s_list)
+ addTable.add(s)
+
+ D = math.floor(numOut // baseWidth)
+ C = cardinality
+ s_list = []
+
+ if net_type != 'no_preact':
+ s_list.append(nn.BatchNorm2d(numIn))
+ s_list.append(nn.ReLU(True))
+
+ conv1 = nn.Conv2d(numIn, D, kernel_size=1, stride=stride)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / C))
+
+ s_list.append(conv1)
+ s_list.append(nn.BatchNorm2d(D))
+ s_list.append(nn.ReLU(True))
+ s_list.append(pyramid(D, C, inputResH, inputResW))
+ s_list.append(nn.BatchNorm2d(D))
+ s_list.append(nn.ReLU(True))
+
+ a = nn.Conv2d(D, numOut // 2, kernel_size=1)
+ a.nBranchIn = C
+ if opt.init:
+ nn.init.xavier_normal(a.weight, gain=math.sqrt(1 / C))
+ s_list.append(a)
+
+ s = nn.Sequential(*s_list)
+ addTable.add(s)
+
+ elewiswAdd = nn.Sequential(
+ addTable,
+ CaddTable(False)
+ )
+ conv2 = nn.Conv2d(numOut // 2, numOut, kernel_size=1)
+ if opt.init:
+ nn.init.xavier_normal(conv2.weight, gain=math.sqrt(1 / 2))
+ model = nn.Sequential(
+ elewiswAdd,
+ nn.BatchNorm2d(numOut // 2),
+ nn.ReLU(True),
+ conv2
+ )
+ return model
+
+
+def pyramid(D, C, inputResH, inputResW):
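+    # Build C parallel branches at progressively smaller scales: fractional
+    # max-pooling down, a 3x3 conv, then bilinear upsampling back to the input
+    # resolution; the branch outputs are summed by CaddTable.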
+ pyraTable = ConcatTable()
+ sc = math.pow(2, 1 / C)
+ for i in range(C):
+ scaled = 1 / math.pow(sc, i + 1)
+ conv1 = nn.Conv2d(D, D, kernel_size=3, stride=1, padding=1)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight)
+ s = nn.Sequential(
+ nn.FractionalMaxPool2d(2, output_ratio=(scaled, scaled)),
+ conv1,
+ nn.UpsamplingBilinear2d(size=(int(inputResH), int(inputResW))))
+ pyraTable.add(s)
+ pyra = nn.Sequential(
+ pyraTable,
+ CaddTable(False)
+ )
+ return pyra
+
+
+class skipLayer(nn.Module):
+ def __init__(self, numIn, numOut, stride, useConv):
+ super(skipLayer, self).__init__()
+ self.identity = False
+
+ if numIn == numOut and stride == 1 and not useConv:
+ self.identity = True
+ else:
+ conv1 = nn.Conv2d(numIn, numOut, kernel_size=1, stride=stride)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ self.m = nn.Sequential(
+ nn.BatchNorm2d(numIn),
+ nn.ReLU(True),
+ conv1
+ )
+
+ def forward(self, x):
+ if self.identity:
+ return x
+ else:
+ return self.m(x)
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/Residual.py b/joints_detectors/Alphapose/SPPE/src/models/layers/Residual.py
new file mode 100644
index 0000000000000000000000000000000000000000..1449a41aa42da847d85b5851bc9c2ff68cb20cb0
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/Residual.py
@@ -0,0 +1,54 @@
+import torch.nn as nn
+import math
+from .util_models import ConcatTable, CaddTable, Identity
+from opt import opt
+
+
+def Residual(numIn, numOut, *arg, stride=1, net_type='preact', useConv=False, **kw):
+ con = ConcatTable([convBlock(numIn, numOut, stride, net_type),
+ skipLayer(numIn, numOut, stride, useConv)])
+ cadd = CaddTable(True)
+ return nn.Sequential(con, cadd)
+
+
+def convBlock(numIn, numOut, stride, net_type):
+ s_list = []
+ if net_type != 'no_preact':
+ s_list.append(nn.BatchNorm2d(numIn))
+ s_list.append(nn.ReLU(True))
+
+ conv1 = nn.Conv2d(numIn, numOut // 2, kernel_size=1)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ s_list.append(conv1)
+
+ s_list.append(nn.BatchNorm2d(numOut // 2))
+ s_list.append(nn.ReLU(True))
+
+ conv2 = nn.Conv2d(numOut // 2, numOut // 2, kernel_size=3, stride=stride, padding=1)
+ if opt.init:
+ nn.init.xavier_normal(conv2.weight)
+ s_list.append(conv2)
+ s_list.append(nn.BatchNorm2d(numOut // 2))
+ s_list.append(nn.ReLU(True))
+
+ conv3 = nn.Conv2d(numOut // 2, numOut, kernel_size=1)
+ if opt.init:
+ nn.init.xavier_normal(conv3.weight)
+ s_list.append(conv3)
+
+ return nn.Sequential(*s_list)
+
+
+def skipLayer(numIn, numOut, stride, useConv):
+ if numIn == numOut and stride == 1 and not useConv:
+ return Identity()
+ else:
+ conv1 = nn.Conv2d(numIn, numOut, kernel_size=1, stride=stride)
+ if opt.init:
+ nn.init.xavier_normal(conv1.weight, gain=math.sqrt(1 / 2))
+ return nn.Sequential(
+ nn.BatchNorm2d(numIn),
+ nn.ReLU(True),
+ conv1
+ )
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/Resnet.py b/joints_detectors/Alphapose/SPPE/src/models/layers/Resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..72f07db4a7b8d9395e2ac7a8ad51d7607ee21959
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/Resnet.py
@@ -0,0 +1,82 @@
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class Bottleneck(nn.Module):
+ expansion = 4
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None):
+ super(Bottleneck, self).__init__()
+ self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, stride=1, bias=False)
+ self.bn1 = nn.BatchNorm2d(planes)
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
+ self.bn2 = nn.BatchNorm2d(planes)
+ self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, stride=1, bias=False)
+ self.bn3 = nn.BatchNorm2d(planes * 4)
+ self.downsample = downsample
+ self.stride = stride
+
+ def forward(self, x):
+ residual = x
+
+ out = F.relu(self.bn1(self.conv1(x)), inplace=True)
+ out = F.relu(self.bn2(self.conv2(out)), inplace=True)
+ out = self.bn3(self.conv3(out))
+
+ if self.downsample is not None:
+ residual = self.downsample(x)
+
+ out += residual
+ out = F.relu(out, inplace=True)
+
+ return out
+
+
+class ResNet(nn.Module):
+ """ Resnet """
+ def __init__(self, architecture):
+ super(ResNet, self).__init__()
+ assert architecture in ["resnet50", "resnet101"]
+ self.inplanes = 64
+ self.layers = [3, 4, {"resnet50": 6, "resnet101": 23}[architecture], 3]
+ self.block = Bottleneck
+
+ self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
+ self.bn1 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.01, affine=True)
+ self.relu = nn.ReLU(inplace=True)
+ self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2)
+
+ self.layer1 = self.make_layer(self.block, 64, self.layers[0])
+ self.layer2 = self.make_layer(self.block, 128, self.layers[1], stride=2)
+ self.layer3 = self.make_layer(self.block, 256, self.layers[2], stride=2)
+
+ self.layer4 = self.make_layer(
+ self.block, 512, self.layers[3], stride=2)
+
+ def forward(self, x):
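+        # Standard ResNet trunk: stem conv + max-pool, then four bottleneck
+        # stages producing 256/512/1024/2048 channels.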
+ x = self.maxpool(self.relu(self.bn1(self.conv1(x))))
+ x = self.layer1(x)
+ x = self.layer2(x)
+ x = self.layer3(x)
+ x = self.layer4(x)
+ return x
+
+ def stages(self):
+ return [self.layer1, self.layer2, self.layer3, self.layer4]
+
+ def make_layer(self, block, planes, blocks, stride=1):
+ downsample = None
+ if stride != 1 or self.inplanes != planes * block.expansion:
+ downsample = nn.Sequential(
+ nn.Conv2d(self.inplanes, planes * block.expansion,
+ kernel_size=1, stride=stride, bias=False),
+ nn.BatchNorm2d(planes * block.expansion),
+ )
+
+ layers = []
+ layers.append(block(self.inplanes, planes, stride, downsample))
+ self.inplanes = planes * block.expansion
+ for i in range(1, blocks):
+ layers.append(block(self.inplanes, planes))
+
+ return nn.Sequential(*layers)
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/SE_Resnet.py b/joints_detectors/Alphapose/SPPE/src/models/layers/SE_Resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..caecaa36dc09998153eed2ff74ebfbd775bfe828
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/SE_Resnet.py
@@ -0,0 +1,99 @@
+import torch.nn as nn
+from .SE_module import SELayer
+import torch.nn.functional as F
+
+
+class Bottleneck(nn.Module):
+ expansion = 4
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None, reduction=False):
+ super(Bottleneck, self).__init__()
+ self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+ self.bn1 = nn.BatchNorm2d(planes)
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
+ padding=1, bias=False)
+ self.bn2 = nn.BatchNorm2d(planes)
+ self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+ self.bn3 = nn.BatchNorm2d(planes * 4)
+ if reduction:
+ self.se = SELayer(planes * 4)
+
+ self.reduc = reduction
+ self.downsample = downsample
+ self.stride = stride
+
+ def forward(self, x):
+ residual = x
+
+ out = F.relu(self.bn1(self.conv1(x)), inplace=True)
+ out = F.relu(self.bn2(self.conv2(out)), inplace=True)
+
+ out = self.conv3(out)
+ out = self.bn3(out)
+ if self.reduc:
+ out = self.se(out)
+
+ if self.downsample is not None:
+ residual = self.downsample(x)
+
+ out += residual
+ out = F.relu(out)
+
+ return out
+
+
+class SEResnet(nn.Module):
+ """ SEResnet """
+
+ def __init__(self, architecture):
+ super(SEResnet, self).__init__()
+ assert architecture in ["resnet50", "resnet101"]
+ self.inplanes = 64
+ self.layers = [3, 4, {"resnet50": 6, "resnet101": 23}[architecture], 3]
+ self.block = Bottleneck
+
+ self.conv1 = nn.Conv2d(3, 64, kernel_size=7,
+ stride=2, padding=3, bias=False)
+ self.bn1 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.01, affine=True)
+ self.relu = nn.ReLU(inplace=True)
+ self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+
+ self.layer1 = self.make_layer(self.block, 64, self.layers[0])
+ self.layer2 = self.make_layer(
+ self.block, 128, self.layers[1], stride=2)
+ self.layer3 = self.make_layer(
+ self.block, 256, self.layers[2], stride=2)
+
+ self.layer4 = self.make_layer(
+ self.block, 512, self.layers[3], stride=2)
+
+ def forward(self, x):
+ x = self.maxpool(self.relu(self.bn1(self.conv1(x)))) # 64 * h/4 * w/4
+ x = self.layer1(x) # 256 * h/4 * w/4
+ x = self.layer2(x) # 512 * h/8 * w/8
+ x = self.layer3(x) # 1024 * h/16 * w/16
+ x = self.layer4(x) # 2048 * h/32 * w/32
+ return x
+
+ def stages(self):
+ return [self.layer1, self.layer2, self.layer3, self.layer4]
+
+ def make_layer(self, block, planes, blocks, stride=1):
+ downsample = None
+ if stride != 1 or self.inplanes != planes * block.expansion:
+ downsample = nn.Sequential(
+ nn.Conv2d(self.inplanes, planes * block.expansion,
+ kernel_size=1, stride=stride, bias=False),
+ nn.BatchNorm2d(planes * block.expansion),
+ )
+
+ layers = []
+ if downsample is not None:
+ layers.append(block(self.inplanes, planes, stride, downsample, reduction=True))
+ else:
+ layers.append(block(self.inplanes, planes, stride, downsample))
+ self.inplanes = planes * block.expansion
+ for i in range(1, blocks):
+ layers.append(block(self.inplanes, planes))
+
+ return nn.Sequential(*layers)
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/SE_module.py b/joints_detectors/Alphapose/SPPE/src/models/layers/SE_module.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab8aefe7c23a5b3fed6780350b78773a16df3e5a
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/SE_module.py
@@ -0,0 +1,19 @@
+from torch import nn
+
+
+class SELayer(nn.Module):
+ def __init__(self, channel, reduction=1):
+ super(SELayer, self).__init__()
+ self.avg_pool = nn.AdaptiveAvgPool2d(1)
+ self.fc = nn.Sequential(
+ nn.Linear(channel, channel // reduction),
+ nn.ReLU(inplace=True),
+ nn.Linear(channel // reduction, channel),
+ nn.Sigmoid()
+ )
+
+ def forward(self, x):
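+        # Squeeze: global average pooling reduces each channel to a scalar.
+        # Excitation: two FC layers and a sigmoid produce per-channel gates
+        # that rescale the input feature map.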
+ b, c, _, _ = x.size()
+ y = self.avg_pool(x).view(b, c)
+ y = self.fc(y).view(b, c, 1, 1)
+ return x * y
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/__init__.py b/joints_detectors/Alphapose/SPPE/src/models/layers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6e690fd59145ce8900fd9ab8d8a996ee7d33834
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/joints_detectors/Alphapose/SPPE/src/models/layers/util_models.py b/joints_detectors/Alphapose/SPPE/src/models/layers/util_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..52d60d8c2535cceb3d348c79c95a47750c5d2d8a
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/models/layers/util_models.py
@@ -0,0 +1,37 @@
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+
+
+class ConcatTable(nn.Module):
+ def __init__(self, module_list=None):
+ super(ConcatTable, self).__init__()
+
+ self.modules_list = nn.ModuleList(module_list)
+
+ def forward(self, x: Variable):
+ y = []
+ for i in range(len(self.modules_list)):
+ y.append(self.modules_list[i](x))
+ return y
+
+ def add(self, module):
+ self.modules_list.append(module)
+
+
+class CaddTable(nn.Module):
+ def __init__(self, inplace=False):
+ super(CaddTable, self).__init__()
+ self.inplace = inplace
+
+ def forward(self, x: Variable or list):
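+        # Element-wise sum over the list of input tensors; the `inplace` flag
+        # is kept for API compatibility but is unused here.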
+ return torch.stack(x, 0).sum(0)
+
+
+class Identity(nn.Module):
+ def __init__(self, params=None):
+ super(Identity, self).__init__()
+ self.params = nn.ParameterList(params)
+
+ def forward(self, x: Variable or list):
+ return x
diff --git a/joints_detectors/Alphapose/SPPE/src/opt.py b/joints_detectors/Alphapose/SPPE/src/opt.py
new file mode 100644
index 0000000000000000000000000000000000000000..15152207dd476fd2a31dafa248b244c220aa4f66
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/opt.py
@@ -0,0 +1,102 @@
+import argparse
+import torch
+
+parser = argparse.ArgumentParser(description='PyTorch AlphaPose Training')
+
+"----------------------------- General options -----------------------------"
+parser.add_argument('--expID', default='default', type=str,
+ help='Experiment ID')
+parser.add_argument('--dataset', default='coco', type=str,
+ help='Dataset choice: mpii | coco')
+parser.add_argument('--nThreads', default=30, type=int,
+ help='Number of data loading threads')
+parser.add_argument('--debug', default=False, type=bool,
+ help='Print the debug information')
+parser.add_argument('--snapshot', default=1, type=int,
+ help='How often to take a snapshot of the model (0 = never)')
+
+"----------------------------- AlphaPose options -----------------------------"
+parser.add_argument('--addDPG', action='store_true',
+ help='Train with pose-guided proposal (DPG) data augmentation')
+
+"----------------------------- Model options -----------------------------"
+parser.add_argument('--netType', default='hgPRM', type=str,
+ help='Options: hgPRM | resnext')
+parser.add_argument('--loadModel', default=None, type=str,
+ help='Provide full path to a previously trained model')
+parser.add_argument('--Continue', action='store_true',
+ help='Pick up where an experiment left off')
+parser.add_argument('--nFeats', default=256, type=int,
+ help='Number of features in the hourglass')
+parser.add_argument('--nClasses', default=17, type=int,
+ help='Number of output channels (keypoints)')
+parser.add_argument('--nStack', default=8, type=int,
+ help='Number of hourglasses to stack')
+
+"----------------------------- Hyperparameter options -----------------------------"
+parser.add_argument('--LR', default=2.5e-4, type=float,
+ help='Learning rate')
+parser.add_argument('--momentum', default=0, type=float,
+ help='Momentum')
+parser.add_argument('--weightDecay', default=0, type=float,
+ help='Weight decay')
+parser.add_argument('--crit', default='MSE', type=str,
+ help='Criterion type')
+parser.add_argument('--optMethod', default='rmsprop', type=str,
+ help='Optimization method: rmsprop | sgd | nag | adadelta')
+
+
+"----------------------------- Training options -----------------------------"
+parser.add_argument('--nEpochs', default=50, type=int,
+ help='Total number of training epochs')
+parser.add_argument('--epoch', default=0, type=int,
+ help='Current epoch')
+parser.add_argument('--trainBatch', default=40, type=int,
+ help='Train-batch size')
+parser.add_argument('--validBatch', default=20, type=int,
+ help='Valid-batch size')
+parser.add_argument('--trainIters', default=0, type=int,
+ help='Total train iters')
+parser.add_argument('--valIters', default=0, type=int,
+ help='Total valid iters')
+parser.add_argument('--init', default=None, type=str,
+ help='Initialization')
+"----------------------------- Data options -----------------------------"
+parser.add_argument('--inputResH', default=384, type=int,
+ help='Input image height')
+parser.add_argument('--inputResW', default=320, type=int,
+ help='Input image width')
+parser.add_argument('--outputResH', default=96, type=int,
+ help='Output heatmap height')
+parser.add_argument('--outputResW', default=80, type=int,
+ help='Output heatmap width')
+parser.add_argument('--scale', default=0.25, type=float,
+ help='Degree of scale augmentation')
+parser.add_argument('--rotate', default=30, type=float,
+ help='Degree of rotation augmentation')
+parser.add_argument('--hmGauss', default=1, type=int,
+ help='Heatmap gaussian size')
+
+"----------------------------- PyraNet options -----------------------------"
+parser.add_argument('--baseWidth', default=9, type=int,
+ help='Base width of the residual branches in the pyranet')
+parser.add_argument('--cardinality', default=5, type=int,
+ help='Number of parallel branches (cardinality) in the pyranet')
+parser.add_argument('--nResidual', default=1, type=int,
+ help='Number of residual modules at each location in the pyranet')
+
+"----------------------------- Distribution options -----------------------------"
+parser.add_argument('--dist', dest='dist', type=int, default=1,
+ help='distributed training or not')
+parser.add_argument('--backend', dest='backend', type=str, default='gloo',
+ help='backend for distributed training')
+parser.add_argument('--port', dest='port',
+ help='port of server')
+
+
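+# Parse once; when --Continue is set, the options object saved by the previous
+# run is reloaded wholesale and only the resume-specific fields are reset.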
+opt = parser.parse_args()
+if opt.Continue:
+ opt = torch.load("../exp/{}/{}/option.pkl".format(opt.dataset, opt.expID))
+ opt.Continue = True
+ opt.nEpochs = 50
+ print("--- Continue ---")
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/__init__.py b/joints_detectors/Alphapose/SPPE/src/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6e690fd59145ce8900fd9ab8d8a996ee7d33834
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/__init__.py
@@ -0,0 +1 @@
+from . import *
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/dataset/.coco.py.swp b/joints_detectors/Alphapose/SPPE/src/utils/dataset/.coco.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..ca05847bb38d6349466f505cd98f962b71deaee0
Binary files /dev/null and b/joints_detectors/Alphapose/SPPE/src/utils/dataset/.coco.py.swp differ
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/dataset/__init__.py b/joints_detectors/Alphapose/SPPE/src/utils/dataset/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/dataset/coco.py b/joints_detectors/Alphapose/SPPE/src/utils/dataset/coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1f264648b5bac3090d75021b8245251b866b519
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/dataset/coco.py
@@ -0,0 +1,85 @@
+import os
+import h5py
+from functools import reduce
+
+import torch.utils.data as data
+from ..pose import generateSampleBox
+from opt import opt
+
+
+class Mscoco(data.Dataset):
+ def __init__(self, train=True, sigma=1,
+ scale_factor=(0.2, 0.3), rot_factor=40, label_type='Gaussian'):
+ self.img_folder = '../data/coco/images' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = opt.inputResH
+ self.inputResW = opt.inputResW
+ self.outputResH = opt.outputResH
+ self.outputResW = opt.outputResW
+ self.sigma = sigma
+ self.scale_factor = scale_factor
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_coco = 17
+ self.nJoints_mpii = 16
+ self.nJoints = 33
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17)
+ self.flipRef = ((2, 3), (4, 5), (6, 7),
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17))
+
+ # create train/val split
+ with h5py.File('../data/coco/annot_clean.h5', 'r') as annot:
+ # train
+ self.imgname_coco_train = annot['imgname'][:-5887]
+ self.bndbox_coco_train = annot['bndbox'][:-5887]
+ self.part_coco_train = annot['part'][:-5887]
+ # val
+ self.imgname_coco_val = annot['imgname'][-5887:]
+ self.bndbox_coco_val = annot['bndbox'][-5887:]
+ self.part_coco_val = annot['part'][-5887:]
+
+ self.size_train = self.imgname_coco_train.shape[0]
+ self.size_val = self.imgname_coco_val.shape[0]
+
+ def __getitem__(self, index):
+ sf = self.scale_factor
+
+ if self.is_train:
+ part = self.part_coco_train[index]
+ bndbox = self.bndbox_coco_train[index]
+ imgname = self.imgname_coco_train[index]
+ else:
+ part = self.part_coco_val[index]
+ bndbox = self.bndbox_coco_val[index]
+ imgname = self.imgname_coco_val[index]
+
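+ # imgname is stored in the .h5 file as a fixed-length array of character
+ # codes; join the codes back into a filename string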
+ imgname = reduce(lambda x, y: x + y, map(lambda x: chr(int(x)), imgname))
+ img_path = os.path.join(self.img_folder, imgname)
+
+ metaData = generateSampleBox(img_path, bndbox, part, self.nJoints,
+ 'coco', sf, self, train=self.is_train)
+
+ inp, out_bigcircle, out_smallcircle, out, setMask = metaData
+
+ label = []
+ for i in range(opt.nStack):
+ if i < 2:
+ # label.append(out_bigcircle.clone())
+ label.append(out.clone())
+ elif i < 4:
+ # label.append(out_smallcircle.clone())
+ label.append(out.clone())
+ else:
+ label.append(out.clone())
+
+ return inp, label, setMask, 'coco'
+
+ def __len__(self):
+ if self.is_train:
+ return self.size_train
+ else:
+ return self.size_val
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/dataset/fuse.py b/joints_detectors/Alphapose/SPPE/src/utils/dataset/fuse.py
new file mode 100644
index 0000000000000000000000000000000000000000..db3e04a20c0b21186402c861c65de0cb7d0940b4
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/dataset/fuse.py
@@ -0,0 +1,122 @@
+import os
+import h5py
+from functools import reduce
+
+import torch.utils.data as data
+from ..pose import generateSampleBox
+from opt import opt
+
+
+class Mscoco(data.Dataset):
+ def __init__(self, train=True, sigma=1,
+ scale_factor=0.25, rot_factor=30, label_type='Gaussian'):
+ self.img_folder = '../data/' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = 320
+ self.inputResW = 256
+ self.outputResH = 80
+ self.outputResW = 64
+ self.sigma = sigma
+ self.scale_factor = (0.2, 0.3) # note: the scale_factor argument is ignored
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_coco = 17
+ self.nJoints_mpii = 16
+ self.nJoints = 33
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6, 7, 8, # COCO
+ 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, # MPII
+ 28, 29, 32, 33)
+
+ self.flipRef = ((2, 3), (4, 5), (6, 7), # COCO
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17),
+ (18, 23), (19, 22), (20, 21), # MPII
+ (28, 33), (29, 32), (30, 31))
+
+ '''
+ Create train/val split
+ '''
+ # COCO
+ with h5py.File('../data/coco/annot_clean.h5', 'r') as annot:
+ # train
+ self.imgname_coco_train = annot['imgname'][:-5887]
+ self.bndbox_coco_train = annot['bndbox'][:-5887]
+ self.part_coco_train = annot['part'][:-5887]
+ # val
+ self.imgname_coco_val = annot['imgname'][-5887:]
+ self.bndbox_coco_val = annot['bndbox'][-5887:]
+ self.part_coco_val = annot['part'][-5887:]
+ # MPII
+ with h5py.File('../data/mpii/annot_mpii.h5', 'r') as annot:
+ # train
+ self.imgname_mpii_train = annot['imgname'][:-1358]
+ self.bndbox_mpii_train = annot['bndbox'][:-1358]
+ self.part_mpii_train = annot['part'][:-1358]
+ # val
+ self.imgname_mpii_val = annot['imgname'][-1358:]
+ self.bndbox_mpii_val = annot['bndbox'][-1358:]
+ self.part_mpii_val = annot['part'][-1358:]
+
+ self.size_coco_train = self.imgname_coco_train.shape[0]
+ self.size_coco_val = self.imgname_coco_val.shape[0]
+ self.size_train = self.imgname_coco_train.shape[0] + self.imgname_mpii_train.shape[0]
+ self.size_val = self.imgname_coco_val.shape[0] + self.imgname_mpii_val.shape[0]
+ self.train, self.valid = [], []
+
+ def __getitem__(self, index):
+ sf = self.scale_factor
+
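+ # samples are indexed as one concatenated list: all COCO entries first,
+ # then the MPII entries offset by the size of the COCO split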
+ if self.is_train and index < self.size_coco_train: # COCO
+ part = self.part_coco_train[index]
+ bndbox = self.bndbox_coco_train[index]
+ imgname = self.imgname_coco_train[index]
+ imgset = 'coco'
+ elif self.is_train: # MPII
+ part = self.part_mpii_train[index - self.size_coco_train]
+ bndbox = self.bndbox_mpii_train[index - self.size_coco_train]
+ imgname = self.imgname_mpii_train[index - self.size_coco_train]
+ imgset = 'mpii'
+ elif index < self.size_coco_val:
+ part = self.part_coco_val[index]
+ bndbox = self.bndbox_coco_val[index]
+ imgname = self.imgname_coco_val[index]
+ imgset = 'coco'
+ else:
+ part = self.part_mpii_val[index - self.size_coco_val]
+ bndbox = self.bndbox_mpii_val[index - self.size_coco_val]
+ imgname = self.imgname_mpii_val[index - self.size_coco_val]
+ imgset = 'mpii'
+
+ if imgset == 'coco':
+ imgname = reduce(lambda x, y: x + y, map(lambda x: chr(int(x)), imgname))
+ else:
+ imgname = reduce(lambda x, y: x + y, map(lambda x: chr(int(x)), imgname))[:13]
+
+ img_path = os.path.join(self.img_folder, imgset, 'images', imgname)
+
+ metaData = generateSampleBox(img_path, bndbox, part, self.nJoints,
+ imgset, sf, self, train=self.is_train)
+
+ inp, out_bigcircle, out_smallcircle, out, setMask = metaData
+
+ label = []
+ for i in range(opt.nStack):
+ if i < 2:
+ # label.append(out_bigcircle.clone())
+ label.append(out.clone())
+ elif i < 4:
+ # label.append(out_smallcircle.clone())
+ label.append(out.clone())
+ else:
+ label.append(out.clone())
+
+ return inp, label, setMask, imgset
+
+ def __len__(self):
+ if self.is_train:
+ return self.size_train
+ else:
+ return self.size_val
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/dataset/mpii.py b/joints_detectors/Alphapose/SPPE/src/utils/dataset/mpii.py
new file mode 100644
index 0000000000000000000000000000000000000000..eae0dd884ae1c9f02fd46b2924cb0132e8275c74
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/dataset/mpii.py
@@ -0,0 +1,84 @@
+import os
+import h5py
+from functools import reduce
+
+import torch.utils.data as data
+from ..pose import generateSampleBox
+from opt import opt
+
+
+class Mpii(data.Dataset):
+ def __init__(self, train=True, sigma=1,
+ scale_factor=0.25, rot_factor=30, label_type='Gaussian'):
+ self.img_folder = '../data/mpii/images' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = 320
+ self.inputResW = 256
+ self.outputResH = 80
+ self.outputResW = 64
+ self.sigma = sigma
+ self.scale_factor = (0.2, 0.3) # note: the scale_factor argument is ignored
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_mpii = 16
+ self.nJoints = 16
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6,
+ 11, 12, 15, 16)
+ self.flipRef = ((1, 6), (2, 5), (3, 4),
+ (11, 16), (12, 15), (13, 14))
+
+ # create train/val split
+ with h5py.File('../data/mpii/annot_mpii.h5', 'r') as annot:
+ # train
+ self.imgname_mpii_train = annot['imgname'][:-1358]
+ self.bndbox_mpii_train = annot['bndbox'][:-1358]
+ self.part_mpii_train = annot['part'][:-1358]
+ # val
+ self.imgname_mpii_val = annot['imgname'][-1358:]
+ self.bndbox_mpii_val = annot['bndbox'][-1358:]
+ self.part_mpii_val = annot['part'][-1358:]
+
+ self.size_train = self.imgname_mpii_train.shape[0]
+ self.size_val = self.imgname_mpii_val.shape[0]
+ self.train, self.valid = [], []
+
+ def __getitem__(self, index):
+ sf = self.scale_factor
+
+ if self.is_train:
+ part = self.part_mpii_train[index]
+ bndbox = self.bndbox_mpii_train[index]
+ imgname = self.imgname_mpii_train[index]
+ else:
+ part = self.part_mpii_val[index]
+ bndbox = self.bndbox_mpii_val[index]
+ imgname = self.imgname_mpii_val[index]
+
+ imgname = reduce(lambda x, y: x + y, map(lambda x: chr(int(x)), imgname))[:13]
+ img_path = os.path.join(self.img_folder, imgname)
+
+ metaData = generateSampleBox(img_path, bndbox, part, self.nJoints,
+ 'mpii', sf, self, train=self.is_train)
+
+ inp, out_bigcircle, out_smallcircle, out, setMask = metaData
+
+ label = []
+ for i in range(opt.nStack):
+ if i < 2:
+ #label.append(out_bigcircle.clone())
+ label.append(out.clone())
+ elif i < 4:
+ #label.append(out_smallcircle.clone())
+ label.append(out.clone())
+ else:
+ label.append(out.clone())
+
+ return inp, label, setMask
+
+ def __len__(self):
+ if self.is_train:
+ return self.size_train
+ else:
+ return self.size_val
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/eval.py b/joints_detectors/Alphapose/SPPE/src/utils/eval.py
new file mode 100644
index 0000000000000000000000000000000000000000..babff62966de3a47b53ae3aa2c442da39f4b6a7d
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/eval.py
@@ -0,0 +1,219 @@
+from opt import opt
+try:
+ from utils.img import transformBoxInvert, transformBoxInvert_batch, findPeak, processPeaks
+except ImportError:
+ from SPPE.src.utils.img import transformBoxInvert, transformBoxInvert_batch, findPeak, processPeaks
+import torch
+
+
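+# Running-average tracker for losses and accuracies.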
+class DataLogger(object):
+ def __init__(self):
+ self.clear()
+
+ def clear(self):
+ self.value = 0
+ self.sum = 0
+ self.cnt = 0
+ self.avg = 0
+
+ def update(self, value, n=1):
+ self.value = value
+ self.sum += value * n
+ self.cnt += n
+ self._cal_avg()
+
+ def _cal_avg(self):
+ self.avg = self.sum / self.cnt
+
+
+def accuracy(output, label, dataset):
+ if type(output) == list:
+ return accuracy(output[opt.nStack - 1], label[opt.nStack - 1], dataset)
+ else:
+ return heatmapAccuracy(output.cpu().data, label.cpu().data, dataset.accIdxs)
+
+
+def heatmapAccuracy(output, label, idxs):
+ preds = getPreds(output)
+ gt = getPreds(label)
+
+ norm = torch.ones(preds.size(0)) * opt.outputResH / 10
+ dists = calc_dists(preds, gt, norm)
+ #print(dists)
+ acc = torch.zeros(len(idxs) + 1)
+ avg_acc = 0
+ cnt = 0
+ for i in range(len(idxs)):
+ acc[i + 1] = dist_acc(dists[idxs[i] - 1])
+ if acc[i + 1] >= 0:
+ avg_acc = avg_acc + acc[i + 1]
+ cnt += 1
+ if cnt != 0:
+ acc[0] = avg_acc / cnt
+ return acc
+
+
+def getPreds(hm):
+ ''' get predictions from score maps in torch Tensor
+ return type: torch.LongTensor
+ '''
+ assert hm.dim() == 4, 'Score maps should be 4-dim'
+ maxval, idx = torch.max(hm.view(hm.size(0), hm.size(1), -1), 2)
+
+ maxval = maxval.view(hm.size(0), hm.size(1), 1)
+ idx = idx.view(hm.size(0), hm.size(1), 1) + 1
+
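+ # idx is a 1-based flattened index; decode it into 0-based (x, y)
+ # heatmap coordinates (column = x, row = y)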
+ preds = idx.repeat(1, 1, 2).float()
+
+ preds[:, :, 0] = (preds[:, :, 0] - 1) % hm.size(3)
+ preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / hm.size(3))
+
+ # pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
+ # preds *= pred_mask
+ return preds
+
+
+def calc_dists(preds, target, normalize):
+ preds = preds.float().clone()
+ target = target.float().clone()
+ dists = torch.zeros(preds.size(1), preds.size(0))
+ for n in range(preds.size(0)):
+ for c in range(preds.size(1)):
+ if target[n, c, 0] > 0 and target[n, c, 1] > 0:
+ dists[c, n] = torch.dist(
+ preds[n, c, :], target[n, c, :]) / normalize[n]
+ else:
+ dists[c, n] = -1
+ return dists
+
+
+def dist_acc(dists, thr=0.5):
+ ''' Return percentage below threshold while ignoring values with a -1 '''
+ if dists.ne(-1).sum() > 0:
+ return dists.le(thr).eq(dists.ne(-1)).float().sum() * 1.0 / dists.ne(-1).float().sum()
+ else:
+ return -1
+
+
+def postprocess(output):
+ p = getPreds(output)
+
+ for i in range(p.size(0)):
+ for j in range(p.size(1)):
+ hm = output[i][j]
+ pX, pY = int(round(p[i][j][0])), int(round(p[i][j][1]))
+ if 0 < pX < opt.outputResW - 1 and 0 < pY < opt.outputResH - 1:
+ diff = torch.Tensor((hm[pY][pX + 1] - hm[pY][pX - 1], hm[pY + 1][pX] - hm[pY - 1][pX]))
+ p[i][j] += diff.sign() * 0.25
+ p -= 0.5
+
+ return p
+
+
+def getPrediction(hms, pt1, pt2, inpH, inpW, resH, resW):
+ '''
+ Get keypoint location from heatmaps
+ '''
+
+ assert hms.dim() == 4, 'Score maps should be 4-dim'
+ maxval, idx = torch.max(hms.view(hms.size(0), hms.size(1), -1), 2)
+
+ maxval = maxval.view(hms.size(0), hms.size(1), 1)
+ idx = idx.view(hms.size(0), hms.size(1), 1) + 1
+
+ preds = idx.repeat(1, 1, 2).float()
+
+ preds[:, :, 0] = (preds[:, :, 0] - 1) % hms.size(3)
+ preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / hms.size(3))
+
+ pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
+ preds *= pred_mask
+
+ # Very simple post-processing step to improve performance at tight PCK thresholds
+ for i in range(preds.size(0)):
+ for j in range(preds.size(1)):
+ hm = hms[i][j]
+ pX, pY = int(round(float(preds[i][j][0]))), int(round(float(preds[i][j][1])))
+ if 0 < pX < opt.outputResW - 1 and 0 < pY < opt.outputResH - 1:
+ diff = torch.Tensor(
+ (hm[pY][pX + 1] - hm[pY][pX - 1], hm[pY + 1][pX] - hm[pY - 1][pX]))
+ preds[i][j] += diff.sign() * 0.25
+ preds += 0.2
+
+ preds_tf = transformBoxInvert_batch(preds, pt1, pt2, inpH, inpW, resH, resW)
+
+ return preds, preds_tf, maxval
+
+
+def getMultiPeakPrediction(hms, pt1, pt2, inpH, inpW, resH, resW):
+
+ assert hms.dim() == 4, 'Score maps should be 4-dim'
+
+ preds_img = {}
+ hms = hms.numpy()
+ for n in range(hms.shape[0]): # Number of samples
+ preds_img[n] = {} # Result of sample: n
+ for k in range(hms.shape[1]): # Number of keypoints
+ preds_img[n][k] = [] # Result of keypoint: k
+ hm = hms[n][k]
+
+ candidate_points = findPeak(hm)
+
+ res_pt = processPeaks(candidate_points, hm,
+ pt1[n], pt2[n], inpH, inpW, resH, resW)
+
+ preds_img[n][k] = res_pt
+
+ return preds_img
+
+
+def getPrediction_batch(hms, pt1, pt2, inpH, inpW, resH, resW):
+ '''
+ Get keypoint location from heatmaps
+ pt1, pt2: [n, 2]
+ OUTPUT:
+ preds: [n, 17, 2]
+ '''
+
+ assert hms.dim() == 4, 'Score maps should be 4-dim'
+ flat_hms = hms.view(hms.size(0), hms.size(1), -1)
+ maxval, idx = torch.max(flat_hms, 2)
+
+ maxval = maxval.view(hms.size(0), hms.size(1), 1)
+ idx = idx.view(hms.size(0), hms.size(1), 1) + 1
+
+ preds = idx.repeat(1, 1, 2).float()
+
+ preds[:, :, 0] = (preds[:, :, 0] - 1) % hms.size(3)
+ preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / hms.size(3))
+
+ pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
+ preds *= pred_mask
+
+ # Very simple post-processing step to improve performance at tight PCK thresholds
+ idx_up = (idx - hms.size(3)).clamp(0, flat_hms.size(2) - 1)
+ idx_down = (idx + hms.size(3)).clamp(0, flat_hms.size(2) - 1)
+ idx_left = (idx - 1).clamp(0, flat_hms.size(2) - 1)
+ idx_right = (idx + 1).clamp(0, flat_hms.size(2) - 1)
+
+ maxval_up = flat_hms.gather(2, idx_up)
+ maxval_down = flat_hms.gather(2, idx_down)
+ maxval_left = flat_hms.gather(2, idx_left)
+ maxval_right = flat_hms.gather(2, idx_right)
+
+ diff1 = (maxval_right - maxval_left).sign() * 0.25
+ diff2 = (maxval_down - maxval_up).sign() * 0.25
+ # zero the sub-pixel shift wherever the peak sits on a heatmap border:
+ # the horizontal shift (diff1) in the first/last column, the vertical
+ # shift (diff2) in the first/last row
+ col = (idx - 1) % hms.size(3)
+ row = (idx - 1) // hms.size(3)
+ diff1[col == 0] = 0
+ diff1[col == (hms.size(3) - 1)] = 0
+ diff2[row == 0] = 0
+ diff2[row == (hms.size(2) - 1)] = 0
+
+ preds[:, :, 0] += diff1.squeeze(-1)
+ preds[:, :, 1] += diff2.squeeze(-1)
+
+ preds_tf = transformBoxInvert_batch(preds, pt1, pt2, inpH, inpW, resH, resW)
+
+ return preds, preds_tf, maxval
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/img.py b/joints_detectors/Alphapose/SPPE/src/utils/img.py
new file mode 100644
index 0000000000000000000000000000000000000000..18517513843d7f00b06e49d0294d09e5b893dbf8
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/img.py
@@ -0,0 +1,497 @@
+import numpy as np
+import cv2
+import torch
+from torchvision import transforms
+import torch.nn.functional as F
+from scipy.ndimage import maximum_filter
+
+from PIL import Image
+from copy import deepcopy
+import matplotlib
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+
+
+def im_to_torch(img):
+ img = np.transpose(img, (2, 0, 1)) # C*H*W
+ img = to_torch(img).float()
+ if img.max() > 1:
+ img /= 255
+ return img
+
+
+def torch_to_im(img):
+ img = to_numpy(img)
+ img = np.transpose(img, (1, 2, 0)) # C*H*W
+ return img
+
+
+def load_image(img_path):
+ # H x W x C => C x H x W
+ # scipy.misc.imread was removed in SciPy >= 1.2; read with OpenCV instead
+ # and convert BGR -> RGB
+ return im_to_torch(cv2.cvtColor(cv2.imread(img_path), cv2.COLOR_BGR2RGB))
+
+
+def to_numpy(tensor):
+ if torch.is_tensor(tensor):
+ return tensor.cpu().numpy()
+ elif type(tensor).__module__ != 'numpy':
+ raise ValueError("Cannot convert {} to numpy array"
+ .format(type(tensor)))
+ return tensor
+
+
+def to_torch(ndarray):
+ if type(ndarray).__module__ == 'numpy':
+ return torch.from_numpy(ndarray)
+ elif not torch.is_tensor(ndarray):
+ raise ValueError("Cannot convert {} to torch tensor"
+ .format(type(ndarray)))
+ return ndarray
+
+
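+# The draw* helpers below render a single keypoint label into a heatmap patch:
+# drawGaussian writes an unnormalized gaussian with centre value 1, while the
+# *Circle variants additionally set every value above a threshold (0 / 0.4 /
+# 0.5 respectively) to 1, flattening the peak into a plateau.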
+def drawCircle(img, pt, sigma):
+ img = to_numpy(img)
+ tmpSize = 3 * sigma
+ # Check that any part of the gaussian is in-bounds
+ ul = [int(pt[0] - tmpSize), int(pt[1] - tmpSize)]
+ br = [int(pt[0] + tmpSize + 1), int(pt[1] + tmpSize + 1)]
+
+ if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
+ br[0] < 0 or br[1] < 0):
+ # If not, just return the image as is
+ return to_torch(img)
+
+ # Generate gaussian
+ size = 2 * tmpSize + 1
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0 = y0 = size // 2
+ sigma = size / 4.0
+ # The gaussian is not normalized, we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+ g[g > 0] = 1
+ # Usable gaussian range
+ g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
+ g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
+ # Image range
+ img_x = max(0, ul[0]), min(br[0], img.shape[1])
+ img_y = max(0, ul[1]), min(br[1], img.shape[0])
+
+ img[img_y[0]:img_y[1], img_x[0]:img_x[1]] = g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
+ return to_torch(img)
+
+
+def drawGaussian(img, pt, sigma):
+ img = to_numpy(img)
+ tmpSize = 3 * sigma
+ # Check that any part of the gaussian is in-bounds
+ ul = [int(pt[0] - tmpSize), int(pt[1] - tmpSize)]
+ br = [int(pt[0] + tmpSize + 1), int(pt[1] + tmpSize + 1)]
+
+ if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
+ br[0] < 0 or br[1] < 0):
+ # If not, just return the image as is
+ return to_torch(img)
+
+ # Generate gaussian
+ size = 2 * tmpSize + 1
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0 = y0 = size // 2
+ sigma = size / 4.0
+ # The gaussian is not normalized, we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+
+ # Usable gaussian range
+ g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
+ g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
+ # Image range
+ img_x = max(0, ul[0]), min(br[0], img.shape[1])
+ img_y = max(0, ul[1]), min(br[1], img.shape[0])
+
+ img[img_y[0]:img_y[1], img_x[0]:img_x[1]] = g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
+ return to_torch(img)
+
+
+def drawBigCircle(img, pt, sigma):
+ img = to_numpy(img)
+ tmpSize = 3 * sigma
+ # Check that any part of the gaussian is in-bounds
+ ul = [int(pt[0] - tmpSize), int(pt[1] - tmpSize)]
+ br = [int(pt[0] + tmpSize + 1), int(pt[1] + tmpSize + 1)]
+
+ if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
+ br[0] < 0 or br[1] < 0):
+ # If not, just return the image as is
+ return to_torch(img)
+
+ # Generate gaussian
+ size = 2 * tmpSize + 1
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0 = y0 = size // 2
+ sigma = size / 4.0
+ # The gaussian is not normalized, we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+ g[g > 0.4] = 1
+ # Usable gaussian range
+ g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
+ g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
+ # Image range
+ img_x = max(0, ul[0]), min(br[0], img.shape[1])
+ img_y = max(0, ul[1]), min(br[1], img.shape[0])
+
+ img[img_y[0]:img_y[1], img_x[0]:img_x[1]] = g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
+ return to_torch(img)
+
+
+def drawSmallCircle(img, pt, sigma):
+ img = to_numpy(img)
+ tmpSize = 3 * sigma
+ # Check that any part of the gaussian is in-bounds
+ ul = [int(pt[0] - tmpSize), int(pt[1] - tmpSize)]
+ br = [int(pt[0] + tmpSize + 1), int(pt[1] + tmpSize + 1)]
+
+ if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
+ br[0] < 0 or br[1] < 0):
+ # If not, just return the image as is
+ return to_torch(img)
+
+ # Generate gaussian
+ size = 2 * tmpSize + 1
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0 = y0 = size // 2
+ sigma = size / 4.0
+ # The gaussian is not normalized, we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+ g[g > 0.5] = 1
+ # Usable gaussian range
+ g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
+ g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
+ # Image range
+ img_x = max(0, ul[0]), min(br[0], img.shape[1])
+ img_y = max(0, ul[1]), min(br[1], img.shape[0])
+
+ img[img_y[0]:img_y[1], img_x[0]:img_x[1]] = g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
+ return to_torch(img)
+
+
+def transformBox(pt, ul, br, inpH, inpW, resH, resW):
+ center = torch.zeros(2)
+ center[0] = (br[0] - 1 - ul[0]) / 2
+ center[1] = (br[1] - 1 - ul[1]) / 2
+
+ lenH = max(br[1] - ul[1], (br[0] - ul[0]) * inpH / inpW)
+ lenW = lenH * inpW / inpH
+
+ _pt = torch.zeros(2)
+ _pt[0] = pt[0] - ul[0]
+ _pt[1] = pt[1] - ul[1]
+ # Move to center
+ _pt[0] = _pt[0] + max(0, (lenW - 1) / 2 - center[0])
+ _pt[1] = _pt[1] + max(0, (lenH - 1) / 2 - center[1])
+ pt = (_pt * resH) / lenH
+ pt[0] = round(float(pt[0]))
+ pt[1] = round(float(pt[1]))
+ return pt.int()
+
+
+def transformBoxInvert(pt, ul, br, inpH, inpW, resH, resW):
+ center = np.zeros(2)
+ center[0] = (br[0] - 1 - ul[0]) / 2
+ center[1] = (br[1] - 1 - ul[1]) / 2
+
+ lenH = max(br[1] - ul[1], (br[0] - ul[0]) * inpH / inpW)
+ lenW = lenH * inpW / inpH
+
+ _pt = (pt * lenH) / resH
+ _pt[0] = _pt[0] - max(0, (lenW - 1) / 2 - center[0])
+ _pt[1] = _pt[1] - max(0, (lenH - 1) / 2 - center[1])
+
+ new_point = np.zeros(2)
+ new_point[0] = _pt[0] + ul[0]
+ new_point[1] = _pt[1] + ul[1]
+ return new_point
+
+
+def transformBoxInvert_batch(pt, ul, br, inpH, inpW, resH, resW):
+ '''
+ pt: [n, 17, 2]
+ ul: [n, 2]
+ br: [n, 2]
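+ Maps heatmap-space keypoints back to original-image coordinates by
+ undoing the aspect-preserving crop performed in cropBox.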
+ '''
+ center = (br - 1 - ul) / 2
+
+ size = br - ul
+ size[:, 0] *= (inpH / inpW)
+
+ lenH, _ = torch.max(size, dim=1) # [n,]
+ lenW = lenH * (inpW / inpH)
+
+ _pt = (pt * lenH[:, np.newaxis, np.newaxis]) / resH
+ _pt[:, :, 0] = _pt[:, :, 0] - ((lenW[:, np.newaxis].repeat(1, 17) - 1) /
+ 2 - center[:, 0].unsqueeze(-1).repeat(1, 17)).clamp(min=0)
+ _pt[:, :, 1] = _pt[:, :, 1] - ((lenH[:, np.newaxis].repeat(1, 17) - 1) /
+ 2 - center[:, 1].unsqueeze(-1).repeat(1, 17)).clamp(min=0)
+
+ new_point = torch.zeros(pt.size())
+ new_point[:, :, 0] = _pt[:, :, 0] + ul[:, 0].unsqueeze(-1).repeat(1, 17)
+ new_point[:, :, 1] = _pt[:, :, 1] + ul[:, 1].unsqueeze(-1).repeat(1, 17)
+ return new_point
+
+
+def cropBox(img, ul, br, resH, resW):
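+ # Crop the region [ul, br] out of a CHW image: pixels outside the box are
+ # zeroed, the box is zero-padded to the resH:resW aspect ratio, and the
+ # result is warped to (resW, resH) with an affine transform.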
+ ul = ul.int()
+ br = (br - 1).int()
+ # br = br.int()
+ lenH = max((br[1] - ul[1]).item(), (br[0] - ul[0]).item() * resH / resW)
+ lenW = lenH * resW / resH
+ if img.dim() == 2:
+ img = img[np.newaxis, :]
+
+ box_shape = [(br[1] - ul[1]).item(), (br[0] - ul[0]).item()]
+ pad_size = [(lenH - box_shape[0]) // 2, (lenW - box_shape[1]) // 2]
+ # Padding Zeros
+ if ul[1] > 0:
+ img[:, :ul[1], :] = 0
+ if ul[0] > 0:
+ img[:, :, :ul[0]] = 0
+ if br[1] < img.shape[1] - 1:
+ img[:, br[1] + 1:, :] = 0
+ if br[0] < img.shape[2] - 1:
+ img[:, :, br[0] + 1:] = 0
+
+ src = np.zeros((3, 2), dtype=np.float32)
+ dst = np.zeros((3, 2), dtype=np.float32)
+
+ src[0, :] = np.array(
+ [ul[0] - pad_size[1], ul[1] - pad_size[0]], np.float32)
+ src[1, :] = np.array(
+ [br[0] + pad_size[1], br[1] + pad_size[0]], np.float32)
+ dst[0, :] = 0
+ dst[1, :] = np.array([resW - 1, resH - 1], np.float32)
+
+ src[2:, :] = get_3rd_point(src[0, :], src[1, :])
+ dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
+
+ trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))
+
+ dst_img = cv2.warpAffine(torch_to_im(img), trans,
+ (resW, resH), flags=cv2.INTER_LINEAR)
+
+ return im_to_torch(torch.Tensor(dst_img))
+
+
+def cv_rotate(img, rot, resW, resH):
+ center = np.array((resW - 1, resH - 1)) / 2
+ rot_rad = np.pi * rot / 180
+
+ src_dir = get_dir([0, (resH - 1) * -0.5], rot_rad)
+ dst_dir = np.array([0, (resH - 1) * -0.5], np.float32)
+
+ src = np.zeros((3, 2), dtype=np.float32)
+ dst = np.zeros((3, 2), dtype=np.float32)
+
+ src[0, :] = center
+ src[1, :] = center + src_dir
+ dst[0, :] = [(resW - 1) * 0.5, (resH - 1) * 0.5]
+ dst[1, :] = np.array([(resW - 1) * 0.5, (resH - 1) * 0.5]) + dst_dir
+
+ src[2:, :] = get_3rd_point(src[0, :], src[1, :])
+ dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
+
+ trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))
+
+ dst_img = cv2.warpAffine(torch_to_im(img), trans,
+ (resW, resH), flags=cv2.INTER_LINEAR)
+
+ return im_to_torch(torch.Tensor(dst_img))
+
+
+def flip(x):
+ assert (x.dim() == 3 or x.dim() == 4)
+ dim = x.dim() - 1
+ # Tensor.flip is available from torch 0.4.1 onwards; fall back to the
+ # numpy implementation below on older versions
+ if hasattr(torch.Tensor, 'flip'):
+ return x.flip(dims=(dim,))
+ else:
+ is_cuda = False
+ if x.is_cuda:
+ is_cuda = True
+ x = x.cpu()
+ x = x.numpy().copy()
+ if x.ndim == 3:
+ x = np.transpose(np.fliplr(np.transpose(x, (0, 2, 1))), (0, 2, 1))
+ elif x.ndim == 4:
+ for i in range(x.shape[0]):
+ x[i] = np.transpose(
+ np.fliplr(np.transpose(x[i], (0, 2, 1))), (0, 2, 1))
+ # x = x.swapaxes(dim, 0)
+ # x = x[::-1, ...]
+ # x = x.swapaxes(0, dim)
+
+ x = torch.from_numpy(x.copy())
+ if is_cuda:
+ x = x.cuda()
+ return x
+
+
+def shuffleLR(x, dataset):
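+ # swap left/right keypoint channels after a horizontal flip;
+ # dataset.flipRef stores 1-based index pairs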
+ flipRef = dataset.flipRef
+ assert (x.dim() == 3 or x.dim() == 4)
+ for pair in flipRef:
+ dim0, dim1 = pair
+ dim0 -= 1
+ dim1 -= 1
+ if x.dim() == 4:
+ tmp = x[:, dim1].clone()
+ x[:, dim1] = x[:, dim0].clone()
+ x[:, dim0] = tmp.clone()
+ #x[:, dim0], x[:, dim1] = deepcopy((x[:, dim1], x[:, dim0]))
+ else:
+ tmp = x[dim1].clone()
+ x[dim1] = x[dim0].clone()
+ x[dim0] = tmp.clone()
+ #x[dim0], x[dim1] = deepcopy((x[dim1], x[dim0]))
+ return x
+
+
+def drawMPII(inps, preds):
+ assert inps.dim() == 4
+ p_color = ['r', 'r', 'r', 'b', 'b', 'b',
+ 'black', 'black', 'black', 'black',
+ 'y', 'y', 'white', 'white', 'g', 'g']
+
+ nImg = inps.size(0)
+ imgs = []
+ for n in range(nImg):
+ img = to_numpy(inps[n])
+ img = np.transpose(img, (1, 2, 0))
+ imgs.append(img)
+
+ fig = plt.figure()
+ plt.imshow(imgs[0])
+ ax = fig.add_subplot(1, 1, 1)
+ #print(preds.shape)
+ for p in range(16):
+ x, y = preds[0][p]
+ cor = (round(x), round(y)), 10
+ ax.add_patch(plt.Circle(*cor, color=p_color[p]))
+ plt.axis('off')
+
+ plt.show()
+
+ return imgs
+
+
+def drawCOCO(inps, preds, scores):
+ assert inps.dim() == 4
+ p_color = ['g', 'b', 'purple', 'b', 'purple',
+ 'y', 'orange', 'y', 'orange', 'y', 'orange',
+ 'pink', 'r', 'pink', 'r', 'pink', 'r']
+
+ nImg = inps.size(0)
+ imgs = []
+ for n in range(nImg):
+ img = to_numpy(inps[n])
+ img = np.transpose(img, (1, 2, 0))
+ imgs.append(img)
+
+ fig = plt.figure()
+ plt.imshow(imgs[0])
+ ax = fig.add_subplot(1, 1, 1)
+ #print(preds.shape)
+ for p in range(17):
+ if scores[0][p][0] < 0.2:
+ continue
+ x, y = preds[0][p]
+ cor = (round(x), round(y)), 3
+ ax.add_patch(plt.Circle(*cor, color=p_color[p]))
+ plt.axis('off')
+
+ plt.show()
+
+ return imgs
+
+
+def get_3rd_point(a, b):
+ direct = a - b
+ return b + np.array([-direct[1], direct[0]], dtype=np.float32)
+
+
+def get_dir(src_point, rot_rad):
+ sn, cs = np.sin(rot_rad), np.cos(rot_rad)
+
+ src_result = [0, 0]
+ src_result[0] = src_point[0] * cs - src_point[1] * sn
+ src_result[1] = src_point[0] * sn + src_point[1] * cs
+
+ return src_result
+
+
+def findPeak(hm):
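+ # local maxima of a heatmap: a pixel is a peak if it equals the response
+ # of a 5x5 maximum filter and its score exceeds 0.1; peaks are returned
+ # sorted by descending score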
+ mx = maximum_filter(hm, size=5)
+ idx = zip(*np.where((mx == hm) * (hm > 0.1)))
+ candidate_points = []
+ for (y, x) in idx:
+ candidate_points.append([x, y, hm[y][x]])
+ if len(candidate_points) == 0:
+ return torch.zeros(0)
+ candidate_points = np.array(candidate_points)
+ candidate_points = candidate_points[np.lexsort(-candidate_points.T)]
+ return torch.Tensor(candidate_points)
+
+
+def processPeaks(candidate_points, hm, pt1, pt2, inpH, inpW, resH, resW):
+ # type: (Tensor, Tensor, Tensor, Tensor, float, float, float, float) -> List[Tensor]
+
+ if candidate_points.shape[0] == 0: # Low Response
+ maxval = np.max(hm.reshape(1, -1), 1)
+ idx = np.argmax(hm.reshape(1, -1), 1)
+
+ x = idx % resW
+ y = int(idx / resW)
+
+ candidate_points = np.zeros((1, 3))
+ candidate_points[0, 0:1] = x
+ candidate_points[0, 1:2] = y
+ candidate_points[0, 2:3] = maxval
+
+ res_pts = []
+ for i in range(candidate_points.shape[0]):
+ x, y, maxval = candidate_points[i][0], candidate_points[i][1], candidate_points[i][2]
+
+ if bool(maxval < 0.05) and len(res_pts) > 0:
+ pass
+ else:
+ if bool(x > 0) and bool(x < resW - 2):
+ if bool(hm[int(y)][int(x) + 1] - hm[int(y)][int(x) - 1] > 0):
+ x += 0.25
+ elif bool(hm[int(y)][int(x) + 1] - hm[int(y)][int(x) - 1] < 0):
+ x -= 0.25
+ if bool(y > 0) and bool(y < resH - 2):
+ if bool(hm[int(y) + 1][int(x)] - hm[int(y) - 1][int(x)] > 0):
+ y += (0.25 * inpH / inpW)
+ elif bool(hm[int(y) + 1][int(x)] - hm[int(y) - 1][int(x)] < 0):
+ y -= (0.25 * inpH / inpW)
+
+ #pt = torch.zeros(2)
+ pt = np.zeros(2)
+ pt[0] = x + 0.2
+ pt[1] = y + 0.2
+
+ pt = transformBoxInvert(pt, pt1, pt2, inpH, inpW, resH, resW)
+
+ res_pt = np.zeros(3)
+ res_pt[:2] = pt
+ res_pt[2] = maxval
+
+ res_pts.append(res_pt)
+
+ if maxval < 0.05:
+ break
+ return res_pts
diff --git a/joints_detectors/Alphapose/SPPE/src/utils/pose.py b/joints_detectors/Alphapose/SPPE/src/utils/pose.py
new file mode 100644
index 0000000000000000000000000000000000000000..9fa2abe12d34a385704b34d9ab98c62b364a85d4
--- /dev/null
+++ b/joints_detectors/Alphapose/SPPE/src/utils/pose.py
@@ -0,0 +1,169 @@
+from .img import (load_image, drawGaussian, drawBigCircle, drawSmallCircle, cv_rotate,
+ cropBox, transformBox, transformBoxInvert, flip, shuffleLR, drawCOCO)
+from .eval import getPrediction
+import torch
+import numpy as np
+import random
+from opt import opt
+
+
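+# Truncated-normal jitter: sample from N(0, x^2) and clip to [-2x, 2x].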
+def rnd(x):
+ return max(-2 * x, min(2 * x, np.random.randn(1)[0] * x))
+
+
+def generateSampleBox(img_path, bndbox, part, nJoints, imgset, scale_factor, dataset, train=True):
+
+ nJoints_coco = 17
+ nJoints_mpii = 16
+ img = load_image(img_path)
+ if train:
+ img[0].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+ img[1].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+ img[2].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+
+ ori_img = img.clone()
+ img[0].add_(-0.406)
+ img[1].add_(-0.457)
+ img[2].add_(-0.480)
+
+ upLeft = torch.Tensor((int(bndbox[0][0]), int(bndbox[0][1])))
+ bottomRight = torch.Tensor((int(bndbox[0][2]), int(bndbox[0][3])))
+ ht = bottomRight[1] - upLeft[1]
+ width = bottomRight[0] - upLeft[0]
+ imght = img.shape[1]
+ imgwidth = img.shape[2]
+ scaleRate = random.uniform(*scale_factor)
+
+ upLeft[0] = max(0, upLeft[0] - width * scaleRate / 2)
+ upLeft[1] = max(0, upLeft[1] - ht * scaleRate / 2)
+ bottomRight[0] = min(imgwidth - 1, bottomRight[0] + width * scaleRate / 2)
+ bottomRight[1] = min(imght - 1, bottomRight[1] + ht * scaleRate / 2)
+
+ # Doing Random Sample
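+ # (pose-guided proposal augmentation: with small probability crop a random
+ # sub-patch of the box, otherwise jitter the corners with fitted gaussian
+ # noise)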
+ if opt.addDPG:
+ PatchScale = random.uniform(0, 1)
+ if PatchScale > 0.85:
+ ratio = ht / width
+ if (width < ht):
+ patchWidth = PatchScale * width
+ patchHt = patchWidth * ratio
+ else:
+ patchHt = PatchScale * ht
+ patchWidth = patchHt / ratio
+
+ xmin = upLeft[0] + random.uniform(0, 1) * (width - patchWidth)
+ ymin = upLeft[1] + random.uniform(0, 1) * (ht - patchHt)
+
+ xmax = xmin + patchWidth + 1
+ ymax = ymin + patchHt + 1
+ else:
+ xmin = max(1, min(upLeft[0] + np.random.normal(-0.0142, 0.1158) * width, imgwidth - 3))
+ ymin = max(1, min(upLeft[1] + np.random.normal(0.0043, 0.068) * ht, imght - 3))
+ xmax = min(max(xmin + 2, bottomRight[0] + np.random.normal(0.0154, 0.1337) * width), imgwidth - 3)
+ ymax = min(max(ymin + 2, bottomRight[1] + np.random.normal(-0.0013, 0.0711) * ht), imght - 3)
+
+ upLeft[0] = xmin
+ upLeft[1] = ymin
+ bottomRight[0] = xmax
+ bottomRight[1] = ymax
+
+ # Counting Joints number
+ jointNum = 0
+ if imgset == 'coco':
+ for i in range(17):
+ if part[i][0] > 0 and part[i][0] > upLeft[0] and part[i][1] > upLeft[1] \
+ and part[i][0] < bottomRight[0] and part[i][1] < bottomRight[1]:
+ jointNum += 1
+ else:
+ for i in range(16):
+ if part[i][0] > 0 and part[i][0] > upLeft[0] and part[i][1] > upLeft[1] \
+ and part[i][0] < bottomRight[0] and part[i][1] < bottomRight[1]:
+ jointNum += 1
+
+ # Doing Random Crop
+ if opt.addDPG:
+ if jointNum > 13 and train:
+ switch = random.uniform(0, 1)
+ if switch > 0.96:
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.92:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.88:
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.84:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.80:
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.76:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.72:
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.68:
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+
+ ori_inp = cropBox(ori_img, upLeft, bottomRight, opt.inputResH, opt.inputResW)
+ inp = cropBox(img, upLeft, bottomRight, opt.inputResH, opt.inputResW)
+ if jointNum == 0:
+ inp = torch.zeros(3, opt.inputResH, opt.inputResW)
+
+ out_bigcircle = torch.zeros(nJoints, opt.outputResH, opt.outputResW)
+ out_smallcircle = torch.zeros(nJoints, opt.outputResH, opt.outputResW)
+ out = torch.zeros(nJoints, opt.outputResH, opt.outputResW)
+ setMask = torch.zeros(nJoints, opt.outputResH, opt.outputResW)
+
+ # Draw Label
+ if imgset == 'coco':
+ for i in range(nJoints_coco):
+ if part[i][0] > 0 and part[i][0] > upLeft[0] and part[i][1] > upLeft[1] \
+ and part[i][0] < bottomRight[0] and part[i][1] < bottomRight[1]:
+ out_bigcircle[i] = drawBigCircle(out_bigcircle[i], transformBox(part[i], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss * 2)
+ out_smallcircle[i] = drawSmallCircle(out_smallcircle[i], transformBox(part[i], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ out[i] = drawGaussian(out[i], transformBox(part[i], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ setMask[i].add_(1)
+ elif imgset == 'mpii':
+ for i in range(nJoints_coco, nJoints_coco + nJoints_mpii):
+ if part[i - nJoints_coco][0] > 0 and part[i - nJoints_coco][0] > upLeft[0] and part[i - nJoints_coco][1] > upLeft[1] \
+ and part[i - nJoints_coco][0] < bottomRight[0] and part[i - nJoints_coco][1] < bottomRight[1]:
+ out_bigcircle[i] = drawBigCircle(out_bigcircle[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss * 2)
+ out_smallcircle[i] = drawSmallCircle(out_smallcircle[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ out[i] = drawGaussian(out[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ setMask[i].add_(1)
+ else:
+ for i in range(nJoints_coco, nJoints_coco + nJoints_mpii):
+ if part[i - nJoints_coco][0] > 0 and part[i - nJoints_coco][0] > upLeft[0] and part[i - nJoints_coco][1] > upLeft[1] \
+ and part[i - nJoints_coco][0] < bottomRight[0] and part[i - nJoints_coco][1] < bottomRight[1]:
+ out_bigcircle[i] = drawBigCircle(out_bigcircle[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss * 2)
+ out_smallcircle[i] = drawSmallCircle(out_smallcircle[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ out[i] = drawGaussian(out[i], transformBox(part[i - nJoints_coco], upLeft, bottomRight, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW), opt.hmGauss)
+ if i != 6 + nJoints_coco and i != 7 + nJoints_coco:
+ setMask[i].add_(1)
+
+ if opt.debug:
+ preds_hm, preds_img, preds_scores = getPrediction(out.unsqueeze(0), upLeft.unsqueeze(0), bottomRight.unsqueeze(0), opt.inputResH,
+ opt.inputResW, opt.outputResH, opt.outputResW)
+ tmp_preds = preds_hm.mul(opt.inputResH / opt.outputResH)
+ drawCOCO(ori_inp.unsqueeze(0), tmp_preds, preds_scores)
+
+ if train:
+ # Flip
+ if random.uniform(0, 1) < 0.5:
+ inp = flip(inp)
+ ori_inp = flip(ori_inp)
+ out_bigcircle = shuffleLR(flip(out_bigcircle), dataset)
+ out_smallcircle = shuffleLR(flip(out_smallcircle), dataset)
+ out = shuffleLR(flip(out), dataset)
+ # Rotate
+ r = rnd(opt.rotate)
+ if random.uniform(0, 1) < 0.6:
+ r = 0
+ if r != 0:
+ inp = cv_rotate(inp, r, opt.inputResW, opt.inputResH)
+ out_bigcircle = cv_rotate(out_bigcircle, r, opt.outputResW, opt.outputResH)
+ out_smallcircle = cv_rotate(out_smallcircle, r, opt.outputResW, opt.outputResH)
+ out = cv_rotate(out, r, opt.outputResW, opt.outputResH)
+
+ return inp, out_bigcircle, out_smallcircle, out, setMask
diff --git a/joints_detectors/Alphapose/dataloader.py b/joints_detectors/Alphapose/dataloader.py
new file mode 100644
index 0000000000000000000000000000000000000000..92072bb2f46b5f4f61d7e8e690622d5f74d1ed80
--- /dev/null
+++ b/joints_detectors/Alphapose/dataloader.py
@@ -0,0 +1,782 @@
+import os
+import sys
+import time
+from multiprocessing import Queue as pQueue
+from threading import Thread
+
+import cv2
+import numpy as np
+import torch
+import torch.multiprocessing as mp
+import torch.utils.data as data
+import torchvision.transforms as transforms
+from PIL import Image
+from torch.autograd import Variable
+
+from SPPE.src.utils.eval import getPrediction, getMultiPeakPrediction
+from SPPE.src.utils.img import load_image, cropBox, im_to_torch
+from matching import candidate_reselect as matching
+from opt import opt
+from pPose_nms import pose_nms
+from yolo.darknet import Darknet
+from yolo.preprocess import prep_image, prep_frame
+from yolo.util import dynamic_write_results
+
+# import the Queue class from Python 3
+if sys.version_info >= (3, 0):
+ from queue import Queue, LifoQueue
+# otherwise, import the Queue class for Python 2.7
+else:
+ from Queue import Queue, LifoQueue
+
+if opt.vis_fast:
+ from fn import vis_frame_fast as vis_frame
+else:
+ from fn import vis_frame
+
+
+class Image_loader(data.Dataset):
+ def __init__(self, im_names, format='yolo'):
+ super(Image_loader, self).__init__()
+ self.img_dir = opt.inputpath
+ self.imglist = im_names
+ self.transform = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
+ ])
+ self.format = format
+
+ def getitem_ssd(self, index):
+ im_name = self.imglist[index].rstrip('\n').rstrip('\r')
+ im_name = os.path.join(self.img_dir, im_name)
+ im = Image.open(im_name)
+ inp = load_image(im_name)
+ if im.mode == 'L':
+ im = im.convert('RGB')
+
+ ow = oh = 512
+ im = im.resize((ow, oh))
+ im = self.transform(im)
+ return im, inp, im_name
+
+ def getitem_yolo(self, index):
+ inp_dim = int(opt.inp_dim)
+ im_name = self.imglist[index].rstrip('\n').rstrip('\r')
+ im_name = os.path.join(self.img_dir, im_name)
+ im, orig_img, im_dim = prep_image(im_name, inp_dim)
+ # im_dim = torch.FloatTensor([im_dim]).repeat(1, 2)
+
+ inp = load_image(im_name)
+ return im, inp, orig_img, im_name, im_dim
+
+ def __getitem__(self, index):
+ if self.format == 'ssd':
+ return self.getitem_ssd(index)
+ elif self.format == 'yolo':
+ return self.getitem_yolo(index)
+ else:
+ raise NotImplementedError
+
+ def __len__(self):
+ return len(self.imglist)
+
+
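+# The loaders below form a producer/consumer pipeline: each stage runs in its
+# own thread (single-process mode, opt.sp) or in a separate process, and hands
+# batches to the next stage through a bounded queue.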
+class ImageLoader:
+ def __init__(self, im_names, batchSize=1, format='yolo', queueSize=50):
+ self.img_dir = opt.inputpath
+ self.imglist = im_names
+ self.transform = transforms.Compose([
+ transforms.ToTensor(),
+ transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
+ ])
+ self.format = format
+
+ self.batchSize = batchSize
+ self.datalen = len(self.imglist)
+ leftover = 0
+ if (self.datalen) % batchSize:
+ leftover = 1
+ self.num_batches = self.datalen // batchSize + leftover
+
+ # initialize the queue used to store data
+ if opt.sp:
+ self.Q = Queue(maxsize=queueSize)
+ else:
+ self.Q = mp.Queue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ if self.format == 'ssd':
+ if opt.sp:
+ p = Thread(target=self.getitem_ssd, args=())
+ else:
+ p = mp.Process(target=self.getitem_ssd, args=())
+ elif self.format == 'yolo':
+ if opt.sp:
+ p = Thread(target=self.getitem_yolo, args=())
+ else:
+ p = mp.Process(target=self.getitem_yolo, args=())
+ else:
+ raise NotImplementedError
+ p.daemon = True
+ p.start()
+ return self
+
+ def getitem_ssd(self):
+ length = len(self.imglist)
+ for index in range(length):
+ im_name = self.imglist[index].rstrip('\n').rstrip('\r')
+ im_name = os.path.join(self.img_dir, im_name)
+ im = Image.open(im_name)
+ inp = load_image(im_name)
+ if im.mode == 'L':
+ im = im.convert('RGB')
+
+ ow = oh = 512
+ im = im.resize((ow, oh))
+ im = self.transform(im)
+ while self.Q.full():
+ time.sleep(2)
+ self.Q.put((im, inp, im_name))
+
+ def getitem_yolo(self):
+ for i in range(self.num_batches):
+ img = []
+ orig_img = []
+ im_name = []
+ im_dim_list = []
+ for k in range(i * self.batchSize, min((i + 1) * self.batchSize, self.datalen)):
+ inp_dim = int(opt.inp_dim)
+ im_name_k = self.imglist[k].rstrip('\n').rstrip('\r')
+ im_name_k = os.path.join(self.img_dir, im_name_k)
+ img_k, orig_img_k, im_dim_list_k = prep_image(im_name_k, inp_dim)
+
+ img.append(img_k)
+ orig_img.append(orig_img_k)
+ im_name.append(im_name_k)
+ im_dim_list.append(im_dim_list_k)
+
+ with torch.no_grad():
+ # Human Detection
+ img = torch.cat(img)
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1, 2)
+
+ while self.Q.full():
+ time.sleep(2)
+
+ self.Q.put((img, orig_img, im_name, im_dim_list))
+
+ def getitem(self):
+ return self.Q.get()
+
+ def length(self):
+ return len(self.imglist)
+
+ def len(self):
+ return self.Q.qsize()
+
+
+class VideoLoader:
+ def __init__(self, path, batchSize=1, queueSize=50):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.path = path
+ self.stream = cv2.VideoCapture(path)
+ assert self.stream.isOpened(), 'Cannot capture source'
+ self.stopped = False
+
+ self.batchSize = batchSize
+ self.datalen = int(self.stream.get(cv2.CAP_PROP_FRAME_COUNT))
+ leftover = 0
+ if (self.datalen) % batchSize:
+ leftover = 1
+ self.num_batches = self.datalen // batchSize + leftover
+
+ # initialize the queue used to store frames read from
+ # the video file
+ if opt.sp:
+ self.Q = Queue(maxsize=queueSize)
+ else:
+ self.Q = mp.Queue(maxsize=queueSize)
+
+ def length(self):
+ return self.datalen
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ if opt.sp:
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ else:
+ p = mp.Process(target=self.update, args=())
+ p.daemon = True
+ p.start()
+ return self
+
+ def update(self):
+ stream = cv2.VideoCapture(self.path)
+ assert stream.isOpened(), 'Cannot capture source'
+
+ for i in range(self.num_batches):
+ img = []
+ orig_img = []
+ im_name = []
+ im_dim_list = []
+ for k in range(i * self.batchSize, min((i + 1) * self.batchSize, self.datalen)):
+ inp_dim = int(opt.inp_dim)
+ (grabbed, frame) = stream.read()
+ # if the `grabbed` boolean is `False`, then we have
+ # reached the end of the video file
+ if not grabbed:
+ self.Q.put((None, None, None, None))
+ print('===========================> This video has ' + str(k) + ' frames in total.')
+ sys.stdout.flush()
+ return
+ # process and add the frame to the queue
+ img_k, orig_img_k, im_dim_list_k = prep_frame(frame, inp_dim)
+
+ img.append(img_k)
+ orig_img.append(orig_img_k)
+ im_name.append(str(k) + '.jpg')
+ im_dim_list.append(im_dim_list_k)
+
+ with torch.no_grad():
+ # Human Detection
+ img = torch.cat(img)
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1, 2)
+
+ while self.Q.full():
+ time.sleep(2)
+
+ self.Q.put((img, orig_img, im_name, im_dim_list))
+
+ def videoinfo(self):
+ # indicate the video info
+ fourcc = int(self.stream.get(cv2.CAP_PROP_FOURCC))
+ fps = self.stream.get(cv2.CAP_PROP_FPS)
+ frameSize = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+ return (fourcc, fps, frameSize)
+
+ def getitem(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ return self.Q.qsize()
+
+
+class DetectionLoader:
+ def __init__(self, dataloder, batchSize=1, queueSize=1024):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
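+ # load the YOLOv3-SPP person detector whose boxes are fed to the pose
+ # estimator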
+ self.det_model = Darknet("joints_detectors/Alphapose/yolo/cfg/yolov3-spp.cfg")
+ self.det_model.load_weights('joints_detectors/Alphapose/models/yolo/yolov3-spp.weights')
+ self.det_model.net_info['height'] = opt.inp_dim
+ self.det_inp_dim = int(self.det_model.net_info['height'])
+ assert self.det_inp_dim % 32 == 0
+ assert self.det_inp_dim > 32
+ self.det_model.cuda()
+ self.det_model.eval()
+
+ self.stopped = False
+ self.dataloder = dataloder
+ self.batchSize = batchSize
+ self.datalen = self.dataloder.length()
+ leftover = 0
+ if (self.datalen) % batchSize:
+ leftover = 1
+ self.num_batches = self.datalen // batchSize + leftover
+ # initialize the queue used to store frames read from
+ # the video file
+ if opt.sp:
+ self.Q = Queue(maxsize=queueSize)
+ else:
+ self.Q = mp.Queue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ if opt.sp:
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ else:
+ p = mp.Process(target=self.update, args=(), daemon=True)
+ # p = mp.Process(target=self.update, args=())
+ # p.daemon = True
+ p.start()
+ return self
+
+ def update(self):
+ # keep looping the whole dataset
+ for i in range(self.num_batches):
+ img, orig_img, im_name, im_dim_list = self.dataloder.getitem()
+ if img is None:
+ self.Q.put((None, None, None, None, None, None, None))
+ return
+
+ with torch.no_grad():
+ # Human Detection
+ img = img.cuda()
+ prediction = self.det_model(img, CUDA=True)
+ # NMS process
+ dets = dynamic_write_results(prediction, opt.confidence,
+ opt.num_classes, nms=True, nms_conf=opt.nms_thesh)
+ if isinstance(dets, int) or dets.shape[0] == 0:
+ for k in range(len(orig_img)):
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], None, None, None, None, None))
+ continue
+ dets = dets.cpu()
+ im_dim_list = torch.index_select(im_dim_list, 0, dets[:, 0].long())
+ scaling_factor = torch.min(self.det_inp_dim / im_dim_list, 1)[0].view(-1, 1)
+
+ # coordinate transfer
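+ # (undo the letterbox scaling/padding applied by prep_image/prep_frame so
+ # the boxes are expressed in original-image pixel coordinates)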
+ dets[:, [1, 3]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 0].view(-1, 1)) / 2
+ dets[:, [2, 4]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 1].view(-1, 1)) / 2
+
+ dets[:, 1:5] /= scaling_factor
+ for j in range(dets.shape[0]):
+ dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, im_dim_list[j, 0])
+ dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, im_dim_list[j, 1])
+ boxes = dets[:, 1:5]
+ scores = dets[:, 5:6]
+
+ for k in range(len(orig_img)):
+ boxes_k = boxes[dets[:, 0] == k]
+ if isinstance(boxes_k, int) or boxes_k.shape[0] == 0:
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], None, None, None, None, None))
+ continue
+ inps = torch.zeros(boxes_k.size(0), 3, opt.inputResH, opt.inputResW)
+ pt1 = torch.zeros(boxes_k.size(0), 2)
+ pt2 = torch.zeros(boxes_k.size(0), 2)
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], boxes_k, scores[dets[:, 0] == k], inps, pt1, pt2))
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class DetectionProcessor:
+ def __init__(self, detectionLoader, queueSize=1024):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.detectionLoader = detectionLoader
+ self.stopped = False
+ self.datalen = self.detectionLoader.datalen
+
+ # initialize the queue used to store data
+ if opt.sp:
+ self.Q = Queue(maxsize=queueSize)
+ else:
+ self.Q = pQueue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ if opt.sp:
+ # t = Thread(target=self.update, args=(), daemon=True)
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ else:
+ p = mp.Process(target=self.update, args=(), daemon=True)
+ # p = mp.Process(target=self.update, args=())
+ # p.daemon = True
+ p.start()
+ return self
+
+ def update(self):
+ # keep looping the whole dataset
+ for i in range(self.datalen):
+
+ with torch.no_grad():
+ (orig_img, im_name, boxes, scores, inps, pt1, pt2) = self.detectionLoader.read()
+ if orig_img is None:
+ self.Q.put((None, None, None, None, None, None, None))
+ return
+ if boxes is None or boxes.nelement() == 0:
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((None, orig_img, im_name, boxes, scores, None, None))
+ continue
+ inp = im_to_torch(cv2.cvtColor(orig_img, cv2.COLOR_BGR2RGB))
+ inps, pt1, pt2 = crop_from_dets(inp, boxes, inps, pt1, pt2)
+
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((inps, orig_img, im_name, boxes, scores, pt1, pt2))
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class VideoDetectionLoader:
+ def __init__(self, path, batchSize=4, queueSize=256):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.det_model = Darknet("yolo/cfg/yolov3-spp.cfg")
+ self.det_model.load_weights('models/yolo/yolov3-spp.weights')
+ self.det_model.net_info['height'] = opt.inp_dim
+ self.det_inp_dim = int(self.det_model.net_info['height'])
+ assert self.det_inp_dim % 32 == 0
+ assert self.det_inp_dim > 32
+ self.det_model.cuda()
+ self.det_model.eval()
+
+ self.stream = cv2.VideoCapture(path)
+ assert self.stream.isOpened(), 'Cannot capture source'
+ self.stopped = False
+ self.batchSize = batchSize
+ self.datalen = int(self.stream.get(cv2.CAP_PROP_FRAME_COUNT))
+ leftover = 0
+ if (self.datalen) % batchSize:
+ leftover = 1
+ self.num_batches = self.datalen // batchSize + leftover
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = Queue(maxsize=queueSize)
+
+ def length(self):
+ return self.datalen
+
+ def len(self):
+ return self.Q.qsize()
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping the whole video
+ for i in range(self.num_batches):
+ img = []
+ inp = []
+ orig_img = []
+ im_name = []
+ im_dim_list = []
+ for k in range(i * self.batchSize, min((i + 1) * self.batchSize, self.datalen)):
+ (grabbed, frame) = self.stream.read()
+ # if the `grabbed` boolean is `False`, then we have
+ # reached the end of the video file
+ if not grabbed:
+ self.stop()
+ return
+ # process and add the frame to the queue
+ inp_dim = int(opt.inp_dim)
+ img_k, orig_img_k, im_dim_list_k = prep_frame(frame, inp_dim)
+ inp_k = im_to_torch(orig_img_k)
+
+ img.append(img_k)
+ inp.append(inp_k)
+ orig_img.append(orig_img_k)
+ im_dim_list.append(im_dim_list_k)
+
+ with torch.no_grad():
+ ht = inp[0].size(1)
+ wd = inp[0].size(2)
+ # Human Detection
+ img = Variable(torch.cat(img)).cuda()
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1, 2)
+ im_dim_list = im_dim_list.cuda()
+
+ prediction = self.det_model(img, CUDA=True)
+ # NMS process
+ dets = dynamic_write_results(prediction, opt.confidence,
+ opt.num_classes, nms=True, nms_conf=opt.nms_thesh)
+ if isinstance(dets, int) or dets.shape[0] == 0:
+ for k in range(len(inp)):
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((inp[k], orig_img[k], None, None))
+ continue
+
+ im_dim_list = torch.index_select(im_dim_list, 0, dets[:, 0].long())
+ scaling_factor = torch.min(self.det_inp_dim / im_dim_list, 1)[0].view(-1, 1)
+
+ # coordinate transfer
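+                # (the detector saw a letterboxed image: subtract the padding offsets, then divide by the resize factor to map boxes back to original image coordinates)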
+ dets[:, [1, 3]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 0].view(-1, 1)) / 2
+ dets[:, [2, 4]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 1].view(-1, 1)) / 2
+
+ dets[:, 1:5] /= scaling_factor
+ for j in range(dets.shape[0]):
+ dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, im_dim_list[j, 0])
+ dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, im_dim_list[j, 1])
+ boxes = dets[:, 1:5].cpu()
+ scores = dets[:, 5:6].cpu()
+
+ for k in range(len(inp)):
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((inp[k], orig_img[k], boxes[dets[:, 0] == k], scores[dets[:, 0] == k]))
+
+ def videoinfo(self):
+ # indicate the video info
+ fourcc = int(self.stream.get(cv2.CAP_PROP_FOURCC))
+ fps = self.stream.get(cv2.CAP_PROP_FPS)
+ frameSize = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+ return (fourcc, fps, frameSize)
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def more(self):
+ # return True if there are still frames in the queue
+ return self.Q.qsize() > 0
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+
+
+class WebcamLoader:
+ def __init__(self, webcam, queueSize=256):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.stream = cv2.VideoCapture(int(webcam))
+ assert self.stream.isOpened(), 'Cannot capture source'
+ self.stopped = False
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = LifoQueue(maxsize=queueSize)
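+        # a LIFO queue so read() always returns the freshest frame; stale frames are cleared when the queue fills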
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping infinitely
+ while True:
+ # otherwise, ensure the queue has room in it
+ if not self.Q.full():
+ # read the next frame from the file
+ (grabbed, frame) = self.stream.read()
+ # if the `grabbed` boolean is `False`, then we have
+ # reached the end of the video file
+ if not grabbed:
+ self.stop()
+ return
+ # process and add the frame to the queue
+ inp_dim = int(opt.inp_dim)
+ img, orig_img, dim = prep_frame(frame, inp_dim)
+ inp = im_to_torch(orig_img)
+ im_dim_list = torch.FloatTensor([dim]).repeat(1, 2)
+
+ self.Q.put((img, orig_img, inp, im_dim_list))
+ else:
+ with self.Q.mutex:
+ self.Q.queue.clear()
+
+ def videoinfo(self):
+ # indicate the video info
+ fourcc = int(self.stream.get(cv2.CAP_PROP_FOURCC))
+ fps = self.stream.get(cv2.CAP_PROP_FPS)
+ frameSize = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+ return (fourcc, fps, frameSize)
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue size
+ return self.Q.qsize()
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+
+
+class DataWriter:
+ def __init__(self, save_video=False,
+ savepath='examples/res/1.avi', fourcc=cv2.VideoWriter_fourcc(*'XVID'), fps=25, frameSize=(640, 480),
+ queueSize=1024):
+ if save_video:
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.stream = cv2.VideoWriter(savepath, fourcc, fps, frameSize)
+ assert self.stream.isOpened(), 'Cannot open video for writing'
+ self.save_video = save_video
+ self.stopped = False
+ self.final_result = []
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = Queue(maxsize=queueSize)
+ if opt.save_img:
+ if not os.path.exists(opt.outputpath + '/vis'):
+ os.mkdir(opt.outputpath + '/vis')
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=(), daemon=True)
+ # t = Thread(target=self.update, args=())
+ # t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping infinitely
+ while True:
+ # if the thread indicator variable is set, stop the
+ # thread
+ if self.stopped:
+ if self.save_video:
+ self.stream.release()
+ return
+ # otherwise, ensure the queue is not empty
+ if not self.Q.empty():
+ (boxes, scores, hm_data, pt1, pt2, orig_img, im_name) = self.Q.get()
+ orig_img = np.array(orig_img, dtype=np.uint8)
+ if boxes is None:
+ if opt.save_img or opt.save_video or opt.vis:
+ img = orig_img
+ if opt.vis:
+ cv2.imshow("AlphaPose Demo", img)
+ cv2.waitKey(30)
+ if opt.save_img:
+ cv2.imwrite(os.path.join(opt.outputpath, 'vis', im_name), img)
+ if opt.save_video:
+ self.stream.write(img)
+ else:
+ # location prediction (n, kp, 2) | score prediction (n, kp, 1)
+ if opt.matching:
+ preds = getMultiPeakPrediction(
+ hm_data, pt1.numpy(), pt2.numpy(), opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW)
+ result = matching(boxes, scores.numpy(), preds)
+ else:
+ preds_hm, preds_img, preds_scores = getPrediction(
+ hm_data, pt1, pt2, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW)
+ result = pose_nms(
+ boxes, scores, preds_img, preds_scores)
+ result = {
+ 'imgname': im_name,
+ 'result': result
+ }
+ self.final_result.append(result)
+ if opt.save_img or opt.save_video or opt.vis:
+ img = vis_frame(orig_img, result)
+ if opt.vis:
+ cv2.imshow("AlphaPose Demo", img)
+ cv2.waitKey(30)
+ if opt.save_img:
+ cv2.imwrite(os.path.join(opt.outputpath, 'vis', im_name), img)
+ if opt.save_video:
+ self.stream.write(img)
+ else:
+ time.sleep(0.1)
+
+ def running(self):
+ # indicate that the thread is still running
+ time.sleep(0.2)
+ return not self.Q.empty()
+
+ def save(self, boxes, scores, hm_data, pt1, pt2, orig_img, im_name):
+ # save next frame in the queue
+ self.Q.put((boxes, scores, hm_data, pt1, pt2, orig_img, im_name))
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+ time.sleep(0.2)
+
+ def results(self):
+ # return final result
+ return self.final_result
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class Mscoco(data.Dataset):
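+    # NOTE: used here only as a configuration stub; it supplies joint counts and flip pairs to the pose model, and __getitem__/__len__ are intentionally unimplemented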
+ def __init__(self, train=True, sigma=1,
+ scale_factor=(0.2, 0.3), rot_factor=40, label_type='Gaussian'):
+ self.img_folder = '../data/coco/images' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = opt.inputResH
+ self.inputResW = opt.inputResW
+ self.outputResH = opt.outputResH
+ self.outputResW = opt.outputResW
+ self.sigma = sigma
+ self.scale_factor = scale_factor
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_coco = 17
+ self.nJoints_mpii = 16
+ self.nJoints = 33
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17)
+ self.flipRef = ((2, 3), (4, 5), (6, 7),
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17))
+
+ def __getitem__(self, index):
+ pass
+
+ def __len__(self):
+ pass
+
+
+def crop_from_dets(img, boxes, inps, pt1, pt2):
+ '''
+    Crop humans from the original image according to detection results.
+ '''
+
+ imght = img.size(1)
+ imgwidth = img.size(2)
+ tmp_img = img
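+    # NOTE: tmp_img aliases img, so the in-place mean subtraction below also modifies the caller's tensor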
+ tmp_img[0].add_(-0.406)
+ tmp_img[1].add_(-0.457)
+ tmp_img[2].add_(-0.480)
+ for i, box in enumerate(boxes):
+ upLeft = torch.Tensor(
+ (float(box[0]), float(box[1])))
+ bottomRight = torch.Tensor(
+ (float(box[2]), float(box[3])))
+
+ ht = bottomRight[1] - upLeft[1]
+ width = bottomRight[0] - upLeft[0]
+
+ scaleRate = 0.3
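+        # pad the detection box by 30% (15% per side) so the crop keeps full limbs before resizing to the network input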
+
+ upLeft[0] = max(0, upLeft[0] - width * scaleRate / 2)
+ upLeft[1] = max(0, upLeft[1] - ht * scaleRate / 2)
+ bottomRight[0] = max(
+ min(imgwidth - 1, bottomRight[0] + width * scaleRate / 2), upLeft[0] + 5)
+ bottomRight[1] = max(
+ min(imght - 1, bottomRight[1] + ht * scaleRate / 2), upLeft[1] + 5)
+
+ try:
+ inps[i] = cropBox(tmp_img.clone(), upLeft, bottomRight, opt.inputResH, opt.inputResW)
+ except IndexError:
+ print(tmp_img.shape)
+ print(upLeft)
+ print(bottomRight)
+ print('===')
+ pt1[i] = upLeft
+ pt2[i] = bottomRight
+
+ return inps, pt1, pt2
diff --git a/joints_detectors/Alphapose/dataloader_webcam.py b/joints_detectors/Alphapose/dataloader_webcam.py
new file mode 100644
index 0000000000000000000000000000000000000000..75cd3b919338fe90a54e6032cd176a39751f7a6a
--- /dev/null
+++ b/joints_detectors/Alphapose/dataloader_webcam.py
@@ -0,0 +1,520 @@
+import os
+import torch
+from torch.autograd import Variable
+import torch.utils.data as data
+import torchvision.transforms as transforms
+from PIL import Image, ImageDraw
+from SPPE.src.utils.img import load_image, cropBox, im_to_torch
+from opt import opt
+from yolo.preprocess import prep_image, prep_frame, inp_to_image
+from pPose_nms import pose_nms, write_json
+from SPPE.src.utils.eval import getPrediction
+from yolo.util import write_results, dynamic_write_results
+from yolo.darknet import Darknet
+from tqdm import tqdm
+import cv2
+import json
+import numpy as np
+import sys
+import time
+import torch.multiprocessing as mp
+from multiprocessing import Process
+from multiprocessing import Queue as pQueue
+from threading import Thread
+# import the Queue class from Python 3
+if sys.version_info >= (3, 0):
+ from queue import Queue, LifoQueue
+# otherwise, import the Queue class for Python 2.7
+else:
+ from Queue import Queue, LifoQueue
+
+if opt.vis_fast:
+ from fn import vis_frame_fast as vis_frame
+else:
+ from fn import vis_frame
+
+
+class WebcamLoader:
+ def __init__(self, webcam, batchSize=1, queueSize=256):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.stream = cv2.VideoCapture(int(webcam))
+ assert self.stream.isOpened(), 'Cannot capture source'
+ self.stopped = False
+ # initialize the queue used to store frames read from
+ # the video file
+ self.batchSize = batchSize
+ self.Q = LifoQueue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping infinitely
+ i = 0
+ while True:
+ # otherwise, ensure the queue has room in it
+ if not self.Q.full():
+ img = []
+ orig_img = []
+ im_name = []
+ im_dim_list = []
+ for k in range(self.batchSize):
+ (grabbed, frame) = self.stream.read()
+ # if the `grabbed` boolean is `False`, then we have
+ # reached the end of the video file
+ if not grabbed:
+ self.stop()
+ return
+ inp_dim = int(opt.inp_dim)
+ img_k, orig_img_k, im_dim_list_k = prep_frame(frame, inp_dim)
+
+ img.append(img_k)
+ orig_img.append(orig_img_k)
+ im_name.append(str(i)+'.jpg')
+ im_dim_list.append(im_dim_list_k)
+
+ with torch.no_grad():
+ # Human Detection
+ img = torch.cat(img)
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1,2)
+
+ self.Q.put((img, orig_img, im_name, im_dim_list))
+ i = i+1
+
+ else:
+ with self.Q.mutex:
+ self.Q.queue.clear()
+
+    def videoinfo(self):
+        # indicate the video info
+        fourcc = int(self.stream.get(cv2.CAP_PROP_FOURCC))
+        fps = self.stream.get(cv2.CAP_PROP_FPS)
+        frameSize = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+        return (fourcc, fps, frameSize)
+
+ def getitem(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue size
+ return self.Q.qsize()
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+
+
+class DetectionLoader:
+ def __init__(self, dataloder, batchSize=1, queueSize=1024):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.det_model = Darknet("yolo/cfg/yolov3-spp.cfg")
+ self.det_model.load_weights('models/yolo/yolov3-spp.weights')
+ self.det_model.net_info['height'] = opt.inp_dim
+ self.det_inp_dim = int(self.det_model.net_info['height'])
+ assert self.det_inp_dim % 32 == 0
+ assert self.det_inp_dim > 32
+ self.det_model.cuda()
+ self.det_model.eval()
+
+ self.stopped = False
+ self.dataloder = dataloder
+ self.batchSize = batchSize
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = LifoQueue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping the whole dataset
+ while True:
+ img, orig_img, im_name, im_dim_list = self.dataloder.getitem()
+ with self.dataloder.Q.mutex:
+ self.dataloder.Q.queue.clear()
+ with torch.no_grad():
+ # Human Detection
+ img = img.cuda()
+ prediction = self.det_model(img, CUDA=True)
+ # NMS process
+ dets = dynamic_write_results(prediction, opt.confidence,
+ opt.num_classes, nms=True, nms_conf=opt.nms_thesh)
+ if isinstance(dets, int) or dets.shape[0] == 0:
+ for k in range(len(orig_img)):
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], None, None, None, None, None))
+ continue
+ dets = dets.cpu()
+ im_dim_list = torch.index_select(im_dim_list,0, dets[:, 0].long())
+ scaling_factor = torch.min(self.det_inp_dim / im_dim_list, 1)[0].view(-1, 1)
+
+ # coordinate transfer
+ dets[:, [1, 3]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 0].view(-1, 1)) / 2
+ dets[:, [2, 4]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 1].view(-1, 1)) / 2
+
+ dets[:, 1:5] /= scaling_factor
+ for j in range(dets.shape[0]):
+ dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, im_dim_list[j, 0])
+ dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, im_dim_list[j, 1])
+ boxes = dets[:, 1:5]
+ scores = dets[:, 5:6]
+
+ for k in range(len(orig_img)):
+ boxes_k = boxes[dets[:,0]==k]
+ if isinstance(boxes_k, int) or boxes_k.shape[0] == 0:
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], None, None, None, None, None))
+ continue
+ inps = torch.zeros(boxes_k.size(0), 3, opt.inputResH, opt.inputResW)
+ pt1 = torch.zeros(boxes_k.size(0), 2)
+ pt2 = torch.zeros(boxes_k.size(0), 2)
+ if self.Q.full():
+ time.sleep(2)
+ self.Q.put((orig_img[k], im_name[k], boxes_k, scores[dets[:,0]==k], inps, pt1, pt2))
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class DetectionProcessor:
+ def __init__(self, detectionLoader, queueSize=1024):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.detectionLoader = detectionLoader
+ self.stopped = False
+
+ # initialize the queue used to store data
+ self.Q = LifoQueue(maxsize=queueSize)
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping the whole dataset
+ while True:
+
+ with torch.no_grad():
+ (orig_img, im_name, boxes, scores, inps, pt1, pt2) = self.detectionLoader.read()
+ with self.detectionLoader.Q.mutex:
+ self.detectionLoader.Q.queue.clear()
+ if boxes is None or boxes.nelement() == 0:
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((None, orig_img, im_name, boxes, scores, None, None))
+ continue
+ inp = im_to_torch(cv2.cvtColor(orig_img, cv2.COLOR_BGR2RGB))
+ inps, pt1, pt2 = crop_from_dets(inp, boxes, inps, pt1, pt2)
+
+ while self.Q.full():
+ time.sleep(0.2)
+ self.Q.put((inps, orig_img, im_name, boxes, scores, pt1, pt2))
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class WebcamDetectionLoader:
+    def __init__(self, webcam=0, batchSize=1, queueSize=256):
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.det_model = Darknet("yolo/cfg/yolov3-spp.cfg")
+ self.det_model.load_weights('models/yolo/yolov3-spp.weights')
+ self.det_model.net_info['height'] = opt.inp_dim
+ self.det_inp_dim = int(self.det_model.net_info['height'])
+ assert self.det_inp_dim % 32 == 0
+ assert self.det_inp_dim > 32
+ self.det_model.cuda()
+ self.det_model.eval()
+
+ self.stream = cv2.VideoCapture(int(webcam))
+ assert self.stream.isOpened(), 'Cannot open webcam'
+ self.stopped = False
+ self.batchSize = batchSize
+
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = LifoQueue(maxsize=queueSize)
+
+
+ def len(self):
+ return self.Q.qsize()
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping
+ while True:
+ img = []
+ inp = []
+ orig_img = []
+ im_name = []
+ im_dim_list = []
+ for k in range(self.batchSize):
+ (grabbed, frame) = self.stream.read()
+ if not grabbed:
+ continue
+ # process and add the frame to the queue
+ inp_dim = int(opt.inp_dim)
+ img_k, orig_img_k, im_dim_list_k = prep_frame(frame, inp_dim)
+ inp_k = im_to_torch(orig_img_k)
+
+ img.append(img_k)
+ inp.append(inp_k)
+ orig_img.append(orig_img_k)
+ im_dim_list.append(im_dim_list_k)
+
+ with torch.no_grad():
+ ht = inp[0].size(1)
+ wd = inp[0].size(2)
+ # Human Detection
+ img = Variable(torch.cat(img)).cuda()
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1,2)
+ im_dim_list = im_dim_list.cuda()
+
+ prediction = self.det_model(img, CUDA=True)
+ # NMS process
+ dets = dynamic_write_results(prediction, opt.confidence,
+ opt.num_classes, nms=True, nms_conf=opt.nms_thesh)
+ if isinstance(dets, int) or dets.shape[0] == 0:
+ for k in range(len(inp)):
+ if self.Q.full():
+ with self.Q.mutex:
+ self.Q.queue.clear()
+ self.Q.put((inp[k], orig_img[k], None, None))
+ continue
+
+ im_dim_list = torch.index_select(im_dim_list,0, dets[:, 0].long())
+ scaling_factor = torch.min(self.det_inp_dim / im_dim_list, 1)[0].view(-1, 1)
+
+ # coordinate transfer
+ dets[:, [1, 3]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 0].view(-1, 1)) / 2
+ dets[:, [2, 4]] -= (self.det_inp_dim - scaling_factor * im_dim_list[:, 1].view(-1, 1)) / 2
+
+ dets[:, 1:5] /= scaling_factor
+ for j in range(dets.shape[0]):
+ dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, im_dim_list[j, 0])
+ dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, im_dim_list[j, 1])
+ boxes = dets[:, 1:5].cpu()
+ scores = dets[:, 5:6].cpu()
+
+ for k in range(len(inp)):
+ if self.Q.full():
+ with self.Q.mutex:
+ self.Q.queue.clear()
+ self.Q.put((inp[k], orig_img[k], boxes[dets[:,0]==k], scores[dets[:,0]==k]))
+
+ def videoinfo(self):
+ # indicate the video info
+        fourcc = int(self.stream.get(cv2.CAP_PROP_FOURCC))
+        fps = self.stream.get(cv2.CAP_PROP_FPS)
+        frameSize = (int(self.stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(self.stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+        return (fourcc, fps, frameSize)
+
+ def read(self):
+ # return next frame in the queue
+ return self.Q.get()
+
+ def more(self):
+ # return True if there are still frames in the queue
+ return self.Q.qsize() > 0
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+
+
+
+class DataWriter:
+ def __init__(self, save_video=False,
+ savepath='examples/res/1.avi', fourcc=cv2.VideoWriter_fourcc(*'XVID'), fps=25, frameSize=(640,480),
+ queueSize=1024):
+ if save_video:
+ # initialize the file video stream along with the boolean
+ # used to indicate if the thread should be stopped or not
+ self.stream = cv2.VideoWriter(savepath, fourcc, fps, frameSize)
+ assert self.stream.isOpened(), 'Cannot open video for writing'
+ self.save_video = save_video
+ self.stopped = False
+ self.final_result = []
+ # initialize the queue used to store frames read from
+ # the video file
+ self.Q = Queue(maxsize=queueSize)
+ if opt.save_img:
+ if not os.path.exists(opt.outputpath + '/vis'):
+ os.mkdir(opt.outputpath + '/vis')
+
+ def start(self):
+ # start a thread to read frames from the file video stream
+ t = Thread(target=self.update, args=())
+ t.daemon = True
+ t.start()
+ return self
+
+ def update(self):
+ # keep looping infinitely
+ while True:
+ # if the thread indicator variable is set, stop the
+ # thread
+ if self.stopped:
+ if self.save_video:
+ self.stream.release()
+ return
+ # otherwise, ensure the queue is not empty
+ if not self.Q.empty():
+ (boxes, scores, hm_data, pt1, pt2, orig_img, im_name) = self.Q.get()
+ orig_img = np.array(orig_img, dtype=np.uint8)
+ if boxes is None:
+ if opt.save_img or opt.save_video or opt.vis:
+ img = orig_img
+ if opt.vis:
+ cv2.imshow("AlphaPose Demo", img)
+ cv2.waitKey(30)
+ if opt.save_img:
+ cv2.imwrite(os.path.join(opt.outputpath, 'vis', im_name), img)
+ if opt.save_video:
+ self.stream.write(img)
+ else:
+ # location prediction (n, kp, 2) | score prediction (n, kp, 1)
+
+ preds_hm, preds_img, preds_scores = getPrediction(
+ hm_data, pt1, pt2, opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW)
+
+ result = pose_nms(boxes, scores, preds_img, preds_scores)
+ result = {
+ 'imgname': im_name,
+ 'result': result
+ }
+ self.final_result.append(result)
+ if opt.save_img or opt.save_video or opt.vis:
+ img = vis_frame(orig_img, result)
+ if opt.vis:
+ cv2.imshow("AlphaPose Demo", img)
+ cv2.waitKey(30)
+ if opt.save_img:
+ cv2.imwrite(os.path.join(opt.outputpath, 'vis', im_name), img)
+ if opt.save_video:
+ self.stream.write(img)
+ else:
+ time.sleep(0.1)
+
+ def running(self):
+ # indicate that the thread is still running
+ time.sleep(0.2)
+ return not self.Q.empty()
+
+ def save(self, boxes, scores, hm_data, pt1, pt2, orig_img, im_name):
+ # save next frame in the queue
+ self.Q.put((boxes, scores, hm_data, pt1, pt2, orig_img, im_name))
+
+ def stop(self):
+ # indicate that the thread should be stopped
+ self.stopped = True
+ time.sleep(0.2)
+
+ def results(self):
+ # return final result
+ return self.final_result
+
+ def len(self):
+ # return queue len
+ return self.Q.qsize()
+
+
+class Mscoco(data.Dataset):
+ def __init__(self, train=True, sigma=1,
+ scale_factor=(0.2, 0.3), rot_factor=40, label_type='Gaussian'):
+ self.img_folder = '../data/coco/images' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = opt.inputResH
+ self.inputResW = opt.inputResW
+ self.outputResH = opt.outputResH
+ self.outputResW = opt.outputResW
+ self.sigma = sigma
+ self.scale_factor = scale_factor
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_coco = 17
+ self.nJoints_mpii = 16
+ self.nJoints = 33
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17)
+ self.flipRef = ((2, 3), (4, 5), (6, 7),
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17))
+
+ def __getitem__(self, index):
+ pass
+
+ def __len__(self):
+ pass
+
+
+def crop_from_dets(img, boxes, inps, pt1, pt2):
+ '''
+    Crop humans from the original image according to detection results.
+ '''
+
+ imght = img.size(1)
+ imgwidth = img.size(2)
+ tmp_img = img
+ tmp_img[0].add_(-0.406)
+ tmp_img[1].add_(-0.457)
+ tmp_img[2].add_(-0.480)
+ for i, box in enumerate(boxes):
+ upLeft = torch.Tensor(
+ (float(box[0]), float(box[1])))
+ bottomRight = torch.Tensor(
+ (float(box[2]), float(box[3])))
+
+ ht = bottomRight[1] - upLeft[1]
+ width = bottomRight[0] - upLeft[0]
+ if width > 100:
+ scaleRate = 0.2
+ else:
+ scaleRate = 0.3
+
+ upLeft[0] = max(0, upLeft[0] - width * scaleRate / 2)
+ upLeft[1] = max(0, upLeft[1] - ht * scaleRate / 2)
+ bottomRight[0] = max(
+ min(imgwidth - 1, bottomRight[0] + width * scaleRate / 2), upLeft[0] + 5)
+ bottomRight[1] = max(
+ min(imght - 1, bottomRight[1] + ht * scaleRate / 2), upLeft[1] + 5)
+
+ inps[i] = cropBox(tmp_img.clone(), upLeft, bottomRight, opt.inputResH, opt.inputResW)
+ pt1[i] = upLeft
+ pt2[i] = bottomRight
+
+ return inps, pt1, pt2
diff --git a/joints_detectors/Alphapose/demo.py b/joints_detectors/Alphapose/demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc4c57bd1898dcb1bd26527a03edcb0c8bd9fb55
--- /dev/null
+++ b/joints_detectors/Alphapose/demo.py
@@ -0,0 +1,131 @@
+# import os
+#
+# os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+# os.environ["CUDA_VISIBLE_DEVICES"] = "1"
+#
+# import numpy as np
+# from tqdm import tqdm
+#
+# from SPPE.src.main_fast_inference import *
+# from dataloader import ImageLoader, DetectionLoader, DetectionProcessor, DataWriter, Mscoco
+# from fn import getTime
+# from opt import opt
+# from pPose_nms import write_json
+# from in_the_wild_data import split_frame
+#
+#
+# def main(args):
+# inputpath = args.inputpath
+# inputlist = args.inputlist
+# mode = args.mode
+# if not os.path.exists(args.outputpath):
+# os.makedirs(args.outputpath, exist_ok=True)
+#
+# if len(inputlist):
+# im_names = open(inputlist, 'r').readlines()
+# elif len(inputpath) and inputpath != '/':
+# for root, dirs, files in os.walk(inputpath):
+# im_names = [f for f in files if 'png' in f or 'jpg' in f]
+# else:
+# raise IOError('Error: must contain either --indir/--list')
+#
+# # Load input images
+# data_loader = ImageLoader(im_names, batchSize=args.detbatch, format='yolo').start()
+#
+# # Load detection loader
+# print('Loading YOLO model..')
+# sys.stdout.flush()
+# det_loader = DetectionLoader(data_loader, batchSize=args.detbatch).start()
+# det_processor = DetectionProcessor(det_loader).start()
+#
+# # Load pose model
+# pose_dataset = Mscoco()
+# if args.fast_inference:
+# pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
+# else:
+# pose_model = InferenNet(4 * 1 + 1, pose_dataset)
+# pose_model.cuda()
+# pose_model.eval()
+#
+# runtime_profile = {
+# 'dt': [],
+# 'pt': [],
+# 'pn': []
+# }
+#
+# # Init data writer
+# writer = DataWriter(args.save_video).start()
+#
+# data_len = data_loader.length()
+# im_names_desc = tqdm(range(data_len))
+#
+# batchSize = args.posebatch
+# for i in im_names_desc:
+# start_time = getTime()
+# with torch.no_grad():
+# (inps, orig_img, im_name, boxes, scores, pt1, pt2) = det_processor.read()
+# if boxes is None or boxes.nelement() == 0:
+# writer.save(None, None, None, None, None, orig_img, im_name.split('/')[-1])
+# continue
+#
+# ckpt_time, det_time = getTime(start_time)
+# runtime_profile['dt'].append(det_time)
+# # Pose Estimation
+#
+# datalen = inps.size(0)
+# leftover = 0
+# if (datalen) % batchSize:
+# leftover = 1
+# num_batches = datalen // batchSize + leftover
+# hm = []
+# for j in range(num_batches):
+# inps_j = inps[j * batchSize:min((j + 1) * batchSize, datalen)].cuda()
+# hm_j = pose_model(inps_j)
+# hm.append(hm_j)
+# hm = torch.cat(hm)
+# ckpt_time, pose_time = getTime(ckpt_time)
+# runtime_profile['pt'].append(pose_time)
+# hm = hm.cpu()
+# writer.save(boxes, scores, hm, pt1, pt2, orig_img, im_name.split('/')[-1])
+#
+# ckpt_time, post_time = getTime(ckpt_time)
+# runtime_profile['pn'].append(post_time)
+#
+# if args.profile:
+# # TQDM
+# im_names_desc.set_description(
+# 'det time: {dt:.3f} | pose time: {pt:.2f} | post processing: {pn:.4f}'.format(
+# dt=np.mean(runtime_profile['dt']), pt=np.mean(runtime_profile['pt']), pn=np.mean(runtime_profile['pn']))
+# )
+#
+# print('===========================> Finish Model Running.')
+# if (args.save_img or args.save_video) and not args.vis_fast:
+# print('===========================> Rendering remaining images in the queue...')
+# print('===========================> If this step takes too long, you can enable the --vis_fast flag to use fast rendering (real-time).')
+# while writer.running():
+# pass
+# writer.stop()
+# final_result = writer.results()
+# write_json(final_result, args.outputpath)
+#
+#
+# if __name__ == "__main__":
+# args = opt
+# args.dataset = 'coco'
+# args.sp = True
+# if not args.sp:
+# torch.multiprocessing.set_start_method('forkserver', force=True)
+# torch.multiprocessing.set_sharing_strategy('file_system')
+#
+# video_name = 'kobe'
+#
+# args.inputpath = f'../in_the_wild_data/split_{video_name}'
+# if not os.listdir(args.inputpath):
+# split_frame.split(f'../in_the_wild_data/{video_name}.mp4')
+#
+# args.outputpath = f'../in_the_wild_data/alphapose_{video_name}'
+# args.save_img = True
+#
+# args.detbatch = 4
+#
+# main(args)
diff --git a/joints_detectors/Alphapose/doc/CrowdPose.md b/joints_detectors/Alphapose/doc/CrowdPose.md
new file mode 100644
index 0000000000000000000000000000000000000000..ba8428167feb80bdac1c18984709fd1515ebe1a6
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/CrowdPose.md
@@ -0,0 +1,64 @@
+## [CrowdPose: Efficient Crowded Scenes Pose Estimation and A New Benchmark](https://arxiv.org/abs/1812.00324) *(accepted to CVPR2019)*
+
+
+
+
+
+## Introduction
+Our proposed method surpasses the state-of-the-art methods on the [CrowdPose](https://arxiv.org/abs/1812.00324) dataset by **5** mAP, and results on the MSCOCO dataset demonstrate the generalization ability of our method (**0.8** mAP higher). Images in our proposed CrowdPose dataset have a uniform distribution of *Crowd Index* over \[0, 1\].
+
+## Code
+We provide [evaluation tools](https://github.com/Jeff-sjtu/CrowdPose) for the CrowdPose dataset. Our evaluation tools are developed based on [@cocodataset/cocoapi](https://github.com/cocodataset/cocoapi). The source code of our model is integrated into [AlphaPose](https://github.com/MVIG-SJTU/AlphaPose/tree/pytorch).
+
+## Quick Start
+Run with the `--matching` option to use the matching algorithm from CrowdPose.
+
+- **Input dir**: Run AlphaPose for all images in a folder with:
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --matching
+```
+
+## Dataset
+[Train + Validation + Test Images](https://drive.google.com/file/d/1VprytECcLtU4tKP32SYi_7oDRbw7yUTL/view?usp=sharing) (Google Drive)
+
+[Annotations](https://drive.google.com/open?id=196vsma1uuLLCcUt1NrXp1K8PBU6tVH8w) (Google Drive)
+
+## Results
+
+**Results on CrowdPose Validation:**
+
+*Compare with state-of-the-art methods*
+
+
+| Method | AP @0.5:0.95 | AP @0.5 | AP @0.75 | AR @0.5:0.95 | AR @0.5 | AR @0.75 |
+|:-------|:-----:|:-------:|:-------:|:-------:|:-------:|:-------:|
+| Detectron (Mask R-CNN) | 57.2 | 83.5 | 60.3 | 65.9 | 89.3 | 69.4 |
+| Simple Pose (Xiao *et al.*) | 60.8 | 81.4 | 65.7 | 67.3 | 86.3 | 71.8 |
+| **Ours** | **66.0** | **84.2** | **71.5** | **72.7** | **89.5** | **77.5** |
+
+
+
+*Compare with open-source systems*
+
+
+| Method | AP @*Easy* | AP @*Medium* | AP @*Hard* | FPS |
+|:-------|:-----:|:-------:|:-------:|:-------:|
+| OpenPose (CMU-Pose) | 62.7 | 48.7 | 32.3 | 5.3 |
+| Detectron (Mask R-CNN) | 69.4 | 57.9 | 45.8 | 2.9 |
+| **Ours** | **75.5** | **66.3** | **57.4** | **10.1** |
+
+
+
+**Results on MSCOCO Validation:**
+
+
+| Method | AP @0.5:0.95 | AR @0.5:0.95 |
+|:-------|:-----:|:-------:|
+| Detectron (Mask R-CNN) | 64.8 | 71.1 |
+| Simple Pose (Xiao *et al.*) | 69.8 | 74.1 |
+| **AlphaPose** | **70.9** | **76.4** |
+
+
+
+## Contributors
+CrowdPose is authored by [Jiefeng Li](http://jeff-leaf.site/), [Can Wang](https://github.com/Canwang-sjtu), [Hao Zhu](https://github.com/BernieZhu), Yihuan Mao, [Hao-Shu Fang](https://fang-haoshu.github.io/), and [Cewu Lu](http://www.mvig.org/).
diff --git a/joints_detectors/Alphapose/doc/contributors.md b/joints_detectors/Alphapose/doc/contributors.md
new file mode 100644
index 0000000000000000000000000000000000000000..cb02844c7d040a4420b6361273845af19aa1b90a
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/contributors.md
@@ -0,0 +1 @@
+Alpha Pose is contributed and maintained by Hao-Shu Fang, Jiefeng Li, Yuliang Xiu, Ruiheng Chang and Cewu Lu.
diff --git a/joints_detectors/Alphapose/doc/faq.md b/joints_detectors/Alphapose/doc/faq.md
new file mode 100644
index 0000000000000000000000000000000000000000..0569b9b715891c0eb27613170a00b6fcf87e79db
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/faq.md
@@ -0,0 +1,32 @@
+AlphaPose - Frequently Asked Questions (FAQ)
+============================================
+
+## FAQ
+1. [Can't open webcam or video file](#cant-open-webcam-or-video-file)
+2. [Can't open webcam](#cant-open-webcam)
+
+### Can't open webcam or video file
+**Q:** - I can't open the webcam or video file.
+
+**A**: Try re-installing `opencv-python` with version >= 3.3.1.11:
+```
+pip3 uninstall opencv_python
+pip3 install opencv_python --user
+```
+This is a known issue (see https://github.com/opencv/opencv/issues/8471). The workaround I use is
+```
+sudo cp /build/lib/python3/cv2.cpython-35m-x86_64-linux-gnu.so /usr/local/lib/python3.5/dist-packages/cv2/cv2.cpython-35m-x86_64-linux-gnu.so
+```
+The idea is to replace the cv2.so library provided by PyPI with one compiled from source. See https://github.com/opencv/opencv/issues/8471 for more details.
+
+### Can't open webcam
+**Q:** - I can't open the webcam with the latest `opencv-python`
+
+**A**: Check if your device is valid by
+```
+ls /dev/video*
+```
+Usually you will find `video0`, but if your device has another index, such as `video3`, run the program with
+```
+python3 webcam_demo.py --webcam 3 --outdir examples/res --vis
+```
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/doc/output.md b/joints_detectors/Alphapose/doc/output.md
new file mode 100644
index 0000000000000000000000000000000000000000..922b844ac6afc261951e183fcbc8d14b1dfc97d5
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/output.md
@@ -0,0 +1,143 @@
+AlphaPose - Output format
+====================================
+
+
+
+## Contents
+1. [Output Format](#output-format)
+ 1. [Keypoint Ordering](#keypoint-ordering)
+
+
+## Output Format
+1. By default, we save the results for all images in one json file, which is similar to the [results format](http://cocodataset.org/#format) used by COCO.
+ 1. `keypoints` contains the body part locations and detection confidence formatted as `x1,y1,c1,x2,y2,c2,...`. `c` is the confidence score in the range [0,1] for MPII dataset and range [0,6] for COCO dataset.
+ 2. `score` is the confidence score for the whole person, computed by our parametric pose NMS.
+```
+[
+ // for person_1 in image_1
+ {
+ "image_id" : string, image_1_name,
+ "category_id" : int, 1 for person
+ "keypoints" : [x1,y1,c1,...,xk,yk,ck],
+ "score" : float,
+ },
+ // for person_2 in image_1
+ {
+ "image_id" : string, image_1_name,
+ "category_id" : int, 1 for person
+ "keypoints" : [x1,y1,c1,...,xk,yk,ck],
+ "score" : float,
+ },
+ ...
+ // for persons in image_2
+{
+ "image_id" : string, image_2_name,
+ "category_id" : int, 1 for person
+ "keypoints" : [x1,y1,c1,...,xk,yk,ck],
+ "score" : float,
+ },
+ ...
+]
+```
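+
+A minimal sketch of reading these results back in Python, assuming the default COCO-like format above; the file name `alphapose-results.json` is only an illustrative placeholder:
+
+```python
+import json
+
+import numpy as np
+
+with open('alphapose-results.json') as f:
+    results = json.load(f)
+
+for person in results:
+    # each entry describes one detected person in one image
+    kp = np.asarray(person['keypoints']).reshape(-1, 3)  # (K, 3): x, y, confidence
+    xy, conf = kp[:, :2], kp[:, 2]
+    print(person['image_id'], person['score'], xy.shape)
+```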
+
+2. If the `--format` flag is set to 'cmu', we will save the results for each image in the format used by CMU-Pose.
+```
+{
+ "version":0.1,
+ "bodies":[
+ {"joints":[x1,y1,c1,...,xk,yk,ck]},
+ {"joints":[x1,y1,c1,...,xk,yk,ck]},
+ ]
+}
+```
+
+3. If the `--format` flag is set to 'open', we will save the results for each image in the format used by OpenPose.
+```
+{
+ "version":0.1,
+ "people":[
+ {"pose_keypoints_2d":[x1,y1,c1,...,xk,yk,ck]},
+ {"pose_keypoints_2d":[x1,y1,c1,...,xk,yk,ck]},
+ ]
+}
+```
+
+### Keypoint Ordering
+The default keypoint order is
+```
+// Result for COCO (17 body parts)
+ {0, "Nose"},
+ {1, "LEye"},
+ {2, "REye"},
+ {3, "LEar"},
+ {4, "REar"},
+ {5, "LShoulder"},
+ {6, "RShoulder"},
+ {7, "LElbow"},
+ {8, "RElbow"},
+ {9, "LWrist"},
+ {10, "RWrist"},
+ {11, "LHip"},
+ {12, "RHip"},
+ {13, "LKnee"},
+ {14, "Rknee"},
+ {15, "LAnkle"},
+ {16, "RAnkle"},
+// Result for MPII (16 body parts)
+ {0, "RAnkle"},
+ {1, "Rknee"},
+ {2, "RHip"},
+ {3, "LHip"},
+ {4, "LKnee"},
+ {5, "LAnkle"},
+ {6, "Pelv"},
+ {7, "Thrx"},
+ {8, "Neck"},
+ {9, "Head"},
+ {10, "RWrist"},
+ {11, "RElbow"},
+ {12, "RShoulder"},
+ {13, "LShoulder"},
+ {14, "LElbow"},
+ {15, "LWrist"},
+```
+If the `--format` flag is set to 'cmu' or 'open', the keypoint order is
+```
+//Result for COCO (18 body parts)
+ {0, "Nose"},
+ {1, "Neck"},
+ {2, "RShoulder"},
+ {3, "RElbow"},
+ {4, "RWrist"},
+ {5, "LShoulder"},
+ {6, "LElbow"},
+ {7, "LWrist"},
+ {8, "RHip"},
+ {9, "RKnee"},
+ {10, "RAnkle"},
+ {11, "LHip"},
+ {12, "LKnee"},
+ {13, "LAnkle"},
+ {14, "REye"},
+ {15, "LEye"},
+ {16, "REar"},
+ {17, "LEar"},
+// Result for MPII (15 body parts)
+ {0, "Head"},
+ {1, "Neck"},
+ {2, "RShoulder"},
+ {3, "RElbow"},
+ {4, "RWrist"},
+ {5, "LShoulder"},
+ {6, "LElbow"},
+ {7, "LWrist"},
+ {8, "RHip"},
+ {9, "RKnee"},
+ {10, "RAnkle"},
+ {11, "LHip"},
+ {12, "LKnee"},
+ {13, "LAnkle"},
+ {14, "Thrx"},
+```
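+
+For illustration, a minimal sketch of remapping the default 17-keypoint COCO order above to the 18-keypoint 'open' order. Synthesizing the Neck as the midpoint of the two shoulders is an assumption, mirroring what the visualization code in `fn.py` does:
+
+```python
+import numpy as np
+
+# default-COCO index feeding each slot of the 'open' order; Neck (slot 1) is synthesized below
+COCO_TO_OPEN = [0, None, 6, 8, 10, 5, 7, 9, 12, 14, 16, 11, 13, 15, 2, 1, 4, 3]
+
+def coco17_to_open18(kp):
+    """kp: (17, 3) array of x, y, confidence in the default COCO order."""
+    out = np.zeros((18, 3), dtype=kp.dtype)
+    for open_i, coco_i in enumerate(COCO_TO_OPEN):
+        if coco_i is not None:
+            out[open_i] = kp[coco_i]
+    out[1] = (kp[5] + kp[6]) / 2  # Neck: midpoint of LShoulder and RShoulder (assumption)
+    return out
+```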
+
diff --git a/joints_detectors/Alphapose/doc/run.md b/joints_detectors/Alphapose/doc/run.md
new file mode 100644
index 0000000000000000000000000000000000000000..fe830a27def9e26d6154814ead78f5836076d717
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/run.md
@@ -0,0 +1,35 @@
+AlphaPose Usage & Examples
+====================================
+
+Here, we first list the flags of this script and then give some examples.
+
+## Flags
+- `--indir`: Directory of the input images. All the images in the directory will be processed.
+- `--list`: A text file listing the input images.
+- `--video`: Read video and process the video frame by frame.
+- `--outdir`: Output directory to store the pose estimation results.
+- `--vis`: If turned on, render the results and visualize them in a window.
+- `--save_img`: If turned on, render the results and save them as images in $outdir/vis.
+- `--save_video`: If turned on, render the results and save them as a video.
+- `--vis_fast`: If turned on, use a faster rendering method. Default is false.
+- `--format`: The format of the saved results. By default, it will save the output in COCO-like format. Alternative options are 'cmu' and 'open', which saves the results in the format of CMU-Pose or OpenPose. For more details, see [output.md](output.md)
+- `--conf`: Confidence threshold for human detection. Lowering the value can improve the final accuracy but decreases the speed. Default is 0.1.
+- `--nms`: NMS threshold for human detection. Increasing the value can improve the final accuracy but decreases the speed. Default is 0.6.
+- `--detbatch`: Batch size for the detection network.
+- `--posebatch`: Maximum batch size for the pose estimation network. If you hit an OOM problem, decrease this value until it fits in memory.
+- `--sp`: Run the program using a single process. Windows users need to turn this flag on.
+- `--inp_dim`: The input size of the detection network; it must be a multiple of 32. Default is 608.
+
+## Examples
+- **Run AlphaPose for all images in a folder, save the results in CMU-Pose format, and save the rendered images**:
+```
+python3 demo.py --indir examples/demo/ --outdir examples/results/ --save_img --format cmu
+```
+- **Run AlphaPose for a video, save the rendered video, and use the faster rendering method**:
+```
+python3 video_demo.py --video examples/input.mp4 --outdir examples/results/ --save_video --vis_fast
+```
+- **Run AlphaPose for a video, speeding it up by increasing the confidence threshold and lowering the NMS threshold**:
+```
+python3 video_demo.py --video examples/input.mp4 --outdir examples/results/ --conf 0.5 --nms 0.45
+```
diff --git a/joints_detectors/Alphapose/doc/speed_up.md b/joints_detectors/Alphapose/doc/speed_up.md
new file mode 100644
index 0000000000000000000000000000000000000000..3dd12e9d298bfafaa9831bfcecccc56468491f76
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/speed_up.md
@@ -0,0 +1,16 @@
+AlphaPose - Speeding Up
+============================================
+
+
+Run AlphaPose for a video, speeding it up by increasing the confidence threshold and lowering the NMS threshold:
+```
+python3 video_demo.py --video ${path to video} --outdir examples/results/ --conf 0.5 --nms 0.45
+```
+For users with GPU memory >= 8 GB, I suggest increasing the detection batch size:
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --detbatch 2
+```
+For users who do not need to detect small persons, I suggest lowering the input size of the detection network; `inp_dim` should be a multiple of 32.
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --inp_dim 480
+```
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/doc/win_install.md b/joints_detectors/Alphapose/doc/win_install.md
new file mode 100644
index 0000000000000000000000000000000000000000..e98e039df0139e6d97b34bbdb55e9ef2e013fab5
--- /dev/null
+++ b/joints_detectors/Alphapose/doc/win_install.md
@@ -0,0 +1,37 @@
+AlphaPose - Windows Installation
+============================================
+
+Tested on Win10 with GTX 1060
+
+1. Download and install Git for Windows: https://git-scm.com/download/win
+2. Download and install Python 3 (3.6 or 3.7): https://www.python.org/downloads/
+3. Download and install CUDA toolkit: https://developer.nvidia.com/cuda-downloads
+4. Download and install PyTorch: https://pytorch.org/
+   Right-click and choose "Git Bash Here"
+
+

+
+ Input the command here and press Enter
+
+

+
+5. Download win-64/intel-openmp-2018.0.0-8.tar.bz2: https://anaconda.org/anaconda/intel-openmp/files
+   Put the .dll files from Library\bin into a directory, then append that directory to the PATH environment variable.
+   I suggest putting them in C:\Users\\bin, since this directory is already on the PATH.
+6. Right-click, choose "Git Bash Here", and then follow the instructions in the README to install AlphaPose
+
+

+
+7. Have fun! You can now run AlphaPose from the command line. Try the webcam demo:
+```
+python3 webcam_demo.py --vis
+```
+**Note:** For `demo.py` and `video_demo.py`, you need to turn on the `--sp` flag, e.g.
+
+```
+python3 demo.py --indir ${img_directory} --outdir examples/res --sp
+```
+
+
+

+
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/examples/list-coco-demo.txt b/joints_detectors/Alphapose/examples/list-coco-demo.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8ad8ec54810e3a24dc296371dd04604f43fbf782
--- /dev/null
+++ b/joints_detectors/Alphapose/examples/list-coco-demo.txt
@@ -0,0 +1,20 @@
+000000375530.jpg
+000000244462.jpg
+000000113397.jpg
+000000113408.jpg
+000000375554.jpg
+000000171819.jpg
+000000375566.jpg
+000000244496.jpg
+000000139077.jpg
+000000506656.jpg
+000000375606.jpg
+000000244539.jpg
+000000565045.jpg
+000000113473.jpg
+000000375621.jpg
+000000244550.jpg
+000000492605.jpg
+000000506707.jpg
+000000113493.jpg
+000000215524.jpg
diff --git a/joints_detectors/Alphapose/examples/list-coco-minival500.txt b/joints_detectors/Alphapose/examples/list-coco-minival500.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e6d257e94e4cd88494c37aca4d4e89d1a182738d
--- /dev/null
+++ b/joints_detectors/Alphapose/examples/list-coco-minival500.txt
@@ -0,0 +1,500 @@
+000000375521.jpg
+000000244455.jpg
+000000375530.jpg
+000000244462.jpg
+000000113397.jpg
+000000113408.jpg
+000000375554.jpg
+000000171819.jpg
+000000375566.jpg
+000000244496.jpg
+000000139077.jpg
+000000506656.jpg
+000000375606.jpg
+000000244539.jpg
+000000565045.jpg
+000000113473.jpg
+000000375621.jpg
+000000244550.jpg
+000000492605.jpg
+000000506707.jpg
+000000113493.jpg
+000000215524.jpg
+000000506717.jpg
+000000506723.jpg
+000000433980.jpg
+000000244586.jpg
+000000113533.jpg
+000000113559.jpg
+000000390298.jpg
+000000281072.jpg
+000000113571.jpg
+000000543218.jpg
+000000506803.jpg
+000000113588.jpg
+000000113590.jpg
+000000244665.jpg
+000000375755.jpg
+000000375769.jpg
+000000324776.jpg
+000000506872.jpg
+000000506874.jpg
+000000375810.jpg
+000000375812.jpg
+000000113678.jpg
+000000375823.jpg
+000000261292.jpg
+000000506933.jpg
+000000113720.jpg
+000000113722.jpg
+000000506945.jpg
+000000375875.jpg
+000000506950.jpg
+000000113736.jpg
+000000375881.jpg
+000000244815.jpg
+000000113745.jpg
+000000113756.jpg
+000000113757.jpg
+000000375902.jpg
+000000244833.jpg
+000000244834.jpg
+000000507015.jpg
+000000178370.jpg
+000000412184.jpg
+000000507037.jpg
+000000507065.jpg
+000000244925.jpg
+000000244931.jpg
+000000507080.jpg
+000000193743.jpg
+000000104176.jpg
+000000113890.jpg
+000000113905.jpg
+000000113914.jpg
+000000376059.jpg
+000000244999.jpg
+000000507147.jpg
+000000507154.jpg
+000000245013.jpg
+000000376093.jpg
+000000507167.jpg
+000000245026.jpg
+000000507171.jpg
+000000106375.jpg
+000000507180.jpg
+000000376112.jpg
+000000507187.jpg
+000000113975.jpg
+000000113989.jpg
+000000507211.jpg
+000000507223.jpg
+000000376160.jpg
+000000114025.jpg
+000000376185.jpg
+000000114049.jpg
+000000114055.jpg
+000000507273.jpg
+000000376206.jpg
+000000376208.jpg
+000000376209.jpg
+000000376233.jpg
+000000376236.jpg
+000000507312.jpg
+000000245174.jpg
+000000376247.jpg
+000000114108.jpg
+000000245182.jpg
+000000507330.jpg
+000000507352.jpg
+000000114147.jpg
+000000376295.jpg
+000000245227.jpg
+000000114158.jpg
+000000376307.jpg
+000000376322.jpg
+000000106411.jpg
+000000114183.jpg
+000000376342.jpg
+000000507427.jpg
+000000062726.jpg
+000000507436.jpg
+000000434098.jpg
+000000245295.jpg
+000000376372.jpg
+000000245301.jpg
+000000245311.jpg
+000000245313.jpg
+000000245315.jpg
+000000245320.jpg
+000000376393.jpg
+000000507473.jpg
+000000038721.jpg
+000000376416.jpg
+000000114282.jpg
+000000114291.jpg
+000000376441.jpg
+000000376449.jpg
+000000376450.jpg
+000000245383.jpg
+000000376456.jpg
+000000114313.jpg
+000000114317.jpg
+000000376469.jpg
+000000507551.jpg
+000000245411.jpg
+000000245414.jpg
+000000114352.jpg
+000000245426.jpg
+000000245430.jpg
+000000245432.jpg
+000000245447.jpg
+000000245448.jpg
+000000376521.jpg
+000000245453.jpg
+000000376531.jpg
+000000245460.jpg
+000000114389.jpg
+000000245462.jpg
+000000376545.jpg
+000000376549.jpg
+000000114414.jpg
+000000507633.jpg
+000000245497.jpg
+000000376575.jpg
+000000507663.jpg
+000000507667.jpg
+000000114453.jpg
+000000281220.jpg
+000000114458.jpg
+000000376603.jpg
+000000114481.jpg
+000000376628.jpg
+000000507719.jpg
+000000139113.jpg
+000000172004.jpg
+000000507750.jpg
+000000376679.jpg
+000000346774.jpg
+000000507782.jpg
+000000245642.jpg
+000000507797.jpg
+000000245660.jpg
+000000245667.jpg
+000000303089.jpg
+000000376751.jpg
+000000507826.jpg
+000000507833.jpg
+000000376773.jpg
+000000114634.jpg
+000000245716.jpg
+000000303101.jpg
+000000108130.jpg
+000000245754.jpg
+000000114684.jpg
+000000477867.jpg
+000000114710.jpg
+000000507927.jpg
+000000376859.jpg
+000000114717.jpg
+000000507935.jpg
+000000507945.jpg
+000000114744.jpg
+000000114745.jpg
+000000245818.jpg
+000000376891.jpg
+000000507966.jpg
+000000376900.jpg
+000000507975.jpg
+000000106508.jpg
+000000507979.jpg
+000000019129.jpg
+000000508006.jpg
+000000376939.jpg
+000000376959.jpg
+000000245898.jpg
+000000376990.jpg
+000000114855.jpg
+000000114868.jpg
+000000114870.jpg
+000000508087.jpg
+000000114884.jpg
+000000508101.jpg
+000000508119.jpg
+000000114907.jpg
+000000150224.jpg
+000000377060.jpg
+000000246001.jpg
+000000246004.jpg
+000000246005.jpg
+000000246014.jpg
+000000114946.jpg
+000000377091.jpg
+000000508167.jpg
+000000377097.jpg
+000000377111.jpg
+000000246040.jpg
+000000377113.jpg
+000000246053.jpg
+000000246057.jpg
+000000061605.jpg
+000000246064.jpg
+000000246066.jpg
+000000115006.jpg
+000000377155.jpg
+000000508230.jpg
+000000246105.jpg
+000000246106.jpg
+000000377183.jpg
+000000115043.jpg
+000000377195.jpg
+000000246124.jpg
+000000246125.jpg
+000000115060.jpg
+000000508288.jpg
+000000508302.jpg
+000000508303.jpg
+000000377239.jpg
+000000246183.jpg
+000000508339.jpg
+000000062878.jpg
+000000115128.jpg
+000000115146.jpg
+000000508370.jpg
+000000508373.jpg
+000000246231.jpg
+000000246252.jpg
+000000246265.jpg
+000000377352.jpg
+000000377361.jpg
+000000377385.jpg
+000000115243.jpg
+000000115245.jpg
+000000508470.jpg
+000000521655.jpg
+000000377427.jpg
+000000182784.jpg
+000000390585.jpg
+000000508514.jpg
+000000115314.jpg
+000000565353.jpg
+000000508538.jpg
+000000456127.jpg
+000000246398.jpg
+000000377486.jpg
+000000115363.jpg
+000000115370.jpg
+000000246454.jpg
+000000508605.jpg
+000000377577.jpg
+000000508656.jpg
+000000377588.jpg
+000000115455.jpg
+000000115459.jpg
+000000246535.jpg
+000000246562.jpg
+000000377635.jpg
+000000128476.jpg
+000000246576.jpg
+000000377652.jpg
+000000508730.jpg
+000000246589.jpg
+000000246590.jpg
+000000115521.jpg
+000000246597.jpg
+000000246612.jpg
+000000246626.jpg
+000000246629.jpg
+000000377706.jpg
+000000305000.jpg
+000000377715.jpg
+000000246649.jpg
+000000115579.jpg
+000000115584.jpg
+000000377730.jpg
+000000508811.jpg
+000000508822.jpg
+000000246686.jpg
+000000150342.jpg
+000000115626.jpg
+000000115636.jpg
+000000508872.jpg
+000000377802.jpg
+000000377809.jpg
+000000115667.jpg
+000000246746.jpg
+000000377832.jpg
+000000368807.jpg
+000000115700.jpg
+000000508917.jpg
+000000246782.jpg
+000000115721.jpg
+000000508949.jpg
+000000508950.jpg
+000000543577.jpg
+000000508962.jpg
+000000115752.jpg
+000000508972.jpg
+000000508977.jpg
+000000309371.jpg
+000000115765.jpg
+000000377910.jpg
+000000508985.jpg
+000000246843.jpg
+000000115772.jpg
+000000106677.jpg
+000000115791.jpg
+000000115793.jpg
+000000115796.jpg
+000000509014.jpg
+000000509020.jpg
+000000377949.jpg
+000000246878.jpg
+000000377951.jpg
+000000246883.jpg
+000000509028.jpg
+000000509037.jpg
+000000115823.jpg
+000000115830.jpg
+000000195896.jpg
+000000377984.jpg
+000000377999.jpg
+000000368836.jpg
+000000115870.jpg
+000000115875.jpg
+000000246951.jpg
+000000246963.jpg
+000000246968.jpg
+000000115898.jpg
+000000378048.jpg
+000000115912.jpg
+000000182805.jpg
+000000115924.jpg
+000000115930.jpg
+000000562519.jpg
+000000509158.jpg
+000000378096.jpg
+000000378116.jpg
+000000509192.jpg
+000000509194.jpg
+000000368855.jpg
+000000378126.jpg
+000000247057.jpg
+000000378134.jpg
+000000378147.jpg
+000000509223.jpg
+000000509227.jpg
+000000116017.jpg
+000000378163.jpg
+000000116023.jpg
+000000378169.jpg
+000000200267.jpg
+000000116037.jpg
+000000116046.jpg
+000000259640.jpg
+000000378204.jpg
+000000116061.jpg
+000000116067.jpg
+000000116068.jpg
+000000247141.jpg
+000000116083.jpg
+000000116096.jpg
+000000063040.jpg
+000000378244.jpg
+000000509319.jpg
+000000247179.jpg
+000000116132.jpg
+000000116133.jpg
+000000247206.jpg
+000000543644.jpg
+000000378284.jpg
+000000247216.jpg
+000000116149.jpg
+000000509366.jpg
+000000116173.jpg
+000000247259.jpg
+000000378334.jpg
+000000247264.jpg
+000000116202.jpg
+000000378347.jpg
+000000116208.jpg
+000000281512.jpg
+000000247285.jpg
+000000247306.jpg
+000000509451.jpg
+000000509459.jpg
+000000247317.jpg
+000000509471.jpg
+000000116261.jpg
+000000509497.jpg
+000000509514.jpg
+000000247378.jpg
+000000509526.jpg
+000000509536.jpg
+000000378467.jpg
+000000543676.jpg
+000000378482.jpg
+000000116341.jpg
+000000194153.jpg
+000000116354.jpg
+000000378502.jpg
+000000509577.jpg
+000000247438.jpg
+000000378515.jpg
+000000509589.jpg
+000000509590.jpg
+000000116377.jpg
+000000172315.jpg
+000000116389.jpg
+000000378538.jpg
+000000247474.jpg
+000000116405.jpg
+000000247484.jpg
+000000378561.jpg
+000000116434.jpg
+000000116439.jpg
+000000247519.jpg
+000000116455.jpg
+000000291936.jpg
+000000116466.jpg
+000000378614.jpg
+000000509695.jpg
+000000378652.jpg
+000000378657.jpg
+000000378658.jpg
+000000247587.jpg
+000000116517.jpg
+000000378667.jpg
+000000378673.jpg
+000000172342.jpg
+000000509766.jpg
+000000247625.jpg
+000000247639.jpg
+000000509786.jpg
+000000116574.jpg
+000000378727.jpg
+000000509811.jpg
+000000509826.jpg
+000000247692.jpg
+000000247714.jpg
+000000247720.jpg
+000000509867.jpg
+000000509891.jpg
+000000378823.jpg
+000000378825.jpg
+000000378831.jpg
+000000116696.jpg
+000000247782.jpg
+000000116712.jpg
+000000247788.jpg
+000000247790.jpg
+000000378873.jpg
+000000247808.jpg
+000000259755.jpg
+000000378894.jpg
+000000247839.jpg
+000000247840.jpg
+000000378928.jpg
+000000378940.jpg
+000000378948.jpg
+000000378962.jpg
diff --git a/joints_detectors/Alphapose/fn.py b/joints_detectors/Alphapose/fn.py
new file mode 100644
index 0000000000000000000000000000000000000000..7669df4f4c555fbb11252d916e8a89595e1a0865
--- /dev/null
+++ b/joints_detectors/Alphapose/fn.py
@@ -0,0 +1,233 @@
+import collections
+import math
+import re
+import time
+
+import cv2
+import numpy as np
+import torch
+from torch._six import string_classes
+
+RED = (0, 0, 255)
+GREEN = (0, 255, 0)
+BLUE = (255, 0, 0)
+CYAN = (255, 255, 0)
+YELLOW = (0, 255, 255)
+ORANGE = (0, 165, 255)
+PURPLE = (255, 0, 255)
+
+numpy_type_map = {
+ 'float64': torch.DoubleTensor,
+ 'float32': torch.FloatTensor,
+ 'float16': torch.HalfTensor,
+ 'int64': torch.LongTensor,
+ 'int32': torch.IntTensor,
+ 'int16': torch.ShortTensor,
+ 'int8': torch.CharTensor,
+ 'uint8': torch.ByteTensor,
+}
+
+_use_shared_memory = True
+
+
+def collate_fn(batch):
+ r"""Puts each data field into a tensor with outer dimension batch size"""
+
+ error_msg = "batch must contain tensors, numbers, dicts or lists; found {}"
+ elem_type = type(batch[0])
+
+ if isinstance(batch[0], torch.Tensor):
+ out = None
+ if _use_shared_memory:
+ # If we're in a background process, concatenate directly into a
+ # shared memory tensor to avoid an extra copy
+ numel = sum([x.numel() for x in batch])
+ storage = batch[0].storage()._new_shared(numel)
+ out = batch[0].new(storage)
+ return torch.stack(batch, 0, out=out)
+ elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \
+ and elem_type.__name__ != 'string_':
+ elem = batch[0]
+ if elem_type.__name__ == 'ndarray':
+ # array of string classes and object
+ if re.search('[SaUO]', elem.dtype.str) is not None:
+ raise TypeError(error_msg.format(elem.dtype))
+
+ return torch.stack([torch.from_numpy(b) for b in batch], 0)
+ if elem.shape == (): # scalars
+ py_type = float if elem.dtype.name.startswith('float') else int
+ return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
+ elif isinstance(batch[0], int):
+ return torch.LongTensor(batch)
+ elif isinstance(batch[0], float):
+ return torch.DoubleTensor(batch)
+ elif isinstance(batch[0], string_classes):
+ return batch
+ elif isinstance(batch[0], collections.Mapping):
+ return {key: collate_fn([d[key] for d in batch]) for key in batch[0]}
+ elif isinstance(batch[0], collections.Sequence):
+ transposed = zip(*batch)
+ return [collate_fn(samples) for samples in transposed]
+
+ raise TypeError((error_msg.format(type(batch[0]))))
+
+
+def collate_fn_list(batch):
+ img, inp, im_name = zip(*batch)
+ img = collate_fn(img)
+ im_name = collate_fn(im_name)
+
+ return img, inp, im_name
+
+
+def vis_frame_fast(frame, im_res, format='coco'):
+ '''
+ frame: frame image
+ im_res: im_res of predictions
+ format: coco or mpii
+
+ return rendered image
+ '''
+ if format == 'coco':
+ l_pair = [
+ (0, 1), (0, 2), (1, 3), (2, 4), # Head
+ (5, 6), (5, 7), (7, 9), (6, 8), (8, 10),
+ (17, 11), (17, 12), # Body
+ (11, 13), (12, 14), (13, 15), (14, 16)
+ ]
+ p_color = [(0, 255, 255), (0, 191, 255), (0, 255, 102), (0, 77, 255), (0, 255, 0), # Nose, LEye, REye, LEar, REar
+ (77, 255, 255), (77, 255, 204), (77, 204, 255), (191, 255, 77), (77, 191, 255), (191, 255, 77),
+ # LShoulder, RShoulder, LElbow, RElbow, LWrist, RWrist
+ (204, 77, 255), (77, 255, 204), (191, 77, 255), (77, 255, 191), (127, 77, 255), (77, 255, 127),
+ (0, 255, 255)] # LHip, RHip, LKnee, Rknee, LAnkle, RAnkle, Neck
+ line_color = [(0, 215, 255), (0, 255, 204), (0, 134, 255), (0, 255, 50),
+ (77, 255, 222), (77, 196, 255), (77, 135, 255), (191, 255, 77), (77, 255, 77),
+ (77, 222, 255), (255, 156, 127),
+ (0, 127, 255), (255, 127, 77), (0, 77, 255), (255, 77, 36)]
+ elif format == 'mpii':
+ l_pair = [
+ (8, 9), (11, 12), (11, 10), (2, 1), (1, 0),
+ (13, 14), (14, 15), (3, 4), (4, 5),
+ (8, 7), (7, 6), (6, 2), (6, 3), (8, 12), (8, 13)
+ ]
+        p_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE, RED, RED, PURPLE, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+        line_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE, RED, RED, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+    else:
+        raise NotImplementedError
+
+ im_name = im_res['imgname'].split('/')[-1]
+ img = frame
+ for human in im_res['result']:
+ part_line = {}
+ kp_preds = human['keypoints']
+ kp_scores = human['kp_score']
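+        # Append a synthetic neck joint (index 17) as the midpoint of the two
+        # shoulders (COCO indices 5 and 6), giving the Body limb pairs an anchor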
+ kp_preds = torch.cat((kp_preds, torch.unsqueeze((kp_preds[5, :] + kp_preds[6, :]) / 2, 0)))
+ kp_scores = torch.cat((kp_scores, torch.unsqueeze((kp_scores[5, :] + kp_scores[6, :]) / 2, 0)))
+ # Draw keypoints
+ for n in range(kp_scores.shape[0]):
+ if kp_scores[n] <= 0.05:
+ continue
+ cor_x, cor_y = int(kp_preds[n, 0]), int(kp_preds[n, 1])
+ part_line[n] = (cor_x, cor_y)
+ cv2.circle(img, (cor_x, cor_y), 4, p_color[n], -1)
+ # Draw limbs
+ for i, (start_p, end_p) in enumerate(l_pair):
+ if start_p in part_line and end_p in part_line:
+ start_xy = part_line[start_p]
+ end_xy = part_line[end_p]
+                cv2.line(img, start_xy, end_xy, line_color[i], int(2 * (kp_scores[start_p] + kp_scores[end_p]) + 1))
+ return img
+
+
+def vis_frame(frame, im_res, format='coco'):
+    '''
+    frame: frame image
+    im_res: prediction results for this frame
+    format: coco or mpii
+
+    return: rendered image
+    '''
+ if format == 'coco':
+ l_pair = [
+ (0, 1), (0, 2), (1, 3), (2, 4), # Head
+ (5, 6), (5, 7), (7, 9), (6, 8), (8, 10),
+ (17, 11), (17, 12), # Body
+ (11, 13), (12, 14), (13, 15), (14, 16)
+ ]
+
+ p_color = [(0, 255, 255), (0, 191, 255), (0, 255, 102), (0, 77, 255), (0, 255, 0), # Nose, LEye, REye, LEar, REar
+ (77, 255, 255), (77, 255, 204), (77, 204, 255), (191, 255, 77), (77, 191, 255), (191, 255, 77),
+ # LShoulder, RShoulder, LElbow, RElbow, LWrist, RWrist
+ (204, 77, 255), (77, 255, 204), (191, 77, 255), (77, 255, 191), (127, 77, 255), (77, 255, 127),
+ (0, 255, 255)] # LHip, RHip, LKnee, Rknee, LAnkle, RAnkle, Neck
+ line_color = [(0, 215, 255), (0, 255, 204), (0, 134, 255), (0, 255, 50),
+ (77, 255, 222), (77, 196, 255), (77, 135, 255), (191, 255, 77), (77, 255, 77),
+ (77, 222, 255), (255, 156, 127),
+ (0, 127, 255), (255, 127, 77), (0, 77, 255), (255, 77, 36)]
+ elif format == 'mpii':
+ l_pair = [
+ (8, 9), (11, 12), (11, 10), (2, 1), (1, 0),
+ (13, 14), (14, 15), (3, 4), (4, 5),
+ (8, 7), (7, 6), (6, 2), (6, 3), (8, 12), (8, 13)
+ ]
+ p_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE, RED, RED, PURPLE, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+ line_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE, RED, RED, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+ else:
+ raise NotImplementedError
+
+ im_name = im_res['imgname'].split('/')[-1]
+ img = frame
+ height, width = img.shape[:2]
+ img = cv2.resize(img, (int(width / 2), int(height / 2)))
+ for human in im_res['result']:
+ part_line = {}
+ kp_preds = human['keypoints']
+ kp_scores = human['kp_score']
+ kp_preds = torch.cat((kp_preds, torch.unsqueeze((kp_preds[5, :] + kp_preds[6, :]) / 2, 0)))
+ kp_scores = torch.cat((kp_scores, torch.unsqueeze((kp_scores[5, :] + kp_scores[6, :]) / 2, 0)))
+
+ # Draw keypoints
+ for n in range(kp_scores.shape[0]):
+ if kp_scores[n] <= 0.05:
+ continue
+ cor_x, cor_y = int(kp_preds[n, 0]), int(kp_preds[n, 1])
+ part_line[n] = (int(cor_x / 2), int(cor_y / 2))
+ bg = img.copy()
+ cv2.circle(bg, (int(cor_x / 2), int(cor_y / 2)), 2, p_color[n], -1)
+ # Now create a mask of logo and create its inverse mask also
+ transparency = max(0, min(1, kp_scores[n].item()))
+ img = cv2.addWeighted(bg, transparency, img, 1 - transparency, 0)
+
+        # Draw the proposal score above the head; the eye midpoint is divided
+        # by 4 (midpoint / 2, then / 2 again because the frame was downscaled)
+        middle_eye = (kp_preds[1] + kp_preds[2]) / 4
+        middle_cor = int(middle_eye[0]) - 10, int(middle_eye[1]) - 12
+ cv2.putText(img, f"{human['proposal_score'].item():.2f}", middle_cor, cv2.FONT_HERSHEY_SIMPLEX, 0.3, (0, 0, 255))
+
+ # Draw limbs
+ for i, (start_p, end_p) in enumerate(l_pair):
+ if start_p in part_line and end_p in part_line:
+ start_xy = part_line[start_p]
+ end_xy = part_line[end_p]
+ bg = img.copy()
+
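+                # Draw the limb as a filled ellipse between the two joints; its
+                # width grows with the joint confidences, and the result is
+                # alpha-blended onto the frame below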
+ X = (start_xy[0], end_xy[0])
+ Y = (start_xy[1], end_xy[1])
+ mX = np.mean(X)
+ mY = np.mean(Y)
+ length = ((Y[0] - Y[1]) ** 2 + (X[0] - X[1]) ** 2) ** 0.5
+ angle = math.degrees(math.atan2(Y[0] - Y[1], X[0] - X[1]))
+ stickwidth = (kp_scores[start_p] + kp_scores[end_p]) + 1
+ polygon = cv2.ellipse2Poly((int(mX), int(mY)), (int(length / 2), int(stickwidth)), int(angle), 0, 360, 1)
+ cv2.fillConvexPoly(bg, polygon, line_color[i])
+ # cv2.line(bg, start_xy, end_xy, line_color[i], (2 * (kp_scores[start_p] + kp_scores[end_p])) + 1)
+ transparency = max(0, min(1, 0.5 * (kp_scores[start_p] + kp_scores[end_p]).item()))
+ img = cv2.addWeighted(bg, transparency, img, 1 - transparency, 0)
+ img = cv2.resize(img, (width, height), interpolation=cv2.INTER_CUBIC)
+ return img
+
+
+def getTime(time1=0):
+ if not time1:
+ return time.time()
+ else:
+ interval = time.time() - time1
+ return time.time(), interval
diff --git a/joints_detectors/Alphapose/gene_npz.py b/joints_detectors/Alphapose/gene_npz.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2cb1a897997369405df75b3e09b02912892988f
--- /dev/null
+++ b/joints_detectors/Alphapose/gene_npz.py
@@ -0,0 +1,198 @@
+import ntpath
+import os
+import shutil
+import sys
+
+import numpy as np
+import torch.utils.data
+from tqdm import tqdm
+
+from SPPE.src.main_fast_inference import *
+from common.utils import calculate_area
+from dataloader import DetectionLoader, DetectionProcessor, DataWriter, Mscoco, VideoLoader
+from fn import getTime
+from opt import opt
+from pPose_nms import write_json
+
+args = opt
+args.dataset = 'coco'
+args.fast_inference = False
+args.save_img = True
+args.sp = True
+if not args.sp:
+ torch.multiprocessing.set_start_method('forkserver', force=True)
+ torch.multiprocessing.set_sharing_strategy('file_system')
+
+
+def model_load():
+ model = None
+ return model
+
+
+def image_interface(model, image):
+ pass
+
+
+def generate_kpts(video_file):
+ final_result, video_name = handle_video(video_file)
+
+ # ============ Changing ++++++++++
+
+ kpts = []
+ no_person = []
+ for i in range(len(final_result)):
+ if not final_result[i]['result']: # No people
+ no_person.append(i)
+ kpts.append(None)
+ continue
+
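+        # Keep the most salient person: proposal score weighted by bounding-box
+        # area, so large, confident detections win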
+ kpt = max(final_result[i]['result'],
+ key=lambda x: x['proposal_score'].data[0] * calculate_area(x['keypoints']), )['keypoints']
+
+ kpts.append(kpt.data.numpy())
+
+        # Backfill frames seen before the first detection with the first
+        # detected pose
+        for n in no_person:
+            kpts[n] = kpts[-1]
+        no_person.clear()
+
+    # Forward-fill trailing frames that never got a detection
+    for n in no_person:
+        kpts[n] = kpts[-1] if kpts[-1] is not None else kpts[n - 1]
+
+ # ============ Changing End ++++++++++
+
+ name = f'{args.outputpath}/{video_name}.npz'
+ kpts = np.array(kpts).astype(np.float32)
+ print('kpts npz save in ', name)
+ np.savez_compressed(name, kpts=kpts)
+
+ return kpts
+
+
+def handle_video(video_file):
+ # =========== common ===============
+ args.video = video_file
+ base_name = os.path.basename(args.video)
+ video_name = base_name[:base_name.rfind('.')]
+ # =========== end common ===============
+ # =========== image ===============
+ # img_path = f'outputs/alpha_pose_{video_name}/split_image/'
+ # args.inputpath = img_path
+ # args.outputpath = f'outputs/alpha_pose_{video_name}'
+ # if os.path.exists(args.outputpath):
+ # shutil.rmtree(f'{args.outputpath}/vis', ignore_errors=True)
+ # else:
+ # os.mkdir(args.outputpath)
+ #
+ # # if not len(video_file):
+ # # raise IOError('Error: must contain --video')
+ #
+ # if len(img_path) and img_path != '/':
+ # for root, dirs, files in os.walk(img_path):
+ # im_names = sorted([f for f in files if 'png' in f or 'jpg' in f])
+ # else:
+ # raise IOError('Error: must contain either --indir/--list')
+ #
+ # # Load input images
+ # data_loader = ImageLoader(im_names, batchSize=args.detbatch, format='yolo').start()
+ # print(f'Totally {data_loader.datalen} images')
+ # =========== end image ===============
+ # =========== video ===============
+ args.outputpath = f'output_alphapose/{video_name}'
+ if os.path.exists(args.outputpath):
+ shutil.rmtree(f'{args.outputpath}/vis', ignore_errors=True)
+ else:
+        os.makedirs(args.outputpath)  # also creates missing parent directories
+ videofile = args.video
+ mode = args.mode
+ if not len(videofile):
+ raise IOError('Error: must contain --video')
+ # Load input video
+ data_loader = VideoLoader(videofile, batchSize=args.detbatch).start()
+ (fourcc, fps, frameSize) = data_loader.videoinfo()
+    print('The video runs at {} FPS'.format(fps))
+ # =========== end video ===============
+ # Load detection loader
+ print('Loading YOLO model..')
+ sys.stdout.flush()
+ det_loader = DetectionLoader(data_loader, batchSize=args.detbatch).start()
+ # start a thread to read frames from the file video stream
+ det_processor = DetectionProcessor(det_loader).start()
+ # Load pose model
+ pose_dataset = Mscoco()
+ if args.fast_inference:
+ pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
+ else:
+ pose_model = InferenNet(4 * 1 + 1, pose_dataset)
+ pose_model.cuda()
+ pose_model.eval()
+ runtime_profile = {
+ 'dt': [],
+ 'pt': [],
+ 'pn': []
+ }
+ # Data writer
+ save_path = os.path.join(args.outputpath, 'AlphaPose_' + ntpath.basename(video_file).split('.')[0] + '.avi')
+ # writer = DataWriter(args.save_video, save_path, cv2.VideoWriter_fourcc(*'XVID'), fps, frameSize).start()
+ writer = DataWriter(args.save_video).start()
+ print('Start pose estimation...')
+ im_names_desc = tqdm(range(data_loader.length()))
+ batchSize = args.posebatch
+ for i in im_names_desc:
+
+ start_time = getTime()
+ with torch.no_grad():
+ (inps, orig_img, im_name, boxes, scores, pt1, pt2) = det_processor.read()
+ if orig_img is None:
+ print(f'{i}-th image read None: handle_video')
+ break
+ if boxes is None or boxes.nelement() == 0:
+ writer.save(None, None, None, None, None, orig_img, im_name.split('/')[-1])
+ continue
+
+ ckpt_time, det_time = getTime(start_time)
+ runtime_profile['dt'].append(det_time)
+ # Pose Estimation
+
+ datalen = inps.size(0)
+ leftover = 0
+ if datalen % batchSize:
+ leftover = 1
+ num_batches = datalen // batchSize + leftover
+ hm = []
+ for j in range(num_batches):
+ inps_j = inps[j * batchSize:min((j + 1) * batchSize, datalen)].cuda()
+ hm_j = pose_model(inps_j)
+ hm.append(hm_j)
+ hm = torch.cat(hm)
+ ckpt_time, pose_time = getTime(ckpt_time)
+ runtime_profile['pt'].append(pose_time)
+
+ hm = hm.cpu().data
+ writer.save(boxes, scores, hm, pt1, pt2, orig_img, im_name.split('/')[-1])
+
+ ckpt_time, post_time = getTime(ckpt_time)
+ runtime_profile['pn'].append(post_time)
+
+ if args.profile:
+ # TQDM
+ im_names_desc.set_description(
+ 'det time: {dt:.4f} | pose time: {pt:.4f} | post processing: {pn:.4f}'.format(
+ dt=np.mean(runtime_profile['dt']), pt=np.mean(runtime_profile['pt']), pn=np.mean(runtime_profile['pn']))
+ )
+ if (args.save_img or args.save_video) and not args.vis_fast:
+ print('===========================> Rendering remaining images in the queue...')
+ print('===========================> If this step takes too long, you can enable the --vis_fast flag to use fast rendering (real-time).')
+ while writer.running():
+ pass
+ writer.stop()
+ final_result = writer.results()
+ write_json(final_result, args.outputpath)
+
+ return final_result, video_name
+
+
+if __name__ == "__main__":
+ os.chdir('../..')
+ print(os.getcwd())
+
+ # handle_video(img_path='outputs/image/kobe')
+ generate_kpts('outputs/dance.mp4')
diff --git a/joints_detectors/Alphapose/matching.py b/joints_detectors/Alphapose/matching.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa1bdf809a36808492a973368519a8a8f599f235
--- /dev/null
+++ b/joints_detectors/Alphapose/matching.py
@@ -0,0 +1,229 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import numpy as np
+import torch
+from scipy.optimize import linear_sum_assignment
+
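+# Per-keypoint tolerance constants (10x the COCO OKS sigmas); joints that are
+# harder to localize precisely (e.g. hips) get larger tolerances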
+sigmas = np.array([.26, .25, .25, .35, .35, .79, .79, .72, .72, .62, .62, 1.07, 1.07, .87, .87, .89, .89])
+
+
+def candidate_reselect(bboxes, bboxes_scores, pose_preds):
+ '''
+ Grouping
+ '''
+    # Group the same keypoints together
+ kp_groups = grouping(bboxes, bboxes_scores, pose_preds)
+
+ '''
+ Re-select
+ '''
+
+ # Generate Matrix
+ human_num = len(pose_preds.keys())
+ costMatrix = []
+ for k in range(17):
+ kp_group = kp_groups[k]
+ joint_num = len(kp_group.keys())
+
+ costMatrix.append(np.zeros((human_num, joint_num)))
+
+ group_size = {k: {} for k in range(17)}
+
+ for n, person in pose_preds.items():
+ h_id = n
+ assert 0 <= h_id < human_num
+
+ for k in range(17):
+ g_id = person['group_id'][k]
+ if g_id is not None:
+ if g_id not in group_size[k].keys():
+ group_size[k][g_id] = 0
+
+ group_size[k][g_id] += 1
+
+ g_id = int(g_id) - 1
+ _, _, score = person[k][0]
+ h_score = person['human_score']
+
+ if score < 0.05:
+ costMatrix[k][h_id][g_id] = 0
+ else:
+ costMatrix[k][h_id][g_id] = -(h_score * score)
+
+ pose_preds = matching(pose_preds, costMatrix, kp_groups)
+
+ # To JSON
+ final_result = []
+
+ for n, person in pose_preds.items():
+
+ final_pose = torch.zeros(17, 2)
+ final_score = torch.zeros(17, 1)
+
+ max_score = 0
+ mean_score = 0
+
+ xmax, xmin = 0, 1e5
+ ymax, ymin = 0, 1e5
+
+ for k in range(17):
+ assert len(person[k]) > 0
+ x, y, s = person[k][0]
+
+ xmax = max(xmax, x)
+ xmin = min(xmin, x)
+ ymax = max(ymax, y)
+ ymin = min(ymin, y)
+
+ final_pose[k][0] = x.item() - 0.3
+ final_pose[k][1] = y.item() - 0.3
+ final_score[k] = s.item()
+ mean_score += (s.item() / 17)
+ max_score = max(max_score, s.item())
+
+ if torch.max(final_score).item() < 0.1:
+ continue
+
+ if (1.5 ** 2 * (xmax - xmin) * (ymax - ymin) < 40 * 40):
+ continue
+
+ final_result.append({
+ 'keypoints': final_pose,
+ 'kp_score': final_score,
+ 'proposal_score': mean_score + max_score + person['bbox_score']
+ })
+
+ return final_result
+
+
+def grouping(bboxes, bboxes_scores, pose_preds):
+ kp_groups = {}
+ for k in range(17):
+ kp_groups[k] = {}
+
+ ids = np.zeros(17)
+
+ for n, person in pose_preds.items():
+ pose_preds[n]['bbox'] = bboxes[n]
+ pose_preds[n]['bbox_score'] = bboxes_scores[n]
+ pose_preds[n]['group_id'] = {}
+ s = 0
+
+ for k in range(17):
+ pose_preds[n]['group_id'][k] = None
+ pose_preds[n][k] = np.array(pose_preds[n][k])
+ assert len(pose_preds[n][k]) > 0
+ s += pose_preds[n][k][0][-1]
+
+ s = s / 17
+
+ pose_preds[n]['human_score'] = s
+
+ for k in range(17):
+ latest_id = ids[k]
+ kp_group = kp_groups[k]
+
+ assert len(person[k]) > 0
+ x0, y0, s0 = person[k][0]
+ if s0 < 0.05:
+ continue
+
+ for g_id, g in kp_group.items():
+ x_c, y_c = kp_group[g_id]['group_center']
+
+ '''
+ Get Average Box Size
+ '''
+ group_area = kp_group[g_id]['group_area']
+ group_area = group_area[0] * group_area[1] / (group_area[2] ** 2)
+
+ '''
+                Grouping Criterion
+ '''
+ # Joint Group
+ dist = np.sqrt(
+ ((x_c - x0) ** 2 + (y_c - y0) ** 2) / group_area)
+
+ if dist <= 0.1 * sigmas[k]: # Small Distance
+ if s0 >= 0.3:
+ kp_group[g_id]['kp_list'][0] += x0 * s0
+ kp_group[g_id]['kp_list'][1] += y0 * s0
+ kp_group[g_id]['kp_list'][2] += s0
+
+ kp_group[g_id]['group_area'][0] += (person['bbox'][2] - person['bbox'][0]) * person['human_score']
+ kp_group[g_id]['group_area'][1] += (person['bbox'][3] - person['bbox'][1]) * person['human_score']
+ kp_group[g_id]['group_area'][2] += person['human_score']
+
+ x_c = kp_group[g_id]['kp_list'][0] / kp_group[g_id]['kp_list'][2]
+ y_c = kp_group[g_id]['kp_list'][1] / kp_group[g_id]['kp_list'][2]
+ kp_group[g_id]['group_center'] = (x_c, y_c)
+
+ pose_preds[n]['group_id'][k] = g_id
+ break
+            else:
+                # for-else: no existing group matched (the loop ended without
+                # a break), so start a new keypoint group
+ latest_id += 1
+ kp_group[latest_id] = {
+ 'kp_list': None,
+ 'group_center': person[k][0].copy()[:2],
+ 'group_area': None
+ }
+
+ x, y, s = person[k][0]
+ kp_group[latest_id]['kp_list'] = np.array((x * s, y * s, s))
+
+ # Ref Area
+ ref_width = person['bbox'][2] - person['bbox'][0]
+ ref_height = person['bbox'][3] - person['bbox'][1]
+ ref_score = person['human_score']
+ kp_group[latest_id]['group_area'] = np.array((
+ ref_width * ref_score, ref_height * ref_score, ref_score))
+
+ pose_preds[n]['group_id'][k] = latest_id
+ ids[k] = latest_id
+ return kp_groups
+
+
+def matching(pose_preds, matrix, kp_groups):
+ index = []
+ for k in range(17):
+ human_ind, joint_ind = linear_sum_assignment(matrix[k])
+ # human_ind, joint_ind = greedy_matching(matrix[k])
+
+ index.append(list(zip(human_ind, joint_ind)))
+
+ for n, person in pose_preds.items():
+ for k in range(17):
+ g_id = person['group_id'][k]
+ if g_id is not None:
+ g_id = int(g_id) - 1
+ h_id = n
+
+ x, y, s = pose_preds[n][k][0]
+ if ((h_id, g_id) not in index[k]) and len(pose_preds[n][k]) > 1:
+ pose_preds[n][k] = np.delete(pose_preds[n][k], 0, 0)
+ elif ((h_id, g_id) not in index[k]) and len(person[k]) == 1:
+ x, y, _ = pose_preds[n][k][0]
+ pose_preds[n][k][0] = (x, y, 1e-5)
+ pass
+ elif ((h_id, g_id) in index[k]):
+ x, y = kp_groups[k][g_id + 1]['group_center']
+ s = pose_preds[n][k][0][2]
+ pose_preds[n][k][0] = (x, y, s)
+
+ return pose_preds
+
+
+def greedy_matching(matrix):
+    # Greedy alternative to the Hungarian assignment used in matching():
+    # give every joint group to the human with the lowest cost
+    num_human, num_joint = matrix.shape
+
+    human_ind = np.argmin(matrix, axis=0)
+    joint_ind = np.arange(num_joint)
+
+    return human_ind.tolist(), joint_ind.tolist()
diff --git a/joints_detectors/Alphapose/models/sppe/.gitkeep b/joints_detectors/Alphapose/models/sppe/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/models/ssd/.gitkeep b/joints_detectors/Alphapose/models/ssd/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/models/yolo/.gitkeep b/joints_detectors/Alphapose/models/yolo/.gitkeep
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/online_demo.py b/joints_detectors/Alphapose/online_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..86017b1aecaf9e0e6f82a7eaf1006a50ed49493d
--- /dev/null
+++ b/joints_detectors/Alphapose/online_demo.py
@@ -0,0 +1,152 @@
+import torch
+from torch.autograd import Variable
+import torch.nn.functional as F
+import torchvision.transforms as transforms
+
+import torch.nn as nn
+import torch.utils.data
+import numpy as np
+from opt import opt
+
+from dataloader import WebcamLoader, DataWriter, crop_from_dets, Mscoco
+from yolo.darknet import Darknet
+from yolo.util import write_results, dynamic_write_results
+from SPPE.src.main_fast_inference import *
+
+from SPPE.src.utils.img import im_to_torch
+import os
+import sys
+from tqdm import tqdm
+import time
+from fn import getTime
+import cv2
+
+from pPose_nms import write_json
+
+args = opt
+args.dataset = 'coco'
+
+
+def loop():
+ n = 0
+ while True:
+ yield n
+ n += 1
+
+
+if __name__ == "__main__":
+ webcam = args.webcam
+ mode = args.mode
+ if not os.path.exists(args.outputpath):
+ os.mkdir(args.outputpath)
+
+ # Load input video
+ fvs = WebcamLoader(webcam).start()
+ (fourcc, fps, frameSize) = fvs.videoinfo()
+ # Data writer
+ save_path = os.path.join(args.outputpath, 'AlphaPose_webcam' + webcam + '.avi')
+ writer = DataWriter(args.save_video, save_path, cv2.VideoWriter_fourcc(*'XVID'), fps, frameSize).start()
+
+ # Load YOLO model
+ print('Loading YOLO model..')
+ sys.stdout.flush()
+ det_model = Darknet("yolo/cfg/yolov3-spp.cfg")
+ det_model.load_weights('models/yolo/yolov3-spp.weights')
+ det_model.net_info['height'] = args.inp_dim
+ det_inp_dim = int(det_model.net_info['height'])
+ assert det_inp_dim % 32 == 0
+ assert det_inp_dim > 32
+ det_model.cuda()
+ det_model.eval()
+
+ # Load pose model
+ pose_dataset = Mscoco()
+ if args.fast_inference:
+ pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
+ else:
+ pose_model = InferenNet(4 * 1 + 1, pose_dataset)
+ pose_model.cuda()
+ pose_model.eval()
+
+ runtime_profile = {
+ 'ld': [],
+ 'dt': [],
+ 'dn': [],
+ 'pt': [],
+ 'pn': []
+ }
+
+ print('Starting webcam demo, press Ctrl + C to terminate...')
+ sys.stdout.flush()
+ im_names_desc = tqdm(loop())
+ for i in im_names_desc:
+ try:
+ start_time = getTime()
+
+ (img, orig_img, inp, im_dim_list) = fvs.read()
+ ckpt_time, load_time = getTime(start_time)
+ runtime_profile['ld'].append(load_time)
+ with torch.no_grad():
+ # Human Detection
+ img = Variable(img).cuda()
+ im_dim_list = im_dim_list.cuda()
+
+ prediction = det_model(img, CUDA=True)
+ ckpt_time, det_time = getTime(ckpt_time)
+ runtime_profile['dt'].append(det_time)
+ # NMS process
+ dets = dynamic_write_results(prediction, opt.confidence,
+ opt.num_classes, nms=True, nms_conf=opt.nms_thesh)
+ if isinstance(dets, int) or dets.shape[0] == 0:
+ writer.save(None, None, None, None, None, orig_img, im_name=str(i) + '.jpg')
+ continue
+ im_dim_list = torch.index_select(im_dim_list, 0, dets[:, 0].long())
+ scaling_factor = torch.min(det_inp_dim / im_dim_list, 1)[0].view(-1, 1)
+
+                # Transfer coordinates from the letterboxed detector input back
+                # to the original image: remove the padding offset, then undo scaling
+ dets[:, [1, 3]] -= (det_inp_dim - scaling_factor * im_dim_list[:, 0].view(-1, 1)) / 2
+ dets[:, [2, 4]] -= (det_inp_dim - scaling_factor * im_dim_list[:, 1].view(-1, 1)) / 2
+
+ dets[:, 1:5] /= scaling_factor
+ for j in range(dets.shape[0]):
+ dets[j, [1, 3]] = torch.clamp(dets[j, [1, 3]], 0.0, im_dim_list[j, 0])
+ dets[j, [2, 4]] = torch.clamp(dets[j, [2, 4]], 0.0, im_dim_list[j, 1])
+ boxes = dets[:, 1:5].cpu()
+ scores = dets[:, 5:6].cpu()
+ ckpt_time, detNMS_time = getTime(ckpt_time)
+ runtime_profile['dn'].append(detNMS_time)
+ # Pose Estimation
+ inps = torch.zeros(boxes.size(0), 3, opt.inputResH, opt.inputResW)
+ pt1 = torch.zeros(boxes.size(0), 2)
+ pt2 = torch.zeros(boxes.size(0), 2)
+ inps, pt1, pt2 = crop_from_dets(inp, boxes, inps, pt1, pt2)
+ inps = Variable(inps.cuda())
+
+ hm = pose_model(inps)
+ ckpt_time, pose_time = getTime(ckpt_time)
+ runtime_profile['pt'].append(pose_time)
+
+ writer.save(boxes, scores, hm.cpu(), pt1, pt2, orig_img, im_name=str(i) + '.jpg')
+
+ ckpt_time, post_time = getTime(ckpt_time)
+ runtime_profile['pn'].append(post_time)
+
+ # TQDM
+ im_names_desc.set_description(
+ 'load time: {ld:.4f} | det time: {dt:.4f} | det NMS: {dn:.4f} | pose time: {pt:.4f} | post process: {pn:.4f}'.format(
+ ld=np.mean(runtime_profile['ld']), dt=np.mean(runtime_profile['dt']), dn=np.mean(runtime_profile['dn']),
+ pt=np.mean(runtime_profile['pt']), pn=np.mean(runtime_profile['pn']))
+ )
+ except KeyboardInterrupt:
+ break
+
+ print(' ')
+ print('===========================> Finish Model Running.')
+ if (args.save_img or args.save_video) and not args.vis_fast:
+ print('===========================> Rendering remaining images in the queue...')
+ print('===========================> If this step takes too long, you can enable the --vis_fast flag to use fast rendering (real-time).')
+ while writer.running():
+ pass
+ writer.stop()
+ final_result = writer.results()
+ write_json(final_result, args.outputpath)
diff --git a/joints_detectors/Alphapose/opt.py b/joints_detectors/Alphapose/opt.py
new file mode 100644
index 0000000000000000000000000000000000000000..192ed325af080361fcd32b891ba5ab04e90e5712
--- /dev/null
+++ b/joints_detectors/Alphapose/opt.py
@@ -0,0 +1,146 @@
+import argparse
+
+parser = argparse.ArgumentParser(description='PyTorch AlphaPose Training')
+
+"----------------------------- General options -----------------------------"
+parser.add_argument('--expID', default='default', type=str,
+ help='Experiment ID')
+parser.add_argument('--dataset', default='coco', type=str,
+ help='Dataset choice: mpii | coco')
+parser.add_argument('--nThreads', default=30, type=int,
+ help='Number of data loading threads')
+parser.add_argument('--debug', default=False, type=bool,
+ help='Print the debug information')
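+# NOTE: argparse's type=bool does not parse strings as one might expect
+# ("False" is truthy); the bool-typed options here are typically overridden
+# in code instead (e.g. gene_npz.py sets args.fast_inference directly).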
+parser.add_argument('--snapshot', default=1, type=int,
+ help='How often to take a snapshot of the model (0 = never)')
+
+"----------------------------- AlphaPose options -----------------------------"
+parser.add_argument('--addDPG', default=False, type=bool,
+ help='Train with data augmentation')
+parser.add_argument('--sp', default=False, action='store_true',
+ help='Use single process for pytorch')
+parser.add_argument('--profile', default=False, action='store_true',
+ help='add speed profiling at screen output')
+
+"----------------------------- Model options -----------------------------"
+parser.add_argument('--netType', default='hgPRM', type=str,
+ help='Options: hgPRM | resnext')
+parser.add_argument('--loadModel', default=None, type=str,
+ help='Provide full path to a previously trained model')
+parser.add_argument('--Continue', default=False, type=bool,
+ help='Pick up where an experiment left off')
+parser.add_argument('--nFeats', default=256, type=int,
+ help='Number of features in the hourglass')
+parser.add_argument('--nClasses', default=33, type=int,
+ help='Number of output channel')
+parser.add_argument('--nStack', default=4, type=int,
+ help='Number of hourglasses to stack')
+
+"----------------------------- Hyperparameter options -----------------------------"
+parser.add_argument('--fast_inference', default=True, type=bool,
+ help='Fast inference')
+parser.add_argument('--use_pyranet', default=True, type=bool,
+ help='use pyranet')
+
+"----------------------------- Hyperparameter options -----------------------------"
+parser.add_argument('--LR', default=2.5e-4, type=float,
+ help='Learning rate')
+parser.add_argument('--momentum', default=0, type=float,
+ help='Momentum')
+parser.add_argument('--weightDecay', default=0, type=float,
+ help='Weight decay')
+parser.add_argument('--crit', default='MSE', type=str,
+ help='Criterion type')
+parser.add_argument('--optMethod', default='rmsprop', type=str,
+ help='Optimization method: rmsprop | sgd | nag | adadelta')
+
+"----------------------------- Training options -----------------------------"
+parser.add_argument('--nEpochs', default=50, type=int,
+                    help='Number of training epochs')
+parser.add_argument('--epoch', default=0, type=int,
+ help='Current epoch')
+parser.add_argument('--trainBatch', default=40, type=int,
+ help='Train-batch size')
+parser.add_argument('--validBatch', default=20, type=int,
+ help='Valid-batch size')
+parser.add_argument('--trainIters', default=0, type=int,
+ help='Total train iters')
+parser.add_argument('--valIters', default=0, type=int,
+ help='Total valid iters')
+parser.add_argument('--init', default=None, type=str,
+ help='Initialization')
+
+"----------------------------- Data options -----------------------------"
+parser.add_argument('--inputResH', default=320, type=int,
+ help='Input image height')
+parser.add_argument('--inputResW', default=256, type=int,
+ help='Input image width')
+parser.add_argument('--outputResH', default=80, type=int,
+ help='Output heatmap height')
+parser.add_argument('--outputResW', default=64, type=int,
+ help='Output heatmap width')
+parser.add_argument('--scale', default=0.25, type=float,
+ help='Degree of scale augmentation')
+parser.add_argument('--rotate', default=30, type=float,
+ help='Degree of rotation augmentation')
+parser.add_argument('--hmGauss', default=1, type=int,
+ help='Heatmap gaussian size')
+
+"----------------------------- PyraNet options -----------------------------"
+parser.add_argument('--baseWidth', default=9, type=int,
+                    help='Base width of the Pyramid Residual Module')
+parser.add_argument('--cardinality', default=5, type=int,
+                    help='Cardinality (number of branches) of the Pyramid Residual Module')
+parser.add_argument('--nResidual', default=1, type=int,
+ help='Number of residual modules at each location in the pyranet')
+
+"----------------------------- Distribution options -----------------------------"
+parser.add_argument('--dist', dest='dist', type=int, default=1,
+ help='distributed training or not')
+parser.add_argument('--backend', dest='backend', type=str, default='gloo',
+ help='backend for distributed training')
+parser.add_argument('--port', dest='port',
+ help='port of server')
+
+"----------------------------- Detection options -----------------------------"
+parser.add_argument('--net', dest='demo_net', help='Network to use [vgg16 res101]',
+ default='res152')
+parser.add_argument('--indir', dest='inputpath',
+ help='image-directory', default="")
+parser.add_argument('--list', dest='inputlist',
+ help='image-list', default="")
+parser.add_argument('--mode', dest='mode',
+ help='detection mode, fast/normal/accurate', default="normal")
+parser.add_argument('--outdir', dest='outputpath',
+ help='output-directory', default="examples/res/")
+parser.add_argument('--inp_dim', dest='inp_dim', type=str, default='608',
+                    help='input dimension of the detection network (must be a multiple of 32)')
+parser.add_argument('--conf', dest='confidence', type=float, default=0.05,
+ help='bounding box confidence threshold')
+parser.add_argument('--nms', dest='nms_thesh', type=float, default=0.6,
+ help='bounding box nms threshold')
+parser.add_argument('--save_img', default=False, action='store_true',
+ help='save result as image')
+parser.add_argument('--vis', default=False, action='store_true',
+ help='visualize image')
+parser.add_argument('--matching', default=False, action='store_true',
+ help='use best matching')
+parser.add_argument('--format', type=str,
+ help='save in the format of cmu or coco or openpose, option: coco/cmu/open')
+parser.add_argument('--detbatch', type=int, default=1,
+ help='detection batch size')
+parser.add_argument('--posebatch', type=int, default=80,
+ help='pose estimation maximum batch size')
+
+"----------------------------- Video options -----------------------------"
+parser.add_argument('--video', dest='video',
+ help='video-name', default="")
+parser.add_argument('--webcam', dest='webcam', type=str,
+ help='webcam number', default='0')
+parser.add_argument('--save_video', dest='save_video',
+ help='whether to save rendered video', default=False, action='store_true')
+parser.add_argument('--vis_fast', dest='vis_fast',
+ help='use fast rendering', action='store_true', default=False)
+opt = parser.parse_args()
+
+opt.num_classes = 80
diff --git a/joints_detectors/Alphapose/pPose_nms.py b/joints_detectors/Alphapose/pPose_nms.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c041e145445cc2ccd2ea6034a37fa2c4bb3b24e
--- /dev/null
+++ b/joints_detectors/Alphapose/pPose_nms.py
@@ -0,0 +1,363 @@
+# -*- coding: utf-8 -*-
+import torch
+import json
+import os
+import zipfile
+import time
+from multiprocessing.dummy import Pool as ThreadPool
+import numpy as np
+from opt import opt
+
+''' Constant Configuration '''
+delta1 = 1          # temperature of the tanh score-similarity term
+mu = 1.7            # weight of the spatial-proximity term
+delta2 = 2.65       # length scale of the spatial-proximity term
+gamma = 22.48       # similarity threshold above which a pose is eliminated
+scoreThreds = 0.3   # a pose is dropped if its best keypoint score is below this
+matchThreds = 5     # minimum number of PCK-matched keypoints for elimination
+areaThres = 0       # minimum pose area in pixels (disabled; was 40 * 40.5)
+alpha = 0.1         # reference distance as a fraction of the larger box side
+#pool = ThreadPool(4)
+
+
+def pose_nms(bboxes, bbox_scores, pose_preds, pose_scores):
+ '''
+ Parametric Pose NMS algorithm
+ bboxes: bbox locations list (n, 4)
+ bbox_scores: bbox scores list (n,)
+ pose_preds: pose locations list (n, 17, 2)
+ pose_scores: pose scores list (n, 17, 1)
+ '''
+ #global ori_pose_preds, ori_pose_scores, ref_dists
+
+ pose_scores[pose_scores == 0] = 1e-5
+
+ final_result = []
+
+ ori_bbox_scores = bbox_scores.clone()
+ ori_pose_preds = pose_preds.clone()
+ ori_pose_scores = pose_scores.clone()
+
+ xmax = bboxes[:, 2]
+ xmin = bboxes[:, 0]
+ ymax = bboxes[:, 3]
+ ymin = bboxes[:, 1]
+
+ widths = xmax - xmin
+ heights = ymax - ymin
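+    # Reference distance per person: a fraction (alpha) of the larger bbox
+    # side, used to normalize keypoint distances below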
+ ref_dists = alpha * np.maximum(widths, heights)
+
+ nsamples = bboxes.shape[0]
+ human_scores = pose_scores.mean(dim=1)
+
+ human_ids = np.arange(nsamples)
+ # Do pPose-NMS
+ pick = []
+ merge_ids = []
+ while(human_scores.shape[0] != 0):
+ # Pick the one with highest score
+ pick_id = torch.argmax(human_scores)
+ pick.append(human_ids[pick_id])
+ # num_visPart = torch.sum(pose_scores[pick_id] > 0.2)
+
+ # Get numbers of match keypoints by calling PCK_match
+ ref_dist = ref_dists[human_ids[pick_id]]
+ simi = get_parametric_distance(pick_id, pose_preds, pose_scores, ref_dist)
+ num_match_keypoints = PCK_match(pose_preds[pick_id], pose_preds, ref_dist)
+
+ # Delete humans who have more than matchThreds keypoints overlap and high similarity
+ delete_ids = torch.from_numpy(np.arange(human_scores.shape[0]))[(simi > gamma) | (num_match_keypoints >= matchThreds)]
+
+ if delete_ids.shape[0] == 0:
+ delete_ids = pick_id
+ #else:
+ # delete_ids = torch.from_numpy(delete_ids)
+
+ merge_ids.append(human_ids[delete_ids])
+ pose_preds = np.delete(pose_preds, delete_ids, axis=0)
+ pose_scores = np.delete(pose_scores, delete_ids, axis=0)
+ human_ids = np.delete(human_ids, delete_ids)
+ human_scores = np.delete(human_scores, delete_ids, axis=0)
+ bbox_scores = np.delete(bbox_scores, delete_ids, axis=0)
+
+ assert len(merge_ids) == len(pick)
+ preds_pick = ori_pose_preds[pick]
+ scores_pick = ori_pose_scores[pick]
+ bbox_scores_pick = ori_bbox_scores[pick]
+ #final_result = pool.map(filter_result, zip(scores_pick, merge_ids, preds_pick, pick, bbox_scores_pick))
+ #final_result = [item for item in final_result if item is not None]
+
+ for j in range(len(pick)):
+ ids = np.arange(17)
+ max_score = torch.max(scores_pick[j, ids, 0])
+
+ if max_score < scoreThreds:
+ continue
+
+ # Merge poses
+ merge_id = merge_ids[j]
+ merge_pose, merge_score = p_merge_fast(
+ preds_pick[j], ori_pose_preds[merge_id], ori_pose_scores[merge_id], ref_dists[pick[j]])
+
+ max_score = torch.max(merge_score[ids])
+ if max_score < scoreThreds:
+ continue
+
+ xmax = max(merge_pose[:, 0])
+ xmin = min(merge_pose[:, 0])
+ ymax = max(merge_pose[:, 1])
+ ymin = min(merge_pose[:, 1])
+
+ if (1.5 ** 2 * (xmax - xmin) * (ymax - ymin) < areaThres):
+ continue
+
+ final_result.append({
+ 'keypoints': merge_pose - 0.3,
+ 'kp_score': merge_score,
+ 'proposal_score': torch.mean(merge_score) + bbox_scores_pick[j] + 1.25 * max(merge_score)
+ })
+
+ return final_result
+
+
+def filter_result(args):
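+    # Worker for the multiprocessing variant of pose_nms (its pool.map call is
+    # currently commented out there); it relies on module-level globals that
+    # pose_nms would need to populate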
+ score_pick, merge_id, pred_pick, pick, bbox_score_pick = args
+ global ori_pose_preds, ori_pose_scores, ref_dists
+ ids = np.arange(17)
+ max_score = torch.max(score_pick[ids, 0])
+
+ if max_score < scoreThreds:
+ return None
+
+ # Merge poses
+ merge_pose, merge_score = p_merge_fast(
+ pred_pick, ori_pose_preds[merge_id], ori_pose_scores[merge_id], ref_dists[pick])
+
+ max_score = torch.max(merge_score[ids])
+ if max_score < scoreThreds:
+ return None
+
+ xmax = max(merge_pose[:, 0])
+ xmin = min(merge_pose[:, 0])
+ ymax = max(merge_pose[:, 1])
+ ymin = min(merge_pose[:, 1])
+
+ if (1.5 ** 2 * (xmax - xmin) * (ymax - ymin) < 40 * 40.5):
+ return None
+
+ return {
+ 'keypoints': merge_pose - 0.3,
+ 'kp_score': merge_score,
+ 'proposal_score': torch.mean(merge_score) + bbox_score_pick + 1.25 * max(merge_score)
+ }
+
+
+def p_merge(ref_pose, cluster_preds, cluster_scores, ref_dist):
+ '''
+ Score-weighted pose merging
+ INPUT:
+ ref_pose: reference pose -- [17, 2]
+ cluster_preds: redundant poses -- [n, 17, 2]
+ cluster_scores: redundant poses score -- [n, 17, 1]
+ ref_dist: reference scale -- Constant
+ OUTPUT:
+ final_pose: merged pose -- [17, 2]
+ final_score: merged score -- [17]
+ '''
+ dist = torch.sqrt(torch.sum(
+ torch.pow(ref_pose[np.newaxis, :] - cluster_preds, 2),
+ dim=2
+ )) # [n, 17]
+
+ kp_num = 17
+ ref_dist = min(ref_dist, 15)
+
+ mask = (dist <= ref_dist)
+ final_pose = torch.zeros(kp_num, 2)
+ final_score = torch.zeros(kp_num)
+
+ if cluster_preds.dim() == 2:
+ cluster_preds.unsqueeze_(0)
+ cluster_scores.unsqueeze_(0)
+ if mask.dim() == 1:
+ mask.unsqueeze_(0)
+
+ for i in range(kp_num):
+ cluster_joint_scores = cluster_scores[:, i][mask[:, i]] # [k, 1]
+ cluster_joint_location = cluster_preds[:, i, :][mask[:, i].unsqueeze(
+ -1).repeat(1, 2)].view((torch.sum(mask[:, i]), -1))
+
+ # Get an normalized score
+ normed_scores = cluster_joint_scores / torch.sum(cluster_joint_scores)
+
+ # Merge poses by a weighted sum
+ final_pose[i, 0] = torch.dot(cluster_joint_location[:, 0], normed_scores.squeeze(-1))
+ final_pose[i, 1] = torch.dot(cluster_joint_location[:, 1], normed_scores.squeeze(-1))
+
+ final_score[i] = torch.dot(cluster_joint_scores.transpose(0, 1).squeeze(0), normed_scores.squeeze(-1))
+
+ return final_pose, final_score
+
+
+def p_merge_fast(ref_pose, cluster_preds, cluster_scores, ref_dist):
+ '''
+ Score-weighted pose merging
+ INPUT:
+ ref_pose: reference pose -- [17, 2]
+ cluster_preds: redundant poses -- [n, 17, 2]
+ cluster_scores: redundant poses score -- [n, 17, 1]
+ ref_dist: reference scale -- Constant
+ OUTPUT:
+ final_pose: merged pose -- [17, 2]
+ final_score: merged score -- [17]
+ '''
+ dist = torch.sqrt(torch.sum(
+ torch.pow(ref_pose[np.newaxis, :] - cluster_preds, 2),
+ dim=2
+ ))
+
+ kp_num = 17
+ ref_dist = min(ref_dist, 15)
+
+ mask = (dist <= ref_dist)
+ final_pose = torch.zeros(kp_num, 2)
+ final_score = torch.zeros(kp_num)
+
+ if cluster_preds.dim() == 2:
+ cluster_preds.unsqueeze_(0)
+ cluster_scores.unsqueeze_(0)
+ if mask.dim() == 1:
+ mask.unsqueeze_(0)
+
+ # Weighted Merge
+ masked_scores = cluster_scores.mul(mask.float().unsqueeze(-1))
+ normed_scores = masked_scores / torch.sum(masked_scores, dim=0)
+
+ final_pose = torch.mul(cluster_preds, normed_scores.repeat(1, 1, 2)).sum(dim=0)
+ final_score = torch.mul(masked_scores, normed_scores).sum(dim=0)
+ return final_pose, final_score
+
+
+def get_parametric_distance(i, all_preds, keypoint_scores, ref_dist):
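+    # Similarity of pose i to every pose: a tanh-weighted score term (counted
+    # only for keypoints within 1 px of each other) plus mu times a spatial
+    # term exp(-dist / delta2); pose_nms eliminates candidates whose total
+    # exceeds gamma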
+ pick_preds = all_preds[i]
+ pred_scores = keypoint_scores[i]
+ dist = torch.sqrt(torch.sum(
+ torch.pow(pick_preds[np.newaxis, :] - all_preds, 2),
+ dim=2
+ ))
+ mask = (dist <= 1)
+
+ # Define a keypoints distance
+ score_dists = torch.zeros(all_preds.shape[0], 17)
+ keypoint_scores.squeeze_()
+ if keypoint_scores.dim() == 1:
+ keypoint_scores.unsqueeze_(0)
+ if pred_scores.dim() == 1:
+ pred_scores.unsqueeze_(1)
+ # The predicted scores are repeated up to do broadcast
+ pred_scores = pred_scores.repeat(1, all_preds.shape[0]).transpose(0, 1)
+
+ score_dists[mask] = torch.tanh(pred_scores[mask] / delta1) * torch.tanh(keypoint_scores[mask] / delta1)
+
+ point_dist = torch.exp((-1) * dist / delta2)
+ final_dist = torch.sum(score_dists, dim=1) + mu * torch.sum(point_dist, dim=1)
+
+ return final_dist
+
+
+def PCK_match(pick_pred, all_preds, ref_dist):
+ dist = torch.sqrt(torch.sum(
+ torch.pow(pick_pred[np.newaxis, :] - all_preds, 2),
+ dim=2
+ ))
+ ref_dist = min(ref_dist, 7)
+ num_match_keypoints = torch.sum(
+ dist / ref_dist <= 1,
+ dim=1
+ )
+
+ return num_match_keypoints
+
+
+def write_json(all_results, outputpath, for_eval=False):
+    '''
+    all_results: list of per-image prediction dicts
+    outputpath: output directory
+    '''
+ form = opt.format
+ json_results = []
+ json_results_cmu = {}
+ for im_res in all_results:
+ im_name = im_res['imgname']
+ for human in im_res['result']:
+ keypoints = []
+ result = {}
+ if for_eval:
+ result['image_id'] = int(im_name.split('/')[-1].split('.')[0].split('_')[-1])
+ else:
+ result['image_id'] = im_name.split('/')[-1]
+ result['category_id'] = 1
+
+ kp_preds = human['keypoints']
+ kp_scores = human['kp_score']
+ pro_scores = human['proposal_score']
+ for n in range(kp_scores.shape[0]):
+ keypoints.append(float(kp_preds[n, 0]))
+ keypoints.append(float(kp_preds[n, 1]))
+ keypoints.append(float(kp_scores[n]))
+ result['keypoints'] = keypoints
+ result['score'] = float(pro_scores)
+
+            if form == 'cmu':  # the form of CMU-Pose
+                if result['image_id'] not in json_results_cmu.keys():
+                    json_results_cmu[result['image_id']] = {}
+                    json_results_cmu[result['image_id']]['version'] = "AlphaPose v0.2"
+                    json_results_cmu[result['image_id']]['bodies'] = []
+                tmp = {'joints': []}
+                result['keypoints'].append((result['keypoints'][15] + result['keypoints'][18]) / 2)
+                result['keypoints'].append((result['keypoints'][16] + result['keypoints'][19]) / 2)
+                result['keypoints'].append((result['keypoints'][17] + result['keypoints'][20]) / 2)
+                indexarr = [0, 51, 18, 24, 30, 15, 21, 27, 36, 42, 48, 33, 39, 45, 6, 3, 12, 9]
+                for i in indexarr:
+                    tmp['joints'].append(result['keypoints'][i])
+                    tmp['joints'].append(result['keypoints'][i + 1])
+                    tmp['joints'].append(result['keypoints'][i + 2])
+                json_results_cmu[result['image_id']]['bodies'].append(tmp)
+            elif form == 'open':  # the form of OpenPose
+                if result['image_id'] not in json_results_cmu.keys():
+                    json_results_cmu[result['image_id']] = {}
+                    json_results_cmu[result['image_id']]['version'] = "AlphaPose v0.2"
+                    json_results_cmu[result['image_id']]['people'] = []
+                tmp = {'pose_keypoints_2d': []}
+                result['keypoints'].append((result['keypoints'][15] + result['keypoints'][18]) / 2)
+                result['keypoints'].append((result['keypoints'][16] + result['keypoints'][19]) / 2)
+                result['keypoints'].append((result['keypoints'][17] + result['keypoints'][20]) / 2)
+                indexarr = [0, 51, 18, 24, 30, 15, 21, 27, 36, 42, 48, 33, 39, 45, 6, 3, 12, 9]
+                for i in indexarr:
+                    tmp['pose_keypoints_2d'].append(result['keypoints'][i])
+                    tmp['pose_keypoints_2d'].append(result['keypoints'][i + 1])
+                    tmp['pose_keypoints_2d'].append(result['keypoints'][i + 2])
+                json_results_cmu[result['image_id']]['people'].append(tmp)
+            else:
+                json_results.append(result)
+
+    if form == 'cmu':  # the form of CMU-Pose
+        with open(os.path.join(outputpath, 'alphapose-results.json'), 'w') as json_file:
+            json_file.write(json.dumps(json_results_cmu))
+        if not os.path.exists(os.path.join(outputpath, 'sep-json')):
+            os.mkdir(os.path.join(outputpath, 'sep-json'))
+        for name in json_results_cmu.keys():
+            with open(os.path.join(outputpath, 'sep-json', name.split('.')[0] + '.json'), 'w') as json_file:
+                json_file.write(json.dumps(json_results_cmu[name]))
+    elif form == 'open':  # the form of OpenPose
+        with open(os.path.join(outputpath, 'alphapose-results.json'), 'w') as json_file:
+            json_file.write(json.dumps(json_results_cmu))
+        if not os.path.exists(os.path.join(outputpath, 'sep-json')):
+            os.mkdir(os.path.join(outputpath, 'sep-json'))
+        for name in json_results_cmu.keys():
+            with open(os.path.join(outputpath, 'sep-json', name.split('.')[0] + '.json'), 'w') as json_file:
+                json_file.write(json.dumps(json_results_cmu[name]))
+    else:
+        with open(os.path.join(outputpath, 'alphapose-results.json'), 'w') as json_file:
+            json_file.write(json.dumps(json_results))
+
diff --git a/joints_detectors/Alphapose/requirements.txt b/joints_detectors/Alphapose/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6e1f58e06f607c58586255a41209cbd28aa46a08
--- /dev/null
+++ b/joints_detectors/Alphapose/requirements.txt
@@ -0,0 +1,9 @@
+torch==0.4.0
+torchvision
+visdom
+nibabel
+pandas
+tqdm
+matplotlib
+opencv-python
+# ntpath is part of the Python standard library; no pip install is needed
diff --git a/joints_detectors/Alphapose/train_sppe/README.md b/joints_detectors/Alphapose/train_sppe/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9b3d95225614fbfcab375d0edc4238ba5aa89ffc
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/README.md
@@ -0,0 +1,81 @@
+This folder includes PyTorch code for training the Single Person Pose Estimation network in AlphaPose.
+
+## Installation
+1. Install PyTorch >= 0.4.0 following [official instruction](https://pytorch.org/get-started/locally/).
+2. Install other dependencies.
+``` bash
+cd ${TRAIN_ROOT}
+pip install -r requirements.txt
+```
+3. Disable cudnn for batch_norm (see [@Microsoft / human-pose-estimation.pytorch#installation](https://github.com/Microsoft/human-pose-estimation.pytorch#installation)):
+```
+# PYTORCH=/path/to/pytorch
+# for pytorch v0.4.0
+sed -i "1194s/torch\.backends\.cudnn\.enabled/False/g" ${PYTORCH}/torch/nn/functional.py
+# for pytorch v0.4.1
+sed -i "1254s/torch\.backends\.cudnn\.enabled/False/g" ${PYTORCH}/torch/nn/functional.py
+
+# Note that instructions like # PYTORCH=/path/to/pytorch indicate that you should pick
+# a path where you'd like to have pytorch installed and then set an environment
+# variable (PYTORCH in this case) accordingly.
+```
+## Data preparation
+
+### COCO Data
+Please download [annot_coco.h5](https://drive.google.com/open?id=1OviCQgzKO2t0gh4Me0MXfi6xgXyTWC5T) and `person_keypoints_val2014.json` from the [COCO dataset website](http://cocodataset.org/#download).
+```
+${TRAIN_ROOT}
+|-- src
+|-- exp
+|-- data
+`-- |-- coco
+ `-- |-- annot_coco.h5
+ |-- person_keypoints_val2014.json
+ `-- images
+ |-- trainval2017
+ | |-- 000000000001.jpg
+ | |-- 000000000002.jpg
+ | |-- 000000000003.jpg
+ | |-- ...
+```
+
+## Train on COCO
+``` bash
+cd src
+# Train without DPG first
+python train.py --dataset coco --expID exp1 --nClasses 17 --LR 1e-4
+# Then, train with DPG
+python train.py --dataset coco --expID exp1 --nClasses 17 --LR 1e-4 --addDPG
+
+# Or, train with a pretrained model
+python train.py --dataset coco --expID exp1 --nClasses 17 --LR 1e-5 --addDPG --loadModel #{MODEL_DIR}
+```
+
+## Train on new dataset
+Please create the `h5` file from your own dataset. Here is a Python demo that reads such an `h5` file.
+``` python
+>>> import h5py
+>>> annot = h5py.File('annot_coco.h5')
+>>> for k in annot.keys():
+... print(k)
+bndbox
+imgname
+part
+
+>>> bndbox = annot['bndbox'][:]
+>>> bndbox.shape
+(144213, 1, 4)
+>>> imgname = annot['imgname'][:]
+>>> imgname.shape
+(144213, 16)
+>>> part = annot['part'][:]
+>>> part.shape
+(144213, 17, 2)
+```
+```
+bndbox: [1 x 4] (upleft_x, upleft_y, bottomright_x, bottomright_y)
+imgname: [16]  # ASCII codes of the image name
+part: [17 x 2] (kp1_x, kp1_y, kp2_x, kp2_y, ..., kp17_x, kp17_y)
+```
+
+Please follow the same layout when creating your own `h5` files; for details on saving data with `h5py`, see the [h5py quick start](http://docs.h5py.org/en/stable/quick.html#quick). A minimal creation sketch is shown below.
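+
+A rough sketch of creating such a file (the values and the output name
+`my_annot.h5` are hypothetical placeholders):
+``` python
+import h5py
+import numpy as np
+
+num_samples = 2
+
+# bndbox: [n, 1, 4] -- (upleft_x, upleft_y, bottomright_x, bottomright_y)
+bndbox = np.zeros((num_samples, 1, 4))
+bndbox[0, 0] = (10, 20, 110, 220)
+bndbox[1, 0] = (30, 40, 130, 240)
+
+# imgname: [n, 16] -- image names stored as fixed-length ASCII codes
+imgname = np.zeros((num_samples, 16), dtype=np.uint8)
+for i, name in enumerate(['000000000001.jpg', '000000000002.jpg']):
+    codes = np.frombuffer(name.encode('ascii'), dtype=np.uint8)
+    imgname[i, :len(codes)] = codes
+
+# part: [n, 17, 2] -- (x, y) for each of the 17 COCO keypoints
+part = np.zeros((num_samples, 17, 2))
+
+with h5py.File('my_annot.h5', 'w') as annot:
+    annot.create_dataset('bndbox', data=bndbox)
+    annot.create_dataset('imgname', data=imgname)
+    annot.create_dataset('part', data=part)
+```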
diff --git a/joints_detectors/Alphapose/train_sppe/data/coco/coco-minival500_images.txt b/joints_detectors/Alphapose/train_sppe/data/coco/coco-minival500_images.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d6dc42e0364c595b52cce2e2335394373091b208
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/data/coco/coco-minival500_images.txt
@@ -0,0 +1 @@
+375521,244455,375530,244462,113397,113408,375554,171819,375566,244496,139077,506656,375606,244539,565045,113473,375621,244550,492605,506707,113493,215524,506717,506723,433980,244586,113533,113559,390298,281072,113571,543218,506803,113588,113590,244665,375755,375769,324776,506872,506874,375810,375812,113678,375823,261292,506933,113720,113722,506945,375875,506950,113736,375881,244815,113745,113756,113757,375902,244833,244834,507015,178370,412184,507037,507065,244925,244931,507080,193743,104176,113890,113905,113914,376059,244999,507147,507154,245013,376093,507167,245026,507171,106375,507180,376112,507187,113975,113989,507211,507223,376160,114025,376185,114049,114055,507273,376206,376208,376209,376233,376236,507312,245174,376247,114108,245182,507330,507352,114147,376295,245227,114158,376307,376322,106411,114183,376342,507427,062726,507436,434098,245295,376372,245301,245311,245313,245315,245320,376393,507473,038721,376416,114282,114291,376441,376449,376450,245383,376456,114313,114317,376469,507551,245411,245414,114352,245426,245430,245432,245447,245448,376521,245453,376531,245460,114389,245462,376545,376549,114414,507633,245497,376575,507663,507667,114453,281220,114458,376603,114481,376628,507719,139113,172004,507750,376679,346774,507782,245642,507797,245660,245667,303089,376751,507826,507833,376773,114634,245716,303101,108130,245754,114684,477867,114710,507927,376859,114717,507935,507945,114744,114745,245818,376891,507966,376900,507975,106508,507979,019129,508006,376939,376959,245898,376990,114855,114868,114870,508087,114884,508101,508119,114907,150224,377060,246001,246004,246005,246014,114946,377091,508167,377097,377111,246040,377113,246053,246057,061605,246064,246066,115006,377155,508230,246105,246106,377183,115043,377195,246124,246125,115060,508288,508302,508303,377239,246183,508339,062878,115128,115146,508370,508373,246231,246252,246265,377352,377361,377385,115243,115245,508470,521655,377427,182784,390585,508514,115314,565353,508538,456127,246398,377486,115363,115370,246454,508605,377577,508656,377588,115455,115459,246535,246562,377635,128476,246576,377652,508730,246589,246590,115521,246597,246612,246626,246629,377706,305000,377715,246649,115579,115584,377730,508811,508822,246686,150342,115626,115636,508872,377802,377809,115667,246746,377832,368807,115700,508917,246782,115721,508949,508950,543577,508962,115752,508972,508977,309371,115765,377910,508985,246843,115772,106677,115791,115793,115796,509014,509020,377949,246878,377951,246883,509028,509037,115823,115830,195896,377984,377999,368836,115870,115875,246951,246963,246968,115898,378048,115912,182805,115924,115930,562519,509158,378096,378116,509192,509194,368855,378126,247057,378134,378147,509223,509227,116017,378163,116023,378169,200267,116037,116046,259640,378204,116061,116067,116068,247141,116083,116096,063040,378244,509319,247179,116132,116133,247206,543644,378284,247216,116149,509366,116173,247259,378334,247264,116202,378347,116208,281512,247285,247306,509451,509459,247317,509471,116261,509497,509514,247378,509526,509536,378467,543676,378482,116341,194153,116354,378502,509577,247438,378515,509589,509590,116377,172315,116389,378538,247474,116405,247484,378561,116434,116439,247519,116455,291936,116466,378614,509695,378652,378657,378658,247587,116517,378667,378673,172342,509766,247625,247639,509786,116574,378727,509811,509826,247692,247714,247720,509867,509891,378823,378825,378831,116696,247782,116712,247788,247790,378873,247808,259755,378894,247839,247840,378928,378940,378948,378962
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/train_sppe/requirements.txt b/joints_detectors/Alphapose/train_sppe/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2072cb449db4b22f5f0efbc33269f1938c175912
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/requirements.txt
@@ -0,0 +1,12 @@
+torch==0.4.0
+torchvision
+opencv-python
+matplotlib
+nibabel
+cython
+pandas
+h5py
+scipy
+tqdm
+pycocotools
+tensorboardx
\ No newline at end of file
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703645.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703645.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..ad9530ce2777ad8c4ded323f2bad3affe12595b7
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703645.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a18ebf54707c24344dd13e573c88c944539cbb266257694fe97efa1bcf6af42d
+size 66022
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703981.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703981.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..17f4889d9f39269a70cc409cff824980c929ee01
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537703981.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f47a7481036cf8af2d49afb66bd9abf720f9d45a327aa4bb098cccf9fa8e65df
+size 25
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704040.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704040.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..f7575edf7e09a1c53a17c1df99bf67bf8a7ccaf7
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704040.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b913f359c88fe8f8ef1abd5e9dfe8f116dca36d013f4709ed45380e6d54f82e2
+size 25
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704067.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704067.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..8168e8913465df5618cb989f6e7c044a2b8b9c4e
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537704067.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22285d5556a570b161674675a287e59383460a638cef707b965b6ccf208a1f69
+size 558879
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537711198.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537711198.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..5eddcaecb7d7e82a7e21262decbe7dfa7bc0df0d
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537711198.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ec84b2e66e0ff5cd95124aec6f9e77432742be03cd0fb94525649fd2079af2d
+size 220834
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537712715.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537712715.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..0bb581645d5349471200d2f47a494f608f888966
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537712715.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:419a572fb0e7626f798ab1734975dc310242a13aa935972d14fddacd1ec4ec95
+size 582487
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716834.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716834.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..326208dd6b529cc55878fe9dad79f0ef4d5a9864
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716834.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7da2ffd1b386fe4af7168e35eff08a71fa380220cdb6eccfa2c820b985c0b60
+size 215
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716861.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716861.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..72bbd5e5f23973fbdc1c027007dfc07bf6803790
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716861.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7c2fa4f60398707786ad1b782cdc15bcbc30f5e49f575f15af6b9090cdc41464
+size 310
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716907.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716907.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..e3d91c4a0f70ce7565a256cc9c273579c87e3e03
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537716907.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf5a9e7e3fa80a7bc8fbdf6103266ba28f7ddc44a8dfc1906ef26eb3e9118d6d
+size 582487
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718129.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718129.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..9665b7d48519c67e7c1465f27f3153aa8e03e541
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718129.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5a4222dcfc8092a6a6ca0c8a42fee65c64d7c3ed4bac35bce758ce24a988a3e
+size 25
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718230.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718230.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..2757b5446c2e4eba21a0bceeeb723c6ea7fb9d7c
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718230.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e85578fd44bffc3fef894308060c7b08034e86cc7102938628b65cae886441ea
+size 25
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718771.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718771.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..401dd982d7bb3634e31182065dd2715c6679285b
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718771.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ef52cb28e5b6fb32ae2d975aa5a9d6662d5575ad7618a0d6ea06092c2def7f3
+size 25
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718831.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718831.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..f2c42896909707fc4379364b4da229c0f8783361
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537718831.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:43633b0f03c71865ec9ef8ea0302c0adc7124d0c1a7f9c041f5dfc53c62a29cd
+size 595
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719643.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719643.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..c8ea48f73911ba93b85317b6ffcf2849526cea06
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719643.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ee003be75c021246c8d4a5c17153f623b6eae1674993a0767a3a8275bcc77d6
+size 52054
diff --git a/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719983.mvig-14 b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719983.mvig-14
new file mode 100644
index 0000000000000000000000000000000000000000..06764e61a548a6f3682b536450251fc391c2462a
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/.tensorboard/coco/test/events.out.tfevents.1537719983.mvig-14
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9c4615141536d46cfa01d383f1f82d481ccab33debce8571e1bf887d3b334af
+size 302933
diff --git a/joints_detectors/Alphapose/train_sppe/src/evaluation.py b/joints_detectors/Alphapose/train_sppe/src/evaluation.py
new file mode 100644
index 0000000000000000000000000000000000000000..10c13ddac01132844ab2a0d89c57e37f28ca9728
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/evaluation.py
@@ -0,0 +1,142 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch
+import torch.nn as nn
+import torch.utils.data
+from .predict.annot.coco_minival import Mscoco_minival
+from .predict.p_poseNMS import pose_nms, write_json
+import numpy as np
+from .opt import opt
+from tqdm import tqdm
+from .utils.img import flip, shuffleLR, vis_frame
+from .utils.eval import getPrediction, getmap
+import os
+import cv2
+
+
+def gaussian(size):
+ '''
+ Generate a 2D gaussian array
+ '''
+    x = np.arange(0, size, 1, float)
+    y = x[:, np.newaxis]
+    x0 = y0 = size // 2
+    sigma = size / 4.0
+    # The gaussian is not normalized; we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+ g = g[np.newaxis, :]
+ return g
+
+
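+# Depth-wise 5x5 gaussian blur over the 17 heatmap channels, matching the
+# optional smoothing step that is commented out in prediction() below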
+gaussian_kernel = nn.Conv2d(17, 17, kernel_size=4 * 1 + 1,
+ stride=1, padding=2, groups=17, bias=False)
+
+g = torch.from_numpy(gaussian(4 * 1 + 1)).clone()
+g = torch.unsqueeze(g, 1)
+g = g.repeat(17, 1, 1, 1)
+gaussian_kernel.weight.data = g.float()
+gaussian_kernel.cuda()
+
+
+def prediction(model):
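+    # Evaluate the SPPE model on COCO minival: batch all person crops per
+    # image, predict heatmaps, decode keypoints and run pose NMS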
+ model.eval()
+ dataset = Mscoco_minival()
+ minival_loader = torch.utils.data.DataLoader(
+ dataset, batch_size=1, shuffle=False, num_workers=20, pin_memory=True)
+ minival_loader_desc = tqdm(minival_loader)
+
+ final_result = []
+
+ tmp_inp = {}
+ for i, (inp, box, im_name, metaData) in enumerate(minival_loader_desc):
+ #inp = torch.autograd.Variable(inp.cuda(), volatile=True)
+ pt1, pt2, ori_inp = metaData
+ #with torch.autograd.profiler.profile(use_cuda=True) as prof:
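+        # First pass: group the crops by source image so that every person
+        # box of an image can be scored in a single batch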
+ if im_name[0] in tmp_inp.keys():
+ inps = tmp_inp[im_name[0]]['inps']
+ ori_inps = tmp_inp[im_name[0]]['ori_inps']
+ boxes = tmp_inp[im_name[0]]['boxes']
+ pt1s = tmp_inp[im_name[0]]['pt1s']
+ pt2s = tmp_inp[im_name[0]]['pt2s']
+ tmp_inp[im_name[0]]['inps'] = torch.cat((inps, inp), dim=0)
+ tmp_inp[im_name[0]]['pt1s'] = torch.cat((pt1s, pt1), dim=0)
+ tmp_inp[im_name[0]]['pt2s'] = torch.cat((pt2s, pt2), dim=0)
+ tmp_inp[im_name[0]]['ori_inps'] = torch.cat(
+ (ori_inps, ori_inp), dim=0)
+ tmp_inp[im_name[0]]['boxes'] = torch.cat((boxes, box), dim=0)
+ else:
+ tmp_inp[im_name[0]] = {
+ 'inps': inp,
+ 'ori_inps': ori_inp,
+ 'boxes': box,
+ 'pt1s': pt1,
+ 'pt2s': pt2
+ }
+
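+    # Second pass: one batched forward per image, then keypoint decoding
+    # and parametric pose NMS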
+ for im_name, item in tqdm(tmp_inp.items()):
+ inp = item['inps']
+ pt1 = item['pt1s']
+ pt2 = item['pt2s']
+ box = item['boxes']
+ ori_inp = item['ori_inps']
+
+ with torch.no_grad():
+ try:
+ kp_preds = model(inp)
+ kp_preds = kp_preds.data[:, :17, :]
+ except RuntimeError as e:
+ '''
+ Divide inputs into two batches
+ '''
+                assert 'out of memory' in str(e)
+ bn = inp.shape[0]
+ inp1 = inp[: bn // 2]
+ inp2 = inp[bn // 2:]
+ kp_preds1 = model(inp1)
+ kp_preds2 = model(inp2)
+ kp_preds = torch.cat((kp_preds1, kp_preds2), dim=0)
+ kp_preds = kp_preds.data[:, :17, :]
+
+ # kp_preds = gaussian_kernel(F.relu(kp_preds))
+
+ # Get predictions
+ # location prediction (n, kp, 2) | score prediction (n, kp, 1)
+
+ preds, preds_img, preds_scores = getPrediction(
+ kp_preds.cpu().data, pt1, pt2,
+ opt.inputResH, opt.inputResW, opt.outputResH, opt.outputResW
+ )
+
+ result = pose_nms(box, preds_img, preds_scores)
+ result = {
+ 'imgname': im_name,
+ 'result': result
+ }
+ #img = display_frame(orig_img, result, opt.outputpath)
+ #ori_inp = np.transpose(
+ # ori_inp[0][:3].clone().numpy(), (1, 2, 0)) * 255
+ #img = vis_frame(ori_inp, result)
+ #cv2.imwrite(os.path.join(
+ # './val', 'vis', im_name), img)
+ final_result.append(result)
+
+ write_json(final_result, './val', for_eval=True)
+ return getmap()
+
+
+if __name__ == '__main__':
+    # prediction() needs a pose model; minimal sketch using this repo's
+    # createModel() (weights must be loaded separately, e.g. via opt.loadModel)
+    from .models.FastPose import createModel
+    prediction(createModel().cuda())
diff --git a/joints_detectors/Alphapose/train_sppe/src/install.sh b/joints_detectors/Alphapose/train_sppe/src/install.sh
new file mode 100644
index 0000000000000000000000000000000000000000..7db1830fe2be5cc92ffc2886a9d37bc23ce9abc9
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/install.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+# Install the Python dependencies needed to train the SPPE pose model
+pip install torch==0.4.0 torchvision opencv-python matplotlib nibabel \
+    cython pandas h5py scipy tqdm pycocotools
diff --git a/joints_detectors/Alphapose/train_sppe/src/models/FastPose.py b/joints_detectors/Alphapose/train_sppe/src/models/FastPose.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b5d590b627745a6bb1ce3d037eb87678b28b137
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/models/FastPose.py
@@ -0,0 +1,43 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch.nn as nn
+
+from .layers.DUC import DUC
+from .layers.SE_Resnet import SEResnet
+
+# Import training option
+from opt import opt
+
+
+def createModel():
+ return FastPose_SE()
+
+
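+# SE-ResNet101 backbone followed by PixelShuffle and two DUC upsampling
+# stages, ending in a 3x3 conv that emits opt.nClasses joint heatmaps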
+class FastPose_SE(nn.Module):
+ conv_dim = 128
+
+ def __init__(self):
+ super(FastPose_SE, self).__init__()
+
+ self.preact = SEResnet('resnet101')
+
+        self.shuffle1 = nn.PixelShuffle(2)
+ self.duc1 = DUC(512, 1024, upscale_factor=2)
+ self.duc2 = DUC(256, 512, upscale_factor=2)
+
+ self.conv_out = nn.Conv2d(
+ self.conv_dim, opt.nClasses, kernel_size=3, stride=1, padding=1)
+
+ def forward(self, x):
+ out = self.preact(x)
+        out = self.shuffle1(out)
+ out = self.duc1(out)
+ out = self.duc2(out)
+
+ out = self.conv_out(out)
+ return out
diff --git a/joints_detectors/Alphapose/train_sppe/src/models/layers/DUC.py b/joints_detectors/Alphapose/train_sppe/src/models/layers/DUC.py
new file mode 100644
index 0000000000000000000000000000000000000000..86811b6fd629c18d1556fef844a52ff96ef47b87
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/models/layers/DUC.py
@@ -0,0 +1,28 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch.nn as nn
+
+
+class DUC(nn.Module):
+    '''
+    Conv3x3 + BN + ReLU + PixelShuffle (Dense Upsampling Convolution)
+    Output: (planes // upscale_factor^2) x (ht*upscale_factor) x (wd*upscale_factor)
+    '''
+
+ def __init__(self, inplanes, planes, upscale_factor=2):
+ super(DUC, self).__init__()
+ self.conv = nn.Conv2d(
+ inplanes, planes, kernel_size=3, padding=1, bias=False)
+ self.bn = nn.BatchNorm2d(planes, momentum=0.1)
+ self.relu = nn.ReLU(inplace=True)
+ self.pixel_shuffle = nn.PixelShuffle(upscale_factor)
+
+ def forward(self, x):
+ x = self.conv(x)
+ x = self.bn(x)
+ x = self.relu(x)
+ x = self.pixel_shuffle(x)
+ return x
diff --git a/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_Resnet.py b/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_Resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..1be60a1ad9f108fa83bc96f0341bc9c71baf4e76
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_Resnet.py
@@ -0,0 +1,107 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch.nn as nn
+from models.layers.SE_module import SELayer
+import torch.nn.functional as F
+
+
+class Bottleneck(nn.Module):
+ expansion = 4
+
+ def __init__(self, inplanes, planes, stride=1, downsample=None, reduction=False):
+ super(Bottleneck, self).__init__()
+ self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
+ self.bn1 = nn.BatchNorm2d(planes, momentum=0.1)
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
+ padding=1, bias=False)
+ self.bn2 = nn.BatchNorm2d(planes, momentum=0.1)
+ self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
+ self.bn3 = nn.BatchNorm2d(planes * 4, momentum=0.1)
+ if reduction:
+ self.se = SELayer(planes * 4)
+
+ self.reduc = reduction
+ self.downsample = downsample
+ self.stride = stride
+
+ def forward(self, x):
+ residual = x
+
+ out = F.relu(self.bn1(self.conv1(x)), inplace=True)
+ out = F.relu(self.bn2(self.conv2(out)), inplace=True)
+
+ out = self.conv3(out)
+ out = self.bn3(out)
+ if self.reduc:
+ out = self.se(out)
+
+ if self.downsample is not None:
+ residual = self.downsample(x)
+
+ out += residual
+ out = F.relu(out)
+
+ return out
+
+
+class SEResnet(nn.Module):
+ """ SEResnet """
+
+ def __init__(self, architecture):
+ super(SEResnet, self).__init__()
+ assert architecture in ["resnet50", "resnet101"]
+ self.inplanes = 64
+ self.layers = [3, 4, {"resnet50": 6, "resnet101": 23}[architecture], 3]
+ self.block = Bottleneck
+
+ self.conv1 = nn.Conv2d(3, 64, kernel_size=7,
+ stride=2, padding=3, bias=False)
+ self.bn1 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1, affine=True)
+ self.relu = nn.ReLU(inplace=True)
+ self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+
+ self.layer1 = self.make_layer(self.block, 64, self.layers[0])
+ self.layer2 = self.make_layer(
+ self.block, 128, self.layers[1], stride=2)
+ self.layer3 = self.make_layer(
+ self.block, 256, self.layers[2], stride=2)
+
+ self.layer4 = self.make_layer(
+ self.block, 512, self.layers[3], stride=2)
+
+ def forward(self, x):
+ x = self.maxpool(self.relu(self.bn1(self.conv1(x)))) # 64 * h/4 * w/4
+ x = self.layer1(x) # 256 * h/4 * w/4
+ x = self.layer2(x) # 512 * h/8 * w/8
+ x = self.layer3(x) # 1024 * h/16 * w/16
+ x = self.layer4(x) # 2048 * h/32 * w/32
+ return x
+
+ def stages(self):
+ return [self.layer1, self.layer2, self.layer3, self.layer4]
+
+ def make_layer(self, block, planes, blocks, stride=1):
+ downsample = None
+ if stride != 1 or self.inplanes != planes * block.expansion:
+ downsample = nn.Sequential(
+ nn.Conv2d(self.inplanes, planes * block.expansion,
+ kernel_size=1, stride=stride, bias=False),
+ nn.BatchNorm2d(planes * block.expansion, momentum=0.1),
+ )
+
+ layers = []
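+        # Only the first block of a stage carries the SE branch and the
+        # projection shortcut; the remaining blocks are plain bottlenecks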
+ if downsample is not None:
+ layers.append(block(self.inplanes, planes,
+ stride, downsample, reduction=True))
+ else:
+ layers.append(block(self.inplanes, planes, stride, downsample))
+ self.inplanes = planes * block.expansion
+ for i in range(1, blocks):
+ layers.append(block(self.inplanes, planes))
+
+ return nn.Sequential(*layers)
diff --git a/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_module.py b/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_module.py
new file mode 100644
index 0000000000000000000000000000000000000000..f370fd4e1fb777306e37f4a7c7be99bd0fbca64a
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/models/layers/SE_module.py
@@ -0,0 +1,26 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+from torch import nn
+
+
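+# Squeeze-and-Excitation block: global-average-pool each channel, pass the
+# result through a two-layer MLP and rescale the feature map channel-wise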
+class SELayer(nn.Module):
+ def __init__(self, channel, reduction=1):
+ super(SELayer, self).__init__()
+ self.avg_pool = nn.AdaptiveAvgPool2d(1)
+ self.fc = nn.Sequential(
+ nn.Linear(channel, channel // reduction),
+ nn.ReLU(inplace=True),
+ nn.Linear(channel // reduction, channel),
+ nn.Sigmoid()
+ )
+
+ def forward(self, x):
+ b, c, _, _ = x.size()
+ y = self.avg_pool(x).view(b, c)
+ y = self.fc(y).view(b, c, 1, 1)
+ return x * y
diff --git a/joints_detectors/Alphapose/train_sppe/src/opt.py b/joints_detectors/Alphapose/train_sppe/src/opt.py
new file mode 100644
index 0000000000000000000000000000000000000000..42b296be0f2c898a5bdeb248728278550cc59454
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/opt.py
@@ -0,0 +1,76 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import argparse
+
+parser = argparse.ArgumentParser(description='PyTorch AlphaPose Training')
+
+"----------------------------- General options -----------------------------"
+parser.add_argument('--expID', default='default', type=str,
+ help='Experiment ID')
+parser.add_argument('--dataset', default='coco', type=str,
+ help='Dataset choice: mpii | coco')
+parser.add_argument('--nThreads', default=30, type=int,
+ help='Number of data loading threads')
+parser.add_argument('--snapshot', default=1, type=int,
+ help='How often to take a snapshot of the model (0 = never)')
+
+"----------------------------- AlphaPose options -----------------------------"
+parser.add_argument('--addDPG', default=False, dest='addDPG',
+ help='Train with data augmentation', action='store_true')
+
+"----------------------------- Model options -----------------------------"
+parser.add_argument('--loadModel', default=None, type=str,
+ help='Provide full path to a previously trained model')
+parser.add_argument('--nClasses', default=17, type=int,
+                    help='Number of output channels')
+
+"----------------------------- Hyperparameter options -----------------------------"
+parser.add_argument('--LR', default=1e-3, type=float,
+ help='Learning rate')
+parser.add_argument('--momentum', default=0, type=float,
+ help='Momentum')
+parser.add_argument('--weightDecay', default=0, type=float,
+ help='Weight decay')
+parser.add_argument('--eps', default=1e-8, type=float,
+ help='epsilon')
+parser.add_argument('--crit', default='MSE', type=str,
+ help='Criterion type')
+parser.add_argument('--optMethod', default='rmsprop', type=str,
+ help='Optimization method: rmsprop | sgd | nag | adadelta')
+
+
+"----------------------------- Training options -----------------------------"
+parser.add_argument('--nEpochs', default=50, type=int,
+                    help='Total number of training epochs')
+parser.add_argument('--epoch', default=0, type=int,
+ help='Current epoch')
+parser.add_argument('--trainBatch', default=128, type=int,
+ help='Train-batch size')
+parser.add_argument('--validBatch', default=24, type=int,
+ help='Valid-batch size')
+parser.add_argument('--trainIters', default=0, type=int,
+ help='Total train iters')
+parser.add_argument('--valIters', default=0, type=int,
+ help='Total valid iters')
+
+"----------------------------- Data options -----------------------------"
+parser.add_argument('--inputResH', default=320, type=int,
+ help='Input image height')
+parser.add_argument('--inputResW', default=256, type=int,
+ help='Input image width')
+parser.add_argument('--outputResH', default=80, type=int,
+ help='Output heatmap height')
+parser.add_argument('--outputResW', default=64, type=int,
+ help='Output heatmap width')
+parser.add_argument('--scale', default=0.3, type=float,
+ help='Degree of scale augmentation')
+parser.add_argument('--rotate', default=40, type=float,
+ help='Degree of rotation augmentation')
+parser.add_argument('--hmGauss', default=1, type=int,
+ help='Heatmap gaussian size')
+
+
+opt = parser.parse_args()
diff --git a/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/index.txt b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/index.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9dfbc523157fd7437717fa33f69eb72e26e300bb
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/index.txt
@@ -0,0 +1,500 @@
+COCO_val2014_000000375521.jpg 1 8
+COCO_val2014_000000244455.jpg 9 38
+COCO_val2014_000000375530.jpg 39 108
+COCO_val2014_000000244462.jpg 109 120
+COCO_val2014_000000113397.jpg 121 129
+COCO_val2014_000000113408.jpg 130 163
+COCO_val2014_000000375554.jpg 164 175
+COCO_val2014_000000171819.jpg 176 182
+COCO_val2014_000000375566.jpg 183 192
+COCO_val2014_000000244496.jpg 193 194
+COCO_val2014_000000139077.jpg 195 200
+COCO_val2014_000000506656.jpg 201 232
+COCO_val2014_000000375606.jpg 233 242
+COCO_val2014_000000244539.jpg 243 286
+COCO_val2014_000000565045.jpg 287 288
+COCO_val2014_000000113473.jpg 289 299
+COCO_val2014_000000375621.jpg 300 316
+COCO_val2014_000000244550.jpg 317 335
+COCO_val2014_000000492605.jpg 336 358
+COCO_val2014_000000506707.jpg 359 412
+COCO_val2014_000000113493.jpg 413 419
+COCO_val2014_000000215524.jpg 420 438
+COCO_val2014_000000506717.jpg 439 456
+COCO_val2014_000000506723.jpg 457 481
+COCO_val2014_000000433980.jpg 482 486
+COCO_val2014_000000244586.jpg 487 520
+COCO_val2014_000000113533.jpg 521 525
+COCO_val2014_000000113559.jpg 526 537
+COCO_val2014_000000390298.jpg 538 576
+COCO_val2014_000000281072.jpg 577 583
+COCO_val2014_000000113571.jpg 584 591
+COCO_val2014_000000543218.jpg 592 605
+COCO_val2014_000000506803.jpg 606 613
+COCO_val2014_000000113588.jpg 614 628
+COCO_val2014_000000113590.jpg 629 652
+COCO_val2014_000000244665.jpg 653 708
+COCO_val2014_000000375755.jpg 709 710
+COCO_val2014_000000375769.jpg 711 718
+COCO_val2014_000000324776.jpg 719 721
+COCO_val2014_000000506872.jpg 722 774
+COCO_val2014_000000506874.jpg 775 876
+COCO_val2014_000000375810.jpg 877 886
+COCO_val2014_000000375812.jpg 887 889
+COCO_val2014_000000113678.jpg 890 902
+COCO_val2014_000000375823.jpg 903 1018
+COCO_val2014_000000261292.jpg 1019 1083
+COCO_val2014_000000506933.jpg 1084 1094
+COCO_val2014_000000113720.jpg 1095 1111
+COCO_val2014_000000113722.jpg 1112 1179
+COCO_val2014_000000506945.jpg 1180 1184
+COCO_val2014_000000375875.jpg 1185 1190
+COCO_val2014_000000506950.jpg 1191 1253
+COCO_val2014_000000113736.jpg 1254 1258
+COCO_val2014_000000375881.jpg 1259 1269
+COCO_val2014_000000244815.jpg 1270 1315
+COCO_val2014_000000113745.jpg 1316 1330
+COCO_val2014_000000113756.jpg 1331 1338
+COCO_val2014_000000113757.jpg 1339 1438
+COCO_val2014_000000375902.jpg 1439 1488
+COCO_val2014_000000244833.jpg 1489 1509
+COCO_val2014_000000244834.jpg 1510 1532
+COCO_val2014_000000507015.jpg 1533 1548
+COCO_val2014_000000178370.jpg 1549 1574
+COCO_val2014_000000412184.jpg 1575 1584
+COCO_val2014_000000507037.jpg 1585 1642
+COCO_val2014_000000507065.jpg 1643 1646
+COCO_val2014_000000244925.jpg 1647 1652
+COCO_val2014_000000244931.jpg 1653 1683
+COCO_val2014_000000507080.jpg 1684 1715
+COCO_val2014_000000193743.jpg 1716 1727
+COCO_val2014_000000104176.jpg 1728 1730
+COCO_val2014_000000113890.jpg 1731 1735
+COCO_val2014_000000113905.jpg 1736 1747
+COCO_val2014_000000113914.jpg 1748 1778
+COCO_val2014_000000376059.jpg 1779 1781
+COCO_val2014_000000244999.jpg 1782 1800
+COCO_val2014_000000507147.jpg 1801 1814
+COCO_val2014_000000507154.jpg 1815 1872
+COCO_val2014_000000245013.jpg 1873 1889
+COCO_val2014_000000376093.jpg 1890 1903
+COCO_val2014_000000507167.jpg 1904 1906
+COCO_val2014_000000245026.jpg 1907 1911
+COCO_val2014_000000507171.jpg 1912 1917
+COCO_val2014_000000106375.jpg 1918 1935
+COCO_val2014_000000507180.jpg 1936 1952
+COCO_val2014_000000376112.jpg 1953 2018
+COCO_val2014_000000507187.jpg 2019 2108
+COCO_val2014_000000113975.jpg 2109 2133
+COCO_val2014_000000113989.jpg 2134 2136
+COCO_val2014_000000507211.jpg 2137 2141
+COCO_val2014_000000507223.jpg 2142 2171
+COCO_val2014_000000376160.jpg 2172 2188
+COCO_val2014_000000114025.jpg 2189 2260
+COCO_val2014_000000376185.jpg 2261 2263
+COCO_val2014_000000114049.jpg 2264 2301
+COCO_val2014_000000114055.jpg 2302 2312
+COCO_val2014_000000507273.jpg 2313 2316
+COCO_val2014_000000376206.jpg 2317 2319
+COCO_val2014_000000376208.jpg 2320 2376
+COCO_val2014_000000376209.jpg 2377 2394
+COCO_val2014_000000376233.jpg 2395 2403
+COCO_val2014_000000376236.jpg 2404 2407
+COCO_val2014_000000507312.jpg 2408 2411
+COCO_val2014_000000245174.jpg 2412 2416
+COCO_val2014_000000376247.jpg 2417 2438
+COCO_val2014_000000114108.jpg 2439 2450
+COCO_val2014_000000245182.jpg 2451 2506
+COCO_val2014_000000507330.jpg 2507 2564
+COCO_val2014_000000507352.jpg 2565 2568
+COCO_val2014_000000114147.jpg 2569 2646
+COCO_val2014_000000376295.jpg 2647 2663
+COCO_val2014_000000245227.jpg 2664 2740
+COCO_val2014_000000114158.jpg 2741 2745
+COCO_val2014_000000376307.jpg 2746 2766
+COCO_val2014_000000376322.jpg 2767 2831
+COCO_val2014_000000106411.jpg 2832 2836
+COCO_val2014_000000114183.jpg 2837 2863
+COCO_val2014_000000376342.jpg 2864 2885
+COCO_val2014_000000507427.jpg 2886 2887
+COCO_val2014_000000062726.jpg 2888 2945
+COCO_val2014_000000507436.jpg 2946 2949
+COCO_val2014_000000434098.jpg 2950 2951
+COCO_val2014_000000245295.jpg 2952 2970
+COCO_val2014_000000376372.jpg 2971 2999
+COCO_val2014_000000245301.jpg 3000 3002
+COCO_val2014_000000245311.jpg 3003 3027
+COCO_val2014_000000245313.jpg 3028 3071
+COCO_val2014_000000245315.jpg 3072 3088
+COCO_val2014_000000245320.jpg 3089 3099
+COCO_val2014_000000376393.jpg 3100 3108
+COCO_val2014_000000507473.jpg 3109 3116
+COCO_val2014_000000038721.jpg 3117 3118
+COCO_val2014_000000376416.jpg 3119 3179
+COCO_val2014_000000114282.jpg 3180 3227
+COCO_val2014_000000114291.jpg 3228 3308
+COCO_val2014_000000376441.jpg 3309 3318
+COCO_val2014_000000376449.jpg 3319 3321
+COCO_val2014_000000376450.jpg 3322 3351
+COCO_val2014_000000245383.jpg 3352 3498
+COCO_val2014_000000376456.jpg 3499 3504
+COCO_val2014_000000114313.jpg 3505 3528
+COCO_val2014_000000114317.jpg 3529 3536
+COCO_val2014_000000376469.jpg 3537 3551
+COCO_val2014_000000507551.jpg 3552 3570
+COCO_val2014_000000245411.jpg 3571 3576
+COCO_val2014_000000245414.jpg 3577 3578
+COCO_val2014_000000114352.jpg 3579 3580
+COCO_val2014_000000245426.jpg 3581 3593
+COCO_val2014_000000245430.jpg 3594 3631
+COCO_val2014_000000245432.jpg 3632 3696
+COCO_val2014_000000245447.jpg 3697 3719
+COCO_val2014_000000245448.jpg 3720 3738
+COCO_val2014_000000376521.jpg 3739 3752
+COCO_val2014_000000245453.jpg 3753 3755
+COCO_val2014_000000376531.jpg 3756 3757
+COCO_val2014_000000245460.jpg 3758 3760
+COCO_val2014_000000114389.jpg 3761 3762
+COCO_val2014_000000245462.jpg 3763 3774
+COCO_val2014_000000376545.jpg 3775 3782
+COCO_val2014_000000376549.jpg 3783 3786
+COCO_val2014_000000114414.jpg 3787 3800
+COCO_val2014_000000507633.jpg 3801 3802
+COCO_val2014_000000245497.jpg 3803 3816
+COCO_val2014_000000376575.jpg 3817 3829
+COCO_val2014_000000507663.jpg 3830 3842
+COCO_val2014_000000507667.jpg 3843 3889
+COCO_val2014_000000114453.jpg 3890 3901
+COCO_val2014_000000281220.jpg 3902 3905
+COCO_val2014_000000114458.jpg 3906 3910
+COCO_val2014_000000376603.jpg 3911 3923
+COCO_val2014_000000114481.jpg 3924 3932
+COCO_val2014_000000376628.jpg 3933 3998
+COCO_val2014_000000507719.jpg 3999 4028
+COCO_val2014_000000139113.jpg 4029 4038
+COCO_val2014_000000172004.jpg 4039 4075
+COCO_val2014_000000507750.jpg 4076 4179
+COCO_val2014_000000376679.jpg 4180 4188
+COCO_val2014_000000346774.jpg 4189 4197
+COCO_val2014_000000507782.jpg 4198 4207
+COCO_val2014_000000245642.jpg 4208 4222
+COCO_val2014_000000507797.jpg 4223 4270
+COCO_val2014_000000245660.jpg 4271 4272
+COCO_val2014_000000245667.jpg 4273 4298
+COCO_val2014_000000303089.jpg 4299 4306
+COCO_val2014_000000376751.jpg 4307 4346
+COCO_val2014_000000507826.jpg 4347 4352
+COCO_val2014_000000507833.jpg 4353 4394
+COCO_val2014_000000376773.jpg 4395 4405
+COCO_val2014_000000114634.jpg 4406 4418
+COCO_val2014_000000245716.jpg 4419 4426
+COCO_val2014_000000303101.jpg 4427 4504
+COCO_val2014_000000108130.jpg 4505 4516
+COCO_val2014_000000245754.jpg 4517 4568
+COCO_val2014_000000114684.jpg 4569 4643
+COCO_val2014_000000477867.jpg 4644 4680
+COCO_val2014_000000114710.jpg 4681 4688
+COCO_val2014_000000507927.jpg 4689 4760
+COCO_val2014_000000376859.jpg 4761 4793
+COCO_val2014_000000114717.jpg 4794 4799
+COCO_val2014_000000507935.jpg 4800 4802
+COCO_val2014_000000507945.jpg 4803 4806
+COCO_val2014_000000114744.jpg 4807 4824
+COCO_val2014_000000114745.jpg 4825 4834
+COCO_val2014_000000245818.jpg 4835 4853
+COCO_val2014_000000376891.jpg 4854 4929
+COCO_val2014_000000507966.jpg 4930 5034
+COCO_val2014_000000376900.jpg 5035 5129
+COCO_val2014_000000507975.jpg 5130 5174
+COCO_val2014_000000106508.jpg 5175 5204
+COCO_val2014_000000507979.jpg 5205 5213
+COCO_val2014_000000019129.jpg 5214 5224
+COCO_val2014_000000508006.jpg 5225 5240
+COCO_val2014_000000376939.jpg 5241 5248
+COCO_val2014_000000376959.jpg 5249 5257
+COCO_val2014_000000245898.jpg 5258 5276
+COCO_val2014_000000376990.jpg 5277 5287
+COCO_val2014_000000114855.jpg 5288 5293
+COCO_val2014_000000114868.jpg 5294 5296
+COCO_val2014_000000114870.jpg 5297 5302
+COCO_val2014_000000508087.jpg 5303 5322
+COCO_val2014_000000114884.jpg 5323 5391
+COCO_val2014_000000508101.jpg 5392 5411
+COCO_val2014_000000508119.jpg 5412 5415
+COCO_val2014_000000114907.jpg 5416 5506
+COCO_val2014_000000150224.jpg 5507 5535
+COCO_val2014_000000377060.jpg 5536 5544
+COCO_val2014_000000246001.jpg 5545 5549
+COCO_val2014_000000246004.jpg 5550 5555
+COCO_val2014_000000246005.jpg 5556 5567
+COCO_val2014_000000246014.jpg 5568 5742
+COCO_val2014_000000114946.jpg 5743 5750
+COCO_val2014_000000377091.jpg 5751 5754
+COCO_val2014_000000508167.jpg 5755 5762
+COCO_val2014_000000377097.jpg 5763 5770
+COCO_val2014_000000377111.jpg 5771 5778
+COCO_val2014_000000246040.jpg 5779 5781
+COCO_val2014_000000377113.jpg 5782 5816
+COCO_val2014_000000246053.jpg 5817 5822
+COCO_val2014_000000246057.jpg 5823 5903
+COCO_val2014_000000061605.jpg 5904 5919
+COCO_val2014_000000246064.jpg 5920 5925
+COCO_val2014_000000246066.jpg 5926 5936
+COCO_val2014_000000115006.jpg 5937 6016
+COCO_val2014_000000377155.jpg 6017 6020
+COCO_val2014_000000508230.jpg 6021 6068
+COCO_val2014_000000246105.jpg 6069 6079
+COCO_val2014_000000246106.jpg 6080 6086
+COCO_val2014_000000377183.jpg 6087 6125
+COCO_val2014_000000115043.jpg 6126 6128
+COCO_val2014_000000377195.jpg 6129 6213
+COCO_val2014_000000246124.jpg 6214 6224
+COCO_val2014_000000246125.jpg 6225 6229
+COCO_val2014_000000115060.jpg 6230 6261
+COCO_val2014_000000508288.jpg 6262 6270
+COCO_val2014_000000508302.jpg 6271 6280
+COCO_val2014_000000508303.jpg 6281 6292
+COCO_val2014_000000377239.jpg 6293 6317
+COCO_val2014_000000246183.jpg 6318 6356
+COCO_val2014_000000508339.jpg 6357 6374
+COCO_val2014_000000062878.jpg 6375 6377
+COCO_val2014_000000115128.jpg 6378 6386
+COCO_val2014_000000115146.jpg 6387 6413
+COCO_val2014_000000508370.jpg 6414 6423
+COCO_val2014_000000508373.jpg 6424 6430
+COCO_val2014_000000246231.jpg 6431 6434
+COCO_val2014_000000246252.jpg 6435 6447
+COCO_val2014_000000246265.jpg 6448 6524
+COCO_val2014_000000377352.jpg 6525 6536
+COCO_val2014_000000377361.jpg 6537 6542
+COCO_val2014_000000377385.jpg 6543 6546
+COCO_val2014_000000115243.jpg 6547 6559
+COCO_val2014_000000115245.jpg 6560 6587
+COCO_val2014_000000508470.jpg 6588 6593
+COCO_val2014_000000521655.jpg 6594 6598
+COCO_val2014_000000377427.jpg 6599 6610
+COCO_val2014_000000182784.jpg 6611 6666
+COCO_val2014_000000390585.jpg 6667 6668
+COCO_val2014_000000508514.jpg 6669 6675
+COCO_val2014_000000115314.jpg 6676 6686
+COCO_val2014_000000565353.jpg 6687 6690
+COCO_val2014_000000508538.jpg 6691 6754
+COCO_val2014_000000456127.jpg 6755 6782
+COCO_val2014_000000246398.jpg 6783 6865
+COCO_val2014_000000377486.jpg 6866 6894
+COCO_val2014_000000115363.jpg 6895 6905
+COCO_val2014_000000115370.jpg 6906 7015
+COCO_val2014_000000246454.jpg 7016 7027
+COCO_val2014_000000508605.jpg 7028 7036
+COCO_val2014_000000377577.jpg 7037 7038
+COCO_val2014_000000508656.jpg 7039 7056
+COCO_val2014_000000377588.jpg 7057 7074
+COCO_val2014_000000115455.jpg 7075 7081
+COCO_val2014_000000115459.jpg 7082 7093
+COCO_val2014_000000246535.jpg 7094 7096
+COCO_val2014_000000246562.jpg 7097 7103
+COCO_val2014_000000377635.jpg 7104 7113
+COCO_val2014_000000128476.jpg 7114 7124
+COCO_val2014_000000246576.jpg 7125 7130
+COCO_val2014_000000377652.jpg 7131 7141
+COCO_val2014_000000508730.jpg 7142 7147
+COCO_val2014_000000246589.jpg 7148 7169
+COCO_val2014_000000246590.jpg 7170 7219
+COCO_val2014_000000115521.jpg 7220 7230
+COCO_val2014_000000246597.jpg 7231 7238
+COCO_val2014_000000246612.jpg 7239 7241
+COCO_val2014_000000246626.jpg 7242 7250
+COCO_val2014_000000246629.jpg 7251 7262
+COCO_val2014_000000377706.jpg 7263 7277
+COCO_val2014_000000305000.jpg 7278 7307
+COCO_val2014_000000377715.jpg 7308 7345
+COCO_val2014_000000246649.jpg 7346 7349
+COCO_val2014_000000115579.jpg 7350 7355
+COCO_val2014_000000115584.jpg 7356 7357
+COCO_val2014_000000377730.jpg 7358 7383
+COCO_val2014_000000508811.jpg 7384 7393
+COCO_val2014_000000508822.jpg 7394 7402
+COCO_val2014_000000246686.jpg 7403 7424
+COCO_val2014_000000150342.jpg 7425 7465
+COCO_val2014_000000115626.jpg 7466 7476
+COCO_val2014_000000115636.jpg 7477 7546
+COCO_val2014_000000508872.jpg 7547 7556
+COCO_val2014_000000377802.jpg 7557 7561
+COCO_val2014_000000377809.jpg 7562 7569
+COCO_val2014_000000115667.jpg 7570 7575
+COCO_val2014_000000246746.jpg 7576 7582
+COCO_val2014_000000377832.jpg 7583 7672
+COCO_val2014_000000368807.jpg 7673 7681
+COCO_val2014_000000115700.jpg 7682 7694
+COCO_val2014_000000508917.jpg 7695 7762
+COCO_val2014_000000246782.jpg 7763 7783
+COCO_val2014_000000115721.jpg 7784 7796
+COCO_val2014_000000508949.jpg 7797 7800
+COCO_val2014_000000508950.jpg 7801 7850
+COCO_val2014_000000543577.jpg 7851 7873
+COCO_val2014_000000508962.jpg 7874 7893
+COCO_val2014_000000115752.jpg 7894 7899
+COCO_val2014_000000508972.jpg 7900 7907
+COCO_val2014_000000508977.jpg 7908 7920
+COCO_val2014_000000309371.jpg 7921 7946
+COCO_val2014_000000115765.jpg 7947 7985
+COCO_val2014_000000377910.jpg 7986 7987
+COCO_val2014_000000508985.jpg 7988 8054
+COCO_val2014_000000246843.jpg 8055 8069
+COCO_val2014_000000115772.jpg 8070 8108
+COCO_val2014_000000106677.jpg 8109 8182
+COCO_val2014_000000115791.jpg 8183 8212
+COCO_val2014_000000115793.jpg 8213 8228
+COCO_val2014_000000115796.jpg 8229 8298
+COCO_val2014_000000509014.jpg 8299 8367
+COCO_val2014_000000509020.jpg 8368 8378
+COCO_val2014_000000377949.jpg 8379 8384
+COCO_val2014_000000246878.jpg 8385 8429
+COCO_val2014_000000377951.jpg 8430 8435
+COCO_val2014_000000246883.jpg 8436 8441
+COCO_val2014_000000509028.jpg 8442 8482
+COCO_val2014_000000509037.jpg 8483 8502
+COCO_val2014_000000115823.jpg 8503 8508
+COCO_val2014_000000115830.jpg 8509 8534
+COCO_val2014_000000195896.jpg 8535 8541
+COCO_val2014_000000377984.jpg 8542 8563
+COCO_val2014_000000377999.jpg 8564 8590
+COCO_val2014_000000368836.jpg 8591 8620
+COCO_val2014_000000115870.jpg 8621 8653
+COCO_val2014_000000115875.jpg 8654 8662
+COCO_val2014_000000246951.jpg 8663 8684
+COCO_val2014_000000246963.jpg 8685 8713
+COCO_val2014_000000246968.jpg 8714 8718
+COCO_val2014_000000115898.jpg 8719 8720
+COCO_val2014_000000378048.jpg 8721 8750
+COCO_val2014_000000115912.jpg 8751 8758
+COCO_val2014_000000182805.jpg 8759 8766
+COCO_val2014_000000115924.jpg 8767 8769
+COCO_val2014_000000115930.jpg 8770 8818
+COCO_val2014_000000562519.jpg 8819 8824
+COCO_val2014_000000509158.jpg 8825 8839
+COCO_val2014_000000378096.jpg 8840 8844
+COCO_val2014_000000378116.jpg 8845 8847
+COCO_val2014_000000509192.jpg 8848 8849
+COCO_val2014_000000509194.jpg 8850 8853
+COCO_val2014_000000368855.jpg 8854 8861
+COCO_val2014_000000378126.jpg 8862 8868
+COCO_val2014_000000247057.jpg 8869 8898
+COCO_val2014_000000378134.jpg 8899 8958
+COCO_val2014_000000378147.jpg 8959 8961
+COCO_val2014_000000509223.jpg 8962 8963
+COCO_val2014_000000509227.jpg 8964 8965
+COCO_val2014_000000116017.jpg 8966 8971
+COCO_val2014_000000378163.jpg 8972 8983
+COCO_val2014_000000116023.jpg 8984 8988
+COCO_val2014_000000378169.jpg 8989 8992
+COCO_val2014_000000200267.jpg 8993 9066
+COCO_val2014_000000116037.jpg 9067 9104
+COCO_val2014_000000116046.jpg 9105 9110
+COCO_val2014_000000259640.jpg 9111 9148
+COCO_val2014_000000378204.jpg 9149 9160
+COCO_val2014_000000116061.jpg 9161 9203
+COCO_val2014_000000116067.jpg 9204 9217
+COCO_val2014_000000116068.jpg 9218 9230
+COCO_val2014_000000247141.jpg 9231 9315
+COCO_val2014_000000116083.jpg 9316 9391
+COCO_val2014_000000116096.jpg 9392 9470
+COCO_val2014_000000063040.jpg 9471 9526
+COCO_val2014_000000378244.jpg 9527 9530
+COCO_val2014_000000509319.jpg 9531 9536
+COCO_val2014_000000247179.jpg 9537 9616
+COCO_val2014_000000116132.jpg 9617 9653
+COCO_val2014_000000116133.jpg 9654 9671
+COCO_val2014_000000247206.jpg 9672 9680
+COCO_val2014_000000543644.jpg 9681 9684
+COCO_val2014_000000378284.jpg 9685 9696
+COCO_val2014_000000247216.jpg 9697 9700
+COCO_val2014_000000116149.jpg 9701 9720
+COCO_val2014_000000509366.jpg 9721 9722
+COCO_val2014_000000116173.jpg 9723 9735
+COCO_val2014_000000247259.jpg 9736 9753
+COCO_val2014_000000378334.jpg 9754 9760
+COCO_val2014_000000247264.jpg 9761 9766
+COCO_val2014_000000116202.jpg 9767 9787
+COCO_val2014_000000378347.jpg 9788 9794
+COCO_val2014_000000116208.jpg 9795 9809
+COCO_val2014_000000281512.jpg 9810 9828
+COCO_val2014_000000247285.jpg 9829 9836
+COCO_val2014_000000247306.jpg 9837 9897
+COCO_val2014_000000509451.jpg 9898 9924
+COCO_val2014_000000509459.jpg 9925 9926
+COCO_val2014_000000247317.jpg 9927 9943
+COCO_val2014_000000509471.jpg 9944 9947
+COCO_val2014_000000116261.jpg 9948 9960
+COCO_val2014_000000509497.jpg 9961 9975
+COCO_val2014_000000509514.jpg 9976 9994
+COCO_val2014_000000247378.jpg 9995 10147
+COCO_val2014_000000509526.jpg 10148 10160
+COCO_val2014_000000509536.jpg 10161 10170
+COCO_val2014_000000378467.jpg 10171 10211
+COCO_val2014_000000543676.jpg 10212 10227
+COCO_val2014_000000378482.jpg 10228 10242
+COCO_val2014_000000116341.jpg 10243 10254
+COCO_val2014_000000194153.jpg 10255 10316
+COCO_val2014_000000116354.jpg 10317 10331
+COCO_val2014_000000378502.jpg 10332 10336
+COCO_val2014_000000509577.jpg 10337 10339
+COCO_val2014_000000247438.jpg 10340 10368
+COCO_val2014_000000378515.jpg 10369 10419
+COCO_val2014_000000509589.jpg 10420 10478
+COCO_val2014_000000509590.jpg 10479 10481
+COCO_val2014_000000116377.jpg 10482 10500
+COCO_val2014_000000172315.jpg 10501 10502
+COCO_val2014_000000116389.jpg 10503 10510
+COCO_val2014_000000378538.jpg 10511 10529
+COCO_val2014_000000247474.jpg 10530 10535
+COCO_val2014_000000116405.jpg 10536 10554
+COCO_val2014_000000247484.jpg 10555 10563
+COCO_val2014_000000378561.jpg 10564 10568
+COCO_val2014_000000116434.jpg 10569 10572
+COCO_val2014_000000116439.jpg 10573 10581
+COCO_val2014_000000247519.jpg 10582 10599
+COCO_val2014_000000116455.jpg 10600 10614
+COCO_val2014_000000291936.jpg 10615 10663
+COCO_val2014_000000116466.jpg 10664 10671
+COCO_val2014_000000378614.jpg 10672 10694
+COCO_val2014_000000509695.jpg 10695 10706
+COCO_val2014_000000378652.jpg 10707 10712
+COCO_val2014_000000378657.jpg 10713 10787
+COCO_val2014_000000378658.jpg 10788 10806
+COCO_val2014_000000247587.jpg 10807 10830
+COCO_val2014_000000116517.jpg 10831 10859
+COCO_val2014_000000378667.jpg 10860 10861
+COCO_val2014_000000378673.jpg 10862 10985
+COCO_val2014_000000172342.jpg 10986 11040
+COCO_val2014_000000509766.jpg 11041 11114
+COCO_val2014_000000247625.jpg 11115 11143
+COCO_val2014_000000247639.jpg 11144 11147
+COCO_val2014_000000509786.jpg 11148 11246
+COCO_val2014_000000116574.jpg 11247 11249
+COCO_val2014_000000378727.jpg 11250 11257
+COCO_val2014_000000509811.jpg 11258 11269
+COCO_val2014_000000509826.jpg 11270 11293
+COCO_val2014_000000247692.jpg 11294 11331
+COCO_val2014_000000247714.jpg 11332 11354
+COCO_val2014_000000247720.jpg 11355 11364
+COCO_val2014_000000509867.jpg 11365 11366
+COCO_val2014_000000509891.jpg 11367 11369
+COCO_val2014_000000378823.jpg 11370 11374
+COCO_val2014_000000378825.jpg 11375 11397
+COCO_val2014_000000378831.jpg 11398 11401
+COCO_val2014_000000116696.jpg 11402 11413
+COCO_val2014_000000247782.jpg 11414 11419
+COCO_val2014_000000116712.jpg 11420 11424
+COCO_val2014_000000247788.jpg 11425 11440
+COCO_val2014_000000247790.jpg 11441 11535
+COCO_val2014_000000378873.jpg 11536 11571
+COCO_val2014_000000247808.jpg 11572 11613
+COCO_val2014_000000259755.jpg 11614 11636
+COCO_val2014_000000378894.jpg 11637 11645
+COCO_val2014_000000247839.jpg 11646 11718
+COCO_val2014_000000247840.jpg 11719 11751
+COCO_val2014_000000378928.jpg 11752 11767
+COCO_val2014_000000378940.jpg 11768 11771
+COCO_val2014_000000378948.jpg 11772 11782
+COCO_val2014_000000378962.jpg 11783 11916
diff --git a/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/score-proposals.txt b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/score-proposals.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d1c1bfb1ca548b2e313a4c33eb1a26203b8d1050
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/score-proposals.txt
@@ -0,0 +1,11916 @@
+0.999492883682
+0.993960082531
+0.479738622904
+0.324869394302
+0.219188168645
+0.16854262352
+0.110285542905
+0.0858747214079
+0.995599746704
+0.989841520786
+0.679940402508
+0.504856705666
+0.475200980902
+0.471870332956
+0.447865277529
+0.429018259048
+0.337190419436
+0.319637835026
+0.271794199944
+0.222486406565
+0.203339353204
+0.175188466907
+0.130425557494
+0.119351573288
+0.117693811655
+0.114823028445
+0.106032192707
+0.100345753133
+0.0978375449777
+0.0905253067613
+0.0899967998266
+0.0833349823952
+0.0745534002781
+0.0715349018574
+0.0648096203804
+0.0647063031793
+0.0617409572005
+0.0608367286623
+0.981398761272
+0.964637994766
+0.95433318615
+0.949445486069
+0.925107717514
+0.907492876053
+0.842338621616
+0.655192911625
+0.630225241184
+0.621461749077
+0.572391629219
+0.51968318224
+0.511017620564
+0.502350449562
+0.493378341198
+0.455660045147
+0.412498384714
+0.385131657124
+0.375502943993
+0.374631553888
+0.360924810171
+0.335126042366
+0.324225634336
+0.32314786315
+0.307047903538
+0.292191803455
+0.274905949831
+0.264714390039
+0.260394543409
+0.242708534002
+0.237473592162
+0.234475180507
+0.232327207923
+0.200783759356
+0.194877251983
+0.173600584269
+0.169051364064
+0.162651211023
+0.159774616361
+0.158850267529
+0.149250462651
+0.144410803914
+0.142088830471
+0.136787518859
+0.134062200785
+0.127555191517
+0.123396776617
+0.111541137099
+0.108107566833
+0.105781108141
+0.105175063014
+0.104316711426
+0.102937452495
+0.0992618575692
+0.0952050089836
+0.0940507426858
+0.0885497555137
+0.0859985351562
+0.0851188972592
+0.0843536779284
+0.0799091234803
+0.0740433260798
+0.0727317929268
+0.0708644911647
+0.0681031271815
+0.0668971091509
+0.0643133968115
+0.0630158260465
+0.0620795227587
+0.0601776540279
+0.999656438828
+0.998913526535
+0.995190382004
+0.34229773283
+0.287741363049
+0.255084246397
+0.14169678092
+0.107619561255
+0.0786949321628
+0.0747418403625
+0.074281655252
+0.0613623894751
+0.999863862991
+0.324975907803
+0.229890659451
+0.153991982341
+0.152483135462
+0.131618365645
+0.126892864704
+0.0763252526522
+0.0650265663862
+0.989799380302
+0.941025257111
+0.935985803604
+0.925374448299
+0.833952128887
+0.805228173733
+0.778729081154
+0.705790638924
+0.505927324295
+0.423516482115
+0.421965003014
+0.42009305954
+0.330459833145
+0.298183083534
+0.291310250759
+0.263179302216
+0.248358771205
+0.202742278576
+0.200076788664
+0.193008795381
+0.16475199163
+0.163366854191
+0.154088407755
+0.144210129976
+0.142992377281
+0.102379836142
+0.0962139293551
+0.0925681740046
+0.089814260602
+0.0796467363834
+0.0782985389233
+0.0711481124163
+0.0698980614543
+0.0649924352765
+0.999777138233
+0.99935233593
+0.998615980148
+0.81040841341
+0.319595783949
+0.301922947168
+0.294586539268
+0.229932501912
+0.179931282997
+0.141108438373
+0.111675545573
+0.0926999226213
+0.996788740158
+0.996013045311
+0.336115509272
+0.25322881341
+0.18036198616
+0.105847723782
+0.0795667916536
+0.999674916267
+0.463505893946
+0.276579022408
+0.233868762851
+0.157210886478
+0.139014154673
+0.111833527684
+0.0806431099772
+0.0675158724189
+0.0601982213557
+0.999973893166
+0.225755929947
+0.998042583466
+0.240756064653
+0.114970505238
+0.108133591712
+0.0684409365058
+0.0640670210123
+0.99909734726
+0.798950254917
+0.488475739956
+0.407939553261
+0.370860040188
+0.345638781786
+0.33371797204
+0.319134652615
+0.307910740376
+0.281438022852
+0.250035613775
+0.240205734968
+0.217588350177
+0.176003932953
+0.149512767792
+0.134568363428
+0.130587235093
+0.127596646547
+0.120341867208
+0.1199298352
+0.101712338626
+0.0995369926095
+0.099075242877
+0.0975799933076
+0.0945169553161
+0.0833776518703
+0.0797591358423
+0.075694039464
+0.0722997933626
+0.0691524669528
+0.0687937960029
+0.0684566572309
+0.995439708233
+0.457822352648
+0.278776347637
+0.260652095079
+0.242001637816
+0.230160549283
+0.118752092123
+0.0946052670479
+0.0868303403258
+0.072974845767
+0.983536660671
+0.962505400181
+0.939424097538
+0.930523097515
+0.824349284172
+0.819419682026
+0.811243057251
+0.70593971014
+0.620759367943
+0.530036866665
+0.425242543221
+0.415767937899
+0.342079490423
+0.332208961248
+0.294696509838
+0.280615031719
+0.269890576601
+0.261995077133
+0.251514792442
+0.237717136741
+0.225803628564
+0.209254652262
+0.188073128462
+0.171397969127
+0.167952403426
+0.165840357542
+0.143090963364
+0.132051944733
+0.130775392056
+0.117600671947
+0.112533979118
+0.111871100962
+0.109801247716
+0.10410939157
+0.09575997293
+0.0954454690218
+0.0919766947627
+0.0809513404965
+0.0785838589072
+0.0772199481726
+0.0690419971943
+0.0665116533637
+0.0647971779108
+0.0616164878011
+0.990408539772
+0.227840378881
+0.999985218048
+0.485865861177
+0.482521414757
+0.385961472988
+0.337533205748
+0.176351770759
+0.16250705719
+0.134380280972
+0.115335538983
+0.102839931846
+0.0647527873516
+0.998914003372
+0.924011230469
+0.8384770751
+0.703737795353
+0.306126892567
+0.293333053589
+0.274674743414
+0.239137932658
+0.234545946121
+0.192100316286
+0.17526294291
+0.158667698503
+0.114992663264
+0.106630004942
+0.0824557095766
+0.0734141990542
+0.0611515901983
+0.99883466959
+0.931670844555
+0.698832690716
+0.505997061729
+0.405928492546
+0.316452503204
+0.313364207745
+0.293478876352
+0.210044026375
+0.174378156662
+0.152941703796
+0.130912616849
+0.116683937609
+0.109798550606
+0.0824980959296
+0.0743907615542
+0.0726513117552
+0.0707300156355
+0.0629474669695
+0.999614834785
+0.999296307564
+0.989449679852
+0.977931559086
+0.966331422329
+0.8743095994
+0.42717140913
+0.376376777887
+0.292847275734
+0.243784248829
+0.240463957191
+0.237770870328
+0.203233003616
+0.172418624163
+0.158074364066
+0.104111179709
+0.103559553623
+0.0840974226594
+0.0840451642871
+0.078673325479
+0.0752383098006
+0.0669889599085
+0.061822719872
+0.999636173248
+0.999489665031
+0.998015880585
+0.994870066643
+0.977380633354
+0.968475162983
+0.914325773716
+0.874566078186
+0.858541727066
+0.602037250996
+0.488696634769
+0.451988309622
+0.399621874094
+0.353223264217
+0.348430663347
+0.328525006771
+0.323260307312
+0.322763055563
+0.302256703377
+0.272081583738
+0.257825613022
+0.242076769471
+0.229551255703
+0.222455278039
+0.217568591237
+0.207020252943
+0.174569636583
+0.16757145524
+0.163806036115
+0.148824214935
+0.147089123726
+0.142588600516
+0.139694094658
+0.13809235394
+0.123314812779
+0.121277667582
+0.115410208702
+0.115259580314
+0.112911939621
+0.097537688911
+0.0972633957863
+0.0970381498337
+0.0931109786034
+0.0818825811148
+0.0802644118667
+0.0732785463333
+0.0722855180502
+0.0684220045805
+0.0641431957483
+0.0639233216643
+0.0626135468483
+0.062176078558
+0.0618688613176
+0.0612218491733
+0.99969625473
+0.941518425941
+0.366980135441
+0.21025967598
+0.0985209941864
+0.0950182378292
+0.0818398892879
+0.931773841381
+0.895082175732
+0.431939303875
+0.392440617085
+0.277659088373
+0.268109887838
+0.220120340586
+0.184832930565
+0.16296876967
+0.155434504151
+0.143514439464
+0.117978766561
+0.101316392422
+0.0991503372788
+0.0888557136059
+0.0833457633853
+0.0808531418443
+0.0782108157873
+0.0655632913113
+0.994079828262
+0.989101946354
+0.986889302731
+0.969659626484
+0.72717320919
+0.642631649971
+0.602401077747
+0.327921837568
+0.300226122141
+0.297737956047
+0.268540412188
+0.236945062876
+0.156428799033
+0.139572903514
+0.126503065228
+0.108452051878
+0.095074236393
+0.0744049027562
+0.99992442131
+0.904516279697
+0.842290878296
+0.764649033546
+0.518767595291
+0.456234961748
+0.408291846514
+0.38108754158
+0.337718993425
+0.331471681595
+0.268526494503
+0.239610224962
+0.173294886947
+0.156068578362
+0.155905470252
+0.14615252614
+0.11888179183
+0.118688203394
+0.114289045334
+0.107454314828
+0.102681897581
+0.093857601285
+0.0771646499634
+0.0690673440695
+0.0657695233822
+0.984160125256
+0.292255222797
+0.100073628128
+0.0868779346347
+0.0635081008077
+0.999417543411
+0.99934309721
+0.930694997311
+0.689022421837
+0.669366657734
+0.584827005863
+0.552331089973
+0.289371907711
+0.280333012342
+0.271883577108
+0.250584661961
+0.23355832696
+0.218224972486
+0.215483397245
+0.198920145631
+0.174808561802
+0.174693733454
+0.171294555068
+0.170481160283
+0.144210860133
+0.135660216212
+0.116033531725
+0.114016167819
+0.112052559853
+0.0904293805361
+0.0767554193735
+0.075451053679
+0.0725498497486
+0.0686912387609
+0.0649528056383
+0.0649490058422
+0.0620482377708
+0.0612104348838
+0.0606903657317
+0.997620403767
+0.860514223576
+0.334949165583
+0.164575979114
+0.0942285358906
+0.98776614666
+0.94780766964
+0.692370951176
+0.496276915073
+0.356452703476
+0.273326128721
+0.237490713596
+0.20630645752
+0.144201397896
+0.0926714092493
+0.0839046314359
+0.0649424940348
+0.997858822346
+0.94626647234
+0.937799155712
+0.753734946251
+0.731554210186
+0.656129300594
+0.5294444561
+0.51907813549
+0.381039589643
+0.360713392496
+0.289949119091
+0.285999894142
+0.269170761108
+0.268323332071
+0.259371221066
+0.25302913785
+0.202936723828
+0.178656071424
+0.16711884737
+0.155997380614
+0.155294418335
+0.149529248476
+0.146859303117
+0.139491945505
+0.122604131699
+0.11899433285
+0.106275327504
+0.0952298492193
+0.0927046984434
+0.0907006263733
+0.08807361871
+0.087392732501
+0.0825712680817
+0.0783201381564
+0.0782657191157
+0.0742919072509
+0.0712554976344
+0.0657785385847
+0.0617483705282
+0.999415278435
+0.250952929258
+0.248517647386
+0.184083074331
+0.122986279428
+0.107423953712
+0.092696711421
+0.997085273266
+0.395976006985
+0.297716200352
+0.271473228931
+0.182642042637
+0.109248168766
+0.0891480892897
+0.0611597597599
+0.999190509319
+0.369652718306
+0.304084807634
+0.260237276554
+0.259710907936
+0.200951427221
+0.167235076427
+0.159367650747
+0.104638837278
+0.0877899378538
+0.0775552019477
+0.0684577450156
+0.0673224106431
+0.0651181712747
+0.91885727644
+0.512119352818
+0.251051098108
+0.170556426048
+0.166682124138
+0.111889407039
+0.0718964040279
+0.0606993027031
+0.997902274132
+0.963533401489
+0.954179108143
+0.709474265575
+0.42558068037
+0.328569769859
+0.268064826727
+0.256333082914
+0.240771949291
+0.168359160423
+0.132161483169
+0.122721597552
+0.103087127209
+0.0925750359893
+0.0859326198697
+0.997896075249
+0.97813808918
+0.971520662308
+0.966816842556
+0.845091879368
+0.5651730299
+0.513645887375
+0.433256059885
+0.315559446812
+0.296288102865
+0.256476342678
+0.234913155437
+0.229334950447
+0.21348528564
+0.178671494126
+0.15715636313
+0.145606577396
+0.144778728485
+0.121749743819
+0.107191942632
+0.106215760112
+0.0875644758344
+0.0754200741649
+0.0610868148506
+0.996561348438
+0.992629289627
+0.971422851086
+0.915516257286
+0.87067937851
+0.866195738316
+0.782382011414
+0.76263320446
+0.739094674587
+0.662936329842
+0.642497122288
+0.638236999512
+0.632567942142
+0.557490110397
+0.500993907452
+0.481478303671
+0.395501047373
+0.359540164471
+0.352965503931
+0.318840056658
+0.298661351204
+0.284852683544
+0.283305794001
+0.268289595842
+0.266704946756
+0.258686155081
+0.251927345991
+0.244326293468
+0.22163978219
+0.218729987741
+0.203385680914
+0.194525927305
+0.185075059533
+0.171849086881
+0.167434990406
+0.164294213057
+0.157387986779
+0.151329323649
+0.141817077994
+0.140281766653
+0.132790967822
+0.12866191566
+0.109361216426
+0.100093506277
+0.0976850986481
+0.0848641395569
+0.0843716338277
+0.0789872482419
+0.0774397701025
+0.0678279921412
+0.0654214397073
+0.0647762417793
+0.0646055564284
+0.0645006671548
+0.064126573503
+0.0607352815568
+0.999895453453
+0.242578521371
+0.999136388302
+0.308304429054
+0.158898040652
+0.119613327086
+0.101486437023
+0.0920139551163
+0.0842799693346
+0.069587662816
+0.999378442764
+0.288056880236
+0.085576929152
+0.997902989388
+0.992656290531
+0.978101015091
+0.970024526119
+0.969297945499
+0.845023036003
+0.812908291817
+0.777108669281
+0.593415796757
+0.527406036854
+0.486854940653
+0.42415201664
+0.393617123365
+0.383050918579
+0.357428729534
+0.335975885391
+0.333173364401
+0.329146772623
+0.318886816502
+0.296545118093
+0.280442208052
+0.250436395407
+0.238707110286
+0.23426283896
+0.224724411964
+0.212407022715
+0.206477478147
+0.176500573754
+0.176125600934
+0.165311202407
+0.165166631341
+0.163465619087
+0.156499862671
+0.152207940817
+0.149408072233
+0.133821219206
+0.130579739809
+0.128987982869
+0.120002299547
+0.118584170938
+0.102604061365
+0.102106079459
+0.0994338020682
+0.0967155471444
+0.0958890095353
+0.0873774066567
+0.0815375670791
+0.080139786005
+0.0799154862761
+0.0696268081665
+0.0680142715573
+0.0639838501811
+0.0638411939144
+0.985350728035
+0.97742664814
+0.973573029041
+0.968596875668
+0.960185050964
+0.895330250263
+0.881959974766
+0.853618919849
+0.849450826645
+0.809964537621
+0.795149028301
+0.698823034763
+0.664999783039
+0.66082072258
+0.607843339443
+0.599546909332
+0.582334697247
+0.579861760139
+0.54010283947
+0.518515408039
+0.488883256912
+0.472137123346
+0.453472465277
+0.441994220018
+0.421997189522
+0.415158361197
+0.388488233089
+0.368738472462
+0.340345442295
+0.328722774982
+0.313123464584
+0.31243494153
+0.306628912687
+0.304995268583
+0.2935141325
+0.283271968365
+0.278672397137
+0.275063961744
+0.266076177359
+0.260969012976
+0.244764357805
+0.240690752864
+0.236080378294
+0.229984566569
+0.220584779978
+0.218644157052
+0.211633488536
+0.20887580514
+0.203417152166
+0.202942758799
+0.198087409139
+0.196844890714
+0.190384328365
+0.185013025999
+0.182814270258
+0.177647605538
+0.17329762876
+0.168064042926
+0.164772123098
+0.158749267459
+0.15697003901
+0.146022245288
+0.139997079968
+0.137533098459
+0.136277467012
+0.130814239383
+0.126145422459
+0.12098389864
+0.120112270117
+0.118278123438
+0.1170181036
+0.116369172931
+0.11177251488
+0.108297646046
+0.106844194233
+0.103673391044
+0.102139174938
+0.101913690567
+0.0995773226023
+0.0987084433436
+0.0980582013726
+0.0976958945394
+0.0957340970635
+0.0915343686938
+0.0884331092238
+0.086019307375
+0.0837795138359
+0.0824421718717
+0.0785977840424
+0.075908370316
+0.0727732405066
+0.0724786072969
+0.0722603425384
+0.0709447935224
+0.0679260566831
+0.0674071833491
+0.063802704215
+0.0627604722977
+0.0611408613622
+0.0611251890659
+0.0608690083027
+0.0604504086077
+0.999858140945
+0.999626398087
+0.974081933498
+0.570031106472
+0.441846460104
+0.234560161829
+0.217142626643
+0.152443349361
+0.134880885482
+0.0919810831547
+0.999822199345
+0.22064216435
+0.179285764694
+0.958132684231
+0.877983927727
+0.39924249053
+0.381956368685
+0.334272861481
+0.297574073076
+0.153631985188
+0.134273782372
+0.111169353127
+0.101520597935
+0.0828640833497
+0.0813899338245
+0.0662650242448
+0.996429264545
+0.979345798492
+0.958068370819
+0.954420089722
+0.948219299316
+0.934643864632
+0.928759515285
+0.899920761585
+0.885222256184
+0.877951085567
+0.819386184216
+0.815837562084
+0.758064389229
+0.744569063187
+0.727022409439
+0.723778665066
+0.712464690208
+0.701499402523
+0.691516637802
+0.632513940334
+0.630814671516
+0.590766131878
+0.582069635391
+0.558253109455
+0.536061227322
+0.532858371735
+0.497314482927
+0.472345858812
+0.46086359024
+0.438421547413
+0.431421726942
+0.412772268057
+0.398876190186
+0.36781078577
+0.35504424572
+0.339647740126
+0.329151779413
+0.323749244213
+0.313679993153
+0.308615297079
+0.29049372673
+0.289771288633
+0.273077517748
+0.270049214363
+0.268131583929
+0.266949564219
+0.264880478382
+0.255761444569
+0.248623535037
+0.245730534196
+0.236677855253
+0.23274359107
+0.227368876338
+0.222898304462
+0.218956843019
+0.215685412288
+0.210810422897
+0.200572043657
+0.197497531772
+0.196024268866
+0.191777303815
+0.188506633043
+0.186768978834
+0.185102567077
+0.175270840526
+0.169820964336
+0.169180005789
+0.168613806367
+0.166576102376
+0.164480209351
+0.164132803679
+0.16324557364
+0.159761160612
+0.149807050824
+0.13819038868
+0.13745367527
+0.132722556591
+0.131717145443
+0.127812847495
+0.12651924789
+0.125647887588
+0.125364467502
+0.123768381774
+0.121427692473
+0.119086928666
+0.114504985511
+0.111253783107
+0.110566273332
+0.110158517957
+0.109492145479
+0.105661772192
+0.0991558656096
+0.0949034839869
+0.090548619628
+0.0848559960723
+0.0819426700473
+0.0812029093504
+0.0805972963572
+0.080316901207
+0.0776076093316
+0.0771791264415
+0.0769562423229
+0.0749591812491
+0.0739887878299
+0.0712219104171
+0.0708383619785
+0.0703931227326
+0.0700512528419
+0.0693789944053
+0.0661187469959
+0.0655283108354
+0.0649004131556
+0.0646634548903
+0.064565308392
+0.0645508468151
+0.0637779831886
+0.998092830181
+0.986626327038
+0.986567616463
+0.985102653503
+0.982052624226
+0.980285942554
+0.976705729961
+0.97459000349
+0.971032977104
+0.964682102203
+0.960731744766
+0.956911504269
+0.939833700657
+0.927844583988
+0.897799372673
+0.893734514713
+0.790211439133
+0.703121721745
+0.576869308949
+0.551661431789
+0.351305663586
+0.333526164293
+0.316975772381
+0.303974270821
+0.292871147394
+0.277671962976
+0.274035334587
+0.245356887579
+0.242917329073
+0.230936229229
+0.221115663648
+0.215599000454
+0.214189589024
+0.210386678576
+0.209544911981
+0.199479088187
+0.189695969224
+0.181456938386
+0.180157572031
+0.163496226072
+0.153024584055
+0.146820738912
+0.145633682609
+0.136604651809
+0.134815603495
+0.134453058243
+0.132449910045
+0.128727793694
+0.128320142627
+0.11927742511
+0.102699317038
+0.0949502065778
+0.0937742367387
+0.092089690268
+0.0799251198769
+0.0729037746787
+0.0724332779646
+0.0717890262604
+0.0689349770546
+0.068635687232
+0.0679918900132
+0.0652565658092
+0.0649015232921
+0.0623659230769
+0.0608515068889
+0.999870300293
+0.909138441086
+0.516985177994
+0.456896394491
+0.330804258585
+0.223818168044
+0.143922314048
+0.122931227088
+0.119309209287
+0.119048945606
+0.0687326937914
+0.997335016727
+0.990597307682
+0.928604364395
+0.819329023361
+0.423085272312
+0.298981338739
+0.272785127163
+0.271952450275
+0.249644711614
+0.15911115706
+0.145287483931
+0.131897732615
+0.117681242526
+0.094081170857
+0.0879610553384
+0.0679850205779
+0.0672641843557
+0.998700857162
+0.989446878433
+0.987874448299
+0.96109598875
+0.950961649418
+0.941874563694
+0.935863435268
+0.883387207985
+0.88171505928
+0.879160165787
+0.823961079121
+0.761780083179
+0.757166683674
+0.655046880245
+0.587436139584
+0.574445962906
+0.539202451706
+0.505134284496
+0.487393766642
+0.474951863289
+0.421707183123
+0.403331637383
+0.330541610718
+0.320432335138
+0.316513776779
+0.304258644581
+0.291089177132
+0.289544641972
+0.285942345858
+0.268391668797
+0.250951498747
+0.236489251256
+0.226216763258
+0.222792685032
+0.219109237194
+0.215087294579
+0.209180861712
+0.207587584853
+0.198238894343
+0.193663358688
+0.190253868699
+0.186268299818
+0.18331220746
+0.168563574553
+0.146872743964
+0.146271541715
+0.144683018327
+0.125954374671
+0.117547154427
+0.112334020436
+0.101358279586
+0.0930288881063
+0.0885026156902
+0.0827558115125
+0.0789773538709
+0.0788442716002
+0.078642860055
+0.0780586153269
+0.0774497240782
+0.0765702351928
+0.0755276232958
+0.0693253427744
+0.0688149482012
+0.0685828477144
+0.0656614676118
+0.0644745156169
+0.061588935554
+0.0603139251471
+0.999784171581
+0.403836071491
+0.309445679188
+0.160096928477
+0.0713572055101
+0.9808639884
+0.536068499088
+0.156737297773
+0.101507924497
+0.0728682130575
+0.0661017820239
+0.998502016068
+0.997744321823
+0.99670958519
+0.992138445377
+0.9913828969
+0.990440130234
+0.985786318779
+0.98450666666
+0.976729869843
+0.974200546741
+0.96973747015
+0.957668602467
+0.956502020359
+0.939894795418
+0.922024667263
+0.900501668453
+0.894863903522
+0.850771844387
+0.72757267952
+0.68471455574
+0.419738441706
+0.408308535814
+0.40307700634
+0.394256860018
+0.387193202972
+0.381077438593
+0.356318682432
+0.32621127367
+0.325574189425
+0.315113633871
+0.292171567678
+0.284082144499
+0.268517911434
+0.266067236662
+0.260642021894
+0.243273302913
+0.241387933493
+0.235496759415
+0.231470033526
+0.226916193962
+0.22383274138
+0.223404064775
+0.211687162519
+0.15698261559
+0.144021496177
+0.129662036896
+0.128030687571
+0.123170860112
+0.112105436623
+0.0946203395724
+0.0912208333611
+0.0906088650227
+0.08779361099
+0.0873223617673
+0.0832078903913
+0.0824867635965
+0.0720259845257
+0.0683835074306
+0.0676698684692
+0.0666104108095
+0.061387848109
+0.061332475394
+0.0605092570186
+0.999552667141
+0.235602304339
+0.22608935833
+0.183458417654
+0.0638798549771
+0.999914884567
+0.944484949112
+0.719243049622
+0.255686193705
+0.228892609477
+0.152473479509
+0.14050424099
+0.12697994709
+0.106847785413
+0.0836771503091
+0.0710156336427
+0.998929917812
+0.992641985416
+0.967503726482
+0.96478164196
+0.888246715069
+0.831703186035
+0.748587012291
+0.638082504272
+0.631500124931
+0.63052213192
+0.576804220676
+0.430203914642
+0.422913610935
+0.327330976725
+0.306735634804
+0.283916294575
+0.264527410269
+0.23200725019
+0.220501899719
+0.198018670082
+0.192027494311
+0.183891996741
+0.167097494006
+0.163548901677
+0.16161224246
+0.160704061389
+0.149015516043
+0.141897112131
+0.140964120626
+0.120860792696
+0.117994785309
+0.112248688936
+0.093765899539
+0.0921326130629
+0.087596014142
+0.084868952632
+0.0765975639224
+0.0718270018697
+0.0680474117398
+0.067190207541
+0.0663392096758
+0.0644343942404
+0.0644332319498
+0.0638081952929
+0.0634312778711
+0.0630827620625
+0.984358906746
+0.732030034065
+0.671958804131
+0.516137003899
+0.400481492281
+0.368468940258
+0.279494196177
+0.235217243433
+0.221134275198
+0.175252646208
+0.154028892517
+0.114927172661
+0.0954927876592
+0.0913179293275
+0.0822376385331
+0.999918937683
+0.970668196678
+0.476957172155
+0.322649091482
+0.270368784666
+0.115955688059
+0.088431365788
+0.0760695263743
+0.961236715317
+0.939968526363
+0.914159417152
+0.912049174309
+0.882859766483
+0.878688156605
+0.871318876743
+0.853021919727
+0.84502518177
+0.835968136787
+0.832006514072
+0.817732036114
+0.814195871353
+0.772458076477
+0.719515323639
+0.71319937706
+0.708887338638
+0.648909091949
+0.643209755421
+0.598719120026
+0.565223455429
+0.562458217144
+0.556444406509
+0.509981155396
+0.474364846945
+0.448702573776
+0.427006453276
+0.406702280045
+0.386948674917
+0.384247153997
+0.343856751919
+0.330059826374
+0.319582164288
+0.317124992609
+0.305292665958
+0.302733123302
+0.302618414164
+0.299988955259
+0.297606915236
+0.297292739153
+0.289043694735
+0.278998851776
+0.275473594666
+0.273885190487
+0.250618845224
+0.227123975754
+0.225500449538
+0.210000485182
+0.20928953588
+0.208426624537
+0.20802564919
+0.202931091189
+0.199252277613
+0.197735309601
+0.185656189919
+0.183437883854
+0.177282899618
+0.175504282117
+0.165975466371
+0.156797677279
+0.155877739191
+0.152557641268
+0.152133896947
+0.149370193481
+0.149092838168
+0.146153882146
+0.129461705685
+0.12757268548
+0.126325875521
+0.117640562356
+0.117387458682
+0.116718687117
+0.107707224786
+0.106355257332
+0.10586155951
+0.101144760847
+0.0994608104229
+0.098688274622
+0.0965053364635
+0.0943646430969
+0.0936669036746
+0.093289077282
+0.0880585834384
+0.0876612812281
+0.0862455368042
+0.0806761458516
+0.0787023976445
+0.0769043415785
+0.0746140256524
+0.0743458792567
+0.0735867097974
+0.072234749794
+0.0720670446754
+0.0676723197103
+0.0647900551558
+0.064303971827
+0.063821323216
+0.0623247921467
+0.0622700117528
+0.06097086519
+0.999151229858
+0.998788654804
+0.986057162285
+0.915573000908
+0.860394835472
+0.823859274387
+0.767817020416
+0.697704851627
+0.661002397537
+0.533989608288
+0.514244556427
+0.447624117136
+0.374602854252
+0.371317595243
+0.366488337517
+0.32456573844
+0.319859653711
+0.306776702404
+0.277711093426
+0.245209157467
+0.226394608617
+0.194831192493
+0.190953403711
+0.184744656086
+0.180146306753
+0.17307972908
+0.17122618854
+0.169946819544
+0.165591567755
+0.145456001163
+0.126592770219
+0.117286644876
+0.11491458863
+0.109542995691
+0.109022378922
+0.105816595256
+0.102871477604
+0.091538079083
+0.0896820649505
+0.0880534946918
+0.0845179259777
+0.0829027220607
+0.0734692364931
+0.0697531253099
+0.0693785622716
+0.0691952481866
+0.0679783821106
+0.0671191811562
+0.0644898116589
+0.0625449493527
+0.848525047302
+0.757584869862
+0.592602789402
+0.45671826601
+0.441175788641
+0.436411380768
+0.216978400946
+0.17420385778
+0.172991365194
+0.163114562631
+0.155303865671
+0.140481933951
+0.121708236635
+0.109924308956
+0.107429422438
+0.104814216495
+0.100296236575
+0.0711502358317
+0.0668397247791
+0.0648447349668
+0.0608805157244
+0.998441398144
+0.986010313034
+0.754216253757
+0.327330559492
+0.285503327847
+0.254068762064
+0.233038857579
+0.224494621158
+0.204928264022
+0.176259770989
+0.15878854692
+0.153029754758
+0.144003212452
+0.123171724379
+0.10473062098
+0.104293182492
+0.100405462086
+0.0966978520155
+0.0854993239045
+0.0798303186893
+0.0728051438928
+0.0695858821273
+0.0656919926405
+0.999488949776
+0.999135553837
+0.999060213566
+0.995350956917
+0.959395945072
+0.932092249393
+0.427258998156
+0.247698172927
+0.243182644248
+0.2417884022
+0.22120256722
+0.197569847107
+0.172928199172
+0.120666049421
+0.0764563009143
+0.0605998411775
+0.997894346714
+0.996328771114
+0.947068870068
+0.789298951626
+0.721704721451
+0.57981389761
+0.444444447756
+0.429881423712
+0.317837387323
+0.306783944368
+0.276813358068
+0.257238596678
+0.213015466928
+0.188939258456
+0.166220739484
+0.141522198915
+0.136196568608
+0.135038986802
+0.130057454109
+0.109331391752
+0.102551557124
+0.0863153636456
+0.0769856646657
+0.0746278986335
+0.0726669579744
+0.0692289322615
+0.986445128918
+0.877138316631
+0.296969234943
+0.251197814941
+0.209028437734
+0.127100452781
+0.124995708466
+0.102780491114
+0.0878400877118
+0.0630679279566
+0.998360216618
+0.996891438961
+0.996819496155
+0.992800116539
+0.988880872726
+0.981363832951
+0.970982015133
+0.967247843742
+0.952856183052
+0.82020509243
+0.81507319212
+0.775345981121
+0.773203074932
+0.76171463728
+0.744431495667
+0.60289478302
+0.499960899353
+0.449743449688
+0.403355717659
+0.386890739202
+0.383161842823
+0.3802985847
+0.335433602333
+0.326487749815
+0.316613435745
+0.313662290573
+0.303182035685
+0.280266970396
+0.278901398182
+0.272723048925
+0.25584346056
+0.240782469511
+0.208597674966
+0.195776134729
+0.189354583621
+0.178054347634
+0.171536371112
+0.165226235986
+0.136164143682
+0.131536662579
+0.130551680923
+0.12157728523
+0.113888978958
+0.111252680421
+0.106897570193
+0.104641743004
+0.0941817313433
+0.0920574143529
+0.0916488096118
+0.0879059806466
+0.0847855210304
+0.0815162137151
+0.0812413170934
+0.0767783746123
+0.0736479610205
+0.0703736171126
+0.0700301975012
+0.0611623339355
+0.999948978424
+0.295387029648
+0.0990027338266
+0.0843500345945
+0.995814025402
+0.48517036438
+0.372675329447
+0.211779847741
+0.140900030732
+0.0759604126215
+0.99966275692
+0.998325169086
+0.988689601421
+0.978204846382
+0.963288605213
+0.954632878304
+0.602697610855
+0.477911859751
+0.394361197948
+0.339056223631
+0.330876171589
+0.3290835917
+0.31872433424
+0.269891530275
+0.225490272045
+0.210185557604
+0.156327188015
+0.145282328129
+0.134035989642
+0.126930177212
+0.122194387019
+0.108180209994
+0.0998326539993
+0.0943481698632
+0.084713973105
+0.0801545232534
+0.0783098116517
+0.0761912316084
+0.0708919167519
+0.0691306293011
+0.0600557625294
+0.996773302555
+0.99382185936
+0.967659175396
+0.942431688309
+0.914037406445
+0.609189629555
+0.465440988541
+0.462314218283
+0.353851288557
+0.334369689226
+0.314169496298
+0.275777220726
+0.256544619799
+0.255311191082
+0.248643219471
+0.248607158661
+0.213506400585
+0.198838919401
+0.166773095727
+0.131783515215
+0.13154001534
+0.123493291438
+0.12041272223
+0.119383059442
+0.107547998428
+0.105681687593
+0.104950636625
+0.102524854243
+0.0804539546371
+0.0730816423893
+0.0707876011729
+0.070159226656
+0.999970793724
+0.851253986359
+0.330489009619
+0.313186526299
+0.25214779377
+0.232962623239
+0.149993568659
+0.148087874055
+0.147563964128
+0.116420648992
+0.0707835555077
+0.0654265657067
+0.999879598618
+0.228803426027
+0.0721170678735
+0.996537327766
+0.609768211842
+0.338433682919
+0.144052430987
+0.118209667504
+0.973541259766
+0.765373945236
+0.742233932018
+0.528197288513
+0.436665415764
+0.303954124451
+0.233642980456
+0.221238955855
+0.1822835356
+0.12081567198
+0.0905170440674
+0.0672383233905
+0.999764144421
+0.708599746227
+0.683087706566
+0.426544874907
+0.375797688961
+0.351938068867
+0.345895558596
+0.297938287258
+0.296000361443
+0.239224493504
+0.156785845757
+0.153305083513
+0.144729286432
+0.138416305184
+0.138032868505
+0.129736199975
+0.128157973289
+0.122422866523
+0.118536651134
+0.118379153311
+0.117844633758
+0.098596483469
+0.0911683812737
+0.0887476503849
+0.0778276249766
+0.0729256346822
+0.0717860385776
+0.0698186904192
+0.0697360336781
+0.0653672069311
+0.0607799775898
+0.999997019768
+0.2637604177
+0.106153085828
+0.999384999275
+0.996282756329
+0.995554387569
+0.976472735405
+0.712992489338
+0.551137328148
+0.301937937737
+0.259456664324
+0.243244022131
+0.242410838604
+0.22410261631
+0.217033103108
+0.158823609352
+0.0966102853417
+0.092458024621
+0.0895451307297
+0.0884843915701
+0.0718254446983
+0.0687712728977
+0.99284607172
+0.980100989342
+0.900957167149
+0.460336774588
+0.343119174242
+0.314925134182
+0.297398298979
+0.170492723584
+0.150879889727
+0.147442758083
+0.139802396297
+0.116615094244
+0.0842784047127
+0.082887686789
+0.999394774437
+0.993870615959
+0.979072391987
+0.958510816097
+0.926709294319
+0.924248576164
+0.922914803028
+0.909316182137
+0.882558763027
+0.877871990204
+0.867593228817
+0.818394482136
+0.794391691685
+0.76480448246
+0.683413684368
+0.618809103966
+0.437776654959
+0.423712551594
+0.398386359215
+0.383725732565
+0.365925401449
+0.352377474308
+0.350791245699
+0.32784345746
+0.318664073944
+0.306725651026
+0.228372082114
+0.224469035864
+0.22115470469
+0.216503232718
+0.199309960008
+0.189509615302
+0.186865672469
+0.180585354567
+0.163903176785
+0.155492976308
+0.154333844781
+0.150602281094
+0.14668096602
+0.144709646702
+0.138742133975
+0.138191729784
+0.13703815639
+0.131253302097
+0.121480867267
+0.116870462894
+0.108106084168
+0.107339151204
+0.101625002921
+0.100048258901
+0.0974127873778
+0.0828539505601
+0.0800424143672
+0.0798574090004
+0.0745134726167
+0.0667054280639
+0.0657761469483
+0.0607411973178
+0.884007573128
+0.814344167709
+0.511051356792
+0.299531400204
+0.280624508858
+0.256574004889
+0.246438860893
+0.181852161884
+0.147134453058
+0.143054440618
+0.134415328503
+0.102018773556
+0.0967191085219
+0.0698435306549
+0.0689419955015
+0.0668237805367
+0.0613152012229
+0.999800026417
+0.999270379543
+0.980650007725
+0.95439940691
+0.945978283882
+0.421636641026
+0.295146465302
+0.243283838034
+0.231646180153
+0.223435997963
+0.176070138812
+0.088039830327
+0.0635640844703
+0.0604995377362
+0.999966621399
+0.314166098833
+0.0942704975605
+0.999523878098
+0.269028306007
+0.202810809016
+0.0727992653847
+0.0723398923874
+0.991573095322
+0.312597841024
+0.143228694797
+0.0963333249092
+0.0772514119744
+0.0678995922208
+0.999724805355
+0.621562421322
+0.606597483158
+0.42802092433
+0.265654444695
+0.246451660991
+0.236827716231
+0.222400605679
+0.174677908421
+0.153781116009
+0.135678485036
+0.129201963544
+0.112103924155
+0.103640504181
+0.0768901109695
+0.0679778456688
+0.0676292702556
+0.0649948343635
+0.99888163805
+0.994226276875
+0.975945591927
+0.821527183056
+0.790768384933
+0.360445171595
+0.344113171101
+0.339583128691
+0.303787082434
+0.291960507631
+0.231416001916
+0.142443478107
+0.138138234615
+0.105735436082
+0.0948120057583
+0.0843097791076
+0.0736026614904
+0.999084234238
+0.99751663208
+0.87273144722
+0.870045959949
+0.843678414822
+0.826082766056
+0.792666554451
+0.651751160622
+0.614340662956
+0.57799243927
+0.557264328003
+0.538393855095
+0.531236290932
+0.516332924366
+0.411666691303
+0.408201545477
+0.39130961895
+0.390885233879
+0.360546857119
+0.356467723846
+0.312673002481
+0.301928907633
+0.298979043961
+0.277885168791
+0.256221503019
+0.255395650864
+0.240898698568
+0.23477691412
+0.226729512215
+0.223752513528
+0.213653713465
+0.195855647326
+0.1919837147
+0.180960550904
+0.176149904728
+0.172165423632
+0.165263921022
+0.156358525157
+0.144573017955
+0.141231551766
+0.138471260667
+0.128941774368
+0.117388010025
+0.116859726608
+0.112458147109
+0.11127102375
+0.110319375992
+0.109536677599
+0.0988517031074
+0.0986483767629
+0.0912523940206
+0.0842417776585
+0.0810287445784
+0.079233944416
+0.0787686184049
+0.0751672610641
+0.0745585486293
+0.0663108825684
+0.0647831186652
+0.0637103617191
+0.0635937675834
+0.0624729432166
+0.0624453127384
+0.0618059039116
+0.060828987509
+0.0606292933226
+0.998833358288
+0.998027026653
+0.992818593979
+0.99039041996
+0.985175192356
+0.969231367111
+0.951649844646
+0.927588522434
+0.924068629742
+0.924029231071
+0.916282832623
+0.901527822018
+0.821474611759
+0.803702175617
+0.802009284496
+0.715507924557
+0.715279996395
+0.706916093826
+0.676765799522
+0.584294259548
+0.579971849918
+0.486873805523
+0.477100521326
+0.460753560066
+0.455285251141
+0.427424162626
+0.390452802181
+0.378221541643
+0.377199918032
+0.372630387545
+0.354984998703
+0.348948895931
+0.322037965059
+0.314832359552
+0.292183637619
+0.278986543417
+0.278048068285
+0.266565740108
+0.26571059227
+0.264216899872
+0.26102784276
+0.240818619728
+0.229767069221
+0.228059276938
+0.219726920128
+0.207909882069
+0.199034884572
+0.195175439119
+0.192696541548
+0.186339780688
+0.183600112796
+0.180633693933
+0.176124677062
+0.172990232706
+0.169773921371
+0.168290674686
+0.160385146737
+0.157003998756
+0.153540506959
+0.153067842126
+0.152856454253
+0.146869078279
+0.143617942929
+0.141152709723
+0.133573487401
+0.132967695594
+0.132092103362
+0.126997560263
+0.121161177754
+0.119851358235
+0.11570866406
+0.115516275167
+0.114983394742
+0.112909719348
+0.109846614301
+0.104886032641
+0.101139955223
+0.0902998298407
+0.0867500305176
+0.0854861363769
+0.0800135955215
+0.0760250687599
+0.0755273550749
+0.0744897350669
+0.0699663311243
+0.0659632459283
+0.0639220923185
+0.0632839873433
+0.063265196979
+0.0613499470055
+0.999635338783
+0.94232493639
+0.862491369247
+0.83494335413
+0.804855585098
+0.640434682369
+0.522239565849
+0.466096788645
+0.455571264029
+0.307157963514
+0.233381822705
+0.19365529716
+0.18072758615
+0.158014595509
+0.150018751621
+0.144809693098
+0.140234336257
+0.120272517204
+0.106956109405
+0.102811440825
+0.0999299734831
+0.0692918077111
+0.0669135525823
+0.0664581283927
+0.0651089996099
+0.999938964844
+0.239465415478
+0.0624406114221
+0.99989938736
+0.23088145256
+0.158267259598
+0.0674279630184
+0.0603751204908
+0.999519348145
+0.972000420094
+0.966677725315
+0.905854344368
+0.639407396317
+0.54828286171
+0.515552401543
+0.510804831982
+0.350098639727
+0.30909371376
+0.304367870092
+0.269935905933
+0.25921741128
+0.228409573436
+0.211878195405
+0.18561090529
+0.181743949652
+0.165276154876
+0.137670487165
+0.122498966753
+0.117440223694
+0.110392831266
+0.106699183583
+0.0970512479544
+0.0891740098596
+0.0852291285992
+0.0752872899175
+0.0725029855967
+0.0686039924622
+0.0663395747542
+0.996912479401
+0.995352745056
+0.955630779266
+0.948339045048
+0.438975274563
+0.314693182707
+0.262969851494
+0.246622666717
+0.212860375643
+0.188534021378
+0.162263005972
+0.103185325861
+0.0886962637305
+0.0789243727922
+0.073184452951
+0.0684161484241
+0.0644490420818
+0.987596690655
+0.973598659039
+0.86969524622
+0.845696032047
+0.682914435863
+0.609813034534
+0.571805715561
+0.562994837761
+0.511383771896
+0.501621484756
+0.45442840457
+0.399621635675
+0.359234184027
+0.346823364496
+0.345773816109
+0.322200804949
+0.320906281471
+0.305216878653
+0.304258435965
+0.302897870541
+0.279263794422
+0.272052437067
+0.264237821102
+0.242589861155
+0.216788798571
+0.213080629706
+0.212283685803
+0.2108104527
+0.208515509963
+0.205944091082
+0.205903559923
+0.205872446299
+0.191727668047
+0.163179323077
+0.1627240628
+0.156438201666
+0.152369499207
+0.150318920612
+0.150152876973
+0.1476790905
+0.135655537248
+0.1324827075
+0.131056532264
+0.130052641034
+0.125478133559
+0.121844723821
+0.116792909801
+0.115003183484
+0.113407738507
+0.112994939089
+0.11223257333
+0.109531231225
+0.106611154974
+0.105637937784
+0.103683315217
+0.101391479373
+0.0994349569082
+0.0972782447934
+0.0970510691404
+0.0936243087053
+0.0933126211166
+0.0887526795268
+0.0871261283755
+0.0858739167452
+0.0800449699163
+0.079789288342
+0.0782871991396
+0.0774666592479
+0.07491363585
+0.0732964798808
+0.0692740678787
+0.0663519725204
+0.999466240406
+0.2550740242
+0.0744996368885
+0.947721719742
+0.872089862823
+0.72296255827
+0.56947940588
+0.453172385693
+0.424482196569
+0.418372422457
+0.392047345638
+0.381979107857
+0.361397564411
+0.313496679068
+0.311955451965
+0.285129010677
+0.256483525038
+0.249919399619
+0.241721153259
+0.232460230589
+0.218532025814
+0.216701507568
+0.195242643356
+0.165638178587
+0.162037670612
+0.155851051211
+0.150907248259
+0.12022806704
+0.110991954803
+0.108817666769
+0.103278428316
+0.102052487433
+0.0969331786036
+0.0832917317748
+0.0777233019471
+0.0753358826041
+0.0732664167881
+0.0731282755733
+0.0706726387143
+0.0628674328327
+0.0625956580043
+0.999097824097
+0.984159111977
+0.495202511549
+0.388968020678
+0.332515448332
+0.225414797664
+0.20408308506
+0.167913645506
+0.106669075787
+0.105365134776
+0.0783853307366
+0.999718010426
+0.272136926651
+0.109760269523
+0.0982676297426
+0.999598920345
+0.254140555859
+0.0604672767222
+0.985642492771
+0.867252588272
+0.83300536871
+0.814552009106
+0.794310867786
+0.770018041134
+0.697976708412
+0.680191636086
+0.642426013947
+0.625288486481
+0.617250025272
+0.577726721764
+0.547453284264
+0.494452983141
+0.491671532393
+0.442434251308
+0.390026509762
+0.366916745901
+0.323798894882
+0.320360839367
+0.312212377787
+0.282553076744
+0.280955642462
+0.267035961151
+0.229852154851
+0.216525748372
+0.199405029416
+0.192440614104
+0.185120135546
+0.166628405452
+0.155397772789
+0.145825013518
+0.144653081894
+0.139330968261
+0.135729804635
+0.134784787893
+0.130278974771
+0.128228724003
+0.126666754484
+0.124562382698
+0.123832449317
+0.121996201575
+0.120598971844
+0.113457277417
+0.104193262756
+0.100250519812
+0.0973354205489
+0.0957927629352
+0.0928587019444
+0.0884056165814
+0.0834645181894
+0.0761998295784
+0.0715222358704
+0.069726459682
+0.0690330192447
+0.062686175108
+0.060595266521
+0.999508142471
+0.9982894063
+0.994869470596
+0.939218401909
+0.793253540993
+0.440464258194
+0.416591644287
+0.286947548389
+0.245645016432
+0.218556255102
+0.205952808261
+0.162640154362
+0.143430963159
+0.133487746119
+0.0977573171258
+0.0892532914877
+0.0630949139595
+0.0605812221766
+0.999902963638
+0.982903897762
+0.962769210339
+0.289933919907
+0.234480828047
+0.231147482991
+0.157841995358
+0.0691936612129
+0.0633596852422
+0.998189389706
+0.303114831448
+0.128108143806
+0.0631893351674
+0.993933498859
+0.274072945118
+0.0682247281075
+0.0636836215854
+0.989841759205
+0.39410007
+0.190112307668
+0.169500812888
+0.0671824291348
+0.995071828365
+0.985187113285
+0.824429392815
+0.814031839371
+0.658738315105
+0.525981783867
+0.498508900404
+0.319903165102
+0.291988968849
+0.271385133266
+0.252768397331
+0.249553784728
+0.165024966002
+0.156930446625
+0.131120100617
+0.120916500688
+0.0976756587625
+0.0869847908616
+0.0844496712089
+0.0710915252566
+0.066839158535
+0.0658646896482
+0.981059491634
+0.933898925781
+0.840278029442
+0.300586760044
+0.268482863903
+0.196117088199
+0.1549808532
+0.136478111148
+0.124374836683
+0.0990431234241
+0.0863495990634
+0.0825428590178
+0.99974411726
+0.995366156101
+0.945289194584
+0.916317045689
+0.884275734425
+0.742576777935
+0.737443685532
+0.730685114861
+0.550673723221
+0.527485728264
+0.468621313572
+0.437209606171
+0.417689979076
+0.413742661476
+0.32873287797
+0.317363590002
+0.292495340109
+0.281575709581
+0.250476121902
+0.250185191631
+0.248624697328
+0.238566324115
+0.228228792548
+0.226443946362
+0.223095595837
+0.221947714686
+0.214462086558
+0.203116729856
+0.186592340469
+0.18328243494
+0.162296801805
+0.152612581849
+0.149118691683
+0.136379346251
+0.135684907436
+0.12636128068
+0.113327398896
+0.112002171576
+0.104787044227
+0.104254722595
+0.101894080639
+0.0941995531321
+0.0917486101389
+0.0815635770559
+0.0801797360182
+0.0771722719073
+0.0747504085302
+0.071962043643
+0.0715929344296
+0.06872600317
+0.0686828568578
+0.0651393160224
+0.0621151477098
+0.0616709701717
+0.0614994429052
+0.061317525804
+0.995398104191
+0.989223003387
+0.986508131027
+0.960229694843
+0.923324286938
+0.909637033939
+0.888903021812
+0.856651961803
+0.721619546413
+0.583319604397
+0.555656075478
+0.51603525877
+0.449812501669
+0.424254566431
+0.387321799994
+0.363632291555
+0.360526621342
+0.340512633324
+0.301316529512
+0.286867886782
+0.282243162394
+0.267814964056
+0.265537828207
+0.254166841507
+0.25123783946
+0.249549940228
+0.212853863835
+0.212790578604
+0.205511286855
+0.193593740463
+0.166068464518
+0.162603527308
+0.159331306815
+0.155562967062
+0.144842818379
+0.137025743723
+0.132236838341
+0.115236997604
+0.102133907378
+0.099004201591
+0.095005556941
+0.0926011875272
+0.0900416821241
+0.0839807242155
+0.0839479267597
+0.0831182003021
+0.0812319144607
+0.0803785473108
+0.0803152620792
+0.0717453882098
+0.0714654326439
+0.0692609399557
+0.0687359571457
+0.06604193151
+0.0630961805582
+0.0616758614779
+0.0612098872662
+0.0603446438909
+0.551071763039
+0.355493187904
+0.127735957503
+0.113267883658
+0.99869787693
+0.997135519981
+0.997087419033
+0.981531739235
+0.962709963322
+0.902133762836
+0.896942019463
+0.880095183849
+0.841980159283
+0.71230506897
+0.649276316166
+0.621684253216
+0.603271782398
+0.576595008373
+0.562812268734
+0.537833034992
+0.508484423161
+0.495326459408
+0.473632931709
+0.46071523428
+0.434933811426
+0.427213639021
+0.36814057827
+0.347847431898
+0.326168358326
+0.321724891663
+0.309012800455
+0.307590425014
+0.303530871868
+0.297515809536
+0.294174194336
+0.265360206366
+0.251946657896
+0.237898662686
+0.233052700758
+0.218735352159
+0.211902543902
+0.207139700651
+0.205423310399
+0.203775808215
+0.185126751661
+0.178718626499
+0.155116677284
+0.153852045536
+0.149281546474
+0.148153617978
+0.1468963027
+0.143211245537
+0.136751383543
+0.136105775833
+0.129117399454
+0.128622233868
+0.125339612365
+0.124681830406
+0.12064666301
+0.116295948625
+0.109813712537
+0.095793299377
+0.0906180143356
+0.0876462608576
+0.0874714627862
+0.0873928442597
+0.0848263353109
+0.0825060233474
+0.0817610546947
+0.081176199019
+0.0772922039032
+0.0759613141418
+0.0759374350309
+0.0749160498381
+0.07334484905
+0.0698151290417
+0.0686321184039
+0.0659399703145
+0.0653813332319
+0.0614499114454
+0.0611522421241
+0.0611125305295
+0.997938811779
+0.97371327877
+0.882980704308
+0.83957016468
+0.685529887676
+0.387645989656
+0.306871235371
+0.295318454504
+0.236844241619
+0.224608361721
+0.153867214918
+0.111464679241
+0.0979059636593
+0.0901129469275
+0.0758296176791
+0.069416180253
+0.0638578310609
+0.991798043251
+0.975281774998
+0.974453568459
+0.972915172577
+0.962067961693
+0.922958016396
+0.919221937656
+0.911733627319
+0.885406911373
+0.818806827068
+0.814976632595
+0.785368144512
+0.766407966614
+0.756810128689
+0.72566306591
+0.681391716003
+0.662613332272
+0.632780849934
+0.59375333786
+0.578510582447
+0.567092299461
+0.525968015194
+0.509967029095
+0.498026192188
+0.475825279951
+0.443578571081
+0.38332349062
+0.369737535715
+0.29315212369
+0.276902675629
+0.272787719965
+0.252235889435
+0.241522744298
+0.226999849081
+0.216909587383
+0.211917087436
+0.209981441498
+0.202381372452
+0.201762676239
+0.198775053024
+0.19538988173
+0.190258458257
+0.18290103972
+0.180567249656
+0.177811890841
+0.177129507065
+0.176085621119
+0.171809598804
+0.170181930065
+0.167169451714
+0.164508596063
+0.162908092141
+0.155590817332
+0.144701614976
+0.138813957572
+0.138776555657
+0.138697400689
+0.136735782027
+0.133051991463
+0.115195833147
+0.114237055182
+0.110824204981
+0.109025888145
+0.105972476304
+0.105188973248
+0.104761511087
+0.098305799067
+0.0971888527274
+0.0963880866766
+0.0919001996517
+0.0844946354628
+0.07134591043
+0.0685607790947
+0.0676183179021
+0.0660856589675
+0.0656773597002
+0.0639887154102
+0.999923229218
+0.354945123196
+0.253288954496
+0.185291424394
+0.100215964019
+0.998217880726
+0.958414494991
+0.67045712471
+0.53784763813
+0.438871383667
+0.29115357995
+0.289249539375
+0.276562780142
+0.230681315064
+0.207341685891
+0.166574195027
+0.158164486289
+0.154147416353
+0.151328831911
+0.113640919328
+0.105682633817
+0.0970806106925
+0.0736084356904
+0.0699090063572
+0.0645034462214
+0.0633850693703
+0.981248378754
+0.974489271641
+0.925791203976
+0.901707112789
+0.890968501568
+0.886975765228
+0.875669121742
+0.837235033512
+0.803717374802
+0.746896326542
+0.671077728271
+0.598825275898
+0.594167053699
+0.565981209278
+0.556484341621
+0.529440820217
+0.462566286325
+0.458415001631
+0.449301123619
+0.407920449972
+0.374837338924
+0.351506441832
+0.345314383507
+0.339126080275
+0.315099328756
+0.306301057339
+0.290721565485
+0.278728187084
+0.263731151819
+0.249402090907
+0.233367949724
+0.227732136846
+0.226487502456
+0.216547071934
+0.193195998669
+0.185389682651
+0.177589908242
+0.169164523482
+0.161058411002
+0.155760705471
+0.14032882452
+0.137218818069
+0.136230707169
+0.134212210774
+0.130737900734
+0.126023158431
+0.122165977955
+0.11187197268
+0.10936165601
+0.103690385818
+0.102937288582
+0.100102007389
+0.0990415588021
+0.0985739678144
+0.093554534018
+0.0925473719835
+0.0917510092258
+0.0874563083053
+0.0779068768024
+0.0773418024182
+0.0720049738884
+0.0673734694719
+0.0647443458438
+0.0624483972788
+0.0615202188492
+0.999390721321
+0.27092179656
+0.12502771616
+0.101404346526
+0.0844964012504
+0.999862909317
+0.999842643738
+0.918844282627
+0.663967430592
+0.516722738743
+0.42672291398
+0.421197503805
+0.333573192358
+0.325663000345
+0.305360883474
+0.277258843184
+0.276684880257
+0.248697593808
+0.245815455914
+0.214961126447
+0.214756175876
+0.17795497179
+0.152994692326
+0.142633154988
+0.119850359857
+0.114534802735
+0.0936763212085
+0.0908334553242
+0.0904529541731
+0.087975487113
+0.0828520730138
+0.0658017247915
+0.998747944832
+0.947770416737
+0.876299202442
+0.841949760914
+0.832983255386
+0.486348986626
+0.385988771915
+0.323273926973
+0.287436634302
+0.274376004934
+0.254098564386
+0.224776223302
+0.223495364189
+0.174624815583
+0.133527055383
+0.124701343477
+0.0853236541152
+0.082740701735
+0.0742358341813
+0.0661213099957
+0.0620301812887
+0.0614072903991
+0.999799907207
+0.228506803513
+0.998525679111
+0.988032221794
+0.986261665821
+0.985079050064
+0.973870873451
+0.940362811089
+0.919757544994
+0.86805665493
+0.843167126179
+0.604645431042
+0.54724830389
+0.506655216217
+0.493300914764
+0.432817280293
+0.378002375364
+0.3537119627
+0.341719865799
+0.315242618322
+0.299833208323
+0.292147219181
+0.279280364513
+0.265987634659
+0.258858293295
+0.243629381061
+0.232145860791
+0.228915929794
+0.225424855947
+0.220686167479
+0.208621636033
+0.208113029599
+0.201096788049
+0.184495478868
+0.176418706775
+0.172579541802
+0.170396089554
+0.161462634802
+0.15377651155
+0.147575810552
+0.143008261919
+0.141997203231
+0.13986492157
+0.136891454458
+0.126320630312
+0.117974184453
+0.11507640779
+0.112597376108
+0.104336544871
+0.0978810191154
+0.0804336592555
+0.0778841078281
+0.0772632732987
+0.0762242078781
+0.0747530087829
+0.0730223432183
+0.0702525377274
+0.0659003555775
+0.0653769224882
+0.0652874782681
+0.997094154358
+0.452748835087
+0.24701410532
+0.0761813819408
+0.869754195213
+0.188872456551
+0.999495625496
+0.995098769665
+0.95416790247
+0.892586708069
+0.807850837708
+0.480310350657
+0.423093169928
+0.407716453075
+0.291425466537
+0.21536141634
+0.204057395458
+0.183758571744
+0.143380627036
+0.124237053096
+0.120032586157
+0.116329967976
+0.109942398965
+0.0993844047189
+0.0823880434036
+0.999544560909
+0.999043405056
+0.993535399437
+0.947416961193
+0.861258208752
+0.78301024437
+0.700155377388
+0.663154006004
+0.455382615328
+0.422339230776
+0.357188791037
+0.350440442562
+0.323453962803
+0.286072731018
+0.242548465729
+0.237781345844
+0.199165731668
+0.175416782498
+0.168543994427
+0.149943619967
+0.139937058091
+0.115142554045
+0.102114476264
+0.10165335983
+0.100736640394
+0.0836024805903
+0.0692452341318
+0.0678016245365
+0.0671614557505
+0.997565627098
+0.310370534658
+0.121943630278
+0.999060809612
+0.763829112053
+0.548439204693
+0.392829746008
+0.343789696693
+0.327950537205
+0.322002232075
+0.271202534437
+0.20572309196
+0.205052271485
+0.193017303944
+0.146933466196
+0.143753260374
+0.108680784702
+0.107662349939
+0.10150655359
+0.100530572236
+0.100387252867
+0.0988109260798
+0.097924284637
+0.0943624898791
+0.083846077323
+0.0750280022621
+0.0727624222636
+0.0659538730979
+0.998539090157
+0.997881114483
+0.997382938862
+0.985440790653
+0.981495320797
+0.965629041195
+0.957271277905
+0.909261226654
+0.875363767147
+0.660025954247
+0.450789391994
+0.444396108389
+0.38357719779
+0.365181267262
+0.331636041403
+0.319503486156
+0.310181796551
+0.288008749485
+0.236835598946
+0.235814407468
+0.222920387983
+0.219529896975
+0.215840056539
+0.207058951259
+0.190333992243
+0.187692150474
+0.181554317474
+0.177683204412
+0.164974540472
+0.153224885464
+0.150455832481
+0.149982064962
+0.118726201355
+0.114707112312
+0.110912621021
+0.102897204459
+0.0999281257391
+0.0949439331889
+0.0939573347569
+0.0885958150029
+0.0792355909944
+0.0737782716751
+0.0722291395068
+0.0634772926569
+0.999286234379
+0.996085643768
+0.994139194489
+0.969297945499
+0.56023555994
+0.424246877432
+0.273330807686
+0.242655724287
+0.205557361245
+0.19320268929
+0.18737000227
+0.101471960545
+0.0938806831837
+0.0760854706168
+0.0731589347124
+0.0683814436197
+0.065781570971
+0.999308109283
+0.496494144201
+0.46033602953
+0.42969122529
+0.271194547415
+0.207085624337
+0.163825705647
+0.126369640231
+0.0806090533733
+0.0642518699169
+0.0629470646381
+0.999680042267
+0.969459056854
+0.433087617159
+0.382071852684
+0.20693461597
+0.127581313252
+0.097639799118
+0.0782695710659
+0.0751127526164
+0.999941825867
+0.344175100327
+0.284262686968
+0.217524588108
+0.169766381383
+0.121343225241
+0.0752773955464
+0.0629453659058
+0.999940395355
+0.235369309783
+0.999597847462
+0.997480869293
+0.995526254177
+0.952282130718
+0.93034529686
+0.922278404236
+0.856420874596
+0.835806667805
+0.829061567783
+0.813815116882
+0.809287786484
+0.737478256226
+0.733895421028
+0.716523885727
+0.694051384926
+0.657566189766
+0.587335705757
+0.564491331577
+0.491372793913
+0.476113259792
+0.439809888601
+0.413700371981
+0.402025580406
+0.367200285196
+0.341176629066
+0.275124877691
+0.256491661072
+0.249379917979
+0.248360559344
+0.236920565367
+0.233534157276
+0.225334987044
+0.216104656458
+0.216007277369
+0.212946221232
+0.211455762386
+0.191019937396
+0.186041116714
+0.185381963849
+0.183242455125
+0.17873236537
+0.16872766614
+0.166486382484
+0.163530185819
+0.127895727754
+0.127229854465
+0.11301510036
+0.104644842446
+0.101528868079
+0.0965113043785
+0.096376478672
+0.0931837633252
+0.0881967842579
+0.0872703939676
+0.0853974819183
+0.0847291871905
+0.0742643550038
+0.0702226832509
+0.0673470273614
+0.0669089928269
+0.0655114129186
+0.998662948608
+0.995158016682
+0.991211950779
+0.986758708954
+0.969135463238
+0.83894276619
+0.82522124052
+0.719213306904
+0.703989326954
+0.60787820816
+0.515525400639
+0.491848349571
+0.458793759346
+0.427799791098
+0.390281736851
+0.383812487125
+0.372109562159
+0.368942886591
+0.31460827589
+0.292765378952
+0.288988262415
+0.270797073841
+0.239032045007
+0.235466718674
+0.234509572387
+0.224899783731
+0.211850211024
+0.172076702118
+0.159356221557
+0.156808510423
+0.153375610709
+0.145980522037
+0.137990802526
+0.132976606488
+0.127839401364
+0.12676911056
+0.117707535625
+0.109960392118
+0.109015099704
+0.0919677764177
+0.0898765772581
+0.0852149873972
+0.0815847441554
+0.0812755227089
+0.0786580294371
+0.0683072730899
+0.0642883852124
+0.0634211823344
+0.998977780342
+0.998807191849
+0.959875881672
+0.949519515038
+0.933165788651
+0.922093987465
+0.906822144985
+0.904073417187
+0.902075588703
+0.842401206493
+0.802260875702
+0.789300918579
+0.785109460354
+0.754698634148
+0.733961939812
+0.708277106285
+0.683625876904
+0.673088729382
+0.599248826504
+0.57192504406
+0.527688324451
+0.394457608461
+0.388507813215
+0.379782170057
+0.373887211084
+0.372881412506
+0.33828613162
+0.33057141304
+0.326114505529
+0.323657602072
+0.32317173481
+0.30563762784
+0.300480127335
+0.241196870804
+0.240974634886
+0.23461048305
+0.231546878815
+0.231183499098
+0.227321967483
+0.226995944977
+0.213403090835
+0.208149254322
+0.207060351968
+0.20258924365
+0.186601728201
+0.171379178762
+0.156725972891
+0.155011996627
+0.146505519748
+0.13764873147
+0.131793454289
+0.130502641201
+0.129497945309
+0.116936035454
+0.116710364819
+0.113072186708
+0.112832061946
+0.105793237686
+0.105617702007
+0.101857751608
+0.100984752178
+0.0997249260545
+0.0982602611184
+0.0916966274381
+0.089905358851
+0.0877860561013
+0.0874497890472
+0.0795760154724
+0.0795100182295
+0.0791537761688
+0.0787766501307
+0.0740403085947
+0.070902235806
+0.0708448886871
+0.0693192631006
+0.0676619186997
+0.0667790099978
+0.0660361796618
+0.0647912099957
+0.061996858567
+0.0606067031622
+0.99957293272
+0.993750274181
+0.827045619488
+0.388509839773
+0.27541911602
+0.182834744453
+0.126942291856
+0.112534366548
+0.0737042203546
+0.0627977401018
+0.99841272831
+0.348081588745
+0.0971109122038
+0.999543309212
+0.976110160351
+0.875870883465
+0.836310982704
+0.68884819746
+0.620647251606
+0.52679681778
+0.451055079699
+0.412902623415
+0.400187045336
+0.37918689847
+0.249540492892
+0.228980332613
+0.194315731525
+0.18153488636
+0.164872825146
+0.16343472898
+0.158788993955
+0.134009376168
+0.117968551815
+0.114006377757
+0.102439410985
+0.0982652008533
+0.0926960557699
+0.0900781527162
+0.0899202004075
+0.08598279953
+0.085274413228
+0.073134444654
+0.0625574961305
+0.993216335773
+0.975570499897
+0.970373034477
+0.945912122726
+0.937525212765
+0.924663424492
+0.907295465469
+0.889712512493
+0.881514787674
+0.879716038704
+0.859954357147
+0.834656834602
+0.804263532162
+0.79725921154
+0.767066657543
+0.763344585896
+0.685545742512
+0.664651453495
+0.66150867939
+0.656108856201
+0.652143657207
+0.634157001972
+0.616104960442
+0.585067749023
+0.573857486248
+0.567478239536
+0.559469163418
+0.548016548157
+0.541197776794
+0.530756890774
+0.518088042736
+0.512053370476
+0.493232667446
+0.486038953066
+0.480477005243
+0.450116246939
+0.410009592772
+0.401282966137
+0.387558162212
+0.36515390873
+0.361659288406
+0.355800062418
+0.353248745203
+0.33175688982
+0.322483748198
+0.320060878992
+0.306599199772
+0.303767681122
+0.288973212242
+0.284207552671
+0.282932132483
+0.282419413328
+0.282074481249
+0.280106902122
+0.280104994774
+0.276965349913
+0.273052960634
+0.269783467054
+0.267787784338
+0.267066478729
+0.264517426491
+0.243556424975
+0.242042064667
+0.235254913568
+0.234700322151
+0.229953601956
+0.226104989648
+0.225797668099
+0.218533590436
+0.215065404773
+0.214965030551
+0.212674438953
+0.211623907089
+0.209298104048
+0.206417113543
+0.202060818672
+0.189581111073
+0.182283326983
+0.181591510773
+0.181434750557
+0.179919213057
+0.174335986376
+0.169061899185
+0.167664662004
+0.167320653796
+0.163333147764
+0.156408444047
+0.150729268789
+0.147325515747
+0.147240594029
+0.145339742303
+0.143960431218
+0.143300622702
+0.140598759055
+0.139069333673
+0.138993412256
+0.137379005551
+0.137277722359
+0.136481150985
+0.133706137538
+0.130954056978
+0.129161864519
+0.126901268959
+0.125610381365
+0.123504243791
+0.118266813457
+0.11660309881
+0.116041325033
+0.113958820701
+0.110167823732
+0.108921475708
+0.108087614179
+0.10614284873
+0.10605327785
+0.104277007282
+0.102406144142
+0.0999448895454
+0.0994986966252
+0.0984758362174
+0.0971331447363
+0.0932394340634
+0.0907735154033
+0.0905347019434
+0.0898824855685
+0.0869038030505
+0.0844980031252
+0.0835441872478
+0.0833920091391
+0.0833822190762
+0.0825098007917
+0.0818506851792
+0.0818314403296
+0.0789027884603
+0.0778140872717
+0.0777319148183
+0.0760681778193
+0.073419354856
+0.0732608065009
+0.0715128928423
+0.0670804455876
+0.0667047277093
+0.0656472966075
+0.0640507563949
+0.0627428367734
+0.0618509948254
+0.0615127906203
+0.0603558346629
+0.99989771843
+0.999846816063
+0.288911312819
+0.221270933747
+0.217263042927
+0.0870270952582
+0.999519348145
+0.939058661461
+0.81638365984
+0.708284199238
+0.61575961113
+0.326931416988
+0.316072255373
+0.283757388592
+0.244688674808
+0.239954039454
+0.193931862712
+0.171606019139
+0.171484023333
+0.154199242592
+0.122150093317
+0.116301976144
+0.0977482572198
+0.0922164916992
+0.0922150462866
+0.0875006318092
+0.0864450708032
+0.0795192196965
+0.0762795507908
+0.0600846260786
+0.999850034714
+0.999378204346
+0.993506610394
+0.247796267271
+0.231860265136
+0.216742813587
+0.082324385643
+0.0609562955797
+0.703911662102
+0.663096189499
+0.541894435883
+0.385675787926
+0.320125311613
+0.192803189158
+0.146333813667
+0.113117776811
+0.102297760546
+0.0871008634567
+0.0798650681973
+0.0750821530819
+0.0736013352871
+0.0644679442048
+0.0601631775498
+0.999855399132
+0.999534010887
+0.998559117317
+0.945432722569
+0.6314702034
+0.340753138065
+0.288988023996
+0.25110861659
+0.24417348206
+0.235046938062
+0.223163604736
+0.163651794195
+0.131423309445
+0.12228102982
+0.110422708094
+0.0882274731994
+0.0843949168921
+0.0774702876806
+0.0745047032833
+0.996391952038
+0.477867037058
+0.264010310173
+0.238096654415
+0.136751115322
+0.0876591950655
+0.999528646469
+0.228829145432
+0.999871134758
+0.232539698482
+0.955274581909
+0.663733124733
+0.247329637408
+0.240137562156
+0.226819604635
+0.155680269003
+0.155516892672
+0.121254317462
+0.106654785573
+0.0893527716398
+0.0882698819041
+0.0732044652104
+0.0637960135937
+0.978656172752
+0.975334584713
+0.970794796944
+0.969568729401
+0.963326990604
+0.944955050945
+0.901605427265
+0.567686855793
+0.487928688526
+0.411227524281
+0.364766359329
+0.361663818359
+0.329274713993
+0.322866290808
+0.304080754519
+0.284154355526
+0.271245062351
+0.2456792593
+0.198432952166
+0.178731113672
+0.155805125833
+0.14596773684
+0.141371563077
+0.123677909374
+0.120500333607
+0.108573883772
+0.104590073228
+0.100885458291
+0.097399443388
+0.0962924659252
+0.0943651348352
+0.0922386944294
+0.0921239852905
+0.0835223421454
+0.0824559703469
+0.0767510607839
+0.0721935778856
+0.0688089430332
+0.998698711395
+0.972800433636
+0.927046239376
+0.880885362625
+0.847339630127
+0.842203736305
+0.759785652161
+0.692715227604
+0.624692857265
+0.598495423794
+0.573394179344
+0.557663679123
+0.461614489555
+0.429479956627
+0.428445547819
+0.426035940647
+0.381149142981
+0.364916950464
+0.364005476236
+0.361728638411
+0.327846139669
+0.301384836435
+0.290213793516
+0.27750813961
+0.251559436321
+0.236893072724
+0.226439148188
+0.223389059305
+0.212522357702
+0.200780212879
+0.192410871387
+0.176976382732
+0.166478917003
+0.162682369351
+0.153008341789
+0.149034217
+0.141834944487
+0.139627918601
+0.139469787478
+0.137039512396
+0.134072139859
+0.129238680005
+0.123912990093
+0.120750106871
+0.11958219111
+0.117354437709
+0.105600915849
+0.103273026645
+0.100106246769
+0.0998390763998
+0.0954754501581
+0.0945441871881
+0.0903118029237
+0.0870001614094
+0.0859416350722
+0.0843768343329
+0.0742490366101
+0.0703430175781
+0.0698920562863
+0.06979957968
+0.0644008144736
+0.0626694485545
+0.0622330233455
+0.0612617172301
+0.0607994683087
+0.995929539204
+0.978537201881
+0.903406202793
+0.768872022629
+0.627911269665
+0.515701532364
+0.380156606436
+0.32027977705
+0.314873456955
+0.30187690258
+0.264389663935
+0.252791136503
+0.24653506279
+0.184514909983
+0.159627184272
+0.159008622169
+0.105330318213
+0.103792145848
+0.0955826118588
+0.0940215811133
+0.0936533883214
+0.0844427123666
+0.0724783167243
+0.999798476696
+0.999317526817
+0.997812747955
+0.968771934509
+0.877855181694
+0.592038333416
+0.345867812634
+0.29451841116
+0.288064688444
+0.253395438194
+0.243003606796
+0.157659396529
+0.144619271159
+0.135754212737
+0.12618894875
+0.114776432514
+0.101125977933
+0.0916216522455
+0.0625303760171
+0.999813020229
+0.999654531479
+0.99839168787
+0.451335400343
+0.288661897182
+0.266241550446
+0.255533367395
+0.191562905908
+0.138799205422
+0.132231146097
+0.124312601984
+0.071453794837
+0.0702380239964
+0.0700588598847
+0.998936235905
+0.261909663677
+0.0668368861079
+0.999743163586
+0.227087751031
+0.999866724014
+0.315880894661
+0.0882930755615
+0.999761164188
+0.219246551394
+0.999893426895
+0.814684450626
+0.366395562887
+0.324268400669
+0.250215739012
+0.196875050664
+0.144196018577
+0.139053329825
+0.111592799425
+0.0713099613786
+0.0695058628917
+0.0624440200627
+0.999748408794
+0.996121108532
+0.320835798979
+0.281315147877
+0.183734700084
+0.10904482007
+0.0824709311128
+0.0658466592431
+0.999536752701
+0.28215354681
+0.115026369691
+0.111843936145
+0.999535441399
+0.999520540237
+0.974512219429
+0.929280757904
+0.744673728943
+0.437213808298
+0.237644687295
+0.227778077126
+0.205230429769
+0.199573725462
+0.178779348731
+0.176341310143
+0.142445281148
+0.0658986642957
+0.999948501587
+0.236017182469
+0.998930156231
+0.648658931255
+0.349143654108
+0.277649492025
+0.203198328614
+0.168934375048
+0.162634953856
+0.13789960742
+0.104771405458
+0.0852926447988
+0.0814294368029
+0.0722193792462
+0.0642064288259
+0.0617296099663
+0.999113857746
+0.585631608963
+0.562474012375
+0.512128591537
+0.483534693718
+0.242238044739
+0.21625213325
+0.186181604862
+0.155920892954
+0.119565315545
+0.106025241315
+0.0762975737453
+0.0706060603261
+0.999106943607
+0.998624444008
+0.985658943653
+0.966313481331
+0.457853972912
+0.252304255962
+0.252244204283
+0.210999727249
+0.175932973623
+0.11610417068
+0.113444440067
+0.090659186244
+0.065131649375
+0.970032930374
+0.964421212673
+0.956641674042
+0.955138146877
+0.915984213352
+0.672148644924
+0.638635694981
+0.522949755192
+0.482919037342
+0.449975758791
+0.425284445286
+0.342054963112
+0.319900035858
+0.309387624264
+0.280058145523
+0.259689807892
+0.245165169239
+0.244594186544
+0.238632112741
+0.235903099179
+0.219339847565
+0.199819222093
+0.187922984362
+0.177261009812
+0.163472115993
+0.155036762357
+0.142612800002
+0.129438668489
+0.125532478094
+0.124418437481
+0.122861757874
+0.1210103333
+0.106301836669
+0.0973434373736
+0.09565833956
+0.0915573686361
+0.0908259749413
+0.0894632935524
+0.0803199931979
+0.0798049122095
+0.0779393538833
+0.0764619931579
+0.0704126358032
+0.0681950524449
+0.0675774514675
+0.0643995031714
+0.0603313297033
+0.999955654144
+0.952925026417
+0.92770922184
+0.813818097115
+0.37018468976
+0.301853358746
+0.233075529337
+0.188911750913
+0.0967919901013
+0.0967421457171
+0.0824940949678
+0.0628109127283
+0.997334241867
+0.2618445158
+0.246833130717
+0.0836516469717
+0.998163402081
+0.455946147442
+0.182981953025
+0.090917468071
+0.0749205127358
+0.979809641838
+0.727417826653
+0.605292916298
+0.357147067785
+0.272150188684
+0.162512585521
+0.142401754856
+0.136632680893
+0.128961741924
+0.0978384837508
+0.0959154814482
+0.0681008249521
+0.0668543279171
+0.999784886837
+0.998968482018
+0.867163896561
+0.343409597874
+0.246007218957
+0.242650523782
+0.143552795053
+0.0854872688651
+0.0633688494563
+0.998659968376
+0.989638984203
+0.976663291454
+0.976441025734
+0.968908190727
+0.813292503357
+0.738913536072
+0.684554219246
+0.652851998806
+0.550465404987
+0.547324776649
+0.501012384892
+0.455573916435
+0.395549237728
+0.385624110699
+0.378431469202
+0.365013629198
+0.325610488653
+0.32128238678
+0.285903245211
+0.254492849112
+0.252062380314
+0.234780013561
+0.226388812065
+0.216572150588
+0.214440375566
+0.213191241026
+0.209486559033
+0.209165021777
+0.207173883915
+0.199442714453
+0.17650577426
+0.172633036971
+0.169691622257
+0.158597245812
+0.150062829256
+0.147365525365
+0.146462857723
+0.142768800259
+0.138792261481
+0.131640180945
+0.12914519012
+0.126254230738
+0.122310571373
+0.119602516294
+0.119118705392
+0.117478460073
+0.1157412678
+0.113226890564
+0.109149113297
+0.108718261123
+0.101616062224
+0.0986347272992
+0.0961443856359
+0.0934873595834
+0.0930428504944
+0.092565536499
+0.0868843048811
+0.0761469304562
+0.0760496780276
+0.0753775164485
+0.0722406879067
+0.0634995475411
+0.0620065964758
+0.0610646530986
+0.0606535077095
+0.999444186687
+0.994910538197
+0.985620260239
+0.835098683834
+0.822133898735
+0.789752840996
+0.651724636555
+0.584192872047
+0.456067472696
+0.438526451588
+0.326544702053
+0.260963350534
+0.259355723858
+0.254966825247
+0.222301974893
+0.203936427832
+0.175814211369
+0.172652631998
+0.143587321043
+0.12492159754
+0.124358028173
+0.11673784256
+0.0989296212792
+0.0975260362029
+0.0866920202971
+0.0860415324569
+0.0741546973586
+0.0710344016552
+0.0677868723869
+0.0609155446291
+0.999710857868
+0.999052584171
+0.891223490238
+0.828133881092
+0.318629682064
+0.271316260099
+0.215562656522
+0.165405884385
+0.0945571511984
+0.0725582167506
+0.980177640915
+0.970484495163
+0.93881714344
+0.920491874218
+0.900938630104
+0.750771522522
+0.628063499928
+0.495109885931
+0.338463664055
+0.324809134007
+0.294777005911
+0.284767746925
+0.278138190508
+0.273254305124
+0.253451853991
+0.233639806509
+0.232829123735
+0.227334320545
+0.157761484385
+0.149634316564
+0.144216433167
+0.141110509634
+0.113072142005
+0.105230726302
+0.0960897579789
+0.0936128348112
+0.0900513678789
+0.0899339690804
+0.0795992314816
+0.0753014236689
+0.0747466087341
+0.0710517987609
+0.0661604478955
+0.065829589963
+0.0655243247747
+0.0655174925923
+0.0617206208408
+0.999046385288
+0.965437829494
+0.950092196465
+0.930512368679
+0.920779705048
+0.917126059532
+0.90729123354
+0.898122549057
+0.86050003767
+0.860449552536
+0.817966997623
+0.813523590565
+0.798862695694
+0.746779680252
+0.741243898869
+0.728260993958
+0.720394015312
+0.683536231518
+0.674817979336
+0.672284960747
+0.639966905117
+0.622249126434
+0.614108622074
+0.592328250408
+0.556807041168
+0.545988619328
+0.519410252571
+0.518498063087
+0.517130732536
+0.506128132343
+0.47183188796
+0.454237669706
+0.380163758993
+0.36874717474
+0.356404334307
+0.34113496542
+0.330895483494
+0.315059810877
+0.314552336931
+0.311426758766
+0.300803899765
+0.290229171515
+0.288209438324
+0.263654559851
+0.263095766306
+0.260067909956
+0.25839972496
+0.256644427776
+0.253614544868
+0.251791477203
+0.243260189891
+0.23516894877
+0.22878742218
+0.223614111543
+0.217887118459
+0.214867591858
+0.204077795148
+0.194004788995
+0.193463012576
+0.189574807882
+0.179940715432
+0.173538297415
+0.17151722312
+0.169873446226
+0.166312530637
+0.162716209888
+0.161418572068
+0.160614073277
+0.159332811832
+0.157844394445
+0.152271330357
+0.148882761598
+0.14776918292
+0.140623450279
+0.128989741206
+0.127412512898
+0.125911682844
+0.124442651868
+0.123818807304
+0.122774980962
+0.119965068996
+0.110772065818
+0.110084339976
+0.109838224947
+0.101508811116
+0.0961550399661
+0.0910584852099
+0.0887931585312
+0.0885992124677
+0.088555328548
+0.0878334790468
+0.0837953537703
+0.0827603489161
+0.0821253955364
+0.0781668946147
+0.0767849236727
+0.0753081738949
+0.0750401690602
+0.0741090625525
+0.0722230151296
+0.0721265226603
+0.0707730054855
+0.0643110871315
+0.0621439144015
+0.999683260918
+0.934800744057
+0.499918669462
+0.290155351162
+0.264350026846
+0.170621916652
+0.154296562076
+0.141831591725
+0.0846975818276
+0.998842298985
+0.955382585526
+0.551221847534
+0.266970783472
+0.218863591552
+0.142771780491
+0.136444866657
+0.083930298686
+0.0695770457387
+0.997997939587
+0.978888034821
+0.343735784292
+0.316558986902
+0.238619312644
+0.19389693439
+0.103046908975
+0.094350233674
+0.0914278924465
+0.0613827593625
+0.934709668159
+0.80220913887
+0.593818664551
+0.329293519258
+0.223682895303
+0.204423367977
+0.201385095716
+0.177880138159
+0.152899980545
+0.147103607655
+0.109445847571
+0.10456905514
+0.0784220322967
+0.0699635818601
+0.0603775493801
+0.997681856155
+0.996425628662
+0.99634462595
+0.947052896023
+0.92278021574
+0.803041815758
+0.801991641521
+0.789935231209
+0.702942609787
+0.681192755699
+0.515666544437
+0.512978494167
+0.427031934261
+0.383074343204
+0.345467090607
+0.338910073042
+0.271787673235
+0.235589429736
+0.227199360728
+0.216966673732
+0.210420757532
+0.208665132523
+0.206409841776
+0.200855761766
+0.198519960046
+0.194111987948
+0.177837267518
+0.173649445176
+0.173567205667
+0.159938275814
+0.150976240635
+0.129063904285
+0.120560847223
+0.116361223161
+0.10824304074
+0.0978798121214
+0.0936308950186
+0.0903887450695
+0.0851185172796
+0.0802545621991
+0.0781006366014
+0.0780253186822
+0.0756061971188
+0.0707845315337
+0.068078674376
+0.0656115710735
+0.0649440661073
+0.0621316060424
+0.999605596066
+0.241380944848
+0.99380916357
+0.98542457819
+0.978319823742
+0.964034736156
+0.857049882412
+0.494316518307
+0.479962944984
+0.394852638245
+0.390385240316
+0.385483235121
+0.327519744635
+0.285850584507
+0.281522512436
+0.271637111902
+0.247668117285
+0.180126100779
+0.152133762836
+0.097914069891
+0.0900454223156
+0.0874194130301
+0.0794461369514
+0.0744372233748
+0.0732711553574
+0.0711869373918
+0.0706560239196
+0.0622603408992
+0.999770343304
+0.775024473667
+0.351726263762
+0.34961938858
+0.184085696936
+0.180407956243
+0.0730275511742
+0.0642328336835
+0.998471319675
+0.97281140089
+0.972439706326
+0.725111782551
+0.691080152988
+0.476315140724
+0.388737380505
+0.387112230062
+0.323842734098
+0.310147970915
+0.307060241699
+0.277370214462
+0.276523321867
+0.248051851988
+0.237702727318
+0.216148614883
+0.195786118507
+0.156051665545
+0.148883491755
+0.14226141572
+0.137506127357
+0.135838270187
+0.133868277073
+0.12659201026
+0.122373491526
+0.119450166821
+0.117808960378
+0.114593066275
+0.11297056824
+0.109249293804
+0.100609600544
+0.0880053117871
+0.0871940031648
+0.0743257701397
+0.073550209403
+0.0716729611158
+0.0695308223367
+0.069167226553
+0.0678350254893
+0.0618297196925
+0.999951958656
+0.279753446579
+0.18590579927
+0.114261709154
+0.0849887058139
+0.0762716382742
+0.987154722214
+0.969870388508
+0.907344639301
+0.895591497421
+0.802549898624
+0.622002065182
+0.485719114542
+0.428340286016
+0.414465487003
+0.381977051497
+0.37289121747
+0.313868224621
+0.279406249523
+0.261806964874
+0.247925892472
+0.237484484911
+0.223891675472
+0.2201397717
+0.216879799962
+0.197938501835
+0.189752086997
+0.183992147446
+0.178551435471
+0.17111967504
+0.148113921285
+0.142700731754
+0.13808684051
+0.127825990319
+0.127453029156
+0.114105798304
+0.113897897303
+0.112465724349
+0.110114991665
+0.0977530777454
+0.0925996825099
+0.0877473950386
+0.0841295048594
+0.0825326144695
+0.0725993141532
+0.0620328746736
+0.0614539980888
+0.0612276419997
+0.9997189641
+0.999280869961
+0.998686373234
+0.425476551056
+0.255172550678
+0.25051048398
+0.247921198606
+0.0968772917986
+0.0770838260651
+0.0746506378055
+0.0705885589123
+0.991487145424
+0.989211559296
+0.982241094112
+0.917699038982
+0.417591243982
+0.33052739501
+0.288149237633
+0.26740449667
+0.218157678843
+0.122245803475
+0.0975705981255
+0.0705549716949
+0.0690207928419
+0.999976396561
+0.84847587347
+0.621804714203
+0.259323894978
+0.212585836649
+0.181926801801
+0.175957292318
+0.0841959267855
+0.999094367027
+0.998620390892
+0.99663490057
+0.996036708355
+0.988729357719
+0.987530231476
+0.907178342342
+0.835805535316
+0.819276630878
+0.769920170307
+0.743556320667
+0.733779847622
+0.700996100903
+0.680927693844
+0.680250883102
+0.643130898476
+0.609348595142
+0.585534453392
+0.483778476715
+0.480661332607
+0.461091518402
+0.426278889179
+0.376637101173
+0.313000380993
+0.309503853321
+0.294061869383
+0.29167380929
+0.2814874053
+0.264918535948
+0.256000220776
+0.249536454678
+0.244936168194
+0.243801414967
+0.238004207611
+0.237482875586
+0.228543683887
+0.225077047944
+0.219357430935
+0.212896749377
+0.212367877364
+0.208287805319
+0.207244560122
+0.19757334888
+0.191451534629
+0.18919454515
+0.176703155041
+0.173925995827
+0.173339307308
+0.162265732884
+0.161336541176
+0.159667804837
+0.151149168611
+0.146011665463
+0.133236289024
+0.131549820304
+0.127454698086
+0.115429148078
+0.115316651762
+0.11207973212
+0.110545098782
+0.106431044638
+0.105943508446
+0.0933810397983
+0.0875795260072
+0.0870897397399
+0.0870581045747
+0.0849899202585
+0.0832566618919
+0.0800332948565
+0.0795391350985
+0.0761967822909
+0.0751811414957
+0.0686085373163
+0.0655881389976
+0.0626740604639
+0.0625777393579
+0.0609871894121
+0.0608366876841
+0.998808145523
+0.978352308273
+0.946493208408
+0.531579554081
+0.336577802896
+0.234309047461
+0.231451258063
+0.199308156967
+0.115787371993
+0.094144590199
+0.0849893763661
+0.0692880600691
+0.997657418251
+0.914787352085
+0.842862904072
+0.832503139973
+0.727083802223
+0.718545913696
+0.613875985146
+0.380111992359
+0.360219269991
+0.340783417225
+0.331788927317
+0.330129683018
+0.311313837767
+0.294847875834
+0.290592730045
+0.272960454226
+0.267677634954
+0.267642617226
+0.23398783803
+0.231492593884
+0.209310919046
+0.206468462944
+0.194265529513
+0.183873027563
+0.181126534939
+0.147194296122
+0.145616650581
+0.138182833791
+0.135115161538
+0.133270770311
+0.12867115438
+0.127086669207
+0.123512022197
+0.11801237613
+0.115202046931
+0.111728526652
+0.109662361443
+0.107553161681
+0.106809966266
+0.0991066470742
+0.0927091166377
+0.0918141677976
+0.0912182629108
+0.0865476876497
+0.0828610956669
+0.0801194384694
+0.074218146503
+0.0724795013666
+0.0720610693097
+0.0701918452978
+0.0693461000919
+0.0604380220175
+0.999760448933
+0.86714631319
+0.816415429115
+0.788084208965
+0.78722345829
+0.724879443645
+0.705936610699
+0.683652997017
+0.68223965168
+0.612919092178
+0.610759735107
+0.566298425198
+0.550322771072
+0.50426286459
+0.44874304533
+0.444353997707
+0.441268861294
+0.412246227264
+0.404279083014
+0.370709776878
+0.353381633759
+0.327494889498
+0.310750305653
+0.288770735264
+0.277479529381
+0.274135500193
+0.272434651852
+0.262119829655
+0.238835722208
+0.232859775424
+0.217261970043
+0.215436369181
+0.204956755042
+0.193568021059
+0.191744402051
+0.18996694684
+0.186739042401
+0.18348929286
+0.182431682944
+0.17886698246
+0.175354704261
+0.171264588833
+0.161247596145
+0.153144195676
+0.14706325531
+0.143830597401
+0.121107600629
+0.120694808662
+0.119060531259
+0.114746056497
+0.110849156976
+0.110167354345
+0.10890712589
+0.108444593847
+0.107555598021
+0.107058748603
+0.103277385235
+0.0973204597831
+0.0952064171433
+0.0928353369236
+0.0926011949778
+0.0924280807376
+0.0903032273054
+0.0868149399757
+0.0835096314549
+0.0831751078367
+0.0785778537393
+0.0783511698246
+0.0702950805426
+0.0692116543651
+0.0674300864339
+0.0669756233692
+0.0654204264283
+0.0624707229435
+0.0616197921336
+0.999868273735
+0.989881694317
+0.942361593246
+0.911660313606
+0.900287389755
+0.815557539463
+0.713956177235
+0.60606944561
+0.485827952623
+0.443666309118
+0.33224901557
+0.290036737919
+0.267458319664
+0.248794049025
+0.239270597696
+0.23631298542
+0.18384988606
+0.180183649063
+0.177024587989
+0.164183601737
+0.15894022584
+0.154323756695
+0.145862385631
+0.13509143889
+0.127169549465
+0.126517936587
+0.117209993303
+0.116610124707
+0.0951070412993
+0.0929615646601
+0.0850312933326
+0.0762813165784
+0.0761486440897
+0.0751333758235
+0.0716297179461
+0.0685564726591
+0.0680993199348
+0.998735010624
+0.675233960152
+0.443177103996
+0.353596955538
+0.247651949525
+0.0914334580302
+0.0901449918747
+0.0630879625678
+0.998683989048
+0.997266888618
+0.990259289742
+0.96475571394
+0.910313248634
+0.883459985256
+0.876430332661
+0.810277283192
+0.727450788021
+0.709894537926
+0.675183475018
+0.649260401726
+0.618355691433
+0.508260190487
+0.470760852098
+0.459174335003
+0.44481241703
+0.37651976943
+0.345261543989
+0.329995244741
+0.305368334055
+0.303850829601
+0.293539494276
+0.28953024745
+0.28188508749
+0.281354576349
+0.270833313465
+0.259975969791
+0.241000682116
+0.230477154255
+0.230408370495
+0.228823408484
+0.220539569855
+0.212583810091
+0.194750934839
+0.193950772285
+0.189674809575
+0.1878387779
+0.18600718677
+0.182614207268
+0.182181805372
+0.181430518627
+0.170422032475
+0.145685836673
+0.144189372659
+0.135545626283
+0.134503245354
+0.132028579712
+0.127025350928
+0.12055131793
+0.119322456419
+0.11587343365
+0.110900476575
+0.098680369556
+0.0983603894711
+0.0981445461512
+0.0886262133718
+0.0884347185493
+0.0873519554734
+0.082098968327
+0.0798895061016
+0.0798854157329
+0.0790678858757
+0.078222066164
+0.0744107961655
+0.0741434767842
+0.071318641305
+0.0682421028614
+0.0676609873772
+0.0648136362433
+0.0644767209888
+0.0629993900657
+0.999658465385
+0.941824376583
+0.777161359787
+0.679553151131
+0.583405435085
+0.508303046227
+0.50286757946
+0.456321954727
+0.392060011625
+0.36596763134
+0.357713013887
+0.353012889624
+0.278389841318
+0.260415643454
+0.2289275527
+0.193694680929
+0.187770649791
+0.185200303793
+0.148732587695
+0.137808144093
+0.130796641111
+0.129050761461
+0.12671880424
+0.120503179729
+0.114337444305
+0.104058556259
+0.0997885242105
+0.0889311283827
+0.0881459638476
+0.0868649706244
+0.0807909667492
+0.0685496330261
+0.0628277957439
+0.993924319744
+0.403751403093
+0.337419182062
+0.120362840593
+0.0803850069642
+0.0715106204152
+0.999909162521
+0.242596298456
+0.0718633979559
+0.999756753445
+0.336956322193
+0.242152914405
+0.0780701413751
+0.949013352394
+0.860411882401
+0.682342350483
+0.616908490658
+0.362518429756
+0.268650859594
+0.179816871881
+0.166450455785
+0.141754031181
+0.133054926991
+0.128029599786
+0.116518974304
+0.11229582876
+0.101051978767
+0.0844284370542
+0.0774002447724
+0.0630281493068
+0.0612119212747
+0.991825819016
+0.559499979019
+0.524457216263
+0.296190351248
+0.115169249475
+0.11465177685
+0.0974652171135
+0.0707409009337
+0.0675399899483
+0.0622006654739
+0.929676532745
+0.568548798561
+0.520284175873
+0.475271910429
+0.386187791824
+0.379938781261
+0.347091078758
+0.272842139006
+0.231019258499
+0.219022423029
+0.177951723337
+0.14778649807
+0.128122761846
+0.122259750962
+0.0943952277303
+0.0941446721554
+0.0869525521994
+0.0662781521678
+0.0655903443694
+0.997000396252
+0.755881667137
+0.632992506027
+0.629196703434
+0.615661919117
+0.608366429806
+0.574722826481
+0.543653011322
+0.518964707851
+0.491417855024
+0.486114829779
+0.473431974649
+0.373551666737
+0.336220651865
+0.294528633356
+0.285080730915
+0.283164978027
+0.270789891481
+0.267374873161
+0.262299954891
+0.261827856302
+0.259826391935
+0.253992438316
+0.244009688497
+0.236380741
+0.235265105963
+0.212029382586
+0.211424872279
+0.206109791994
+0.203512132168
+0.200594350696
+0.183964759111
+0.178449749947
+0.168804883957
+0.163178816438
+0.161985114217
+0.160097897053
+0.159852385521
+0.15813587606
+0.143423110247
+0.139567390084
+0.13544267416
+0.1337813586
+0.130622088909
+0.129764720798
+0.128316402435
+0.126835912466
+0.122056268156
+0.115022607148
+0.112495161593
+0.111757583916
+0.108219914138
+0.106095582247
+0.105105444789
+0.103794366121
+0.0980107933283
+0.0940403863788
+0.0937906056643
+0.0898168906569
+0.0889679715037
+0.0851756706834
+0.0821775346994
+0.0812926590443
+0.0780201405287
+0.0752083584666
+0.0749936401844
+0.0741730630398
+0.0728838294744
+0.0727431252599
+0.0709050223231
+0.0697273761034
+0.0681760162115
+0.0666232034564
+0.0645008087158
+0.063754722476
+0.060277633369
+0.980925858021
+0.961145699024
+0.955871224403
+0.927060186863
+0.91016882658
+0.901796400547
+0.87553602457
+0.860718429089
+0.857993543148
+0.833189547062
+0.688921391964
+0.647196531296
+0.64332985878
+0.548955440521
+0.547513306141
+0.525035738945
+0.47614556551
+0.46869289875
+0.455920517445
+0.438962608576
+0.413366466761
+0.400826245546
+0.391203790903
+0.389866769314
+0.375553667545
+0.368844002485
+0.367311060429
+0.366997867823
+0.362397789955
+0.349152505398
+0.337282896042
+0.336910307407
+0.312470525503
+0.306252330542
+0.299998998642
+0.29983240366
+0.297902703285
+0.297751784325
+0.282943367958
+0.272932440042
+0.245445489883
+0.237419396639
+0.214245975018
+0.206457987428
+0.205036625266
+0.203109845519
+0.202645838261
+0.202090039849
+0.194356366992
+0.190513536334
+0.189655333757
+0.185713037848
+0.177025333047
+0.176391124725
+0.171382382512
+0.168194934726
+0.166052535176
+0.163875505328
+0.155168101192
+0.150955453515
+0.149135202169
+0.144219458103
+0.142550572753
+0.138942927122
+0.127337619662
+0.126871302724
+0.123588368297
+0.122162535787
+0.121648423374
+0.120686352253
+0.119397550821
+0.110383942723
+0.109916374087
+0.109735265374
+0.109185881913
+0.108729116619
+0.104324243963
+0.103836506605
+0.102476254106
+0.0939859375358
+0.0918285995722
+0.0918251276016
+0.0897175446153
+0.086158156395
+0.0836462900043
+0.0811710134149
+0.0786540359259
+0.0773945450783
+0.0768013894558
+0.0749296844006
+0.0734364911914
+0.0724447965622
+0.071884997189
+0.0692236050963
+0.0689150691032
+0.0689133703709
+0.0687279626727
+0.0678910389543
+0.0659589916468
+0.0651655942202
+0.0641546547413
+0.064084418118
+0.0627348199487
+0.0614681318402
+0.0602305345237
+0.999400615692
+0.982150137424
+0.95837032795
+0.930996775627
+0.924158215523
+0.901169896126
+0.874354958534
+0.861836433411
+0.836242735386
+0.821148216724
+0.799998819828
+0.776892125607
+0.757043242455
+0.745335936546
+0.735382735729
+0.680729985237
+0.658538937569
+0.610141277313
+0.596382439137
+0.572279989719
+0.531951069832
+0.473333597183
+0.471643745899
+0.4573751688
+0.440963387489
+0.410715728998
+0.385961860418
+0.372734695673
+0.36145478487
+0.361124426126
+0.347487717867
+0.333969503641
+0.295032799244
+0.294876515865
+0.279749393463
+0.278046697378
+0.275012612343
+0.27021920681
+0.256812512875
+0.255291223526
+0.254607319832
+0.251141965389
+0.243253603578
+0.242820799351
+0.242272362113
+0.233997836709
+0.215534180403
+0.21192201972
+0.203618422151
+0.199565187097
+0.192863956094
+0.189578860998
+0.166365101933
+0.162385851145
+0.159190416336
+0.158501341939
+0.155764639378
+0.154518276453
+0.153611585498
+0.143660008907
+0.143419623375
+0.140932530165
+0.139496713877
+0.134303748608
+0.132587105036
+0.132412940264
+0.128892689943
+0.128097385168
+0.118986904621
+0.11498067528
+0.11448033154
+0.110576622188
+0.107722103596
+0.104527786374
+0.101710744202
+0.0999440848827
+0.095421411097
+0.0931055173278
+0.086274035275
+0.0810504406691
+0.0809945687652
+0.0804785564542
+0.0793621465564
+0.0784489363432
+0.0772594586015
+0.0728152170777
+0.0727198570967
+0.0714057013392
+0.0711666420102
+0.0699724629521
+0.0687068253756
+0.0684032440186
+0.0683584585786
+0.0675285235047
+0.0641392990947
+0.997060477734
+0.985852956772
+0.927538573742
+0.913199901581
+0.899129807949
+0.882609426975
+0.698277592659
+0.598035216331
+0.545655608177
+0.524832069874
+0.483872771263
+0.442138940096
+0.415071815252
+0.400254487991
+0.390766173601
+0.368828892708
+0.323354929686
+0.321677178144
+0.317087322474
+0.290193736553
+0.267937809229
+0.267542004585
+0.224218979478
+0.203200802207
+0.196493983269
+0.195107683539
+0.18767914176
+0.182597219944
+0.180915772915
+0.173579156399
+0.156573474407
+0.156094968319
+0.153256416321
+0.140113130212
+0.13594789803
+0.103080019355
+0.0976760238409
+0.0935561656952
+0.0913989245892
+0.0902653336525
+0.0787095576525
+0.0781434327364
+0.0689990147948
+0.0661777332425
+0.0610179007053
+0.997890293598
+0.971940279007
+0.531565606594
+0.414952278137
+0.407168537378
+0.403338164091
+0.400638312101
+0.364516407251
+0.320358991623
+0.286281436682
+0.237474247813
+0.214805752039
+0.200779810548
+0.13523247838
+0.131032288074
+0.128894433379
+0.114809706807
+0.104284562171
+0.101609528065
+0.0965284258127
+0.0939433574677
+0.0897003039718
+0.0872239843011
+0.0870838612318
+0.0743611678481
+0.0720844566822
+0.0695490092039
+0.0628148242831
+0.0618017315865
+0.0606888197362
+0.984470009804
+0.973152458668
+0.566552221775
+0.531401276588
+0.268309414387
+0.185084283352
+0.143271774054
+0.115001633763
+0.099053747952
+0.998081922531
+0.989743053913
+0.676154255867
+0.511151611805
+0.393500089645
+0.258572071791
+0.207933083177
+0.17001272738
+0.111584812403
+0.0838525742292
+0.0803325772285
+0.999251544476
+0.998200535774
+0.562200963497
+0.433628618717
+0.43249475956
+0.32786989212
+0.271553188562
+0.24913148582
+0.203035011888
+0.152553215623
+0.115873984993
+0.0980858802795
+0.0946531146765
+0.0829540044069
+0.0803153142333
+0.0714732632041
+0.999674320221
+0.98679792881
+0.53225672245
+0.269168257713
+0.177835449576
+0.0886795371771
+0.0782569795847
+0.0635307133198
+0.999966144562
+0.369555562735
+0.347666561604
+0.281538665295
+0.206522315741
+0.197484403849
+0.118861921132
+0.076033487916
+0.0607469640672
+0.999825060368
+0.856891214848
+0.558902025223
+0.533104360104
+0.455834835768
+0.391414582729
+0.348204284906
+0.32812204957
+0.325495928526
+0.268157660961
+0.152481511235
+0.144894436002
+0.123845741153
+0.119607895613
+0.102061852813
+0.09370521456
+0.0932293832302
+0.0886394158006
+0.0834347605705
+0.99528837204
+0.982694089413
+0.651544988155
+0.502734422684
+0.305707812309
+0.301115810871
+0.22840154171
+0.144117087126
+0.123726889491
+0.0790064483881
+0.0664870664477
+0.284549117088
+0.228133767843
+0.0960451513529
+0.0883836820722
+0.0827844962478
+0.0767489746213
+0.99732118845
+0.246472209692
+0.0993652567267
+0.998878657818
+0.966350317001
+0.418950229883
+0.199502915144
+0.147074863315
+0.0701441094279
+0.985722839832
+0.764966845512
+0.76092684269
+0.613129854202
+0.4841581285
+0.434870332479
+0.266871809959
+0.262657254934
+0.202428415418
+0.185591489077
+0.130192667246
+0.125067219138
+0.115099899471
+0.106370590627
+0.102400660515
+0.0853334069252
+0.0852201506495
+0.0649961978197
+0.0627226009965
+0.0605516172945
+0.999593913555
+0.979831278324
+0.97712212801
+0.975044608116
+0.97255885601
+0.942188858986
+0.937050402164
+0.920983612537
+0.898805439472
+0.844247102737
+0.841478645802
+0.697156190872
+0.667565762997
+0.648092210293
+0.608020603657
+0.520098984241
+0.401066124439
+0.397459030151
+0.384429365396
+0.344817966223
+0.341967523098
+0.331167817116
+0.329445123672
+0.279389679432
+0.26490727067
+0.262623369694
+0.256753176451
+0.253972560167
+0.251863390207
+0.249432325363
+0.2482547611
+0.246240660548
+0.244757801294
+0.241268008947
+0.239396750927
+0.207089930773
+0.187647491693
+0.186088696122
+0.184963941574
+0.181262984872
+0.180410578847
+0.176594182849
+0.162564009428
+0.152330413461
+0.144076213241
+0.141543850303
+0.131974607706
+0.123175360262
+0.117671340704
+0.113312512636
+0.105053082108
+0.102079965174
+0.095763489604
+0.095325499773
+0.0950408950448
+0.0912193804979
+0.0907698199153
+0.0780957341194
+0.0761816874146
+0.0755928531289
+0.0742813274264
+0.071082636714
+0.07058160007
+0.0701734349132
+0.0691963136196
+0.0682564452291
+0.0651336312294
+0.064162530005
+0.0609817840159
+0.993393003941
+0.990949690342
+0.889611423016
+0.701053798199
+0.356801271439
+0.320676147938
+0.28425976634
+0.255173057318
+0.223431810737
+0.20296369493
+0.150168254972
+0.140964984894
+0.135314822197
+0.116256341338
+0.0896904915571
+0.082616917789
+0.0713361352682
+0.0613046959043
+0.0603251568973
+0.0600855275989
+0.999836206436
+0.656895577908
+0.217802122235
+0.184624195099
+0.995726704597
+0.980472743511
+0.978414595127
+0.918051242828
+0.898795485497
+0.894916713238
+0.894030153751
+0.888022184372
+0.874621152878
+0.870987415314
+0.864393830299
+0.853805422783
+0.846061766148
+0.831376969814
+0.829289972782
+0.826669275761
+0.79227745533
+0.776941716671
+0.771121442318
+0.768724858761
+0.742800414562
+0.72237354517
+0.705098748207
+0.704277515411
+0.677749454975
+0.647211492062
+0.632441759109
+0.619394123554
+0.582736492157
+0.557993173599
+0.512751936913
+0.500015854836
+0.432753264904
+0.424838960171
+0.387359470129
+0.363117575645
+0.324057579041
+0.318569540977
+0.307794481516
+0.279892444611
+0.266025602818
+0.251419901848
+0.249537363648
+0.24515606463
+0.242546990514
+0.23482003808
+0.229570165277
+0.218249022961
+0.217245295644
+0.215601503849
+0.214633762836
+0.213021233678
+0.209445133805
+0.202178910375
+0.195967033505
+0.195477575064
+0.19516018033
+0.189925506711
+0.188304677606
+0.18726746738
+0.183985501528
+0.175400704145
+0.17190939188
+0.164070561528
+0.163019970059
+0.161046147346
+0.144684314728
+0.142110586166
+0.13469555974
+0.119699165225
+0.117632962763
+0.108988888562
+0.108914069831
+0.105968996882
+0.105487324297
+0.102074086666
+0.101763956249
+0.0996679365635
+0.0925127342343
+0.0908635854721
+0.0889407098293
+0.0883489102125
+0.0879924297333
+0.0862389951944
+0.0739420056343
+0.0673952102661
+0.0664400160313
+0.0656843259931
+0.062753982842
+0.0623628795147
+0.0613485127687
+0.995609819889
+0.985808908939
+0.977792084217
+0.856063306332
+0.85261952877
+0.66942769289
+0.362217873335
+0.328203260899
+0.296238809824
+0.294222384691
+0.265820294619
+0.25049495697
+0.239388793707
+0.225614026189
+0.207796096802
+0.207389205694
+0.162959337234
+0.161703631282
+0.146451890469
+0.112416028976
+0.110222414136
+0.098472982645
+0.0983410999179
+0.0972052365541
+0.0949347391725
+0.0938810706139
+0.0778178051114
+0.0675895810127
+0.0647535175085
+0.999327898026
+0.589051127434
+0.449965268373
+0.358234494925
+0.16492651403
+0.133294284344
+0.115192569792
+0.0824446231127
+0.0745395943522
+0.993969082832
+0.317980706692
+0.156709343195
+0.120312787592
+0.072157561779
+0.999146938324
+0.973865270615
+0.510059118271
+0.209527924657
+0.167541012168
+0.087510637939
+0.999665141106
+0.966150462627
+0.755212068558
+0.470045179129
+0.28701967001
+0.27125787735
+0.212211683393
+0.177917823195
+0.119212672114
+0.0986754968762
+0.0781536400318
+0.0741831511259
+0.99813747406
+0.992610991001
+0.988100707531
+0.963804841042
+0.940439105034
+0.932027816772
+0.924494802952
+0.901855230331
+0.883580863476
+0.878562510014
+0.876759886742
+0.868268311024
+0.867651045322
+0.863742232323
+0.861105024815
+0.855585157871
+0.843935787678
+0.833129823208
+0.785797417164
+0.778739392757
+0.777355253696
+0.776069581509
+0.744816362858
+0.716855108738
+0.703691959381
+0.70262336731
+0.689530670643
+0.682512402534
+0.67793905735
+0.664110004902
+0.647759020329
+0.642681956291
+0.637902617455
+0.635760426521
+0.572109341621
+0.557261526585
+0.54349988699
+0.533530235291
+0.528042078018
+0.507111132145
+0.499702274799
+0.496988683939
+0.494393497705
+0.494116961956
+0.489973902702
+0.46968844533
+0.467306405306
+0.448118567467
+0.446046561003
+0.430171132088
+0.429295927286
+0.362535774708
+0.361276835203
+0.359457165003
+0.355101078749
+0.336555451155
+0.323863208294
+0.309299021959
+0.299248546362
+0.295654773712
+0.295230895281
+0.291768103838
+0.289865583181
+0.287403583527
+0.275661706924
+0.27197355032
+0.265996694565
+0.26417028904
+0.262838125229
+0.261226981878
+0.252158403397
+0.250873923302
+0.24856300652
+0.2457382828
+0.243677020073
+0.239125519991
+0.238070085645
+0.23425129056
+0.234179913998
+0.231965720654
+0.231655731797
+0.229975625873
+0.226315796375
+0.224215686321
+0.222507998347
+0.21534486115
+0.215285971761
+0.213717341423
+0.211489111185
+0.204273805022
+0.203083410859
+0.200720965862
+0.19841632247
+0.198205828667
+0.198038488626
+0.197612836957
+0.19684228301
+0.196061432362
+0.191607475281
+0.182949393988
+0.180072486401
+0.179515048862
+0.178141653538
+0.174838274717
+0.174369752407
+0.172083944082
+0.169615000486
+0.169252082705
+0.16807128489
+0.165467873216
+0.162430986762
+0.160352438688
+0.157461091876
+0.154356122017
+0.149120181799
+0.146296083927
+0.145871728659
+0.138907641172
+0.137010514736
+0.133578002453
+0.133500680327
+0.132396057248
+0.130838170648
+0.129945799708
+0.127266794443
+0.124972112477
+0.124241486192
+0.118300482631
+0.112176552415
+0.11154203862
+0.109520696104
+0.108537249267
+0.107933029532
+0.10636087507
+0.106118708849
+0.102305918932
+0.0992020592093
+0.0977046489716
+0.0964346677065
+0.0959736555815
+0.0957908481359
+0.0944921150804
+0.0944909229875
+0.0920612365007
+0.0867724344134
+0.086532138288
+0.0863964110613
+0.0851268470287
+0.0846216604114
+0.0833846330643
+0.0813491865993
+0.0811135843396
+0.0778039395809
+0.0776319429278
+0.077386572957
+0.0756799131632
+0.0742044076324
+0.0739722251892
+0.07396338135
+0.0724207162857
+0.0722482800484
+0.0719790309668
+0.07094078511
+0.0681423842907
+0.0676694139838
+0.0671794489026
+0.0669196769595
+0.0668865591288
+0.0667234137654
+0.0667151585221
+0.0661136955023
+0.0658057928085
+0.0643227696419
+0.0643060356379
+0.0610485225916
+0.999647498131
+0.999614596367
+0.254914730787
+0.250034332275
+0.177178859711
+0.14827118814
+0.0968680903316
+0.0941291227937
+0.9870698452
+0.298628121614
+0.167579203844
+0.119780682027
+0.99989759922
+0.401040881872
+0.300980985165
+0.223686814308
+0.156358599663
+0.113467633724
+0.0850547477603
+0.0714357346296
+0.999783933163
+0.248866841197
+0.228009745479
+0.146991983056
+0.121359944344
+0.0753244906664
+0.0669587031007
+0.065579585731
+0.999702990055
+0.261216312647
+0.241786777973
+0.130015745759
+0.0937835648656
+0.0924456864595
+0.0758688002825
+0.0630233511329
+0.999973058701
+0.25638115406
+0.0641999989748
+0.999904513359
+0.963299572468
+0.864715456963
+0.855619728565
+0.703570067883
+0.636732518673
+0.599631309509
+0.520215392113
+0.516608476639
+0.390511721373
+0.351930856705
+0.334477543831
+0.310902237892
+0.306291848421
+0.284767687321
+0.263958483934
+0.24243555963
+0.234237253666
+0.228929206729
+0.228206530213
+0.193452358246
+0.182682871819
+0.146259009838
+0.131011262536
+0.0973791778088
+0.095102198422
+0.0923015996814
+0.0910785347223
+0.0826298072934
+0.0822088122368
+0.0754336044192
+0.0725092738867
+0.0724725201726
+0.067164644599
+0.0645485296845
+0.999751150608
+0.960565567017
+0.44544672966
+0.190759316087
+0.169472292066
+0.0882143229246
+0.98605132103
+0.972831904888
+0.949218332767
+0.921257913113
+0.865982949734
+0.826148152351
+0.813286542892
+0.77413713932
+0.721324026585
+0.637411773205
+0.618342101574
+0.600509047508
+0.595347881317
+0.563892483711
+0.525508105755
+0.487411797047
+0.445209026337
+0.43853020668
+0.433189183474
+0.419008225203
+0.383307427168
+0.377691715956
+0.357129126787
+0.343159973621
+0.338061869144
+0.32299849391
+0.305379152298
+0.302161157131
+0.275153011084
+0.27429330349
+0.270834058523
+0.256879419088
+0.237222969532
+0.231187850237
+0.228386193514
+0.227571979165
+0.216610983014
+0.216348335147
+0.199788570404
+0.1895788908
+0.185845777392
+0.180060386658
+0.172594517469
+0.160984829068
+0.158350273967
+0.156319499016
+0.155826866627
+0.155704841018
+0.147876277566
+0.143310695887
+0.138112276793
+0.136820062995
+0.131968989968
+0.131513282657
+0.124637529254
+0.123622491956
+0.11758775264
+0.114273399115
+0.110476411879
+0.104043342173
+0.103128887713
+0.101541139185
+0.100412964821
+0.100223995745
+0.098596021533
+0.0911998972297
+0.0905094742775
+0.084547907114
+0.081448033452
+0.0803300589323
+0.0793693587184
+0.072701394558
+0.0722543075681
+0.0719430297613
+0.0708824172616
+0.070429444313
+0.0662089139223
+0.0641233772039
+0.0633174702525
+0.0621746592224
+0.0604780949652
+0.999423503876
+0.697014093399
+0.569929003716
+0.34526848793
+0.240208864212
+0.194512933493
+0.182338282466
+0.171356841922
+0.148433312774
+0.124569609761
+0.116505540907
+0.0881381779909
+0.0725853815675
+0.0702084451914
+0.066964328289
+0.0654918327928
+0.999749481678
+0.28783378005
+0.236948981881
+0.0952038690448
+0.0648550465703
+0.0606005452573
+0.999816715717
+0.836382091045
+0.304812848568
+0.250629484653
+0.238994598389
+0.235573261976
+0.189358532429
+0.126861974597
+0.103864826262
+0.0816480368376
+0.0668829977512
+0.999321103096
+0.998557984829
+0.979724287987
+0.965157449245
+0.958973228931
+0.946936190128
+0.919280350208
+0.882884144783
+0.867400109768
+0.848203897476
+0.82910066843
+0.791690707207
+0.769681930542
+0.755970776081
+0.679480493069
+0.65517526865
+0.615828812122
+0.599623858929
+0.562855601311
+0.456504225731
+0.45091766119
+0.435975730419
+0.416528999805
+0.377013802528
+0.353063762188
+0.345091789961
+0.301550596952
+0.2978541255
+0.282179683447
+0.276892840862
+0.273619532585
+0.264848709106
+0.258112519979
+0.254024714231
+0.241193547845
+0.225419417024
+0.224230334163
+0.21858458221
+0.214154615998
+0.210886463523
+0.20482994616
+0.189787015319
+0.189678192139
+0.185276031494
+0.185275137424
+0.178157195449
+0.177144691348
+0.167311549187
+0.160059124231
+0.158031582832
+0.156881034374
+0.154251813889
+0.149874553084
+0.145652517676
+0.139069005847
+0.135851964355
+0.128094419837
+0.126501783729
+0.125699862838
+0.117894470692
+0.112521469593
+0.106480650604
+0.105416074395
+0.102137938142
+0.0905746221542
+0.0890486612916
+0.0877559483051
+0.0871848091483
+0.0839751660824
+0.0838447138667
+0.0798647999763
+0.0771381109953
+0.0755137503147
+0.0740694403648
+0.0740684270859
+0.0717643126845
+0.0633712485433
+0.0623853206635
+0.0612244382501
+0.0612185411155
+0.999930262566
+0.264978945255
+0.215267211199
+0.0700423568487
+0.918793022633
+0.909988462925
+0.895452558994
+0.870280861855
+0.858644247055
+0.751621365547
+0.684864163399
+0.589216113091
+0.459946513176
+0.421353459358
+0.403338313103
+0.380832463503
+0.349546313286
+0.333078503609
+0.323137789965
+0.284661501646
+0.257442176342
+0.239297389984
+0.232733950019
+0.221663698554
+0.220750555396
+0.210659101605
+0.19155330956
+0.18532858789
+0.177422478795
+0.177394881845
+0.172175943851
+0.16719789803
+0.165301904082
+0.153737619519
+0.153027787805
+0.139929592609
+0.127370283008
+0.126917883754
+0.116201668978
+0.101289063692
+0.0932319760323
+0.0879509598017
+0.0815275087953
+0.080434910953
+0.0798888206482
+0.074094414711
+0.072818942368
+0.0664271488786
+0.0630739405751
+0.0625502467155
+0.0619324855506
+0.0609252341092
+0.996094286442
+0.783859848976
+0.391915529966
+0.341696858406
+0.305452167988
+0.232237443328
+0.206424579024
+0.160152792931
+0.091163046658
+0.0860409587622
+0.0607722178102
+0.99935323
+0.23200763762
+0.159444361925
+0.137707769871
+0.101102054119
+0.0700017139316
+0.0629425346851
+0.928241431713
+0.927603900433
+0.809474110603
+0.721140265465
+0.675829648972
+0.609257519245
+0.608087182045
+0.419587910175
+0.411121159792
+0.407088369131
+0.40494915843
+0.350431680679
+0.312210649252
+0.299827635288
+0.29370829463
+0.240865156054
+0.240434840322
+0.240147918463
+0.205328375101
+0.199372544885
+0.189182281494
+0.17486166954
+0.157603800297
+0.152335777879
+0.145285010338
+0.121484808624
+0.116248063743
+0.110192686319
+0.107921048999
+0.106773361564
+0.104240886867
+0.099738702178
+0.0916322171688
+0.0895457193255
+0.0828451141715
+0.0789540708065
+0.0712443292141
+0.0651727244258
+0.064178571105
+0.999585330486
+0.314905285835
+0.090255908668
+0.99640250206
+0.991733074188
+0.988321602345
+0.985226690769
+0.978298485279
+0.954948425293
+0.925517737865
+0.916895270348
+0.903446316719
+0.896908640862
+0.878919064999
+0.873370230198
+0.773900210857
+0.668187618256
+0.620916545391
+0.545057594776
+0.542878806591
+0.536131858826
+0.520977675915
+0.466932296753
+0.435304552317
+0.404742985964
+0.385689944029
+0.378920286894
+0.345028698444
+0.338276416063
+0.335390478373
+0.333610296249
+0.332468956709
+0.329072386026
+0.315503418446
+0.300111144781
+0.295513391495
+0.278983712196
+0.274576187134
+0.273975402117
+0.268722385168
+0.259604364634
+0.257844150066
+0.254004627466
+0.23782761395
+0.222446501255
+0.201061934233
+0.184850260615
+0.182833105326
+0.170536994934
+0.170308798552
+0.1564168185
+0.154877603054
+0.152422636747
+0.151924192905
+0.136061936617
+0.128889486194
+0.127029314637
+0.120414614677
+0.114896140993
+0.112237617373
+0.108092047274
+0.106608577073
+0.106116451323
+0.105271905661
+0.101962596178
+0.100966535509
+0.100008808076
+0.0997903347015
+0.0944616869092
+0.0905239284039
+0.089131526649
+0.0875524058938
+0.0850820094347
+0.080189101398
+0.0795401185751
+0.0778274834156
+0.0750621259212
+0.0750113353133
+0.0696930959821
+0.0675457343459
+0.0668147876859
+0.0652059242129
+0.0651736706495
+0.0650265514851
+0.0645697638392
+0.0622994340956
+0.0619180761278
+0.060244306922
+0.996591448784
+0.991301238537
+0.935803174973
+0.351363003254
+0.273861974478
+0.220002219081
+0.163848161697
+0.157545402646
+0.121596924961
+0.110192619264
+0.085512638092
+0.998530030251
+0.569775760174
+0.381305664778
+0.126088038087
+0.0886057466269
+0.956721305847
+0.896050095558
+0.740476250648
+0.709197700024
+0.62139570713
+0.60418766737
+0.394144654274
+0.319270521402
+0.31787148118
+0.285831272602
+0.254930436611
+0.250063329935
+0.243644207716
+0.220025658607
+0.214191377163
+0.211039036512
+0.199717491865
+0.16814301908
+0.167992785573
+0.14836089313
+0.135369986296
+0.120998799801
+0.11588152498
+0.103489726782
+0.0999585613608
+0.0913734659553
+0.0913628265262
+0.0912134721875
+0.0902073606849
+0.08596906811
+0.0755198448896
+0.0753846466541
+0.995486080647
+0.359751075506
+0.329952299595
+0.189676970243
+0.162288531661
+0.108207084239
+0.090929068625
+0.0844911634922
+0.0726141929626
+0.99982124567
+0.426559776068
+0.396254062653
+0.339897453785
+0.151158213615
+0.144483014941
+0.121539771557
+0.106073461473
+0.0818219929934
+0.0761971622705
+0.99281847477
+0.680088579655
+0.665134966373
+0.273329675198
+0.232434228063
+0.205759435892
+0.174087077379
+0.141760021448
+0.100063487887
+0.0746182054281
+0.0671750381589
+0.0666241720319
+0.997017025948
+0.98956900835
+0.984933435917
+0.835994064808
+0.834919095039
+0.394066184759
+0.323447614908
+0.286985963583
+0.265474617481
+0.253260105848
+0.249513924122
+0.240021839738
+0.213724806905
+0.176993653178
+0.151453897357
+0.146544799209
+0.139100432396
+0.133656397462
+0.0986795723438
+0.0940695106983
+0.0937565118074
+0.0896872729063
+0.0851817131042
+0.0739896818995
+0.0636100023985
+0.990040004253
+0.938260674477
+0.864358723164
+0.845659077168
+0.838073611259
+0.751042664051
+0.699841856956
+0.633207440376
+0.460593640804
+0.43503755331
+0.4214848876
+0.350840419531
+0.309491723776
+0.300565779209
+0.286990016699
+0.275838643312
+0.275075018406
+0.264865338802
+0.227897971869
+0.224788337946
+0.203065857291
+0.202984720469
+0.201085418463
+0.197300702333
+0.187384575605
+0.154775500298
+0.129838332534
+0.107454903424
+0.102793551981
+0.100681632757
+0.0982740074396
+0.0932383909822
+0.0884768813848
+0.0874537453055
+0.0862033814192
+0.0700074955821
+0.0626935660839
+0.0619733966887
+0.0603422746062
+0.987687170506
+0.9534278512
+0.690516114235
+0.656992018223
+0.338447481394
+0.294588714838
+0.278581172228
+0.222097337246
+0.195482179523
+0.186479762197
+0.183085009456
+0.153174683452
+0.138071283698
+0.121064096689
+0.0994497686625
+0.0965776145458
+0.0915165692568
+0.0672760978341
+0.999877333641
+0.275574713945
+0.0628048405051
+0.999425053596
+0.999152898788
+0.342125207186
+0.29858404398
+0.261546045542
+0.171999439597
+0.0854120254517
+0.0804008245468
+0.0674412176013
+0.9988951087
+0.997553884983
+0.996629297733
+0.908194959164
+0.826258838177
+0.810361266136
+0.4657304883
+0.302203804255
+0.269821435213
+0.259423732758
+0.237845614552
+0.233258873224
+0.229376673698
+0.219584435225
+0.219068586826
+0.213079333305
+0.159966424108
+0.118193872273
+0.111889615655
+0.10762155056
+0.0977184548974
+0.0846654996276
+0.0769793763757
+0.0767243877053
+0.0760666206479
+0.0758332312107
+0.0648975148797
+0.999572694302
+0.98286485672
+0.88434368372
+0.362865835428
+0.23070730269
+0.192461296916
+0.154038637877
+0.116912871599
+0.0916816517711
+0.0849516913295
+0.999251186848
+0.748837888241
+0.647558152676
+0.278570622206
+0.186324417591
+0.111687466502
+0.091396741569
+0.999945402145
+0.235306695104
+0.0996424108744
+0.0619166530669
+0.999047458172
+0.995164990425
+0.673056781292
+0.279895395041
+0.273194611073
+0.234997496009
+0.157293826342
+0.103998601437
+0.0987002179027
+0.0841744318604
+0.0822166949511
+0.065088570118
+0.0610328428447
+0.996588587761
+0.988081455231
+0.970761656761
+0.965746879578
+0.940706551075
+0.877267301083
+0.853065907955
+0.759087741375
+0.70178103447
+0.694271981716
+0.659762918949
+0.646405637264
+0.584201097488
+0.558450222015
+0.517859101295
+0.507764279842
+0.446306347847
+0.429356813431
+0.391583681107
+0.376185446978
+0.364522248507
+0.339197009802
+0.338623702526
+0.326753795147
+0.30984839797
+0.29733940959
+0.291559040546
+0.277454018593
+0.27645239234
+0.266719907522
+0.263562530279
+0.251346826553
+0.246089965105
+0.237865269184
+0.234279170632
+0.217461153865
+0.21277782321
+0.210009992123
+0.204820886254
+0.197075307369
+0.18971683085
+0.188739061356
+0.176231786609
+0.168941780925
+0.157661437988
+0.146052896976
+0.143397852778
+0.137338131666
+0.132488116622
+0.132200032473
+0.1315574646
+0.127576574683
+0.121079370379
+0.119582787156
+0.118819706142
+0.11427077651
+0.108684711158
+0.108010806143
+0.105824142694
+0.105662770569
+0.102198645473
+0.0995450615883
+0.0981618538499
+0.0953179448843
+0.0942320078611
+0.0872875005007
+0.0836980044842
+0.0795563533902
+0.075822301209
+0.0731236860156
+0.0729961916804
+0.0728418529034
+0.0709374174476
+0.0693177878857
+0.0668859407306
+0.0632602870464
+0.060149744153
+0.999566614628
+0.999563157558
+0.324191182852
+0.2952863276
+0.271803110838
+0.163585722446
+0.111699931324
+0.0835042372346
+0.0832152739167
+0.0695675387979
+0.0624154955149
+0.0612318105996
+0.999528646469
+0.293835282326
+0.208048820496
+0.126639187336
+0.107121534646
+0.0966868177056
+0.999256670475
+0.232148826122
+0.0776641964912
+0.0605016648769
+0.999573767185
+0.990654528141
+0.80548030138
+0.370883822441
+0.33605208993
+0.265214055777
+0.21687503159
+0.160572215915
+0.142053470016
+0.140499129891
+0.108606867492
+0.0854005813599
+0.0658566728234
+0.931642949581
+0.820483922958
+0.7946138978
+0.647652029991
+0.587699711323
+0.523620843887
+0.482994407415
+0.436845242977
+0.341811776161
+0.314222514629
+0.293038904667
+0.258700311184
+0.215625882149
+0.193253815174
+0.189073070884
+0.168528825045
+0.162029579282
+0.151471629739
+0.145816281438
+0.116787202656
+0.108335539699
+0.0960503667593
+0.0940984934568
+0.079460658133
+0.0708612725139
+0.0656147524714
+0.0640139728785
+0.0612058825791
+0.997976720333
+0.339769601822
+0.17908065021
+0.13043436408
+0.125352188945
+0.0921332389116
+0.995088875294
+0.466483056545
+0.205591082573
+0.157520651817
+0.0688285380602
+0.998803377151
+0.970529258251
+0.404499232769
+0.322687268257
+0.239596903324
+0.218683362007
+0.203765109181
+0.121299549937
+0.104370109737
+0.0995576307178
+0.0739333853126
+0.0635761618614
+0.972024202347
+0.940296411514
+0.900251865387
+0.836435079575
+0.818792760372
+0.760461091995
+0.611418485641
+0.607984602451
+0.517088651657
+0.501720368862
+0.455332934856
+0.444703787565
+0.443194389343
+0.438420265913
+0.407955110073
+0.36962968111
+0.331677258015
+0.305339425802
+0.305180549622
+0.298449248075
+0.287496834993
+0.265444546938
+0.258132725954
+0.249221682549
+0.226935341954
+0.223901957273
+0.221350461245
+0.206763118505
+0.193542584777
+0.191532969475
+0.171158954501
+0.150512501597
+0.144064337015
+0.143987566233
+0.142530471087
+0.14159822464
+0.132589057088
+0.12518042326
+0.119938611984
+0.119907841086
+0.119603894651
+0.119336351752
+0.111741878092
+0.106621332467
+0.0906489342451
+0.0898181423545
+0.0791564434767
+0.0718372538686
+0.0708483532071
+0.0700654163957
+0.0669043138623
+0.064406119287
+0.0641064271331
+0.0631545335054
+0.0628813058138
+0.0611348487437
+0.999726831913
+0.242094576359
+0.97813808918
+0.603029310703
+0.304822295904
+0.253997594118
+0.196623265743
+0.109936110675
+0.0772310867906
+0.995287299156
+0.545065462589
+0.332667857409
+0.212983295321
+0.124006286263
+0.11533959955
+0.100709214807
+0.0835785940289
+0.0801721960306
+0.0699243471026
+0.0669233947992
+0.999619483948
+0.29930254817
+0.120637021959
+0.0704602003098
+0.998993217945
+0.998658299446
+0.995828449726
+0.989591419697
+0.980485022068
+0.973572194576
+0.954182565212
+0.935072243214
+0.926322996616
+0.876103460789
+0.859138429165
+0.805592834949
+0.744191348553
+0.741405487061
+0.706147074699
+0.66938072443
+0.601031184196
+0.464482486248
+0.452049940825
+0.43095639348
+0.398519992828
+0.327904045582
+0.324799716473
+0.324042648077
+0.316706001759
+0.302008718252
+0.29931885004
+0.284750431776
+0.281142383814
+0.27257835865
+0.251516222954
+0.247995227575
+0.230802774429
+0.224081024528
+0.22376921773
+0.208123937249
+0.207746952772
+0.206273972988
+0.183547630906
+0.177025288343
+0.166702523828
+0.153717026114
+0.148100197315
+0.14721685648
+0.140495061874
+0.127937793732
+0.122056119144
+0.116986840963
+0.114419817924
+0.108014836907
+0.10657338798
+0.0945177674294
+0.0928792729974
+0.0812291130424
+0.080842025578
+0.0771686211228
+0.0767068862915
+0.0735696256161
+0.0712363943458
+0.0693595781922
+0.06720662117
+0.0670527219772
+0.0618645139039
+0.0601130500436
+0.992919027805
+0.885387182236
+0.448596149683
+0.389277338982
+0.383373379707
+0.372457236052
+0.301650702953
+0.288977116346
+0.22561557591
+0.200352340937
+0.193471163511
+0.156157463789
+0.151210814714
+0.141674399376
+0.11864683032
+0.110155560076
+0.105183377862
+0.0978337153792
+0.0901338532567
+0.0879865586758
+0.084437482059
+0.0776363313198
+0.074201785028
+0.0727921500802
+0.0679612159729
+0.0648669824004
+0.063520476222
+0.061262845993
+0.999399423599
+0.978470087051
+0.977693557739
+0.974572002888
+0.956830739975
+0.956642746925
+0.947710037231
+0.932061553001
+0.925328314304
+0.919510543346
+0.918345570564
+0.896972239017
+0.892491340637
+0.878386437893
+0.860445201397
+0.79055339098
+0.655444681644
+0.650136530399
+0.565142869949
+0.431629419327
+0.419715672731
+0.41757646203
+0.415912300348
+0.412700027227
+0.395003765821
+0.383092284203
+0.382614463568
+0.333122223616
+0.329138308764
+0.328209877014
+0.322359502316
+0.317336976528
+0.315322220325
+0.307242572308
+0.29644536972
+0.290674746037
+0.266260743141
+0.257342845201
+0.234238907695
+0.232782229781
+0.228175669909
+0.227599054575
+0.223950266838
+0.22240960598
+0.215394482017
+0.208211481571
+0.202178582549
+0.196228697896
+0.187722429633
+0.179256007075
+0.172852039337
+0.170425266027
+0.162458181381
+0.158523157239
+0.145546972752
+0.142881140113
+0.140177339315
+0.13711091876
+0.134610697627
+0.131572410464
+0.128846034408
+0.120284311473
+0.120125107467
+0.118642024696
+0.117176868021
+0.104854203761
+0.104717411101
+0.100027874112
+0.0910078138113
+0.0898029282689
+0.0884613767266
+0.0881040021777
+0.0835251063108
+0.0809030532837
+0.0761048272252
+0.0758955404162
+0.074640981853
+0.0740627720952
+0.0669538527727
+0.0650240704417
+0.0633001178503
+0.0631686225533
+0.0623364411294
+0.999653816223
+0.94340389967
+0.750251591206
+0.545947432518
+0.473024517298
+0.409049361944
+0.368470847607
+0.354282230139
+0.351360410452
+0.298499047756
+0.257070809603
+0.231195211411
+0.199325755239
+0.161797136068
+0.143436729908
+0.142542362213
+0.121286265552
+0.108890376985
+0.0939121767879
+0.0873317793012
+0.0866383016109
+0.0850111842155
+0.0841589421034
+0.0705835074186
+0.0699430331588
+0.0685967430472
+0.0680602937937
+0.0652983412147
+0.0627056434751
+0.981294393539
+0.7474796772
+0.498229712248
+0.390886127949
+0.317699164152
+0.250567704439
+0.244197502732
+0.174021303654
+0.0954852327704
+0.0855824202299
+0.0671508535743
+0.995467305183
+0.989156603813
+0.981159329414
+0.94357329607
+0.931730568409
+0.92686778307
+0.873984336853
+0.826598525047
+0.806592047215
+0.787720322609
+0.781865298748
+0.772805988789
+0.753112316132
+0.645101904869
+0.606848716736
+0.577498793602
+0.571519494057
+0.568128287792
+0.512196302414
+0.508441030979
+0.497798413038
+0.466664433479
+0.455729335546
+0.447570383549
+0.416837096214
+0.407716840506
+0.403053581715
+0.401124507189
+0.39968764782
+0.379527956247
+0.377868771553
+0.354503601789
+0.341604113579
+0.337582170963
+0.336239308119
+0.329979538918
+0.326874732971
+0.310327202082
+0.289490073919
+0.27976718545
+0.275756299496
+0.271680802107
+0.270303457975
+0.263294875622
+0.261558800936
+0.255211889744
+0.24741601944
+0.247053876519
+0.240944936872
+0.232124149799
+0.230390161276
+0.224415823817
+0.222892001271
+0.216895535588
+0.213973671198
+0.198428019881
+0.193440943956
+0.190254449844
+0.185545176268
+0.184415712953
+0.170413136482
+0.168342262506
+0.162192821503
+0.162167280912
+0.15900978446
+0.158171698451
+0.153709933162
+0.143163770437
+0.141106873751
+0.139101877809
+0.127829015255
+0.12507905066
+0.121952407062
+0.11902539432
+0.114810794592
+0.112448967993
+0.11044151336
+0.108875520527
+0.107859559357
+0.105230569839
+0.10371927917
+0.102066993713
+0.100801408291
+0.0971511751413
+0.094415076077
+0.0943386554718
+0.0920490399003
+0.0893284454942
+0.0852804854512
+0.0816306248307
+0.0811651870608
+0.0805525183678
+0.0800169631839
+0.077233158052
+0.0769194215536
+0.0752452835441
+0.0748977214098
+0.0718644186854
+0.0694822445512
+0.0688810348511
+0.0687386542559
+0.0670263618231
+0.0656152069569
+0.0655941367149
+0.0652325376868
+0.0633669942617
+0.0618750154972
+0.0612601898611
+0.0612247399986
+0.0611878372729
+0.997337162495
+0.800556838512
+0.644194841385
+0.446538150311
+0.334345877171
+0.265950739384
+0.169146105647
+0.156202599406
+0.14240527153
+0.0992033928633
+0.0683369264007
+0.0672984644771
+0.997743606567
+0.310684591532
+0.29385510087
+0.2607640028
+0.117482759058
+0.110026396811
+0.0785076543689
+0.06939843297
+0.0681236460805
+0.999642133713
+0.258288174868
+0.988637030125
+0.849762916565
+0.831986486912
+0.68997502327
+0.534762978554
+0.301733016968
+0.240518406034
+0.225290864706
+0.172802016139
+0.171939045191
+0.169241651893
+0.15033686161
+0.124715603888
+0.0953694656491
+0.0789984688163
+0.0755188390613
+0.0749936327338
+0.0662939473987
+0.993837058544
+0.99350810051
+0.976979494095
+0.971027433872
+0.927385568619
+0.602248311043
+0.516098201275
+0.30507183075
+0.299415767193
+0.232242107391
+0.229931116104
+0.224938094616
+0.221804022789
+0.135515913367
+0.104852691293
+0.094304420054
+0.0777750387788
+0.0740854591131
+0.993667423725
+0.392991483212
+0.290017336607
+0.154927298427
+0.103427238762
+0.0782919973135
+0.073717802763
+0.999545633793
+0.817728817463
+0.521251440048
+0.354351371527
+0.261027187109
+0.168395683169
+0.148014739156
+0.147344663739
+0.115000911057
+0.0933415219188
+0.0630446523428
+0.0617033429444
+0.999594151974
+0.255783230066
+0.0725708454847
+0.997001230717
+0.966746449471
+0.355883687735
+0.296183556318
+0.149156630039
+0.141488462687
+0.0700709298253
+0.999528765678
+0.901022791862
+0.706227600574
+0.380389243364
+0.338564395905
+0.295996010303
+0.130823329091
+0.104411453009
+0.0905048400164
+0.0879968777299
+0.988569915295
+0.963732600212
+0.286798298359
+0.218737959862
+0.203057959676
+0.13743725419
+0.09066362679
+0.0850404128432
+0.0836478769779
+0.0667648166418
+0.0648810639977
+0.999981999397
+0.462309390306
+0.36390247941
+0.22330673039
+0.108254976571
+0.0680678263307
+0.999573171139
+0.984952032566
+0.325761556625
+0.304711937904
+0.29471629858
+0.245072290301
+0.131177172065
+0.122152656317
+0.121434278786
+0.0927219092846
+0.0718027204275
+0.999347627163
+0.997858703136
+0.242044657469
+0.232478022575
+0.0672069713473
+0.0647902786732
+0.993768453598
+0.970427274704
+0.967563211918
+0.906951904297
+0.572265326977
+0.474701136351
+0.390586495399
+0.304914146662
+0.299222826958
+0.283159762621
+0.253796637058
+0.241537258029
+0.204465150833
+0.143380597234
+0.136049568653
+0.11668164283
+0.102182202041
+0.0933771207929
+0.0747946873307
+0.0710505992174
+0.0695484057069
+0.0685590654612
+0.934580564499
+0.891804873943
+0.882510483265
+0.802462518215
+0.7891471982
+0.74493265152
+0.687112569809
+0.684889018536
+0.586304068565
+0.556956291199
+0.539672553539
+0.517548263073
+0.504299342632
+0.322133004665
+0.316113859415
+0.309121251106
+0.303063422441
+0.293669104576
+0.288682967424
+0.2879807055
+0.219391420484
+0.212455257773
+0.190936610103
+0.190637096763
+0.190106675029
+0.180670469999
+0.171477407217
+0.169344887137
+0.167399793863
+0.154648065567
+0.146116316319
+0.140386536717
+0.137933894992
+0.127411961555
+0.127320125699
+0.122049741447
+0.114478386939
+0.1096919626
+0.102671630681
+0.0984744951129
+0.0976060256362
+0.0975667089224
+0.0881121382117
+0.0793357789516
+0.0780472457409
+0.071719661355
+0.0633822306991
+0.0628453940153
+0.062213294208
+0.0620276741683
+0.999912381172
+0.711495757103
+0.628027200699
+0.41038697958
+0.36085319519
+0.223193019629
+0.166163980961
+0.123038999736
+0.0999667868018
+0.0723592862487
+0.0613801404834
+0.999815285206
+0.259990900755
+0.179814815521
+0.156719297171
+0.12982814014
+0.109694845974
+0.0728060752153
+0.0707457885146
+0.999953746796
+0.272777348757
+0.0762792825699
+0.996541559696
+0.595854461193
+0.233358815312
+0.218245401978
+0.141317039728
+0.132643014193
+0.113373890519
+0.0942837223411
+0.0630631297827
+0.999760806561
+0.996524989605
+0.680748999119
+0.414275497198
+0.299382477999
+0.263586312532
+0.149964034557
+0.14856351912
+0.137466996908
+0.124059282243
+0.0701696947217
+0.0648011490703
+0.999790132046
+0.999727547169
+0.243873864412
+0.226577296853
+0.221836790442
+0.11712475121
+0.114479772747
+0.112192466855
+0.111395418644
+0.0893452316523
+0.0844174623489
+0.0659474804997
+0.0651253461838
+0.0643536970019
+0.0628024041653
+0.999808490276
+0.94337439537
+0.937252759933
+0.874699950218
+0.846981227398
+0.696466326714
+0.69230234623
+0.518538236618
+0.476354509592
+0.433672219515
+0.390404999256
+0.359507977962
+0.247214272618
+0.245695278049
+0.243668422103
+0.22769279778
+0.202177360654
+0.180206164718
+0.175821751356
+0.174879819155
+0.146874323487
+0.138867869973
+0.118832044303
+0.110369078815
+0.107934623957
+0.0999486073852
+0.0996070727706
+0.0895005315542
+0.0832811519504
+0.0600558817387
+0.999309778214
+0.983438789845
+0.958445668221
+0.943826079369
+0.942461431026
+0.933533132076
+0.853879213333
+0.761849701405
+0.718042612076
+0.596516132355
+0.406269431114
+0.401232659817
+0.399286448956
+0.314919710159
+0.277226299047
+0.268599838018
+0.257576614618
+0.245165720582
+0.226391404867
+0.202594548464
+0.199204280972
+0.189792111516
+0.164424225688
+0.15276889503
+0.147418484092
+0.121750749648
+0.109162315726
+0.0962316319346
+0.0829912126064
+0.0802234485745
+0.0793528929353
+0.0793061628938
+0.0791105777025
+0.0716859698296
+0.0707231089473
+0.0679737553
+0.0618108808994
+0.0601726174355
+0.999969363213
+0.655605554581
+0.239963546395
+0.150742813945
+0.999837636948
+0.63870215416
+0.296659648418
+0.240688845515
+0.115407809615
+0.0797453671694
+0.999839544296
+0.257087349892
+0.999105274677
+0.998685479164
+0.991332828999
+0.813524365425
+0.800044178963
+0.637673914433
+0.426673024893
+0.415089428425
+0.350696325302
+0.28601834178
+0.283769339323
+0.249437853694
+0.241122335196
+0.217315286398
+0.206178754568
+0.159099340439
+0.142236709595
+0.134413912892
+0.131300151348
+0.122947327793
+0.101821817458
+0.086166806519
+0.0740347802639
+0.0645901113749
+0.0644907802343
+0.0601861663163
+0.999847054482
+0.913182735443
+0.618367910385
+0.493262082338
+0.26099473238
+0.238283976912
+0.166772842407
+0.130453705788
+0.105912394822
+0.0619120784104
+0.998251378536
+0.676184296608
+0.271244287491
+0.221775770187
+0.181421369314
+0.12882360816
+0.0823142156005
+0.0796986892819
+0.068931043148
+0.999464809895
+0.99933797121
+0.99839091301
+0.996566534042
+0.980399608612
+0.63525390625
+0.396099030972
+0.328433841467
+0.326502561569
+0.269772261381
+0.233716845512
+0.221280664206
+0.21858137846
+0.185293495655
+0.17918227613
+0.127624437213
+0.122339956462
+0.117155328393
+0.0958246141672
+0.0762821286917
+0.0717908069491
+0.0706969127059
+0.999534726143
+0.956270635128
+0.78299087286
+0.619055449963
+0.600671231747
+0.515981793404
+0.454024881124
+0.372248202562
+0.358608514071
+0.314116448164
+0.255853027105
+0.246589273214
+0.23751270771
+0.232381805778
+0.231391504407
+0.225431054831
+0.201145067811
+0.200535327196
+0.185915127397
+0.17939555645
+0.17325592041
+0.162455633283
+0.156048521399
+0.124668359756
+0.118499360979
+0.113812297583
+0.112187035382
+0.0974951833487
+0.0974452346563
+0.0970161035657
+0.0943970829248
+0.0863668695092
+0.0820543542504
+0.0810112133622
+0.0760793760419
+0.0758784860373
+0.0745631679893
+0.0720474421978
+0.068379804492
+0.0659238696098
+0.0652408972383
+0.999770700932
+0.564142644405
+0.340968191624
+0.280649900436
+0.184589445591
+0.141827240586
+0.0961844027042
+0.086652956903
+0.0813105925918
+0.0719143673778
+0.0617583543062
+0.999069392681
+0.97903239727
+0.978238999844
+0.975399255753
+0.966738522053
+0.961927175522
+0.954255998135
+0.953180730343
+0.884184718132
+0.842392086983
+0.780926704407
+0.699150800705
+0.677627623081
+0.654128074646
+0.601215183735
+0.587340116501
+0.48059129715
+0.450808763504
+0.449022501707
+0.424880325794
+0.419575810432
+0.405825018883
+0.362418323755
+0.362317711115
+0.361302822828
+0.352737993002
+0.335081070662
+0.329048126936
+0.322515547276
+0.320378899574
+0.308156341314
+0.306011736393
+0.301215618849
+0.283441364765
+0.27208763361
+0.270206749439
+0.252556353807
+0.246236935258
+0.218259528279
+0.213042810559
+0.20434781909
+0.173240557313
+0.17066590488
+0.164311349392
+0.158040985465
+0.1560587883
+0.154564797878
+0.147336453199
+0.140170350671
+0.130519136786
+0.128979027271
+0.124843247235
+0.111892074347
+0.108165994287
+0.107647553086
+0.100019231439
+0.0955723747611
+0.0941369533539
+0.0932310968637
+0.09222830832
+0.0877915546298
+0.0810086354613
+0.0803981497884
+0.0788272023201
+0.073427438736
+0.072064332664
+0.0714404508471
+0.0621041990817
+0.0617963932455
+0.0610187426209
+0.992899298668
+0.65851777792
+0.576886773109
+0.499951004982
+0.286502748728
+0.238583773375
+0.161186665297
+0.129746571183
+0.107545405626
+0.069496229291
+0.999663949013
+0.995927274227
+0.251562595367
+0.219694793224
+0.0827833339572
+0.9991543293
+0.994803130627
+0.883366823196
+0.338154911995
+0.244539558887
+0.240792810917
+0.098435766995
+0.0817362740636
+0.999712884426
+0.980890274048
+0.483280688524
+0.221983999014
+0.137036830187
+0.0840323641896
+0.998424053192
+0.312210798264
+0.200230032206
+0.132479533553
+0.116376101971
+0.102148190141
+0.0791081115603
+0.996057271957
+0.992395281792
+0.984329521656
+0.974482238293
+0.963348090649
+0.946114122868
+0.942731738091
+0.933136284351
+0.919347941875
+0.908869743347
+0.73175483942
+0.706171631813
+0.682261109352
+0.616430222988
+0.553826570511
+0.534924328327
+0.528635442257
+0.517790615559
+0.497245490551
+0.482954949141
+0.457427144051
+0.443433642387
+0.431958407164
+0.414077222347
+0.334260374308
+0.334044843912
+0.313742667437
+0.309803456068
+0.301132380962
+0.298630386591
+0.288474589586
+0.276427984238
+0.262236833572
+0.259798437357
+0.24260379374
+0.236177042127
+0.231493681669
+0.227245301008
+0.203895255923
+0.199481025338
+0.192932128906
+0.188553407788
+0.182057052851
+0.180160075426
+0.17834636569
+0.166424617171
+0.163593173027
+0.160310536623
+0.160161375999
+0.158785015345
+0.158235907555
+0.155487820506
+0.153020426631
+0.149402424693
+0.1467153579
+0.139196887612
+0.138734251261
+0.130453705788
+0.12520699203
+0.121888831258
+0.120392896235
+0.118774168193
+0.115981467068
+0.111503772438
+0.106362327933
+0.104354888201
+0.102328687906
+0.0950536653399
+0.0934586450458
+0.0896688401699
+0.0896506533027
+0.0858297348022
+0.0851439982653
+0.0846239030361
+0.0845995619893
+0.0835793018341
+0.0807672962546
+0.077451094985
+0.0772495493293
+0.0757977217436
+0.0743080228567
+0.071950815618
+0.0702368989587
+0.0699286013842
+0.0690144002438
+0.0681125074625
+0.067356094718
+0.0643888190389
+0.0638857260346
+0.0621795170009
+0.672215044498
+0.523001134396
+0.325364202261
+0.181692481041
+0.129675522447
+0.085443161428
+0.0837215483189
+0.0609933286905
+0.0606978870928
+0.999304533005
+0.987264871597
+0.908962666988
+0.288548588753
+0.274188786745
+0.265243172646
+0.241437807679
+0.199028119445
+0.158455595374
+0.125412195921
+0.0982922166586
+0.0724106654525
+0.0704554766417
+0.993785858154
+0.982573449612
+0.953100383282
+0.929751038551
+0.892648696899
+0.867128074169
+0.804423987865
+0.802300155163
+0.78337675333
+0.780208230019
+0.705207109451
+0.636888206005
+0.586753249168
+0.546583652496
+0.417186111212
+0.409511446953
+0.398674190044
+0.386740624905
+0.38101914525
+0.367669671774
+0.330847620964
+0.328646570444
+0.318590253592
+0.313761562109
+0.282247930765
+0.273219019175
+0.263079494238
+0.230236247182
+0.229020178318
+0.226447865367
+0.224753662944
+0.222333058715
+0.221686527133
+0.212345972657
+0.209558710456
+0.207722947001
+0.199930995703
+0.195840105414
+0.168167501688
+0.168066158891
+0.163701832294
+0.160573944449
+0.144484788179
+0.142971858382
+0.12179222703
+0.120382525027
+0.111095875502
+0.104992069304
+0.103123739362
+0.101115472615
+0.098357655108
+0.0934689044952
+0.0923248827457
+0.089435249567
+0.0879178345203
+0.0877492278814
+0.0816079899669
+0.0803733989596
+0.0802021250129
+0.0795307829976
+0.0777494236827
+0.0753972679377
+0.0688570812345
+0.0669772773981
+0.0644938796759
+0.0626188665628
+0.0622743330896
+0.0607079192996
+0.997906565666
+0.968211889267
+0.899787604809
+0.794611811638
+0.624473512173
+0.417919009924
+0.320336431265
+0.27963796258
+0.26075142622
+0.240269824862
+0.192754715681
+0.187987521291
+0.171289250255
+0.159312561154
+0.158740594983
+0.0821267589927
+0.0818191319704
+0.068516433239
+0.0672103241086
+0.065728828311
+0.0635766759515
+0.831298172474
+0.509759008884
+0.435923904181
+0.308531165123
+0.257574290037
+0.192840337753
+0.162329524755
+0.131342366338
+0.107066757977
+0.106905534863
+0.0774046853185
+0.0737820491195
+0.0600249134004
+0.999014735222
+0.259882360697
+0.162748336792
+0.0674859210849
+0.998517453671
+0.996604204178
+0.987738132477
+0.977688729763
+0.952325344086
+0.933723926544
+0.868783891201
+0.843804895878
+0.823487460613
+0.707257926464
+0.56450432539
+0.480521649122
+0.464974999428
+0.429939866066
+0.376900881529
+0.356451839209
+0.329525977373
+0.30372017622
+0.300460636616
+0.273165971041
+0.268592506647
+0.237408638
+0.235097423196
+0.229255750775
+0.225810647011
+0.216855958104
+0.205594494939
+0.193760678172
+0.181685343385
+0.177585110068
+0.157115444541
+0.146852254868
+0.13667319715
+0.134726896882
+0.126585483551
+0.124617934227
+0.121704950929
+0.118229709566
+0.115511998534
+0.111710950732
+0.108114436269
+0.104117386043
+0.0932931154966
+0.0829723626375
+0.0780748203397
+0.0742584019899
+0.0696071684361
+0.0688854902983
+0.0686996951699
+0.0615456402302
+0.997082412243
+0.994456768036
+0.990616381168
+0.963978946209
+0.87989205122
+0.821647763252
+0.388714820147
+0.361970335245
+0.355000257492
+0.317142128944
+0.268964737654
+0.25882139802
+0.252873718739
+0.206996098161
+0.175608247519
+0.165373623371
+0.150731816888
+0.112059853971
+0.0979655086994
+0.0879131779075
+0.0703918486834
+0.0697063058615
+0.0644352957606
+0.997612118721
+0.922433018684
+0.723499894142
+0.437144488096
+0.413682073355
+0.376486092806
+0.257346212864
+0.240200296044
+0.191237419844
+0.185256138444
+0.16052891314
+0.151502877474
+0.129777252674
+0.118568718433
+0.108675427735
+0.0893014296889
+0.0791508555412
+0.07139582932
+0.0655610114336
+0.0640480443835
+0.928671896458
+0.409835427999
+0.186382472515
+0.170811593533
+0.114078044891
+0.0905986651778
+0.998878777027
+0.872958540916
+0.366352170706
+0.282998144627
+0.246284082532
+0.125234037638
+0.113887518644
+0.0756373032928
+0.999482214451
+0.600884854794
+0.495869338512
+0.467495679855
+0.29986923933
+0.190346494317
+0.146913975477
+0.128931537271
+0.123174920678
+0.105487182736
+0.0913796573877
+0.0907498970628
+0.0784928500652
+0.985241234303
+0.982559561729
+0.978997290134
+0.968012094498
+0.876975238323
+0.716936290264
+0.477344751358
+0.350689530373
+0.308093637228
+0.298280328512
+0.270283222198
+0.243495076895
+0.222046822309
+0.176135420799
+0.16826210916
+0.148713886738
+0.131712645292
+0.0954710990191
+0.0831436812878
+0.080810405314
+0.0799375250936
+0.0737701356411
+0.0732511654496
+0.0698933079839
+0.0696706026793
+0.0615881867707
+0.999531745911
+0.998627662659
+0.997731626034
+0.991755306721
+0.982281208038
+0.932970404625
+0.737601280212
+0.640313923359
+0.589346110821
+0.57566010952
+0.561876237392
+0.508812427521
+0.411704331636
+0.279640078545
+0.262511283159
+0.255712419748
+0.24915099144
+0.239174813032
+0.236345902085
+0.223564043641
+0.221887439489
+0.205888524652
+0.188539609313
+0.185902446508
+0.157645270228
+0.146827593446
+0.138461172581
+0.128459632397
+0.105842217803
+0.0900628194213
+0.087972573936
+0.0833854004741
+0.0731311589479
+0.0724459663033
+0.06578656286
+0.0639344602823
+0.0638320297003
+0.0602390393615
+0.0601266175508
+0.294877827168
+0.0770357474685
+0.99931538105
+0.969477057457
+0.915282487869
+0.911972463131
+0.800472438335
+0.777152836323
+0.755175054073
+0.701369464397
+0.661733806133
+0.646748185158
+0.620197832584
+0.598666906357
+0.431109786034
+0.420760810375
+0.414985656738
+0.368939638138
+0.366710662842
+0.363941043615
+0.331070929766
+0.32373803854
+0.313791185617
+0.286590635777
+0.284686326981
+0.277373790741
+0.250269144773
+0.23987711966
+0.232445955276
+0.224905908108
+0.211780279875
+0.201196596026
+0.195459470153
+0.178792640567
+0.174023061991
+0.171036764979
+0.158079341054
+0.155160412192
+0.153384134173
+0.148956656456
+0.141508340836
+0.141188979149
+0.137287840247
+0.12751673162
+0.124325923622
+0.122190065682
+0.11753308028
+0.113891020417
+0.113768704236
+0.111258625984
+0.106031768024
+0.104319483042
+0.100845590234
+0.0973495915532
+0.0959351509809
+0.0941296070814
+0.0890717059374
+0.0885871723294
+0.0883445218205
+0.0881924256682
+0.0796804204583
+0.0792854428291
+0.0777773857117
+0.0756999924779
+0.0740049108863
+0.070074878633
+0.0685919970274
+0.0610492117703
+0.0601226389408
+0.998621463776
+0.998311877251
+0.990745663643
+0.985422730446
+0.956351399422
+0.592141211033
+0.275923162699
+0.23105609417
+0.226518109441
+0.214741840959
+0.172950461507
+0.134401232004
+0.111966595054
+0.0735102072358
+0.0610477030277
+0.973368346691
+0.967409074306
+0.899611353874
+0.834040105343
+0.767958521843
+0.766332805157
+0.674870014191
+0.640792429447
+0.612776041031
+0.572886228561
+0.518378973007
+0.50894588232
+0.30312782526
+0.292561233044
+0.255380809307
+0.244923815131
+0.221202045679
+0.218013688922
+0.205433771014
+0.201450228691
+0.197932556272
+0.172553762794
+0.169808343053
+0.16717210412
+0.164155602455
+0.161036536098
+0.142375424504
+0.113773860037
+0.106495268643
+0.104496769607
+0.101255118847
+0.10017709434
+0.087682493031
+0.0806440711021
+0.0720712989569
+0.070815898478
+0.0630010664463
+0.0613329038024
+0.060483455658
+0.997082054615
+0.996271848679
+0.994997620583
+0.986445248127
+0.962887585163
+0.952173113823
+0.938099086285
+0.931356906891
+0.899597764015
+0.889140427113
+0.843292355537
+0.816506564617
+0.759680390358
+0.738815665245
+0.675472438335
+0.66677325964
+0.630374133587
+0.617292821407
+0.537257552147
+0.484312534332
+0.413993805647
+0.407227516174
+0.404225021601
+0.362381070852
+0.342267990112
+0.340154141188
+0.328027367592
+0.317694604397
+0.316504716873
+0.310183793306
+0.274893462658
+0.267083644867
+0.264902204275
+0.247464731336
+0.221397399902
+0.220522448421
+0.208107292652
+0.204949140549
+0.195853039622
+0.192887216806
+0.191052481532
+0.176745891571
+0.176365166903
+0.174274593592
+0.173170924187
+0.167925611138
+0.147856250405
+0.146837443113
+0.14533637464
+0.128484427929
+0.124265000224
+0.123619928956
+0.115672558546
+0.113519057631
+0.106513366103
+0.103437528014
+0.101347878575
+0.100872337818
+0.0994723439217
+0.0991938486695
+0.0985937044024
+0.0951827988029
+0.0820039585233
+0.081140011549
+0.0764551758766
+0.0748048201203
+0.0747987404466
+0.0729971304536
+0.0701585263014
+0.0693869963288
+0.0690938383341
+0.0663556829095
+0.0662173703313
+0.0627249926329
+0.999967694283
+0.986745417118
+0.985454797745
+0.980681598186
+0.97392231226
+0.908892571926
+0.886560678482
+0.815091133118
+0.447496056557
+0.368582725525
+0.349644571543
+0.346570640802
+0.319674402475
+0.259739816189
+0.247652828693
+0.24104295671
+0.203757256269
+0.179351240396
+0.175032153726
+0.162493139505
+0.14987963438
+0.134152725339
+0.130406185985
+0.116944074631
+0.109120100737
+0.0977554172277
+0.0832336694002
+0.0796109586954
+0.0720738768578
+0.07134013623
+0.984368681908
+0.897734999657
+0.765750646591
+0.547011137009
+0.389447033405
+0.34582144022
+0.32212921977
+0.249455973506
+0.228835299611
+0.173347502947
+0.145957514644
+0.133528515697
+0.119680568576
+0.0984598696232
+0.0757301002741
+0.0708837807178
+0.999146699905
+0.977446734905
+0.973405480385
+0.966820776463
+0.956369459629
+0.914402544498
+0.896405816078
+0.894983530045
+0.849479615688
+0.731288611889
+0.729866743088
+0.726370811462
+0.699951648712
+0.684844195843
+0.638903260231
+0.551805377007
+0.487293213606
+0.468263536692
+0.406858414412
+0.371105134487
+0.369505107403
+0.338315576315
+0.310189425945
+0.30988484621
+0.294193685055
+0.290552645922
+0.277899980545
+0.249787017703
+0.242018565536
+0.228196293116
+0.224517554045
+0.222166180611
+0.220522150397
+0.217437535524
+0.21717260778
+0.194081559777
+0.181697547436
+0.181641817093
+0.173010736704
+0.168336719275
+0.163264304399
+0.152141302824
+0.143787741661
+0.14027929306
+0.134750157595
+0.121467910707
+0.114764735103
+0.101374916732
+0.0996926054358
+0.0974574759603
+0.0963041782379
+0.0938090085983
+0.0910535752773
+0.0899914428592
+0.0895614326
+0.086814545095
+0.0861885249615
+0.0859835892916
+0.0859761759639
+0.082804441452
+0.0796514824033
+0.0788973644376
+0.0723619237542
+0.0688183680177
+0.0667386874557
+0.0649215206504
+0.0645766705275
+0.0638380870223
+0.0617257840931
+0.0604900754988
+0.987041831017
+0.986601769924
+0.985887467861
+0.975555062294
+0.968244493008
+0.955009639263
+0.929877400398
+0.911091268063
+0.904320716858
+0.878374576569
+0.842879712582
+0.757816970348
+0.73763948679
+0.725357294083
+0.695447206497
+0.686005055904
+0.66173774004
+0.627629756927
+0.622188568115
+0.620288312435
+0.591323018074
+0.48629707098
+0.458327978849
+0.437268316746
+0.421276301146
+0.398014456034
+0.368132174015
+0.317442119122
+0.316143900156
+0.299563646317
+0.290871083736
+0.279677927494
+0.265232026577
+0.255920916796
+0.242857888341
+0.234192728996
+0.227418541908
+0.2208583951
+0.220628812909
+0.215353220701
+0.210661157966
+0.205349206924
+0.204139471054
+0.188326105475
+0.185460850596
+0.184700787067
+0.178669571877
+0.165006756783
+0.162029474974
+0.154502913356
+0.143533810973
+0.140683799982
+0.128839612007
+0.117512382567
+0.117027305067
+0.111969962716
+0.111672021449
+0.0949384719133
+0.0930223464966
+0.0863487198949
+0.0801082700491
+0.077382966876
+0.0759806334972
+0.0721328184009
+0.071169398725
+0.0664945989847
+0.0660735294223
+0.0657014325261
+0.0641266033053
+0.999861359596
+0.427282005548
+0.420357942581
+0.261394917965
+0.110569424927
+0.108395330608
+0.108333095908
+0.0974808856845
+0.0930888205767
+0.0667694732547
+0.0667112544179
+0.997330546379
+0.654560446739
+0.291420549154
+0.184708818793
+0.117750152946
+0.103739283979
+0.999415516853
+0.839909017086
+0.747773051262
+0.504516839981
+0.495268255472
+0.458001464605
+0.434995472431
+0.385435819626
+0.358133971691
+0.32956880331
+0.291260957718
+0.270664334297
+0.269563764334
+0.267330855131
+0.240687161684
+0.222644224763
+0.2174834162
+0.188659965992
+0.187579154968
+0.177042886615
+0.16684089601
+0.16223756969
+0.161437466741
+0.156523838639
+0.135594189167
+0.135347634554
+0.134045943618
+0.13040111959
+0.127426370978
+0.118550986052
+0.116226233542
+0.102556444705
+0.101696409285
+0.100609734654
+0.0976218432188
+0.0959184318781
+0.0856241434813
+0.0837556868792
+0.0812045559287
+0.0788788795471
+0.0765646323562
+0.0688586160541
+0.0682339966297
+0.0654645115137
+0.0613331831992
+0.96193498373
+0.262180179358
+0.222836509347
+0.107315897942
+0.106162659824
+0.083573333919
+0.99903190136
+0.997419834137
+0.280324101448
+0.266756772995
+0.0899597480893
+0.0731279924512
+0.998982131481
+0.997113347054
+0.987826526165
+0.982749462128
+0.956938147545
+0.917741656303
+0.89250677824
+0.75852483511
+0.693428397179
+0.47499781847
+0.472768843174
+0.33173084259
+0.276509732008
+0.261360406876
+0.25578725338
+0.236306011677
+0.234313651919
+0.225812152028
+0.223267674446
+0.217445120215
+0.203244149685
+0.194196358323
+0.193431988358
+0.187305405736
+0.160498023033
+0.159103110433
+0.151061818004
+0.136239483953
+0.128768399358
+0.127508625388
+0.111696086824
+0.108987510204
+0.108918011189
+0.092921026051
+0.0901430919766
+0.0875933989882
+0.0764854103327
+0.0716658905149
+0.070265725255
+0.0688454136252
+0.0634646117687
+0.983906745911
+0.979303479195
+0.953061044216
+0.910031616688
+0.79559648037
+0.794248998165
+0.680732309818
+0.336245328188
+0.251929551363
+0.236175224185
+0.231977835298
+0.222920686007
+0.215601578355
+0.177372828126
+0.170346945524
+0.126366943121
+0.122063778341
+0.0707916989923
+0.0675657093525
+0.0636561065912
+0.999124109745
+0.95559579134
+0.335633426905
+0.189344316721
+0.169774979353
+0.140989258885
+0.999840140343
+0.961707174778
+0.858922302723
+0.844822049141
+0.79887676239
+0.715954303741
+0.544930994511
+0.4975284338
+0.432779669762
+0.339537680149
+0.314660340548
+0.284707576036
+0.274929046631
+0.223072171211
+0.208192542195
+0.17063011229
+0.124015904963
+0.12204939127
+0.115327633917
+0.102703399956
+0.0867205932736
+0.0854152813554
+0.0805300623178
+0.0740347579122
+0.073657438159
+0.0652743726969
+0.999866127968
+0.999671459198
+0.974003314972
+0.259788662195
+0.230321779847
+0.215791374445
+0.0817734152079
+0.997030735016
+0.34769898653
+0.324184179306
+0.301092356443
+0.296595841646
+0.286813735962
+0.222813129425
+0.208755522966
+0.192234814167
+0.188726916909
+0.174763262272
+0.151840478182
+0.141688376665
+0.139051422477
+0.133311748505
+0.130648344755
+0.11715836823
+0.111482828856
+0.0754759684205
+0.0728836879134
+0.0661561638117
+0.0624446943402
+0.997358500957
+0.940094411373
+0.880283892155
+0.704438745975
+0.4662733078
+0.424349606037
+0.369004666805
+0.34559327364
+0.31790792942
+0.29895606637
+0.266740322113
+0.214255899191
+0.170582249761
+0.160710096359
+0.149877399206
+0.138445109129
+0.13044179976
+0.126896575093
+0.119334079325
+0.110072247684
+0.0997977480292
+0.0713705793023
+0.0703639686108
+0.0689340159297
+0.0658354684711
+0.0658119469881
+0.0646213069558
+0.999941945076
+0.953048288822
+0.94254642725
+0.850432991982
+0.832034409046
+0.728959321976
+0.709047615528
+0.587097465992
+0.500774085522
+0.396899342537
+0.394484817982
+0.373822808266
+0.310672014952
+0.273537039757
+0.267405390739
+0.224373295903
+0.182935506105
+0.173327282071
+0.163637757301
+0.140023753047
+0.136790826917
+0.108789592981
+0.0996274873614
+0.0922265052795
+0.0838004201651
+0.0774200931191
+0.0729408934712
+0.0716582760215
+0.0646496191621
+0.0612550005317
+0.998717188835
+0.995120823383
+0.993693888187
+0.987637400627
+0.972319602966
+0.869444668293
+0.806164264679
+0.745419740677
+0.466332167387
+0.414090722799
+0.39402449131
+0.39316251874
+0.335397094488
+0.269511252642
+0.244224816561
+0.231962233782
+0.228760495782
+0.20111349225
+0.179224982858
+0.168310210109
+0.164801374078
+0.158653780818
+0.15118253231
+0.12252458185
+0.112351365387
+0.105436913669
+0.0970543622971
+0.0848429426551
+0.0730557218194
+0.0673143640161
+0.0638587921858
+0.0627655684948
+0.0615500696003
+0.999268949032
+0.974084496498
+0.473171263933
+0.344996988773
+0.244219079614
+0.122845321894
+0.117027692497
+0.107375003397
+0.0760171189904
+0.998769938946
+0.989225327969
+0.985316216946
+0.983475089073
+0.951185405254
+0.57660639286
+0.475443273783
+0.418282717466
+0.285181820393
+0.250196576118
+0.227724894881
+0.214662909508
+0.177842736244
+0.174011051655
+0.149140387774
+0.121523305774
+0.120910108089
+0.0957762300968
+0.0872720554471
+0.0719359591603
+0.0626236051321
+0.0621949806809
+0.998985826969
+0.997508406639
+0.991696834564
+0.983357310295
+0.972927808762
+0.787732899189
+0.610768139362
+0.527510285378
+0.440636754036
+0.350340425968
+0.240117758512
+0.239989861846
+0.234486505389
+0.225252836943
+0.218417719007
+0.190934225917
+0.164603888988
+0.139253720641
+0.132916226983
+0.113653682172
+0.107052870095
+0.0973430648446
+0.0856653004885
+0.0847604721785
+0.0748263522983
+0.0730226635933
+0.071620374918
+0.0689997002482
+0.0662944689393
+0.998581051826
+0.254398405552
+0.140133053064
+0.0966392308474
+0.0717679932714
+0.999868869781
+0.231976747513
+0.991943955421
+0.987451851368
+0.985974371433
+0.975968003273
+0.933837175369
+0.503213942051
+0.432781517506
+0.390527963638
+0.371599704027
+0.302940219641
+0.297495514154
+0.280758440495
+0.25901594758
+0.247282981873
+0.244963765144
+0.186276912689
+0.144528895617
+0.143962949514
+0.134435117245
+0.129979893565
+0.118750713766
+0.111666746438
+0.0888966545463
+0.0885583013296
+0.081235691905
+0.0746461302042
+0.0721422955394
+0.0637637451291
+0.0631813108921
+0.0625683888793
+0.997547924519
+0.572566688061
+0.363994091749
+0.217090532184
+0.168808534741
+0.120830573142
+0.0810230895877
+0.077639631927
+0.999306321144
+0.822654902935
+0.479814201593
+0.308454543352
+0.197075054049
+0.128335833549
+0.105784796178
+0.0810245871544
+0.999436676502
+0.255276292562
+0.0884118676186
+0.998903274536
+0.997219204903
+0.983924508095
+0.973297178745
+0.9574868083
+0.949055671692
+0.942855596542
+0.878789544106
+0.811559021473
+0.700583338737
+0.689505755901
+0.587479233742
+0.566563367844
+0.509110152721
+0.455933153629
+0.397487848997
+0.378331065178
+0.365315228701
+0.326857507229
+0.296198993921
+0.29509678483
+0.286599576473
+0.266186326742
+0.244158193469
+0.23257316649
+0.221215218306
+0.212495550513
+0.204004481435
+0.203657314181
+0.200632125139
+0.196031436324
+0.195155590773
+0.17739982903
+0.16443438828
+0.142510190606
+0.136717140675
+0.134969204664
+0.128123447299
+0.120792739093
+0.111649297178
+0.0986558645964
+0.0958403497934
+0.0952036231756
+0.0933785960078
+0.0885968506336
+0.0755569040775
+0.0724885985255
+0.0715738162398
+0.0684327185154
+0.997487187386
+0.695897519588
+0.35728135705
+0.189232870936
+0.136365503073
+0.071802392602
+0.999494194984
+0.980037689209
+0.979199230671
+0.561471402645
+0.504794538021
+0.315684258938
+0.266540080309
+0.251384019852
+0.245189934969
+0.203233763576
+0.141810953617
+0.118508063257
+0.115389607847
+0.0874758064747
+0.0701965764165
+0.999741613865
+0.993976473808
+0.282515764236
+0.22163169086
+0.102295204997
+0.999371707439
+0.25885283947
+0.0680761188269
+0.999915480614
+0.221820831299
+0.999724566936
+0.363480299711
+0.143625617027
+0.130269914865
+0.999858736992
+0.798165500164
+0.417843073606
+0.21489778161
+0.212205216289
+0.0920646414161
+0.0809744298458
+0.0741038843989
+0.999981164932
+0.761807203293
+0.436751872301
+0.297322720289
+0.124121211469
+0.100059710443
+0.0830144435167
+0.999504566193
+0.996889531612
+0.996331393719
+0.955871939659
+0.910660147667
+0.906697630882
+0.8234795928
+0.810048043728
+0.756106257439
+0.617325305939
+0.336177021265
+0.311133682728
+0.258691191673
+0.241923168302
+0.240495026112
+0.236267864704
+0.234108328819
+0.233682274818
+0.206012442708
+0.181026309729
+0.170508921146
+0.137938350439
+0.108388774097
+0.107753410935
+0.096209011972
+0.0866120383143
+0.066968716681
+0.0645221695304
+0.0622421950102
+0.0603217333555
+0.99984896183
+0.998339533806
+0.947395324707
+0.916589140892
+0.890009582043
+0.885176360607
+0.876690685749
+0.831985116005
+0.750023961067
+0.694691717625
+0.665149629116
+0.649426996708
+0.548454523087
+0.530202686787
+0.527496278286
+0.502862870693
+0.466090977192
+0.428791999817
+0.376059412956
+0.369240015745
+0.348168551922
+0.314102232456
+0.312108516693
+0.311491280794
+0.286958962679
+0.282896906137
+0.281279057264
+0.275987058878
+0.263362765312
+0.255519658327
+0.227041274309
+0.215528771281
+0.207249313593
+0.205350011587
+0.193648591638
+0.18751694262
+0.178930014372
+0.177003368735
+0.162903934717
+0.155962482095
+0.1424356848
+0.141680732369
+0.126816213131
+0.114026673138
+0.109822466969
+0.105679221451
+0.103511795402
+0.102504871786
+0.0995284989476
+0.0962163507938
+0.0936759188771
+0.0809666514397
+0.079386010766
+0.073398321867
+0.0724855586886
+0.0648428499699
+0.0645998641849
+0.062997713685
+0.0613682717085
+0.0612242296338
+0.976861059666
+0.241896092892
+0.0640575364232
+0.99987411499
+0.228309512138
+0.999981164932
+0.228070512414
+0.999774038792
+0.996418237686
+0.254770725965
+0.23145930469
+0.0987531989813
+0.0759082660079
+0.994928121567
+0.48700851202
+0.386618494987
+0.222440525889
+0.209275618196
+0.195116817951
+0.146801605821
+0.100628659129
+0.0890925899148
+0.0785910785198
+0.0697466656566
+0.0645165145397
+0.999650716782
+0.471487730742
+0.228502005339
+0.178789258003
+0.067815117538
+0.999979138374
+0.385719686747
+0.219306096435
+0.125236809254
+0.999076247215
+0.991174340248
+0.987598598003
+0.984944224358
+0.980740487576
+0.97671097517
+0.969477176666
+0.909366488457
+0.909245967865
+0.867043197155
+0.811072468758
+0.790458261967
+0.781556248665
+0.781186461449
+0.746510326862
+0.737351953983
+0.70183467865
+0.607485353947
+0.594738543034
+0.593580067158
+0.582208037376
+0.54889523983
+0.458082914352
+0.454957544804
+0.404505580664
+0.382360428572
+0.364262968302
+0.354983955622
+0.315158128738
+0.31389990449
+0.312372416258
+0.295599907637
+0.283987909555
+0.264422744513
+0.245825633407
+0.241419255733
+0.216903269291
+0.214117124677
+0.210327923298
+0.208907395601
+0.207863539457
+0.201571822166
+0.198329985142
+0.186197832227
+0.178957626224
+0.176398083568
+0.162885606289
+0.155486211181
+0.141444504261
+0.134666636586
+0.132977694273
+0.126216426492
+0.121847838163
+0.119761615992
+0.101361230016
+0.100925691426
+0.100372903049
+0.0973197072744
+0.08568097651
+0.0856034085155
+0.0850395411253
+0.0836379751563
+0.0750297158957
+0.0745426267385
+0.0690359920263
+0.0678648427129
+0.0647069811821
+0.0645706057549
+0.0641559585929
+0.0640532448888
+0.0626750960946
+0.0613030716777
+0.0605093315244
+0.0604316480458
+0.99968791008
+0.999673604965
+0.988363087177
+0.929248631001
+0.904639899731
+0.884096562862
+0.871048510075
+0.673950910568
+0.55148857832
+0.496680498123
+0.405239373446
+0.385088473558
+0.355011999607
+0.315216034651
+0.311018794775
+0.233859241009
+0.231391325593
+0.229530736804
+0.222472220659
+0.209700211883
+0.201906919479
+0.172515392303
+0.168065920472
+0.157170951366
+0.144621297717
+0.138863295317
+0.132859885693
+0.116290904582
+0.0978939682245
+0.0962697714567
+0.0853615477681
+0.0796765759587
+0.0791458338499
+0.065162949264
+0.0627753660083
+0.0618150532246
+0.0612092316151
+0.0605558566749
+0.999800503254
+0.999398469925
+0.995612204075
+0.249649405479
+0.234370842576
+0.224453613162
+0.998004615307
+0.969588279724
+0.960964381695
+0.956989049911
+0.932800889015
+0.871589243412
+0.849373042583
+0.740092515945
+0.666870176792
+0.517849624157
+0.453431278467
+0.436019390821
+0.343305319548
+0.32957649231
+0.301072686911
+0.295777589083
+0.285896003246
+0.257557213306
+0.213665023446
+0.205506399274
+0.203258559108
+0.202517911792
+0.201468005776
+0.175479188561
+0.161158248782
+0.152354702353
+0.149644002318
+0.139234915376
+0.127986177802
+0.126082196832
+0.104555316269
+0.0892999097705
+0.0883722454309
+0.0857918933034
+0.0780893117189
+0.0756266415119
+0.073498018086
+0.0694115534425
+0.999425411224
+0.990275263786
+0.454510450363
+0.368632316589
+0.240667507052
+0.228047177196
+0.218716114759
+0.208774164319
+0.121137067676
+0.0907304063439
+0.0844509676099
+0.0637911111116
+0.998184382915
+0.996814906597
+0.993934452534
+0.99384355545
+0.913686513901
+0.656646251678
+0.55050444603
+0.521912336349
+0.516117811203
+0.449124693871
+0.396979540586
+0.384039312601
+0.367748588324
+0.323958933353
+0.29776674509
+0.276605516672
+0.270192056894
+0.225135669112
+0.217178478837
+0.214180871844
+0.205765217543
+0.179115667939
+0.174527317286
+0.152544498444
+0.132154494524
+0.12504196167
+0.120318338275
+0.117332920432
+0.104755274951
+0.102595068514
+0.096455834806
+0.092838421464
+0.085684157908
+0.0823908001184
+0.075908690691
+0.0734192505479
+0.0711930245161
+0.0680368840694
+0.0672268792987
+0.0651670172811
+0.0620461218059
+0.0612448304892
+0.0607766136527
+0.999109208584
+0.975574851036
+0.813391208649
+0.52200615406
+0.443572551012
+0.363533347845
+0.250791579485
+0.219813719392
+0.20447486639
+0.162129849195
+0.124345995486
+0.115122579038
+0.0943290814757
+0.0630508363247
+0.999613821507
+0.999575316906
+0.997644484043
+0.304409623146
+0.288200616837
+0.246144518256
+0.243443235755
+0.13658362627
+0.0722275972366
+0.0703070908785
+0.0674824863672
+0.0658555850387
+0.0619911886752
+0.998963356018
+0.972402513027
+0.940724015236
+0.932743251324
+0.847617447376
+0.841303527355
+0.766889572144
+0.746221780777
+0.720576405525
+0.719646573067
+0.69652736187
+0.660291492939
+0.651326715946
+0.642819941044
+0.640799105167
+0.54286634922
+0.493774652481
+0.48621737957
+0.424220204353
+0.419423609972
+0.398265987635
+0.395358324051
+0.381732970476
+0.370228976011
+0.355773419142
+0.3523889184
+0.339383304119
+0.339120656252
+0.30273398757
+0.292775899172
+0.292248904705
+0.27720707655
+0.271470844746
+0.254039824009
+0.235494926572
+0.229750573635
+0.220483958721
+0.219281360507
+0.219198390841
+0.203939214349
+0.192121013999
+0.182730764151
+0.180710241199
+0.177314713597
+0.175771847367
+0.174459934235
+0.17042593658
+0.16663017869
+0.166467964649
+0.159673690796
+0.153285935521
+0.150462329388
+0.147816330194
+0.146987468004
+0.142433002591
+0.1381187886
+0.136851727962
+0.116215966642
+0.116084784269
+0.113827228546
+0.112945444882
+0.108553484082
+0.105448618531
+0.101424999535
+0.0970856398344
+0.0957155674696
+0.0921172648668
+0.0914572179317
+0.0895034372807
+0.0857234746218
+0.0845205932856
+0.083335518837
+0.0811870098114
+0.0789483934641
+0.0783948227763
+0.075130879879
+0.0749548003078
+0.074830211699
+0.0744628086686
+0.0724942311645
+0.0712927877903
+0.070331774652
+0.0650504082441
+0.0637699067593
+0.0601666048169
+0.999430716038
+0.996187508106
+0.986824333668
+0.975689232349
+0.928738176823
+0.838212013245
+0.826367616653
+0.789069473743
+0.719139397144
+0.704701781273
+0.623931646347
+0.556512534618
+0.490600794554
+0.446261048317
+0.431299030781
+0.430968910456
+0.424282491207
+0.416363120079
+0.410952717066
+0.398057997227
+0.396323829889
+0.389153420925
+0.312233835459
+0.311252206564
+0.309397667646
+0.285612106323
+0.282541453838
+0.275128185749
+0.272518903017
+0.260360091925
+0.25322842598
+0.249544814229
+0.243215307593
+0.241604357958
+0.226373881102
+0.226270154119
+0.204255208373
+0.200443759561
+0.198658078909
+0.183075919747
+0.172248229384
+0.170506551862
+0.169984862208
+0.164996787906
+0.158444583416
+0.154718473554
+0.149993062019
+0.13511288166
+0.134517282248
+0.119497746229
+0.119418859482
+0.116643123329
+0.115059413016
+0.114179253578
+0.11051517725
+0.109064541757
+0.10761500895
+0.107587918639
+0.105532370508
+0.0997468307614
+0.0985546335578
+0.0942277759314
+0.0898526757956
+0.0837607309222
+0.0797574296594
+0.0785757750273
+0.0775882378221
+0.0704097896814
+0.069792792201
+0.0681733191013
+0.06623005867
+0.0648828521371
+0.064729295671
+0.0628466978669
+0.0627769380808
+0.0616583786905
+0.994825482368
+0.993010222912
+0.986519575119
+0.979093849659
+0.976521730423
+0.975592315197
+0.962715268135
+0.957856655121
+0.950484871864
+0.947852909565
+0.897880554199
+0.896423697472
+0.882922887802
+0.844086527824
+0.838403582573
+0.818503856659
+0.807530045509
+0.745602011681
+0.674593269825
+0.663724005222
+0.613600432873
+0.509360015392
+0.485164821148
+0.47818595171
+0.446938365698
+0.402477622032
+0.384771376848
+0.376240760088
+0.334779560566
+0.327094733715
+0.302205443382
+0.286881536245
+0.276563048363
+0.268045216799
+0.267693608999
+0.248649179935
+0.244097977877
+0.221685215831
+0.213227257133
+0.212871730328
+0.209986388683
+0.208360463381
+0.203444197774
+0.198618009686
+0.196756586432
+0.192417830229
+0.1796875
+0.176318973303
+0.175903856754
+0.175629764795
+0.17553858459
+0.174518674612
+0.172735840082
+0.17043119669
+0.168359041214
+0.160383418202
+0.146888077259
+0.141810446978
+0.140694394708
+0.139849290252
+0.138947352767
+0.118498526514
+0.118102416396
+0.11686257273
+0.111250378191
+0.107679106295
+0.106150761247
+0.100477933884
+0.0974943339825
+0.0960067883134
+0.0904370844364
+0.0903329476714
+0.089974835515
+0.0840358808637
+0.0789192542434
+0.078557819128
+0.0773743391037
+0.0718290656805
+0.0670303106308
+0.999118030071
+0.99843531847
+0.997423768044
+0.994790315628
+0.994093179703
+0.988848090172
+0.978015065193
+0.967324495316
+0.966657698154
+0.724939882755
+0.697642803192
+0.684925079346
+0.621037304401
+0.432323485613
+0.418324321508
+0.401449561119
+0.396412879229
+0.339446932077
+0.336006760597
+0.319183707237
+0.317633420229
+0.300582677126
+0.285205125809
+0.280551075935
+0.269249290228
+0.256742715836
+0.244927600026
+0.234610006213
+0.222219899297
+0.19347435236
+0.188280045986
+0.187084957957
+0.182984888554
+0.158113241196
+0.152356937528
+0.146938577294
+0.146169900894
+0.134838089347
+0.134554564953
+0.128044486046
+0.120856948197
+0.116164006293
+0.10954297334
+0.103668399155
+0.101141840219
+0.0980478152633
+0.0952260196209
+0.0915942937136
+0.089857570827
+0.0869224444032
+0.0830587744713
+0.0772616937757
+0.0760051012039
+0.0701606944203
+0.0674643144011
+0.0638219118118
+0.999280631542
+0.260925084352
+0.128856495023
+0.0723394975066
+0.993311882019
+0.642047166824
+0.512149333954
+0.190985873342
+0.119117602706
+0.0898107737303
+0.997704565525
+0.995253801346
+0.991706490517
+0.982018470764
+0.957116961479
+0.954610466957
+0.950716376305
+0.899031758308
+0.891542196274
+0.886567890644
+0.879221618176
+0.79160130024
+0.782691776752
+0.749382913113
+0.744449615479
+0.689770281315
+0.647324740887
+0.639235138893
+0.569430232048
+0.51583468914
+0.451004594564
+0.397427260876
+0.377324581146
+0.367087036371
+0.358904182911
+0.356711447239
+0.319530785084
+0.307652026415
+0.298603832722
+0.288140833378
+0.269373297691
+0.265693902969
+0.263692617416
+0.260874509811
+0.255989313126
+0.250408142805
+0.244835078716
+0.237917557359
+0.217920646071
+0.209465414286
+0.205521464348
+0.20504437387
+0.204843401909
+0.199135959148
+0.192626267672
+0.191966980696
+0.179932013154
+0.179893434048
+0.150653287768
+0.14473451674
+0.141796201468
+0.141405895352
+0.141211077571
+0.138576552272
+0.137402698398
+0.136087864637
+0.130068942904
+0.128149420023
+0.118490844965
+0.115967184305
+0.113740995526
+0.0980010256171
+0.0951235443354
+0.0911354199052
+0.089813709259
+0.0850657895207
+0.0822857096791
+0.0812103003263
+0.0810098871589
+0.080983504653
+0.0753046646714
+0.0722572281957
+0.0711138024926
+0.0697357729077
+0.0693197622895
+0.0689960196614
+0.0673434063792
+0.0667957663536
+0.0653370842338
+0.0607278496027
+0.975586175919
+0.959787368774
+0.942150473595
+0.726087510586
+0.714168190956
+0.666308581829
+0.548655629158
+0.541298925877
+0.494958609343
+0.438261330128
+0.350017309189
+0.348631054163
+0.278492897749
+0.222956225276
+0.22023229301
+0.191104009748
+0.172284290195
+0.170996561646
+0.167979210615
+0.16763612628
+0.15687379241
+0.153538316488
+0.134655430913
+0.129420727491
+0.127806246281
+0.117188595235
+0.110745325685
+0.105960235
+0.0986673980951
+0.0984482169151
+0.088109433651
+0.0844041928649
+0.0826599672437
+0.0733860656619
+0.0721763670444
+0.0692484602332
+0.0666551962495
+0.998814344406
+0.993756949902
+0.886017203331
+0.61123162508
+0.533999085426
+0.490596860647
+0.417297005653
+0.29553976655
+0.219601139426
+0.194884568453
+0.148602828383
+0.125176265836
+0.117750950158
+0.0970333963633
+0.0937042757869
+0.069618023932
+0.0685485228896
+0.0680819898844
+0.991534054279
+0.355808794498
+0.283551603556
+0.148340165615
+0.104472123086
+0.0995064526796
+0.0850493609905
+0.0782635957003
+0.0697690621018
+0.99991106987
+0.244626387954
+0.0844115465879
+0.071184001863
+0.995880961418
+0.540756285191
+0.530434966087
+0.361382693052
+0.261692166328
+0.223499342799
+0.185807392001
+0.13990187645
+0.130168527365
+0.123071946204
+0.0941201373935
+0.0709463134408
+0.999811112881
+0.998667597771
+0.230246022344
+0.22926864028
+0.998564183712
+0.997946798801
+0.985165119171
+0.941293239594
+0.647760868073
+0.431107252836
+0.339977830648
+0.337169319391
+0.306633174419
+0.208821922541
+0.14287121594
+0.140332996845
+0.140232235193
+0.135571315885
+0.126639202237
+0.118254564703
+0.106434769928
+0.102547213435
+0.0915523320436
+0.0675884708762
+0.999232530594
+0.21700720489
+0.999677658081
+0.981823205948
+0.922502934933
+0.859117746353
+0.439464360476
+0.274583280087
+0.260792851448
+0.260363072157
+0.184732705355
+0.127507656813
+0.120545990765
+0.102297060192
+0.0601931326091
+0.992639303207
+0.991714060307
+0.985566973686
+0.984285414219
+0.963764488697
+0.620207548141
+0.309568077326
+0.305422514677
+0.275089174509
+0.247915819287
+0.197693601251
+0.120236083865
+0.118068546057
+0.104172930121
+0.0993527323008
+0.087502554059
+0.0733383372426
+0.0715071484447
+0.999524950981
+0.915762543678
+0.381037265062
+0.33484429121
+0.221661865711
+0.123540438712
+0.0681861937046
+0.999886035919
+0.999846577644
+0.250054717064
+0.23465976119
+0.167752325535
+0.0888562202454
+0.999256193638
+0.999022603035
+0.995295345783
+0.983832597733
+0.977420985699
+0.938246309757
+0.392120093107
+0.304893761873
+0.281726151705
+0.26976275444
+0.263413667679
+0.247773587704
+0.186527907848
+0.153093382716
+0.119151927531
+0.113645583391
+0.0722449421883
+0.0691849291325
+0.063812315464
+0.0620200671256
+0.0607798732817
+0.999950528145
+0.992190003395
+0.276637017727
+0.251549214125
+0.180080294609
+0.0761709883809
+0.0717695206404
+0.896281719208
+0.703536748886
+0.469767838717
+0.35194593668
+0.246505156159
+0.215047121048
+0.171883672476
+0.164586007595
+0.146100401878
+0.0959902927279
+0.0857849791646
+0.083300806582
+0.080426171422
+0.0755417123437
+0.0627321749926
+0.999719321728
+0.990707039833
+0.943727314472
+0.906231224537
+0.730656683445
+0.5246322155
+0.466064184904
+0.294522881508
+0.231323033571
+0.230393514037
+0.208666577935
+0.17046956718
+0.106120951474
+0.102273903787
+0.099495716393
+0.0904405340552
+0.0701552480459
+0.0670055821538
+0.0661889687181
+0.998050689697
+0.995930671692
+0.700910151005
+0.412136971951
+0.310613900423
+0.170418649912
+0.138574063778
+0.0765570700169
+0.921052992344
+0.861914873123
+0.782074809074
+0.648371100426
+0.550338923931
+0.529159903526
+0.451466649771
+0.449322909117
+0.448351144791
+0.412485480309
+0.397247552872
+0.377875775099
+0.352481693029
+0.324045658112
+0.298453807831
+0.28922522068
+0.284477978945
+0.263548493385
+0.262201488018
+0.260909378529
+0.255472093821
+0.240902334452
+0.215260401368
+0.213836163282
+0.20698197186
+0.196953430772
+0.196279779077
+0.195359274745
+0.193594768643
+0.181845918298
+0.176777601242
+0.165086224675
+0.156095653772
+0.151001870632
+0.142666533589
+0.140388414264
+0.132337480783
+0.131554767489
+0.11873383075
+0.117143899202
+0.112240046263
+0.108800441027
+0.10712608695
+0.101819492877
+0.101571798325
+0.0972444787621
+0.0954732224345
+0.086484529078
+0.0823562219739
+0.0806227326393
+0.0802488327026
+0.076133877039
+0.0757148861885
+0.0747767314315
+0.0742015689611
+0.0731023624539
+0.07199075073
+0.0681364163756
+0.0673984885216
+0.0627685561776
+0.0613379403949
+0.997393012047
+0.994322299957
+0.991469025612
+0.966510415077
+0.915483534336
+0.884050548077
+0.720015347004
+0.589534759521
+0.461607843637
+0.404501676559
+0.368422448635
+0.36576795578
+0.301459431648
+0.29040157795
+0.237349748611
+0.226100295782
+0.211244419217
+0.162413358688
+0.150720521808
+0.14269413054
+0.121456936002
+0.112311899662
+0.108723506331
+0.0986962020397
+0.0903049707413
+0.0768970698118
+0.064977735281
+0.999966740608
+0.219209507108
+0.999661326408
+0.99965441227
+0.908818185329
+0.760868251324
+0.340409040451
+0.332962661982
+0.308644145727
+0.249470293522
+0.248854488134
+0.152649611235
+0.139552459121
+0.0885343551636
+0.0811202228069
+0.0792088881135
+0.0644731894135
+0.0616070777178
+0.0608826428652
+0.999786555767
+0.990432858467
+0.248646914959
+0.225040569901
+0.999803960323
+0.99896478653
+0.749217510223
+0.746102035046
+0.317182004452
+0.314589679241
+0.265853971243
+0.245910182595
+0.222387298942
+0.123711466789
+0.114297300577
+0.0777277946472
+0.0605505444109
+0.987117707729
+0.982259511948
+0.924745440483
+0.839852809906
+0.765697360039
+0.413397938013
+0.279361993074
+0.236235961318
+0.230093717575
+0.218911796808
+0.17386379838
+0.114684298635
+0.102157466114
+0.0997303128242
+0.0679922252893
+0.999820649624
+0.958805561066
+0.800850450993
+0.552648484707
+0.527999818325
+0.395270973444
+0.3172313869
+0.258164525032
+0.220190182328
+0.159379854798
+0.158194109797
+0.131898075342
+0.127181142569
+0.121738120914
+0.11690723151
+0.110058926046
+0.100624881685
+0.0674076080322
+0.0670459493995
+0.997967660427
+0.997359097004
+0.99399638176
+0.973244547844
+0.915413379669
+0.906831026077
+0.900175690651
+0.881154596806
+0.875833451748
+0.872334301472
+0.856586158276
+0.851628899574
+0.793303966522
+0.719053983688
+0.703884780407
+0.688319981098
+0.677664935589
+0.635647892952
+0.616430640221
+0.591406047344
+0.587574243546
+0.583887457848
+0.511965692043
+0.506694316864
+0.463805943727
+0.453463733196
+0.444454729557
+0.438274592161
+0.409958660603
+0.404633611441
+0.390283972025
+0.32783177495
+0.327171623707
+0.32682287693
+0.325419604778
+0.320770561695
+0.309786945581
+0.301907271147
+0.300872802734
+0.29770770669
+0.295052170753
+0.279344141483
+0.273449361324
+0.270052433014
+0.268339037895
+0.265416353941
+0.256161600351
+0.246208578348
+0.244706526399
+0.242864608765
+0.241382300854
+0.231087759137
+0.231076002121
+0.229626879096
+0.229297339916
+0.228345513344
+0.227306514978
+0.225149556994
+0.222949847579
+0.222514465451
+0.220467165112
+0.219928592443
+0.218209862709
+0.212051749229
+0.209413632751
+0.206045687199
+0.203807681799
+0.199446678162
+0.197942137718
+0.195952549577
+0.191939041018
+0.178589433432
+0.178064465523
+0.173670604825
+0.172248989344
+0.168207794428
+0.163200855255
+0.16200311482
+0.161553636193
+0.156476035714
+0.142782449722
+0.140417933464
+0.138676986098
+0.138091206551
+0.136998325586
+0.135957866907
+0.13447843492
+0.133512184024
+0.132552564144
+0.122351385653
+0.121486075222
+0.119992919266
+0.119257338345
+0.119230002165
+0.115991137922
+0.112454652786
+0.109146676958
+0.108783356845
+0.108327567577
+0.105775542557
+0.105408012867
+0.104598268867
+0.103591285646
+0.102652408183
+0.101723238826
+0.0999928191304
+0.0992403626442
+0.0979554727674
+0.0966920256615
+0.0933598279953
+0.0927701294422
+0.0918072983623
+0.08860270679
+0.0881614014506
+0.0868270993233
+0.085781276226
+0.0856022164226
+0.0849558487535
+0.0849360972643
+0.0846387520432
+0.0846056118608
+0.0842245146632
+0.0828220769763
+0.0828171446919
+0.0809283554554
+0.0806427523494
+0.078833989799
+0.0777271464467
+0.076623365283
+0.0762644633651
+0.0743041783571
+0.0740823149681
+0.0737914219499
+0.0735002830625
+0.0709770694375
+0.0707598924637
+0.0705191046
+0.0704877972603
+0.0701687186956
+0.0701267793775
+0.0691085159779
+0.068739593029
+0.0686050206423
+0.0683276876807
+0.0656282454729
+0.065391458571
+0.0639444291592
+0.0634098649025
+0.0626582577825
+0.0623227842152
+0.0613517537713
+0.0606674104929
+0.0602330192924
+0.999875426292
+0.437412768602
+0.347389817238
+0.277341842651
+0.248011559248
+0.207684770226
+0.17679040134
+0.124954856932
+0.101744748652
+0.0984162092209
+0.0779643803835
+0.0722421035171
+0.0645274147391
+0.99881541729
+0.997493982315
+0.697072267532
+0.285765707493
+0.266210585833
+0.225699961185
+0.181298777461
+0.0996693745255
+0.098057821393
+0.0767000392079
+0.999502420425
+0.998655676842
+0.998366296291
+0.979027807713
+0.924251079559
+0.912279784679
+0.88493347168
+0.838683307171
+0.619835495949
+0.556869983673
+0.51318615675
+0.485629111528
+0.415404856205
+0.371445000172
+0.329514414072
+0.308405011892
+0.258371651173
+0.254268050194
+0.235270157456
+0.222202003002
+0.20456020534
+0.200304478407
+0.20007532835
+0.17853294313
+0.152400821447
+0.148664772511
+0.133183509111
+0.133066818118
+0.129867240787
+0.12886634469
+0.118456356227
+0.108910523355
+0.102634452283
+0.0964878723025
+0.0914936214685
+0.0726132765412
+0.0721158161759
+0.0719225779176
+0.0665308535099
+0.0643943250179
+0.062666669488
+0.985966742039
+0.977180600166
+0.840638160706
+0.487635612488
+0.329573839903
+0.248140767217
+0.227939888835
+0.172817364335
+0.172072708607
+0.144336953759
+0.101808652282
+0.0743254870176
+0.0690270736814
+0.06518638134
+0.0649603232741
+0.0638995170593
+0.999954819679
+0.377778738737
+0.240679517388
+0.203901365399
+0.20073568821
+0.19483423233
+0.143475040793
+0.130012050271
+0.115757077932
+0.0904213264585
+0.0789740607142
+0.0751534625888
+0.0736794248223
+0.0666357651353
+0.0631072297692
+0.992618620396
+0.988196194172
+0.946931540966
+0.87262815237
+0.51244109869
+0.399176031351
+0.242038890719
+0.190089941025
+0.13709436357
+0.133800894022
+0.123546481133
+0.0646584182978
+0.999907970428
+0.96192419529
+0.933129012585
+0.928367674351
+0.926583170891
+0.918897271156
+0.898933291435
+0.89816313982
+0.802240312099
+0.792959988117
+0.753592908382
+0.487964391708
+0.4713742733
+0.456777125597
+0.43927937746
+0.427674740553
+0.383803158998
+0.371457397938
+0.350080251694
+0.34402474761
+0.334862232208
+0.334855854511
+0.331439614296
+0.309913277626
+0.270557045937
+0.261522203684
+0.256123960018
+0.255635052919
+0.233474910259
+0.230281189084
+0.223321139812
+0.216078922153
+0.199734851718
+0.187115758657
+0.185149848461
+0.15785522759
+0.141010209918
+0.139864891768
+0.135325536132
+0.130514606833
+0.129802972078
+0.127937480807
+0.125146433711
+0.122662514448
+0.121461898088
+0.116295456886
+0.111215494573
+0.109697476029
+0.109329216182
+0.103729300201
+0.0958468839526
+0.0919257327914
+0.0900427773595
+0.0830124169588
+0.0766316354275
+0.0746269822121
+0.0722369328141
+0.0696646496654
+0.0665420070291
+0.0659611225128
+0.065107755363
+0.0607519932091
+0.998430550098
+0.601262629032
+0.520209312439
+0.297347843647
+0.296840041876
+0.252513617277
+0.216144025326
+0.209858968854
+0.141453951597
+0.140164971352
+0.125535532832
+0.0934433937073
+0.0826083645225
+0.0725472941995
+0.0708980858326
+0.998122155666
+0.302493780851
+0.18272562325
+0.114352211356
+0.06917129457
+0.905493438244
+0.321576207876
+0.140370935202
+0.999545633793
+0.998488306999
+0.931342065334
+0.901890575886
+0.76403003931
+0.738413572311
+0.432459563017
+0.316827118397
+0.284850388765
+0.278645962477
+0.277750968933
+0.259215354919
+0.229964181781
+0.220286488533
+0.201620101929
+0.185592964292
+0.139805495739
+0.137121796608
+0.124243706465
+0.101606361568
+0.0962783098221
+0.0960087925196
+0.0863846689463
+0.0837675109506
+0.0672956481576
+0.0661116763949
+0.065628670156
+0.0635211989284
+0.0630836039782
+0.996523678303
+0.98087823391
+0.959477722645
+0.938833653927
+0.937268137932
+0.830377221107
+0.797958552837
+0.72400790453
+0.60142070055
+0.478676021099
+0.410042703152
+0.38530805707
+0.375328689814
+0.29457911849
+0.268977403641
+0.26497399807
+0.264549791813
+0.247912928462
+0.237297102809
+0.235868826509
+0.234246701002
+0.199806913733
+0.192326366901
+0.177112579346
+0.174844488502
+0.164343118668
+0.145477607846
+0.144660770893
+0.140262529254
+0.131071120501
+0.122948080301
+0.122622393072
+0.119470834732
+0.118103906512
+0.114384062588
+0.110614031553
+0.104516997933
+0.0949389562011
+0.08837492764
+0.0847669169307
+0.0809181034565
+0.0794751048088
+0.0781405940652
+0.0739796236157
+0.0731292143464
+0.0685299560428
+0.068443402648
+0.0658480525017
+0.0657177940011
+0.0651498287916
+0.0624759308994
+0.999194324017
+0.992772996426
+0.899116933346
+0.87852025032
+0.870016515255
+0.867780864239
+0.852316498756
+0.776414215565
+0.758568882942
+0.702324509621
+0.643758535385
+0.628484070301
+0.624393224716
+0.482644110918
+0.480379372835
+0.426007777452
+0.419244021177
+0.397207558155
+0.345676034689
+0.344111382961
+0.287395209074
+0.283238530159
+0.277707517147
+0.255617588758
+0.230577155948
+0.220824271441
+0.22044621408
+0.209809169173
+0.206227898598
+0.188913181424
+0.169209718704
+0.160852700472
+0.156999126077
+0.147428721189
+0.145388901234
+0.142579168081
+0.133377224207
+0.130970045924
+0.126538574696
+0.112837925553
+0.111194558442
+0.108091063797
+0.10669746995
+0.10615517199
+0.102313056588
+0.0926437228918
+0.0912235453725
+0.0840500667691
+0.0763542354107
+0.0734778121114
+0.0715201124549
+0.0677856728435
+0.0668210983276
+0.0664521753788
+0.0661018043756
+0.0657693073153
+0.0651113986969
+0.0627768188715
+0.0611315891147
+0.999615311623
+0.244081541896
+0.0656598359346
+0.999897956848
+0.99629342556
+0.628307938576
+0.399312466383
+0.307299554348
+0.277965605259
+0.242140576243
+0.182818859816
+0.169158220291
+0.133710354567
+0.118517138064
+0.115217730403
+0.11095892638
+0.0970351323485
+0.0954003408551
+0.0710832178593
+0.0672273561358
+0.0663731470704
+0.061036773026
+0.999868512154
+0.223072290421
+0.998020768166
+0.337347775698
+0.286315411329
+0.285602480173
+0.158869758248
+0.103260166943
+0.0760552510619
+0.0719363540411
+0.996355891228
+0.995821535587
+0.976063609123
+0.972942590714
+0.569133222103
+0.344425857067
+0.342247277498
+0.299538075924
+0.276172488928
+0.244877308607
+0.242558658123
+0.215872615576
+0.176749974489
+0.173293203115
+0.167104661465
+0.142916843295
+0.13362121582
+0.0999065116048
+0.0974993929267
+0.999701678753
+0.989298641682
+0.363252788782
+0.234831571579
+0.146386608481
+0.0973748937249
+0.998073220253
+0.992775082588
+0.811663985252
+0.448805391788
+0.437613606453
+0.392116606236
+0.287579745054
+0.275448948145
+0.191055685282
+0.184340968728
+0.133034184575
+0.12319919467
+0.117677949369
+0.1047815606
+0.0846432819963
+0.0845958888531
+0.0754865556955
+0.0642727017403
+0.0621015317738
+0.999563634396
+0.998136937618
+0.719792544842
+0.29618576169
+0.258438527584
+0.136987179518
+0.0903879925609
+0.0747673660517
+0.0669409185648
+0.999117314816
+0.537455916405
+0.185627177358
+0.12163425982
+0.0807637423277
+0.99976426363
+0.271578222513
+0.163317292929
+0.0702967718244
+0.990616381168
+0.318415433168
+0.229645505548
+0.150993123651
+0.146079495549
+0.091490060091
+0.0796074792743
+0.0765443146229
+0.0621388293803
+0.999545991421
+0.825253605843
+0.599474906921
+0.47928711772
+0.245411485434
+0.240931659937
+0.225484520197
+0.201149523258
+0.19593654573
+0.179026827216
+0.169535398483
+0.127217814326
+0.106539934874
+0.103316284716
+0.0848058834672
+0.0787426233292
+0.0687099993229
+0.0652789622545
+0.97836625576
+0.909288644791
+0.625054717064
+0.541705429554
+0.307900220156
+0.235822483897
+0.200357526541
+0.173291221261
+0.144456192851
+0.116883955896
+0.0932330563664
+0.0860997959971
+0.0757554322481
+0.0704594329
+0.0608351118863
+0.995462954044
+0.978649437428
+0.89387100935
+0.74652081728
+0.613294899464
+0.611050009727
+0.590625405312
+0.517358779907
+0.507970631123
+0.482709169388
+0.430837213993
+0.407912492752
+0.355527430773
+0.342438399792
+0.301172733307
+0.300823092461
+0.296116322279
+0.274621039629
+0.272613078356
+0.257721930742
+0.216330230236
+0.208200901747
+0.207893058658
+0.201893299818
+0.186502441764
+0.176869943738
+0.176116779447
+0.170481458306
+0.168603360653
+0.144631519914
+0.133888885379
+0.130893036723
+0.126569896936
+0.121195912361
+0.114777192473
+0.113911777735
+0.111834019423
+0.102454714477
+0.0955322980881
+0.0878722444177
+0.0866768434644
+0.0821864753962
+0.0796779319644
+0.0722764134407
+0.0668506547809
+0.0643995180726
+0.062804594636
+0.0625292360783
+0.0615699104965
+0.999076604843
+0.690958797932
+0.53432071209
+0.294024378061
+0.183435767889
+0.131702437997
+0.126019462943
+0.0632935091853
+0.956428766251
+0.698242366314
+0.692928552628
+0.508051991463
+0.406040966511
+0.402095109224
+0.371073067188
+0.353615790606
+0.271175503731
+0.233343601227
+0.222996860743
+0.215830877423
+0.156835615635
+0.13622431457
+0.113940313458
+0.106321014464
+0.0944580957294
+0.0940234586596
+0.0903858989477
+0.0832663998008
+0.0751622095704
+0.0723561644554
+0.0632261708379
+0.999696016312
+0.890000224113
+0.884035825729
+0.534651041031
+0.319534868002
+0.215671434999
+0.204784050584
+0.20143891871
+0.197323054075
+0.0962259024382
+0.091528467834
+0.060466516763
+0.998721897602
+0.344238758087
+0.32789888978
+0.181264311075
+0.0950939208269
+0.0645099878311
+0.980090141296
+0.957752346992
+0.946172118187
+0.944004893303
+0.943708539009
+0.927326083183
+0.915109038353
+0.830131351948
+0.808904647827
+0.789487302303
+0.756651699543
+0.706804096699
+0.682843267918
+0.652131021023
+0.625316143036
+0.598293364048
+0.594900548458
+0.586679756641
+0.566850364208
+0.56562179327
+0.53235667944
+0.516675293446
+0.486273884773
+0.476835280657
+0.398179590702
+0.397317826748
+0.370959222317
+0.370247453451
+0.368269830942
+0.34644061327
+0.32861328125
+0.315858513117
+0.280185550451
+0.271817237139
+0.257970631123
+0.257155269384
+0.239059716463
+0.233583316207
+0.229495108128
+0.227187290788
+0.219404369593
+0.218671992421
+0.210738435388
+0.208579778671
+0.20677921176
+0.194637387991
+0.187160283327
+0.17249314487
+0.164421200752
+0.161736175418
+0.150534138083
+0.142911180854
+0.134086802602
+0.127083778381
+0.123994477093
+0.122946321964
+0.1223167032
+0.117321945727
+0.116359785199
+0.10727917403
+0.103281803429
+0.0993120968342
+0.096459724009
+0.096146568656
+0.0940626859665
+0.0920884981751
+0.0875430107117
+0.0869522094727
+0.0848299562931
+0.0798117369413
+0.0716144368052
+0.0715672671795
+0.0702840834856
+0.0687487721443
+0.0667836815119
+0.98867058754
+0.681477189064
+0.46047821641
+0.425325483084
+0.264114260674
+0.244405329227
+0.239701807499
+0.201895102859
+0.164746418595
+0.114486627281
+0.0946237444878
+0.0932745859027
+0.093152038753
+0.0832790583372
+0.0788561180234
+0.0684474110603
+0.0639678239822
+0.0628100782633
+0.0616762749851
+0.999842762947
+0.952508032322
+0.802034139633
+0.749326050282
+0.644581913948
+0.631828904152
+0.321100354195
+0.241418138146
+0.23744738102
+0.218950524926
+0.191180244088
+0.186615481973
+0.165298774838
+0.143140882254
+0.115516915917
+0.10326769948
+0.101696521044
+0.0976809263229
+0.0950469225645
+0.0808950588107
+0.0718290582299
+0.0717925056815
+0.0714248418808
+0.0637685880065
+0.998808145523
+0.969223976135
+0.95273065567
+0.9007152915
+0.859959661961
+0.776232302189
+0.509472370148
+0.490769505501
+0.455271631479
+0.39194136858
+0.301246285439
+0.260796070099
+0.255524247885
+0.2436876297
+0.229280725121
+0.192021593451
+0.185302242637
+0.138455927372
+0.126913070679
+0.126130014658
+0.120667107403
+0.11590718478
+0.114323832095
+0.108243785799
+0.0990132912993
+0.0797908008099
+0.0736158192158
+0.0713671818376
+0.0622235201299
+0.999929785728
+0.236773580313
+0.996485233307
+0.973144292831
+0.952949345112
+0.936492681503
+0.934775471687
+0.934706747532
+0.925210177898
+0.909250319004
+0.907571732998
+0.905214488506
+0.873499751091
+0.856217503548
+0.841894328594
+0.797026395798
+0.785228848457
+0.721731722355
+0.710372626781
+0.708245754242
+0.674999237061
+0.672585606575
+0.646301329136
+0.64030444622
+0.584947764874
+0.574567556381
+0.521926701069
+0.520913422108
+0.507004916668
+0.493492692709
+0.487534403801
+0.476897388697
+0.459832310677
+0.438529103994
+0.436075150967
+0.430230021477
+0.425077706575
+0.412595272064
+0.40896999836
+0.405536472797
+0.397898375988
+0.348343491554
+0.343986362219
+0.342459559441
+0.340788394213
+0.336075127125
+0.33369871974
+0.32458627224
+0.320855110884
+0.308423131704
+0.304758220911
+0.301163464785
+0.281074106693
+0.275988280773
+0.263495177031
+0.256263971329
+0.254672586918
+0.248097345233
+0.243980914354
+0.243575572968
+0.24307988584
+0.239909932017
+0.237402498722
+0.236374273896
+0.223613366485
+0.219628855586
+0.205245360732
+0.202288284898
+0.196715682745
+0.192995041609
+0.191146671772
+0.18573564291
+0.174688339233
+0.173786327243
+0.167393893003
+0.160832315683
+0.15882101655
+0.154459357262
+0.153864204884
+0.151325315237
+0.145265087485
+0.143302410841
+0.134295761585
+0.133973807096
+0.13315063715
+0.129150629044
+0.124759934843
+0.121627151966
+0.12074573338
+0.118403509259
+0.114418104291
+0.113920293748
+0.113882392645
+0.109763622284
+0.10965012759
+0.108391456306
+0.10837161541
+0.106906972826
+0.104507245123
+0.101064741611
+0.0999932512641
+0.0975551754236
+0.096052788198
+0.0955029428005
+0.0949434190989
+0.0934264957905
+0.0921269953251
+0.0875599086285
+0.0847728177905
+0.0847529768944
+0.0845466852188
+0.0835842266679
+0.0805003866553
+0.0799826383591
+0.0781304910779
+0.077094681561
+0.0738087594509
+0.0683386176825
+0.0682177022099
+0.0669514685869
+0.0657949447632
+0.0650624334812
+0.0645970776677
+0.0633753612638
+0.0629113391042
+0.0628305673599
+0.999109208584
+0.992743134499
+0.983281314373
+0.982452630997
+0.975484549999
+0.970617771149
+0.928097903728
+0.865000605583
+0.826762795448
+0.812776565552
+0.811406314373
+0.799418926239
+0.701389253139
+0.676407516003
+0.490412205458
+0.435803949833
+0.352880001068
+0.306210845709
+0.267815083265
+0.264043390751
+0.263534426689
+0.254603892565
+0.247845634818
+0.244158819318
+0.2342633605
+0.233986437321
+0.231409206986
+0.222957953811
+0.205752298236
+0.201805457473
+0.199451759458
+0.199187934399
+0.190660268068
+0.153879851103
+0.144555643201
+0.138336762786
+0.116321623325
+0.112225517631
+0.110304139555
+0.107437700033
+0.10678870976
+0.101761899889
+0.0938980057836
+0.0898617058992
+0.086585983634
+0.0851408094168
+0.0815256685019
+0.0813156589866
+0.0747051164508
+0.0738424509764
+0.0723445862532
+0.068854406476
+0.0665960013866
+0.0640038326383
+0.0609489679337
+0.999246478081
+0.998169898987
+0.992393374443
+0.989687263966
+0.937703549862
+0.90972495079
+0.876017689705
+0.867915332317
+0.841566503048
+0.836467921734
+0.818332314491
+0.776203989983
+0.721603572369
+0.592362761497
+0.494615644217
+0.491387367249
+0.457357943058
+0.441071778536
+0.439277619123
+0.39573648572
+0.32120257616
+0.318038165569
+0.293519079685
+0.288585543633
+0.284743964672
+0.279273092747
+0.258300900459
+0.252231836319
+0.242583885789
+0.237786963582
+0.232144594193
+0.226224780083
+0.222068950534
+0.221124842763
+0.216105744243
+0.21522437036
+0.213586583734
+0.207736417651
+0.203470900655
+0.198889255524
+0.187587931752
+0.185902893543
+0.163339167833
+0.161005318165
+0.154672786593
+0.15464425087
+0.15107126534
+0.138669639826
+0.136114791036
+0.134524568915
+0.133036404848
+0.123886078596
+0.113351538777
+0.110617406666
+0.109029516578
+0.108273267746
+0.0941390767694
+0.090435102582
+0.0902486294508
+0.0901169329882
+0.0900552421808
+0.090035803616
+0.087268806994
+0.0852597653866
+0.0829373151064
+0.0754492506385
+0.0750243887305
+0.0745019465685
+0.0741142481565
+0.0724273175001
+0.0703431516886
+0.0650764331222
+0.0622702613473
+0.061198618263
+0.999644637108
+0.990814864635
+0.894765794277
+0.885608255863
+0.754888832569
+0.713861584663
+0.684454917908
+0.590283930302
+0.497921705246
+0.433427602053
+0.359696686268
+0.35575479269
+0.263097971678
+0.261578083038
+0.238353610039
+0.229653939605
+0.180853202939
+0.172279164195
+0.141518607736
+0.124984435737
+0.112127348781
+0.103811040521
+0.101689502597
+0.0964450463653
+0.0937804952264
+0.0867951586843
+0.0855999812484
+0.0703577697277
+0.0605317391455
+0.99923813343
+0.397775232792
+0.117807760835
+0.0822966992855
+0.992630243301
+0.963168919086
+0.962939083576
+0.95875620842
+0.949525296688
+0.947828471661
+0.947741806507
+0.926220238209
+0.86968100071
+0.858731865883
+0.82063472271
+0.817806184292
+0.756629288197
+0.742880165577
+0.721090257168
+0.711871862411
+0.709046542645
+0.70113068819
+0.684598088264
+0.680248141289
+0.679118931293
+0.650461673737
+0.638901889324
+0.633076548576
+0.628844857216
+0.623205304146
+0.606007456779
+0.589472055435
+0.543069064617
+0.540096759796
+0.492528945208
+0.48947045207
+0.469458311796
+0.455150544643
+0.445721745491
+0.435236483812
+0.406128436327
+0.380710035563
+0.366729050875
+0.362650752068
+0.349364519119
+0.34099110961
+0.311600416899
+0.266718536615
+0.248400360346
+0.247402533889
+0.243538707495
+0.238049387932
+0.235403135419
+0.229550123215
+0.214147239923
+0.208026111126
+0.205201610923
+0.198470279574
+0.193872436881
+0.193602651358
+0.185264185071
+0.182557508349
+0.179749533534
+0.179342880845
+0.168395116925
+0.165852680802
+0.16438511014
+0.163314878941
+0.162283062935
+0.147158786654
+0.146352693439
+0.14418630302
+0.142627716064
+0.136985763907
+0.130022868514
+0.127032727003
+0.124445214868
+0.123649470508
+0.123124606907
+0.121300078928
+0.11136007309
+0.104333639145
+0.100446812809
+0.0974583253264
+0.0972215905786
+0.0968780815601
+0.0957682654262
+0.0938705429435
+0.093074850738
+0.0905844271183
+0.0877063646913
+0.086587227881
+0.0784232839942
+0.0765969008207
+0.076567068696
+0.0710005909204
+0.0671970173717
+0.0661967769265
+0.0641636252403
+0.0639960244298
+0.0635517835617
+0.0620191954076
+0.0604455210268
+0.999867677689
+0.210688859224
+0.0713231265545
+0.991888701916
+0.56558328867
+0.405686616898
+0.180391490459
+0.156724274158
+0.14362333715
+0.0636452510953
+0.0619817040861
+0.997169792652
+0.30119228363
+0.249937698245
+0.179026842117
+0.159113720059
+0.123483106494
+0.117558807135
+0.10598859936
+0.0839374139905
+0.0659993514419
+0.0613634325564
+0.0608138591051
+0.999056994915
+0.956122219563
+0.892946183681
+0.88439810276
+0.478143721819
+0.360712349415
+0.282595068216
+0.279659181833
+0.248867988586
+0.215730160475
+0.212947562337
+0.188425406814
+0.183872282505
+0.152105331421
+0.140800788999
+0.127725064754
+0.119623720646
+0.0949818342924
+0.0842129662633
+0.0828401148319
+0.0730597451329
+0.0690868645906
+0.066667996347
+0.0658395588398
+0.99817097187
+0.996992468834
+0.970074415207
+0.962572574615
+0.960867285728
+0.958837211132
+0.947814822197
+0.936578333378
+0.824584782124
+0.465736061335
+0.430638581514
+0.424678534269
+0.371656805277
+0.339509576559
+0.320714145899
+0.274679452181
+0.270700365305
+0.259493798018
+0.257501095533
+0.245620459318
+0.225503370166
+0.216872289777
+0.207695201039
+0.173816710711
+0.172349110246
+0.14915291965
+0.11227889359
+0.104980617762
+0.098499186337
+0.0936903730035
+0.0912390351295
+0.0871238932014
+0.0784211829305
+0.0765602812171
+0.0750699341297
+0.0734168812633
+0.0658170133829
+0.0601446032524
+0.999540090561
+0.666923224926
+0.607936024666
+0.564328491688
+0.226049855351
+0.171824634075
+0.163891911507
+0.157775998116
+0.15336227417
+0.153187602758
+0.127075091004
+0.120113298297
+0.119716398418
+0.119523651898
+0.114527449012
+0.112873136997
+0.0966422110796
+0.0733615309
+0.070689022541
+0.0700094625354
+0.0624397359788
+0.0616969838738
+0.0603746697307
+0.999722778797
+0.921412825584
+0.423539400101
+0.350275099277
+0.217978239059
+0.139648079872
+0.132571429014
+0.101855933666
+0.0736287012696
+0.0692351013422
+0.999940633774
+0.242266193032
+0.999653220177
+0.293486297131
+0.0847252160311
+0.999656200409
+0.245260566473
+0.107899814844
+0.0841839239001
+0.0716745704412
+0.97137093544
+0.871734440327
+0.795829951763
+0.486216306686
+0.377939939499
+0.308111965656
+0.251147001982
+0.235551834106
+0.223601177335
+0.217630192637
+0.201627239585
+0.196282878518
+0.120490066707
+0.111871816218
+0.0874184295535
+0.087182790041
+0.0804472714663
+0.0732774287462
+0.0717148631811
+0.0708374977112
+0.0693840533495
+0.0648901537061
+0.0637912377715
+0.995474874973
+0.550908148289
+0.196957230568
+0.108727455139
+0.892513751984
+0.62106192112
+0.332313805819
+0.283135324717
+0.280570268631
+0.194330200553
+0.178090944886
+0.131099164486
+0.0899079889059
+0.0691684037447
+0.0669651404023
+0.0668090209365
+0.999904870987
+0.389615207911
+0.245767563581
+0.0926912352443
+0.0872029811144
+0.0666669905186
+0.997706055641
+0.406540095806
+0.222790658474
+0.14788800478
+0.0640456452966
+0.99798822403
+0.808094739914
+0.700625061989
+0.355237215757
+0.246342003345
+0.240103527904
+0.20532938838
+0.175236612558
+0.125729233027
+0.125303760171
+0.0872136950493
+0.0796238481998
+0.0789869725704
+0.0663390159607
+0.0639635175467
+0.0629102885723
+0.968968570232
+0.947573840618
+0.939949333668
+0.925491809845
+0.92008459568
+0.908232152462
+0.885247826576
+0.826033949852
+0.786818385124
+0.761864840984
+0.738084614277
+0.737488508224
+0.683340013027
+0.673958957195
+0.641772508621
+0.62966376543
+0.627728939056
+0.576566338539
+0.53709602356
+0.529606103897
+0.527243614197
+0.510518312454
+0.475520402193
+0.468297332525
+0.406658530235
+0.403307497501
+0.400523334742
+0.393328279257
+0.36211976409
+0.348257213831
+0.344720304012
+0.320322930813
+0.282329976559
+0.279686659575
+0.276620656252
+0.274324953556
+0.270865261555
+0.256627589464
+0.246466085315
+0.241774708033
+0.239855900407
+0.235757052898
+0.231983333826
+0.228315219283
+0.222469106317
+0.218911245465
+0.217133238912
+0.195460438728
+0.192689582705
+0.187854856253
+0.181146860123
+0.16436624527
+0.158901408315
+0.15862840414
+0.155625700951
+0.153132334352
+0.15167838335
+0.144184857607
+0.141377851367
+0.137543201447
+0.133870020509
+0.128608733416
+0.12526383996
+0.125120297074
+0.124360859394
+0.11782182008
+0.11736100167
+0.11522244662
+0.113823622465
+0.11160415411
+0.110644489527
+0.104634143412
+0.10315862298
+0.102552689612
+0.101551935077
+0.0996452569962
+0.0978473052382
+0.0972594246268
+0.0942338779569
+0.0922752767801
+0.0887507870793
+0.0850504934788
+0.0823895260692
+0.0810889527202
+0.0754410848022
+0.0751402676105
+0.0741264373064
+0.0739272758365
+0.0725436285138
+0.0717726647854
+0.0691707134247
+0.0649259164929
+0.0634897127748
+0.0632733404636
+0.0618012472987
+0.955810964108
+0.951663374901
+0.946589589119
+0.942293643951
+0.8848695755
+0.788360178471
+0.548145830631
+0.520300447941
+0.483562320471
+0.357719182968
+0.310420870781
+0.309353232384
+0.308743983507
+0.272014528513
+0.251808285713
+0.251057982445
+0.24288021028
+0.231367245317
+0.202209070325
+0.200972035527
+0.149208888412
+0.137366756797
+0.135332509875
+0.128293901682
+0.125284984708
+0.111191928387
+0.108640111983
+0.0960215255618
+0.0942403227091
+0.0888722613454
+0.0838943719864
+0.0767089724541
+0.071540415287
+0.0691746324301
+0.063325561583
+0.060267701745
+0.997511148453
+0.994918107986
+0.990752220154
+0.968038260937
+0.952356040478
+0.902082622051
+0.901974439621
+0.753111183643
+0.670473098755
+0.598843634129
+0.46637570858
+0.458024919033
+0.349030166864
+0.308825284243
+0.304603666067
+0.294533073902
+0.274471640587
+0.240688100457
+0.219368264079
+0.216227844357
+0.202139824629
+0.169384345412
+0.142244338989
+0.135620445013
+0.131436288357
+0.130214199424
+0.120447449386
+0.112564846873
+0.101925462484
+0.0976354703307
+0.0949821323156
+0.0947913378477
+0.0846702009439
+0.0803752020001
+0.0793736800551
+0.0784438550472
+0.0760157555342
+0.0738162472844
+0.0663342103362
+0.0658029392362
+0.0648400485516
+0.0602975934744
+0.999670505524
+0.999530673027
+0.99944216013
+0.988520979881
+0.980825185776
+0.547989428043
+0.518245577812
+0.250518530607
+0.238671511412
+0.233977407217
+0.218646690249
+0.209478765726
+0.187583118677
+0.169768035412
+0.130613699555
+0.124472916126
+0.113801844418
+0.0916478186846
+0.0889775827527
+0.0821042060852
+0.07185934484
+0.0647586286068
+0.0620876699686
+0.999585688114
+0.597760677338
+0.276933193207
+0.251055806875
+0.183060079813
+0.123441524804
+0.0787074193358
+0.0747317522764
+0.0608398802578
+0.998697340488
+0.990386784077
+0.988062560558
+0.937335729599
+0.891906380653
+0.875658929348
+0.857246696949
+0.832446753979
+0.700079262257
+0.662508964539
+0.637773275375
+0.633226454258
+0.569138765335
+0.53634083271
+0.521243453026
+0.506232559681
+0.475990831852
+0.39992710948
+0.394736886024
+0.390601634979
+0.389564424753
+0.380248188972
+0.353010296822
+0.328064203262
+0.316458016634
+0.309703826904
+0.300846576691
+0.296114474535
+0.268241554499
+0.265651166439
+0.264369219542
+0.263969480991
+0.261458456516
+0.260906636715
+0.251807421446
+0.243931263685
+0.226375237107
+0.219155922532
+0.20752055943
+0.202227503061
+0.197862043977
+0.195747092366
+0.186634615064
+0.182489097118
+0.179625585675
+0.162939667702
+0.158605843782
+0.154647380114
+0.13762421906
+0.135476112366
+0.132997795939
+0.123799823225
+0.115774780512
+0.113132104278
+0.109975762665
+0.107187576592
+0.10472304374
+0.0995202437043
+0.0911397859454
+0.0905823409557
+0.0895934179425
+0.0887583866715
+0.0803543627262
+0.0709221437573
+0.0703653097153
+0.0692569315434
+0.0685588121414
+0.0685393810272
+0.0678650811315
+0.0660905614495
+0.0649406835437
+0.0619710832834
+0.0605246387422
+0.990560948849
+0.969888687134
+0.924007117748
+0.923805654049
+0.837742507458
+0.577892303467
+0.542351365089
+0.394126981497
+0.372652113438
+0.303107947111
+0.287626504898
+0.275559008121
+0.263308525085
+0.216103076935
+0.213415622711
+0.198330163956
+0.19333204627
+0.17177040875
+0.167991906404
+0.15807954967
+0.127967014909
+0.11359320581
+0.113546133041
+0.107090122998
+0.0856902077794
+0.0845858603716
+0.0837073251605
+0.0820282697678
+0.0809481814504
+0.0759676024318
+0.0732876509428
+0.0709682554007
+0.0648226290941
+0.999891281128
+0.980310916901
+0.511214911938
+0.460870653391
+0.302790910006
+0.296945005655
+0.231662154198
+0.22549675405
+0.219288006425
+0.16159747541
+0.139944180846
+0.0910494998097
+0.070450425148
+0.0657297149301
+0.0620163716376
+0.060229241848
+0.999928355217
+0.225855559111
+0.20847210288
+0.0799448564649
+0.997085392475
+0.561557650566
+0.441562652588
+0.418189525604
+0.31065967679
+0.241753026843
+0.145225599408
+0.0920219123363
+0.0900227352977
+0.0842716246843
+0.0617374442518
+0.993637561798
+0.960684418678
+0.956613242626
+0.937926530838
+0.93555945158
+0.903111815453
+0.896665215492
+0.875808238983
+0.87112480402
+0.848295331001
+0.833937108517
+0.800671517849
+0.786137461662
+0.782592356205
+0.728530168533
+0.726586580276
+0.70488768816
+0.68560475111
+0.654543340206
+0.627118349075
+0.573421657085
+0.572720348835
+0.558781862259
+0.52694863081
+0.52484112978
+0.519244968891
+0.491170465946
+0.488644838333
+0.476614147425
+0.473701357841
+0.470730483532
+0.432818889618
+0.413084030151
+0.402788460255
+0.399568349123
+0.381180644035
+0.375348448753
+0.373334914446
+0.370947182178
+0.342486172915
+0.330176770687
+0.316466748714
+0.311164736748
+0.308964639902
+0.306840747595
+0.302039384842
+0.299615889788
+0.295907795429
+0.291600197554
+0.278708189726
+0.266727924347
+0.249577015638
+0.249483451247
+0.248235091567
+0.246852204204
+0.24648553133
+0.244383111596
+0.239467218518
+0.23769724369
+0.236938372254
+0.219536274672
+0.219087809324
+0.215942174196
+0.215562835336
+0.215444713831
+0.214848831296
+0.212868824601
+0.204313054681
+0.202027991414
+0.18546551466
+0.183999434114
+0.178068682551
+0.173594892025
+0.171664327383
+0.167488545179
+0.167048081756
+0.157913848758
+0.15613552928
+0.155310064554
+0.15522582829
+0.150842294097
+0.14706403017
+0.137683436275
+0.137451544404
+0.136762678623
+0.134847670794
+0.129706770182
+0.128233700991
+0.128089562058
+0.127669394016
+0.121358402073
+0.116723135114
+0.116605170071
+0.112269029021
+0.109458297491
+0.109448507428
+0.107224985957
+0.104446865618
+0.103663705289
+0.0977155938745
+0.0973436161876
+0.0956006795168
+0.0931207612157
+0.0911561921239
+0.090764015913
+0.0855933502316
+0.0855251401663
+0.0852272510529
+0.0833472386003
+0.0832343026996
+0.0828409194946
+0.0822093933821
+0.0812081769109
+0.0807216688991
+0.079481780529
+0.0794054195285
+0.0792907476425
+0.078891903162
+0.0784951597452
+0.0780909061432
+0.0741547048092
+0.0738916546106
+0.0708476305008
+0.0685186758637
+0.0680771768093
+0.067343428731
+0.0670657455921
+0.0643190965056
+0.0641789957881
+0.0629176571965
+0.0620568394661
+0.0615977495909
+0.061546780169
+0.060433447361
diff --git a/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/test-dev_images.txt b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/test-dev_images.txt
new file mode 100644
index 0000000000000000000000000000000000000000..262da0592248b0eb6187e2571cf0285ec93d68e6
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco-minival-images-newnms/test-dev_images.txt
@@ -0,0 +1,11916 @@
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000375521.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000244455.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000375530.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000244462.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113397.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000113408.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000375554.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000171819.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000375566.jpg
+COCO_val2014_000000244496.jpg
+COCO_val2014_000000244496.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000139077.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000506656.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000375606.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000244539.jpg
+COCO_val2014_000000565045.jpg
+COCO_val2014_000000565045.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000113473.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000375621.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000244550.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000492605.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000506707.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000113493.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000215524.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506717.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000506723.jpg
+COCO_val2014_000000433980.jpg
+COCO_val2014_000000433980.jpg
+COCO_val2014_000000433980.jpg
+COCO_val2014_000000433980.jpg
+COCO_val2014_000000433980.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000244586.jpg
+COCO_val2014_000000113533.jpg
+COCO_val2014_000000113533.jpg
+COCO_val2014_000000113533.jpg
+COCO_val2014_000000113533.jpg
+COCO_val2014_000000113533.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000113559.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000390298.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000281072.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000113571.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000543218.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000506803.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113588.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000113590.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000244665.jpg
+COCO_val2014_000000375755.jpg
+COCO_val2014_000000375755.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000375769.jpg
+COCO_val2014_000000324776.jpg
+COCO_val2014_000000324776.jpg
+COCO_val2014_000000324776.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506872.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000506874.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375810.jpg
+COCO_val2014_000000375812.jpg
+COCO_val2014_000000375812.jpg
+COCO_val2014_000000375812.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000113678.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000375823.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000261292.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000506933.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113720.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000113722.jpg
+COCO_val2014_000000506945.jpg
+COCO_val2014_000000506945.jpg
+COCO_val2014_000000506945.jpg
+COCO_val2014_000000506945.jpg
+COCO_val2014_000000506945.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000375875.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000506950.jpg
+COCO_val2014_000000113736.jpg
+COCO_val2014_000000113736.jpg
+COCO_val2014_000000113736.jpg
+COCO_val2014_000000113736.jpg
+COCO_val2014_000000113736.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000375881.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000244815.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113745.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113756.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000113757.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000375902.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244833.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000244834.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000507015.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000178370.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000412184.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507037.jpg
+COCO_val2014_000000507065.jpg
+COCO_val2014_000000507065.jpg
+COCO_val2014_000000507065.jpg
+COCO_val2014_000000507065.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244925.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000244931.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000507080.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000193743.jpg
+COCO_val2014_000000104176.jpg
+COCO_val2014_000000104176.jpg
+COCO_val2014_000000104176.jpg
+COCO_val2014_000000113890.jpg
+COCO_val2014_000000113890.jpg
+COCO_val2014_000000113890.jpg
+COCO_val2014_000000113890.jpg
+COCO_val2014_000000113890.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113905.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000113914.jpg
+COCO_val2014_000000376059.jpg
+COCO_val2014_000000376059.jpg
+COCO_val2014_000000376059.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000244999.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507147.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000507154.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000245013.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000376093.jpg
+COCO_val2014_000000507167.jpg
+COCO_val2014_000000507167.jpg
+COCO_val2014_000000507167.jpg
+COCO_val2014_000000245026.jpg
+COCO_val2014_000000245026.jpg
+COCO_val2014_000000245026.jpg
+COCO_val2014_000000245026.jpg
+COCO_val2014_000000245026.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000507171.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000106375.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000507180.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000376112.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000507187.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113975.jpg
+COCO_val2014_000000113989.jpg
+COCO_val2014_000000113989.jpg
+COCO_val2014_000000113989.jpg
+COCO_val2014_000000507211.jpg
+COCO_val2014_000000507211.jpg
+COCO_val2014_000000507211.jpg
+COCO_val2014_000000507211.jpg
+COCO_val2014_000000507211.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000507223.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000376160.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000114025.jpg
+COCO_val2014_000000376185.jpg
+COCO_val2014_000000376185.jpg
+COCO_val2014_000000376185.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114049.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000114055.jpg
+COCO_val2014_000000507273.jpg
+COCO_val2014_000000507273.jpg
+COCO_val2014_000000507273.jpg
+COCO_val2014_000000507273.jpg
+COCO_val2014_000000376206.jpg
+COCO_val2014_000000376206.jpg
+COCO_val2014_000000376206.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376208.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376209.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376233.jpg
+COCO_val2014_000000376236.jpg
+COCO_val2014_000000376236.jpg
+COCO_val2014_000000376236.jpg
+COCO_val2014_000000376236.jpg
+COCO_val2014_000000507312.jpg
+COCO_val2014_000000507312.jpg
+COCO_val2014_000000507312.jpg
+COCO_val2014_000000507312.jpg
+COCO_val2014_000000245174.jpg
+COCO_val2014_000000245174.jpg
+COCO_val2014_000000245174.jpg
+COCO_val2014_000000245174.jpg
+COCO_val2014_000000245174.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000376247.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000114108.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000245182.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507330.jpg
+COCO_val2014_000000507352.jpg
+COCO_val2014_000000507352.jpg
+COCO_val2014_000000507352.jpg
+COCO_val2014_000000507352.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000114147.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000376295.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000245227.jpg
+COCO_val2014_000000114158.jpg
+COCO_val2014_000000114158.jpg
+COCO_val2014_000000114158.jpg
+COCO_val2014_000000114158.jpg
+COCO_val2014_000000114158.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376307.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000376322.jpg
+COCO_val2014_000000106411.jpg
+COCO_val2014_000000106411.jpg
+COCO_val2014_000000106411.jpg
+COCO_val2014_000000106411.jpg
+COCO_val2014_000000106411.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000114183.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000376342.jpg
+COCO_val2014_000000507427.jpg
+COCO_val2014_000000507427.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000062726.jpg
+COCO_val2014_000000507436.jpg
+COCO_val2014_000000507436.jpg
+COCO_val2014_000000507436.jpg
+COCO_val2014_000000507436.jpg
+COCO_val2014_000000434098.jpg
+COCO_val2014_000000434098.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000245295.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000376372.jpg
+COCO_val2014_000000245301.jpg
+COCO_val2014_000000245301.jpg
+COCO_val2014_000000245301.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245311.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245313.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245315.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000245320.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000376393.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000507473.jpg
+COCO_val2014_000000038721.jpg
+COCO_val2014_000000038721.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000376416.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114282.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000114291.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376441.jpg
+COCO_val2014_000000376449.jpg
+COCO_val2014_000000376449.jpg
+COCO_val2014_000000376449.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000376450.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000245383.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000376456.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114313.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000114317.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000376469.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000507551.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245411.jpg
+COCO_val2014_000000245414.jpg
+COCO_val2014_000000245414.jpg
+COCO_val2014_000000114352.jpg
+COCO_val2014_000000114352.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245426.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245430.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245432.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245447.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000245448.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000376521.jpg
+COCO_val2014_000000245453.jpg
+COCO_val2014_000000245453.jpg
+COCO_val2014_000000245453.jpg
+COCO_val2014_000000376531.jpg
+COCO_val2014_000000376531.jpg
+COCO_val2014_000000245460.jpg
+COCO_val2014_000000245460.jpg
+COCO_val2014_000000245460.jpg
+COCO_val2014_000000114389.jpg
+COCO_val2014_000000114389.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000245462.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376545.jpg
+COCO_val2014_000000376549.jpg
+COCO_val2014_000000376549.jpg
+COCO_val2014_000000376549.jpg
+COCO_val2014_000000376549.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000114414.jpg
+COCO_val2014_000000507633.jpg
+COCO_val2014_000000507633.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000245497.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000376575.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507663.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000507667.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000114453.jpg
+COCO_val2014_000000281220.jpg
+COCO_val2014_000000281220.jpg
+COCO_val2014_000000281220.jpg
+COCO_val2014_000000281220.jpg
+COCO_val2014_000000114458.jpg
+COCO_val2014_000000114458.jpg
+COCO_val2014_000000114458.jpg
+COCO_val2014_000000114458.jpg
+COCO_val2014_000000114458.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000376603.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000114481.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000376628.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000507719.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000139113.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000172004.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000507750.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000376679.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000346774.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000507782.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000245642.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000507797.jpg
+COCO_val2014_000000245660.jpg
+COCO_val2014_000000245660.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000245667.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000303089.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000376751.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507826.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000507833.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000376773.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000114634.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000245716.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000303101.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000108130.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000245754.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000114684.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000477867.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000114710.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000507927.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000376859.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000114717.jpg
+COCO_val2014_000000507935.jpg
+COCO_val2014_000000507935.jpg
+COCO_val2014_000000507935.jpg
+COCO_val2014_000000507945.jpg
+COCO_val2014_000000507945.jpg
+COCO_val2014_000000507945.jpg
+COCO_val2014_000000507945.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114744.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000114745.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000245818.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000376891.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000507966.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000376900.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000507975.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000106508.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000507979.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000019129.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000508006.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376939.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000376959.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000245898.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000376990.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114855.jpg
+COCO_val2014_000000114868.jpg
+COCO_val2014_000000114868.jpg
+COCO_val2014_000000114868.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000114870.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000508087.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000114884.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508101.jpg
+COCO_val2014_000000508119.jpg
+COCO_val2014_000000508119.jpg
+COCO_val2014_000000508119.jpg
+COCO_val2014_000000508119.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000114907.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000150224.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000377060.jpg
+COCO_val2014_000000246001.jpg
+COCO_val2014_000000246001.jpg
+COCO_val2014_000000246001.jpg
+COCO_val2014_000000246001.jpg
+COCO_val2014_000000246001.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246004.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246005.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000246014.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000114946.jpg
+COCO_val2014_000000377091.jpg
+COCO_val2014_000000377091.jpg
+COCO_val2014_000000377091.jpg
+COCO_val2014_000000377091.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000508167.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377097.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000377111.jpg
+COCO_val2014_000000246040.jpg
+COCO_val2014_000000246040.jpg
+COCO_val2014_000000246040.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000377113.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246053.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000246057.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000061605.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246064.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000246066.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000115006.jpg
+COCO_val2014_000000377155.jpg
+COCO_val2014_000000377155.jpg
+COCO_val2014_000000377155.jpg
+COCO_val2014_000000377155.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000508230.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246105.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000246106.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000377183.jpg
+COCO_val2014_000000115043.jpg
+COCO_val2014_000000115043.jpg
+COCO_val2014_000000115043.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000377195.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246124.jpg
+COCO_val2014_000000246125.jpg
+COCO_val2014_000000246125.jpg
+COCO_val2014_000000246125.jpg
+COCO_val2014_000000246125.jpg
+COCO_val2014_000000246125.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000115060.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508288.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508302.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000508303.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000377239.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000246183.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000508339.jpg
+COCO_val2014_000000062878.jpg
+COCO_val2014_000000062878.jpg
+COCO_val2014_000000062878.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115128.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000115146.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508370.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000508373.jpg
+COCO_val2014_000000246231.jpg
+COCO_val2014_000000246231.jpg
+COCO_val2014_000000246231.jpg
+COCO_val2014_000000246231.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246252.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000246265.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377352.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377361.jpg
+COCO_val2014_000000377385.jpg
+COCO_val2014_000000377385.jpg
+COCO_val2014_000000377385.jpg
+COCO_val2014_000000377385.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115243.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000115245.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000508470.jpg
+COCO_val2014_000000521655.jpg
+COCO_val2014_000000521655.jpg
+COCO_val2014_000000521655.jpg
+COCO_val2014_000000521655.jpg
+COCO_val2014_000000521655.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000377427.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000182784.jpg
+COCO_val2014_000000390585.jpg
+COCO_val2014_000000390585.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000508514.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000115314.jpg
+COCO_val2014_000000565353.jpg
+COCO_val2014_000000565353.jpg
+COCO_val2014_000000565353.jpg
+COCO_val2014_000000565353.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000508538.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000456127.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000246398.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000377486.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115363.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000115370.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000246454.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000508605.jpg
+COCO_val2014_000000377577.jpg
+COCO_val2014_000000377577.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000508656.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000377588.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115455.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000115459.jpg
+COCO_val2014_000000246535.jpg
+COCO_val2014_000000246535.jpg
+COCO_val2014_000000246535.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000246562.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000377635.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000128476.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000246576.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000377652.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000508730.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246589.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000246590.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000115521.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246597.jpg
+COCO_val2014_000000246612.jpg
+COCO_val2014_000000246612.jpg
+COCO_val2014_000000246612.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246626.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000246629.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000377706.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000305000.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000377715.jpg
+COCO_val2014_000000246649.jpg
+COCO_val2014_000000246649.jpg
+COCO_val2014_000000246649.jpg
+COCO_val2014_000000246649.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115579.jpg
+COCO_val2014_000000115584.jpg
+COCO_val2014_000000115584.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000377730.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508811.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000508822.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000246686.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000150342.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115626.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000115636.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000508872.jpg
+COCO_val2014_000000377802.jpg
+COCO_val2014_000000377802.jpg
+COCO_val2014_000000377802.jpg
+COCO_val2014_000000377802.jpg
+COCO_val2014_000000377802.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000377809.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000115667.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000246746.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000377832.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000368807.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000115700.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000508917.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000246782.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000115721.jpg
+COCO_val2014_000000508949.jpg
+COCO_val2014_000000508949.jpg
+COCO_val2014_000000508949.jpg
+COCO_val2014_000000508949.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000508950.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000543577.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000508962.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000115752.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508972.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000508977.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000309371.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000115765.jpg
+COCO_val2014_000000377910.jpg
+COCO_val2014_000000377910.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000508985.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000246843.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000115772.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000106677.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115791.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115793.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000115796.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509014.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000509020.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000377949.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000246878.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000377951.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000246883.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509028.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000509037.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115823.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000115830.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000195896.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377984.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000377999.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000368836.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115870.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000115875.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246951.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246963.jpg
+COCO_val2014_000000246968.jpg
+COCO_val2014_000000246968.jpg
+COCO_val2014_000000246968.jpg
+COCO_val2014_000000246968.jpg
+COCO_val2014_000000246968.jpg
+COCO_val2014_000000115898.jpg
+COCO_val2014_000000115898.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000378048.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000115912.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000182805.jpg
+COCO_val2014_000000115924.jpg
+COCO_val2014_000000115924.jpg
+COCO_val2014_000000115924.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000115930.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000562519.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000509158.jpg
+COCO_val2014_000000378096.jpg
+COCO_val2014_000000378096.jpg
+COCO_val2014_000000378096.jpg
+COCO_val2014_000000378096.jpg
+COCO_val2014_000000378096.jpg
+COCO_val2014_000000378116.jpg
+COCO_val2014_000000378116.jpg
+COCO_val2014_000000378116.jpg
+COCO_val2014_000000509192.jpg
+COCO_val2014_000000509192.jpg
+COCO_val2014_000000509194.jpg
+COCO_val2014_000000509194.jpg
+COCO_val2014_000000509194.jpg
+COCO_val2014_000000509194.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000368855.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000378126.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000247057.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378134.jpg
+COCO_val2014_000000378147.jpg
+COCO_val2014_000000378147.jpg
+COCO_val2014_000000378147.jpg
+COCO_val2014_000000509223.jpg
+COCO_val2014_000000509223.jpg
+COCO_val2014_000000509227.jpg
+COCO_val2014_000000509227.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000116017.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000378163.jpg
+COCO_val2014_000000116023.jpg
+COCO_val2014_000000116023.jpg
+COCO_val2014_000000116023.jpg
+COCO_val2014_000000116023.jpg
+COCO_val2014_000000116023.jpg
+COCO_val2014_000000378169.jpg
+COCO_val2014_000000378169.jpg
+COCO_val2014_000000378169.jpg
+COCO_val2014_000000378169.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000200267.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116037.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000116046.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000259640.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000378204.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116061.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116067.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000116068.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000247141.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116083.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000116096.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000063040.jpg
+COCO_val2014_000000378244.jpg
+COCO_val2014_000000378244.jpg
+COCO_val2014_000000378244.jpg
+COCO_val2014_000000378244.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000509319.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000247179.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116132.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000116133.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000247206.jpg
+COCO_val2014_000000543644.jpg
+COCO_val2014_000000543644.jpg
+COCO_val2014_000000543644.jpg
+COCO_val2014_000000543644.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000378284.jpg
+COCO_val2014_000000247216.jpg
+COCO_val2014_000000247216.jpg
+COCO_val2014_000000247216.jpg
+COCO_val2014_000000247216.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000116149.jpg
+COCO_val2014_000000509366.jpg
+COCO_val2014_000000509366.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000116173.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000247259.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000378334.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000247264.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000116202.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000378347.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000116208.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000281512.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247285.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000247306.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509451.jpg
+COCO_val2014_000000509459.jpg
+COCO_val2014_000000509459.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000247317.jpg
+COCO_val2014_000000509471.jpg
+COCO_val2014_000000509471.jpg
+COCO_val2014_000000509471.jpg
+COCO_val2014_000000509471.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000116261.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509497.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000509514.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000247378.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509526.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000509536.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000378467.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000543676.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000378482.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000116341.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000194153.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000116354.jpg
+COCO_val2014_000000378502.jpg
+COCO_val2014_000000378502.jpg
+COCO_val2014_000000378502.jpg
+COCO_val2014_000000378502.jpg
+COCO_val2014_000000378502.jpg
+COCO_val2014_000000509577.jpg
+COCO_val2014_000000509577.jpg
+COCO_val2014_000000509577.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000247438.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000378515.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509589.jpg
+COCO_val2014_000000509590.jpg
+COCO_val2014_000000509590.jpg
+COCO_val2014_000000509590.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000116377.jpg
+COCO_val2014_000000172315.jpg
+COCO_val2014_000000172315.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000116389.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000378538.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000247474.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000116405.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000247484.jpg
+COCO_val2014_000000378561.jpg
+COCO_val2014_000000378561.jpg
+COCO_val2014_000000378561.jpg
+COCO_val2014_000000378561.jpg
+COCO_val2014_000000378561.jpg
+COCO_val2014_000000116434.jpg
+COCO_val2014_000000116434.jpg
+COCO_val2014_000000116434.jpg
+COCO_val2014_000000116434.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000116439.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000247519.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000116455.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000291936.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000116466.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000378614.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000509695.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378652.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378657.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000378658.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000247587.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000116517.jpg
+COCO_val2014_000000378667.jpg
+COCO_val2014_000000378667.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000378673.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000172342.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000509766.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247625.jpg
+COCO_val2014_000000247639.jpg
+COCO_val2014_000000247639.jpg
+COCO_val2014_000000247639.jpg
+COCO_val2014_000000247639.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000509786.jpg
+COCO_val2014_000000116574.jpg
+COCO_val2014_000000116574.jpg
+COCO_val2014_000000116574.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000378727.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509811.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000509826.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247692.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247714.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000247720.jpg
+COCO_val2014_000000509867.jpg
+COCO_val2014_000000509867.jpg
+COCO_val2014_000000509891.jpg
+COCO_val2014_000000509891.jpg
+COCO_val2014_000000509891.jpg
+COCO_val2014_000000378823.jpg
+COCO_val2014_000000378823.jpg
+COCO_val2014_000000378823.jpg
+COCO_val2014_000000378823.jpg
+COCO_val2014_000000378823.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378825.jpg
+COCO_val2014_000000378831.jpg
+COCO_val2014_000000378831.jpg
+COCO_val2014_000000378831.jpg
+COCO_val2014_000000378831.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000116696.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000247782.jpg
+COCO_val2014_000000116712.jpg
+COCO_val2014_000000116712.jpg
+COCO_val2014_000000116712.jpg
+COCO_val2014_000000116712.jpg
+COCO_val2014_000000116712.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247788.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000247790.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000378873.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000247808.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000259755.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000378894.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247839.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000247840.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378928.jpg
+COCO_val2014_000000378940.jpg
+COCO_val2014_000000378940.jpg
+COCO_val2014_000000378940.jpg
+COCO_val2014_000000378940.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378948.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
+COCO_val2014_000000378962.jpg
diff --git a/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco_minival.py b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco_minival.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c6c7528fe85f2f1ede01a2ef7d0f9f1c4ba3d70
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/predict/annot/coco_minival.py
@@ -0,0 +1,89 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import os
+import h5py
+import torch
+import torch.utils.data as data
+from train_sppe.src.utils.img import (load_image, cropBox)
+from opt import opt
+
+
+class Mscoco_minival(data.Dataset):
+ def __init__(self, annoSet='coco-minival-images-newnms/test-dev'):
+ self.img_folder = '../data/coco/images' # root image folders
+ self.annot = dict()
+
+ # Read in annotation information from hdf5 file
+ tags = ['xmin', 'ymin', 'xmax', 'ymax']
+ with h5py.File('./predict/annot/' + annoSet + '.h5', 'r') as a:
+ for tag in tags:
+ self.annot[tag] = a[tag][:]
+
+ # Load in image file names
+ with open('./predict/annot/' + annoSet + '_images.txt', 'r') as f:
+ self.images = f.readlines()
+ self.images = list(map(lambda x: x.strip('\n'), self.images))
+ assert len(self.images) == self.annot['xmin'].shape[0]
+ self.size = len(self.images)
+
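+        # 1-indexed left/right joint pairs; swapped when an image is flipped horizontally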
+ self.flipRef = ((2, 3), (4, 5), (6, 7),
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17))
+ self.year = 2017
+
+ def __getitem__(self, index):
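+        # With 2017-style image folders the 'COCO_val2014_' prefix is stripped from file names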
+ if self.year == 2014:
+ imgname = self.images[index]
+ else:
+ imgname = self.images[index].split('_')[2]
+
+ img_path = os.path.join(self.img_folder, imgname)
+ img = load_image(img_path)
+
+ ori_img = img.clone()
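+        # Normalize by subtracting the per-channel means used at training time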
+ img[0].add_(-0.406)
+ img[1].add_(-0.457)
+ img[2].add_(-0.480)
+
+ imght = img.size(1)
+ imgwidth = img.size(2)
+ upLeft = torch.Tensor(
+ (float(self.annot['xmin'][index]), float(self.annot['ymin'][index])))
+ bottomRight = torch.Tensor(
+ (float(self.annot['xmax'][index]), float(self.annot['ymax'][index])))
+
+ ht = bottomRight[1] - upLeft[1]
+ width = bottomRight[0] - upLeft[0]
+ if width > 100:
+ scaleRate = 0.2
+ else:
+ scaleRate = 0.3
+
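+        # Pad the box by scaleRate on each side, clamping to the image and keeping a minimum size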
+ upLeft[0] = max(0, upLeft[0] - width * scaleRate / 2)
+ upLeft[1] = max(0, upLeft[1] - ht * scaleRate / 2)
+ bottomRight[0] = max(
+ min(imgwidth - 1, bottomRight[0] + width * scaleRate / 2), upLeft[0] + 5)
+ bottomRight[1] = max(
+ min(imght - 1, bottomRight[1] + ht * scaleRate / 2), upLeft[1] + 5)
+
+ inp = cropBox(img, upLeft, bottomRight, opt.inputResH, opt.inputResW)
+ ori_inp = cropBox(ori_img, upLeft, bottomRight,
+ opt.inputResH, opt.inputResW)
+ metaData = (
+ upLeft,
+ bottomRight,
+ ori_inp
+ )
+ box = torch.zeros(4)
+ box[0] = upLeft[0]
+ box[1] = upLeft[1]
+ box[2] = bottomRight[0]
+ box[3] = bottomRight[1]
+
+ return inp, box, imgname, metaData
+
+ def __len__(self):
+ return self.size
diff --git a/joints_detectors/Alphapose/train_sppe/src/predict/p_poseNMS.py b/joints_detectors/Alphapose/train_sppe/src/predict/p_poseNMS.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce8cbc9f1afffaeb8dc52009012ee43e26264399
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/predict/p_poseNMS.py
@@ -0,0 +1,313 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch
+import json
+import os
+import numpy as np
+
+''' Constant Configuration '''
+delta1 = 1
+mu = 1.7
+delta2 = 1.3
+gamma = 22.48
+scoreThreds = 0.15
+matchThreds = 5
+alpha = 0.1
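+# delta1, delta2 and mu parameterize the pose distance; gamma is its elimination
+# threshold. scoreThreds filters weak keypoints, matchThreds is the minimum number
+# of PCK-matched joints for elimination, and alpha sets the reference scale.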
+
+
+def pose_nms(bboxes, pose_preds, pose_scores):
+    '''
+    Parametric Pose NMS algorithm
+    bboxes: bbox locations list (n, 4)
+    pose_preds: pose locations list (n, 17, 2)
+    pose_scores: pose scores list (n, 17, 1)
+    '''
+ pose_scores[pose_scores <= scoreThreds] = 1e-5
+ pose_scores[pose_scores > 1] = 1
+ final_result = []
+
+ ori_pose_preds = pose_preds.clone()
+ ori_pose_scores = pose_scores.clone()
+
+ xmax = bboxes[:, 2]
+ xmin = bboxes[:, 0]
+ ymax = bboxes[:, 3]
+ ymin = bboxes[:, 1]
+
+ widths = xmax - xmin
+ heights = ymax - ymin
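+    # Reference distance per person: alpha times the longer side of its bounding box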
+ ref_dists = alpha * np.maximum(widths, heights)
+
+ nsamples = bboxes.shape[0]
+ human_scores = pose_scores.mean(dim=1)
+
+ human_ids = np.arange(nsamples)
+ # Do pPose-NMS
+ pick = []
+ merge_ids = []
+    while human_scores.shape[0] != 0:
+ # Pick the one with highest score
+ pick_id = torch.argmax(human_scores)
+ pick.append(human_ids[pick_id])
+ # num_visPart = torch.sum(pose_scores[pick_id] > 0.2)
+
+ # Get numbers of match keypoints by calling PCK_match
+ ref_dist = ref_dists[human_ids[pick_id]]
+ simi = get_parametric_distance(
+ pick_id, pose_preds, pose_scores, ref_dist)
+ num_match_keypoints = PCK_match(
+ pose_preds[pick_id], pose_preds, ref_dist)
+
+ # Delete humans who have more than matchThreds keypoints overlap and high similarity
+ delete_ids = torch.from_numpy(np.arange(human_scores.shape[0]))[
+ (simi > gamma) | (num_match_keypoints >= matchThreds)]
+
+ if delete_ids.shape[0] == 0:
+ delete_ids = pick_id
+
+ merge_ids.append(human_ids[delete_ids])
+ pose_preds = np.delete(pose_preds, delete_ids, axis=0)
+ pose_scores = np.delete(pose_scores, delete_ids, axis=0)
+ human_ids = np.delete(human_ids, delete_ids)
+ human_scores = np.delete(human_scores, delete_ids, axis=0)
+
+ assert len(merge_ids) == len(pick)
+ preds_pick = ori_pose_preds[pick]
+ scores_pick = ori_pose_scores[pick]
+
+ for j in range(len(pick)):
+ ids = np.arange(17)
+ max_score = torch.max(scores_pick[j, ids, 0])
+
+ if max_score < scoreThreds:
+ continue
+
+ # Merge poses
+ merge_id = merge_ids[j]
+ merge_pose, merge_score = p_merge_fast(
+ preds_pick[j], ori_pose_preds[merge_id], ori_pose_scores[merge_id], ref_dists[pick[j]])
+
+ max_score = torch.max(merge_score[ids])
+ if max_score < scoreThreds:
+ continue
+
+ xmax = max(merge_pose[:, 0])
+ xmin = min(merge_pose[:, 0])
+ ymax = max(merge_pose[:, 1])
+ ymin = min(merge_pose[:, 1])
+
+ if (1.5 ** 2 * (xmax - xmin) * (ymax - ymin) < 40 * 40.5):
+ continue
+
+ final_result.append({
+ 'keypoints': merge_pose - 0.3,
+ 'kp_score': merge_score,
+ 'proposal_score': torch.mean(merge_score) + 1.25 * max(merge_score)
+ })
+
+ return final_result
+
+
+def filter_result(args):
+ score_pick, merge_id, pred_pick, pick, bbox_score_pick = args
+ global ori_pose_preds, ori_pose_scores, ref_dists
+ ids = np.arange(17)
+ max_score = torch.max(score_pick[ids, 0])
+
+ if max_score < scoreThreds:
+ return None
+
+ # Merge poses
+ merge_pose, merge_score = p_merge_fast(
+ pred_pick, ori_pose_preds[merge_id], ori_pose_scores[merge_id], ref_dists[pick])
+
+ max_score = torch.max(merge_score[ids])
+ if max_score < scoreThreds:
+ return None
+
+ xmax = max(merge_pose[:, 0])
+ xmin = min(merge_pose[:, 0])
+ ymax = max(merge_pose[:, 1])
+ ymin = min(merge_pose[:, 1])
+
+ if (1.5 ** 2 * (xmax - xmin) * (ymax - ymin) < 40 * 40.5):
+ return None
+
+ return {
+ 'keypoints': merge_pose - 0.3,
+ 'kp_score': merge_score,
+ 'proposal_score': torch.mean(merge_score) + bbox_score_pick + 1.25 * max(merge_score)
+ }
+
+
+def p_merge(ref_pose, cluster_preds, cluster_scores, ref_dist):
+ '''
+ Score-weighted pose merging
+ INPUT:
+ ref_pose: reference pose -- [17, 2]
+ cluster_preds: redundant poses -- [n, 17, 2]
+ cluster_scores: redundant poses score -- [n, 17, 1]
+ ref_dist: reference scale -- Constant
+ OUTPUT:
+ final_pose: merged pose -- [17, 2]
+ final_score: merged score -- [17]
+ '''
+ dist = torch.sqrt(torch.sum(
+ torch.pow(ref_pose[np.newaxis, :] - cluster_preds, 2),
+ dim=2
+ )) # [n, 17]
+
+ kp_num = 17
+ ref_dist = min(ref_dist, 15)
+
+ mask = (dist <= ref_dist)
+ final_pose = torch.zeros(kp_num, 2)
+ final_score = torch.zeros(kp_num)
+
+ if cluster_preds.dim() == 2:
+ cluster_preds.unsqueeze_(0)
+ cluster_scores.unsqueeze_(0)
+ if mask.dim() == 1:
+ mask.unsqueeze_(0)
+
+ for i in range(kp_num):
+ cluster_joint_scores = cluster_scores[:, i][mask[:, i]] # [k, 1]
+ cluster_joint_location = cluster_preds[:, i, :][mask[:, i].unsqueeze(
+ -1).repeat(1, 2)].view((torch.sum(mask[:, i]), -1))
+
+        # Get a normalized score
+ normed_scores = cluster_joint_scores / torch.sum(cluster_joint_scores)
+
+ # Merge poses by a weighted sum
+ final_pose[i, 0] = torch.dot(
+ cluster_joint_location[:, 0], normed_scores.squeeze(-1))
+ final_pose[i, 1] = torch.dot(
+ cluster_joint_location[:, 1], normed_scores.squeeze(-1))
+
+ final_score[i] = torch.dot(cluster_joint_scores.transpose(
+ 0, 1).squeeze(0), normed_scores.squeeze(-1))
+
+ return final_pose, final_score
+
+
+def p_merge_fast(ref_pose, cluster_preds, cluster_scores, ref_dist):
+ '''
+ Score-weighted pose merging
+ INPUT:
+ ref_pose: reference pose -- [17, 2]
+ cluster_preds: redundant poses -- [n, 17, 2]
+ cluster_scores: redundant poses score -- [n, 17, 1]
+ ref_dist: reference scale -- Constant
+ OUTPUT:
+ final_pose: merged pose -- [17, 2]
+ final_score: merged score -- [17]
+ '''
+ dist = torch.sqrt(torch.sum(
+ torch.pow(ref_pose[np.newaxis, :] - cluster_preds, 2),
+ dim=2
+ ))
+
+ kp_num = 17
+ ref_dist = min(ref_dist, 15)
+
+ mask = (dist <= ref_dist)
+ final_pose = torch.zeros(kp_num, 2)
+ final_score = torch.zeros(kp_num)
+
+ if cluster_preds.dim() == 2:
+ cluster_preds.unsqueeze_(0)
+ cluster_scores.unsqueeze_(0)
+ if mask.dim() == 1:
+ mask.unsqueeze_(0)
+
+    # Weighted merge: zero out scores beyond ref_dist, then average locations by normalized score
+ masked_scores = cluster_scores.mul(mask.float().unsqueeze(-1))
+ normed_scores = masked_scores / torch.sum(masked_scores, dim=0)
+
+ final_pose = torch.mul(
+ cluster_preds, normed_scores.repeat(1, 1, 2)).sum(dim=0)
+ final_score = torch.mul(masked_scores, normed_scores).sum(dim=0)
+ return final_pose, final_score
+
+
+def get_parametric_distance(i, all_preds, keypoint_scores, ref_dist):
+ pick_preds = all_preds[i]
+ pred_scores = keypoint_scores[i]
+ dist = torch.sqrt(torch.sum(
+ torch.pow(pick_preds[np.newaxis, :] - all_preds, 2),
+ dim=2
+ ))
+ mask = (dist <= 1)
+
+ # Define a keypoints distance
+ score_dists = torch.zeros(all_preds.shape[0], 17)
+ keypoint_scores.squeeze_()
+ if keypoint_scores.dim() == 1:
+ keypoint_scores.unsqueeze_(0)
+ if pred_scores.dim() == 1:
+ pred_scores.unsqueeze_(1)
+    # Repeat the picked pose's scores so they broadcast against every candidate pose
+ pred_scores = pred_scores.repeat(1, all_preds.shape[0]).transpose(0, 1)
+
+ score_dists[mask] = torch.tanh(
+ pred_scores[mask] / delta1) * torch.tanh(keypoint_scores[mask] / delta1)
+
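+    # Similarity = soft score term + mu * spatial proximity term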
+ point_dist = torch.exp((-1) * dist / delta2)
+ final_dist = torch.sum(score_dists, dim=1) + mu * \
+ torch.sum(point_dist, dim=1)
+
+ return final_dist
+
+
+def PCK_match(pick_pred, all_preds, ref_dist):
+ dist = torch.sqrt(torch.sum(
+ torch.pow(pick_pred[np.newaxis, :] - all_preds, 2),
+ dim=2
+ ))
+ ref_dist = min(ref_dist, 7)
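+    # Count joints whose distance to the picked pose falls within the (capped) reference distance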
+ num_match_keypoints = torch.sum(
+ dist / ref_dist <= 1,
+ dim=1
+ )
+
+ return num_match_keypoints
+
+
+def write_json(all_results, outputpath, for_eval=False):
+ '''
+ all_result: result dict of predictions
+ outputpath: output directory
+ '''
+ json_results = []
+ for im_res in all_results:
+ im_name = im_res['imgname']
+ for human in im_res['result']:
+ keypoints = []
+ result = {}
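+            # COCO evaluation expects an integer image_id parsed from the file name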
+ if for_eval:
+ result['image_id'] = int(im_name.split(
+ '/')[-1].split('.')[0].split('_')[-1])
+ else:
+ result['image_id'] = im_name.split('/')[-1]
+ result['category_id'] = 1
+
+ kp_preds = human['keypoints']
+ kp_scores = human['kp_score']
+ pro_scores = human['proposal_score']
+ for n in range(kp_scores.shape[0]):
+ keypoints.append(float(kp_preds[n, 0]))
+ keypoints.append(float(kp_preds[n, 1]))
+ keypoints.append(float(kp_scores[n]))
+ result['keypoints'] = keypoints
+ result['score'] = float(pro_scores)
+
+ json_results.append(result)
+
+ with open(os.path.join(outputpath, 'alphapose-results.json'), 'w') as json_file:
+ json_file.write(json.dumps(json_results))
diff --git a/joints_detectors/Alphapose/train_sppe/src/train.py b/joints_detectors/Alphapose/train_sppe/src/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0b477cdaefbfadd0db1a4aa2aeaedff9a8d7f37
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/train.py
@@ -0,0 +1,210 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import torch
+import torch.utils.data
+from .utils.dataset import coco
+from opt import opt
+from tqdm import tqdm
+from models.FastPose import createModel
+from .utils.eval import DataLogger, accuracy
+from .utils.img import flip, shuffleLR
+from .evaluation import prediction
+
+from tensorboardX import SummaryWriter
+import os
+
+
+def train(train_loader, m, criterion, optimizer, writer):
+ lossLogger = DataLogger()
+ accLogger = DataLogger()
+ m.train()
+
+ train_loader_desc = tqdm(train_loader)
+
+ for i, (inps, labels, setMask, imgset) in enumerate(train_loader_desc):
+ inps = inps.cuda().requires_grad_()
+ labels = labels.cuda()
+ setMask = setMask.cuda()
+ out = m(inps)
+
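+        # setMask masks out heatmap channels (e.g. unlabeled joints) before the MSE loss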
+ loss = criterion(out.mul(setMask), labels)
+
+ acc = accuracy(out.data.mul(setMask), labels.data, train_loader.dataset)
+
+ accLogger.update(acc[0], inps.size(0))
+ lossLogger.update(loss.item(), inps.size(0))
+
+ optimizer.zero_grad()
+ loss.backward()
+ optimizer.step()
+
+ opt.trainIters += 1
+ # Tensorboard
+ writer.add_scalar(
+ 'Train/Loss', lossLogger.avg, opt.trainIters)
+ writer.add_scalar(
+ 'Train/Acc', accLogger.avg, opt.trainIters)
+
+ # TQDM
+ train_loader_desc.set_description(
+ 'loss: {loss:.8f} | acc: {acc:.2f}'.format(
+ loss=lossLogger.avg,
+ acc=accLogger.avg * 100)
+ )
+
+ train_loader_desc.close()
+
+ return lossLogger.avg, accLogger.avg
+
+
+def valid(val_loader, m, criterion, optimizer, writer):
+ lossLogger = DataLogger()
+ accLogger = DataLogger()
+ m.eval()
+
+ val_loader_desc = tqdm(val_loader)
+
+ for i, (inps, labels, setMask, imgset) in enumerate(val_loader_desc):
+ inps = inps.cuda()
+ labels = labels.cuda()
+ setMask = setMask.cuda()
+
+ with torch.no_grad():
+ out = m(inps)
+
+ loss = criterion(out.mul(setMask), labels)
+
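+            # Flip test: average the prediction with that of the horizontally flipped input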
+ flip_out = m(flip(inps))
+ flip_out = flip(shuffleLR(flip_out, val_loader.dataset))
+
+ out = (flip_out + out) / 2
+
+ acc = accuracy(out.mul(setMask), labels, val_loader.dataset)
+
+ lossLogger.update(loss.item(), inps.size(0))
+ accLogger.update(acc[0], inps.size(0))
+
+ opt.valIters += 1
+
+ # Tensorboard
+ writer.add_scalar(
+ 'Valid/Loss', lossLogger.avg, opt.valIters)
+ writer.add_scalar(
+ 'Valid/Acc', accLogger.avg, opt.valIters)
+
+ val_loader_desc.set_description(
+ 'loss: {loss:.8f} | acc: {acc:.2f}'.format(
+ loss=lossLogger.avg,
+ acc=accLogger.avg * 100)
+ )
+
+ val_loader_desc.close()
+
+ return lossLogger.avg, accLogger.avg
+
+
+def main():
+
+ # Model Initialize
+ m = createModel().cuda()
+    if opt.loadModel:
+        print('Loading Model from {}'.format(opt.loadModel))
+        m.load_state_dict(torch.load(opt.loadModel))
+    else:
+        print('Creating a new model')
+    # Create the experiment directory (and its parent) if it does not exist yet
+    os.makedirs("../exp/{}/{}".format(opt.dataset, opt.expID), exist_ok=True)
+
+ criterion = torch.nn.MSELoss().cuda()
+
+ if opt.optMethod == 'rmsprop':
+ optimizer = torch.optim.RMSprop(m.parameters(),
+ lr=opt.LR,
+ momentum=opt.momentum,
+ weight_decay=opt.weightDecay)
+ elif opt.optMethod == 'adam':
+ optimizer = torch.optim.Adam(
+ m.parameters(),
+ lr=opt.LR
+ )
+ else:
+        raise ValueError('Unsupported optimizer: {}'.format(opt.optMethod))
+
+ writer = SummaryWriter(
+ '.tensorboard/{}/{}'.format(opt.dataset, opt.expID))
+
+ # Prepare Dataset
+ if opt.dataset == 'coco':
+ train_dataset = coco.Mscoco(train=True)
+ val_dataset = coco.Mscoco(train=False)
+
+ train_loader = torch.utils.data.DataLoader(
+ train_dataset, batch_size=opt.trainBatch, shuffle=True, num_workers=opt.nThreads, pin_memory=True)
+
+ val_loader = torch.utils.data.DataLoader(
+ val_dataset, batch_size=opt.validBatch, shuffle=False, num_workers=opt.nThreads, pin_memory=True)
+
+ # Model Transfer
+ m = torch.nn.DataParallel(m).cuda()
+
+ # Start Training
+ for i in range(opt.nEpochs):
+ opt.epoch = i
+
+ print('############# Starting Epoch {} #############'.format(opt.epoch))
+ loss, acc = train(train_loader, m, criterion, optimizer, writer)
+
+ print('Train-{idx:d} epoch | loss:{loss:.8f} | acc:{acc:.4f}'.format(
+ idx=opt.epoch,
+ loss=loss,
+ acc=acc
+ ))
+
+ opt.acc = acc
+ opt.loss = loss
+ m_dev = m.module
+ if i % opt.snapshot == 0:
+ torch.save(
+ m_dev.state_dict(), '../exp/{}/{}/model_{}.pkl'.format(opt.dataset, opt.expID, opt.epoch))
+ torch.save(
+                opt, '../exp/{}/{}/option.pkl'.format(opt.dataset, opt.expID))
+ torch.save(
+ optimizer, '../exp/{}/{}/optimizer.pkl'.format(opt.dataset, opt.expID))
+
+ loss, acc = valid(val_loader, m, criterion, optimizer, writer)
+
+ print('Valid-{idx:d} epoch | loss:{loss:.8f} | acc:{acc:.4f}'.format(
+ idx=i,
+ loss=loss,
+ acc=acc
+ ))
+
+ '''
+ if opt.dataset != 'mpii':
+ with torch.no_grad():
+ mAP, mAP5 = prediction(m)
+
+ print('Prediction-{idx:d} epoch | mAP:{mAP:.3f} | mAP0.5:{mAP5:.3f}'.format(
+ idx=i,
+ mAP=mAP,
+ mAP5=mAP5
+ ))
+ '''
+ writer.close()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/joints_detectors/Alphapose/train_sppe/src/utils/dataset/__init__.py b/joints_detectors/Alphapose/train_sppe/src/utils/dataset/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/train_sppe/src/utils/dataset/coco.py b/joints_detectors/Alphapose/train_sppe/src/utils/dataset/coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..650e13b610c3f8fba273f674fc071628743345ef
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/utils/dataset/coco.py
@@ -0,0 +1,79 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import os
+import h5py
+
+import torch.utils.data as data
+from ..pose import generateSampleBox
+from opt import opt
+
+
+class Mscoco(data.Dataset):
+ def __init__(self, train=True, sigma=1,
+ scale_factor=(0.2, 0.3), rot_factor=40, label_type='Gaussian'):
+ self.img_folder = '../data/coco/images' # root image folders
+ self.is_train = train # training set or test set
+ self.inputResH = opt.inputResH
+ self.inputResW = opt.inputResW
+ self.outputResH = opt.outputResH
+ self.outputResW = opt.outputResW
+ self.sigma = sigma
+ self.scale_factor = scale_factor
+ self.rot_factor = rot_factor
+ self.label_type = label_type
+
+ self.nJoints_coco = 17
+ self.nJoints = 17
+
+ self.accIdxs = (1, 2, 3, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17)
+ self.flipRef = ((2, 3), (4, 5), (6, 7),
+ (8, 9), (10, 11), (12, 13),
+ (14, 15), (16, 17))
+
+        # Create train/val split: the last 5887 annotations are held out for validation
+ with h5py.File('../data/coco/annot_coco.h5', 'r') as annot:
+ # train
+ self.imgname_coco_train = annot['imgname'][:-5887]
+ self.bndbox_coco_train = annot['bndbox'][:-5887]
+ self.part_coco_train = annot['part'][:-5887]
+ # val
+ self.imgname_coco_val = annot['imgname'][-5887:]
+ self.bndbox_coco_val = annot['bndbox'][-5887:]
+ self.part_coco_val = annot['part'][-5887:]
+
+ self.size_train = self.imgname_coco_train.shape[0]
+ self.size_val = self.imgname_coco_val.shape[0]
+
+ def __getitem__(self, index):
+ sf = self.scale_factor
+
+ if self.is_train:
+ part = self.part_coco_train[index]
+ bndbox = self.bndbox_coco_train[index]
+ imgname = self.imgname_coco_train[index]
+ else:
+ part = self.part_coco_val[index]
+ bndbox = self.bndbox_coco_val[index]
+ imgname = self.imgname_coco_val[index]
+
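+        # imgname is stored as an array of character codes; decode it into a string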
+        imgname = ''.join(chr(int(c)) for c in imgname)
+ img_path = os.path.join(self.img_folder, imgname)
+
+ metaData = generateSampleBox(img_path, bndbox, part, self.nJoints,
+ 'coco', sf, self, train=self.is_train)
+
+ inp, out, setMask = metaData
+
+ return inp, out, setMask, 'coco'
+
+ def __len__(self):
+ if self.is_train:
+ return self.size_train
+ else:
+ return self.size_val
diff --git a/joints_detectors/Alphapose/train_sppe/src/utils/eval.py b/joints_detectors/Alphapose/train_sppe/src/utils/eval.py
new file mode 100644
index 0000000000000000000000000000000000000000..881b69bf1cd46e4448751550b08f75fd8902cb3e
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/utils/eval.py
@@ -0,0 +1,208 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+from opt import opt
+import sys
+import numpy as np
+
+import torch
+from pycocotools.coco import COCO
+from pycocotools.cocoeval import COCOeval
+
+from .img import transformBoxInvert
+
+
+class DataLogger(object):
+ def __init__(self):
+ self.clear()
+
+ def clear(self):
+ self.value = 0
+ self.sum = 0
+ self.cnt = 0
+ self.avg = 0
+
+ def update(self, value, n=1):
+ self.value = value
+ self.sum += value * n
+ self.cnt += n
+ self._cal_avg()
+
+ def _cal_avg(self):
+ self.avg = self.sum / self.cnt
+
+
+class NullWriter(object):
+ def write(self, arg):
+ pass
+
+
+def accuracy(output, label, dataset, out_offset=None):
+    if isinstance(output, list):
+ return accuracy(output[opt.nStack - 1], label[opt.nStack - 1], dataset, out_offset)
+ else:
+ return heatmapAccuracy(output.cpu().data, label.cpu().data, dataset.accIdxs)
+
+
+def heatmapAccuracy(output, label, idxs):
+ preds = getPreds(output)
+ gt = getPreds(label)
+
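+    # Normalize pixel distances by a tenth of the heatmap height (PCK-style threshold)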
+ norm = torch.ones(preds.size(0)) * opt.outputResH / 10
+ dists = calc_dists(preds, gt, norm)
+
+ acc = torch.zeros(len(idxs) + 1)
+ avg_acc = 0
+ cnt = 0
+ for i in range(len(idxs)):
+ acc[i + 1] = dist_acc(dists[idxs[i] - 1])
+ if acc[i + 1] >= 0:
+ avg_acc = avg_acc + acc[i + 1]
+ cnt += 1
+ if cnt != 0:
+ acc[0] = avg_acc / cnt
+ return acc
+
+
+def getPreds(hm):
+ ''' get predictions from score maps in torch Tensor
+ return type: torch.LongTensor
+ '''
+ assert hm.dim() == 4, 'Score maps should be 4-dim'
+ maxval, idx = torch.max(hm.view(hm.size(0), hm.size(1), -1), 2)
+
+ maxval = maxval.view(hm.size(0), hm.size(1), 1)
+ idx = idx.view(hm.size(0), hm.size(1), 1) + 1
+
+ preds = idx.repeat(1, 1, 2).float()
+
+ preds[:, :, 0] = (preds[:, :, 0] - 1) % hm.size(3)
+ preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / hm.size(3))
+
+ # pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
+ # preds *= pred_mask
+ return preds
+
+
+def calc_dists(preds, target, normalize):
+ preds = preds.float().clone()
+ target = target.float().clone()
+ dists = torch.zeros(preds.size(1), preds.size(0))
+ for n in range(preds.size(0)):
+ for c in range(preds.size(1)):
+ if target[n, c, 0] > 0 and target[n, c, 1] > 0:
+ dists[c, n] = torch.dist(
+ preds[n, c, :], target[n, c, :]) / normalize[n]
+ else:
+ dists[c, n] = -1
+ return dists
+
+
+def dist_acc(dists, thr=0.5):
+ ''' Return percentage below threshold while ignoring values with a -1 '''
+ if dists.ne(-1).sum() > 0:
+ return dists.le(thr).eq(dists.ne(-1)).float().sum() * 1.0 / dists.ne(-1).float().sum()
+ else:
+        return -1
+
+
+def postprocess(output):
+ p = getPreds(output)
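+    # Refine each argmax by a quarter-pixel shift toward the higher neighbouring bin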
+
+ for i in range(p.size(0)):
+ for j in range(p.size(1)):
+ hm = output[i][j]
+ pX, pY = int(round(p[i][j][0])), int(round(p[i][j][1]))
+ if 0 < pX < opt.outputResW - 1 and 0 < pY < opt.outputResH - 1:
+ diff = torch.Tensor(
+ (hm[pY][pX + 1] - hm[pY][pX - 1], hm[pY + 1][pX] - hm[pY - 1][pX]))
+ p[i][j] += diff.sign() * 0.25
+ p -= 0.5
+
+ return p
+
+
+def getPrediction(hms, pt1, pt2, inpH, inpW, resH, resW):
+ assert hms.dim() == 4, 'Score maps should be 4-dim'
+ maxval, idx = torch.max(hms.view(hms.size(0), hms.size(1), -1), 2)
+
+ maxval = maxval.view(hms.size(0), hms.size(1), 1)
+ idx = idx.view(hms.size(0), hms.size(1), 1) + 1
+
+ preds = idx.repeat(1, 1, 2).float()
+
+ preds[:, :, 0] = (preds[:, :, 0] - 1) % hms.size(3)
+ preds[:, :, 1] = torch.floor((preds[:, :, 1] - 1) / hms.size(3))
+
+ pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
+ preds *= pred_mask
+
+ # Very simple post-processing step to improve performance at tight PCK thresholds
+ for i in range(preds.size(0)):
+ for j in range(preds.size(1)):
+ hm = hms[i][j]
+ pX, pY = int(round(float(preds[i][j][0]))), int(
+ round(float(preds[i][j][1])))
+ if 1 < pX < opt.outputResW - 2 and 1 < pY < opt.outputResH - 2:
+ diff = torch.Tensor(
+ (hm[pY][pX + 1] - hm[pY][pX - 1], hm[pY + 1][pX] - hm[pY - 1][pX]))
+ diff = diff.sign() * 0.25
+ diff[1] = diff[1] * inpH / inpW
+ preds[i][j] += diff
+
+ preds_tf = torch.zeros(preds.size())
+ for i in range(hms.size(0)): # Number of samples
+ for j in range(hms.size(1)): # Number of output heatmaps for one sample
+ preds_tf[i][j] = transformBoxInvert(
+ preds[i][j], pt1[i], pt2[i], inpH, inpW, resH, resW)
+
+ return preds, preds_tf, maxval
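+
+# The loop above applies a quarter-pixel refinement: each argmax location is
+# nudged 0.25 px toward the neighbour with the higher score, a cheap
+# sub-pixel estimate. With made-up scores around a peak at x:
+#
+#   [0.1, 0.9, 0.4]  ->  diff = 0.4 - 0.1 > 0  ->  prediction becomes x + 0.25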
+
+
+def getmap(JsonDir='./val/alphapose-results.json'):
+ ListDir = '../coco-minival500_images.txt'
+
+ annType = ['segm', 'bbox', 'keypoints']
+ annType = annType[2] # specify type here
+ prefix = 'person_keypoints' if annType == 'keypoints' else 'instances'
+ print('Running evaluation for *%s* results.' % (annType))
+
+ # load Ground_truth
+ dataType = 'val2014'
+ annFile = '../%s_%s.json' % (prefix, dataType)
+ cocoGt = COCO(annFile)
+
+ # load Answer(json)
+ resFile = JsonDir
+ cocoDt = cocoGt.loadRes(resFile)
+
+ # load List
+ with open(ListDir, 'r') as fin:
+ imgIds_str = fin.readline()
+ if imgIds_str[-1] == '\n':
+ imgIds_str = imgIds_str[:-1]
+ imgIds_str = imgIds_str.split(',')
+
+ imgIds = []
+ for x in imgIds_str:
+ imgIds.append(int(x))
+
+ # running evaluation
+ iouThrs = np.linspace(.5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True)
+ t = np.where(0.5 == iouThrs)[0]
+
+ cocoEval = COCOeval(cocoGt, cocoDt, annType)
+ cocoEval.params.imgIds = imgIds
+ cocoEval.evaluate()
+ cocoEval.accumulate()
+
+ score = cocoEval.eval['precision'][:, :, :, 0, :]
+ mApAll, mAp5 = 0.01, 0.01
+ if len(score[score > -1]) != 0:
+ score2 = score[t]
+ mApAll = np.mean(score[score > -1])
+ mAp5 = np.mean(score2[score2 > -1])
+ cocoEval.summarize()
+ return mApAll, mAp5
diff --git a/joints_detectors/Alphapose/train_sppe/src/utils/img.py b/joints_detectors/Alphapose/train_sppe/src/utils/img.py
new file mode 100644
index 0000000000000000000000000000000000000000..45edc833e1306367697438f03879766cfe3883d3
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/utils/img.py
@@ -0,0 +1,308 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+import numpy as np
+import torch
+import scipy.misc
+import torch.nn.functional as F
+import cv2
+from opt import opt
+
+
+RED = (0, 0, 255)
+GREEN = (0, 255, 0)
+BLUE = (255, 0, 0)
+CYAN = (255, 255, 0)
+YELLOW = (0, 255, 255)
+ORANGE = (0, 165, 255)
+PURPLE = (255, 0, 255)
+
+
+def im_to_torch(img):
+ img = np.transpose(img, (2, 0, 1)) # C*H*W
+ img = to_torch(img).float()
+ if img.max() > 1:
+ img /= 255
+ return img
+
+
+def torch_to_im(img):
+ img = to_numpy(img)
+ img = np.transpose(img, (1, 2, 0)) # C*H*W
+ return img
+
+
+def load_image(img_path):
+ # H x W x C => C x H x W
+ # Note: scipy.misc.imread was removed in SciPy >= 1.2; with a newer SciPy,
+ # imageio.imread(img_path) is a near drop-in replacement.
+ return im_to_torch(scipy.misc.imread(img_path, mode='RGB'))
+
+
+def to_numpy(tensor):
+ if torch.is_tensor(tensor):
+ return tensor.cpu().numpy()
+ elif type(tensor).__module__ != 'numpy':
+ raise ValueError("Cannot convert {} to numpy array"
+ .format(type(tensor)))
+ return tensor
+
+
+def to_torch(ndarray):
+ if type(ndarray).__module__ == 'numpy':
+ return torch.from_numpy(ndarray)
+ elif not torch.is_tensor(ndarray):
+ raise ValueError("Cannot convert {} to torch tensor"
+ .format(type(ndarray)))
+ return ndarray
+
+
+def drawGaussian(img, pt, sigma):
+ img = to_numpy(img)
+ tmpSize = 3 * sigma
+ # Check that any part of the gaussian is in-bounds
+ ul = [int(pt[0] - tmpSize), int(pt[1] - tmpSize)]
+ br = [int(pt[0] + tmpSize + 1), int(pt[1] + tmpSize + 1)]
+
+ if (ul[0] >= img.shape[1] or ul[1] >= img.shape[0] or
+ br[0] < 0 or br[1] < 0):
+ # If not, just return the image as is
+ return to_torch(img)
+
+ # Generate gaussian
+ size = 2 * tmpSize + 1
+ x = np.arange(0, size, 1, float)
+ y = x[:, np.newaxis]
+ x0 = y0 = size // 2
+ sigma = size / 4.0  # note: this supersedes the sigma argument passed in
+ # The gaussian is not normalized, we want the center value to equal 1
+ g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
+
+ # Usable gaussian range
+ g_x = max(0, -ul[0]), min(br[0], img.shape[1]) - ul[0]
+ g_y = max(0, -ul[1]), min(br[1], img.shape[0]) - ul[1]
+ # Image range
+ img_x = max(0, ul[0]), min(br[0], img.shape[1])
+ img_y = max(0, ul[1]), min(br[1], img.shape[0])
+
+ img[img_y[0]:img_y[1], img_x[0]:img_x[1]] = g[g_y[0]:g_y[1], g_x[0]:g_x[1]]
+ return to_torch(img)
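+
+# Sketch: stamping a Gaussian label onto an empty heatmap (assumed shapes):
+#
+#   hm = drawGaussian(torch.zeros(64, 48), (20, 10), sigma=1)
+#   float(hm[10, 20])   # -> 1.0, the unnormalized peak at the keypoint
+#
+# Values decay with distance from (20, 10); pixels outside the window of
+# side 2 * tmpSize + 1 stay zero.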
+
+
+def transformBox(pt, ul, br, inpH, inpW, resH, resW):
+ center = torch.zeros(2)
+ center[0] = (br[0] - 1 - ul[0]) / 2
+ center[1] = (br[1] - 1 - ul[1]) / 2
+
+ lenH = max(br[1] - ul[1], (br[0] - ul[0]) * inpH / inpW)
+ lenW = lenH * inpW / inpH
+
+ _pt = torch.zeros(2)
+ _pt[0] = pt[0] - ul[0]
+ _pt[1] = pt[1] - ul[1]
+ # Move to center
+ _pt[0] = _pt[0] + max(0, (lenW - 1) / 2 - center[0])
+ _pt[1] = _pt[1] + max(0, (lenH - 1) / 2 - center[1])
+ pt = (_pt * resH) / lenH
+ pt[0] = round(float(pt[0]))
+ pt[1] = round(float(pt[1]))
+ return pt.int()
+
+
+def transformBoxInvert(pt, ul, br, inpH, inpW, resH, resW):
+ center = torch.zeros(2)
+ center[0] = (br[0] - 1 - ul[0]) / 2
+ center[1] = (br[1] - 1 - ul[1]) / 2
+
+ lenH = max(br[1] - ul[1], (br[0] - ul[0]) * inpH / inpW)
+ lenW = lenH * inpW / inpH
+
+ _pt = (pt * lenH) / resH
+ _pt[0] = _pt[0] - max(0, (lenW - 1) / 2 - center[0])
+ _pt[1] = _pt[1] - max(0, (lenH - 1) / 2 - center[1])
+
+ new_point = torch.zeros(2)
+ new_point[0] = _pt[0] + ul[0]
+ new_point[1] = _pt[1] + ul[1]
+ return new_point
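+
+# transformBox and transformBoxInvert are near-inverses: the former maps an
+# image-space point into heatmap coordinates for a given box, the latter maps
+# a heatmap prediction back; only transformBox's final rounding is lost.
+# With a hypothetical box and point:
+#
+#   ul, br = torch.Tensor([100, 50]), torch.Tensor([200, 250])
+#   hm_pt = transformBox(torch.Tensor([150, 150]), ul, br, 256, 192, 64, 48)
+#   transformBoxInvert(hm_pt.float(), ul, br, 256, 192, 64, 48)  # ~(150, 150)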
+
+
+def cropBox(img, ul, br, resH, resW):
+ ul = ul.int()
+ br = (br - 1).int()
+ # br = br.int()
+ lenH = max((br[1] - ul[1]).item(), (br[0] - ul[0]).item() * resH / resW)
+ lenW = lenH * resW / resH
+ if img.dim() == 2:
+ img = img[np.newaxis, :]
+
+ box_shape = [br[1] - ul[1], br[0] - ul[0]]
+ pad_size = [(lenH - box_shape[0]) // 2, (lenW - box_shape[1]) // 2]
+ # Padding Zeros
+ img[:, :ul[1], :], img[:, :, :ul[0]] = 0, 0
+ img[:, br[1] + 1:, :], img[:, :, br[0] + 1:] = 0, 0
+
+ src = np.zeros((3, 2), dtype=np.float32)
+ dst = np.zeros((3, 2), dtype=np.float32)
+
+ src[0, :] = np.array([ul[0] - pad_size[1], ul[1] - pad_size[0]], np.float32)
+ src[1, :] = np.array([br[0] + pad_size[1], br[1] + pad_size[0]], np.float32)
+ dst[0, :] = 0
+ dst[1, :] = np.array([resW - 1, resH - 1], np.float32)
+
+ src[2:, :] = get_3rd_point(src[0, :], src[1, :])
+ dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
+
+ trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))
+
+ dst_img = cv2.warpAffine(torch_to_im(img), trans,
+ (resW, resH), flags=cv2.INTER_LINEAR)
+
+ return im_to_torch(torch.Tensor(dst_img))
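+
+# cropBox cuts the region [ul, br] out of a 3 x H x W image tensor, pads it
+# to the resH:resW aspect ratio, and resamples it with one affine warp:
+#
+#   patch = cropBox(img, torch.Tensor([100, 50]), torch.Tensor([200, 250]),
+#                   256, 192)   # -> tensor of shape (3, 256, 192)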
+
+
+def cv_rotate(img, rot, resW, resH):
+
+ center = np.array((resW - 1, resH - 1)) / 2
+ rot_rad = np.pi * rot / 180
+
+ src_dir = get_dir([0, (resH - 1) * -0.5], rot_rad)
+ dst_dir = np.array([0, (resH - 1) * -0.5], np.float32)
+
+ src = np.zeros((3, 2), dtype=np.float32)
+ dst = np.zeros((3, 2), dtype=np.float32)
+
+ src[0, :] = center
+ src[1, :] = center + src_dir
+ dst[0, :] = [(resW - 1) * 0.5, (resH - 1) * 0.5]
+ dst[1, :] = np.array([(resW - 1) * 0.5, (resH - 1) * 0.5]) + dst_dir
+
+ src[2:, :] = get_3rd_point(src[0, :], src[1, :])
+ dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
+
+ trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))
+
+ dst_img = cv2.warpAffine(torch_to_im(img), trans,
+ (resW, resH), flags=cv2.INTER_LINEAR)
+
+ return im_to_torch(torch.Tensor(dst_img))
+
+
+def flip(x):
+ assert (x.dim() == 3 or x.dim() == 4)
+ # Feature-detect the native Tensor.flip (added in torch 0.4.1); matching
+ # the version string '0.4.1' missed every newer release and fell through
+ # to the slow numpy fallback below.
+ if hasattr(x, 'flip'):
+ dim = x.dim() - 1
+ return x.flip(dims=(dim,))
+ else:
+ is_cuda = False
+ if x.is_cuda:
+ x = x.cpu()
+ is_cuda = True
+ x = x.numpy().copy()
+ if x.ndim == 3:
+ x = np.transpose(np.fliplr(np.transpose(x, (0, 2, 1))), (0, 2, 1))
+ elif x.ndim == 4:
+ for i in range(x.shape[0]):
+ x[i] = np.transpose(
+ np.fliplr(np.transpose(x[i], (0, 2, 1))), (0, 2, 1))
+ x = torch.from_numpy(x.copy())
+ if is_cuda:
+ x = x.cuda()
+ return x
+
+
+def shuffleLR(x, dataset):
+ flipRef = dataset.flipRef
+ assert (x.dim() == 3 or x.dim() == 4)
+ for pair in flipRef:
+ dim0, dim1 = pair
+ dim0 -= 1
+ dim1 -= 1
+ if x.dim() == 4:
+ tmp = x[:, dim1].clone()
+ x[:, dim1] = x[:, dim0].clone()
+ x[:, dim0] = tmp.clone()
+ #x[:, dim0], x[:, dim1] = deepcopy((x[:, dim1], x[:, dim0]))
+ else:
+ tmp = x[dim1].clone()
+ x[dim1] = x[dim0].clone()
+ x[dim0] = tmp.clone()
+ #x[dim0], x[dim1] = deepcopy((x[dim1], x[dim0]))
+ return x
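+
+# shuffleLR swaps symmetric joint channels after a horizontal flip. flipRef
+# holds 1-indexed left/right pairs, so a hypothetical flipRef = [(2, 3)]
+# exchanges channels 1 and 2 (0-indexed). Typical flip-augmentation call:
+#
+#   out = shuffleLR(flip(out), dataset)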
+
+
+def vis_frame(frame, im_res, format='coco'):
+ '''
+ frame: frame image
+ im_res: prediction results for this frame (see the sketch of the assumed
+ structure after this function)
+ format: coco or mpii
+
+ return rendered image
+ '''
+ if format == 'coco':
+ l_pair = [
+ (0, 1), (0, 2), (1, 3), (2, 4), # Head
+ (5, 6), (5, 7), (7, 9), (6, 8), (8, 10),
+ (5, 11), (6, 12), # Body
+ (11, 13), (12, 14), (13, 15), (14, 16)
+ ]
+ p_color = [RED, RED, RED, RED, RED, YELLOW, YELLOW, YELLOW,
+ YELLOW, YELLOW, YELLOW, GREEN, GREEN, GREEN, GREEN, GREEN, GREEN]
+ line_color = [YELLOW, YELLOW, YELLOW, YELLOW, BLUE, BLUE,
+ BLUE, BLUE, BLUE, PURPLE, PURPLE, RED, RED, RED, RED]
+ elif format == 'mpii':
+ l_pair = [
+ (8, 9), (11, 12), (11, 10), (2, 1), (1, 0),
+ (13, 14), (14, 15), (3, 4), (4, 5),
+ (8, 7), (7, 6), (6, 2), (6, 3), (8, 12), (8, 13)
+ ]
+ p_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE, RED,
+ RED, PURPLE, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+ line_color = [PURPLE, BLUE, BLUE, RED, RED, BLUE, BLUE,
+ RED, RED, PURPLE, PURPLE, RED, RED, BLUE, BLUE]
+ else:
+ raise NotImplementedError
+
+ im_name = im_res['imgname'].split('/')[-1]
+ img = frame.copy()
+ for human in im_res['result']:
+ part_line = {}
+ kp_preds = human['keypoints']
+ kp_scores = human['kp_score']
+ # Draw keypoints
+ for n in range(kp_scores.shape[0]):
+ if kp_scores[n] <= 0.15:
+ continue
+ cor_x, cor_y = int(kp_preds[n, 0]), int(kp_preds[n, 1])
+ part_line[n] = (cor_x, cor_y)
+ cv2.circle(img, (cor_x, cor_y), 4, p_color[n], -1)
+ # Now create a mask of logo and create its inverse mask also
+ #transparency = max(0, min(1, kp_scores[n]))
+ #img = cv2.addWeighted(bg, transparency, img, 1, 0)
+ # Draw limbs
+ for i, (start_p, end_p) in enumerate(l_pair):
+ if start_p in part_line and end_p in part_line:
+ start_xy = part_line[start_p]
+ end_xy = part_line[end_p]
+ # cv2 requires an integer line thickness
+ cv2.line(img, start_xy, end_xy,
+ line_color[i], int(0.5 * (kp_scores[start_p] + kp_scores[end_p])) + 1)
+ #transparency = max(
+ # 0, min(1, (kp_scores[start_p] + kp_scores[end_p])))
+ #img = cv2.addWeighted(bg, transparency, img, 1, 0)
+ return img
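+
+# Structure of im_res assumed by vis_frame, inferred from the indexing above:
+#
+#   im_res = {
+#       'imgname': 'frames/0001.jpg',
+#       'result': [
+#           {'keypoints': kp,      # tensor of shape (n_joints, 2)
+#            'kp_score': score},   # tensor of shape (n_joints, 1)
+#       ],
+#   }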
+
+
+def get_3rd_point(a, b):
+ direct = a - b
+ return b + np.array([-direct[1], direct[0]], dtype=np.float32)
+
+
+def get_dir(src_point, rot_rad):
+ sn, cs = np.sin(rot_rad), np.cos(rot_rad)
+
+ src_result = [0, 0]
+ src_result[0] = src_point[0] * cs - src_point[1] * sn
+ src_result[1] = src_point[0] * sn + src_point[1] * cs
+
+ return src_result
diff --git a/joints_detectors/Alphapose/train_sppe/src/utils/pose.py b/joints_detectors/Alphapose/train_sppe/src/utils/pose.py
new file mode 100644
index 0000000000000000000000000000000000000000..cea08a948b9abcc98bf07c7a4b5f4b7310efb132
--- /dev/null
+++ b/joints_detectors/Alphapose/train_sppe/src/utils/pose.py
@@ -0,0 +1,142 @@
+# -----------------------------------------------------
+# Copyright (c) Shanghai Jiao Tong University. All rights reserved.
+# Written by Jiefeng Li (jeff.lee.sjtu@gmail.com)
+# -----------------------------------------------------
+
+from utils.img import (load_image, drawGaussian, cropBox, transformBox, flip, shuffleLR, cv_rotate)
+import torch
+import numpy as np
+import random
+from opt import opt
+
+
+def rnd(x):
+ return max(-2 * x, min(2 * x, np.random.randn(1)[0] * x))
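+
+# rnd draws from N(0, x^2) truncated to [-2x, 2x]; generateSampleBox uses it
+# below to sample a rotation angle, e.g. rnd(30) lies in [-60, 60] and is
+# concentrated around 0.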
+
+
+def generateSampleBox(img_path, bndbox, part, nJoints, imgset, scale_factor, dataset, train=True, nJoints_coco=17):
+
+ img = load_image(img_path)
+ if train:
+ img[0].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+ img[1].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+ img[2].mul_(random.uniform(0.7, 1.3)).clamp_(0, 1)
+
+ img[0].add_(-0.406)
+ img[1].add_(-0.457)
+ img[2].add_(-0.480)
+
+ upLeft = torch.Tensor((int(bndbox[0][0]), int(bndbox[0][1])))
+ bottomRight = torch.Tensor((int(bndbox[0][2]), int(bndbox[0][3])))
+ ht = bottomRight[1] - upLeft[1]
+ width = bottomRight[0] - upLeft[0]
+ imght = img.shape[1]
+ imgwidth = img.shape[2]
+ scaleRate = random.uniform(*scale_factor)
+
+ upLeft[0] = max(0, upLeft[0] - width * scaleRate / 2)
+ upLeft[1] = max(0, upLeft[1] - ht * scaleRate / 2)
+ bottomRight[0] = min(imgwidth - 1, bottomRight[0] + width * scaleRate / 2)
+ bottomRight[1] = min(imght - 1, bottomRight[1] + ht * scaleRate / 2)
+
+ # Doing Random Sample
+ if opt.addDPG:
+ PatchScale = random.uniform(0, 1)
+ if PatchScale > 0.85:
+ ratio = ht / width
+ if (width < ht):
+ patchWidth = PatchScale * width
+ patchHt = patchWidth * ratio
+ else:
+ patchHt = PatchScale * ht
+ patchWidth = patchHt / ratio
+
+ xmin = upLeft[0] + random.uniform(0, 1) * (width - patchWidth)
+ ymin = upLeft[1] + random.uniform(0, 1) * (ht - patchHt)
+ xmax = xmin + patchWidth + 1
+ ymax = ymin + patchHt + 1
+ else:
+ xmin = max(
+ 1, min(upLeft[0] + np.random.normal(-0.0142, 0.1158) * width, imgwidth - 3))
+ ymin = max(
+ 1, min(upLeft[1] + np.random.normal(0.0043, 0.068) * ht, imght - 3))
+ xmax = min(max(
+ xmin + 2, bottomRight[0] + np.random.normal(0.0154, 0.1337) * width), imgwidth - 3)
+ ymax = min(
+ max(ymin + 2, bottomRight[1] + np.random.normal(-0.0013, 0.0711) * ht), imght - 3)
+
+ upLeft[0] = xmin
+ upLeft[1] = ymin
+ bottomRight[0] = xmax
+ bottomRight[1] = ymax
+
+ # Counting Joints number
+ jointNum = 0
+ if imgset == 'coco':
+ for i in range(17):
+ if part[i][0] > 0 and part[i][0] > upLeft[0] and part[i][1] > upLeft[1] \
+ and part[i][0] < bottomRight[0] and part[i][1] < bottomRight[1]:
+ jointNum += 1
+
+ # Doing Random Crop
+ if opt.addDPG:
+ if jointNum > 13 and train:
+ switch = random.uniform(0, 1)
+ if switch > 0.96:
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.92:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.88:
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.84:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.80:
+ bottomRight[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.76:
+ upLeft[0] = (upLeft[0] + bottomRight[0]) / 2
+ elif switch > 0.72:
+ bottomRight[1] = (upLeft[1] + bottomRight[1]) / 2
+ elif switch > 0.68:
+ upLeft[1] = (upLeft[1] + bottomRight[1]) / 2
+
+ inputResH, inputResW = opt.inputResH, opt.inputResW
+ outputResH, outputResW = opt.outputResH, opt.outputResW
+
+ inp = cropBox(img, upLeft, bottomRight, inputResH, inputResW)
+
+ if jointNum == 0:
+ inp = torch.zeros(3, inputResH, inputResW)
+
+ out = torch.zeros(nJoints, outputResH, outputResW)
+ setMask = torch.zeros(nJoints, outputResH, outputResW)
+ # Draw Label
+ if imgset == 'coco':
+ for i in range(nJoints_coco):
+ if part[i][0] > 0 and part[i][0] > upLeft[0] and part[i][1] > upLeft[1] \
+ and part[i][0] < bottomRight[0] and part[i][1] < bottomRight[1]:
+ hm_part = transformBox(
+ part[i], upLeft, bottomRight, inputResH, inputResW, outputResH, outputResW)
+
+ out[i] = drawGaussian(out[i], hm_part, opt.hmGauss)
+
+ setMask[i].add_(1)
+
+ if train:
+ # Flip
+ if random.uniform(0, 1) < 0.5:
+ inp = flip(inp)
+ out = shuffleLR(flip(out), dataset)
+
+ # Rotate
+ r = rnd(opt.rotate)
+ if random.uniform(0, 1) < 0.6:
+ r = 0
+ if r != 0:
+ inp = cv_rotate(inp, r, opt.inputResW, opt.inputResH)
+ out = cv_rotate(out, r, opt.outputResW, opt.outputResH)
+
+ return inp, out, setMask
diff --git a/joints_detectors/Alphapose/video_demo.py b/joints_detectors/Alphapose/video_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..be14651a56725e82bb1b1e0f6a8ab1c76951c2ac
--- /dev/null
+++ b/joints_detectors/Alphapose/video_demo.py
@@ -0,0 +1,120 @@
+import os
+import sys
+
+import numpy as np
+import torch
+
+from SPPE.src.main_fast_inference import *
+from dataloader import ImageLoader, DetectionLoader, DetectionProcessor, DataWriter, Mscoco
+from fn import getTime
+from opt import opt
+from pPose_nms import write_json
+from tqdm import tqdm
+
+
+def main(args):
+ inputpath = args.inputpath
+ inputlist = args.inputlist
+ mode = args.mode
+ if not os.path.exists(args.outputpath):
+ os.mkdir(args.outputpath)
+
+ if len(inputlist):
+ with open(inputlist, 'r') as f:
+ im_names = f.readlines()
+ elif len(inputpath) and inputpath != '/':
+ # Take only the top-level directory; the original os.walk loop kept
+ # overwriting im_names with each subdirectory's file list.
+ im_names = next(os.walk(inputpath))[2]
+ else:
+ raise IOError('Error: must specify either --indir or --list')
+
+ # Load input images
+ data_loader = ImageLoader(im_names, batchSize=args.detbatch, format='yolo').start()
+
+ # Load detection loader
+ print('Loading YOLO model..')
+ sys.stdout.flush()
+ det_loader = DetectionLoader(data_loader, batchSize=args.detbatch).start()
+ det_processor = DetectionProcessor(det_loader).start()
+
+ # Load pose model
+ pose_dataset = Mscoco()
+ if args.fast_inference:
+ pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
+ else:
+ pose_model = InferenNet(4 * 1 + 1, pose_dataset)
+ pose_model.cuda()
+ pose_model.eval()
+
+ runtime_profile = {
+ 'dt': [],
+ 'pt': [],
+ 'pn': []
+ }
+
+ # Init data writer
+ writer = DataWriter(args.save_video).start()
+
+ data_len = data_loader.length()
+ im_names_desc = tqdm(range(data_len))
+
+ batchSize = args.posebatch
+ for i in im_names_desc:
+ start_time = getTime()
+ with torch.no_grad():
+ (inps, orig_img, im_name, boxes, scores, pt1, pt2) = det_processor.read()
+ if boxes is None or boxes.nelement() == 0:
+ writer.save(None, None, None, None, None, orig_img, im_name.split('/')[-1])
+ continue
+
+ ckpt_time, det_time = getTime(start_time)
+ runtime_profile['dt'].append(det_time)
+ # Pose Estimation
+
+ datalen = inps.size(0)
+ leftover = 0
+ if (datalen) % batchSize:
+ leftover = 1
+ num_batches = datalen // batchSize + leftover
+ hm = []
+ for j in range(num_batches):
+ inps_j = inps[j * batchSize:min((j + 1) * batchSize, datalen)].cuda()
+ hm_j = pose_model(inps_j)
+ hm.append(hm_j)
+ hm = torch.cat(hm)
+ ckpt_time, pose_time = getTime(ckpt_time)
+ runtime_profile['pt'].append(pose_time)
+ hm = hm.cpu()
+ writer.save(boxes, scores, hm, pt1, pt2, orig_img, im_name.split('/')[-1])
+
+ ckpt_time, post_time = getTime(ckpt_time)
+ runtime_profile['pn'].append(post_time)
+
+ if args.profile:
+ # TQDM
+ im_names_desc.set_description(
+ 'det time: {dt:.3f} | pose time: {pt:.2f} | post processing: {pn:.4f}'.format(
+ dt=np.mean(runtime_profile['dt']), pt=np.mean(runtime_profile['pt']), pn=np.mean(runtime_profile['pn']))
+ )
+
+ print('===========================> Finish Model Running.')
+ if (args.save_img or args.save_video) and not args.vis_fast:
+ print('===========================> Rendering remaining images in the queue...')
+ print('===========================> If this step takes too long, you can enable the --vis_fast flag to use fast rendering (real-time).')
+ while (writer.running()):
+ pass
+ writer.stop()
+ final_result = writer.results()
+ write_json(final_result, args.outputpath)
+
+
+if __name__ == "__main__":
+ args = opt
+ args.dataset = 'coco'
+ args.sp = True
+ if not args.sp:
+ torch.multiprocessing.set_start_method('forkserver', force=True)
+ torch.multiprocessing.set_sharing_strategy('file_system')
+
+ video_name = 'kunkun'
+ args.inputpath = f'data/split_{video_name}'
+ args.outputpath = f'data/alphapose_{video_name}'
+
+ args.save_img = True
+
+ main(args)
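+
+# Hypothetical invocation, assuming frames were already extracted into
+# data/split_kunkun and that opt exposes these attribute names as CLI flags:
+#
+#   python video_demo.py --detbatch 4 --posebatch 80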
diff --git a/joints_detectors/Alphapose/webcam_demo.py b/joints_detectors/Alphapose/webcam_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fde77943ede76cd791cde45ce005082bd7c5d03
--- /dev/null
+++ b/joints_detectors/Alphapose/webcam_demo.py
@@ -0,0 +1,115 @@
+import os
+import sys
+
+import cv2
+import numpy as np
+import torch
+from tqdm import tqdm
+
+from SPPE.src.main_fast_inference import *
+from dataloader_webcam import WebcamLoader, DetectionLoader, DetectionProcessor, DataWriter, Mscoco
+from fn import getTime
+from opt import opt
+from pPose_nms import write_json
+
+args = opt
+args.dataset = 'coco'
+
+
+def loop():
+ n = 0
+ while True:
+ yield n
+ n += 1
+
+
+if __name__ == "__main__":
+ webcam = args.webcam
+ mode = args.mode
+ if not os.path.exists(args.outputpath):
+ os.mkdir(args.outputpath)
+
+ # Load input video
+ data_loader = WebcamLoader(webcam).start()
+ (fourcc, fps, frameSize) = data_loader.videoinfo()
+
+ # Load detection loader
+ print('Loading YOLO model..')
+ sys.stdout.flush()
+ det_loader = DetectionLoader(data_loader, batchSize=args.detbatch).start()
+ det_processor = DetectionProcessor(det_loader).start()
+
+ # Load pose model
+ pose_dataset = Mscoco()
+ if args.fast_inference:
+ pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
+ else:
+ pose_model = InferenNet(4 * 1 + 1, pose_dataset)
+ pose_model.cuda()
+ pose_model.eval()
+
+ # Data writer
+ save_path = os.path.join(args.outputpath, 'AlphaPose_webcam' + str(webcam) + '.avi')
+ writer = DataWriter(args.save_video, save_path, cv2.VideoWriter_fourcc(*'XVID'), fps, frameSize).start()
+
+ runtime_profile = {
+ 'dt': [],
+ 'pt': [],
+ 'pn': []
+ }
+
+ print('Starting webcam demo, press Ctrl + C to terminate...')
+ sys.stdout.flush()
+ im_names_desc = tqdm(loop())
+ batchSize = args.posebatch
+ for i in im_names_desc:
+ try:
+ start_time = getTime()
+ with torch.no_grad():
+ (inps, orig_img, im_name, boxes, scores, pt1, pt2) = det_processor.read()
+ if boxes is None or boxes.nelement() == 0:
+ writer.save(None, None, None, None, None, orig_img, im_name.split('/')[-1])
+ continue
+
+ ckpt_time, det_time = getTime(start_time)
+ runtime_profile['dt'].append(det_time)
+ # Pose Estimation
+
+ datalen = inps.size(0)
+ leftover = 0
+ if (datalen) % batchSize:
+ leftover = 1
+ num_batches = datalen // batchSize + leftover
+ hm = []
+ for j in range(num_batches):
+ inps_j = inps[j * batchSize:min((j + 1) * batchSize, datalen)].cuda()
+ hm_j = pose_model(inps_j)
+ hm.append(hm_j)
+ hm = torch.cat(hm)
+ ckpt_time, pose_time = getTime(ckpt_time)
+ runtime_profile['pt'].append(pose_time)
+
+ hm = hm.cpu().data
+ writer.save(boxes, scores, hm, pt1, pt2, orig_img, im_name.split('/')[-1])
+
+ ckpt_time, post_time = getTime(ckpt_time)
+ runtime_profile['pn'].append(post_time)
+ if args.profile:
+ # TQDM
+ im_names_desc.set_description(
+ 'det time: {dt:.3f} | pose time: {pt:.2f} | post processing: {pn:.4f}'.format(
+ dt=np.mean(runtime_profile['dt']), pt=np.mean(runtime_profile['pt']), pn=np.mean(runtime_profile['pn']))
+ )
+ except KeyboardInterrupt:
+ break
+
+ print(' ')
+ print('===========================> Finish Model Running.')
+ if (args.save_img or args.save_video) and not args.vis_fast:
+ print('===========================> Rendering remaining images in the queue...')
+ print('===========================> If this step takes too long, you can enable the --vis_fast flag to use fast rendering (real-time).')
+ while (writer.running()):
+ pass
+ writer.stop()
+ final_result = writer.results()
+ write_json(final_result, args.outputpath)
diff --git a/joints_detectors/Alphapose/yolo/README.md b/joints_detectors/Alphapose/yolo/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..ed47930a3c5eca8f22ca974e94d8f7b1d18a45da
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/README.md
@@ -0,0 +1,3 @@
+# A PyTorch implementation of a YOLO v3 Object Detector
+
+Forked from https://github.com/ayooshkathuria/pytorch-yolo-v3
diff --git a/joints_detectors/Alphapose/yolo/__init__.py b/joints_detectors/Alphapose/yolo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/Alphapose/yolo/bbox.py b/joints_detectors/Alphapose/yolo/bbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..999424173fe863433891fbde8b574adccaa9a488
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/bbox.py
@@ -0,0 +1,113 @@
+from __future__ import division
+
+import torch
+import random
+
+import numpy as np
+import cv2
+
+def confidence_filter(result, confidence):
+ conf_mask = (result[:,:,4] > confidence).float().unsqueeze(2)
+ result = result*conf_mask
+
+ return result
+
+def confidence_filter_cls(result, confidence):
+ max_scores = torch.max(result[:,:,5:25], 2)[0]
+ # torch.max over dim 2 drops that dimension; restore it so the cat below
+ # lines up with result's (batch, boxes, attrs) layout
+ max_scores = max_scores.unsqueeze(2)
+ res = torch.cat((result, max_scores), 2)
+
+ cond_1 = (res[:,:,4] > confidence).float()
+ cond_2 = (res[:,:,25] > 0.995).float()
+
+ conf = cond_1 + cond_2
+ conf = torch.clamp(conf, 0.0, 1.0)
+ conf = conf.unsqueeze(2)
+ result = result*conf
+ return result
+
+
+
+def get_abs_coord(box):
+ box[2], box[3] = abs(box[2]), abs(box[3])
+ x1 = (box[0] - box[2]/2) - 1
+ y1 = (box[1] - box[3]/2) - 1
+ x2 = (box[0] + box[2]/2) - 1
+ y2 = (box[1] + box[3]/2) - 1
+ return x1, y1, x2, y2
+
+
+
+def sanity_fix(box):
+ if (box[0] > box[2]):
+ box[0], box[2] = box[2], box[0]
+
+ if (box[1] > box[3]):
+ box[1], box[3] = box[3], box[1]
+
+ return box
+
+def bbox_iou(box1, box2):
+ """
+ Returns the IoU of two bounding boxes
+
+
+ """
+ #Get the coordinates of bounding boxes
+ b1_x1, b1_y1, b1_x2, b1_y2 = box1[:,0], box1[:,1], box1[:,2], box1[:,3]
+ b2_x1, b2_y1, b2_x2, b2_y2 = box2[:,0], box2[:,1], box2[:,2], box2[:,3]
+
+ #get the corrdinates of the intersection rectangle
+ inter_rect_x1 = torch.max(b1_x1, b2_x1)
+ inter_rect_y1 = torch.max(b1_y1, b2_y1)
+ inter_rect_x2 = torch.min(b1_x2, b2_x2)
+ inter_rect_y2 = torch.min(b1_y2, b2_y2)
+
+ #Intersection area
+
+ # zeros_like keeps the comparison on the same device as the inputs
+ # (the original hard-coded .cuda() broke CPU-only use)
+ inter_area = torch.max(inter_rect_x2 - inter_rect_x1 + 1, torch.zeros_like(inter_rect_x2))*torch.max(inter_rect_y2 - inter_rect_y1 + 1, torch.zeros_like(inter_rect_y2))
+
+ #Union Area
+ b1_area = (b1_x2 - b1_x1 + 1)*(b1_y2 - b1_y1 + 1)
+ b2_area = (b2_x2 - b2_x1 + 1)*(b2_y2 - b2_y1 + 1)
+
+ iou = inter_area / (b1_area + b2_area - inter_area)
+
+ return iou
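+
+# Worked example (corner format; the +1 terms treat coordinates as inclusive
+# pixel indices):
+#
+#   box1 = torch.Tensor([[0, 0, 9, 9]])     # 10 x 10 box, area 100
+#   box2 = torch.Tensor([[5, 5, 14, 14]])   # 10 x 10 box, area 100
+#   bbox_iou(box1, box2)                    # -> 25 / 175 ~= 0.143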
+
+
+def pred_corner_coord(prediction):
+ #Get indices of non-zero confidence bboxes
+ ind_nz = torch.nonzero(prediction[:,:,4]).transpose(0,1).contiguous()
+
+ box = prediction[ind_nz[0], ind_nz[1]]
+
+
+ box_a = box.new(box.shape)
+ box_a[:,0] = (box[:,0] - box[:,2]/2)
+ box_a[:,1] = (box[:,1] - box[:,3]/2)
+ box_a[:,2] = (box[:,0] + box[:,2]/2)
+ box_a[:,3] = (box[:,1] + box[:,3]/2)
+ box[:,:4] = box_a[:,:4]
+
+ prediction[ind_nz[0], ind_nz[1]] = box
+
+ return prediction
+
+
+
+
+def write(x, batches, results, colors, classes):
+ c1 = tuple(x[1:3].int())
+ c2 = tuple(x[3:5].int())
+ img = results[int(x[0])]
+ cls = int(x[-1])
+ label = "{0}".format(classes[cls])
+ color = random.choice(colors)
+ cv2.rectangle(img, c1, c2,color, 1)
+ t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 1 , 1)[0]
+ c2 = c1[0] + t_size[0] + 3, c1[1] + t_size[1] + 4
+ cv2.rectangle(img, c1, c2,color, -1)
+ cv2.putText(img, label, (c1[0], c1[1] + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 1, [225,255,255], 1)
+ return img
diff --git a/joints_detectors/Alphapose/yolo/cam_demo.py b/joints_detectors/Alphapose/yolo/cam_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c335e464205f061fc565a523d07326ceb23294f
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cam_demo.py
@@ -0,0 +1,168 @@
+from __future__ import division
+import time
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+import numpy as np
+import cv2
+try:
+ from util import *
+ from darknet import Darknet
+ from preprocess import prep_image, inp_to_image
+except ImportError:
+ # relative imports fail when this file is run directly as a script
+ from yolo.util import *
+ from yolo.darknet import Darknet
+ from yolo.preprocess import prep_image, inp_to_image
+import pandas as pd
+import random
+import argparse
+import pickle as pkl
+
+def get_test_input(input_dim, CUDA):
+ img = cv2.imread("imgs/messi.jpg")
+ img = cv2.resize(img, (input_dim, input_dim))
+ img_ = img[:,:,::-1].transpose((2,0,1))
+ img_ = img_[np.newaxis,:,:,:]/255.0
+ img_ = torch.from_numpy(img_).float()
+ img_ = Variable(img_)
+
+ if CUDA:
+ img_ = img_.cuda()
+
+ return img_
+
+def prep_image(img, inp_dim):
+ """
+ Prepare image for inputting to the neural network.
+
+ Returns a Variable
+ """
+
+ orig_im = img
+ dim = orig_im.shape[1], orig_im.shape[0]
+ img = cv2.resize(orig_im, (inp_dim, inp_dim))
+ img_ = img[:,:,::-1].transpose((2,0,1)).copy()
+ img_ = torch.from_numpy(img_).float().div(255.0).unsqueeze(0)
+ return img_, orig_im, dim
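+
+# prep_image turns one BGR OpenCV frame into a normalized NCHW batch:
+#
+#   frame = np.zeros((480, 640, 3), dtype=np.uint8)   # dummy camera frame
+#   img, orig, dim = prep_image(frame, 160)
+#   img.shape   # -> torch.Size([1, 3, 160, 160]);  dim == (640, 480)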
+
+def write(x, img):
+ c1 = tuple(x[1:3].int())
+ c2 = tuple(x[3:5].int())
+ cls = int(x[-1])
+ label = "{0}".format(classes[cls])
+ color = random.choice(colors)
+ cv2.rectangle(img, c1, c2,color, 1)
+ t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 1 , 1)[0]
+ c2 = c1[0] + t_size[0] + 3, c1[1] + t_size[1] + 4
+ cv2.rectangle(img, c1, c2,color, -1)
+ cv2.putText(img, label, (c1[0], c1[1] + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 1, [225,255,255], 1)
+ return img
+
+def arg_parse():
+ """
+ Parse arguements to the detect module
+
+ """
+
+
+ parser = argparse.ArgumentParser(description='YOLO v3 Cam Demo')
+ parser.add_argument("--confidence", dest = "confidence", help = "Object Confidence to filter predictions", default = 0.25)
+ parser.add_argument("--nms_thresh", dest = "nms_thresh", help = "NMS Threshhold", default = 0.4)
+ parser.add_argument("--reso", dest = 'reso', help =
+ "Input resolution of the network. Increase to increase accuracy. Decrease to increase speed",
+ default = "160", type = str)
+ return parser.parse_args()
+
+
+
+if __name__ == '__main__':
+ cfgfile = "cfg/yolov3-spp.cfg"
+ weightsfile = "yolov3-spp.weights"
+ num_classes = 80
+
+ args = arg_parse()
+ confidence = float(args.confidence)
+ nms_thesh = float(args.nms_thresh)
+ start = 0
+ CUDA = torch.cuda.is_available()
+
+ bbox_attrs = 5 + num_classes
+
+ model = Darknet(cfgfile)
+ model.load_weights(weightsfile)
+
+ model.net_info["height"] = args.reso
+ inp_dim = int(model.net_info["height"])
+
+ assert inp_dim % 32 == 0
+ assert inp_dim > 32
+
+ if CUDA:
+ model.cuda()
+
+ model.eval()
+
+ # Load class names and box colours once, outside the per-frame loop
+ classes = load_classes('data/coco.names')
+ colors = pkl.load(open("pallete", "rb"))
+
+ cap = cv2.VideoCapture(0)
+
+ assert cap.isOpened(), 'Cannot capture source'
+
+ frames = 0
+ start = time.time()
+ while cap.isOpened():
+
+ ret, frame = cap.read()
+ if ret:
+
+ img, orig_im, dim = prep_image(frame, inp_dim)
+ # im_dim is consumed on the CUDA path below, so it must actually be built
+ im_dim = torch.FloatTensor(dim).repeat(1, 2)
+
+ if CUDA:
+ im_dim = im_dim.cuda()
+ img = img.cuda()
+
+
+ output = model(Variable(img), CUDA)
+ output = write_results(output, confidence, num_classes, nms = True, nms_conf = nms_thesh)
+
+ if type(output) == int:
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ continue
+
+
+
+ output[:,1:5] = torch.clamp(output[:,1:5], 0.0, float(inp_dim))/inp_dim
+
+# im_dim = im_dim.repeat(output.size(0), 1)
+ output[:,[1,3]] *= frame.shape[1]
+ output[:,[2,4]] *= frame.shape[0]
+
+
+
+ list(map(lambda x: write(x, orig_im), output))
+
+
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+
+
+ else:
+ break
+
+
+
+
+
diff --git a/joints_detectors/Alphapose/yolo/cfg/tiny-yolo-voc.cfg b/joints_detectors/Alphapose/yolo/cfg/tiny-yolo-voc.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..ab2c066a216eacbee86e78c28f4d236e5d6b351a
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cfg/tiny-yolo-voc.cfg
@@ -0,0 +1,134 @@
+[net]
+batch=64
+subdivisions=8
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+max_batches = 40200
+policy=steps
+steps=-1,100,20000,30000
+scales=.1,10,.1,.1
+
+[convolutional]
+batch_normalize=1
+filters=16
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=1
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+###########
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=125
+activation=linear
+
+[region]
+anchors = 1.08,1.19, 3.42,4.41, 6.63,11.38, 9.42,5.11, 16.62,10.52
+bias_match=1
+classes=20
+coords=4
+num=5
+softmax=1
+jitter=.2
+rescore=1
+
+object_scale=5
+noobject_scale=1
+class_scale=1
+coord_scale=1
+
+absolute=1
+thresh = .6
+random=1
diff --git a/joints_detectors/Alphapose/yolo/cfg/yolo-voc.cfg b/joints_detectors/Alphapose/yolo/cfg/yolo-voc.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..d5bdfc1c5bf2d34885d7614d76d980c90373f89a
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cfg/yolo-voc.cfg
@@ -0,0 +1,258 @@
+[net]
+# Testing
+batch=64
+subdivisions=8
+# Training
+# batch=64
+# subdivisions=8
+height=416
+width=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 80200
+policy=steps
+steps=-1,500,40000,60000
+scales=0.1,10,.1,.1
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+
+#######
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[route]
+layers=-9
+
+[convolutional]
+batch_normalize=1
+size=1
+stride=1
+pad=1
+filters=64
+activation=leaky
+
+[reorg]
+stride=2
+
+[route]
+layers=-1,-4
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=125
+activation=linear
+
+
+[region]
+anchors = 1.3221, 1.73145, 3.19275, 4.00944, 5.05587, 8.09892, 9.47112, 4.84053, 11.2364, 10.0071
+bias_match=1
+classes=20
+coords=4
+num=5
+softmax=1
+jitter=.3
+rescore=1
+
+object_scale=5
+noobject_scale=1
+class_scale=1
+coord_scale=1
+
+absolute=1
+thresh = .6
+random=1
diff --git a/joints_detectors/Alphapose/yolo/cfg/yolo.cfg b/joints_detectors/Alphapose/yolo/cfg/yolo.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..2a0cd98fbd07c94aa0840c528a12b1b60a004928
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cfg/yolo.cfg
@@ -0,0 +1,258 @@
+[net]
+# Testing
+batch=1
+subdivisions=1
+# Training
+# batch=64
+# subdivisions=8
+width=416
+height=416
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 500200
+policy=steps
+steps=400000,450000
+scales=.1,.1
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[maxpool]
+size=2
+stride=2
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+
+#######
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[route]
+layers=-9
+
+[convolutional]
+batch_normalize=1
+size=1
+stride=1
+pad=1
+filters=64
+activation=leaky
+
+[reorg]
+stride=2
+
+[route]
+layers=-1,-4
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=425
+activation=linear
+
+
+[region]
+anchors = 0.57273, 0.677385, 1.87446, 2.06253, 3.33843, 5.47434, 7.88282, 3.52778, 9.77052, 9.16828
+bias_match=1
+classes=80
+coords=4
+num=5
+softmax=1
+jitter=.3
+rescore=1
+
+object_scale=5
+noobject_scale=1
+class_scale=1
+coord_scale=1
+
+absolute=1
+thresh = .6
+random=1
diff --git a/joints_detectors/Alphapose/yolo/cfg/yolov3-spp.cfg b/joints_detectors/Alphapose/yolo/cfg/yolov3-spp.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..4ad2a052d88328a79cff5686ff4dd1df6993a2fd
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cfg/yolov3-spp.cfg
@@ -0,0 +1,822 @@
+[net]
+# Testing
+batch=1
+subdivisions=1
+# Training
+# batch=64
+# subdivisions=16
+width=608
+height=608
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 500200
+policy=steps
+steps=400000,450000
+scales=.1,.1
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+### SPP ###
+[maxpool]
+stride=1
+size=5
+
+[route]
+layers=-2
+
+[maxpool]
+stride=1
+size=9
+
+[route]
+layers=-4
+
+[maxpool]
+stride=1
+size=13
+
+[route]
+layers=-1,-3,-5,-6
+
+### End SPP ###
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .7
+truth_thresh = 1
+random=1
+
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .7
+truth_thresh = 1
+random=1
+
+
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .7
+truth_thresh = 1
+random=1
+
diff --git a/joints_detectors/Alphapose/yolo/cfg/yolov3.cfg b/joints_detectors/Alphapose/yolo/cfg/yolov3.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..e94193b0e82e56b3b457f3d8c049ffb9ac7ed1f8
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/cfg/yolov3.cfg
@@ -0,0 +1,789 @@
+[net]
+# Testing
+batch=1
+subdivisions=1
+# Training
+# batch=64
+# subdivisions=16
+width= 320
+height = 320
+channels=3
+momentum=0.9
+decay=0.0005
+angle=0
+saturation = 1.5
+exposure = 1.5
+hue=.1
+
+learning_rate=0.001
+burn_in=1000
+max_batches = 500200
+policy=steps
+steps=400000,450000
+scales=.1,.1
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=3
+stride=1
+pad=1
+activation=leaky
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=32
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=64
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+# Downsample
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=2
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=1024
+size=3
+stride=1
+pad=1
+activation=leaky
+
+[shortcut]
+from=-3
+activation=linear
+
+######################
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=512
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=1024
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 6,7,8
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 61
+
+
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=256
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=512
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 3,4,5
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
+
+
+[route]
+layers = -4
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[upsample]
+stride=2
+
+[route]
+layers = -1, 36
+
+
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+filters=128
+size=1
+stride=1
+pad=1
+activation=leaky
+
+[convolutional]
+batch_normalize=1
+size=3
+stride=1
+pad=1
+filters=256
+activation=leaky
+
+[convolutional]
+size=1
+stride=1
+pad=1
+filters=255
+activation=linear
+
+
+[yolo]
+mask = 0,1,2
+anchors = 10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
+classes=80
+num=9
+jitter=.3
+ignore_thresh = .5
+truth_thresh = 1
+random=1
+
diff --git a/joints_detectors/Alphapose/yolo/darknet.py b/joints_detectors/Alphapose/yolo/darknet.py
new file mode 100644
index 0000000000000000000000000000000000000000..f12a8ac6a577217e2a797214f788fdf30d8ec989
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/darknet.py
@@ -0,0 +1,548 @@
+from __future__ import division
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import cv2
+import matplotlib.pyplot as plt
+try:
+ from util import count_parameters as count
+ from util import convert2cpu as cpu
+ from util import predict_transform
+except ImportError:
+ from yolo.util import count_parameters as count
+ from yolo.util import convert2cpu as cpu
+ from yolo.util import predict_transform
+
+class test_net(nn.Module):
+ def __init__(self, num_layers, input_size):
+ super(test_net, self).__init__()
+ self.num_layers= num_layers
+ self.linear_1 = nn.Linear(input_size, 5)
+ self.middle = nn.ModuleList([nn.Linear(5,5) for x in range(num_layers)])
+ self.output = nn.Linear(5,2)
+
+ def forward(self, x):
+ x = x.view(-1)
+ fwd = nn.Sequential(self.linear_1, *self.middle, self.output)
+ return fwd(x)
+
+def get_test_input():
+ img = cv2.imread("dog-cycle-car.png")
+ img = cv2.resize(img, (416,416))
+ img_ = img[:,:,::-1].transpose((2,0,1))
+ img_ = img_[np.newaxis,:,:,:]/255.0
+ img_ = torch.from_numpy(img_).float()
+ img_ = Variable(img_)
+ return img_
+
+
+def parse_cfg(cfgfile):
+ """
+ Takes a configuration file
+
+ Returns a list of blocks. Each blocks describes a block in the neural
+ network to be built. Block is represented as a dictionary in the list
+
+ """
+    with open(cfgfile, 'r') as file:
+        lines = file.read().split('\n')            #store the lines in a list
+    lines = [x for x in lines if len(x) > 0]       #get rid of the empty lines
+    lines = [x for x in lines if x[0] != '#']      #drop the comment lines
+    lines = [x.rstrip().lstrip() for x in lines]
+
+
+ block = {}
+ blocks = []
+
+ for line in lines:
+ if line[0] == "[": #This marks the start of a new block
+ if len(block) != 0:
+ blocks.append(block)
+ block = {}
+ block["type"] = line[1:-1].rstrip()
+ else:
+ key,value = line.split("=")
+ block[key.rstrip()] = value.lstrip()
+ blocks.append(block)
+
+ return blocks
+# print('\n\n'.join([repr(x) for x in blocks]))
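+# Illustrative result of parse_cfg (not executed): blocks[0] is the [net]
+# block, e.g. {'type': 'net', 'batch': '64', ...}; a conv block becomes
+# {'type': 'convolutional', 'batch_normalize': '1', 'filters': '32',
+#  'size': '3', 'stride': '1', 'pad': '1', 'activation': 'leaky'}.
+# All values stay strings here and are cast later in create_modules.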
+
+import pickle as pkl
+
+class MaxPoolStride1(nn.Module):
+ def __init__(self, kernel_size):
+ super(MaxPoolStride1, self).__init__()
+ self.kernel_size = kernel_size
+ self.pad = kernel_size - 1
+
+ def forward(self, x):
+ padding = int(self.pad / 2)
+ #padded_x = F.pad(x, (0,self.pad,0,self.pad), mode="replicate")
+ #pooled_x = nn.MaxPool2d(self.kernel_size, self.pad)(padded_x)
+ #padded_x = F.pad(x, (0, self.pad, 0, self.pad), mode="replicate")
+ padded_x = F.pad(x, (padding, padding, padding, padding), mode="constant", value=0)
+ pooled_x = nn.MaxPool2d(self.kernel_size, 1)(padded_x)
+ return pooled_x
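+# MaxPoolStride1 zero-pads the input so that a stride-1 max pool preserves the
+# spatial size; the SPP cfg variant uses such [maxpool] blocks to pool several
+# kernel sizes over the same grid before concatenating them with a 4-way route.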
+
+
+class EmptyLayer(nn.Module):
+ def __init__(self):
+ super(EmptyLayer, self).__init__()
+
+
+class DetectionLayer(nn.Module):
+ def __init__(self, anchors):
+ super(DetectionLayer, self).__init__()
+ self.anchors = anchors
+
+ def forward(self, x, inp_dim, num_classes, confidence):
+ x = x.data
+ global CUDA
+ prediction = x
+ prediction = predict_transform(prediction, inp_dim, self.anchors, num_classes, confidence, CUDA)
+ return prediction
+
+
+
+
+
+class Upsample(nn.Module):
+ def __init__(self, stride=2):
+ super(Upsample, self).__init__()
+ self.stride = stride
+
+ def forward(self, x):
+ stride = self.stride
+ assert(x.data.dim() == 4)
+ B = x.data.size(0)
+ C = x.data.size(1)
+ H = x.data.size(2)
+ W = x.data.size(3)
+ ws = stride
+ hs = stride
+ x = x.view(B, C, H, 1, W, 1).expand(B, C, H, stride, W, stride).contiguous().view(B, C, H*stride, W*stride)
+ return x
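+# The view/expand sequence above is nearest-neighbour upsampling: each pixel
+# is repeated `stride` times along H and W, equivalent to
+# F.interpolate(x, scale_factor=stride, mode="nearest").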
+#
+
+class ReOrgLayer(nn.Module):
+ def __init__(self, stride = 2):
+ super(ReOrgLayer, self).__init__()
+ self.stride= stride
+
+ def forward(self,x):
+ assert(x.data.dim() == 4)
+ B,C,H,W = x.data.shape
+ hs = self.stride
+ ws = self.stride
+ assert(H % hs == 0), "The stride " + str(self.stride) + " is not a proper divisor of height " + str(H)
+        assert(W % ws == 0), "The stride " + str(self.stride) + " is not a proper divisor of width " + str(W)
+ x = x.view(B,C, H // hs, hs, W // ws, ws).transpose(-2,-3).contiguous()
+ x = x.view(B,C, H // hs * W // ws, hs, ws)
+ x = x.view(B,C, H // hs * W // ws, hs*ws).transpose(-1,-2).contiguous()
+ x = x.view(B, C, ws*hs, H // ws, W // ws).transpose(1,2).contiguous()
+ x = x.view(B, C*ws*hs, H // ws, W // ws)
+ return x
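+# ReOrgLayer is the YOLOv2-style "reorg" (space-to-depth) op: with stride 2 a
+# (B, C, H, W) map becomes (B, 4*C, H/2, W/2) by folding every 2x2 spatial
+# block into the channel dimension, so a fine feature map can be routed into
+# a coarser one.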
+
+
+def create_modules(blocks):
+ net_info = blocks[0] #Captures the information about the input and pre-processing
+
+ module_list = nn.ModuleList()
+
+ index = 0 #indexing blocks helps with implementing route layers (skip connections)
+
+
+ prev_filters = 3
+
+ output_filters = []
+
+ for x in blocks:
+ module = nn.Sequential()
+
+ if (x["type"] == "net"):
+ continue
+
+ #If it's a convolutional layer
+ if (x["type"] == "convolutional"):
+ #Get the info about the layer
+ activation = x["activation"]
+            try:
+                batch_normalize = int(x["batch_normalize"])
+                bias = False
+            except KeyError:
+                batch_normalize = 0
+                bias = True
+
+ filters= int(x["filters"])
+ padding = int(x["pad"])
+ kernel_size = int(x["size"])
+ stride = int(x["stride"])
+
+ if padding:
+ pad = (kernel_size - 1) // 2
+ else:
+ pad = 0
+
+ #Add the convolutional layer
+ conv = nn.Conv2d(prev_filters, filters, kernel_size, stride, pad, bias=bias)
+ module.add_module("conv_{0}".format(index), conv)
+
+ #Add the Batch Norm Layer
+ if batch_normalize:
+ bn = nn.BatchNorm2d(filters)
+ module.add_module("batch_norm_{0}".format(index), bn)
+
+ #Check the activation.
+ #It is either Linear or a Leaky ReLU for YOLO
+ if activation == "leaky":
+ activn = nn.LeakyReLU(0.1, inplace = True)
+ module.add_module("leaky_{0}".format(index), activn)
+
+
+
+ #If it's an upsampling layer
+ #We use Bilinear2dUpsampling
+
+ elif (x["type"] == "upsample"):
+ stride = int(x["stride"])
+# upsample = Upsample(stride)
+ upsample = nn.Upsample(scale_factor = 2, mode = "nearest")
+ module.add_module("upsample_{}".format(index), upsample)
+
+ #If it is a route layer
+ elif (x["type"] == "route"):
+ x["layers"] = x["layers"].split(',')
+
+ #Start of a route
+ start = int(x["layers"][0])
+ if len(x["layers"]) <= 2:
+                #end, if one exists
+                try:
+                    end = int(x["layers"][1])
+                except (IndexError, ValueError):
+                    end = 0
+
+ #Positive anotation
+ if start > 0:
+ start = start - index
+
+ if end > 0:
+ end = end - index
+
+
+ route = EmptyLayer()
+ module.add_module("route_{0}".format(index), route)
+
+
+
+ if end < 0:
+ filters = output_filters[index + start] + output_filters[index + end]
+ else:
+ filters= output_filters[index + start]
+ else: #SPP-route
+ assert len(x["layers"]) == 4
+
+                route = EmptyLayer()
+                module.add_module("route_{0}".format(index), route)
+
+ filters = output_filters[index + start] + output_filters[index + int(x["layers"][1])] \
+ + output_filters[index + int(x["layers"][2])] + output_filters[index + int(x["layers"][3])]
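+                #An SPP route concatenates four earlier feature maps (the
+                #pooled pyramid plus its input), so the filter count is the
+                #sum of the four contributing layers' outputs.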
+
+ #shortcut corresponds to skip connection
+ elif x["type"] == "shortcut":
+ from_ = int(x["from"])
+ shortcut = EmptyLayer()
+ module.add_module("shortcut_{}".format(index), shortcut)
+
+
+ elif x["type"] == "maxpool":
+ stride = int(x["stride"])
+ size = int(x["size"])
+ if stride != 1:
+ maxpool = nn.MaxPool2d(size, stride)
+ else:
+ maxpool = MaxPoolStride1(size)
+ #maxpool = nn.MaxPool2d(size, stride=1, padding=size-1)
+
+ module.add_module("maxpool_{}".format(index), maxpool)
+
+ #Yolo is the detection layer
+ elif x["type"] == "yolo":
+ mask = x["mask"].split(",")
+ mask = [int(x) for x in mask]
+
+
+ anchors = x["anchors"].split(",")
+ anchors = [int(a) for a in anchors]
+ anchors = [(anchors[i], anchors[i+1]) for i in range(0, len(anchors),2)]
+ anchors = [anchors[i] for i in mask]
+
+ detection = DetectionLayer(anchors)
+ module.add_module("Detection_{}".format(index), detection)
+
+
+
+ else:
+            print("Unknown block type: {}".format(x["type"]))
+ assert False
+
+ module_list.append(module)
+ prev_filters = filters
+ output_filters.append(filters)
+ index += 1
+
+
+ return (net_info, module_list)
+
+
+
+class Darknet(nn.Module):
+ def __init__(self, cfgfile):
+ super(Darknet, self).__init__()
+ self.blocks = parse_cfg(cfgfile)
+ self.net_info, self.module_list = create_modules(self.blocks)
+ self.header = torch.IntTensor([0,0,0,0])
+ self.seen = 0
+
+
+
+ def get_blocks(self):
+ return self.blocks
+
+ def get_module_list(self):
+ return self.module_list
+
+
+ def forward(self, x, CUDA):
+ detections = []
+ modules = self.blocks[1:]
+ outputs = {} #We cache the outputs for the route layer
+
+
+ write = 0
+ for i in range(len(modules)):
+
+ module_type = (modules[i]["type"])
+ if module_type == "convolutional" or module_type == "upsample" or module_type == "maxpool":
+
+ x = self.module_list[i](x)
+ outputs[i] = x
+
+
+ elif module_type == "route":
+ layers = modules[i]["layers"]
+ layers = [int(a) for a in layers]
+
+ if (layers[0]) > 0:
+ layers[0] = layers[0] - i
+
+ if len(layers) == 1:
+ x = outputs[i + (layers[0])]
+
+ elif len(layers) == 2:
+ if (layers[1]) > 0:
+ layers[1] = layers[1] - i
+
+ map1 = outputs[i + layers[0]]
+ map2 = outputs[i + layers[1]]
+
+ x = torch.cat((map1, map2), 1)
+ elif len(layers) == 4: # SPP
+ map1 = outputs[i + layers[0]]
+ map2 = outputs[i + layers[1]]
+ map3 = outputs[i + layers[2]]
+ map4 = outputs[i + layers[3]]
+
+ x = torch.cat((map1, map2, map3, map4), 1)
+ outputs[i] = x
+
+ elif module_type == "shortcut":
+ from_ = int(modules[i]["from"])
+ x = outputs[i-1] + outputs[i+from_]
+ outputs[i] = x
+
+
+
+ elif module_type == 'yolo':
+
+ anchors = self.module_list[i][0].anchors
+ #Get the input dimensions
+ inp_dim = int (self.net_info["height"])
+
+ #Get the number of classes
+ num_classes = int (modules[i]["classes"])
+
+ #Output the result
+ x = x.data
+ x = predict_transform(x, inp_dim, anchors, num_classes, CUDA)
+
+ if type(x) == int:
+ continue
+
+
+ if not write:
+ detections = x
+ write = 1
+
+ else:
+ detections = torch.cat((detections, x), 1)
+
+ outputs[i] = outputs[i-1]
+
+
+
+        #If no detection head produced output, signal failure as callers expect (int 0)
+        if not write:
+            return 0
+        return detections
+
+
+ def load_weights(self, weightfile):
+
+ #Open the weights file
+ fp = open(weightfile, "rb")
+
+        #The first 5 int32 values are header information
+        # 1. Major version number
+        # 2. Minor version number
+        # 3. Subversion number
+        # 4,5. Images seen by the network (a 64-bit count read as two int32s)
+ header = np.fromfile(fp, dtype = np.int32, count = 5)
+ self.header = torch.from_numpy(header)
+ self.seen = self.header[3]
+
+ #The rest of the values are the weights
+ # Let's load them up
+ weights = np.fromfile(fp, dtype = np.float32)
+
+ ptr = 0
+ for i in range(len(self.module_list)):
+ module_type = self.blocks[i + 1]["type"]
+
+ if module_type == "convolutional":
+ model = self.module_list[i]
+                try:
+                    batch_normalize = int(self.blocks[i+1]["batch_normalize"])
+                except KeyError:
+                    batch_normalize = 0
+
+ conv = model[0]
+
+ if (batch_normalize):
+ bn = model[1]
+
+ #Get the number of weights of Batch Norm Layer
+ num_bn_biases = bn.bias.numel()
+
+ #Load the weights
+ bn_biases = torch.from_numpy(weights[ptr:ptr + num_bn_biases])
+ ptr += num_bn_biases
+
+ bn_weights = torch.from_numpy(weights[ptr: ptr + num_bn_biases])
+ ptr += num_bn_biases
+
+ bn_running_mean = torch.from_numpy(weights[ptr: ptr + num_bn_biases])
+ ptr += num_bn_biases
+
+ bn_running_var = torch.from_numpy(weights[ptr: ptr + num_bn_biases])
+ ptr += num_bn_biases
+
+ #Cast the loaded weights into dims of model weights.
+ bn_biases = bn_biases.view_as(bn.bias.data)
+ bn_weights = bn_weights.view_as(bn.weight.data)
+ bn_running_mean = bn_running_mean.view_as(bn.running_mean)
+ bn_running_var = bn_running_var.view_as(bn.running_var)
+
+ #Copy the data to model
+ bn.bias.data.copy_(bn_biases)
+ bn.weight.data.copy_(bn_weights)
+ bn.running_mean.copy_(bn_running_mean)
+ bn.running_var.copy_(bn_running_var)
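+                    #Darknet serialises BN parameters in the order bias (beta),
+                    #weight (gamma), running_mean, running_var; the reads above
+                    #must match that order or every later offset into `weights`
+                    #would shift.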
+
+ else:
+ #Number of biases
+ num_biases = conv.bias.numel()
+
+ #Load the weights
+ conv_biases = torch.from_numpy(weights[ptr: ptr + num_biases])
+ ptr = ptr + num_biases
+
+ #reshape the loaded weights according to the dims of the model weights
+ conv_biases = conv_biases.view_as(conv.bias.data)
+
+ #Finally copy the data
+ conv.bias.data.copy_(conv_biases)
+
+
+ #Let us load the weights for the Convolutional layers
+ num_weights = conv.weight.numel()
+
+ #Do the same as above for weights
+ conv_weights = torch.from_numpy(weights[ptr:ptr+num_weights])
+ ptr = ptr + num_weights
+
+ conv_weights = conv_weights.view_as(conv.weight.data)
+ conv.weight.data.copy_(conv_weights)
+
+ def save_weights(self, savedfile, cutoff = 0):
+
+ if cutoff <= 0:
+ cutoff = len(self.blocks) - 1
+
+ fp = open(savedfile, 'wb')
+
+ # Attach the header at the top of the file
+ self.header[3] = self.seen
+ header = self.header
+
+ header = header.numpy()
+ header.tofile(fp)
+
+ # Now, let us save the weights
+ for i in range(len(self.module_list)):
+ module_type = self.blocks[i+1]["type"]
+
+ if (module_type) == "convolutional":
+ model = self.module_list[i]
+                try:
+                    batch_normalize = int(self.blocks[i+1]["batch_normalize"])
+                except KeyError:
+                    batch_normalize = 0
+
+ conv = model[0]
+
+ if (batch_normalize):
+ bn = model[1]
+
+                    #If the parameters are on GPU, copy them to CPU before
+                    #serialising; the originals stay on the GPU untouched, so
+                    #weights can still be saved mid-training.
+ cpu(bn.bias.data).numpy().tofile(fp)
+ cpu(bn.weight.data).numpy().tofile(fp)
+ cpu(bn.running_mean).numpy().tofile(fp)
+ cpu(bn.running_var).numpy().tofile(fp)
+
+
+ else:
+ cpu(conv.bias.data).numpy().tofile(fp)
+
+
+ #Let us save the weights for the Convolutional layers
+ cpu(conv.weight.data).numpy().tofile(fp)
+
+
+
+
+
+#
+#dn = Darknet('cfg/yolov3.cfg')
+#dn.load_weights("yolov3.weights")
+#inp = get_test_input()
+#a = dn(inp, torch.cuda.is_available())
+#dn.eval()
+#a_i = dn(inp, torch.cuda.is_available())
diff --git a/joints_detectors/Alphapose/yolo/data/coco.names b/joints_detectors/Alphapose/yolo/data/coco.names
new file mode 100644
index 0000000000000000000000000000000000000000..ca76c80b5b2cd0b25047f75736656cfebc9da7aa
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/data/coco.names
@@ -0,0 +1,80 @@
+person
+bicycle
+car
+motorbike
+aeroplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+backpack
+umbrella
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+sofa
+pottedplant
+bed
+diningtable
+toilet
+tvmonitor
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
diff --git a/joints_detectors/Alphapose/yolo/data/voc.names b/joints_detectors/Alphapose/yolo/data/voc.names
new file mode 100644
index 0000000000000000000000000000000000000000..8420ab35ede7400974f25836a6bb543024686a0e
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/data/voc.names
@@ -0,0 +1,20 @@
+aeroplane
+bicycle
+bird
+boat
+bottle
+bus
+car
+cat
+chair
+cow
+diningtable
+dog
+horse
+motorbike
+person
+pottedplant
+sheep
+sofa
+train
+tvmonitor
diff --git a/joints_detectors/Alphapose/yolo/detect.py b/joints_detectors/Alphapose/yolo/detect.py
new file mode 100644
index 0000000000000000000000000000000000000000..d9fd2b84f553751cac436a24bd03a9f122458606
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/detect.py
@@ -0,0 +1,103 @@
+from __future__ import division
+import time
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+import numpy as np
+import cv2
+from .util import *
+import argparse
+import os
+import os.path as osp
+from .darknet import Darknet
+from .preprocess import prep_image, inp_to_image
+import pandas as pd
+import random
+import pickle as pkl
+import itertools
+
+
+if __name__ == '__main__':
+
+ scales = "1,2,3"
+ images = "imgs/messi.jpg"
+ batch_size = 1
+ confidence = 0.5
+ nms_thesh = 0.4
+
+ CUDA = torch.cuda.is_available()
+
+ num_classes = 80
+ classes = load_classes('data/coco.names')
+
+ #Set up the neural network
+ print("Loading network.....")
+ model = Darknet("cfg/yolov3-spp.cfg")
+ model.load_weights("yolov3-spp.weights")
+ print("Network successfully loaded")
+
+ model.net_info["height"] = "608"
+ inp_dim = int(model.net_info["height"])
+ assert inp_dim % 32 == 0
+ assert inp_dim > 32
+
+    #If there's a GPU available, put the model on GPU
+ if CUDA:
+ model.cuda()
+
+ #Set the model in evaluation mode
+ model.eval()
+
+    #Detection phase
+    imlist = [osp.join(osp.realpath('.'), images)]
+    if not osp.exists(imlist[0]):
+        print("No file or directory with the name {}".format(images))
+        exit()
+
+ batches = list(map(prep_image, imlist, [inp_dim for x in range(len(imlist))]))
+ im_batches = [x[0] for x in batches]
+ orig_ims = [x[1] for x in batches]
+ im_dim_list = [x[2] for x in batches]
+ im_dim_list = torch.FloatTensor(im_dim_list).repeat(1, 2)
+
+ if CUDA:
+ im_dim_list = im_dim_list.cuda()
+
+
+ for batch in im_batches:
+ #load the image
+ if CUDA:
+ batch = batch.cuda()
+ with torch.no_grad():
+ prediction = model(Variable(batch), CUDA)
+
+ prediction = write_results(prediction, confidence, num_classes, nms=True, nms_conf=nms_thesh)
+ output = prediction
+
+ if CUDA:
+ torch.cuda.synchronize()
+
+    #write_results returns the int 0 when nothing was detected
+    if isinstance(output, int):
+        print("No detections were made")
+        exit()
+ print(im_dim_list.shape)
+ im_dim_list = torch.index_select(im_dim_list, 0, output[:,0].long())
+
+ scaling_factor = torch.min(inp_dim/im_dim_list,1)[0].view(-1,1)
+
+
+ output[:,[1,3]] -= (inp_dim - scaling_factor*im_dim_list[:,0].view(-1,1))/2
+ output[:,[2,4]] -= (inp_dim - scaling_factor*im_dim_list[:,1].view(-1,1))/2
+
+ output[:,1:5] /= scaling_factor
+
+ for i in range(output.shape[0]):
+ output[i, [1,3]] = torch.clamp(output[i, [1,3]], 0.0, im_dim_list[i,0])
+ output[i, [2,4]] = torch.clamp(output[i, [2,4]], 0.0, im_dim_list[i,1])
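+    #The three steps above undo the letterbox transform: subtract the padding
+    #offsets, divide by the resize scale, then clamp each box to the original
+    #image bounds.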
+
+ print(output)
+ print(output.shape)
diff --git a/joints_detectors/Alphapose/yolo/pallete b/joints_detectors/Alphapose/yolo/pallete
new file mode 100644
index 0000000000000000000000000000000000000000..25f0143e9c80c98923dac550f6cd52e20a9dbbe6
Binary files /dev/null and b/joints_detectors/Alphapose/yolo/pallete differ
diff --git a/joints_detectors/Alphapose/yolo/preprocess.py b/joints_detectors/Alphapose/yolo/preprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9a2f7f52882e1b932d48bb9c2d2d8c15a9b7ce8
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/preprocess.py
@@ -0,0 +1,87 @@
+from __future__ import division
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import cv2
+import matplotlib.pyplot as plt
+try:
+ from util import count_parameters as count
+ from util import convert2cpu as cpu
+except ImportError:
+ from yolo.util import count_parameters as count
+ from yolo.util import convert2cpu as cpu
+from PIL import Image, ImageDraw
+
+
+def letterbox_image(img, inp_dim):
+ '''resize image with unchanged aspect ratio using padding'''
+ img_w, img_h = img.shape[1], img.shape[0]
+ w, h = inp_dim
+ new_w = int(img_w * min(w / img_w, h / img_h))
+ new_h = int(img_h * min(w / img_w, h / img_h))
+ resized_image = cv2.resize(img, (new_w, new_h), interpolation=cv2.INTER_CUBIC)
+
+ canvas = np.full((inp_dim[1], inp_dim[0], 3), 128)
+
+ canvas[(h - new_h) // 2:(h - new_h) // 2 + new_h, (w - new_w) // 2:(w - new_w) // 2 + new_w, :] = resized_image
+
+ return canvas
+
+
+def prep_image(img, inp_dim):
+ """
+ Prepare image for inputting to the neural network.
+
+    Returns a (tensor, original image, (width, height)) tuple
+ """
+
+ orig_im = cv2.imread(img)
+ shape = orig_im.shape
+ dim = orig_im.shape[1], orig_im.shape[0]
+ img = (letterbox_image(orig_im, (inp_dim, inp_dim)))
+ img_ = img[:, :, ::-1].transpose((2, 0, 1)).copy()
+ img_ = torch.from_numpy(img_).float().div(255.0).unsqueeze(0)
+ return img_, orig_im, dim
+
+
+def prep_frame(img, inp_dim):
+ """
+ Prepare image for inputting to the neural network.
+
+    Returns a (tensor, original image, (width, height)) tuple
+ """
+
+ orig_im = img
+ dim = orig_im.shape[1], orig_im.shape[0]
+ img = (letterbox_image(orig_im, (inp_dim, inp_dim)))
+ img_ = img[:, :, ::-1].transpose((2, 0, 1)).copy()
+ img_ = torch.from_numpy(img_).float().div(255.0).unsqueeze(0)
+ return img_, orig_im, dim
+
+
+def prep_image_pil(img, network_dim):
+ orig_im = Image.open(img)
+ img = orig_im.convert('RGB')
+ dim = img.size
+ img = img.resize(network_dim)
+ img = torch.ByteTensor(torch.ByteStorage.from_buffer(img.tobytes()))
+ img = img.view(*network_dim, 3).transpose(0, 1).transpose(0, 2).contiguous()
+ img = img.view(1, 3, *network_dim)
+ img = img.float().div(255.0)
+ return (img, orig_im, dim)
+
+
+def inp_to_image(inp):
+ inp = inp.cpu().squeeze()
+ inp = inp * 255
+ try:
+ inp = inp.data.numpy()
+ except RuntimeError:
+ inp = inp.numpy()
+ inp = inp.transpose(1, 2, 0)
+
+ inp = inp[:, :, ::-1]
+ return inp
diff --git a/joints_detectors/Alphapose/yolo/util.py b/joints_detectors/Alphapose/yolo/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..8eb4e245cafcfe5800298f2e4194836181a5fdd7
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/util.py
@@ -0,0 +1,387 @@
+
+from __future__ import division
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.autograd import Variable
+import numpy as np
+import cv2
+import matplotlib.pyplot as plt
+try:
+ from bbox import bbox_iou
+except ImportError:
+ from yolo.bbox import bbox_iou
+
+
+def count_parameters(model):
+ return sum(p.numel() for p in model.parameters())
+
+def count_learnable_parameters(model):
+ return sum(p.numel() for p in model.parameters() if p.requires_grad)
+
+def convert2cpu(matrix):
+ if matrix.is_cuda:
+ return torch.FloatTensor(matrix.size()).copy_(matrix)
+ else:
+ return matrix
+
+def predict_transform(prediction, inp_dim, anchors, num_classes, CUDA = True):
+ batch_size = prediction.size(0)
+ stride = inp_dim // prediction.size(2)
+ grid_size = inp_dim // stride
+ bbox_attrs = 5 + num_classes
+ num_anchors = len(anchors)
+
+ anchors = [(a[0]/stride, a[1]/stride) for a in anchors]
+
+
+
+ prediction = prediction.view(batch_size, bbox_attrs*num_anchors, grid_size*grid_size)
+ prediction = prediction.transpose(1,2).contiguous()
+ prediction = prediction.view(batch_size, grid_size*grid_size*num_anchors, bbox_attrs)
+
+
+    #Sigmoid the centre_X, centre_Y, and object confidence
+ prediction[:,:,0] = torch.sigmoid(prediction[:,:,0])
+ prediction[:,:,1] = torch.sigmoid(prediction[:,:,1])
+ prediction[:,:,4] = torch.sigmoid(prediction[:,:,4])
+
+
+
+ #Add the center offsets
+ grid_len = np.arange(grid_size)
+ a,b = np.meshgrid(grid_len, grid_len)
+
+ x_offset = torch.FloatTensor(a).view(-1,1)
+ y_offset = torch.FloatTensor(b).view(-1,1)
+
+ if CUDA:
+ x_offset = x_offset.cuda()
+ y_offset = y_offset.cuda()
+
+ x_y_offset = torch.cat((x_offset, y_offset), 1).repeat(1,num_anchors).view(-1,2).unsqueeze(0)
+
+ prediction[:,:,:2] += x_y_offset
+
+ #log space transform height and the width
+ anchors = torch.FloatTensor(anchors)
+
+ if CUDA:
+ anchors = anchors.cuda()
+
+ anchors = anchors.repeat(grid_size*grid_size, 1).unsqueeze(0)
+ prediction[:,:,2:4] = torch.exp(prediction[:,:,2:4])*anchors
+
+    #Sigmoid the class scores (YOLOv3 uses independent logistic classifiers, not softmax)
+ prediction[:,:,5: 5 + num_classes] = torch.sigmoid((prediction[:,:, 5 : 5 + num_classes]))
+
+ prediction[:,:,:4] *= stride
+
+
+ return prediction
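+# Shape note: for a 416x416 input at a 13x13 head with 3 anchors,
+# predict_transform returns (batch, 13*13*3, 5 + num_classes); each row holds
+# (centre_x, centre_y, w, h) in input-image pixels, objectness, class scores.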
+
+def load_classes(namesfile):
+ fp = open(namesfile, "r")
+ names = fp.read().split("\n")[:-1]
+ return names
+
+def get_im_dim(im):
+ im = cv2.imread(im)
+ w,h = im.shape[1], im.shape[0]
+ return w,h
+
+def unique(tensor):
+ tensor_np = tensor.cpu().numpy()
+ unique_np = np.unique(tensor_np)
+ unique_tensor = torch.from_numpy(unique_np)
+
+ tensor_res = tensor.new(unique_tensor.shape)
+ tensor_res.copy_(unique_tensor)
+ return tensor_res
+
+
+def dynamic_write_results(prediction, confidence, num_classes, nms=True, nms_conf=0.4):
+ prediction_bak = prediction.clone()
+ dets = write_results(prediction.clone(), confidence, num_classes, nms, nms_conf)
+ if isinstance(dets, int):
+ return dets
+
+ if dets.shape[0] > 100:
+ nms_conf -= 0.05
+ dets = write_results(prediction_bak.clone(), confidence, num_classes, nms, nms_conf)
+
+ return dets
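+# If the first pass keeps more than 100 boxes, the IoU threshold is lowered by
+# 0.05 and NMS re-runs on a fresh copy, making suppression more aggressive so
+# fewer near-duplicate boxes survive.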
+
+
+def write_results(prediction, confidence, num_classes, nms=True, nms_conf=0.4):
+    conf_mask = (prediction[:, :, 4] > confidence).float().unsqueeze(2)
+ prediction = prediction * conf_mask
+
+ try:
+ ind_nz = torch.nonzero(prediction[:,:,4]).transpose(0,1).contiguous()
+ except:
+ return 0
+
+ box_a = prediction.new(prediction.shape)
+ box_a[:,:,0] = (prediction[:,:,0] - prediction[:,:,2]/2)
+ box_a[:,:,1] = (prediction[:,:,1] - prediction[:,:,3]/2)
+ box_a[:,:,2] = (prediction[:,:,0] + prediction[:,:,2]/2)
+ box_a[:,:,3] = (prediction[:,:,1] + prediction[:,:,3]/2)
+ prediction[:,:,:4] = box_a[:,:,:4]
+
+ batch_size = prediction.size(0)
+
+ output = prediction.new(1, prediction.size(2) + 1)
+ write = False
+ num = 0
+ for ind in range(batch_size):
+ #select the image from the batch
+ image_pred = prediction[ind]
+
+ #Get the class having maximum score, and the index of that class
+ #Get rid of num_classes softmax scores
+ #Add the class index and the class score of class having maximum score
+ max_conf, max_conf_score = torch.max(image_pred[:,5:5+ num_classes], 1)
+ max_conf = max_conf.float().unsqueeze(1)
+ max_conf_score = max_conf_score.float().unsqueeze(1)
+ seq = (image_pred[:,:5], max_conf, max_conf_score)
+ image_pred = torch.cat(seq, 1)
+
+ #Get rid of the zero entries
+ non_zero_ind = (torch.nonzero(image_pred[:,4]))
+
+ image_pred_ = image_pred[non_zero_ind.squeeze(),:].view(-1,7)
+
+ #Get the various classes detected in the image
+ try:
+ img_classes = unique(image_pred_[:,-1])
+ except:
+ continue
+
+        #We will do NMS classwise
+ #print(img_classes)
+ for cls in img_classes:
+ if cls != 0:
+ continue
+ #get the detections with one particular class
+ cls_mask = image_pred_*(image_pred_[:,-1] == cls).float().unsqueeze(1)
+ class_mask_ind = torch.nonzero(cls_mask[:,-2]).squeeze()
+
+ image_pred_class = image_pred_[class_mask_ind].view(-1,7)
+
+ #sort the detections such that the entry with the maximum objectness
+ #confidence is at the top
+ conf_sort_index = torch.sort(image_pred_class[:,4], descending = True )[1]
+ image_pred_class = image_pred_class[conf_sort_index]
+ idx = image_pred_class.size(0)
+
+ #if nms has to be done
+ if nms:
+ # Perform non-maximum suppression
+ max_detections = []
+ while image_pred_class.size(0):
+ # Get detection with highest confidence and save as max detection
+ max_detections.append(image_pred_class[0].unsqueeze(0))
+ # Stop if we're at the last detection
+ if len(image_pred_class) == 1:
+ break
+ # Get the IOUs for all boxes with lower confidence
+ ious = bbox_iou(max_detections[-1], image_pred_class[1:])
+ # Remove detections with IoU >= NMS threshold
+ image_pred_class = image_pred_class[1:][ious < nms_conf]
+
+ image_pred_class = torch.cat(max_detections).data
+
+
+            #Concatenate the batch_id of the image to the detection;
+            #this tells us which image the detection corresponds to.
+            #We use a flat structure to hold ALL the detections from the
+            #batch: the batch dim is flattened and each row carries its
+            #batch index in an extra leading column.
+
+ batch_ind = image_pred_class.new(image_pred_class.size(0), 1).fill_(ind)
+ seq = batch_ind, image_pred_class
+ if not write:
+ output = torch.cat(seq,1)
+ write = True
+ else:
+ out = torch.cat(seq,1)
+ output = torch.cat((output,out))
+ num += 1
+
+ if not num:
+ return 0
+
+ return output
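+# Each output row is (batch_index, x1, y1, x2, y2, objectness,
+# class_confidence, class_index). The `cls != 0` guard above keeps only COCO
+# class 0 (person), presumably because AlphaPose only needs human boxes.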
+
+# Half-precision (FP16) variants of predict_transform and write_results.
+# Originally a separate module created Sat Mar 24 00:12:16 2018 by ayooshmac.
+
+def predict_transform_half(prediction, inp_dim, anchors, num_classes, CUDA = True):
+ batch_size = prediction.size(0)
+ stride = inp_dim // prediction.size(2)
+
+ bbox_attrs = 5 + num_classes
+ num_anchors = len(anchors)
+ grid_size = inp_dim // stride
+
+
+ prediction = prediction.view(batch_size, bbox_attrs*num_anchors, grid_size*grid_size)
+ prediction = prediction.transpose(1,2).contiguous()
+ prediction = prediction.view(batch_size, grid_size*grid_size*num_anchors, bbox_attrs)
+
+
+    #Sigmoid the centre_X, centre_Y, and object confidence
+ prediction[:,:,0] = torch.sigmoid(prediction[:,:,0])
+ prediction[:,:,1] = torch.sigmoid(prediction[:,:,1])
+ prediction[:,:,4] = torch.sigmoid(prediction[:,:,4])
+
+
+ #Add the center offsets
+ grid_len = np.arange(grid_size)
+ a,b = np.meshgrid(grid_len, grid_len)
+
+ x_offset = torch.FloatTensor(a).view(-1,1)
+ y_offset = torch.FloatTensor(b).view(-1,1)
+
+ if CUDA:
+ x_offset = x_offset.cuda().half()
+ y_offset = y_offset.cuda().half()
+
+ x_y_offset = torch.cat((x_offset, y_offset), 1).repeat(1,num_anchors).view(-1,2).unsqueeze(0)
+
+ prediction[:,:,:2] += x_y_offset
+
+ #log space transform height and the width
+ anchors = torch.HalfTensor(anchors)
+
+ if CUDA:
+ anchors = anchors.cuda()
+
+ anchors = anchors.repeat(grid_size*grid_size, 1).unsqueeze(0)
+ prediction[:,:,2:4] = torch.exp(prediction[:,:,2:4])*anchors
+
+ #Softmax the class scores
+ prediction[:,:,5: 5 + num_classes] = nn.Softmax(-1)(Variable(prediction[:,:, 5 : 5 + num_classes])).data
+
+ prediction[:,:,:4] *= stride
+
+
+ return prediction
+
+
+def write_results_half(prediction, confidence, num_classes, nms = True, nms_conf = 0.4):
+ conf_mask = (prediction[:,:,4] > confidence).half().unsqueeze(2)
+ prediction = prediction*conf_mask
+
+ try:
+ ind_nz = torch.nonzero(prediction[:,:,4]).transpose(0,1).contiguous()
+ except:
+ return 0
+
+
+
+ box_a = prediction.new(prediction.shape)
+ box_a[:,:,0] = (prediction[:,:,0] - prediction[:,:,2]/2)
+ box_a[:,:,1] = (prediction[:,:,1] - prediction[:,:,3]/2)
+ box_a[:,:,2] = (prediction[:,:,0] + prediction[:,:,2]/2)
+ box_a[:,:,3] = (prediction[:,:,1] + prediction[:,:,3]/2)
+ prediction[:,:,:4] = box_a[:,:,:4]
+
+
+
+ batch_size = prediction.size(0)
+
+ output = prediction.new(1, prediction.size(2) + 1)
+ write = False
+
+ for ind in range(batch_size):
+ #select the image from the batch
+ image_pred = prediction[ind]
+
+
+ #Get the class having maximum score, and the index of that class
+ #Get rid of num_classes softmax scores
+ #Add the class index and the class score of class having maximum score
+ max_conf, max_conf_score = torch.max(image_pred[:,5:5+ num_classes], 1)
+ max_conf = max_conf.half().unsqueeze(1)
+ max_conf_score = max_conf_score.half().unsqueeze(1)
+ seq = (image_pred[:,:5], max_conf, max_conf_score)
+ image_pred = torch.cat(seq, 1)
+
+
+ #Get rid of the zero entries
+ non_zero_ind = (torch.nonzero(image_pred[:,4]))
+ try:
+ image_pred_ = image_pred[non_zero_ind.squeeze(),:]
+ except:
+ continue
+
+ #Get the various classes detected in the image
+ img_classes = unique(image_pred_[:,-1].long()).half()
+
+
+
+
+        #We will do NMS classwise
+ for cls in img_classes:
+ #get the detections with one particular class
+ cls_mask = image_pred_*(image_pred_[:,-1] == cls).half().unsqueeze(1)
+ class_mask_ind = torch.nonzero(cls_mask[:,-2]).squeeze()
+
+
+ image_pred_class = image_pred_[class_mask_ind]
+
+
+ #sort the detections such that the entry with the maximum objectness
+ #confidence is at the top
+ conf_sort_index = torch.sort(image_pred_class[:,4], descending = True )[1]
+ image_pred_class = image_pred_class[conf_sort_index]
+ idx = image_pred_class.size(0)
+
+ #if nms has to be done
+ if nms:
+ #For each detection
+ for i in range(idx):
+ #Get the IOUs of all boxes that come after the one we are looking at
+ #in the loop
+                    try:
+                        ious = bbox_iou(image_pred_class[i].unsqueeze(0), image_pred_class[i+1:])
+                    except (ValueError, IndexError):
+                        break
+
+                    #Zero out all the detections that have IoU > threshold
+ iou_mask = (ious < nms_conf).half().unsqueeze(1)
+ image_pred_class[i+1:] *= iou_mask
+
+ #Remove the non-zero entries
+ non_zero_ind = torch.nonzero(image_pred_class[:,4]).squeeze()
+ image_pred_class = image_pred_class[non_zero_ind]
+
+
+
+            #Concatenate the batch_id of the image to the detection;
+            #this tells us which image the detection corresponds to.
+            #We use a flat structure to hold ALL the detections from the
+            #batch: the batch dim is flattened and each row carries its
+            #batch index in an extra leading column.
+ batch_ind = image_pred_class.new(image_pred_class.size(0), 1).fill_(ind)
+ seq = batch_ind, image_pred_class
+
+ if not write:
+ output = torch.cat(seq,1)
+ write = True
+ else:
+ out = torch.cat(seq,1)
+ output = torch.cat((output,out))
+
+ return output
diff --git a/joints_detectors/Alphapose/yolo/video_demo.py b/joints_detectors/Alphapose/yolo/video_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..c55e6b7aba203fd5840c928f60e1359d7f57920a
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/video_demo.py
@@ -0,0 +1,186 @@
+from __future__ import division
+import time
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+import numpy as np
+import cv2
+from .util import *
+from .darknet import Darknet
+from .preprocess import prep_image, inp_to_image, letterbox_image
+import pandas as pd
+import random
+import pickle as pkl
+import argparse
+
+
+def get_test_input(input_dim, CUDA):
+ img = cv2.imread("dog-cycle-car.png")
+ img = cv2.resize(img, (input_dim, input_dim))
+ img_ = img[:,:,::-1].transpose((2,0,1))
+ img_ = img_[np.newaxis,:,:,:]/255.0
+ img_ = torch.from_numpy(img_).float()
+ img_ = Variable(img_)
+
+ if CUDA:
+ img_ = img_.cuda()
+
+ return img_
+
+def prep_image(img, inp_dim):
+ """
+ Prepare image for inputting to the neural network.
+
+    Returns a (tensor, original image, (width, height)) tuple
+ """
+
+ orig_im = img
+ dim = orig_im.shape[1], orig_im.shape[0]
+ img = (letterbox_image(orig_im, (inp_dim, inp_dim)))
+ img_ = img[:,:,::-1].transpose((2,0,1)).copy()
+ img_ = torch.from_numpy(img_).float().div(255.0).unsqueeze(0)
+ return img_, orig_im, dim
+
+def write(x, img):
+ c1 = tuple(x[1:3].int())
+ c2 = tuple(x[3:5].int())
+ cls = int(x[-1])
+ label = "{0}".format(classes[cls])
+ color = random.choice(colors)
+ cv2.rectangle(img, c1, c2,color, 1)
+ t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 1 , 1)[0]
+ c2 = c1[0] + t_size[0] + 3, c1[1] + t_size[1] + 4
+ cv2.rectangle(img, c1, c2,color, -1)
+ cv2.putText(img, label, (c1[0], c1[1] + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 1, [225,255,255], 1);
+ return img
+
+def arg_parse():
+ """
+    Parse arguments to the detect module
+
+ """
+
+
+ parser = argparse.ArgumentParser(description='YOLO v3 Video Detection Module')
+
+ parser.add_argument("--video", dest = 'video', help =
+ "Video to run detection upon",
+ default = "video.avi", type = str)
+ parser.add_argument("--dataset", dest = "dataset", help = "Dataset on which the network has been trained", default = "pascal")
+ parser.add_argument("--confidence", dest = "confidence", help = "Object Confidence to filter predictions", default = 0.5)
+    parser.add_argument("--nms_thresh", dest = "nms_thresh", help = "NMS Threshold", default = 0.4)
+ parser.add_argument("--cfg", dest = 'cfgfile', help =
+ "Config file",
+ default = "cfg/yolov3-spp.cfg", type = str)
+ parser.add_argument("--weights", dest = 'weightsfile', help =
+ "weightsfile",
+ default = "yolov3-spp.weights", type = str)
+ parser.add_argument("--reso", dest = 'reso', help =
+ "Input resolution of the network. Increase to increase accuracy. Decrease to increase speed",
+ default = "416", type = str)
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = arg_parse()
+ confidence = float(args.confidence)
+ nms_thesh = float(args.nms_thresh)
+ start = 0
+
+    CUDA = torch.cuda.is_available()
+
+    num_classes = 80
+    bbox_attrs = 5 + num_classes
+
+ print("Loading network.....")
+ model = Darknet(args.cfgfile)
+ model.load_weights(args.weightsfile)
+ print("Network successfully loaded")
+
+ model.net_info["height"] = args.reso
+ inp_dim = int(model.net_info["height"])
+ assert inp_dim % 32 == 0
+ assert inp_dim > 32
+
+ if CUDA:
+ model.cuda()
+
+ model(get_test_input(inp_dim, CUDA), CUDA)
+
+ model.eval()
+
+ videofile = args.video
+
+ cap = cv2.VideoCapture(videofile)
+
+ assert cap.isOpened(), 'Cannot capture source'
+
+ frames = 0
+ start = time.time()
+ while cap.isOpened():
+
+ ret, frame = cap.read()
+ if ret:
+
+
+ img, orig_im, dim = prep_image(frame, inp_dim)
+
+ im_dim = torch.FloatTensor(dim).repeat(1,2)
+
+
+ if CUDA:
+ im_dim = im_dim.cuda()
+ img = img.cuda()
+
+ with torch.no_grad():
+ output = model(Variable(img), CUDA)
+ output = write_results(output, confidence, num_classes, nms = True, nms_conf = nms_thesh)
+
+ if type(output) == int:
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ continue
+
+
+
+
+ im_dim = im_dim.repeat(output.size(0), 1)
+ scaling_factor = torch.min(inp_dim/im_dim,1)[0].view(-1,1)
+
+ output[:,[1,3]] -= (inp_dim - scaling_factor*im_dim[:,0].view(-1,1))/2
+ output[:,[2,4]] -= (inp_dim - scaling_factor*im_dim[:,1].view(-1,1))/2
+
+ output[:,1:5] /= scaling_factor
+
+ for i in range(output.shape[0]):
+ output[i, [1,3]] = torch.clamp(output[i, [1,3]], 0.0, im_dim[i,0])
+ output[i, [2,4]] = torch.clamp(output[i, [2,4]], 0.0, im_dim[i,1])
+
+ classes = load_classes('data/coco.names')
+ colors = pkl.load(open("pallete", "rb"))
+
+ list(map(lambda x: write(x, orig_im), output))
+
+
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+
+
+ else:
+ break
+
+
+
+
+
diff --git a/joints_detectors/Alphapose/yolo/video_demo_half.py b/joints_detectors/Alphapose/yolo/video_demo_half.py
new file mode 100644
index 0000000000000000000000000000000000000000..f710d496dd5fcb018569353131c4258483deb47c
--- /dev/null
+++ b/joints_detectors/Alphapose/yolo/video_demo_half.py
@@ -0,0 +1,189 @@
+from __future__ import division
+import time
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+import numpy as np
+import cv2
+from .util import *
+from .darknet import Darknet
+from .preprocess import prep_image, inp_to_image, letterbox_image
+import pandas as pd
+import random
+import pickle as pkl
+import argparse
+
+
+def get_test_input(input_dim, CUDA):
+ img = cv2.imread("dog-cycle-car.png")
+ img = cv2.resize(img, (input_dim, input_dim))
+ img_ = img[:,:,::-1].transpose((2,0,1))
+ img_ = img_[np.newaxis,:,:,:]/255.0
+ img_ = torch.from_numpy(img_).float()
+ img_ = Variable(img_)
+
+ if CUDA:
+ img_ = img_.cuda()
+
+ return img_
+
+def prep_image(img, inp_dim):
+ """
+ Prepare image for inputting to the neural network.
+
+    Returns a (tensor, original image, (width, height)) tuple
+ """
+
+ orig_im = img
+ dim = orig_im.shape[1], orig_im.shape[0]
+ img = (letterbox_image(orig_im, (inp_dim, inp_dim)))
+ img_ = img[:,:,::-1].transpose((2,0,1)).copy()
+ img_ = torch.from_numpy(img_).float().div(255.0).unsqueeze(0)
+ return img_, orig_im, dim
+
+def write(x, img):
+ c1 = tuple(x[1:3].int())
+ c2 = tuple(x[3:5].int())
+ cls = int(x[-1])
+ label = "{0}".format(classes[cls])
+ color = random.choice(colors)
+ cv2.rectangle(img, c1, c2,color, 1)
+ t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 1 , 1)[0]
+ c2 = c1[0] + t_size[0] + 3, c1[1] + t_size[1] + 4
+ cv2.rectangle(img, c1, c2,color, -1)
+ cv2.putText(img, label, (c1[0], c1[1] + t_size[1] + 4), cv2.FONT_HERSHEY_PLAIN, 1, [225,255,255], 1);
+ return img
+
+def arg_parse():
+ """
+    Parse arguments to the detect module
+
+ """
+
+
+    parser = argparse.ArgumentParser(description='YOLO v3 Video Detection Module')
+
+ parser.add_argument("--video", dest = 'video', help =
+ "Video to run detection upon",
+ default = "video.avi", type = str)
+ parser.add_argument("--dataset", dest = "dataset", help = "Dataset on which the network has been trained", default = "pascal")
+ parser.add_argument("--confidence", dest = "confidence", help = "Object Confidence to filter predictions", default = 0.5)
+    parser.add_argument("--nms_thresh", dest = "nms_thresh", help = "NMS Threshold", default = 0.4)
+ parser.add_argument("--cfg", dest = 'cfgfile', help =
+ "Config file",
+ default = "cfg/yolov3-spp.cfg", type = str)
+ parser.add_argument("--weights", dest = 'weightsfile', help =
+ "weightsfile",
+ default = "yolov3-spp.weights", type = str)
+ parser.add_argument("--reso", dest = 'reso', help =
+ "Input resolution of the network. Increase to increase accuracy. Decrease to increase speed",
+ default = "416", type = str)
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = arg_parse()
+ confidence = float(args.confidence)
+ nms_thesh = float(args.nms_thresh)
+ start = 0
+
+    CUDA = torch.cuda.is_available()
+
+    num_classes = 80
+    bbox_attrs = 5 + num_classes
+
+ print("Loading network.....")
+ model = Darknet(args.cfgfile)
+ model.load_weights(args.weightsfile)
+ print("Network successfully loaded")
+
+ model.net_info["height"] = args.reso
+ inp_dim = int(model.net_info["height"])
+ assert inp_dim % 32 == 0
+ assert inp_dim > 32
+
+
+ if CUDA:
+ model.cuda().half()
+
+ model(get_test_input(inp_dim, CUDA), CUDA)
+
+ model.eval()
+
+    videofile = args.video
+
+ cap = cv2.VideoCapture(videofile)
+
+ assert cap.isOpened(), 'Cannot capture source'
+
+ frames = 0
+ start = time.time()
+ while cap.isOpened():
+
+ ret, frame = cap.read()
+ if ret:
+
+
+ img, orig_im, dim = prep_image(frame, inp_dim)
+
+ im_dim = torch.FloatTensor(dim).repeat(1,2)
+
+
+ if CUDA:
+ img = img.cuda().half()
+ im_dim = im_dim.half().cuda()
+ write_results = write_results_half
+ predict_transform = predict_transform_half
+
+
+            #volatile=True was removed in PyTorch 0.4; torch.no_grad() is the replacement
+            with torch.no_grad():
+                output = model(Variable(img), CUDA)
+                output = write_results(output, confidence, num_classes, nms = True, nms_conf = nms_thesh)
+
+
+ if type(output) == int:
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ continue
+
+
+ im_dim = im_dim.repeat(output.size(0), 1)
+ scaling_factor = torch.min(inp_dim/im_dim,1)[0].view(-1,1)
+
+ output[:,[1,3]] -= (inp_dim - scaling_factor*im_dim[:,0].view(-1,1))/2
+ output[:,[2,4]] -= (inp_dim - scaling_factor*im_dim[:,1].view(-1,1))/2
+
+ output[:,1:5] /= scaling_factor
+
+ for i in range(output.shape[0]):
+ output[i, [1,3]] = torch.clamp(output[i, [1,3]], 0.0, im_dim[i,0])
+ output[i, [2,4]] = torch.clamp(output[i, [2,4]], 0.0, im_dim[i,1])
+
+
+ classes = load_classes('data/coco.names')
+ colors = pkl.load(open("pallete", "rb"))
+
+ list(map(lambda x: write(x, orig_im), output))
+
+
+ cv2.imshow("frame", orig_im)
+ key = cv2.waitKey(1)
+ if key & 0xFF == ord('q'):
+ break
+ frames += 1
+ print("FPS of the video is {:5.2f}".format( frames / (time.time() - start)))
+
+
+ else:
+ break
+
+
+
+
+
diff --git a/joints_detectors/__init__.py b/joints_detectors/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/joints_detectors/mediapipe/pose.py b/joints_detectors/mediapipe/pose.py
new file mode 100644
index 0000000000000000000000000000000000000000..23fbfe6869ebdd2a31c66c89b8456a7a4c7568e0
--- /dev/null
+++ b/joints_detectors/mediapipe/pose.py
@@ -0,0 +1,51 @@
+from tqdm import tqdm
+import mediapipe as mp
+import numpy as np
+import cv2
+
+pose = mp.solutions.pose.Pose(static_image_mode=False, min_detection_confidence=0.5, min_tracking_confidence=0.5)
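+# static_image_mode=False puts MediaPipe Pose in video mode: the person
+# detector runs once and an internal tracker follows across frames,
+# re-detecting only when tracking confidence drops below min_tracking_confidence.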
+
+def generate_kpts(video_file):
+ vid = cv2.VideoCapture(video_file)
+ kpts = []
+ video_length = int(vid.get(cv2.CAP_PROP_FRAME_COUNT))
+
+ for i in tqdm(range(video_length)):
+ ret, frame = vid.read()
+ if not ret:
+ break
+
+ results = pose.process(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
+        if not results.pose_landmarks:
+ kpts.append(kpts[-1] if len(kpts) > 0 else [[0, 0] for _ in range(17)])
+ continue
+
+ # Take the coco keypoints
+ l = results.pose_landmarks.landmark
+ pl = mp.solutions.pose.PoseLandmark
+ kpts.append([
+ [l[pl.NOSE].x, l[pl.NOSE].y],
+ [l[pl.LEFT_EYE].x, l[pl.LEFT_EYE].y],
+ [l[pl.RIGHT_EYE].x, l[pl.RIGHT_EYE].y],
+ [l[pl.LEFT_EAR].x, l[pl.LEFT_EAR].y],
+ [l[pl.RIGHT_EAR].x, l[pl.RIGHT_EAR].y],
+ [l[pl.LEFT_SHOULDER].x, l[pl.LEFT_SHOULDER].y],
+ [l[pl.RIGHT_SHOULDER].x, l[pl.RIGHT_SHOULDER].y],
+ [l[pl.LEFT_ELBOW].x, l[pl.LEFT_ELBOW].y],
+ [l[pl.RIGHT_ELBOW].x, l[pl.RIGHT_ELBOW].y],
+ [l[pl.LEFT_WRIST].x, l[pl.LEFT_WRIST].y],
+ [l[pl.RIGHT_WRIST].x, l[pl.RIGHT_WRIST].y],
+ [l[pl.LEFT_HIP].x, l[pl.LEFT_HIP].y],
+ [l[pl.RIGHT_HIP].x, l[pl.RIGHT_HIP].y],
+ [l[pl.LEFT_KNEE].x, l[pl.LEFT_KNEE].y],
+ [l[pl.RIGHT_KNEE].x, l[pl.RIGHT_KNEE].y],
+ [l[pl.LEFT_ANKLE].x, l[pl.LEFT_ANKLE].y],
+ [l[pl.RIGHT_ANKLE].x, l[pl.RIGHT_ANKLE].y]
+ ])
+
+ # multiply all the x coordinates with frame width and y coordinates with frame height.
+ for i in range(len(kpts[-1])):
+ kpts[-1][i][0] *= frame.shape[1]
+ kpts[-1][i][1] *= frame.shape[0]
+
+ return np.array(kpts)
\ No newline at end of file
diff --git a/joints_detectors/openpose/README.md b/joints_detectors/openpose/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..648314d88913e688c3542c4f1521234e7a626586
--- /dev/null
+++ b/joints_detectors/openpose/README.md
@@ -0,0 +1,4 @@
+OpenPose must be compiled against this conda environment's Python before the OpenPose Python API can be called from here.
+
+
+
diff --git a/joints_detectors/openpose/main.py b/joints_detectors/openpose/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2f33174c1ced8b53bd46e05c4253415aec71a63
--- /dev/null
+++ b/joints_detectors/openpose/main.py
@@ -0,0 +1,126 @@
+import os
+import sys
+
+import cv2
+
+dir_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.insert(0, dir_path)
+import ipdb
+
+pdb = ipdb.set_trace
+import argparse
+from tqdm import tqdm
+from utils import convert
+import numpy as np
+
+sys.path.remove(dir_path)
+
+try:
+ from openpose import pyopenpose as op
+except ImportError as e:
+ print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?')
+ raise e
+
+# Flags
+parser = argparse.ArgumentParser()
+parser.add_argument("--image_path", default="../../examples/media/COCO_val2014_000000000192.jpg",
+ help="Process an image. Read all standard formats (jpg, png, bmp, etc.).")
+args = parser.parse_known_args()
+
+params = dict()
+cur_dir = os.path.dirname(os.path.abspath(__file__))
+params["model_folder"] = cur_dir + "/models/"
+params['tracking'] = 5
+params['number_people_max'] = 1
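+# tracking=5 asks OpenPose to interleave its (experimental) tracker with a full
+# keypoint detection roughly every 5 frames; number_people_max=1 keeps only the
+# most prominent person, matching the single-person assertion in generate_kpts.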
+
+
+# params['num_gpu'] = 1
+# params['num_gpu_start'] = 1
+# import ipdb;ipdb.set_trace()
+
+
+def load_model():
+ try:
+ opWrapper = op.WrapperPython()
+ opWrapper.configure(params)
+ opWrapper.start()
+ except Exception as e:
+ print(e)
+ sys.exit(-1)
+
+ return opWrapper
+
+
+def test_video(model, video_name=0):
+ opWrapper = model
+
+ cam = cv2.VideoCapture(video_name)
+ # warm up
+ for i in range(5):
+ datum = op.Datum()
+ _, imageToProcess = cam.read()
+ datum.cvInputData = imageToProcess
+ opWrapper.emplaceAndPop([datum])
+
+ for i in tqdm(range(2000)):
+ datum = op.Datum()
+ _, imageToProcess = cam.read()
+ datum.cvInputData = imageToProcess
+ opWrapper.emplaceAndPop([datum])
+
+ # Display Image
+ # print("Body keypoints: \n" + str(datum.poseKeypoints))
+ # cv2.imshow("OpenPose 1.4.0 - Tutorial Python API", datum.cvOutputData)
+ # cv2.waitKey(10)
+ # cv2.destroyAllWindows()
+
+
+def generate_kpts(video_name):
+ kpt_results = []
+
+ cap = cv2.VideoCapture(video_name)
+ length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+ opWrapper = load_model()
+ for i in tqdm(range(length)):
+
+ try:
+ datum = op.Datum()
+ _, imageToProcess = cap.read()
+ datum.cvInputData = imageToProcess
+ opWrapper.emplaceAndPop([datum])
+ results = datum.poseKeypoints
+
+            assert len(results) == 1, 'VideoPose3D only supports single-person reconstruction'
+ # 25 to 17
+ kpts = convert(results[0])
+ kpt_results.append(kpts)
+ except Exception as e:
+ print(e)
+
+ # pose processes
+ result = np.array(kpt_results)
+
+ # # save
+ # name = '/home/xyliu/experiments/VideoPose3D/data/tmp.npz'
+ # kpts = result.astype(np.float32)
+ # print('kpts npz save in ', name)
+ # np.savez_compressed(name, kpts=kpts)
+ return result
+
+
+def generate_frame_kpt(frame, opWrapper):
+    '''
+    Run OpenPose on a single frame with an already-loaded model (opWrapper)
+    '''
+ datum = op.Datum()
+ datum.cvInputData = frame
+ opWrapper.emplaceAndPop([datum])
+ re = datum.poseKeypoints
+    assert len(re) == 1, 'VideoPose3D only supports single-person reconstruction'
+ kpt = convert(re[0])
+
+ return kpt
+
+
+if __name__ == "__main__":
+    generate_kpts(os.path.join(os.environ.get('VIDEO_PATH', '.'), 'dance.mp4'))
diff --git a/joints_detectors/openpose/models/cameraParameters/flir/17012332.xml.example b/joints_detectors/openpose/models/cameraParameters/flir/17012332.xml.example
new file mode 100644
index 0000000000000000000000000000000000000000..84aa5e0a155e8bf0eee4e12040d01f557c6c98e0
--- /dev/null
+++ b/joints_detectors/openpose/models/cameraParameters/flir/17012332.xml.example
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<opencv_storage>
+<CameraMatrix type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>4</cols>
+  <dt>d</dt>
+  <data>
+    1. 0. 0. 0.
+    0. 1. 0. 0.
+    0. 0. 1. 0.</data></CameraMatrix>
+<Intrinsics type_id="opencv-matrix">
+  <rows>3</rows>
+  <cols>3</cols>
+  <dt>d</dt>
+  <data>
+    8.1793481631740565e+02 0. 6.0070689997785121e+02 0.
+    8.1651774059837908e+02 5.1784529566329593e+02 0. 0. 1.</data></Intrinsics>
+<Distortion type_id="opencv-matrix">
+  <rows>8</rows>
+  <cols>1</cols>
+  <dt>d</dt>
+  <data>
+    -1.8102158829399091e+00 9.1966147162623262e+00
+    -4.4293900343777355e-04 1.3638377686816653e-03
+    1.3303863414979364e+00 -1.4189051636354870e+00
+    8.4725535468475819e+00 4.7911023525901033e+00</data></Distortion>
+</opencv_storage>
diff --git a/joints_detectors/openpose/models/face/haarcascade_frontalface_alt.xml b/joints_detectors/openpose/models/face/haarcascade_frontalface_alt.xml
new file mode 100644
index 0000000000000000000000000000000000000000..665f4f9475b07d498855e68c57e0e12e4363be85
--- /dev/null
+++ b/joints_detectors/openpose/models/face/haarcascade_frontalface_alt.xml
@@ -0,0 +1,39705 @@
+
+
+
+
+ BOOST
+ HAAR
+ 20
+ 20
+
+ 213
+
+
+ 0
+
+ 22
+
+ <_>
+ 3
+ 8.2268941402435303e-01
+
+ <_>
+
+ 0 -1 0 4.0141958743333817e-03
+
+
+ 3.3794190734624863e-02 8.3781069517135620e-01
+
+
+ <_>
+
+ 0 -1 1 1.5151339583098888e-02
+
+
+ 1.5141320228576660e-01 7.4888122081756592e-01
+
+
+ <_>
+
+ 0 -1 2 4.2109931819140911e-03
+
+
+ 9.0049281716346741e-02 6.3748198747634888e-01
+
+
+
+
+ <_>
+ 16
+ 6.9566087722778320e+00
+
+ <_>
+
+ 0 -1 3 1.6227109590545297e-03
+
+
+ 6.9308586418628693e-02 7.1109461784362793e-01
+
+
+ <_>
+
+ 0 -1 4 2.2906649392098188e-03
+
+
+ 1.7958030104637146e-01 6.6686922311782837e-01
+
+
+ <_>
+
+ 0 -1 5 5.0025708042085171e-03
+
+
+ 1.6936729848384857e-01 6.5540069341659546e-01
+
+
+ <_>
+
+ 0 -1 6 7.9659894108772278e-03
+
+
+ 5.8663320541381836e-01 9.1414518654346466e-02
+
+
+ <_>
+
+ 0 -1 7 -3.5227010957896709e-03
+
+
+ 1.4131669700145721e-01 6.0318958759307861e-01
+
+
+ <_>
+
+ 0 -1 8 3.6667689681053162e-02
+
+
+ 3.6756721138954163e-01 7.9203182458877563e-01
+
+
+ <_>
+
+ 0 -1 9 9.3361474573612213e-03
+
+
+ 6.1613857746124268e-01 2.0885099470615387e-01
+
+
+ <_>
+
+ 0 -1 10 8.6961314082145691e-03
+
+
+ 2.8362309932708740e-01 6.3602739572525024e-01
+
+
+ <_>
+
+ 0 -1 11 1.1488880263641477e-03
+
+
+ 2.2235809266567230e-01 5.8007007837295532e-01
+
+
+ <_>
+
+ 0 -1 12 -2.1484689787030220e-03
+
+
+ 2.4064640700817108e-01 5.7870548963546753e-01
+
+
+ <_>
+
+ 0 -1 13 2.1219060290604830e-03
+
+
+ 5.5596548318862915e-01 1.3622370362281799e-01
+
+
+ <_>
+
+ 0 -1 14 -9.3949146568775177e-02
+
+
+ 8.5027372837066650e-01 4.7177401185035706e-01
+
+
+ <_>
+
+ 0 -1 15 1.3777789426967502e-03
+
+
+ 5.9936738014221191e-01 2.8345298767089844e-01
+
+
+ <_>
+
+ 0 -1 16 7.3063157498836517e-02
+
+
+ 4.3418860435485840e-01 7.0600342750549316e-01
+
+
+ <_>
+
+ 0 -1 17 3.6767389974556863e-04
+
+
+ 3.0278879404067993e-01 6.0515749454498291e-01
+
+
+ <_>
+
+ 0 -1 18 -6.0479710809886456e-03
+
+
+ 1.7984339594841003e-01 5.6752568483352661e-01
+
+
+
+
+ <_>
+ 21
+ 9.4985427856445312e+00
+
+ <_>
+
+ 0 -1 19 -1.6510689631104469e-02
+
+
+ 6.6442251205444336e-01 1.4248579740524292e-01
+
+
+ <_>
+
+ 0 -1 20 2.7052499353885651e-03
+
+
+ 6.3253521919250488e-01 1.2884770333766937e-01
+
+
+ <_>
+
+ 0 -1 21 2.8069869149476290e-03
+
+
+ 1.2402880191802979e-01 6.1931931972503662e-01
+
+
+ <_>
+
+ 0 -1 22 -1.5402400167658925e-03
+
+
+ 1.4321430027484894e-01 5.6700158119201660e-01
+
+
+ <_>
+
+ 0 -1 23 -5.6386279175058007e-04
+
+
+ 1.6574330627918243e-01 5.9052079916000366e-01
+
+
+ <_>
+
+ 0 -1 24 1.9253729842603207e-03
+
+
+ 2.6955071091651917e-01 5.7388240098953247e-01
+
+
+ <_>
+
+ 0 -1 25 -5.0214841030538082e-03
+
+
+ 1.8935389816761017e-01 5.7827740907669067e-01
+
+
+ <_>
+
+ 0 -1 26 2.6365420781075954e-03
+
+
+ 2.3093290627002716e-01 5.6954258680343628e-01
+
+
+ <_>
+
+ 0 -1 27 -1.5127769438549876e-03
+
+
+ 2.7596020698547363e-01 5.9566420316696167e-01
+
+
+ <_>
+
+ 0 -1 28 -1.0157439857721329e-02
+
+
+ 1.7325380444526672e-01 5.5220472812652588e-01
+
+
+ <_>
+
+ 0 -1 29 -1.1953660286962986e-02
+
+
+ 1.3394099473953247e-01 5.5590140819549561e-01
+
+
+ <_>
+
+ 0 -1 30 4.8859491944313049e-03
+
+
+ 3.6287039518356323e-01 6.1888492107391357e-01
+
+
+ <_>
+
+ 0 -1 31 -8.0132916569709778e-02
+
+
+ 9.1211050748825073e-02 5.4759448766708374e-01
+
+
+ <_>
+
+ 0 -1 32 1.0643280111253262e-03
+
+
+ 3.7151429057121277e-01 5.7113999128341675e-01
+
+
+ <_>
+
+ 0 -1 33 -1.3419450260698795e-03
+
+
+ 5.9533137083053589e-01 3.3180978894233704e-01
+
+
+ <_>
+
+ 0 -1 34 -5.4601140320301056e-02
+
+
+ 1.8440659344196320e-01 5.6028461456298828e-01
+
+
+ <_>
+
+ 0 -1 35 2.9071690514683723e-03
+
+
+ 3.5942441225051880e-01 6.1317151784896851e-01
+
+
+ <_>
+
+ 0 -1 36 7.4718717951327562e-04
+
+
+ 5.9943532943725586e-01 3.4595629572868347e-01
+
+
+ <_>
+
+ 0 -1 37 4.3013808317482471e-03
+
+
+ 4.1726520657539368e-01 6.9908452033996582e-01
+
+
+ <_>
+
+ 0 -1 38 4.5017572119832039e-03
+
+
+ 4.5097151398658752e-01 7.8014570474624634e-01
+
+
+ <_>
+
+ 0 -1 39 2.4138500913977623e-02
+
+
+ 5.4382127523422241e-01 1.3198269903659821e-01
+
+
+
+
+ <_>
+ 39
+ 1.8412969589233398e+01
+
+ <_>
+
+ 0 -1 40 1.9212230108678341e-03
+
+
+ 1.4152669906616211e-01 6.1998707056045532e-01
+
+
+ <_>
+
+ 0 -1 41 -1.2748669541906565e-04
+
+
+ 6.1910742521286011e-01 1.8849289417266846e-01
+
+
+ <_>
+
+ 0 -1 42 5.1409931620582938e-04
+
+
+ 1.4873969554901123e-01 5.8579277992248535e-01
+
+
+ <_>
+
+ 0 -1 43 4.1878609918057919e-03
+
+
+ 2.7469098567962646e-01 6.3592398166656494e-01
+
+
+ <_>
+
+ 0 -1 44 5.1015717908740044e-03
+
+
+ 5.8708512783050537e-01 2.1756289899349213e-01
+
+
+ <_>
+
+ 0 -1 45 -2.1448440384119749e-03
+
+
+ 5.8809447288513184e-01 2.9795908927917480e-01
+
+
+ <_>
+
+ 0 -1 46 -2.8977119363844395e-03
+
+
+ 2.3733270168304443e-01 5.8766472339630127e-01
+
+
+ <_>
+
+ 0 -1 47 -2.1610679104924202e-02
+
+
+ 1.2206549942493439e-01 5.1942020654678345e-01
+
+
+ <_>
+
+ 0 -1 48 -4.6299318782985210e-03
+
+
+ 2.6312309503555298e-01 5.8174091577529907e-01
+
+
+ <_>
+
+ 0 -1 49 5.9393711853772402e-04
+
+
+ 3.6386200785636902e-01 5.6985449790954590e-01
+
+
+ <_>
+
+ 0 -1 50 5.3878661245107651e-02
+
+
+ 4.3035310506820679e-01 7.5593662261962891e-01
+
+
+ <_>
+
+ 0 -1 51 1.8887349870055914e-03
+
+
+ 2.1226030588150024e-01 5.6134271621704102e-01
+
+
+ <_>
+
+ 0 -1 52 -2.3635339457541704e-03
+
+
+ 5.6318491697311401e-01 2.6427671313285828e-01
+
+
+ <_>
+
+ 0 -1 53 2.4017799645662308e-02
+
+
+ 5.7971078157424927e-01 2.7517059445381165e-01
+
+
+ <_>
+
+ 0 -1 54 2.0543030404951423e-04
+
+
+ 2.7052420377731323e-01 5.7525688409805298e-01
+
+
+ <_>
+
+ 0 -1 55 8.4790197433903813e-04
+
+
+ 5.4356247186660767e-01 2.3348769545555115e-01
+
+
+ <_>
+
+ 0 -1 56 1.4091329649090767e-03
+
+
+ 5.3194248676300049e-01 2.0631550252437592e-01
+
+
+ <_>
+
+ 0 -1 57 1.4642629539594054e-03
+
+
+ 5.4189807176589966e-01 3.0688610672950745e-01
+
+
+ <_>
+
+ 0 -1 58 1.6352549428120255e-03
+
+
+ 3.6953729391098022e-01 6.1128681898117065e-01
+
+
+ <_>
+
+ 0 -1 59 8.3172752056270838e-04
+
+
+ 3.5650369524955750e-01 6.0252362489700317e-01
+
+
+ <_>
+
+ 0 -1 60 -2.0998890977352858e-03
+
+
+ 1.9139820337295532e-01 5.3628271818161011e-01
+
+
+ <_>
+
+ 0 -1 61 -7.4213981861248612e-04
+
+
+ 3.8355550169944763e-01 5.5293101072311401e-01
+
+
+ <_>
+
+ 0 -1 62 3.2655049581080675e-03
+
+
+ 4.3128961324691772e-01 7.1018958091735840e-01
+
+
+ <_>
+
+ 0 -1 63 8.9134991867467761e-04
+
+
+ 3.9848309755325317e-01 6.3919639587402344e-01
+
+
+ <_>
+
+ 0 -1 64 -1.5284179709851742e-02
+
+
+ 2.3667329549789429e-01 5.4337137937545776e-01
+
+
+ <_>
+
+ 0 -1 65 4.8381411470472813e-03
+
+
+ 5.8175009489059448e-01 3.2391890883445740e-01
+
+
+ <_>
+
+ 0 -1 66 -9.1093179071322083e-04
+
+
+ 5.5405938625335693e-01 2.9118689894676208e-01
+
+
+ <_>
+
+ 0 -1 67 -6.1275060288608074e-03
+
+
+ 1.7752550542354584e-01 5.1966291666030884e-01
+
+
+ <_>
+
+ 0 -1 68 -4.4576259097084403e-04
+
+
+ 3.0241701006889343e-01 5.5335938930511475e-01
+
+
+ <_>
+
+ 0 -1 69 2.2646540775895119e-02
+
+
+ 4.4149309396743774e-01 6.9753772020339966e-01
+
+
+ <_>
+
+ 0 -1 70 -1.8804960418492556e-03
+
+
+ 2.7913948893547058e-01 5.4979521036148071e-01
+
+
+ <_>
+
+ 0 -1 71 7.0889107882976532e-03
+
+
+ 5.2631992101669312e-01 2.3855470120906830e-01
+
+
+ <_>
+
+ 0 -1 72 1.7318050377070904e-03
+
+
+ 4.3193790316581726e-01 6.9836008548736572e-01
+
+
+ <_>
+
+ 0 -1 73 -6.8482700735330582e-03
+
+
+ 3.0820429325103760e-01 5.3909200429916382e-01
+
+
+ <_>
+
+ 0 -1 74 -1.5062530110299122e-05
+
+
+ 5.5219221115112305e-01 3.1203660368919373e-01
+
+
+ <_>
+
+ 0 -1 75 2.9475569725036621e-02
+
+
+ 5.4013228416442871e-01 1.7706030607223511e-01
+
+
+ <_>
+
+ 0 -1 76 8.1387329846620560e-03
+
+
+ 5.1786178350448608e-01 1.2110190093517303e-01
+
+
+ <_>
+
+ 0 -1 77 2.0942950621247292e-02
+
+
+ 5.2902942895889282e-01 3.3112218976020813e-01
+
+
+ <_>
+
+ 0 -1 78 -9.5665529370307922e-03
+
+
+ 7.4719941616058350e-01 4.4519689679145813e-01
+
+
+
+
+ <_>
+ 33
+ 1.5324139595031738e+01
+
+ <_>
+
+ 0 -1 79 -2.8206960996612906e-04
+
+
+ 2.0640860497951508e-01 6.0767322778701782e-01
+
+
+ <_>
+
+ 0 -1 80 1.6790600493550301e-03
+
+
+ 5.8519971370697021e-01 1.2553839385509491e-01
+
+
+ <_>
+
+ 0 -1 81 6.9827912375330925e-04
+
+
+ 9.4018429517745972e-02 5.7289612293243408e-01
+
+
+ <_>
+
+ 0 -1 82 7.8959012171253562e-04
+
+
+ 1.7819879949092865e-01 5.6943088769912720e-01
+
+
+ <_>
+
+ 0 -1 83 -2.8560499195009470e-03
+
+
+ 1.6383990645408630e-01 5.7886648178100586e-01
+
+
+ <_>
+
+ 0 -1 84 -3.8122469559311867e-03
+
+
+ 2.0854400098323822e-01 5.5085647106170654e-01
+
+
+ <_>
+
+ 0 -1 85 1.5896620461717248e-03
+
+
+ 5.7027608156204224e-01 1.8572150170803070e-01
+
+
+ <_>
+
+ 0 -1 86 1.0078339837491512e-02
+
+
+ 5.1169431209564209e-01 2.1897700428962708e-01
+
+
+ <_>
+
+ 0 -1 87 -6.3526302576065063e-02
+
+
+ 7.1313798427581787e-01 4.0438130497932434e-01
+
+
+ <_>
+
+ 0 -1 88 -9.1031491756439209e-03
+
+
+ 2.5671818852424622e-01 5.4639732837677002e-01
+
+
+ <_>
+
+ 0 -1 89 -2.4035000242292881e-03
+
+
+ 1.7006659507751465e-01 5.5909740924835205e-01
+
+
+ <_>
+
+ 0 -1 90 1.5226360410451889e-03
+
+
+ 5.4105567932128906e-01 2.6190540194511414e-01
+
+
+ <_>
+
+ 0 -1 91 1.7997439950704575e-02
+
+
+ 3.7324368953704834e-01 6.5352207422256470e-01
+
+
+ <_>
+
+ 0 -1 92 -6.4538191072642803e-03
+
+
+ 2.6264819502830505e-01 5.5374461412429810e-01
+
+
+ <_>
+
+ 0 -1 93 -1.1880760081112385e-02
+
+
+ 2.0037539303302765e-01 5.5447459220886230e-01
+
+
+ <_>
+
+ 0 -1 94 1.2713660253211856e-03
+
+
+ 5.5919027328491211e-01 3.0319759249687195e-01
+
+
+ <_>
+
+ 0 -1 95 1.1376109905540943e-03
+
+
+ 2.7304071187973022e-01 5.6465089321136475e-01
+
+
+ <_>
+
+ 0 -1 96 -4.2651998810470104e-03
+
+
+ 1.4059090614318848e-01 5.4618209600448608e-01
+
+
+ <_>
+
+ 0 -1 97 -2.9602861031889915e-03
+
+
+ 1.7950350046157837e-01 5.4592901468276978e-01
+
+
+ <_>
+
+ 0 -1 98 -8.8448226451873779e-03
+
+
+ 5.7367831468582153e-01 2.8092199563980103e-01
+
+
+ <_>
+
+ 0 -1 99 -6.6430689767003059e-03
+
+
+ 2.3706759512424469e-01 5.5038261413574219e-01
+
+
+ <_>
+
+ 0 -1 100 3.9997808635234833e-03
+
+
+ 5.6081998348236084e-01 3.3042821288108826e-01
+
+
+ <_>
+
+ 0 -1 101 -4.1221720166504383e-03
+
+
+ 1.6401059925556183e-01 5.3789931535720825e-01
+
+
+ <_>
+
+ 0 -1 102 1.5624909661710262e-02
+
+
+ 5.2276492118835449e-01 2.2886039316654205e-01
+
+
+ <_>
+
+ 0 -1 103 -1.0356419719755650e-02
+
+
+ 7.0161938667297363e-01 4.2529278993606567e-01
+
+
+ <_>
+
+ 0 -1 104 -8.7960809469223022e-03
+
+
+ 2.7673470973968506e-01 5.3558301925659180e-01
+
+
+ <_>
+
+ 0 -1 105 1.6226939857006073e-01
+
+
+ 4.3422400951385498e-01 7.4425792694091797e-01
+
+
+ <_>
+
+ 0 -1 106 4.5542530715465546e-03
+
+
+ 5.7264858484268188e-01 2.5821250677108765e-01
+
+
+ <_>
+
+ 0 -1 107 -2.1309209987521172e-03
+
+
+ 2.1068480610847473e-01 5.3610187768936157e-01
+
+
+ <_>
+
+ 0 -1 108 -1.3208420015871525e-02
+
+
+ 7.5937908887863159e-01 4.5524680614471436e-01
+
+
+ <_>
+
+ 0 -1 109 -6.5996676683425903e-02
+
+
+ 1.2524759769439697e-01 5.3440397977828979e-01
+
+
+ <_>
+
+ 0 -1 110 7.9142656177282333e-03
+
+
+ 3.3153840899467468e-01 5.6010431051254272e-01
+
+
+ <_>
+
+ 0 -1 111 2.0894279703497887e-02
+
+
+ 5.5060499906539917e-01 2.7688381075859070e-01
+
+
+
+
+ <_>
+ 44
+ 2.1010639190673828e+01
+
+ <_>
+
+ 0 -1 112 1.1961159761995077e-03
+
+
+ 1.7626909911632538e-01 6.1562412977218628e-01
+
+
+ <_>
+
+ 0 -1 113 -1.8679830245673656e-03
+
+
+ 6.1181068420410156e-01 1.8323999643325806e-01
+
+
+ <_>
+
+ 0 -1 114 -1.9579799845814705e-04
+
+
+ 9.9044263362884521e-02 5.7238161563873291e-01
+
+
+ <_>
+
+ 0 -1 115 -8.0255657667294145e-04
+
+
+ 5.5798798799514771e-01 2.3772829771041870e-01
+
+
+ <_>
+
+ 0 -1 116 -2.4510810617357492e-03
+
+
+ 2.2314579784870148e-01 5.8589351177215576e-01
+
+
+ <_>
+
+ 0 -1 117 5.0361850298941135e-04
+
+
+ 2.6539939641952515e-01 5.7941037416458130e-01
+
+
+ <_>
+
+ 0 -1 118 4.0293349884450436e-03
+
+
+ 5.8038270473480225e-01 2.4848650395870209e-01
+
+
+ <_>
+
+ 0 -1 119 -1.4451709575951099e-02
+
+
+ 1.8303519487380981e-01 5.4842048883438110e-01
+
+
+ <_>
+
+ 0 -1 120 2.0380979403853416e-03
+
+
+ 3.3635589480400085e-01 6.0510927438735962e-01
+
+
+ <_>
+
+ 0 -1 121 -1.6155190533027053e-03
+
+
+ 2.2866420447826385e-01 5.4412460327148438e-01
+
+
+ <_>
+
+ 0 -1 122 3.3458340913057327e-03
+
+
+ 5.6259131431579590e-01 2.3923380672931671e-01
+
+
+ <_>
+
+ 0 -1 123 1.6379579901695251e-03
+
+
+ 3.9069938659667969e-01 5.9646219015121460e-01
+
+
+ <_>
+
+ 0 -1 124 3.0251210555434227e-02
+
+
+ 5.2484822273254395e-01 1.5757469832897186e-01
+
+
+ <_>
+
+ 0 -1 125 3.7251990288496017e-02
+
+
+ 4.1943109035491943e-01 6.7484188079833984e-01
+
+
+ <_>
+
+ 0 -1 126 -2.5109790265560150e-02
+
+
+ 1.8825499713420868e-01 5.4734510183334351e-01
+
+
+ <_>
+
+ 0 -1 127 -5.3099058568477631e-03
+
+
+ 1.3399730622768402e-01 5.2271109819412231e-01
+
+
+ <_>
+
+ 0 -1 128 1.2086479691788554e-03
+
+
+ 3.7620881199836731e-01 6.1096358299255371e-01
+
+
+ <_>
+
+ 0 -1 129 -2.1907679736614227e-02
+
+
+ 2.6631429791450500e-01 5.4040068387985229e-01
+
+
+ <_>
+
+ 0 -1 130 5.4116579703986645e-03
+
+
+ 5.3635787963867188e-01 2.2322730720043182e-01
+
+
+ <_>
+
+ 0 -1 131 6.9946326315402985e-02
+
+
+ 5.3582328557968140e-01 2.4536980688571930e-01
+
+
+ <_>
+
+ 0 -1 132 3.4520021290518343e-04
+
+
+ 2.4096719920635223e-01 5.3769302368164062e-01
+
+
+ <_>
+
+ 0 -1 133 1.2627709656953812e-03
+
+
+ 5.4258567094802856e-01 3.1556931138038635e-01
+
+
+ <_>
+
+ 0 -1 134 2.2719509899616241e-02
+
+
+ 4.1584059596061707e-01 6.5978652238845825e-01
+
+
+ <_>
+
+ 0 -1 135 -1.8111000536009669e-03
+
+
+ 2.8112530708312988e-01 5.5052447319030762e-01
+
+
+ <_>
+
+ 0 -1 136 3.3469670452177525e-03
+
+
+ 5.2600282430648804e-01 1.8914650380611420e-01
+
+
+ <_>
+
+ 0 -1 137 4.0791751234792173e-04
+
+
+ 5.6735092401504517e-01 3.3442100882530212e-01
+
+
+ <_>
+
+ 0 -1 138 1.2734799645841122e-02
+
+
+ 5.3435921669006348e-01 2.3956120014190674e-01
+
+
+ <_>
+
+ 0 -1 139 -7.3119727894663811e-03
+
+
+ 6.0108900070190430e-01 4.0222078561782837e-01
+
+
+ <_>
+
+ 0 -1 140 -5.6948751211166382e-02
+
+
+ 8.1991511583328247e-01 4.5431908965110779e-01
+
+
+ <_>
+
+ 0 -1 141 -5.0116591155529022e-03
+
+
+ 2.2002810239791870e-01 5.3577107191085815e-01
+
+
+ <_>
+
+ 0 -1 142 6.0334368608891964e-03
+
+
+ 4.4130811095237732e-01 7.1817511320114136e-01
+
+
+ <_>
+
+ 0 -1 143 3.9437441155314445e-03
+
+
+ 5.4788607358932495e-01 2.7917331457138062e-01
+
+
+ <_>
+
+ 0 -1 144 -3.6591119132936001e-03
+
+
+ 6.3578677177429199e-01 3.9897239208221436e-01
+
+
+ <_>
+
+ 0 -1 145 -3.8456181064248085e-03
+
+
+ 3.4936860203742981e-01 5.3006649017333984e-01
+
+
+ <_>
+
+ 0 -1 146 -7.1926261298358440e-03
+
+
+ 1.1196149885654449e-01 5.2296727895736694e-01
+
+
+ <_>
+
+ 0 -1 147 -5.2798941731452942e-02
+
+
+ 2.3871029913425446e-01 5.4534512758255005e-01
+
+
+ <_>
+
+ 0 -1 148 -7.9537667334079742e-03
+
+
+ 7.5869178771972656e-01 4.4393768906593323e-01
+
+
+ <_>
+
+ 0 -1 149 -2.7344180271029472e-03
+
+
+ 2.5654768943786621e-01 5.4893219470977783e-01
+
+
+ <_>
+
+ 0 -1 150 -1.8507939530536532e-03
+
+
+ 6.7343479394912720e-01 4.2524749040603638e-01
+
+
+ <_>
+
+ 0 -1 151 1.5918919816613197e-02
+
+
+ 5.4883527755737305e-01 2.2926619648933411e-01
+
+
+ <_>
+
+ 0 -1 152 -1.2687679845839739e-03
+
+
+ 6.1043310165405273e-01 4.0223899483680725e-01
+
+
+ <_>
+
+ 0 -1 153 6.2883910723030567e-03
+
+
+ 5.3108531236648560e-01 1.5361930429935455e-01
+
+
+ <_>
+
+ 0 -1 154 -6.2259892001748085e-03
+
+
+ 1.7291119694709778e-01 5.2416062355041504e-01
+
+
+ <_>
+
+ 0 -1 155 -1.2132599949836731e-02
+
+
+ 6.5977597236633301e-01 4.3251821398735046e-01
+
+
+
+
+ <_>
+ 50
+ 2.3918790817260742e+01
+
+ <_>
+
+ 0 -1 156 -3.9184908382594585e-03
+
+
+ 6.1034351587295532e-01 1.4693309366703033e-01
+
+
+ <_>
+
+ 0 -1 157 1.5971299726516008e-03
+
+
+ 2.6323631405830383e-01 5.8964669704437256e-01
+
+
+ <_>
+
+ 0 -1 158 1.7780110239982605e-02
+
+
+ 5.8728742599487305e-01 1.7603619396686554e-01
+
+
+ <_>
+
+ 0 -1 159 6.5334769897162914e-04
+
+
+ 1.5678019821643829e-01 5.5960661172866821e-01
+
+
+ <_>
+
+ 0 -1 160 -2.8353091329336166e-04
+
+
+ 1.9131539762020111e-01 5.7320362329483032e-01
+
+
+ <_>
+
+ 0 -1 161 1.6104689566418529e-03
+
+
+ 2.9149138927459717e-01 5.6230807304382324e-01
+
+
+ <_>
+
+ 0 -1 162 -9.7750619053840637e-02
+
+
+ 1.9434769451618195e-01 5.6482332944869995e-01
+
+
+ <_>
+
+ 0 -1 163 5.5182358482852578e-04
+
+
+ 3.1346169114112854e-01 5.5046397447586060e-01
+
+
+ <_>
+
+ 0 -1 164 -1.2858220376074314e-02
+
+
+ 2.5364819169044495e-01 5.7601428031921387e-01
+
+
+ <_>
+
+ 0 -1 165 4.1530239395797253e-03
+
+
+ 5.7677221298217773e-01 3.6597740650177002e-01
+
+
+ <_>
+
+ 0 -1 166 1.7092459602281451e-03
+
+
+ 2.8431910276412964e-01 5.9189391136169434e-01
+
+
+ <_>
+
+ 0 -1 167 7.5217359699308872e-03
+
+
+ 4.0524271130561829e-01 6.1831092834472656e-01
+
+
+ <_>
+
+ 0 -1 168 2.2479810286313295e-03
+
+
+ 5.7837551832199097e-01 3.1354010105133057e-01
+
+
+ <_>
+
+ 0 -1 169 5.2006211131811142e-02
+
+
+ 5.5413120985031128e-01 1.9166369736194611e-01
+
+
+ <_>
+
+ 0 -1 170 1.2085529975593090e-02
+
+
+ 4.0326559543609619e-01 6.6445910930633545e-01
+
+
+ <_>
+
+ 0 -1 171 1.4687820112158079e-05
+
+
+ 3.5359779000282288e-01 5.7093828916549683e-01
+
+
+ <_>
+
+ 0 -1 172 7.1395188570022583e-06
+
+
+ 3.0374449491500854e-01 5.6102699041366577e-01
+
+
+ <_>
+
+ 0 -1 173 -4.6001640148460865e-03
+
+
+ 7.1810871362686157e-01 4.5803260803222656e-01
+
+
+ <_>
+
+ 0 -1 174 2.0058949012309313e-03
+
+
+ 5.6219518184661865e-01 2.9536840319633484e-01
+
+
+ <_>
+
+ 0 -1 175 4.5050270855426788e-03
+
+
+ 4.6153879165649414e-01 7.6190179586410522e-01
+
+
+ <_>
+
+ 0 -1 176 1.1746830306947231e-02
+
+
+ 5.3438371419906616e-01 1.7725290358066559e-01
+
+
+ <_>
+
+ 0 -1 177 -5.8316338807344437e-02
+
+
+ 1.6862459480762482e-01 5.3407722711563110e-01
+
+
+ <_>
+
+ 0 -1 178 2.3629379575140774e-04
+
+
+ 3.7920561432838440e-01 6.0268038511276245e-01
+
+
+ <_>
+
+ 0 -1 179 -7.8156180679798126e-03
+
+
+ 1.5128670632839203e-01 5.3243237733840942e-01
+
+
+ <_>
+
+ 0 -1 180 -1.0876160115003586e-02
+
+
+ 2.0818220078945160e-01 5.3199452161788940e-01
+
+
+ <_>
+
+ 0 -1 181 -2.7745519764721394e-03
+
+
+ 4.0982469916343689e-01 5.2103281021118164e-01
+
+
+ <_>
+
+ 0 -1 182 -7.8276381827890873e-04
+
+
+ 5.6932741403579712e-01 3.4788420796394348e-01
+
+
+ <_>
+
+ 0 -1 183 1.3870409689843655e-02
+
+
+ 5.3267508745193481e-01 2.2576980292797089e-01
+
+
+ <_>
+
+ 0 -1 184 -2.3674910888075829e-02
+
+
+ 1.5513050556182861e-01 5.2007079124450684e-01
+
+
+ <_>
+
+ 0 -1 185 -1.4879409718560055e-05
+
+
+ 5.5005669593811035e-01 3.8201761245727539e-01
+
+
+ <_>
+
+ 0 -1 186 3.6190641112625599e-03
+
+
+ 4.2386838793754578e-01 6.6397482156753540e-01
+
+
+ <_>
+
+ 0 -1 187 -1.9817110151052475e-02
+
+
+ 2.1500380337238312e-01 5.3823578357696533e-01
+
+
+ <_>
+
+ 0 -1 188 -3.8154039066284895e-03
+
+
+ 6.6757112741470337e-01 4.2152971029281616e-01
+
+
+ <_>
+
+ 0 -1 189 -4.9775829538702965e-03
+
+
+ 2.2672890126705170e-01 5.3863281011581421e-01
+
+
+ <_>
+
+ 0 -1 190 2.2441020701080561e-03
+
+
+ 4.3086910247802734e-01 6.8557357788085938e-01
+
+
+ <_>
+
+ 0 -1 191 1.2282459996640682e-02
+
+
+ 5.8366149663925171e-01 3.4674790501594543e-01
+
+
+ <_>
+
+ 0 -1 192 -2.8548699337989092e-03
+
+
+ 7.0169448852539062e-01 4.3114539980888367e-01
+
+
+ <_>
+
+ 0 -1 193 -3.7875669077038765e-03
+
+
+ 2.8953450918197632e-01 5.2249461412429810e-01
+
+
+ <_>
+
+ 0 -1 194 -1.2201230274513364e-03
+
+
+ 2.9755708575248718e-01 5.4816448688507080e-01
+
+
+ <_>
+
+ 0 -1 195 1.0160599835216999e-02
+
+
+ 4.8888179659843445e-01 8.1826978921890259e-01
+
+
+ <_>
+
+ 0 -1 196 -1.6174569725990295e-02
+
+
+ 1.4814929664134979e-01 5.2399927377700806e-01
+
+
+ <_>
+
+ 0 -1 197 1.9292460754513741e-02
+
+
+ 4.7863098978996277e-01 7.3781907558441162e-01
+
+
+ <_>
+
+ 0 -1 198 -3.2479539513587952e-03
+
+
+ 7.3742228746414185e-01 4.4706439971923828e-01
+
+
+ <_>
+
+ 0 -1 199 -9.3803480267524719e-03
+
+
+ 3.4891548752784729e-01 5.5379962921142578e-01
+
+
+ <_>
+
+ 0 -1 200 -1.2606129981577396e-02
+
+
+ 2.3796869814395905e-01 5.3154432773590088e-01
+
+
+ <_>
+
+ 0 -1 201 -2.5621930137276649e-02
+
+
+ 1.9646880030632019e-01 5.1387697458267212e-01
+
+
+ <_>
+
+ 0 -1 202 -7.5741496402770281e-05
+
+
+ 5.5905228853225708e-01 3.3658531308174133e-01
+
+
+ <_>
+
+ 0 -1 203 -8.9210882782936096e-02
+
+
+ 6.3404656946659088e-02 5.1626348495483398e-01
+
+
+ <_>
+
+ 0 -1 204 -2.7670480776578188e-03
+
+
+ 7.3234677314758301e-01 4.4907060265541077e-01
+
+
+ <_>
+
+ 0 -1 205 2.7152578695677221e-04
+
+
+ 4.1148349642753601e-01 5.9855180978775024e-01
+
+
+
+
+ <_>
+ 51
+ 2.4527879714965820e+01
+
+ <_>
+
+ 0 -1 206 1.4786219689995050e-03
+
+
+ 2.6635450124740601e-01 6.6433167457580566e-01
+
+
+ <_>
+
+ 0 -1 207 -1.8741659587249160e-03
+
+
+ 6.1438488960266113e-01 2.5185129046440125e-01
+
+
+ <_>
+
+ 0 -1 208 -1.7151009524241090e-03
+
+
+ 5.7663410902023315e-01 2.3974630236625671e-01
+
+
+ <_>
+
+ 0 -1 209 -1.8939269939437509e-03
+
+
+ 5.6820458173751831e-01 2.5291448831558228e-01
+
+
+ <_>
+
+ 0 -1 210 -5.3006052039563656e-03
+
+
+ 1.6406759619712830e-01 5.5560797452926636e-01
+
+
+ <_>
+
+ 0 -1 211 -4.6662531793117523e-02
+
+
+ 6.1231541633605957e-01 4.7628301382064819e-01
+
+
+ <_>
+
+ 0 -1 212 -7.9431332414969802e-04
+
+
+ 5.7078588008880615e-01 2.8394040465354919e-01
+
+
+ <_>
+
+ 0 -1 213 1.4891670085489750e-02
+
+
+ 4.0896728634834290e-01 6.0063672065734863e-01
+
+
+ <_>
+
+ 0 -1 214 -1.2046529445797205e-03
+
+
+ 5.7124507427215576e-01 2.7052891254425049e-01
+
+
+ <_>
+
+ 0 -1 215 6.0619381256401539e-03
+
+
+ 5.2625042200088501e-01 3.2622259855270386e-01
+
+
+ <_>
+
+ 0 -1 216 -2.5286648888140917e-03
+
+
+ 6.8538308143615723e-01 4.1992568969726562e-01
+
+
+ <_>
+
+ 0 -1 217 -5.9010218828916550e-03
+
+
+ 3.2662820816040039e-01 5.4348129034042358e-01
+
+
+ <_>
+
+ 0 -1 218 5.6702760048210621e-03
+
+
+ 5.4684108495712280e-01 2.3190039396286011e-01
+
+
+ <_>
+
+ 0 -1 219 -3.0304100364446640e-03
+
+
+ 5.5706679821014404e-01 2.7082380652427673e-01
+
+
+ <_>
+
+ 0 -1 220 2.9803649522364140e-03
+
+
+ 3.7005689740180969e-01 5.8906257152557373e-01
+
+
+ <_>
+
+ 0 -1 221 -7.5840510427951813e-02
+
+
+ 2.1400700509548187e-01 5.4199481010437012e-01
+
+
+ <_>
+
+ 0 -1 222 1.9262539222836494e-02
+
+
+ 5.5267721414566040e-01 2.7265900373458862e-01
+
+
+ <_>
+
+ 0 -1 223 1.8888259364757687e-04
+
+
+ 3.9580118656158447e-01 6.0172098875045776e-01
+
+
+ <_>
+
+ 0 -1 224 2.9369549825787544e-02
+
+
+ 5.2413737773895264e-01 1.4357580244541168e-01
+
+
+ <_>
+
+ 0 -1 225 1.0417619487270713e-03
+
+
+ 3.3854091167449951e-01 5.9299832582473755e-01
+
+
+ <_>
+
+ 0 -1 226 2.6125640142709017e-03
+
+
+ 5.4853779077529907e-01 3.0215978622436523e-01
+
+
+ <_>
+
+ 0 -1 227 9.6977467183023691e-04
+
+
+ 3.3752760291099548e-01 5.5320328474044800e-01
+
+
+ <_>
+
+ 0 -1 228 5.9512659208849072e-04
+
+
+ 5.6317430734634399e-01 3.3593991398811340e-01
+
+
+ <_>
+
+ 0 -1 229 -1.0156559944152832e-01
+
+
+ 6.3735038042068481e-02 5.2304250001907349e-01
+
+
+ <_>
+
+ 0 -1 230 3.6156699061393738e-02
+
+
+ 5.1369631290435791e-01 1.0295289754867554e-01
+
+
+ <_>
+
+ 0 -1 231 3.4624140243977308e-03
+
+
+ 3.8793200254440308e-01 5.5582892894744873e-01
+
+
+ <_>
+
+ 0 -1 232 1.9554980099201202e-02
+
+
+ 5.2500867843627930e-01 1.8758599460124969e-01
+
+
+ <_>
+
+ 0 -1 233 -2.3121440317481756e-03
+
+
+ 6.6720288991928101e-01 4.6796411275863647e-01
+
+
+ <_>
+
+ 0 -1 234 -1.8605289515107870e-03
+
+
+ 7.1633791923522949e-01 4.3346709012985229e-01
+
+
+ <_>
+
+ 0 -1 235 -9.4026362057775259e-04
+
+
+ 3.0213609337806702e-01 5.6502032279968262e-01
+
+
+ <_>
+
+ 0 -1 236 -5.2418331615626812e-03
+
+
+ 1.8200090527534485e-01 5.2502560615539551e-01
+
+
+ <_>
+
+ 0 -1 237 1.1729019752237946e-04
+
+
+ 3.3891880512237549e-01 5.4459732770919800e-01
+
+
+ <_>
+
+ 0 -1 238 1.1878840159624815e-03
+
+
+ 4.0853491425514221e-01 6.2535631656646729e-01
+
+
+ <_>
+
+ 0 -1 239 -1.0881359688937664e-02
+
+
+ 3.3783990144729614e-01 5.7000827789306641e-01
+
+
+ <_>
+
+ 0 -1 240 1.7354859737679362e-03
+
+
+ 4.2046359181404114e-01 6.5230387449264526e-01
+
+
+ <_>
+
+ 0 -1 241 -6.5119052305817604e-03
+
+
+ 2.5952160358428955e-01 5.4281437397003174e-01
+
+
+ <_>
+
+ 0 -1 242 -1.2136430013924837e-03
+
+
+ 6.1651438474655151e-01 3.9778938889503479e-01
+
+
+ <_>
+
+ 0 -1 243 -1.0354240424931049e-02
+
+
+ 1.6280280053615570e-01 5.2195048332214355e-01
+
+
+ <_>
+
+ 0 -1 244 5.5858830455690622e-04
+
+
+ 3.1996509432792664e-01 5.5035740137100220e-01
+
+
+ <_>
+
+ 0 -1 245 1.5299649909138680e-02
+
+
+ 4.1039940714836121e-01 6.1223882436752319e-01
+
+
+ <_>
+
+ 0 -1 246 -2.1588210016489029e-02
+
+
+ 1.0349129885435104e-01 5.1973849534988403e-01
+
+
+ <_>
+
+ 0 -1 247 -1.2834629416465759e-01
+
+
+ 8.4938651323318481e-01 4.8931029438972473e-01
+
+
+ <_>
+
+ 0 -1 248 -2.2927189711481333e-03
+
+
+ 3.1301578879356384e-01 5.4715752601623535e-01
+
+
+ <_>
+
+ 0 -1 249 7.9915106296539307e-02
+
+
+ 4.8563209176063538e-01 6.0739892721176147e-01
+
+
+ <_>
+
+ 0 -1 250 -7.9441092908382416e-02
+
+
+ 8.3946740627288818e-01 4.6245330572128296e-01
+
+
+ <_>
+
+ 0 -1 251 -5.2800010889768600e-03
+
+
+ 1.8816959857940674e-01 5.3066980838775635e-01
+
+
+ <_>
+
+ 0 -1 252 1.0463109938427806e-03
+
+
+ 5.2712291479110718e-01 2.5830659270286560e-01
+
+
+ <_>
+
+ 0 -1 253 2.6317298761568964e-04
+
+
+ 4.2353048920631409e-01 5.7354408502578735e-01
+
+
+ <_>
+
+ 0 -1 254 -3.6173160187900066e-03
+
+
+ 6.9343960285186768e-01 4.4954448938369751e-01
+
+
+ <_>
+
+ 0 -1 255 1.1421879753470421e-02
+
+
+ 5.9009212255477905e-01 4.1381931304931641e-01
+
+
+ <_>
+
+ 0 -1 256 -1.9963278900831938e-03
+
+
+ 6.4663827419281006e-01 4.3272399902343750e-01
+
+
+
+
+ <_>
+ 56
+ 2.7153350830078125e+01
+
+ <_>
+
+ 0 -1 257 -9.9691245704889297e-03
+
+
+ 6.1423242092132568e-01 2.4822120368480682e-01
+
+
+ <_>
+
+ 0 -1 258 7.3073059320449829e-04
+
+
+ 5.7049518823623657e-01 2.3219659924507141e-01
+
+
+ <_>
+
+ 0 -1 259 6.4045301405712962e-04
+
+
+ 2.1122519671916962e-01 5.8149331808090210e-01
+
+
+ <_>
+
+ 0 -1 260 4.5424019917845726e-03
+
+
+ 2.9504820704460144e-01 5.8663117885589600e-01
+
+
+ <_>
+
+ 0 -1 261 9.2477443104144186e-05
+
+
+ 2.9909908771514893e-01 5.7913267612457275e-01
+
+
+ <_>
+
+ 0 -1 262 -8.6603146046400070e-03
+
+
+ 2.8130298852920532e-01 5.6355422735214233e-01
+
+
+ <_>
+
+ 0 -1 263 8.0515816807746887e-03
+
+
+ 3.5353690385818481e-01 6.0547572374343872e-01
+
+
+ <_>
+
+ 0 -1 264 4.3835240649059415e-04
+
+
+ 5.5965322256088257e-01 2.7315109968185425e-01
+
+
+ <_>
+
+ 0 -1 265 -9.8168973636347800e-05
+
+
+ 5.9780317544937134e-01 3.6385610699653625e-01
+
+
+ <_>
+
+ 0 -1 266 -1.1298790341243148e-03
+
+
+ 2.7552521228790283e-01 5.4327291250228882e-01
+
+
+ <_>
+
+ 0 -1 267 6.4356150105595589e-03
+
+
+ 4.3056419491767883e-01 7.0698332786560059e-01
+
+
+ <_>
+
+ 0 -1 268 -5.6829329580068588e-02
+
+
+ 2.4952429533004761e-01 5.2949970960617065e-01
+
+
+ <_>
+
+ 0 -1 269 4.0668169967830181e-03
+
+
+ 5.4785531759262085e-01 2.4977239966392517e-01
+
+
+ <_>
+
+ 0 -1 270 4.8164798499783501e-05
+
+
+ 3.9386010169982910e-01 5.7063561677932739e-01
+
+
+ <_>
+
+ 0 -1 271 6.1795017682015896e-03
+
+
+ 4.4076061248779297e-01 7.3947668075561523e-01
+
+
+ <_>
+
+ 0 -1 272 6.4985752105712891e-03
+
+
+ 5.4452431201934814e-01 2.4791529774665833e-01
+
+
+ <_>
+
+ 0 -1 273 -1.0211090557277203e-03
+
+
+ 2.5447669625282288e-01 5.3389710187911987e-01
+
+
+ <_>
+
+ 0 -1 274 -5.4247528314590454e-03
+
+
+ 2.7188581228256226e-01 5.3240692615509033e-01
+
+
+ <_>
+
+ 0 -1 275 -1.0559899965301156e-03
+
+
+ 3.1782880425453186e-01 5.5345088243484497e-01
+
+
+ <_>
+
+ 0 -1 276 6.6465808777138591e-04
+
+
+ 4.2842191457748413e-01 6.5581941604614258e-01
+
+
+ <_>
+
+ 0 -1 277 -2.7524109464138746e-04
+
+
+ 5.9028607606887817e-01 3.8102629780769348e-01
+
+
+ <_>
+
+ 0 -1 278 4.2293202131986618e-03
+
+
+ 3.8164898753166199e-01 5.7093858718872070e-01
+
+
+ <_>
+
+ 0 -1 279 -3.2868210691958666e-03
+
+
+ 1.7477439343929291e-01 5.2595442533493042e-01
+
+
+ <_>
+
+ 0 -1 280 1.5611879643984139e-04
+
+
+ 3.6017221212387085e-01 5.7256120443344116e-01
+
+
+ <_>
+
+ 0 -1 281 -7.3621381488919724e-06
+
+
+ 5.4018580913543701e-01 3.0444970726966858e-01
+
+
+ <_>
+
+ 0 -1 282 -1.4767250046133995e-02
+
+
+ 3.2207700610160828e-01 5.5734348297119141e-01
+
+
+ <_>
+
+ 0 -1 283 2.4489590898156166e-02
+
+
+ 4.3015280365943909e-01 6.5188127756118774e-01
+
+
+ <_>
+
+ 0 -1 284 -3.7652091123163700e-04
+
+
+ 3.5645830631256104e-01 5.5982369184494019e-01
+
+
+ <_>
+
+ 0 -1 285 7.3657688517414499e-06
+
+
+ 3.4907829761505127e-01 5.5618977546691895e-01
+
+
+ <_>
+
+ 0 -1 286 -1.5099939890205860e-02
+
+
+ 1.7762720584869385e-01 5.3352999687194824e-01
+
+
+ <_>
+
+ 0 -1 287 -3.8316650316119194e-03
+
+
+ 6.1496877670288086e-01 4.2213940620422363e-01
+
+
+ <_>
+
+ 0 -1 288 1.6925400123000145e-02
+
+
+ 5.4130148887634277e-01 2.1665850281715393e-01
+
+
+ <_>
+
+ 0 -1 289 -3.0477850232273340e-03
+
+
+ 6.4494907855987549e-01 4.3546178936958313e-01
+
+
+ <_>
+
+ 0 -1 290 3.2140589319169521e-03
+
+
+ 5.4001551866531372e-01 3.5232171416282654e-01
+
+
+ <_>
+
+ 0 -1 291 -4.0023201145231724e-03
+
+
+ 2.7745240926742554e-01 5.3384172916412354e-01
+
+
+ <_>
+
+ 0 -1 292 7.4182129465043545e-03
+
+
+ 5.6767392158508301e-01 3.7028178572654724e-01
+
+
+ <_>
+
+ 0 -1 293 -8.8764587417244911e-03
+
+
+ 7.7492219209671021e-01 4.5836889743804932e-01
+
+
+ <_>
+
+ 0 -1 294 2.7311739977449179e-03
+
+
+ 5.3387218713760376e-01 3.9966610074043274e-01
+
+
+ <_>
+
+ 0 -1 295 -2.5082379579544067e-03
+
+
+ 5.6119632720947266e-01 3.7774989008903503e-01
+
+
+ <_>
+
+ 0 -1 296 -8.0541074275970459e-03
+
+
+ 2.9152289032936096e-01 5.1791828870773315e-01
+
+
+ <_>
+
+ 0 -1 297 -9.7938813269138336e-04
+
+
+ 5.5364328622817993e-01 3.7001928687095642e-01
+
+
+ <_>
+
+ 0 -1 298 -5.8745909482240677e-03
+
+
+ 3.7543910741806030e-01 5.6793761253356934e-01
+
+
+ <_>
+
+ 0 -1 299 -4.4936719350516796e-03
+
+
+ 7.0196992158889771e-01 4.4809499382972717e-01
+
+
+ <_>
+
+ 0 -1 300 -5.4389229044318199e-03
+
+
+ 2.3103649914264679e-01 5.3133869171142578e-01
+
+
+ <_>
+
+ 0 -1 301 -7.5094640487805009e-04
+
+
+ 5.8648687601089478e-01 4.1293430328369141e-01
+
+
+ <_>
+
+ 0 -1 302 1.4528800420521293e-05
+
+
+ 3.7324070930480957e-01 5.6196212768554688e-01
+
+
+ <_>
+
+ 0 -1 303 4.0758069604635239e-02
+
+
+ 5.3120911121368408e-01 2.7205219864845276e-01
+
+
+ <_>
+
+ 0 -1 304 6.6505931317806244e-03
+
+
+ 4.7100159525871277e-01 6.6934937238693237e-01
+
+
+ <_>
+
+ 0 -1 305 4.5759351924061775e-03
+
+
+ 5.1678192615509033e-01 1.6372759640216827e-01
+
+
+ <_>
+
+ 0 -1 306 6.5269311890006065e-03
+
+
+ 5.3976088762283325e-01 2.9385319352149963e-01
+
+
+ <_>
+
+ 0 -1 307 -1.3660379685461521e-02
+
+
+ 7.0864880084991455e-01 4.5322000980377197e-01
+
+
+ <_>
+
+ 0 -1 308 2.7358869090676308e-02
+
+
+ 5.2064812183380127e-01 3.5892319679260254e-01
+
+
+ <_>
+
+ 0 -1 309 6.2197551596909761e-04
+
+
+ 3.5070759057998657e-01 5.4411232471466064e-01
+
+
+ <_>
+
+ 0 -1 310 -3.3077080734074116e-03
+
+
+ 5.8595228195190430e-01 4.0248918533325195e-01
+
+
+ <_>
+
+ 0 -1 311 -1.0631109587848186e-02
+
+
+ 6.7432671785354614e-01 4.4226029515266418e-01
+
+
+ <_>
+
+ 0 -1 312 1.9441649317741394e-02
+
+
+ 5.2827161550521851e-01 1.7979049682617188e-01
+
+
+
+
+ <_>
+ 71
+ 3.4554111480712891e+01
+
+ <_>
+
+ 0 -1 313 -5.5052167735993862e-03
+
+
+ 5.9147310256958008e-01 2.6265591382980347e-01
+
+
+ <_>
+
+ 0 -1 314 1.9562279339879751e-03
+
+
+ 2.3125819861888885e-01 5.7416272163391113e-01
+
+
+ <_>
+
+ 0 -1 315 -8.8924784213304520e-03
+
+
+ 1.6565300524234772e-01 5.6266540288925171e-01
+
+
+ <_>
+
+ 0 -1 316 8.3638377487659454e-02
+
+
+ 5.4234498739242554e-01 1.9572949409484863e-01
+
+
+ <_>
+
+ 0 -1 317 1.2282270472496748e-03
+
+
+ 3.4179040789604187e-01 5.9925037622451782e-01
+
+
+ <_>
+
+ 0 -1 318 5.7629169896245003e-03
+
+
+ 3.7195819616317749e-01 6.0799038410186768e-01
+
+
+ <_>
+
+ 0 -1 319 -1.6417410224676132e-03
+
+
+ 2.5774860382080078e-01 5.5769157409667969e-01
+
+
+ <_>
+
+ 0 -1 320 3.4113149158656597e-03
+
+
+ 2.9507490992546082e-01 5.5141717195510864e-01
+
+
+ <_>
+
+ 0 -1 321 -1.1069320142269135e-02
+
+
+ 7.5693589448928833e-01 4.4770789146423340e-01
+
+
+ <_>
+
+ 0 -1 322 3.4865971654653549e-02
+
+
+ 5.5837088823318481e-01 2.6696211099624634e-01
+
+
+ <_>
+
+ 0 -1 323 6.5701099811121821e-04
+
+
+ 5.6273132562637329e-01 2.9888901114463806e-01
+
+
+ <_>
+
+ 0 -1 324 -2.4339130148291588e-02
+
+
+ 2.7711850404739380e-01 5.1088631153106689e-01
+
+
+ <_>
+
+ 0 -1 325 5.9435202274471521e-04
+
+
+ 5.5806517601013184e-01 3.1203418970108032e-01
+
+
+ <_>
+
+ 0 -1 326 2.2971509024500847e-03
+
+
+ 3.3302500844001770e-01 5.6790757179260254e-01
+
+
+ <_>
+
+ 0 -1 327 -3.7801829166710377e-03
+
+
+ 2.9905349016189575e-01 5.3448081016540527e-01
+
+
+ <_>
+
+ 0 -1 328 -1.3420669734477997e-01
+
+
+ 1.4638589322566986e-01 5.3925681114196777e-01
+
+
+ <_>
+
+ 0 -1 329 7.5224548345431685e-04
+
+
+ 3.7469539046287537e-01 5.6927347183227539e-01
+
+
+ <_>
+
+ 0 -1 330 -4.0545541793107986e-02
+
+
+ 2.7547478675842285e-01 5.4842978715896606e-01
+
+
+ <_>
+
+ 0 -1 331 1.2572970008477569e-03
+
+
+ 3.7445840239524841e-01 5.7560759782791138e-01
+
+
+ <_>
+
+ 0 -1 332 -7.4249948374927044e-03
+
+
+ 7.5138592720031738e-01 4.7282311320304871e-01
+
+
+ <_>
+
+ 0 -1 333 5.0908129196614027e-04
+
+
+ 5.4048967361450195e-01 2.9323211312294006e-01
+
+
+ <_>
+
+ 0 -1 334 -1.2808450264856219e-03
+
+
+ 6.1697798967361450e-01 4.2733490467071533e-01
+
+
+ <_>
+
+ 0 -1 335 -1.8348860321566463e-03
+
+
+ 2.0484960079193115e-01 5.2064722776412964e-01
+
+
+ <_>
+
+ 0 -1 336 2.7484869584441185e-02
+
+
+ 5.2529847621917725e-01 1.6755220293998718e-01
+
+
+ <_>
+
+ 0 -1 337 2.2372419480234385e-03
+
+
+ 5.2677828073501587e-01 2.7776581048965454e-01
+
+
+ <_>
+
+ 0 -1 338 -8.8635291904211044e-03
+
+
+ 6.9545578956604004e-01 4.8120489716529846e-01
+
+
+ <_>
+
+ 0 -1 339 4.1753971017897129e-03
+
+
+ 4.2918878793716431e-01 6.3491958379745483e-01
+
+
+ <_>
+
+ 0 -1 340 -1.7098189564421773e-03
+
+
+ 2.9305368661880493e-01 5.3612488508224487e-01
+
+
+ <_>
+
+ 0 -1 341 6.5328548662364483e-03
+
+
+ 4.4953250885009766e-01 7.4096941947937012e-01
+
+
+ <_>
+
+ 0 -1 342 -9.5372907817363739e-03
+
+
+ 3.1491199135780334e-01 5.4165017604827881e-01
+
+
+ <_>
+
+ 0 -1 343 2.5310989469289780e-02
+
+
+ 5.1218920946121216e-01 1.3117079436779022e-01
+
+
+ <_>
+
+ 0 -1 344 3.6460969597101212e-02
+
+
+ 5.1759117841720581e-01 2.5913399457931519e-01
+
+
+ <_>
+
+ 0 -1 345 2.0854329690337181e-02
+
+
+ 5.1371401548385620e-01 1.5823160111904144e-01
+
+
+ <_>
+
+ 0 -1 346 -8.7207747856155038e-04
+
+
+ 5.5743098258972168e-01 4.3989789485931396e-01
+
+
+ <_>
+
+ 0 -1 347 -1.5227000403683633e-05
+
+
+ 5.5489408969879150e-01 3.7080699205398560e-01
+
+
+ <_>
+
+ 0 -1 348 -8.4316509310156107e-04
+
+
+ 3.3874198794364929e-01 5.5542111396789551e-01
+
+
+ <_>
+
+ 0 -1 349 3.6037859972566366e-03
+
+
+ 5.3580617904663086e-01 3.4111711382865906e-01
+
+
+ <_>
+
+ 0 -1 350 -6.8057891912758350e-03
+
+
+ 6.1252027750015259e-01 4.3458628654479980e-01
+
+
+ <_>
+
+ 0 -1 351 -4.7021660953760147e-02
+
+
+ 2.3581659793853760e-01 5.1937389373779297e-01
+
+
+ <_>
+
+ 0 -1 352 -3.6954108625650406e-02
+
+
+ 7.3231112957000732e-01 4.7609439492225647e-01
+
+
+ <_>
+
+ 0 -1 353 1.0439479956403375e-03
+
+
+ 5.4194551706314087e-01 3.4113308787345886e-01
+
+
+ <_>
+
+ 0 -1 354 -2.1050689974799752e-04
+
+
+ 2.8216940164566040e-01 5.5549472570419312e-01
+
+
+ <_>
+
+ 0 -1 355 -8.0831587314605713e-02
+
+
+ 9.1299301385879517e-01 4.6974349021911621e-01
+
+
+ <_>
+
+ 0 -1 356 -3.6579059087671340e-04
+
+
+ 6.0226702690124512e-01 3.9782929420471191e-01
+
+
+ <_>
+
+ 0 -1 357 -1.2545920617412776e-04
+
+
+ 5.6132131814956665e-01 3.8455399870872498e-01
+
+
+ <_>
+
+ 0 -1 358 -6.8786486983299255e-02
+
+
+ 2.2616119682788849e-01 5.3004968166351318e-01
+
+
+ <_>
+
+ 0 -1 359 1.2415789999067783e-02
+
+
+ 4.0756919980049133e-01 5.8288121223449707e-01
+
+
+ <_>
+
+ 0 -1 360 -4.7174817882478237e-03
+
+
+ 2.8272539377212524e-01 5.2677577733993530e-01
+
+
+ <_>
+
+ 0 -1 361 3.8136858493089676e-02
+
+
+ 5.0747412443161011e-01 1.0236159712076187e-01
+
+
+ <_>
+
+ 0 -1 362 -2.8168049175292253e-03
+
+
+ 6.1690068244934082e-01 4.3596929311752319e-01
+
+
+ <_>
+
+ 0 -1 363 8.1303603947162628e-03
+
+
+ 4.5244330167770386e-01 7.6060950756072998e-01
+
+
+ <_>
+
+ 0 -1 364 6.0056019574403763e-03
+
+
+ 5.2404087781906128e-01 1.8597120046615601e-01
+
+
+ <_>
+
+ 0 -1 365 1.9139319658279419e-02
+
+
+ 5.2093791961669922e-01 2.3320719599723816e-01
+
+
+ <_>
+
+ 0 -1 366 1.6445759683847427e-02
+
+
+ 5.4507029056549072e-01 3.2642349600791931e-01
+
+
+ <_>
+
+ 0 -1 367 -3.7356890738010406e-02
+
+
+ 6.9990468025207520e-01 4.5332419872283936e-01
+
+
+ <_>
+
+ 0 -1 368 -1.9727900624275208e-02
+
+
+ 2.6536649465560913e-01 5.4128098487854004e-01
+
+
+ <_>
+
+ 0 -1 369 6.6972579807043076e-03
+
+
+ 4.4805660843849182e-01 7.1386522054672241e-01
+
+
+ <_>
+
+ 0 -1 370 7.4457528535276651e-04
+
+
+ 4.2313501238822937e-01 5.4713201522827148e-01
+
+
+ <_>
+
+ 0 -1 371 1.1790640419349074e-03
+
+
+ 5.3417021036148071e-01 3.1304550170898438e-01
+
+
+ <_>
+
+ 0 -1 372 3.4980610013008118e-02
+
+
+ 5.1186597347259521e-01 3.4305301308631897e-01
+
+
+ <_>
+
+ 0 -1 373 5.6859792675822973e-04
+
+
+ 3.5321870446205139e-01 5.4686397314071655e-01
+
+
+ <_>
+
+ 0 -1 374 -1.1340649798512459e-02
+
+
+ 2.8423538804054260e-01 5.3487008810043335e-01
+
+
+ <_>
+
+ 0 -1 375 -6.6228108480572701e-03
+
+
+ 6.8836402893066406e-01 4.4926649332046509e-01
+
+
+ <_>
+
+ 0 -1 376 -8.0160330981016159e-03
+
+
+ 1.7098939418792725e-01 5.2243089675903320e-01
+
+
+ <_>
+
+ 0 -1 377 1.4206819469109178e-03
+
+
+ 5.2908462285995483e-01 2.9933831095695496e-01
+
+
+ <_>
+
+ 0 -1 378 -2.7801711112260818e-03
+
+
+ 6.4988541603088379e-01 4.4604998826980591e-01
+
+
+ <_>
+
+ 0 -1 379 -1.4747589593753219e-03
+
+
+ 3.2604381442070007e-01 5.3881132602691650e-01
+
+
+ <_>
+
+ 0 -1 380 -2.3830339312553406e-02
+
+
+ 7.5289410352706909e-01 4.8012199997901917e-01
+
+
+ <_>
+
+ 0 -1 381 6.9369790144264698e-03
+
+
+ 5.3351658582687378e-01 3.2614278793334961e-01
+
+
+ <_>
+
+ 0 -1 382 8.2806255668401718e-03
+
+
+ 4.5803940296173096e-01 5.7378298044204712e-01
+
+
+ <_>
+
+ 0 -1 383 -1.0439500212669373e-02
+
+
+ 2.5923201441764832e-01 5.2338278293609619e-01
+
+
+
+
+ <_>
+ 80
+ 3.9107288360595703e+01
+
+ <_>
+
+ 0 -1 384 7.2006587870419025e-03
+
+
+ 3.2588860392570496e-01 6.8498080968856812e-01
+
+
+ <_>
+
+ 0 -1 385 -2.8593589086085558e-03
+
+
+ 5.8388811349868774e-01 2.5378298759460449e-01
+
+
+ <_>
+
+ 0 -1 386 6.8580528022721410e-04
+
+
+ 5.7080817222595215e-01 2.8124240040779114e-01
+
+
+ <_>
+
+ 0 -1 387 7.9580191522836685e-03
+
+
+ 2.5010511279106140e-01 5.5442607402801514e-01
+
+
+ <_>
+
+ 0 -1 388 -1.2124150525778532e-03
+
+
+ 2.3853680491447449e-01 5.4333502054214478e-01
+
+
+ <_>
+
+ 0 -1 389 7.9426132142543793e-03
+
+
+ 3.9550709724426270e-01 6.2207579612731934e-01
+
+
+ <_>
+
+ 0 -1 390 2.4630590341985226e-03
+
+
+ 5.6397080421447754e-01 2.9923579096794128e-01
+
+
+ <_>
+
+ 0 -1 391 -6.0396599583327770e-03
+
+
+ 2.1865129470825195e-01 5.4116767644882202e-01
+
+
+ <_>
+
+ 0 -1 392 -1.2988339876756072e-03
+
+
+ 2.3507060110569000e-01 5.3645849227905273e-01
+
+
+ <_>
+
+ 0 -1 393 2.2299369447864592e-04
+
+
+ 3.8041129708290100e-01 5.7296061515808105e-01
+
+
+ <_>
+
+ 0 -1 394 1.4654280385002494e-03
+
+
+ 2.5101679563522339e-01 5.2582687139511108e-01
+
+
+ <_>
+
+ 0 -1 395 -8.1210042117163539e-04
+
+
+ 5.9928238391876221e-01 3.8511589169502258e-01
+
+
+ <_>
+
+ 0 -1 396 -1.3836020370945334e-03
+
+
+ 5.6813961267471313e-01 3.6365869641304016e-01
+
+
+ <_>
+
+ 0 -1 397 -2.7936449274420738e-02
+
+
+ 1.4913170039653778e-01 5.3775602579116821e-01
+
+
+ <_>
+
+ 0 -1 398 -4.6919551095925272e-04
+
+
+ 3.6924299597740173e-01 5.5724847316741943e-01
+
+
+ <_>
+
+ 0 -1 399 -4.9829659983515739e-03
+
+
+ 6.7585092782974243e-01 4.5325040817260742e-01
+
+
+ <_>
+
+ 0 -1 400 1.8815309740602970e-03
+
+
+ 5.3680229187011719e-01 2.9325398802757263e-01
+
+
+ <_>
+
+ 0 -1 401 -1.9067550078034401e-02
+
+
+ 1.6493770480155945e-01 5.3300672769546509e-01
+
+
+ <_>
+
+ 0 -1 402 -4.6906559728085995e-03
+
+
+ 1.9639259576797485e-01 5.1193618774414062e-01
+
+
+ <_>
+
+ 0 -1 403 5.9777139686048031e-03
+
+
+ 4.6711719036102295e-01 7.0083981752395630e-01
+
+
+ <_>
+
+ 0 -1 404 -3.3303130418062210e-02
+
+
+ 1.1554169654846191e-01 5.1041620969772339e-01
+
+
+ <_>
+
+ 0 -1 405 9.0744107961654663e-02
+
+
+ 5.1496601104736328e-01 1.3061730563640594e-01
+
+
+ <_>
+
+ 0 -1 406 9.3555898638442159e-04
+
+
+ 3.6054810881614685e-01 5.4398590326309204e-01
+
+
+ <_>
+
+ 0 -1 407 1.4901650138199329e-02
+
+
+ 4.8862120509147644e-01 7.6875698566436768e-01
+
+
+ <_>
+
+ 0 -1 408 6.1594118596985936e-04
+
+
+ 5.3568130731582642e-01 3.2409390807151794e-01
+
+
+ <_>
+
+ 0 -1 409 -5.0670988857746124e-02
+
+
+ 1.8486219644546509e-01 5.2304041385650635e-01
+
+
+ <_>
+
+ 0 -1 410 6.8665749859064817e-04
+
+
+ 3.8405799865722656e-01 5.5179458856582642e-01
+
+
+ <_>
+
+ 0 -1 411 8.3712432533502579e-03
+
+
+ 4.2885640263557434e-01 6.1317539215087891e-01
+
+
+ <_>
+
+ 0 -1 412 -1.2953069526702166e-03
+
+
+ 2.9136741161346436e-01 5.2807378768920898e-01
+
+
+ <_>
+
+ 0 -1 413 -4.1941680014133453e-02
+
+
+ 7.5547999143600464e-01 4.8560309410095215e-01
+
+
+ <_>
+
+ 0 -1 414 -2.3529380559921265e-02
+
+
+ 2.8382799029350281e-01 5.2560812234878540e-01
+
+
+ <_>
+
+ 0 -1 415 4.0857449173927307e-02
+
+
+ 4.8709350824356079e-01 6.2772971391677856e-01
+
+
+ <_>
+
+ 0 -1 416 -2.5406869128346443e-02
+
+
+ 7.0997077226638794e-01 4.5750290155410767e-01
+
+
+ <_>
+
+ 0 -1 417 -4.1415440500713885e-04
+
+
+ 4.0308868885040283e-01 5.4694122076034546e-01
+
+
+ <_>
+
+ 0 -1 418 2.1824119612574577e-02
+
+
+ 4.5020240545272827e-01 6.7687010765075684e-01
+
+
+ <_>
+
+ 0 -1 419 1.4114039950072765e-02
+
+
+ 5.4428607225418091e-01 3.7917000055313110e-01
+
+
+ <_>
+
+ 0 -1 420 6.7214590671937913e-05
+
+
+ 4.2004638910293579e-01 5.8734762668609619e-01
+
+
+ <_>
+
+ 0 -1 421 -7.9417638480663300e-03
+
+
+ 3.7925618886947632e-01 5.5852657556533813e-01
+
+
+ <_>
+
+ 0 -1 422 -7.2144409641623497e-03
+
+
+ 7.2531038522720337e-01 4.6035489439964294e-01
+
+
+ <_>
+
+ 0 -1 423 2.5817339774221182e-03
+
+
+ 4.6933019161224365e-01 5.9002387523651123e-01
+
+
+ <_>
+
+ 0 -1 424 1.3409319519996643e-01
+
+
+ 5.1492130756378174e-01 1.8088449537754059e-01
+
+
+ <_>
+
+ 0 -1 425 2.2962710354477167e-03
+
+
+ 5.3997439146041870e-01 3.7178671360015869e-01
+
+
+ <_>
+
+ 0 -1 426 -2.1575849968940020e-03
+
+
+ 2.4084959924221039e-01 5.1488637924194336e-01
+
+
+ <_>
+
+ 0 -1 427 -4.9196188338100910e-03
+
+
+ 6.5735882520675659e-01 4.7387400269508362e-01
+
+
+ <_>
+
+ 0 -1 428 1.6267469618469477e-03
+
+
+ 4.1928219795227051e-01 6.3031142950057983e-01
+
+
+ <_>
+
+ 0 -1 429 3.3413388882763684e-04
+
+
+ 5.5402982234954834e-01 3.7021011114120483e-01
+
+
+ <_>
+
+ 0 -1 430 -2.6698080822825432e-02
+
+
+ 1.7109179496765137e-01 5.1014107465744019e-01
+
+
+ <_>
+
+ 0 -1 431 -3.0561879277229309e-02
+
+
+ 1.9042180478572845e-01 5.1687937974929810e-01
+
+
+ <_>
+
+ 0 -1 432 2.8511548880487680e-03
+
+
+ 4.4475069642066956e-01 6.3138538599014282e-01
+
+
+ <_>
+
+ 0 -1 433 -3.6211479455232620e-02
+
+
+ 2.4907270073890686e-01 5.3773492574691772e-01
+
+
+ <_>
+
+ 0 -1 434 -2.4115189444273710e-03
+
+
+ 5.3812432289123535e-01 3.6642369627952576e-01
+
+
+ <_>
+
+ 0 -1 435 -7.7253201743587852e-04
+
+
+ 5.5302321910858154e-01 3.5415500402450562e-01
+
+
+ <_>
+
+ 0 -1 436 2.9481729143299162e-04
+
+
+ 4.1326990723609924e-01 5.6672430038452148e-01
+
+
+ <_>
+
+ 0 -1 437 -6.2334560789167881e-03
+
+
+ 9.8787233233451843e-02 5.1986688375473022e-01
+
+
+ <_>
+
+ 0 -1 438 -2.6274729520082474e-02
+
+
+ 9.1127492487430573e-02 5.0281071662902832e-01
+
+
+ <_>
+
+ 0 -1 439 5.3212260827422142e-03
+
+
+ 4.7266489267349243e-01 6.2227207422256470e-01
+
+
+ <_>
+
+ 0 -1 440 -4.1129058226943016e-03
+
+
+ 2.1574570238590240e-01 5.1378047466278076e-01
+
+
+ <_>
+
+ 0 -1 441 3.2457809429615736e-03
+
+
+ 5.4107707738876343e-01 3.7217769026756287e-01
+
+
+ <_>
+
+ 0 -1 442 -1.6359709203243256e-02
+
+
+ 7.7878749370574951e-01 4.6852919459342957e-01
+
+
+ <_>
+
+ 0 -1 443 3.2166109303943813e-04
+
+
+ 5.4789870977401733e-01 4.2403739690780640e-01
+
+
+ <_>
+
+ 0 -1 444 6.4452440710738301e-04
+
+
+ 5.3305608034133911e-01 3.5013249516487122e-01
+
+
+ <_>
+
+ 0 -1 445 -7.8909732401371002e-03
+
+
+ 6.9235211610794067e-01 4.7265690565109253e-01
+
+
+ <_>
+
+ 0 -1 446 4.8336211591959000e-02
+
+
+ 5.0559002161026001e-01 7.5749203562736511e-02
+
+
+ <_>
+
+ 0 -1 447 -7.5178127735853195e-04
+
+
+ 3.7837418913841248e-01 5.5385738611221313e-01
+
+
+ <_>
+
+ 0 -1 448 -2.4953910615295172e-03
+
+
+ 3.0816510319709778e-01 5.3596121072769165e-01
+
+
+ <_>
+
+ 0 -1 449 -2.2385010961443186e-03
+
+
+ 6.6339588165283203e-01 4.6493428945541382e-01
+
+
+ <_>
+
+ 0 -1 450 -1.7988430336117744e-03
+
+
+ 6.5968447923660278e-01 4.3471878767013550e-01
+
+
+ <_>
+
+ 0 -1 451 8.7860915809869766e-03
+
+
+ 5.2318328619003296e-01 2.3155799508094788e-01
+
+
+ <_>
+
+ 0 -1 452 3.6715380847454071e-03
+
+
+ 5.2042502164840698e-01 2.9773768782615662e-01
+
+
+ <_>
+
+ 0 -1 453 -3.5336449742317200e-02
+
+
+ 7.2388780117034912e-01 4.8615050315856934e-01
+
+
+ <_>
+
+ 0 -1 454 -6.9189240457490087e-04
+
+
+ 3.1050220131874084e-01 5.2298247814178467e-01
+
+
+ <_>
+
+ 0 -1 455 -3.3946109469980001e-03
+
+
+ 3.1389680504798889e-01 5.2101737260818481e-01
+
+
+ <_>
+
+ 0 -1 456 9.8569283727556467e-04
+
+
+ 4.5365801453590393e-01 6.5850979089736938e-01
+
+
+ <_>
+
+ 0 -1 457 -5.0163101404905319e-02
+
+
+ 1.8044540286064148e-01 5.1989167928695679e-01
+
+
+ <_>
+
+ 0 -1 458 -2.2367259953171015e-03
+
+
+ 7.2557020187377930e-01 4.6513590216636658e-01
+
+
+ <_>
+
+ 0 -1 459 7.4326287722215056e-04
+
+
+ 4.4129210710525513e-01 5.8985459804534912e-01
+
+
+ <_>
+
+ 0 -1 460 -9.3485182151198387e-04
+
+
+ 3.5000529885292053e-01 5.3660178184509277e-01
+
+
+ <_>
+
+ 0 -1 461 1.7497939988970757e-02
+
+
+ 4.9121949076652527e-01 8.3152848482131958e-01
+
+
+ <_>
+
+ 0 -1 462 -1.5200000489130616e-03
+
+
+ 3.5702759027481079e-01 5.3705602884292603e-01
+
+
+ <_>
+
+ 0 -1 463 7.8003940870985389e-04
+
+
+ 4.3537721037864685e-01 5.9673351049423218e-01
+
+
+
+
+ <_>
+ 103
+ 5.0610481262207031e+01
+
+ <_>
+
+ 0 -1 464 -9.9945552647113800e-03
+
+
+ 6.1625832319259644e-01 3.0545330047607422e-01
+
+
+ <_>
+
+ 0 -1 465 -1.1085229925811291e-03
+
+
+ 5.8182948827743530e-01 3.1555780768394470e-01
+
+
+ <_>
+
+ 0 -1 466 1.0364380432292819e-03
+
+
+ 2.5520521402359009e-01 5.6929117441177368e-01
+
+
+ <_>
+
+ 0 -1 467 6.8211311008781195e-04
+
+
+ 3.6850899457931519e-01 5.9349310398101807e-01
+
+
+ <_>
+
+ 0 -1 468 -6.8057340104132891e-04
+
+
+ 2.3323920369148254e-01 5.4747921228408813e-01
+
+
+ <_>
+
+ 0 -1 469 2.6068789884448051e-04
+
+
+ 3.2574570178985596e-01 5.6675457954406738e-01
+
+
+ <_>
+
+ 0 -1 470 5.1607372006401420e-04
+
+
+ 3.7447169423103333e-01 5.8454728126525879e-01
+
+
+ <_>
+
+ 0 -1 471 8.5007521556690335e-04
+
+
+ 3.4203711152076721e-01 5.5228072404861450e-01
+
+
+ <_>
+
+ 0 -1 472 -1.8607829697430134e-03
+
+
+ 2.8044199943542480e-01 5.3754240274429321e-01
+
+
+ <_>
+
+ 0 -1 473 -1.5033970121294260e-03
+
+
+ 2.5790509581565857e-01 5.4989522695541382e-01
+
+
+ <_>
+
+ 0 -1 474 2.3478909861296415e-03
+
+
+ 4.1751560568809509e-01 6.3137108087539673e-01
+
+
+ <_>
+
+ 0 -1 475 -2.8880240279249847e-04
+
+
+ 5.8651697635650635e-01 4.0526661276817322e-01
+
+
+ <_>
+
+ 0 -1 476 8.9405477046966553e-03
+
+
+ 5.2111411094665527e-01 2.3186540603637695e-01
+
+
+ <_>
+
+ 0 -1 477 -1.9327739253640175e-02
+
+
+ 2.7534329891204834e-01 5.2415257692337036e-01
+
+
+ <_>
+
+ 0 -1 478 -2.0202060113660991e-04
+
+
+ 5.7229787111282349e-01 3.6771959066390991e-01
+
+
+ <_>
+
+ 0 -1 479 2.1179069299250841e-03
+
+
+ 4.4661080837249756e-01 5.5424308776855469e-01
+
+
+ <_>
+
+ 0 -1 480 -1.7743760254234076e-03
+
+
+ 2.8132531046867371e-01 5.3009599447250366e-01
+
+
+ <_>
+
+ 0 -1 481 4.2234458960592747e-03
+
+
+ 4.3997099995613098e-01 5.7954281568527222e-01
+
+
+ <_>
+
+ 0 -1 482 -1.4375220052897930e-02
+
+
+ 2.9811179637908936e-01 5.2920591831207275e-01
+
+
+ <_>
+
+ 0 -1 483 -1.5349180437624454e-02
+
+
+ 7.7052152156829834e-01 4.7481718659400940e-01
+
+
+ <_>
+
+ 0 -1 484 1.5152279956964776e-05
+
+
+ 3.7188440561294556e-01 5.5768972635269165e-01
+
+
+ <_>
+
+ 0 -1 485 -9.1293919831514359e-03
+
+
+ 3.6151960492134094e-01 5.2867668867111206e-01
+
+
+ <_>
+
+ 0 -1 486 2.2512159775942564e-03
+
+
+ 5.3647047281265259e-01 3.4862980246543884e-01
+
+
+ <_>
+
+ 0 -1 487 -4.9696918576955795e-03
+
+
+ 6.9276517629623413e-01 4.6768361330032349e-01
+
+
+ <_>
+
+ 0 -1 488 -1.2829010374844074e-02
+
+
+ 7.7121537923812866e-01 4.6607351303100586e-01
+
+
+ <_>
+
+ 0 -1 489 -9.3660065904259682e-03
+
+
+ 3.3749839663505554e-01 5.3512877225875854e-01
+
+
+ <_>
+
+ 0 -1 490 3.2452319283038378e-03
+
+
+ 5.3251898288726807e-01 3.2896101474761963e-01
+
+
+ <_>
+
+ 0 -1 491 -1.1723560281097889e-02
+
+
+ 6.8376529216766357e-01 4.7543001174926758e-01
+
+
+ <_>
+
+ 0 -1 492 2.9257940695970319e-05
+
+
+ 3.5720878839492798e-01 5.3605020046234131e-01
+
+
+ <_>
+
+ 0 -1 493 -2.2244219508138485e-05
+
+
+ 5.5414271354675293e-01 3.5520640015602112e-01
+
+
+ <_>
+
+ 0 -1 494 5.0881509669125080e-03
+
+
+ 5.0708442926406860e-01 1.2564620375633240e-01
+
+
+ <_>
+
+ 0 -1 495 2.7429679408669472e-02
+
+
+ 5.2695602178573608e-01 1.6258180141448975e-01
+
+
+ <_>
+
+ 0 -1 496 -6.4142867922782898e-03
+
+
+ 7.1455889940261841e-01 4.5841971039772034e-01
+
+
+ <_>
+
+ 0 -1 497 3.3479959238320589e-03
+
+
+ 5.3986120223999023e-01 3.4946969151496887e-01
+
+
+ <_>
+
+ 0 -1 498 -8.2635492086410522e-02
+
+
+ 2.4391929805278778e-01 5.1602262258529663e-01
+
+
+ <_>
+
+ 0 -1 499 1.0261740535497665e-03
+
+
+ 3.8868919014930725e-01 5.7679080963134766e-01
+
+
+ <_>
+
+ 0 -1 500 -1.6307090409100056e-03
+
+
+ 3.3894580602645874e-01 5.3477007150650024e-01
+
+
+ <_>
+
+ 0 -1 501 2.4546680506318808e-03
+
+
+ 4.6014139056205750e-01 6.3872468471527100e-01
+
+
+ <_>
+
+ 0 -1 502 -9.9476519972085953e-04
+
+
+ 5.7698792219161987e-01 4.1203960776329041e-01
+
+
+ <_>
+
+ 0 -1 503 1.5409190207719803e-02
+
+
+ 4.8787090182304382e-01 7.0898222923278809e-01
+
+
+ <_>
+
+ 0 -1 504 1.1784400558099151e-03
+
+
+ 5.2635532617568970e-01 2.8952449560165405e-01
+
+
+ <_>
+
+ 0 -1 505 -2.7701919898390770e-02
+
+
+ 1.4988289773464203e-01 5.2196067571640015e-01
+
+
+ <_>
+
+ 0 -1 506 -2.9505399987101555e-02
+
+
+ 2.4893319234251976e-02 4.9998161196708679e-01
+
+
+ <_>
+
+ 0 -1 507 4.5159430010244250e-04
+
+
+ 5.4646229743957520e-01 4.0296629071235657e-01
+
+
+ <_>
+
+ 0 -1 508 7.1772639639675617e-03
+
+
+ 4.2710569500923157e-01 5.8662968873977661e-01
+
+
+ <_>
+
+ 0 -1 509 -7.4182048439979553e-02
+
+
+ 6.8741792440414429e-01 4.9190279841423035e-01
+
+
+ <_>
+
+ 0 -1 510 -1.7254160717129707e-02
+
+
+ 3.3706760406494141e-01 5.3487390279769897e-01
+
+
+ <_>
+
+ 0 -1 511 1.4851559884846210e-02
+
+
+ 4.6267929673194885e-01 6.1299049854278564e-01
+
+
+ <_>
+
+ 0 -1 512 1.0002000257372856e-02
+
+
+ 5.3461229801177979e-01 3.4234538674354553e-01
+
+
+ <_>
+
+ 0 -1 513 2.0138120744377375e-03
+
+
+ 4.6438300609588623e-01 5.8243042230606079e-01
+
+
+ <_>
+
+ 0 -1 514 1.5135470312088728e-03
+
+
+ 5.1963961124420166e-01 2.8561499714851379e-01
+
+
+ <_>
+
+ 0 -1 515 3.1381431035697460e-03
+
+
+ 4.8381629586219788e-01 5.9585297107696533e-01
+
+
+ <_>
+
+ 0 -1 516 -5.1450440660119057e-03
+
+
+ 8.9203029870986938e-01 4.7414121031761169e-01
+
+
+ <_>
+
+ 0 -1 517 -4.4736708514392376e-03
+
+
+ 2.0339429378509521e-01 5.3372788429260254e-01
+
+
+ <_>
+
+ 0 -1 518 1.9628470763564110e-03
+
+
+ 4.5716339349746704e-01 6.7258632183074951e-01
+
+
+ <_>
+
+ 0 -1 519 5.4260450415313244e-03
+
+
+ 5.2711081504821777e-01 2.8456708788871765e-01
+
+
+ <_>
+
+ 0 -1 520 4.9611460417509079e-04
+
+
+ 4.1383129358291626e-01 5.7185977697372437e-01
+
+
+ <_>
+
+ 0 -1 521 9.3728788197040558e-03
+
+
+ 5.2251511812210083e-01 2.8048470616340637e-01
+
+
+ <_>
+
+ 0 -1 522 6.0500897234305739e-04
+
+
+ 5.2367687225341797e-01 3.3145239949226379e-01
+
+
+ <_>
+
+ 0 -1 523 5.6792551185935736e-04
+
+
+ 4.5310598611831665e-01 6.2769711017608643e-01
+
+
+ <_>
+
+ 0 -1 524 2.4644339457154274e-02
+
+
+ 5.1308518648147583e-01 2.0171439647674561e-01
+
+
+ <_>
+
+ 0 -1 525 -1.0290450416505337e-02
+
+
+ 7.7865952253341675e-01 4.8766410350799561e-01
+
+
+ <_>
+
+ 0 -1 526 2.0629419013857841e-03
+
+
+ 4.2885988950729370e-01 5.8812642097473145e-01
+
+
+ <_>
+
+ 0 -1 527 -5.0519481301307678e-03
+
+
+ 3.5239779949188232e-01 5.2860087156295776e-01
+
+
+ <_>
+
+ 0 -1 528 -5.7692620903253555e-03
+
+
+ 6.8410861492156982e-01 4.5880940556526184e-01
+
+
+ <_>
+
+ 0 -1 529 -4.5789941214025021e-04
+
+
+ 3.5655200481414795e-01 5.4859781265258789e-01
+
+
+ <_>
+
+ 0 -1 530 -7.5918837683275342e-04
+
+
+ 3.3687931299209595e-01 5.2541971206665039e-01
+
+
+ <_>
+
+ 0 -1 531 -1.7737259622663260e-03
+
+
+ 3.4221610426902771e-01 5.4540151357650757e-01
+
+
+ <_>
+
+ 0 -1 532 -8.5610467940568924e-03
+
+
+ 6.5336120128631592e-01 4.4858568906784058e-01
+
+
+ <_>
+
+ 0 -1 533 1.7277270089834929e-03
+
+
+ 5.3075802326202393e-01 3.9253529906272888e-01
+
+
+ <_>
+
+ 0 -1 534 -2.8199609369039536e-02
+
+
+ 6.8574589490890503e-01 4.5885840058326721e-01
+
+
+ <_>
+
+ 0 -1 535 -1.7781109781935811e-03
+
+
+ 4.0378510951995850e-01 5.3698569536209106e-01
+
+
+ <_>
+
+ 0 -1 536 3.3177141449414194e-04
+
+
+ 5.3997987508773804e-01 3.7057501077651978e-01
+
+
+ <_>
+
+ 0 -1 537 2.6385399978607893e-03
+
+
+ 4.6654370427131653e-01 6.4527308940887451e-01
+
+
+ <_>
+
+ 0 -1 538 -2.1183069329708815e-03
+
+
+ 5.9147810935974121e-01 4.0646770596504211e-01
+
+
+ <_>
+
+ 0 -1 539 -1.4773289673030376e-02
+
+
+ 3.6420381069183350e-01 5.2947628498077393e-01
+
+
+ <_>
+
+ 0 -1 540 -1.6815440729260445e-02
+
+
+ 2.6642319560050964e-01 5.1449728012084961e-01
+
+
+ <_>
+
+ 0 -1 541 -6.3370140269398689e-03
+
+
+ 6.7795312404632568e-01 4.8520979285240173e-01
+
+
+ <_>
+
+ 0 -1 542 -4.4560048991115764e-05
+
+
+ 5.6139647960662842e-01 4.1530540585517883e-01
+
+
+ <_>
+
+ 0 -1 543 -1.0240620467811823e-03
+
+
+ 5.9644782543182373e-01 4.5663040876388550e-01
+
+
+ <_>
+
+ 0 -1 544 -2.3161689750850201e-03
+
+
+ 2.9761150479316711e-01 5.1881599426269531e-01
+
+
+ <_>
+
+ 0 -1 545 5.3217571973800659e-01
+
+
+ 5.1878392696380615e-01 2.2026319801807404e-01
+
+
+ <_>
+
+ 0 -1 546 -1.6643050312995911e-01
+
+
+ 1.8660229444503784e-01 5.0603431463241577e-01
+
+
+ <_>
+
+ 0 -1 547 1.1253529787063599e-01
+
+
+ 5.2121251821517944e-01 1.1850229650735855e-01
+
+
+ <_>
+
+ 0 -1 548 9.3046864494681358e-03
+
+
+ 4.5899370312690735e-01 6.8261492252349854e-01
+
+
+ <_>
+
+ 0 -1 549 -4.6255099587142467e-03
+
+
+ 3.0799409747123718e-01 5.2250087261199951e-01
+
+
+ <_>
+
+ 0 -1 550 -1.1116469651460648e-01
+
+
+ 2.1010440587997437e-01 5.0808018445968628e-01
+
+
+ <_>
+
+ 0 -1 551 -1.0888439603149891e-02
+
+
+ 5.7653552293777466e-01 4.7904640436172485e-01
+
+
+ <_>
+
+ 0 -1 552 5.8564301580190659e-03
+
+
+ 5.0651001930236816e-01 1.5635989606380463e-01
+
+
+ <_>
+
+ 0 -1 553 5.4854389280080795e-02
+
+
+ 4.9669149518013000e-01 7.2305107116699219e-01
+
+
+ <_>
+
+ 0 -1 554 -1.1197339743375778e-02
+
+
+ 2.1949790418148041e-01 5.0987982749938965e-01
+
+
+ <_>
+
+ 0 -1 555 4.4069071300327778e-03
+
+
+ 4.7784018516540527e-01 6.7709028720855713e-01
+
+
+ <_>
+
+ 0 -1 556 -6.3665293157100677e-02
+
+
+ 1.9363629817962646e-01 5.0810241699218750e-01
+
+
+ <_>
+
+ 0 -1 557 -9.8081491887569427e-03
+
+
+ 5.9990632534027100e-01 4.8103410005569458e-01
+
+
+ <_>
+
+ 0 -1 558 -2.1717099007219076e-03
+
+
+ 3.3383339643478394e-01 5.2354729175567627e-01
+
+
+ <_>
+
+ 0 -1 559 -1.3315520249307156e-02
+
+
+ 6.6170698404312134e-01 4.9192130565643311e-01
+
+
+ <_>
+
+ 0 -1 560 2.5442079640924931e-03
+
+
+ 4.4887441396713257e-01 6.0821849107742310e-01
+
+
+ <_>
+
+ 0 -1 561 1.2037839740514755e-02
+
+
+ 5.4093921184539795e-01 3.2924321293830872e-01
+
+
+ <_>
+
+ 0 -1 562 -2.0701050758361816e-02
+
+
+ 6.8191200494766235e-01 4.5949959754943848e-01
+
+
+ <_>
+
+ 0 -1 563 2.7608279138803482e-02
+
+
+ 4.6307921409606934e-01 5.7672828435897827e-01
+
+
+ <_>
+
+ 0 -1 564 1.2370620388537645e-03
+
+
+ 5.1653790473937988e-01 2.6350161433219910e-01
+
+
+ <_>
+
+ 0 -1 565 -3.7669338285923004e-02
+
+
+ 2.5363931059837341e-01 5.2789801359176636e-01
+
+
+ <_>
+
+ 0 -1 566 -1.8057259730994701e-03
+
+
+ 3.9851561188697815e-01 5.5175000429153442e-01
+
+
+
+
+ <_>
+ 111
+ 5.4620071411132812e+01
+
+ <_>
+
+ 0 -1 567 4.4299028813838959e-03
+
+
+ 2.8910180926322937e-01 6.3352262973785400e-01
+
+
+ <_>
+
+ 0 -1 568 -2.3813319858163595e-03
+
+
+ 6.2117892503738403e-01 3.4774878621101379e-01
+
+
+ <_>
+
+ 0 -1 569 2.2915711160749197e-03
+
+
+ 2.2544120252132416e-01 5.5821180343627930e-01
+
+
+ <_>
+
+ 0 -1 570 9.9457940086722374e-04
+
+
+ 3.7117108702659607e-01 5.9300708770751953e-01
+
+
+ <_>
+
+ 0 -1 571 7.7164667891338468e-04
+
+
+ 5.6517201662063599e-01 3.3479958772659302e-01
+
+
+ <_>
+
+ 0 -1 572 -1.1386410333216190e-03
+
+
+ 3.0691260099411011e-01 5.5086308717727661e-01
+
+
+ <_>
+
+ 0 -1 573 -1.6403039626311511e-04
+
+
+ 5.7628279924392700e-01 3.6990478634834290e-01
+
+
+ <_>
+
+ 0 -1 574 2.9793529392918572e-05
+
+
+ 2.6442441344261169e-01 5.4379111528396606e-01
+
+
+ <_>
+
+ 0 -1 575 8.5774902254343033e-03
+
+
+ 5.0511389970779419e-01 1.7957249283790588e-01
+
+
+ <_>
+
+ 0 -1 576 -2.6032689493149519e-04
+
+
+ 5.8269691467285156e-01 4.4468268752098083e-01
+
+
+ <_>
+
+ 0 -1 577 -6.1404630541801453e-03
+
+
+ 3.1138521432876587e-01 5.3469717502593994e-01
+
+
+ <_>
+
+ 0 -1 578 -2.3086950182914734e-02
+
+
+ 3.2779461145401001e-01 5.3311979770660400e-01
+
+
+ <_>
+
+ 0 -1 579 -1.4243650250136852e-02
+
+
+ 7.3817098140716553e-01 4.5880630612373352e-01
+
+
+ <_>
+
+ 0 -1 580 1.9487129524350166e-02
+
+
+ 5.2566307783126831e-01 2.2744719684123993e-01
+
+
+ <_>
+
+ 0 -1 581 -9.6681108698248863e-04
+
+
+ 5.5112308263778687e-01 3.8150069117546082e-01
+
+
+ <_>
+
+ 0 -1 582 3.1474709976464510e-03
+
+
+ 5.4256367683410645e-01 2.5437268614768982e-01
+
+
+ <_>
+
+ 0 -1 583 -1.8026070029009134e-04
+
+
+ 5.3801918029785156e-01 3.4063041210174561e-01
+
+
+ <_>
+
+ 0 -1 584 -6.0266260989010334e-03
+
+
+ 3.0358019471168518e-01 5.4205721616744995e-01
+
+
+ <_>
+
+ 0 -1 585 4.4462960795499384e-04
+
+
+ 3.9909970760345459e-01 5.6601101160049438e-01
+
+
+ <_>
+
+ 0 -1 586 2.2609760053455830e-03
+
+
+ 5.5628067255020142e-01 3.9406880736351013e-01
+
+
+ <_>
+
+ 0 -1 587 5.1133058965206146e-02
+
+
+ 4.6096539497375488e-01 7.1185618638992310e-01
+
+
+ <_>
+
+ 0 -1 588 -1.7786309123039246e-02
+
+
+ 2.3161660134792328e-01 5.3221440315246582e-01
+
+
+ <_>
+
+ 0 -1 589 -4.9679628573358059e-03
+
+
+ 2.3307719826698303e-01 5.1220291852951050e-01
+
+
+ <_>
+
+ 0 -1 590 2.0667689386755228e-03
+
+
+ 4.6574440598487854e-01 6.4554882049560547e-01
+
+
+ <_>
+
+ 0 -1 591 7.4413768015801907e-03
+
+
+ 5.1543921232223511e-01 2.3616339266300201e-01
+
+
+ <_>
+
+ 0 -1 592 -3.6277279723435640e-03
+
+
+ 6.2197732925415039e-01 4.4766610860824585e-01
+
+
+ <_>
+
+ 0 -1 593 -5.3530759178102016e-03
+
+
+ 1.8373550474643707e-01 5.1022082567214966e-01
+
+
+ <_>
+
+ 0 -1 594 1.4530919492244720e-01
+
+
+ 5.1459872722625732e-01 1.5359309315681458e-01
+
+
+ <_>
+
+ 0 -1 595 2.4394490756094456e-03
+
+
+ 5.3436601161956787e-01 3.6246618628501892e-01
+
+
+ <_>
+
+ 0 -1 596 -3.1283390708267689e-03
+
+
+ 6.2150079011917114e-01 4.8455920815467834e-01
+
+
+ <_>
+
+ 0 -1 597 1.7940260004252195e-03
+
+
+ 4.2992618680000305e-01 5.8241981267929077e-01
+
+
+ <_>
+
+ 0 -1 598 3.6253821104764938e-02
+
+
+ 5.2603340148925781e-01 1.4394679665565491e-01
+
+
+ <_>
+
+ 0 -1 599 -5.1746722310781479e-03
+
+
+ 3.5065388679504395e-01 5.2870452404022217e-01
+
+
+ <_>
+
+ 0 -1 600 6.5383297624066472e-04
+
+
+ 4.8096409440040588e-01 6.1220401525497437e-01
+
+
+ <_>
+
+ 0 -1 601 -2.6480229571461678e-02
+
+
+ 1.1393620073795319e-01 5.0455862283706665e-01
+
+
+ <_>
+
+ 0 -1 602 -3.0440660193562508e-03
+
+
+ 6.3520950078964233e-01 4.7947341203689575e-01
+
+
+ <_>
+
+ 0 -1 603 3.6993520334362984e-03
+
+
+ 5.1311182975769043e-01 2.4985109269618988e-01
+
+
+ <_>
+
+ 0 -1 604 -3.6762931267730892e-04
+
+
+ 5.4213947057723999e-01 3.7095320224761963e-01
+
+
+ <_>
+
+ 0 -1 605 -4.1382260620594025e-02
+
+
+ 1.8949599564075470e-01 5.0816917419433594e-01
+
+
+ <_>
+
+ 0 -1 606 -1.0532729793339968e-03
+
+
+ 6.4543670415878296e-01 4.7836089134216309e-01
+
+
+ <_>
+
+ 0 -1 607 -2.1648600231856108e-03
+
+
+ 6.2150311470031738e-01 4.4998261332511902e-01
+
+
+ <_>
+
+ 0 -1 608 -5.6747748749330640e-04
+
+
+ 3.7126109004020691e-01 5.4193347692489624e-01
+
+
+ <_>
+
+ 0 -1 609 1.7375840246677399e-01
+
+
+ 5.0236439704895020e-01 1.2157420068979263e-01
+
+
+ <_>
+
+ 0 -1 610 -2.9049699660390615e-03
+
+
+ 3.2402679324150085e-01 5.3818839788436890e-01
+
+
+ <_>
+
+ 0 -1 611 1.2299539521336555e-03
+
+
+ 4.1655078530311584e-01 5.7034862041473389e-01
+
+
+ <_>
+
+ 0 -1 612 -5.4329237900674343e-04
+
+
+ 3.8540428876876831e-01 5.5475491285324097e-01
+
+
+ <_>
+
+ 0 -1 613 -8.3297258242964745e-03
+
+
+ 2.2044940292835236e-01 5.0970828533172607e-01
+
+
+ <_>
+
+ 0 -1 614 -1.0417630255687982e-04
+
+
+ 5.6070661544799805e-01 4.3030360341072083e-01
+
+
+ <_>
+
+ 0 -1 615 3.1204700469970703e-02
+
+
+ 4.6216571331024170e-01 6.9820040464401245e-01
+
+
+ <_>
+
+ 0 -1 616 7.8943502157926559e-03
+
+
+ 5.2695941925048828e-01 2.2690680623054504e-01
+
+
+ <_>
+
+ 0 -1 617 -4.3645310215651989e-03
+
+
+ 6.3592231273651123e-01 4.5379561185836792e-01
+
+
+ <_>
+
+ 0 -1 618 7.6793059706687927e-03
+
+
+ 5.2747678756713867e-01 2.7404838800430298e-01
+
+
+ <_>
+
+ 0 -1 619 -2.5431139394640923e-02
+
+
+ 2.0385199785232544e-01 5.0717329978942871e-01
+
+
+ <_>
+
+ 0 -1 620 8.2000601105391979e-04
+
+
+ 4.5874550938606262e-01 6.1198681592941284e-01
+
+
+ <_>
+
+ 0 -1 621 2.9284600168466568e-03
+
+
+ 5.0712740421295166e-01 2.0282049477100372e-01
+
+
+ <_>
+
+ 0 -1 622 4.5256470912136137e-05
+
+
+ 4.8121041059494019e-01 5.4308217763900757e-01
+
+
+ <_>
+
+ 0 -1 623 1.3158309739083052e-03
+
+
+ 4.6258139610290527e-01 6.7793232202529907e-01
+
+
+ <_>
+
+ 0 -1 624 1.5870389761403203e-03
+
+
+ 5.3862917423248291e-01 3.4314650297164917e-01
+
+
+ <_>
+
+ 0 -1 625 -2.1539660170674324e-02
+
+
+ 2.5942500680685043e-02 5.0032228231430054e-01
+
+
+ <_>
+
+ 0 -1 626 1.4334480278193951e-02
+
+
+ 5.2028447389602661e-01 1.5906329452991486e-01
+
+
+ <_>
+
+ 0 -1 627 -8.3881383761763573e-03
+
+
+ 7.2824811935424805e-01 4.6480441093444824e-01
+
+
+ <_>
+
+ 0 -1 628 9.1906841844320297e-03
+
+
+ 5.5623567104339600e-01 3.9231911301612854e-01
+
+
+ <_>
+
+ 0 -1 629 -5.8453059755265713e-03
+
+
+ 6.8033927679061890e-01 4.6291279792785645e-01
+
+
+ <_>
+
+ 0 -1 630 -5.4707799106836319e-02
+
+
+ 2.5616711378097534e-01 5.2061259746551514e-01
+
+
+ <_>
+
+ 0 -1 631 9.1142775490880013e-03
+
+
+ 5.1896202564239502e-01 3.0538770556449890e-01
+
+
+ <_>
+
+ 0 -1 632 -1.5575000084936619e-02
+
+
+ 1.2950749695301056e-01 5.1690948009490967e-01
+
+
+ <_>
+
+ 0 -1 633 -1.2050600344082341e-04
+
+
+ 5.7350981235504150e-01 4.2308250069618225e-01
+
+
+ <_>
+
+ 0 -1 634 1.2273970060050488e-03
+
+
+ 5.2898782491683960e-01 4.0797919034957886e-01
+
+
+ <_>
+
+ 0 -1 635 -1.2186600361019373e-03
+
+
+ 6.5756398439407349e-01 4.5744091272354126e-01
+
+
+ <_>
+
+ 0 -1 636 -3.3256649039685726e-03
+
+
+ 3.6280471086502075e-01 5.1950198411941528e-01
+
+
+ <_>
+
+ 0 -1 637 -1.3288309797644615e-02
+
+
+ 1.2842659652233124e-01 5.0434887409210205e-01
+
+
+ <_>
+
+ 0 -1 638 -3.3839771058410406e-03
+
+
+ 6.2922400236129761e-01 4.7575059533119202e-01
+
+
+ <_>
+
+ 0 -1 639 -2.1954220533370972e-01
+
+
+ 1.4877319335937500e-01 5.0650137662887573e-01
+
+
+ <_>
+
+ 0 -1 640 4.9111708067357540e-03
+
+
+ 4.2561021447181702e-01 5.6658387184143066e-01
+
+
+ <_>
+
+ 0 -1 641 -1.8744950648397207e-04
+
+
+ 4.0041440725326538e-01 5.5868571996688843e-01
+
+
+ <_>
+
+ 0 -1 642 -5.2178641781210899e-03
+
+
+ 6.0091161727905273e-01 4.8127061128616333e-01
+
+
+ <_>
+
+ 0 -1 643 -1.1111519997939467e-03
+
+
+ 3.5149338841438293e-01 5.2870899438858032e-01
+
+
+ <_>
+
+ 0 -1 644 4.4036400504410267e-03
+
+
+ 4.6422758698463440e-01 5.9240859746932983e-01
+
+
+ <_>
+
+ 0 -1 645 1.2299499660730362e-01
+
+
+ 5.0255292654037476e-01 6.9152481853961945e-02
+
+
+ <_>
+
+ 0 -1 646 -1.2313510291278362e-02
+
+
+ 5.8845919370651245e-01 4.9340128898620605e-01
+
+
+ <_>
+
+ 0 -1 647 4.1471039876341820e-03
+
+
+ 4.3722391128540039e-01 5.8934777975082397e-01
+
+
+ <_>
+
+ 0 -1 648 -3.5502649843692780e-03
+
+
+ 4.3275511264801025e-01 5.3962701559066772e-01
+
+
+ <_>
+
+ 0 -1 649 -1.9224269315600395e-02
+
+
+ 1.9131340086460114e-01 5.0683307647705078e-01
+
+
+ <_>
+
+ 0 -1 650 1.4395059552043676e-03
+
+
+ 5.3081780672073364e-01 4.2435330152511597e-01
+
+
+ <_>
+
+ 0 -1 651 -6.7751999013125896e-03
+
+
+ 6.3653957843780518e-01 4.5400860905647278e-01
+
+
+ <_>
+
+ 0 -1 652 7.0119630545377731e-03
+
+
+ 5.1898342370986938e-01 3.0261999368667603e-01
+
+
+ <_>
+
+ 0 -1 653 5.4014651104807854e-03
+
+
+ 5.1050621271133423e-01 2.5576829910278320e-01
+
+
+ <_>
+
+ 0 -1 654 9.0274988906458020e-04
+
+
+ 4.6969148516654968e-01 5.8618277311325073e-01
+
+
+ <_>
+
+ 0 -1 655 1.1474450118839741e-02
+
+
+ 5.0536459684371948e-01 1.5271779894828796e-01
+
+
+ <_>
+
+ 0 -1 656 -6.7023430019617081e-03
+
+
+ 6.5089809894561768e-01 4.8906040191650391e-01
+
+
+ <_>
+
+ 0 -1 657 -2.0462959073483944e-03
+
+
+ 6.2418168783187866e-01 4.5146000385284424e-01
+
+
+ <_>
+
+ 0 -1 658 -9.9951568990945816e-03
+
+
+ 3.4327811002731323e-01 5.4009538888931274e-01
+
+
+ <_>
+
+ 0 -1 659 -3.5700708627700806e-02
+
+
+ 1.8780590593814850e-01 5.0740778446197510e-01
+
+
+ <_>
+
+ 0 -1 660 4.5584561303257942e-04
+
+
+ 3.8052770495414734e-01 5.4025697708129883e-01
+
+
+ <_>
+
+ 0 -1 661 -5.4260600358247757e-02
+
+
+ 6.8437147140502930e-01 4.5950970053672791e-01
+
+
+ <_>
+
+ 0 -1 662 6.0600461438298225e-03
+
+
+ 5.5029052495956421e-01 4.5005279779434204e-01
+
+
+ <_>
+
+ 0 -1 663 -6.4791832119226456e-03
+
+
+ 3.3688580989837646e-01 5.3107571601867676e-01
+
+
+ <_>
+
+ 0 -1 664 -1.4939469983801246e-03
+
+
+ 6.4876401424407959e-01 4.7561758756637573e-01
+
+
+ <_>
+
+ 0 -1 665 1.4610530342906713e-05
+
+
+ 4.0345790982246399e-01 5.4510641098022461e-01
+
+
+ <_>
+
+ 0 -1 666 -7.2321938350796700e-03
+
+
+ 6.3868737220764160e-01 4.8247399926185608e-01
+
+
+ <_>
+
+ 0 -1 667 -4.0645818226039410e-03
+
+
+ 2.9864218831062317e-01 5.1573359966278076e-01
+
+
+ <_>
+
+ 0 -1 668 3.0463080853223801e-02
+
+
+ 5.0221997499465942e-01 7.1599560976028442e-01
+
+
+ <_>
+
+ 0 -1 669 -8.0544911324977875e-03
+
+
+ 6.4924520254135132e-01 4.6192750334739685e-01
+
+
+ <_>
+
+ 0 -1 670 3.9505138993263245e-02
+
+
+ 5.1505708694458008e-01 2.4506139755249023e-01
+
+
+ <_>
+
+ 0 -1 671 8.4530208259820938e-03
+
+
+ 4.5736691355705261e-01 6.3940370082855225e-01
+
+
+ <_>
+
+ 0 -1 672 -1.1688120430335402e-03
+
+
+ 3.8655120134353638e-01 5.4836612939834595e-01
+
+
+ <_>
+
+ 0 -1 673 2.8070670086890459e-03
+
+
+ 5.1285791397094727e-01 2.7014800906181335e-01
+
+
+ <_>
+
+ 0 -1 674 4.7365209320560098e-04
+
+
+ 4.0515819191932678e-01 5.3874611854553223e-01
+
+
+ <_>
+
+ 0 -1 675 1.1741080321371555e-02
+
+
+ 5.2959501743316650e-01 3.7194138765335083e-01
+
+
+ <_>
+
+ 0 -1 676 3.1833238899707794e-03
+
+
+ 4.7894069552421570e-01 6.8951261043548584e-01
+
+
+ <_>
+
+ 0 -1 677 7.0241501089185476e-04
+
+
+ 5.3844892978668213e-01 3.9180809259414673e-01
+
+
+
+
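+ <!-- stage boundary: 102 weak classifiers, stage threshold 5.0169731140136719e+01 -->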
+ <_>
+ 102
+ 5.0169731140136719e+01
+
+ <_>
+
+ 0 -1 678 1.7059929668903351e-02
+
+
+ 3.9485278725624084e-01 7.1425348520278931e-01
+
+
+ <_>
+
+ 0 -1 679 2.1840840578079224e-02
+
+
+ 3.3703160285949707e-01 6.0900169610977173e-01
+
+
+ <_>
+
+ 0 -1 680 2.4520049919374287e-04
+
+
+ 3.5005760192871094e-01 5.9879022836685181e-01
+
+
+ <_>
+
+ 0 -1 681 8.3272606134414673e-03
+
+
+ 3.2675281167030334e-01 5.6972408294677734e-01
+
+
+ <_>
+
+ 0 -1 682 5.7148298947140574e-04
+
+
+ 3.0445998907089233e-01 5.5316567420959473e-01
+
+
+ <_>
+
+ 0 -1 683 6.7373987985774875e-04
+
+
+ 3.6500120162963867e-01 5.6726312637329102e-01
+
+
+ <_>
+
+ 0 -1 684 3.4681590477703139e-05
+
+
+ 3.3135411143302917e-01 5.3887271881103516e-01
+
+
+ <_>
+
+ 0 -1 685 -5.8563398197293282e-03
+
+
+ 2.6979428529739380e-01 5.4987788200378418e-01
+
+
+ <_>
+
+ 0 -1 686 8.5102273151278496e-03
+
+
+ 5.2693581581115723e-01 2.7628791332244873e-01
+
+
+ <_>
+
+ 0 -1 687 -6.9817207753658295e-02
+
+
+ 2.9096031188964844e-01 5.2592468261718750e-01
+
+
+ <_>
+
+ 0 -1 688 -8.6113670840859413e-04
+
+
+ 5.8925771713256836e-01 4.0736979246139526e-01
+
+
+ <_>
+
+ 0 -1 689 9.7149249631911516e-04
+
+
+ 3.5235640406608582e-01 5.4158622026443481e-01
+
+
+ <_>
+
+ 0 -1 690 -1.4727490452060010e-05
+
+
+ 5.4230177402496338e-01 3.5031560063362122e-01
+
+
+ <_>
+
+ 0 -1 691 4.8420291393995285e-02
+
+
+ 5.1939457654953003e-01 3.4111958742141724e-01
+
+
+ <_>
+
+ 0 -1 692 1.3257140526548028e-03
+
+
+ 3.1577691435813904e-01 5.3353762626647949e-01
+
+
+ <_>
+
+ 0 -1 693 1.4922149603080470e-05
+
+
+ 4.4512999057769775e-01 5.5365538597106934e-01
+
+
+ <_>
+
+ 0 -1 694 -2.7173398993909359e-03
+
+
+ 3.0317419767379761e-01 5.2480888366699219e-01
+
+
+ <_>
+
+ 0 -1 695 2.9219500720500946e-03
+
+
+ 4.7814530134201050e-01 6.6060417890548706e-01
+
+
+ <_>
+
+ 0 -1 696 -1.9804988987743855e-03
+
+
+ 3.1863081455230713e-01 5.2876251935958862e-01
+
+
+ <_>
+
+ 0 -1 697 -4.0012109093368053e-03
+
+
+ 6.4135968685150146e-01 4.7499281167984009e-01
+
+
+ <_>
+
+ 0 -1 698 -4.3491991236805916e-03
+
+
+ 1.5074980258941650e-01 5.0989967584609985e-01
+
+
+ <_>
+
+ 0 -1 699 1.3490889687091112e-03
+
+
+ 4.3161588907241821e-01 5.8811670541763306e-01
+
+
+ <_>
+
+ 0 -1 700 1.8597070127725601e-02
+
+
+ 4.7355538606643677e-01 9.0897941589355469e-01
+
+
+ <_>
+
+ 0 -1 701 -1.8562379991635680e-03
+
+
+ 3.5531890392303467e-01 5.5778372287750244e-01
+
+
+ <_>
+
+ 0 -1 702 2.2940430790185928e-03
+
+
+ 4.5000949501991272e-01 6.5808779001235962e-01
+
+
+ <_>
+
+ 0 -1 703 2.9982850537635386e-04
+
+
+ 5.6292420625686646e-01 3.9758789539337158e-01
+
+
+ <_>
+
+ 0 -1 704 3.5455459728837013e-03
+
+
+ 5.3815472126007080e-01 3.6054858565330505e-01
+
+
+ <_>
+
+ 0 -1 705 9.6104722470045090e-03
+
+
+ 5.2559971809387207e-01 1.7967459559440613e-01
+
+
+ <_>
+
+ 0 -1 706 -6.2783220782876015e-03
+
+
+ 2.2728569805622101e-01 5.1140302419662476e-01
+
+
+ <_>
+
+ 0 -1 707 3.4598479978740215e-03
+
+
+ 4.6263080835342407e-01 6.6082191467285156e-01
+
+
+ <_>
+
+ 0 -1 708 -1.3112019514665008e-03
+
+
+ 6.3175398111343384e-01 4.4368579983711243e-01
+
+
+ <_>
+
+ 0 -1 709 2.6876179035753012e-03
+
+
+ 5.4211097955703735e-01 4.0540221333503723e-01
+
+
+ <_>
+
+ 0 -1 710 3.9118169806897640e-03
+
+
+ 5.3584778308868408e-01 3.2734549045562744e-01
+
+
+ <_>
+
+ 0 -1 711 -1.4206450432538986e-02
+
+
+ 7.7935767173767090e-01 4.9757811427116394e-01
+
+
+ <_>
+
+ 0 -1 712 7.1705528534948826e-04
+
+
+ 5.2973198890686035e-01 3.5609039664268494e-01
+
+
+ <_>
+
+ 0 -1 713 1.6635019565001130e-03
+
+
+ 4.6780940890312195e-01 5.8164817094802856e-01
+
+
+ <_>
+
+ 0 -1 714 3.3686188980937004e-03
+
+
+ 5.2767342329025269e-01 3.4464201331138611e-01
+
+
+ <_>
+
+ 0 -1 715 1.2799530290067196e-02
+
+
+ 4.8346799612045288e-01 7.4721592664718628e-01
+
+
+ <_>
+
+ 0 -1 716 3.3901201095432043e-03
+
+
+ 4.5118591189384460e-01 6.4017212390899658e-01
+
+
+ <_>
+
+ 0 -1 717 4.7070779837667942e-03
+
+
+ 5.3356587886810303e-01 3.5552209615707397e-01
+
+
+ <_>
+
+ 0 -1 718 1.4819339849054813e-03
+
+
+ 4.2507070302963257e-01 5.7727241516113281e-01
+
+
+ <_>
+
+ 0 -1 719 -6.9995759986341000e-03
+
+
+ 3.0033200979232788e-01 5.2929002046585083e-01
+
+
+ <_>
+
+ 0 -1 720 1.5939010307192802e-02
+
+
+ 5.0673192739486694e-01 1.6755819320678711e-01
+
+
+ <_>
+
+ 0 -1 721 7.6377349905669689e-03
+
+
+ 4.7950699925422668e-01 7.0856010913848877e-01
+
+
+ <_>
+
+ 0 -1 722 6.7334040068089962e-03
+
+
+ 5.1331132650375366e-01 2.1624700725078583e-01
+
+
+ <_>
+
+ 0 -1 723 -1.2858809903264046e-02
+
+
+ 1.9388419389724731e-01 5.2513718605041504e-01
+
+
+ <_>
+
+ 0 -1 724 -6.2270800117403269e-04
+
+
+ 5.6865382194519043e-01 4.1978681087493896e-01
+
+
+ <_>
+
+ 0 -1 725 -5.2651681471616030e-04
+
+
+ 4.2241689562797546e-01 5.4296958446502686e-01
+
+
+ <_>
+
+ 0 -1 726 1.1075099930167198e-02
+
+
+ 5.1137751340866089e-01 2.5145179033279419e-01
+
+
+ <_>
+
+ 0 -1 727 -3.6728251725435257e-02
+
+
+ 7.1946620941162109e-01 4.8496189713478088e-01
+
+
+ <_>
+
+ 0 -1 728 -2.8207109426148236e-04
+
+
+ 3.8402619957923889e-01 5.3944462537765503e-01
+
+
+ <_>
+
+ 0 -1 729 -2.7489690110087395e-03
+
+
+ 5.9370887279510498e-01 4.5691820979118347e-01
+
+
+ <_>
+
+ 0 -1 730 1.0047519579529762e-02
+
+
+ 5.1385760307312012e-01 2.8022980690002441e-01
+
+
+ <_>
+
+ 0 -1 731 -8.1497840583324432e-03
+
+
+ 6.0900372266769409e-01 4.6361210942268372e-01
+
+
+ <_>
+
+ 0 -1 732 -6.8833888508379459e-03
+
+
+ 3.4586110711097717e-01 5.2546602487564087e-01
+
+
+ <_>
+
+ 0 -1 733 -1.4039360394235700e-05
+
+
+ 5.6931042671203613e-01 4.0820831060409546e-01
+
+
+ <_>
+
+ 0 -1 734 1.5498419525101781e-03
+
+
+ 4.3505370616912842e-01 5.8065170049667358e-01
+
+
+ <_>
+
+ 0 -1 735 -6.7841499112546444e-03
+
+
+ 1.4688730239868164e-01 5.1827752590179443e-01
+
+
+ <_>
+
+ 0 -1 736 2.1705629478674382e-04
+
+
+ 5.2935242652893066e-01 3.4561741352081299e-01
+
+
+ <_>
+
+ 0 -1 737 3.1198898795992136e-04
+
+
+ 4.6524509787559509e-01 5.9424138069152832e-01
+
+
+ <_>
+
+ 0 -1 738 5.4507530294358730e-03
+
+
+ 4.6535089612007141e-01 7.0248460769653320e-01
+
+
+ <_>
+
+ 0 -1 739 -2.5818689027801156e-04
+
+
+ 5.4972952604293823e-01 3.7689670920372009e-01
+
+
+ <_>
+
+ 0 -1 740 -1.7442539334297180e-02
+
+
+ 3.9190879464149475e-01 5.4574978351593018e-01
+
+
+ <_>
+
+ 0 -1 741 -4.5343529433012009e-02
+
+
+ 1.6313570737838745e-01 5.1549088954925537e-01
+
+
+ <_>
+
+ 0 -1 742 1.9190689781680703e-03
+
+
+ 5.1458978652954102e-01 2.7918958663940430e-01
+
+
+ <_>
+
+ 0 -1 743 -6.0177869163453579e-03
+
+
+ 6.5176361799240112e-01 4.7563329339027405e-01
+
+
+ <_>
+
+ 0 -1 744 -4.0720738470554352e-03
+
+
+ 5.5146527290344238e-01 4.0926858782768250e-01
+
+
+ <_>
+
+ 0 -1 745 3.9855059003457427e-04
+
+
+ 3.1652408838272095e-01 5.2855509519577026e-01
+
+
+ <_>
+
+ 0 -1 746 -6.5418570302426815e-03
+
+
+ 6.8533778190612793e-01 4.6528089046478271e-01
+
+
+ <_>
+
+ 0 -1 747 3.4845089539885521e-03
+
+
+ 5.4845881462097168e-01 4.5027598738670349e-01
+
+
+ <_>
+
+ 0 -1 748 -1.3696780428290367e-02
+
+
+ 6.3957798480987549e-01 4.5725551247596741e-01
+
+
+ <_>
+
+ 0 -1 749 -1.7347140237689018e-02
+
+
+ 2.7510729432106018e-01 5.1816147565841675e-01
+
+
+ <_>
+
+ 0 -1 750 -4.0885428898036480e-03
+
+
+ 3.3256360888481140e-01 5.1949840784072876e-01
+
+
+ <_>
+
+ 0 -1 751 -9.4687901437282562e-03
+
+
+ 5.9422808885574341e-01 4.8518198728561401e-01
+
+
+ <_>
+
+ 0 -1 752 1.7084840219467878e-03
+
+
+ 4.1671109199523926e-01 5.5198061466217041e-01
+
+
+ <_>
+
+ 0 -1 753 9.4809094443917274e-03
+
+
+ 5.4338949918746948e-01 4.2085149884223938e-01
+
+
+ <_>
+
+ 0 -1 754 -4.7389650717377663e-03
+
+
+ 6.4071899652481079e-01 4.5606550574302673e-01
+
+
+ <_>
+
+ 0 -1 755 6.5761050209403038e-03
+
+
+ 5.2145552635192871e-01 2.2582270205020905e-01
+
+
+ <_>
+
+ 0 -1 756 -2.1690549328923225e-03
+
+
+ 3.1515279412269592e-01 5.1567047834396362e-01
+
+
+ <_>
+
+ 0 -1 757 1.4660170301795006e-02
+
+
+ 4.8708370327949524e-01 6.6899412870407104e-01
+
+
+ <_>
+
+ 0 -1 758 1.7231999663636088e-04
+
+
+ 3.5697489976882935e-01 5.2510780096054077e-01
+
+
+ <_>
+
+ 0 -1 759 -2.1803760901093483e-02
+
+
+ 8.8259208202362061e-01 4.9663299322128296e-01
+
+
+ <_>
+
+ 0 -1 760 -9.4736106693744659e-02
+
+
+ 1.4461620151996613e-01 5.0611138343811035e-01
+
+
+ <_>
+
+ 0 -1 761 5.5825551971793175e-03
+
+
+ 5.3964787721633911e-01 4.2380660772323608e-01
+
+
+ <_>
+
+ 0 -1 762 1.9517090404406190e-03
+
+
+ 4.1704109311103821e-01 5.4977869987487793e-01
+
+
+ <_>
+
+ 0 -1 763 1.2149900197982788e-02
+
+
+ 4.6983671188354492e-01 5.6642740964889526e-01
+
+
+ <_>
+
+ 0 -1 764 -7.5169620104134083e-03
+
+
+ 6.2677729129791260e-01 4.4631358981132507e-01
+
+
+ <_>
+
+ 0 -1 765 -7.1667909622192383e-02
+
+
+ 3.0970111489295959e-01 5.2210032939910889e-01
+
+
+ <_>
+
+ 0 -1 766 -8.8292419910430908e-02
+
+
+ 8.1123888492584229e-02 5.0063651800155640e-01
+
+
+ <_>
+
+ 0 -1 767 3.1063079833984375e-02
+
+
+ 5.1555037498474121e-01 1.2822559475898743e-01
+
+
+ <_>
+
+ 0 -1 768 4.6621840447187424e-02
+
+
+ 4.6997779607772827e-01 7.3639607429504395e-01
+
+
+ <_>
+
+ 0 -1 769 -1.2189489789307117e-02
+
+
+ 3.9205300807952881e-01 5.5189967155456543e-01
+
+
+ <_>
+
+ 0 -1 770 1.3016110286116600e-02
+
+
+ 5.2606582641601562e-01 3.6851361393928528e-01
+
+
+ <_>
+
+ 0 -1 771 -3.4952899441123009e-03
+
+
+ 6.3392949104309082e-01 4.7162809967994690e-01
+
+
+ <_>
+
+ 0 -1 772 -4.4015039748046547e-05
+
+
+ 5.3330272436141968e-01 3.7761849164962769e-01
+
+
+ <_>
+
+ 0 -1 773 -1.0966490209102631e-01
+
+
+ 1.7653420567512512e-01 5.1983469724655151e-01
+
+
+ <_>
+
+ 0 -1 774 -9.0279558207839727e-04
+
+
+ 5.3241598606109619e-01 3.8389080762863159e-01
+
+
+ <_>
+
+ 0 -1 775 7.1126641705632210e-04
+
+
+ 4.6479299664497375e-01 5.7552242279052734e-01
+
+
+ <_>
+
+ 0 -1 776 -3.1250279862433672e-03
+
+
+ 3.2367089390754700e-01 5.1667708158493042e-01
+
+
+ <_>
+
+ 0 -1 777 2.4144679773598909e-03
+
+
+ 4.7874391078948975e-01 6.4597177505493164e-01
+
+
+ <_>
+
+ 0 -1 778 4.4391240226104856e-04
+
+
+ 4.4093081355094910e-01 6.0102558135986328e-01
+
+
+ <_>
+
+ 0 -1 779 -2.2611189342569560e-04
+
+
+ 4.0381139516830444e-01 5.4932558536529541e-01
+
+
+
+
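+ <!-- stage boundary: 135 weak classifiers, stage threshold 6.6669120788574219e+01 -->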
+ <_>
+ 135
+ 6.6669120788574219e+01
+
+ <_>
+
+ 0 -1 780 -4.6901289373636246e-02
+
+
+ 6.6001719236373901e-01 3.7438011169433594e-01
+
+
+ <_>
+
+ 0 -1 781 -1.4568349579349160e-03
+
+
+ 5.7839912176132202e-01 3.4377971291542053e-01
+
+
+ <_>
+
+ 0 -1 782 5.5598369799554348e-03
+
+
+ 3.6222669482231140e-01 5.9082162380218506e-01
+
+
+ <_>
+
+ 0 -1 783 7.3170487303286791e-04
+
+
+ 5.5004191398620605e-01 2.8735581040382385e-01
+
+
+ <_>
+
+ 0 -1 784 1.3318009441718459e-03
+
+
+ 2.6731699705123901e-01 5.4310190677642822e-01
+
+
+ <_>
+
+ 0 -1 785 2.4347059661522508e-04
+
+
+ 3.8550278544425964e-01 5.7413887977600098e-01
+
+
+ <_>
+
+ 0 -1 786 -3.0512469820678234e-03
+
+
+ 5.5032098293304443e-01 3.4628450870513916e-01
+
+
+ <_>
+
+ 0 -1 787 -6.8657199153676629e-04
+
+
+ 3.2912218570709229e-01 5.4295092821121216e-01
+
+
+ <_>
+
+ 0 -1 788 1.4668200165033340e-03
+
+
+ 3.5883820056915283e-01 5.3518110513687134e-01
+
+
+ <_>
+
+ 0 -1 789 3.2021870720200241e-04
+
+
+ 4.2968419194221497e-01 5.7002341747283936e-01
+
+
+ <_>
+
+ 0 -1 790 7.4122188379988074e-04
+
+
+ 5.2821648120880127e-01 3.3668708801269531e-01
+
+
+ <_>
+
+ 0 -1 791 3.8330298848450184e-03
+
+
+ 4.5595678687095642e-01 6.2573361396789551e-01
+
+
+ <_>
+
+ 0 -1 792 -1.5456439927220345e-02
+
+
+ 2.3501169681549072e-01 5.1294529438018799e-01
+
+
+ <_>
+
+ 0 -1 793 2.6796779129654169e-03
+
+
+ 5.3294152021408081e-01 4.1550621390342712e-01
+
+
+ <_>
+
+ 0 -1 794 2.8296569362282753e-03
+
+
+ 4.2730879783630371e-01 5.8045381307601929e-01
+
+
+ <_>
+
+ 0 -1 795 -3.9444249123334885e-03
+
+
+ 2.9126119613647461e-01 5.2026861906051636e-01
+
+
+ <_>
+
+ 0 -1 796 2.7179559692740440e-03
+
+
+ 5.3076881170272827e-01 3.5856771469116211e-01
+
+
+ <_>
+
+ 0 -1 797 5.9077627956867218e-03
+
+
+ 4.7037750482559204e-01 5.9415858983993530e-01
+
+
+ <_>
+
+ 0 -1 798 -4.2240349575877190e-03
+
+
+ 2.1415670216083527e-01 5.0887960195541382e-01
+
+
+ <_>
+
+ 0 -1 799 4.0725888684391975e-03
+
+
+ 4.7664138674736023e-01 6.8410611152648926e-01
+
+
+ <_>
+
+ 0 -1 800 1.0149530135095119e-02
+
+
+ 5.3607988357543945e-01 3.7484970688819885e-01
+
+
+ <_>
+
+ 0 -1 801 -1.8864999583456665e-04
+
+
+ 5.7201302051544189e-01 3.8538050651550293e-01
+
+
+ <_>
+
+ 0 -1 802 -4.8864358104765415e-03
+
+
+ 3.6931228637695312e-01 5.3409588336944580e-01
+
+
+ <_>
+
+ 0 -1 803 2.6158479973673820e-02
+
+
+ 4.9623748660087585e-01 6.0599899291992188e-01
+
+
+ <_>
+
+ 0 -1 804 4.8560759751126170e-04
+
+
+ 4.4389459490776062e-01 6.0124689340591431e-01
+
+
+ <_>
+
+ 0 -1 805 1.1268709786236286e-02
+
+
+ 5.2442502975463867e-01 1.8403880298137665e-01
+
+
+ <_>
+
+ 0 -1 806 -2.8114619199186563e-03
+
+
+ 6.0602837800979614e-01 4.4098970293998718e-01
+
+
+ <_>
+
+ 0 -1 807 -5.6112729944288731e-03
+
+
+ 3.8911709189414978e-01 5.5892372131347656e-01
+
+
+ <_>
+
+ 0 -1 808 8.5680093616247177e-03
+
+
+ 5.0693458318710327e-01 2.0626190304756165e-01
+
+
+ <_>
+
+ 0 -1 809 -3.8172779022715986e-04
+
+
+ 5.8822017908096313e-01 4.1926109790802002e-01
+
+
+ <_>
+
+ 0 -1 810 -1.7680290329735726e-04
+
+
+ 5.5336058139801025e-01 4.0033689141273499e-01
+
+
+ <_>
+
+ 0 -1 811 6.5112537704408169e-03
+
+
+ 3.3101469278335571e-01 5.4441910982131958e-01
+
+
+ <_>
+
+ 0 -1 812 -6.5948683186434209e-05
+
+
+ 5.4338318109512329e-01 3.9449059963226318e-01
+
+
+ <_>
+
+ 0 -1 813 6.9939051754772663e-03
+
+
+ 5.6003582477569580e-01 4.1927140951156616e-01
+
+
+ <_>
+
+ 0 -1 814 -4.6744439750909805e-03
+
+
+ 6.6854667663574219e-01 4.6049609780311584e-01
+
+
+ <_>
+
+ 0 -1 815 1.1589850299060345e-02
+
+
+ 5.3571212291717529e-01 2.9268300533294678e-01
+
+
+ <_>
+
+ 0 -1 816 1.3007840141654015e-02
+
+
+ 4.6798178553581238e-01 7.3074632883071899e-01
+
+
+ <_>
+
+ 0 -1 817 -1.1008579749614000e-03
+
+
+ 3.9375010132789612e-01 5.4150652885437012e-01
+
+
+ <_>
+
+ 0 -1 818 6.0472649056464434e-04
+
+
+ 4.2423760890960693e-01 5.6040412187576294e-01
+
+
+ <_>
+
+ 0 -1 819 -1.4494840055704117e-02
+
+
+ 3.6312100291252136e-01 5.2931827306747437e-01
+
+
+ <_>
+
+ 0 -1 820 -5.3056948818266392e-03
+
+
+ 6.8604522943496704e-01 4.6218210458755493e-01
+
+
+ <_>
+
+ 0 -1 821 -8.1829127157106996e-04
+
+
+ 3.9440968632698059e-01 5.4204392433166504e-01
+
+
+ <_>
+
+ 0 -1 822 -1.9077520817518234e-02
+
+
+ 1.9626219570636749e-01 5.0378918647766113e-01
+
+
+ <_>
+
+ 0 -1 823 3.5549470339901745e-04
+
+
+ 4.0862590074539185e-01 5.6139731407165527e-01
+
+
+ <_>
+
+ 0 -1 824 1.9679730758070946e-03
+
+
+ 4.4891211390495300e-01 5.9261232614517212e-01
+
+
+ <_>
+
+ 0 -1 825 6.9189141504466534e-03
+
+
+ 5.3359258174896240e-01 3.7283858656883240e-01
+
+
+ <_>
+
+ 0 -1 826 2.9872779268771410e-03
+
+
+ 5.1113212108612061e-01 2.9756438732147217e-01
+
+
+ <_>
+
+ 0 -1 827 -6.2264618463814259e-03
+
+
+ 5.5414897203445435e-01 4.8245379328727722e-01
+
+
+ <_>
+
+ 0 -1 828 1.3353300280869007e-02
+
+
+ 4.5864239335060120e-01 6.4147979021072388e-01
+
+
+ <_>
+
+ 0 -1 829 3.3505238592624664e-02
+
+
+ 5.3924250602722168e-01 3.4299948811531067e-01
+
+
+ <_>
+
+ 0 -1 830 -2.5294460356235504e-03
+
+
+ 1.7037139832973480e-01 5.0133150815963745e-01
+
+
+ <_>
+
+ 0 -1 831 -1.2801629491150379e-03
+
+
+ 5.3054618835449219e-01 4.6974050998687744e-01
+
+
+ <_>
+
+ 0 -1 832 7.0687388069927692e-03
+
+
+ 4.6155458688735962e-01 6.4365047216415405e-01
+
+
+ <_>
+
+ 0 -1 833 9.6880499040707946e-04
+
+
+ 4.8335990309715271e-01 6.0438942909240723e-01
+
+
+ <_>
+
+ 0 -1 834 3.9647659286856651e-03
+
+
+ 5.1876372098922729e-01 3.2318168878555298e-01
+
+
+ <_>
+
+ 0 -1 835 -2.2057730704545975e-02
+
+
+ 4.0792569518089294e-01 5.2009809017181396e-01
+
+
+ <_>
+
+ 0 -1 836 -6.6906312713399529e-04
+
+
+ 5.3316092491149902e-01 3.8156008720397949e-01
+
+
+ <_>
+
+ 0 -1 837 -6.7009328631684184e-04
+
+
+ 5.6554222106933594e-01 4.6889019012451172e-01
+
+
+ <_>
+
+ 0 -1 838 7.4284552829340100e-04
+
+
+ 4.5343810319900513e-01 6.2874001264572144e-01
+
+
+ <_>
+
+ 0 -1 839 2.2227810695767403e-03
+
+
+ 5.3506332635879517e-01 3.3036559820175171e-01
+
+
+ <_>
+
+ 0 -1 840 -5.4130521602928638e-03
+
+
+ 1.1136870086193085e-01 5.0054347515106201e-01
+
+
+ <_>
+
+ 0 -1 841 -1.4520040167553816e-05
+
+
+ 5.6287378072738647e-01 4.3251338601112366e-01
+
+
+ <_>
+
+ 0 -1 842 2.3369169502984732e-04
+
+
+ 4.1658350825309753e-01 5.4477912187576294e-01
+
+
+ <_>
+
+ 0 -1 843 4.2894547805190086e-03
+
+
+ 4.8603910207748413e-01 6.7786490917205811e-01
+
+
+ <_>
+
+ 0 -1 844 5.9103150852024555e-03
+
+
+ 5.2623051404953003e-01 3.6121138930320740e-01
+
+
+ <_>
+
+ 0 -1 845 1.2900539673864841e-02
+
+
+ 5.3193771839141846e-01 3.2502880692481995e-01
+
+
+ <_>
+
+ 0 -1 846 4.6982979401946068e-03
+
+
+ 4.6182450652122498e-01 6.6659259796142578e-01
+
+
+ <_>
+
+ 0 -1 847 1.0439859703183174e-02
+
+
+ 5.5056709051132202e-01 3.8836041092872620e-01
+
+
+ <_>
+
+ 0 -1 848 3.0443191062659025e-03
+
+
+ 4.6978530287742615e-01 7.3018449544906616e-01
+
+
+ <_>
+
+ 0 -1 849 -6.1593751888722181e-04
+
+
+ 3.8308390974998474e-01 5.4649841785430908e-01
+
+
+ <_>
+
+ 0 -1 850 -3.4247159492224455e-03
+
+
+ 2.5663000345230103e-01 5.0895309448242188e-01
+
+
+ <_>
+
+ 0 -1 851 -9.3538565561175346e-03
+
+
+ 6.4699661731719971e-01 4.9407958984375000e-01
+
+
+ <_>
+
+ 0 -1 852 5.2338998764753342e-02
+
+
+ 4.7459828853607178e-01 7.8787708282470703e-01
+
+
+ <_>
+
+ 0 -1 853 3.5765620414167643e-03
+
+
+ 5.3066647052764893e-01 2.7484980225563049e-01
+
+
+ <_>
+
+ 0 -1 854 7.1555317845195532e-04
+
+
+ 5.4131257534027100e-01 4.0419089794158936e-01
+
+
+ <_>
+
+ 0 -1 855 -1.0516679845750332e-02
+
+
+ 6.1585122346878052e-01 4.8152831196784973e-01
+
+
+ <_>
+
+ 0 -1 856 7.7347927726805210e-03
+
+
+ 4.6958059072494507e-01 7.0289808511734009e-01
+
+
+ <_>
+
+ 0 -1 857 -4.3226778507232666e-03
+
+
+ 2.8495660424232483e-01 5.3046840429306030e-01
+
+
+ <_>
+
+ 0 -1 858 -2.5534399319440126e-03
+
+
+ 7.0569849014282227e-01 4.6888920664787292e-01
+
+
+ <_>
+
+ 0 -1 859 1.0268510231981054e-04
+
+
+ 3.9029321074485779e-01 5.5734640359878540e-01
+
+
+ <_>
+
+ 0 -1 860 7.1395188570022583e-06
+
+
+ 3.6842319369316101e-01 5.2639877796173096e-01
+
+
+ <_>
+
+ 0 -1 861 -1.6711989883333445e-03
+
+
+ 3.8491758704185486e-01 5.3872710466384888e-01
+
+
+ <_>
+
+ 0 -1 862 4.9260449595749378e-03
+
+
+ 4.7297719120979309e-01 7.4472510814666748e-01
+
+
+ <_>
+
+ 0 -1 863 4.3908702209591866e-03
+
+
+ 4.8091810941696167e-01 5.5919218063354492e-01
+
+
+ <_>
+
+ 0 -1 864 -1.7793629318475723e-02
+
+
+ 6.9036781787872314e-01 4.6769270300865173e-01
+
+
+ <_>
+
+ 0 -1 865 2.0469669252634048e-03
+
+
+ 5.3706902265548706e-01 3.3081620931625366e-01
+
+
+ <_>
+
+ 0 -1 866 2.9891489073634148e-02
+
+
+ 5.1398652791976929e-01 3.3090591430664062e-01
+
+
+ <_>
+
+ 0 -1 867 1.5494900289922953e-03
+
+
+ 4.6602371335029602e-01 6.0783427953720093e-01
+
+
+ <_>
+
+ 0 -1 868 1.4956969534978271e-03
+
+
+ 4.4048359990119934e-01 5.8639198541641235e-01
+
+
+ <_>
+
+ 0 -1 869 9.5885928021743894e-04
+
+
+ 5.4359710216522217e-01 4.2085230350494385e-01
+
+
+ <_>
+
+ 0 -1 870 4.9643701640889049e-04
+
+
+ 5.3705781698226929e-01 4.0006220340728760e-01
+
+
+ <_>
+
+ 0 -1 871 -2.7280810754746199e-03
+
+
+ 5.6594127416610718e-01 4.2596429586410522e-01
+
+
+ <_>
+
+ 0 -1 872 2.3026480339467525e-03
+
+
+ 5.1616579294204712e-01 3.3508691191673279e-01
+
+
+ <_>
+
+ 0 -1 873 2.5151631236076355e-01
+
+
+ 4.8696619272232056e-01 7.1473097801208496e-01
+
+
+ <_>
+
+ 0 -1 874 -4.6328022144734859e-03
+
+
+ 2.7274489402770996e-01 5.0837898254394531e-01
+
+
+ <_>
+
+ 0 -1 875 -4.0434490889310837e-02
+
+
+ 6.8514388799667358e-01 5.0217670202255249e-01
+
+
+ <_>
+
+ 0 -1 876 1.4972220014897175e-05
+
+
+ 4.2844650149345398e-01 5.5225551128387451e-01
+
+
+ <_>
+
+ 0 -1 877 -2.4050309730228037e-04
+
+
+ 4.2261189222335815e-01 5.3900748491287231e-01
+
+
+ <_>
+
+ 0 -1 878 2.3657839745283127e-02
+
+
+ 4.7446319460868835e-01 7.5043660402297974e-01
+
+
+ <_>
+
+ 0 -1 879 -8.1449104472994804e-03
+
+
+ 4.2450588941574097e-01 5.5383628606796265e-01
+
+
+ <_>
+
+ 0 -1 880 -3.6992130335420370e-03
+
+
+ 5.9523570537567139e-01 4.5297130942344666e-01
+
+
+ <_>
+
+ 0 -1 881 -6.7718601785600185e-03
+
+
+ 4.1377940773963928e-01 5.4733997583389282e-01
+
+
+ <_>
+
+ 0 -1 882 4.2669530957937241e-03
+
+
+ 4.4841149449348450e-01 5.7979941368103027e-01
+
+
+ <_>
+
+ 0 -1 883 1.7791989957913756e-03
+
+
+ 5.6248587369918823e-01 4.4324448704719543e-01
+
+
+ <_>
+
+ 0 -1 884 1.6774770338088274e-03
+
+
+ 4.6377518773078918e-01 6.3642418384552002e-01
+
+
+ <_>
+
+ 0 -1 885 1.1732629500329494e-03
+
+
+ 4.5445030927658081e-01 5.9144157171249390e-01
+
+
+ <_>
+
+ 0 -1 886 8.6998171173036098e-04
+
+
+ 5.3347527980804443e-01 3.8859179615974426e-01
+
+
+ <_>
+
+ 0 -1 887 7.6378340600058436e-04
+
+
+ 5.3985852003097534e-01 3.7449419498443604e-01
+
+
+ <_>
+
+ 0 -1 888 1.5684569370932877e-04
+
+
+ 4.3178731203079224e-01 5.6146162748336792e-01
+
+
+ <_>
+
+ 0 -1 889 -2.1511370316147804e-02
+
+
+ 1.7859250307083130e-01 5.1855427026748657e-01
+
+
+ <_>
+
+ 0 -1 890 1.3081369979772717e-04
+
+
+ 4.3424990773200989e-01 5.6828498840332031e-01
+
+
+ <_>
+
+ 0 -1 891 2.1992040798068047e-02
+
+
+ 5.1617169380187988e-01 2.3793940246105194e-01
+
+
+ <_>
+
+ 0 -1 892 -8.0136500764638186e-04
+
+
+ 5.9867632389068604e-01 4.4664269685745239e-01
+
+
+ <_>
+
+ 0 -1 893 -8.2736099138855934e-03
+
+
+ 4.1082179546356201e-01 5.2510571479797363e-01
+
+
+ <_>
+
+ 0 -1 894 3.6831789184361696e-03
+
+
+ 5.1738142967224121e-01 3.3975180983543396e-01
+
+
+ <_>
+
+ 0 -1 895 -7.9525681212544441e-03
+
+
+ 6.8889832496643066e-01 4.8459240794181824e-01
+
+
+ <_>
+
+ 0 -1 896 1.5382299898192286e-03
+
+
+ 5.1785671710968018e-01 3.4541139006614685e-01
+
+
+ <_>
+
+ 0 -1 897 -1.4043530449271202e-02
+
+
+ 1.6784210503101349e-01 5.1886677742004395e-01
+
+
+ <_>
+
+ 0 -1 898 1.4315890148282051e-03
+
+
+ 4.3682569265365601e-01 5.6557738780975342e-01
+
+
+ <_>
+
+ 0 -1 899 -3.4014228731393814e-02
+
+
+ 7.8022962808609009e-01 4.9592170119285583e-01
+
+
+ <_>
+
+ 0 -1 900 -1.2027299962937832e-02
+
+
+ 1.5851010382175446e-01 5.0322318077087402e-01
+
+
+ <_>
+
+ 0 -1 901 1.3316619396209717e-01
+
+
+ 5.1633048057556152e-01 2.7551281452178955e-01
+
+
+ <_>
+
+ 0 -1 902 -1.5221949433907866e-03
+
+
+ 3.7283179163932800e-01 5.2145522832870483e-01
+
+
+ <_>
+
+ 0 -1 903 -9.3929271679371595e-04
+
+
+ 5.8383792638778687e-01 4.5111650228500366e-01
+
+
+ <_>
+
+ 0 -1 904 2.7719739824533463e-02
+
+
+ 4.7282868623733521e-01 7.3315447568893433e-01
+
+
+ <_>
+
+ 0 -1 905 3.1030150130391121e-03
+
+
+ 5.3022021055221558e-01 4.1015630960464478e-01
+
+
+ <_>
+
+ 0 -1 906 7.7861219644546509e-02
+
+
+ 4.9983340501785278e-01 1.2729619443416595e-01
+
+
+ <_>
+
+ 0 -1 907 -1.5854939818382263e-02
+
+
+ 5.0833359360694885e-02 5.1656562089920044e-01
+
+
+ <_>
+
+ 0 -1 908 -4.9725300632417202e-03
+
+
+ 6.7981338500976562e-01 4.6842318773269653e-01
+
+
+ <_>
+
+ 0 -1 909 -9.7676506265997887e-04
+
+
+ 6.0107719898223877e-01 4.7889319062232971e-01
+
+
+ <_>
+
+ 0 -1 910 -2.4647710379213095e-03
+
+
+ 3.3933979272842407e-01 5.2205038070678711e-01
+
+
+ <_>
+
+ 0 -1 911 -6.7937700077891350e-03
+
+
+ 4.3651369214057922e-01 5.2396631240844727e-01
+
+
+ <_>
+
+ 0 -1 912 3.2608021050691605e-02
+
+
+ 5.0527238845825195e-01 2.4252149462699890e-01
+
+
+ <_>
+
+ 0 -1 913 -5.8514421107247472e-04
+
+
+ 5.7339739799499512e-01 4.7585740685462952e-01
+
+
+ <_>
+
+ 0 -1 914 -2.9632600024342537e-02
+
+
+ 3.8922891020774841e-01 5.2635979652404785e-01
+
+
+
+
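+ <!-- stage boundary: 137 weak classifiers, stage threshold 6.7698921203613281e+01 -->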
+ <_>
+ 137
+ 6.7698921203613281e+01
+
+ <_>
+
+ 0 -1 915 4.6550851315259933e-02
+
+
+ 3.2769501209259033e-01 6.2405228614807129e-01
+
+
+ <_>
+
+ 0 -1 916 7.9537127166986465e-03
+
+
+ 4.2564851045608521e-01 6.9429391622543335e-01
+
+
+ <_>
+
+ 0 -1 917 6.8221561377868056e-04
+
+
+ 3.7114870548248291e-01 5.9007328748703003e-01
+
+
+ <_>
+
+ 0 -1 918 -1.9348249770700932e-04
+
+
+ 2.0411339402198792e-01 5.3005450963973999e-01
+
+
+ <_>
+
+ 0 -1 919 -2.6710508973337710e-04
+
+
+ 5.4161262512207031e-01 3.1031790375709534e-01
+
+
+ <_>
+
+ 0 -1 920 2.7818060480058193e-03
+
+
+ 5.2778327465057373e-01 3.4670698642730713e-01
+
+
+ <_>
+
+ 0 -1 921 -4.6779078547842801e-04
+
+
+ 5.3082311153411865e-01 3.2944920659065247e-01
+
+
+ <_>
+
+ 0 -1 922 -3.0335160772665404e-05
+
+
+ 5.7738727331161499e-01 3.8520970940589905e-01
+
+
+ <_>
+
+ 0 -1 923 7.8038009814918041e-04
+
+
+ 4.3174389004707336e-01 6.1500579118728638e-01
+
+
+ <_>
+
+ 0 -1 924 -4.2553851380944252e-03
+
+
+ 2.9339039325714111e-01 5.3242927789688110e-01
+
+
+ <_>
+
+ 0 -1 925 -2.4735610350035131e-04
+
+
+ 5.4688447713851929e-01 3.8430300354957581e-01
+
+
+ <_>
+
+ 0 -1 926 -1.4724259381182492e-04
+
+
+ 4.2815428972244263e-01 5.7555872201919556e-01
+
+
+ <_>
+
+ 0 -1 927 1.1864770203828812e-03
+
+
+ 3.7473011016845703e-01 5.4714661836624146e-01
+
+
+ <_>
+
+ 0 -1 928 2.3936580400913954e-03
+
+
+ 4.5377838611602783e-01 6.1115288734436035e-01
+
+
+ <_>
+
+ 0 -1 929 -1.5390539774671197e-03
+
+
+ 2.9713419079780579e-01 5.1895380020141602e-01
+
+
+ <_>
+
+ 0 -1 930 -7.1968790143728256e-03
+
+
+ 6.6990667581558228e-01 4.7264769673347473e-01
+
+
+ <_>
+
+ 0 -1 931 -4.1499789222143590e-04
+
+
+ 3.3849540352821350e-01 5.2603179216384888e-01
+
+
+ <_>
+
+ 0 -1 932 4.4359830208122730e-03
+
+
+ 5.3991222381591797e-01 3.9201408624649048e-01
+
+
+ <_>
+
+ 0 -1 933 2.6606200262904167e-03
+
+
+ 4.4825780391693115e-01 6.1196178197860718e-01
+
+
+ <_>
+
+ 0 -1 934 -1.5287200221791863e-03
+
+
+ 3.7112379074096680e-01 5.3402662277221680e-01
+
+
+ <_>
+
+ 0 -1 935 -4.7397250309586525e-03
+
+
+ 6.0310882329940796e-01 4.4551450014114380e-01
+
+
+ <_>
+
+ 0 -1 936 -1.4829129911959171e-02
+
+
+ 2.8387540578842163e-01 5.3418618440628052e-01
+
+
+ <_>
+
+ 0 -1 937 9.2275557108223438e-04
+
+
+ 5.2095472812652588e-01 3.3616539835929871e-01
+
+
+ <_>
+
+ 0 -1 938 8.3529807627201080e-02
+
+
+ 5.1199698448181152e-01 8.1164449453353882e-02
+
+
+ <_>
+
+ 0 -1 939 -7.5633148662745953e-04
+
+
+ 3.3171200752258301e-01 5.1898312568664551e-01
+
+
+ <_>
+
+ 0 -1 940 9.8403859883546829e-03
+
+
+ 5.2475982904434204e-01 2.3349590599536896e-01
+
+
+ <_>
+
+ 0 -1 941 -1.5953830443322659e-03
+
+
+ 5.7500940561294556e-01 4.2956221103668213e-01
+
+
+ <_>
+
+ 0 -1 942 3.4766020689858124e-05
+
+
+ 4.3424451351165771e-01 5.5640292167663574e-01
+
+
+ <_>
+
+ 0 -1 943 2.9862910509109497e-02
+
+
+ 4.5791471004486084e-01 6.5791881084442139e-01
+
+
+ <_>
+
+ 0 -1 944 1.1325590312480927e-02
+
+
+ 5.2743119001388550e-01 3.6738881468772888e-01
+
+
+ <_>
+
+ 0 -1 945 -8.7828645482659340e-03
+
+
+ 7.1003687381744385e-01 4.6421670913696289e-01
+
+
+ <_>
+
+ 0 -1 946 4.3639959767460823e-03
+
+
+ 5.2792161703109741e-01 2.7058771252632141e-01
+
+
+ <_>
+
+ 0 -1 947 4.1804728098213673e-03
+
+
+ 5.0725251436233521e-01 2.4490830302238464e-01
+
+
+ <_>
+
+ 0 -1 948 -4.5668511302210391e-04
+
+
+ 4.2831051349639893e-01 5.5486911535263062e-01
+
+
+ <_>
+
+ 0 -1 949 -3.7140368949621916e-03
+
+
+ 5.5193877220153809e-01 4.1036531329154968e-01
+
+
+ <_>
+
+ 0 -1 950 -2.5304289534687996e-02
+
+
+ 6.8670022487640381e-01 4.8698890209197998e-01
+
+
+ <_>
+
+ 0 -1 951 -3.4454080741852522e-04
+
+
+ 3.7288740277290344e-01 5.2876931428909302e-01
+
+
+ <_>
+
+ 0 -1 952 -8.3935231668874621e-04
+
+
+ 6.0601520538330078e-01 4.6160620450973511e-01
+
+
+ <_>
+
+ 0 -1 953 1.7280049622058868e-02
+
+
+ 5.0496357679367065e-01 1.8198239803314209e-01
+
+
+ <_>
+
+ 0 -1 954 -6.3595077954232693e-03
+
+
+ 1.6312399506568909e-01 5.2327787876129150e-01
+
+
+ <_>
+
+ 0 -1 955 1.0298109846189618e-03
+
+
+ 4.4632780551910400e-01 6.1765491962432861e-01
+
+
+ <_>
+
+ 0 -1 956 1.0117109632119536e-03
+
+
+ 5.4733848571777344e-01 4.3006989359855652e-01
+
+
+ <_>
+
+ 0 -1 957 -1.0308800265192986e-02
+
+
+ 1.1669850349426270e-01 5.0008672475814819e-01
+
+
+ <_>
+
+ 0 -1 958 5.4682018235325813e-03
+
+
+ 4.7692871093750000e-01 6.7192137241363525e-01
+
+
+ <_>
+
+ 0 -1 959 -9.1696460731327534e-04
+
+
+ 3.4710898995399475e-01 5.1781648397445679e-01
+
+
+ <_>
+
+ 0 -1 960 2.3922820109874010e-03
+
+
+ 4.7852361202239990e-01 6.2163108587265015e-01
+
+
+ <_>
+
+ 0 -1 961 -7.5573818758130074e-03
+
+
+ 5.8147960901260376e-01 4.4100850820541382e-01
+
+
+ <_>
+
+ 0 -1 962 -7.7024032361805439e-04
+
+
+ 3.8780000805854797e-01 5.4657220840454102e-01
+
+
+ <_>
+
+ 0 -1 963 -8.7125990539789200e-03
+
+
+ 1.6600510478019714e-01 4.9958360195159912e-01
+
+
+ <_>
+
+ 0 -1 964 -1.0306320153176785e-02
+
+
+ 4.0933910012245178e-01 5.2742338180541992e-01
+
+
+ <_>
+
+ 0 -1 965 -2.0940979011356831e-03
+
+
+ 6.2061947584152222e-01 4.5722800493240356e-01
+
+
+ <_>
+
+ 0 -1 966 6.8099051713943481e-03
+
+
+ 5.5677592754364014e-01 4.1556000709533691e-01
+
+
+ <_>
+
+ 0 -1 967 -1.0746059706434608e-03
+
+
+ 5.6389278173446655e-01 4.3530249595642090e-01
+
+
+ <_>
+
+ 0 -1 968 2.1550289820879698e-03
+
+
+ 4.8262658715248108e-01 6.7497581243515015e-01
+
+
+ <_>
+
+ 0 -1 969 3.1742319464683533e-02
+
+
+ 5.0483798980712891e-01 1.8832489848136902e-01
+
+
+ <_>
+
+ 0 -1 970 -7.8382723033428192e-02
+
+
+ 2.3695489764213562e-01 5.2601581811904907e-01
+
+
+ <_>
+
+ 0 -1 971 5.7415119372308254e-03
+
+
+ 5.0488287210464478e-01 2.7764698863029480e-01
+
+
+ <_>
+
+ 0 -1 972 -2.9014600440859795e-03
+
+
+ 6.2386047840118408e-01 4.6933171153068542e-01
+
+
+ <_>
+
+ 0 -1 973 -2.6427931152284145e-03
+
+
+ 3.3141419291496277e-01 5.1697772741317749e-01
+
+
+ <_>
+
+ 0 -1 974 -1.0949660092592239e-01
+
+
+ 2.3800450563430786e-01 5.1834410429000854e-01
+
+
+ <_>
+
+ 0 -1 975 7.4075913289561868e-05
+
+
+ 4.0696358680725098e-01 5.3621500730514526e-01
+
+
+ <_>
+
+ 0 -1 976 -5.0593802006915212e-04
+
+
+ 5.5067062377929688e-01 4.3745940923690796e-01
+
+
+ <_>
+
+ 0 -1 977 -8.2131777890026569e-04
+
+
+ 5.5257099866867065e-01 4.2093759775161743e-01
+
+
+ <_>
+
+ 0 -1 978 -6.0276539443293586e-05
+
+
+ 5.4554748535156250e-01 4.7482660412788391e-01
+
+
+ <_>
+
+ 0 -1 979 6.8065142259001732e-03
+
+
+ 5.1579958200454712e-01 3.4245771169662476e-01
+
+
+ <_>
+
+ 0 -1 980 1.7202789895236492e-03
+
+
+ 5.0132077932357788e-01 6.3312637805938721e-01
+
+
+ <_>
+
+ 0 -1 981 -1.3016929733566940e-04
+
+
+ 5.5397182703018188e-01 4.2268699407577515e-01
+
+
+ <_>
+
+ 0 -1 982 -4.8016388900578022e-03
+
+
+ 4.4250950217247009e-01 5.4307800531387329e-01
+
+
+ <_>
+
+ 0 -1 983 -2.5399310979992151e-03
+
+
+ 7.1457821130752563e-01 4.6976050734519958e-01
+
+
+ <_>
+
+ 0 -1 984 -1.4278929447755218e-03
+
+
+ 4.0704450011253357e-01 5.3996050357818604e-01
+
+
+ <_>
+
+ 0 -1 985 -2.5142550468444824e-02
+
+
+ 7.8846907615661621e-01 4.7473520040512085e-01
+
+
+ <_>
+
+ 0 -1 986 -3.8899609353393316e-03
+
+
+ 4.2961919307708740e-01 5.5771100521087646e-01
+
+
+ <_>
+
+ 0 -1 987 4.3947459198534489e-03
+
+
+ 4.6931621432304382e-01 7.0239442586898804e-01
+
+
+ <_>
+
+ 0 -1 988 2.4678420275449753e-02
+
+
+ 5.2423220872879028e-01 3.8125100731849670e-01
+
+
+ <_>
+
+ 0 -1 989 3.8047678768634796e-02
+
+
+ 5.0117397308349609e-01 1.6878280043601990e-01
+
+
+ <_>
+
+ 0 -1 990 7.9424865543842316e-03
+
+
+ 4.8285821080207825e-01 6.3695681095123291e-01
+
+
+ <_>
+
+ 0 -1 991 -1.5110049862414598e-03
+
+
+ 5.9064859151840210e-01 4.4876679778099060e-01
+
+
+ <_>
+
+ 0 -1 992 6.4201741479337215e-03
+
+
+ 5.2410978078842163e-01 2.9905700683593750e-01
+
+
+ <_>
+
+ 0 -1 993 -2.9802159406244755e-03
+
+
+ 3.0414658784866333e-01 5.0784897804260254e-01
+
+
+ <_>
+
+ 0 -1 994 -7.4580078944563866e-04
+
+
+ 4.1281390190124512e-01 5.2568262815475464e-01
+
+
+ <_>
+
+ 0 -1 995 -1.0470950044691563e-02
+
+
+ 5.8083951473236084e-01 4.4942960143089294e-01
+
+
+ <_>
+
+ 0 -1 996 9.3369204550981522e-03
+
+
+ 5.2465528249740601e-01 2.6589488983154297e-01
+
+
+ <_>
+
+ 0 -1 997 2.7936900034546852e-02
+
+
+ 4.6749550104141235e-01 7.0872569084167480e-01
+
+
+ <_>
+
+ 0 -1 998 7.4277678504586220e-03
+
+
+ 5.4094868898391724e-01 3.7585180997848511e-01
+
+
+ <_>
+
+ 0 -1 999 -2.3584509268403053e-02
+
+
+ 3.7586399912834167e-01 5.2385509014129639e-01
+
+
+ <_>
+
+ 0 -1 1000 1.1452640173956752e-03
+
+
+ 4.3295788764953613e-01 5.8042472600936890e-01
+
+
+ <_>
+
+ 0 -1 1001 -4.3468660442158580e-04
+
+
+ 5.2806180715560913e-01 3.8730698823928833e-01
+
+
+ <_>
+
+ 0 -1 1002 1.0648540221154690e-02
+
+
+ 4.9021130800247192e-01 5.6812518835067749e-01
+
+
+ <_>
+
+ 0 -1 1003 -3.9418050437234342e-04
+
+
+ 5.5708801746368408e-01 4.3182510137557983e-01
+
+
+ <_>
+
+ 0 -1 1004 -1.3270479394122958e-04
+
+
+ 5.6584399938583374e-01 4.3435549736022949e-01
+
+
+ <_>
+
+ 0 -1 1005 -2.0125510636717081e-03
+
+
+ 6.0567390918731689e-01 4.5375239849090576e-01
+
+
+ <_>
+
+ 0 -1 1006 2.4854319635778666e-03
+
+
+ 5.3904771804809570e-01 4.1380101442337036e-01
+
+
+ <_>
+
+ 0 -1 1007 1.8237880431115627e-03
+
+
+ 4.3548288941383362e-01 5.7171887159347534e-01
+
+
+ <_>
+
+ 0 -1 1008 -1.6656659543514252e-02
+
+
+ 3.0109131336212158e-01 5.2161228656768799e-01
+
+
+ <_>
+
+ 0 -1 1009 8.0349558265879750e-04
+
+
+ 5.3001511096954346e-01 3.8183969259262085e-01
+
+
+ <_>
+
+ 0 -1 1010 3.4170378930866718e-03
+
+
+ 5.3280287981033325e-01 4.2414000630378723e-01
+
+
+ <_>
+
+ 0 -1 1011 -3.6222729249857366e-04
+
+
+ 5.4917281866073608e-01 4.1869771480560303e-01
+
+
+ <_>
+
+ 0 -1 1012 -1.1630020290613174e-01
+
+
+ 1.4407220482826233e-01 5.2264511585235596e-01
+
+
+ <_>
+
+ 0 -1 1013 -1.4695010147988796e-02
+
+
+ 7.7477252483367920e-01 4.7157171368598938e-01
+
+
+ <_>
+
+ 0 -1 1014 2.1972130052745342e-03
+
+
+ 5.3554338216781616e-01 3.3156448602676392e-01
+
+
+ <_>
+
+ 0 -1 1015 -4.6965209185145795e-04
+
+
+ 5.7672351598739624e-01 4.4581368565559387e-01
+
+
+ <_>
+
+ 0 -1 1016 6.5144998952746391e-03
+
+
+ 5.2156740427017212e-01 3.6478888988494873e-01
+
+
+ <_>
+
+ 0 -1 1017 2.1300060674548149e-02
+
+
+ 4.9942049384117126e-01 1.5679509937763214e-01
+
+
+ <_>
+
+ 0 -1 1018 3.1881409231573343e-03
+
+
+ 4.7422000765800476e-01 6.2872701883316040e-01
+
+
+ <_>
+
+ 0 -1 1019 9.0019777417182922e-04
+
+
+ 5.3479540348052979e-01 3.9437520503997803e-01
+
+
+ <_>
+
+ 0 -1 1020 -5.1772277802228928e-03
+
+
+ 6.7271918058395386e-01 5.0131380558013916e-01
+
+
+ <_>
+
+ 0 -1 1021 -4.3764649890363216e-03
+
+
+ 3.1066751480102539e-01 5.1287931203842163e-01
+
+
+ <_>
+
+ 0 -1 1022 2.6299960445612669e-03
+
+
+ 4.8863101005554199e-01 5.7552158832550049e-01
+
+
+ <_>
+
+ 0 -1 1023 -2.0458688959479332e-03
+
+
+ 6.0257941484451294e-01 4.5580768585205078e-01
+
+
+ <_>
+
+ 0 -1 1024 6.9482706487178802e-02
+
+
+ 5.2407479286193848e-01 2.1852590143680573e-01
+
+
+ <_>
+
+ 0 -1 1025 2.4048939347267151e-02
+
+
+ 5.0118672847747803e-01 2.0906220376491547e-01
+
+
+ <_>
+
+ 0 -1 1026 3.1095340382307768e-03
+
+
+ 4.8667120933532715e-01 7.1085482835769653e-01
+
+
+ <_>
+
+ 0 -1 1027 -1.2503260513767600e-03
+
+
+ 3.4078910946846008e-01 5.1561951637268066e-01
+
+
+ <_>
+
+ 0 -1 1028 -1.0281190043315291e-03
+
+
+ 5.5755722522735596e-01 4.4394320249557495e-01
+
+
+ <_>
+
+ 0 -1 1029 -8.8893622159957886e-03
+
+
+ 6.4020007848739624e-01 4.6204420924186707e-01
+
+
+ <_>
+
+ 0 -1 1030 -6.1094801640138030e-04
+
+
+ 3.7664419412612915e-01 5.4488998651504517e-01
+
+
+ <_>
+
+ 0 -1 1031 -5.7686357758939266e-03
+
+
+ 3.3186489343643188e-01 5.1336771249771118e-01
+
+
+ <_>
+
+ 0 -1 1032 1.8506490159779787e-03
+
+
+ 4.9035701155662537e-01 6.4069348573684692e-01
+
+
+ <_>
+
+ 0 -1 1033 -9.9799469113349915e-02
+
+
+ 1.5360510349273682e-01 5.0155621767044067e-01
+
+
+ <_>
+
+ 0 -1 1034 -3.5128349065780640e-01
+
+
+ 5.8823131024837494e-02 5.1743787527084351e-01
+
+
+ <_>
+
+ 0 -1 1035 -4.5244570821523666e-02
+
+
+ 6.9614887237548828e-01 4.6778729557991028e-01
+
+
+ <_>
+
+ 0 -1 1036 7.1481578052043915e-02
+
+
+ 5.1679861545562744e-01 1.0380929708480835e-01
+
+
+ <_>
+
+ 0 -1 1037 2.1895780228078365e-03
+
+
+ 4.2730781435966492e-01 5.5320608615875244e-01
+
+
+ <_>
+
+ 0 -1 1038 -5.9242651332169771e-04
+
+
+ 4.6389439702033997e-01 5.2763891220092773e-01
+
+
+ <_>
+
+ 0 -1 1039 1.6788389766588807e-03
+
+
+ 5.3016489744186401e-01 3.9320349693298340e-01
+
+
+ <_>
+
+ 0 -1 1040 -2.2163488902151585e-03
+
+
+ 5.6306940317153931e-01 4.7570338845252991e-01
+
+
+ <_>
+
+ 0 -1 1041 1.1568699846975505e-04
+
+
+ 4.3075358867645264e-01 5.5357027053833008e-01
+
+
+ <_>
+
+ 0 -1 1042 -7.2017288766801357e-03
+
+
+ 1.4448820054531097e-01 5.1930642127990723e-01
+
+
+ <_>
+
+ 0 -1 1043 8.9081272017210722e-04
+
+
+ 4.3844321370124817e-01 5.5936211347579956e-01
+
+
+ <_>
+
+ 0 -1 1044 1.9605009583756328e-04
+
+
+ 5.3404158353805542e-01 4.7059568762779236e-01
+
+
+ <_>
+
+ 0 -1 1045 5.2022142335772514e-04
+
+
+ 5.2138561010360718e-01 3.8100790977478027e-01
+
+
+ <_>
+
+ 0 -1 1046 9.4588572392240167e-04
+
+
+ 4.7694149613380432e-01 6.1307388544082642e-01
+
+
+ <_>
+
+ 0 -1 1047 9.1698471806012094e-05
+
+
+ 4.2450091242790222e-01 5.4293632507324219e-01
+
+
+ <_>
+
+ 0 -1 1048 2.1833200007677078e-03
+
+
+ 5.4577308893203735e-01 4.1910758614540100e-01
+
+
+ <_>
+
+ 0 -1 1049 -8.6039671441540122e-04
+
+
+ 5.7645887136459351e-01 4.4716599583625793e-01
+
+
+ <_>
+
+ 0 -1 1050 -1.3236239552497864e-02
+
+
+ 6.3728231191635132e-01 4.6950098872184753e-01
+
+
+ <_>
+
+ 0 -1 1051 4.3376701069064438e-04
+
+
+ 5.3178739547729492e-01 3.9458298683166504e-01
+
+
+
+
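+ <!-- stage boundary: 140 weak classifiers, stage threshold 6.9229873657226562e+01 -->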
+ <_>
+ 140
+ 6.9229873657226562e+01
+
+ <_>
+
+ 0 -1 1052 -2.4847149848937988e-02
+
+
+ 6.5555167198181152e-01 3.8733118772506714e-01
+
+
+ <_>
+
+ 0 -1 1053 6.1348611488938332e-03
+
+
+ 3.7480720877647400e-01 5.9739977121353149e-01
+
+
+ <_>
+
+ 0 -1 1054 6.4498498104512691e-03
+
+
+ 5.4254919290542603e-01 2.5488111376762390e-01
+
+
+ <_>
+
+ 0 -1 1055 6.3491211039945483e-04
+
+
+ 2.4624420702457428e-01 5.3872537612915039e-01
+
+
+ <_>
+
+ 0 -1 1056 1.4023890253156424e-03
+
+
+ 5.5943220853805542e-01 3.5286578536033630e-01
+
+
+ <_>
+
+ 0 -1 1057 3.0044000595808029e-04
+
+
+ 3.9585039019584656e-01 5.7659381628036499e-01
+
+
+ <_>
+
+ 0 -1 1058 1.0042409849120304e-04
+
+
+ 3.6989969015121460e-01 5.5349981784820557e-01
+
+
+ <_>
+
+ 0 -1 1059 -5.0841490738093853e-03
+
+
+ 3.7110909819602966e-01 5.5478000640869141e-01
+
+
+ <_>
+
+ 0 -1 1060 -1.9537260755896568e-02
+
+
+ 7.4927550554275513e-01 4.5792970061302185e-01
+
+
+ <_>
+
+ 0 -1 1061 -7.4532740654831287e-06
+
+
+ 5.6497871875762939e-01 3.9040699601173401e-01
+
+
+ <_>
+
+ 0 -1 1062 -3.6079459823668003e-03
+
+
+ 3.3810880780220032e-01 5.2678012847900391e-01
+
+
+ <_>
+
+ 0 -1 1063 2.0697501022368670e-03
+
+
+ 5.5192911624908447e-01 3.7143889069557190e-01
+
+
+ <_>
+
+ 0 -1 1064 -4.6463840408250690e-04
+
+
+ 5.6082147359848022e-01 4.1135668754577637e-01
+
+
+ <_>
+
+ 0 -1 1065 7.5490452582016587e-04
+
+
+ 3.5592061281204224e-01 5.3293561935424805e-01
+
+
+ <_>
+
+ 0 -1 1066 -9.8322238773107529e-04
+
+
+ 5.4147958755493164e-01 3.7632051110267639e-01
+
+
+ <_>
+
+ 0 -1 1067 -1.9940640777349472e-02
+
+
+ 6.3479030132293701e-01 4.7052991390228271e-01
+
+
+ <_>
+
+ 0 -1 1068 3.7680300883948803e-03
+
+
+ 3.9134898781776428e-01 5.5637162923812866e-01
+
+
+ <_>
+
+ 0 -1 1069 -9.4528505578637123e-03
+
+
+ 2.5548928976058960e-01 5.2151167392730713e-01
+
+
+ <_>
+
+ 0 -1 1070 2.9560849070549011e-03
+
+
+ 5.1746791601181030e-01 3.0639201402664185e-01
+
+
+ <_>
+
+ 0 -1 1071 9.1078737750649452e-03
+
+
+ 5.3884482383728027e-01 2.8859630227088928e-01
+
+
+ <_>
+
+ 0 -1 1072 1.8219229532405734e-03
+
+
+ 4.3360430002212524e-01 5.8521968126296997e-01
+
+
+ <_>
+
+ 0 -1 1073 1.4688739553093910e-02
+
+
+ 5.2873617410659790e-01 2.8700059652328491e-01
+
+
+ <_>
+
+ 0 -1 1074 -1.4387990348041058e-02
+
+
+ 7.0194488763809204e-01 4.6473708748817444e-01
+
+
+ <_>
+
+ 0 -1 1075 -1.8986649811267853e-02
+
+
+ 2.9865521192550659e-01 5.2470117807388306e-01
+
+
+ <_>
+
+ 0 -1 1076 1.1527639580890536e-03
+
+
+ 4.3234738707542419e-01 5.9316617250442505e-01
+
+
+ <_>
+
+ 0 -1 1077 1.0933670215308666e-02
+
+
+ 5.2868640422821045e-01 3.1303191184997559e-01
+
+
+ <_>
+
+ 0 -1 1078 -1.4932730235159397e-02
+
+
+ 2.6584190130233765e-01 5.0840771198272705e-01
+
+
+ <_>
+
+ 0 -1 1079 -2.9970539617352188e-04
+
+
+ 5.4635268449783325e-01 3.7407240271568298e-01
+
+
+ <_>
+
+ 0 -1 1080 4.1677621193230152e-03
+
+
+ 4.7034969925880432e-01 7.4357217550277710e-01
+
+
+ <_>
+
+ 0 -1 1081 -6.3905320130288601e-03
+
+
+ 2.0692589879035950e-01 5.2805382013320923e-01
+
+
+ <_>
+
+ 0 -1 1082 4.5029609464108944e-03
+
+
+ 5.1826488971710205e-01 3.4835430979728699e-01
+
+
+ <_>
+
+ 0 -1 1083 -9.2040365561842918e-03
+
+
+ 6.8037772178649902e-01 4.9323600530624390e-01
+
+
+ <_>
+
+ 0 -1 1084 8.1327259540557861e-02
+
+
+ 5.0583988428115845e-01 2.2530519962310791e-01
+
+
+ <_>
+
+ 0 -1 1085 -1.5079280734062195e-01
+
+
+ 2.9634249210357666e-01 5.2646797895431519e-01
+
+
+ <_>
+
+ 0 -1 1086 3.3179009333252907e-03
+
+
+ 4.6554958820343018e-01 7.0729321241378784e-01
+
+
+ <_>
+
+ 0 -1 1087 7.7402801252901554e-04
+
+
+ 4.7803479433059692e-01 5.6682378053665161e-01
+
+
+ <_>
+
+ 0 -1 1088 6.8199541419744492e-04
+
+
+ 4.2869961261749268e-01 5.7221567630767822e-01
+
+
+ <_>
+
+ 0 -1 1089 5.3671570494771004e-03
+
+
+ 5.2993071079254150e-01 3.1146219372749329e-01
+
+
+ <_>
+
+ 0 -1 1090 9.7018666565418243e-05
+
+
+ 3.6746388673782349e-01 5.2694618701934814e-01
+
+
+ <_>
+
+ 0 -1 1091 -1.2534089386463165e-01
+
+
+ 2.3514920473098755e-01 5.2457910776138306e-01
+
+
+ <_>
+
+ 0 -1 1092 -5.2516269497573376e-03
+
+
+ 7.1159368753433228e-01 4.6937671303749084e-01
+
+
+ <_>
+
+ 0 -1 1093 -7.8342109918594360e-03
+
+
+ 4.4626510143280029e-01 5.4090857505798340e-01
+
+
+ <_>
+
+ 0 -1 1094 -1.1310069821774960e-03
+
+
+ 5.9456187486648560e-01 4.4176620244979858e-01
+
+
+ <_>
+
+ 0 -1 1095 1.7601120052859187e-03
+
+
+ 5.3532499074935913e-01 3.9734530448913574e-01
+
+
+ <_>
+
+ 0 -1 1096 -8.1581249833106995e-04
+
+
+ 3.7602680921554565e-01 5.2647268772125244e-01
+
+
+ <_>
+
+ 0 -1 1097 -3.8687589112669230e-03
+
+
+ 6.3099128007888794e-01 4.7498199343681335e-01
+
+
+ <_>
+
+ 0 -1 1098 1.5207129763439298e-03
+
+
+ 5.2301818132400513e-01 3.3612239360809326e-01
+
+
+ <_>
+
+ 0 -1 1099 5.4586738348007202e-01
+
+
+ 5.1671397686004639e-01 1.1726350337266922e-01
+
+
+ <_>
+
+ 0 -1 1100 1.5650190412998199e-02
+
+
+ 4.9794390797615051e-01 1.3932949304580688e-01
+
+
+ <_>
+
+ 0 -1 1101 -1.1731860227882862e-02
+
+
+ 7.1296507120132446e-01 4.9211961030960083e-01
+
+
+ <_>
+
+ 0 -1 1102 -6.1765122227370739e-03
+
+
+ 2.2881029546260834e-01 5.0497019290924072e-01
+
+
+ <_>
+
+ 0 -1 1103 2.2457661107182503e-03
+
+
+ 4.6324339509010315e-01 6.0487258434295654e-01
+
+
+ <_>
+
+ 0 -1 1104 -5.1915869116783142e-03
+
+
+ 6.4674210548400879e-01 4.6021929383277893e-01
+
+
+ <_>
+
+ 0 -1 1105 -2.3827880620956421e-02
+
+
+ 1.4820009469985962e-01 5.2260792255401611e-01
+
+
+ <_>
+
+ 0 -1 1106 1.0284580057486892e-03
+
+
+ 5.1354891061782837e-01 3.3759570121765137e-01
+
+
+ <_>
+
+ 0 -1 1107 -1.0078850202262402e-02
+
+
+ 2.7405610680580139e-01 5.3035670518875122e-01
+
+
+ <_>
+
+ 0 -1 1108 2.6168930344283581e-03
+
+
+ 5.3326708078384399e-01 3.9724540710449219e-01
+
+
+ <_>
+
+ 0 -1 1109 5.4385367548093200e-04
+
+
+ 5.3656041622161865e-01 4.0634119510650635e-01
+
+
+ <_>
+
+ 0 -1 1110 5.3510512225329876e-03
+
+
+ 4.6537590026855469e-01 6.8890458345413208e-01
+
+
+ <_>
+
+ 0 -1 1111 -1.5274790348485112e-03
+
+
+ 5.4495012760162354e-01 3.6247238516807556e-01
+
+
+ <_>
+
+ 0 -1 1112 -8.0624416470527649e-02
+
+
+ 1.6560870409011841e-01 5.0002872943878174e-01
+
+
+ <_>
+
+ 0 -1 1113 2.2192029282450676e-02
+
+
+ 5.1327311992645264e-01 2.0028080046176910e-01
+
+
+ <_>
+
+ 0 -1 1114 7.3100631125271320e-03
+
+
+ 4.6179479360580444e-01 6.3665360212326050e-01
+
+
+ <_>
+
+ 0 -1 1115 -6.4063072204589844e-03
+
+
+ 5.9162509441375732e-01 4.8678609728813171e-01
+
+
+ <_>
+
+ 0 -1 1116 -7.6415040530264378e-04
+
+
+ 3.8884091377258301e-01 5.3157979249954224e-01
+
+
+ <_>
+
+ 0 -1 1117 7.6734489994123578e-04
+
+
+ 4.1590648889541626e-01 5.6052798032760620e-01
+
+
+ <_>
+
+ 0 -1 1118 6.1474501853808761e-04
+
+
+ 3.0890220403671265e-01 5.1201480627059937e-01
+
+
+ <_>
+
+ 0 -1 1119 -5.0105270929634571e-03
+
+
+ 3.9721998572349548e-01 5.2073061466217041e-01
+
+
+ <_>
+
+ 0 -1 1120 -8.6909132078289986e-03
+
+
+ 6.2574082612991333e-01 4.6085759997367859e-01
+
+
+ <_>
+
+ 0 -1 1121 -1.6391459852457047e-02
+
+
+ 2.0852099359035492e-01 5.2422660589218140e-01
+
+
+ <_>
+
+ 0 -1 1122 4.0973909199237823e-04
+
+
+ 5.2224272489547729e-01 3.7803208827972412e-01
+
+
+ <_>
+
+ 0 -1 1123 -2.5242289993911982e-03
+
+
+ 5.8039271831512451e-01 4.6118900179862976e-01
+
+
+ <_>
+
+ 0 -1 1124 5.0945312250405550e-04
+
+
+ 4.4012719392776489e-01 5.8460158109664917e-01
+
+
+ <_>
+
+ 0 -1 1125 1.9656419754028320e-03
+
+
+ 5.3223252296447754e-01 4.1845908761024475e-01
+
+
+ <_>
+
+ 0 -1 1126 5.6298897834494710e-04
+
+
+ 3.7418448925018311e-01 5.2345657348632812e-01
+
+
+ <_>
+
+ 0 -1 1127 -6.7946797935292125e-04
+
+
+ 4.6310418844223022e-01 5.3564780950546265e-01
+
+
+ <_>
+
+ 0 -1 1128 7.2856349870562553e-03
+
+
+ 5.0446701049804688e-01 2.3775640130043030e-01
+
+
+ <_>
+
+ 0 -1 1129 -1.7459489405155182e-02
+
+
+ 7.2891211509704590e-01 5.0504350662231445e-01
+
+
+ <_>
+
+ 0 -1 1130 -2.5421749800443649e-02
+
+
+ 6.6671347618103027e-01 4.6781000494956970e-01
+
+
+ <_>
+
+ 0 -1 1131 -1.5647639520466328e-03
+
+
+ 4.3917590379714966e-01 5.3236269950866699e-01
+
+
+ <_>
+
+ 0 -1 1132 1.1444360017776489e-02
+
+
+ 4.3464401364326477e-01 5.6800121068954468e-01
+
+
+ <_>
+
+ 0 -1 1133 -6.7352550104260445e-04
+
+
+ 4.4771409034729004e-01 5.2968120574951172e-01
+
+
+ <_>
+
+ 0 -1 1134 9.3194209039211273e-03
+
+
+ 4.7402000427246094e-01 7.4626070261001587e-01
+
+
+ <_>
+
+ 0 -1 1135 1.3328490604180843e-04
+
+
+ 5.3650617599487305e-01 4.7521349787712097e-01
+
+
+ <_>
+
+ 0 -1 1136 -7.8815799206495285e-03
+
+
+ 1.7522190511226654e-01 5.0152552127838135e-01
+
+
+ <_>
+
+ 0 -1 1137 -5.7985680177807808e-03
+
+
+ 7.2712367773056030e-01 4.8962008953094482e-01
+
+
+ <_>
+
+ 0 -1 1138 -3.8922499516047537e-04
+
+
+ 4.0039089322090149e-01 5.3449410200119019e-01
+
+
+ <_>
+
+ 0 -1 1139 -1.9288610201328993e-03
+
+
+ 5.6056129932403564e-01 4.8039558529853821e-01
+
+
+ <_>
+
+ 0 -1 1140 8.4214154630899429e-03
+
+
+ 4.7532469034194946e-01 7.6236087083816528e-01
+
+
+ <_>
+
+ 0 -1 1141 8.1655876711010933e-03
+
+
+ 5.3932619094848633e-01 4.1916438937187195e-01
+
+
+ <_>
+
+ 0 -1 1142 4.8280550981871784e-04
+
+
+ 4.2408001422882080e-01 5.3998219966888428e-01
+
+
+ <_>
+
+ 0 -1 1143 -2.7186630759388208e-03
+
+
+ 4.2445999383926392e-01 5.4249238967895508e-01
+
+
+ <_>
+
+ 0 -1 1144 -1.2507230043411255e-02
+
+
+ 5.8958417177200317e-01 4.5504111051559448e-01
+
+
+ <_>
+
+ 0 -1 1145 -2.4286519736051559e-02
+
+
+ 2.6471349596977234e-01 5.1891797780990601e-01
+
+
+ <_>
+
+ 0 -1 1146 -2.9676330741494894e-03
+
+
+ 7.3476827144622803e-01 4.7497498989105225e-01
+
+
+ <_>
+
+ 0 -1 1147 -1.2528999708592892e-02
+
+
+ 2.7560499310493469e-01 5.1775997877120972e-01
+
+
+ <_>
+
+ 0 -1 1148 -1.0104000102728605e-03
+
+
+ 3.5105609893798828e-01 5.1447242498397827e-01
+
+
+ <_>
+
+ 0 -1 1149 -2.1348530426621437e-03
+
+
+ 5.6379258632659912e-01 4.6673199534416199e-01
+
+
+ <_>
+
+ 0 -1 1150 1.9564259797334671e-02
+
+
+ 4.6145731210708618e-01 6.1376398801803589e-01
+
+
+ <_>
+
+ 0 -1 1151 -9.7146347165107727e-02
+
+
+ 2.9983788728713989e-01 5.1935559511184692e-01
+
+
+ <_>
+
+ 0 -1 1152 4.5014568604528904e-03
+
+
+ 5.0778847932815552e-01 3.0457559227943420e-01
+
+
+ <_>
+
+ 0 -1 1153 6.3706971704959869e-03
+
+
+ 4.8610189557075500e-01 6.8875008821487427e-01
+
+
+ <_>
+
+ 0 -1 1154 -9.0721528977155685e-03
+
+
+ 1.6733959317207336e-01 5.0175631046295166e-01
+
+
+ <_>
+
+ 0 -1 1155 -5.3537208586931229e-03
+
+
+ 2.6927569508552551e-01 5.2426332235336304e-01
+
+
+ <_>
+
+ 0 -1 1156 -1.0932840406894684e-02
+
+
+ 7.1838641166687012e-01 4.7360289096832275e-01
+
+
+ <_>
+
+ 0 -1 1157 8.2356072962284088e-03
+
+
+ 5.2239668369293213e-01 2.3898629844188690e-01
+
+
+ <_>
+
+ 0 -1 1158 -1.0038160253316164e-03
+
+
+ 5.7193559408187866e-01 4.4339430332183838e-01
+
+
+ <_>
+
+ 0 -1 1159 4.0859128348529339e-03
+
+
+ 5.4728418588638306e-01 4.1488361358642578e-01
+
+
+ <_>
+
+ 0 -1 1160 1.5485419332981110e-01
+
+
+ 4.9738121032714844e-01 6.1061598360538483e-02
+
+
+ <_>
+
+ 0 -1 1161 2.0897459762636572e-04
+
+
+ 4.7091740369796753e-01 5.4238891601562500e-01
+
+
+ <_>
+
+ 0 -1 1162 3.3316991175524890e-04
+
+
+ 4.0896269679069519e-01 5.3009921312332153e-01
+
+
+ <_>
+
+ 0 -1 1163 -1.0813400149345398e-02
+
+
+ 6.1043697595596313e-01 4.9573341012001038e-01
+
+
+ <_>
+
+ 0 -1 1164 4.5656010508537292e-02
+
+
+ 5.0696891546249390e-01 2.8666600584983826e-01
+
+
+ <_>
+
+ 0 -1 1165 1.2569549726322293e-03
+
+
+ 4.8469170928001404e-01 6.3181710243225098e-01
+
+
+ <_>
+
+ 0 -1 1166 -1.2015070021152496e-01
+
+
+ 6.0526140034198761e-02 4.9809598922729492e-01
+
+
+ <_>
+
+ 0 -1 1167 -1.0533799650147557e-04
+
+
+ 5.3631097078323364e-01 4.7080421447753906e-01
+
+
+ <_>
+
+ 0 -1 1168 -2.0703190565109253e-01
+
+
+ 5.9660330414772034e-02 4.9790981411933899e-01
+
+
+ <_>
+
+ 0 -1 1169 1.2909180077258497e-04
+
+
+ 4.7129771113395691e-01 5.3779977560043335e-01
+
+
+ <_>
+
+ 0 -1 1170 3.8818528992123902e-04
+
+
+ 4.3635380268096924e-01 5.5341911315917969e-01
+
+
+ <_>
+
+ 0 -1 1171 -2.9243610333651304e-03
+
+
+ 5.8111858367919922e-01 4.8252159357070923e-01
+
+
+ <_>
+
+ 0 -1 1172 8.3882332546636462e-04
+
+
+ 5.3117001056671143e-01 4.0381389856338501e-01
+
+
+ <_>
+
+ 0 -1 1173 -1.9061550265178084e-03
+
+
+ 3.7707018852233887e-01 5.2600151300430298e-01
+
+
+ <_>
+
+ 0 -1 1174 8.9514348655939102e-03
+
+
+ 4.7661679983139038e-01 7.6821839809417725e-01
+
+
+ <_>
+
+ 0 -1 1175 1.3083459809422493e-02
+
+
+ 5.2644628286361694e-01 3.0622220039367676e-01
+
+
+ <_>
+
+ 0 -1 1176 -2.1159330010414124e-01
+
+
+ 6.7371982336044312e-01 4.6958100795745850e-01
+
+
+ <_>
+
+ 0 -1 1177 3.1493250280618668e-03
+
+
+ 5.6448352336883545e-01 4.3869531154632568e-01
+
+
+ <_>
+
+ 0 -1 1178 3.9754100725986063e-04
+
+
+ 4.5260611176490784e-01 5.8956301212310791e-01
+
+
+ <_>
+
+ 0 -1 1179 -1.3814480043947697e-03
+
+
+ 6.0705822706222534e-01 4.9424138665199280e-01
+
+
+ <_>
+
+ 0 -1 1180 -5.8122188784182072e-04
+
+
+ 5.9982132911682129e-01 4.5082521438598633e-01
+
+
+ <_>
+
+ 0 -1 1181 -2.3905329871922731e-03
+
+
+ 4.2055889964103699e-01 5.2238482236862183e-01
+
+
+ <_>
+
+ 0 -1 1182 2.7268929407000542e-02
+
+
+ 5.2064472436904907e-01 3.5633018612861633e-01
+
+
+ <_>
+
+ 0 -1 1183 -3.7658358924090862e-03
+
+
+ 3.1447041034698486e-01 5.2188140153884888e-01
+
+
+ <_>
+
+ 0 -1 1184 -1.4903489500284195e-03
+
+
+ 3.3801960945129395e-01 5.1244372129440308e-01
+
+
+ <_>
+
+ 0 -1 1185 -1.7428230494260788e-02
+
+
+ 5.8299607038497925e-01 4.9197259545326233e-01
+
+
+ <_>
+
+ 0 -1 1186 -1.5278030186891556e-02
+
+
+ 6.1631447076797485e-01 4.6178871393203735e-01
+
+
+ <_>
+
+ 0 -1 1187 3.1995609402656555e-02
+
+
+ 5.1663571596145630e-01 1.7127640545368195e-01
+
+
+ <_>
+
+ 0 -1 1188 -3.8256710395216942e-03
+
+
+ 3.4080120921134949e-01 5.1313877105712891e-01
+
+
+ <_>
+
+ 0 -1 1189 -8.5186436772346497e-03
+
+
+ 6.1055189371109009e-01 4.9979418516159058e-01
+
+
+ <_>
+
+ 0 -1 1190 9.0641621500253677e-04
+
+
+ 4.3272709846496582e-01 5.5823111534118652e-01
+
+
+ <_>
+
+ 0 -1 1191 1.0344849899411201e-02
+
+
+ 4.8556530475616455e-01 5.4524201154708862e-01
+
+
+
+
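+ <!-- stage boundary: 160 weak classifiers, stage threshold 7.9249076843261719e+01 -->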
+ <_>
+ 160
+ 7.9249076843261719e+01
+
+ <_>
+
+ 0 -1 1192 7.8981826081871986e-03
+
+
+ 3.3325248956680298e-01 5.9464621543884277e-01
+
+
+ <_>
+
+ 0 -1 1193 1.6170160379260778e-03
+
+
+ 3.4906411170959473e-01 5.5778688192367554e-01
+
+
+ <_>
+
+ 0 -1 1194 -5.5449741194024682e-04
+
+
+ 5.5425661802291870e-01 3.2915300130844116e-01
+
+
+ <_>
+
+ 0 -1 1195 1.5428980113938451e-03
+
+
+ 3.6125791072845459e-01 5.5459791421890259e-01
+
+
+ <_>
+
+ 0 -1 1196 -1.0329450014978647e-03
+
+
+ 3.5301390290260315e-01 5.5761402845382690e-01
+
+
+ <_>
+
+ 0 -1 1197 7.7698158565908670e-04
+
+
+ 3.9167788624763489e-01 5.6453210115432739e-01
+
+
+ <_>
+
+ 0 -1 1198 1.4320300519466400e-01
+
+
+ 4.6674820780754089e-01 7.0236331224441528e-01
+
+
+ <_>
+
+ 0 -1 1199 -7.3866490274667740e-03
+
+
+ 3.0736848711967468e-01 5.2892577648162842e-01
+
+
+ <_>
+
+ 0 -1 1200 -6.2936742324382067e-04
+
+
+ 5.6221181154251099e-01 4.0370491147041321e-01
+
+
+ <_>
+
+ 0 -1 1201 7.8893528552725911e-04
+
+
+ 5.2676612138748169e-01 3.5578748583793640e-01
+
+
+ <_>
+
+ 0 -1 1202 -1.2228050269186497e-02
+
+
+ 6.6683208942413330e-01 4.6255499124526978e-01
+
+
+ <_>
+
+ 0 -1 1203 3.5420239437371492e-03
+
+
+ 5.5214381217956543e-01 3.8696730136871338e-01
+
+
+ <_>
+
+ 0 -1 1204 -1.0585320414975286e-03
+
+
+ 3.6286780238151550e-01 5.3209269046783447e-01
+
+
+ <_>
+
+ 0 -1 1205 1.4935660146875307e-05
+
+
+ 4.6324449777603149e-01 5.3633230924606323e-01
+
+
+ <_>
+
+ 0 -1 1206 5.2537708543241024e-03
+
+
+ 5.1322317123413086e-01 3.2657089829444885e-01
+
+
+ <_>
+
+ 0 -1 1207 -8.2338023930788040e-03
+
+
+ 6.6936898231506348e-01 4.7741401195526123e-01
+
+
+ <_>
+
+ 0 -1 1208 2.1866810129722580e-05
+
+
+ 4.0538620948791504e-01 5.4579311609268188e-01
+
+
+ <_>
+
+ 0 -1 1209 -3.8150229956954718e-03
+
+
+ 6.4549958705902100e-01 4.7931781411170959e-01
+
+
+ <_>
+
+ 0 -1 1210 1.1105879675596952e-03
+
+
+ 5.2704071998596191e-01 3.5296788811683655e-01
+
+
+ <_>
+
+ 0 -1 1211 -5.7707689702510834e-03
+
+
+ 3.8035470247268677e-01 5.3529578447341919e-01
+
+
+ <_>
+
+ 0 -1 1212 -3.0158339068293571e-03
+
+
+ 5.3394031524658203e-01 3.8871330022811890e-01
+
+
+ <_>
+
+ 0 -1 1213 -8.5453689098358154e-04
+
+
+ 3.5646161437034607e-01 5.2736037969589233e-01
+
+
+ <_>
+
+ 0 -1 1214 1.1050510220229626e-02
+
+
+ 4.6719071269035339e-01 6.8497377634048462e-01
+
+
+ <_>
+
+ 0 -1 1215 4.2605839669704437e-02
+
+
+ 5.1514732837677002e-01 7.0220090448856354e-02
+
+
+ <_>
+
+ 0 -1 1216 -3.0781750101596117e-03
+
+
+ 3.0416610836982727e-01 5.1526021957397461e-01
+
+
+ <_>
+
+ 0 -1 1217 -5.4815728217363358e-03
+
+
+ 6.4302957057952881e-01 4.8972299695014954e-01
+
+
+ <_>
+
+ 0 -1 1218 3.1881860923022032e-03
+
+
+ 5.3074932098388672e-01 3.8262099027633667e-01
+
+
+ <_>
+
+ 0 -1 1219 3.5947180003859103e-04
+
+
+ 4.6500471234321594e-01 5.4219049215316772e-01
+
+
+ <_>
+
+ 0 -1 1220 -4.0705031715333462e-03
+
+
+ 2.8496798872947693e-01 5.0791162252426147e-01
+
+
+ <_>
+
+ 0 -1 1221 -1.4594170264899731e-02
+
+
+ 2.9716458916664124e-01 5.1284617185592651e-01
+
+
+ <_>
+
+ 0 -1 1222 -1.1947689927183092e-04
+
+
+ 5.6310981512069702e-01 4.3430820107460022e-01
+
+
+ <_>
+
+ 0 -1 1223 -6.9344649091362953e-04
+
+
+ 4.4035780429840088e-01 5.3599590063095093e-01
+
+
+ <_>
+
+ 0 -1 1224 1.4834799912932795e-05
+
+
+ 3.4210088849067688e-01 5.1646977663040161e-01
+
+
+ <_>
+
+ 0 -1 1225 9.0296985581517220e-03
+
+
+ 4.6393430233001709e-01 6.1140751838684082e-01
+
+
+ <_>
+
+ 0 -1 1226 -8.0640818923711777e-03
+
+
+ 2.8201588988304138e-01 5.0754940509796143e-01
+
+
+ <_>
+
+ 0 -1 1227 2.6062119752168655e-02
+
+
+ 5.2089059352874756e-01 2.6887780427932739e-01
+
+
+ <_>
+
+ 0 -1 1228 1.7314659431576729e-02
+
+
+ 4.6637138724327087e-01 6.7385399341583252e-01
+
+
+ <_>
+
+ 0 -1 1229 2.2666640579700470e-02
+
+
+ 5.2093499898910522e-01 2.2127239406108856e-01
+
+
+ <_>
+
+ 0 -1 1230 -2.1965929772704840e-03
+
+
+ 6.0631012916564941e-01 4.5381900668144226e-01
+
+
+ <_>
+
+ 0 -1 1231 -9.5282476395368576e-03
+
+
+ 4.6352049708366394e-01 5.2474308013916016e-01
+
+
+ <_>
+
+ 0 -1 1232 8.0943619832396507e-03
+
+
+ 5.2894401550292969e-01 3.9138820767402649e-01
+
+
+ <_>
+
+ 0 -1 1233 -7.2877332568168640e-02
+
+
+ 7.7520018815994263e-01 4.9902349710464478e-01
+
+
+ <_>
+
+ 0 -1 1234 -6.9009521976113319e-03
+
+
+ 2.4280390143394470e-01 5.0480902194976807e-01
+
+
+ <_>
+
+ 0 -1 1235 -1.1308239772915840e-02
+
+
+ 5.7343649864196777e-01 4.8423761129379272e-01
+
+
+ <_>
+
+ 0 -1 1236 5.9613201767206192e-02
+
+
+ 5.0298362970352173e-01 2.5249770283699036e-01
+
+
+ <_>
+
+ 0 -1 1237 -2.8624620754271746e-03
+
+
+ 6.0730451345443726e-01 4.8984599113464355e-01
+
+
+ <_>
+
+ 0 -1 1238 4.4781449250876904e-03
+
+
+ 5.0152891874313354e-01 2.2203169763088226e-01
+
+
+ <_>
+
+ 0 -1 1239 -1.7513240454718471e-03
+
+
+ 6.6144287586212158e-01 4.9338689446449280e-01
+
+
+ <_>
+
+ 0 -1 1240 4.0163420140743256e-02
+
+
+ 5.1808780431747437e-01 3.7410449981689453e-01
+
+
+ <_>
+
+ 0 -1 1241 3.4768949262797832e-04
+
+
+ 4.7204169631004333e-01 5.8180320262908936e-01
+
+
+ <_>
+
+ 0 -1 1242 2.6551650371402502e-03
+
+
+ 3.8050109148025513e-01 5.2213358879089355e-01
+
+
+ <_>
+
+ 0 -1 1243 -8.7706279009580612e-03
+
+
+ 2.9441660642623901e-01 5.2312952280044556e-01
+
+
+ <_>
+
+ 0 -1 1244 -5.5122091434895992e-03
+
+
+ 7.3461771011352539e-01 4.7228169441223145e-01
+
+
+ <_>
+
+ 0 -1 1245 6.8672042107209563e-04
+
+
+ 5.4528760910034180e-01 4.2424130439758301e-01
+
+
+ <_>
+
+ 0 -1 1246 5.6019669864326715e-04
+
+
+ 4.3988621234893799e-01 5.6012850999832153e-01
+
+
+ <_>
+
+ 0 -1 1247 2.4143769405782223e-03
+
+
+ 4.7416868805885315e-01 6.1366218328475952e-01
+
+
+ <_>
+
+ 0 -1 1248 -1.5680900542065501e-03
+
+
+ 6.0445529222488403e-01 4.5164099335670471e-01
+
+
+ <_>
+
+ 0 -1 1249 -3.6827491130679846e-03
+
+
+ 2.4524590373039246e-01 5.2949821949005127e-01
+
+
+ <_>
+
+ 0 -1 1250 -2.9409190756268799e-04
+
+
+ 3.7328380346298218e-01 5.2514511346817017e-01
+
+
+ <_>
+
+ 0 -1 1251 4.2847759323194623e-04
+
+
+ 5.4988098144531250e-01 4.0655350685119629e-01
+
+
+ <_>
+
+ 0 -1 1252 -4.8817070201039314e-03
+
+
+ 2.1399089694023132e-01 4.9999570846557617e-01
+
+
+ <_>
+
+ 0 -1 1253 2.7272020815871656e-04
+
+
+ 4.6502870321273804e-01 5.8134287595748901e-01
+
+
+ <_>
+
+ 0 -1 1254 2.0947199664078653e-04
+
+
+ 4.3874868750572205e-01 5.5727928876876831e-01
+
+
+ <_>
+
+ 0 -1 1255 4.8501189798116684e-02
+
+
+ 5.2449727058410645e-01 3.2128891348838806e-01
+
+
+ <_>
+
+ 0 -1 1256 -4.5166411437094212e-03
+
+
+ 6.0568130016326904e-01 4.5458820462226868e-01
+
+
+ <_>
+
+ 0 -1 1257 -1.2291680090129375e-02
+
+
+ 2.0409290492534637e-01 5.1522141695022583e-01
+
+
+ <_>
+
+ 0 -1 1258 4.8549679922871292e-04
+
+
+ 5.2376049757003784e-01 3.7395030260086060e-01
+
+
+ <_>
+
+ 0 -1 1259 3.0556049197912216e-02
+
+
+ 4.9605339765548706e-01 5.9382462501525879e-01
+
+
+ <_>
+
+ 0 -1 1260 -1.5105320198927075e-04
+
+
+ 5.3513038158416748e-01 4.1452041268348694e-01
+
+
+ <_>
+
+ 0 -1 1261 2.4937440175563097e-03
+
+
+ 4.6933668851852417e-01 5.5149412155151367e-01
+
+
+ <_>
+
+ 0 -1 1262 -1.2382130138576031e-02
+
+
+ 6.7913967370986938e-01 4.6816679835319519e-01
+
+
+ <_>
+
+ 0 -1 1263 -5.1333461888134480e-03
+
+
+ 3.6087390780448914e-01 5.2291601896286011e-01
+
+
+ <_>
+
+ 0 -1 1264 5.1919277757406235e-04
+
+
+ 5.3000730276107788e-01 3.6336138844490051e-01
+
+
+ <_>
+
+ 0 -1 1265 1.5060420334339142e-01
+
+
+ 5.1573169231414795e-01 2.2117820382118225e-01
+
+
+ <_>
+
+ 0 -1 1266 7.7144149690866470e-03
+
+
+ 4.4104969501495361e-01 5.7766091823577881e-01
+
+
+ <_>
+
+ 0 -1 1267 9.4443522393703461e-03
+
+
+ 5.4018551111221313e-01 3.7566500902175903e-01
+
+
+ <_>
+
+ 0 -1 1268 2.5006249779835343e-04
+
+
+ 4.3682709336280823e-01 5.6073749065399170e-01
+
+
+ <_>
+
+ 0 -1 1269 -3.3077150583267212e-03
+
+
+ 4.2447990179061890e-01 5.5182307958602905e-01
+
+
+ <_>
+
+ 0 -1 1270 7.4048910755664110e-04
+
+
+ 4.4969621300697327e-01 5.9005767107009888e-01
+
+
+ <_>
+
+ 0 -1 1271 4.4092051684856415e-02
+
+
+ 5.2934932708740234e-01 3.1563550233840942e-01
+
+
+ <_>
+
+ 0 -1 1272 3.3639909233897924e-03
+
+
+ 4.4832968711853027e-01 5.8486622571945190e-01
+
+
+ <_>
+
+ 0 -1 1273 -3.9760079234838486e-03
+
+
+ 4.5595070719718933e-01 5.4836392402648926e-01
+
+
+ <_>
+
+ 0 -1 1274 2.7716930489987135e-03
+
+
+ 5.3417861461639404e-01 3.7924841046333313e-01
+
+
+ <_>
+
+ 0 -1 1275 -2.4123019829858094e-04
+
+
+ 5.6671887636184692e-01 4.5769730210304260e-01
+
+
+ <_>
+
+ 0 -1 1276 4.9425667384639382e-04
+
+
+ 4.4212448596954346e-01 5.6287872791290283e-01
+
+
+ <_>
+
+ 0 -1 1277 -3.8876468897797167e-04
+
+
+ 4.2883709073066711e-01 5.3910630941390991e-01
+
+
+ <_>
+
+ 0 -1 1278 -5.0048898905515671e-02
+
+
+ 6.8995130062103271e-01 4.7037428617477417e-01
+
+
+ <_>
+
+ 0 -1 1279 -3.6635480821132660e-02
+
+
+ 2.2177790105342865e-01 5.1918262243270874e-01
+
+
+ <_>
+
+ 0 -1 1280 2.4273579474538565e-03
+
+
+ 5.1362240314483643e-01 3.4973978996276855e-01
+
+
+ <_>
+
+ 0 -1 1281 1.9558030180633068e-03
+
+
+ 4.8261928558349609e-01 6.4083808660507202e-01
+
+
+ <_>
+
+ 0 -1 1282 -1.7494610510766506e-03
+
+
+ 3.9228358864784241e-01 5.2726852893829346e-01
+
+
+ <_>
+
+ 0 -1 1283 1.3955079950392246e-02
+
+
+ 5.0782018899917603e-01 8.4165048599243164e-01
+
+
+ <_>
+
+ 0 -1 1284 -2.1896739781368524e-04
+
+
+ 5.5204898118972778e-01 4.3142348527908325e-01
+
+
+ <_>
+
+ 0 -1 1285 -1.5131309628486633e-03
+
+
+ 3.9346051216125488e-01 5.3825712203979492e-01
+
+
+ <_>
+
+ 0 -1 1286 -4.3622800149023533e-03
+
+
+ 7.3706287145614624e-01 4.7364759445190430e-01
+
+
+ <_>
+
+ 0 -1 1287 6.5160587430000305e-02
+
+
+ 5.1592797040939331e-01 3.2815951108932495e-01
+
+
+ <_>
+
+ 0 -1 1288 -2.3567399475723505e-03
+
+
+ 3.6728268861770630e-01 5.1728862524032593e-01
+
+
+ <_>
+
+ 0 -1 1289 1.5146659687161446e-02
+
+
+ 5.0314939022064209e-01 6.6876041889190674e-01
+
+
+ <_>
+
+ 0 -1 1290 -2.2850960493087769e-02
+
+
+ 6.7675197124481201e-01 4.7095969319343567e-01
+
+
+ <_>
+
+ 0 -1 1291 4.8867650330066681e-03
+
+
+ 5.2579981088638306e-01 4.0598788857460022e-01
+
+
+ <_>
+
+ 0 -1 1292 1.7619599821045995e-03
+
+
+ 4.6962729096412659e-01 6.6882789134979248e-01
+
+
+ <_>
+
+ 0 -1 1293 -1.2942519970238209e-03
+
+
+ 4.3207129836082458e-01 5.3442817926406860e-01
+
+
+ <_>
+
+ 0 -1 1294 1.0929949581623077e-02
+
+
+ 4.9977061152458191e-01 1.6374860703945160e-01
+
+
+ <_>
+
+ 0 -1 1295 2.9958489903947338e-05
+
+
+ 4.2824178934097290e-01 5.6332242488861084e-01
+
+
+ <_>
+
+ 0 -1 1296 -6.5884361974895000e-03
+
+
+ 6.7721211910247803e-01 4.7005268931388855e-01
+
+
+ <_>
+
+ 0 -1 1297 3.2527779694646597e-03
+
+
+ 5.3133970499038696e-01 4.5361489057540894e-01
+
+
+ <_>
+
+ 0 -1 1298 -4.0435739792883396e-03
+
+
+ 5.6600618362426758e-01 4.4133889675140381e-01
+
+
+ <_>
+
+ 0 -1 1299 -1.2523540062829852e-03
+
+
+ 3.7319138646125793e-01 5.3564518690109253e-01
+
+
+ <_>
+
+ 0 -1 1300 1.9246719602961093e-04
+
+
+ 5.1899862289428711e-01 3.7388110160827637e-01
+
+
+ <_>
+
+ 0 -1 1301 -3.8589671254158020e-02
+
+
+ 2.9563739895820618e-01 5.1888108253479004e-01
+
+
+ <_>
+
+ 0 -1 1302 1.5489870565943420e-04
+
+
+ 4.3471351265907288e-01 5.5095332860946655e-01
+
+
+ <_>
+
+ 0 -1 1303 -3.3763848245143890e-02
+
+
+ 3.2303300499916077e-01 5.1954758167266846e-01
+
+
+ <_>
+
+ 0 -1 1304 -8.2657067105174065e-03
+
+
+ 5.9754890203475952e-01 4.5521140098571777e-01
+
+
+ <_>
+
+ 0 -1 1305 1.4481440302915871e-05
+
+
+ 4.7456780076026917e-01 5.4974269866943359e-01
+
+
+ <_>
+
+ 0 -1 1306 1.4951299817766994e-05
+
+
+ 4.3244731426239014e-01 5.4806441068649292e-01
+
+
+ <_>
+
+ 0 -1 1307 -1.8741799518465996e-02
+
+
+ 1.5800529718399048e-01 5.1785331964492798e-01
+
+
+ <_>
+
+ 0 -1 1308 1.7572239739820361e-03
+
+
+ 4.5176368951797485e-01 5.7737642526626587e-01
+
+
+ <_>
+
+ 0 -1 1309 -3.1391119118779898e-03
+
+
+ 4.1496479511260986e-01 5.4608422517776489e-01
+
+
+ <_>
+
+ 0 -1 1310 6.6656779381446540e-05
+
+
+ 4.0390908718109131e-01 5.2930849790573120e-01
+
+
+ <_>
+
+ 0 -1 1311 6.7743421532213688e-03
+
+
+ 4.7676518559455872e-01 6.1219561100006104e-01
+
+
+ <_>
+
+ 0 -1 1312 -7.3868161998689175e-03
+
+
+ 3.5862588882446289e-01 5.1872807741165161e-01
+
+
+ <_>
+
+ 0 -1 1313 1.4040930196642876e-02
+
+
+ 4.7121399641036987e-01 5.5761557817459106e-01
+
+
+ <_>
+
+ 0 -1 1314 -5.5258329957723618e-03
+
+
+ 2.6610270142555237e-01 5.0392812490463257e-01
+
+
+ <_>
+
+ 0 -1 1315 3.8684239983558655e-01
+
+
+ 5.1443397998809814e-01 2.5258991122245789e-01
+
+
+ <_>
+
+ 0 -1 1316 1.1459240340627730e-04
+
+
+ 4.2849949002265930e-01 5.4233711957931519e-01
+
+
+ <_>
+
+ 0 -1 1317 -1.8467569723725319e-02
+
+
+ 3.8858351111412048e-01 5.2130621671676636e-01
+
+
+ <_>
+
+ 0 -1 1318 -4.5907011372037232e-04
+
+
+ 5.4125630855560303e-01 4.2359098792076111e-01
+
+
+ <_>
+
+ 0 -1 1319 1.2527540093287826e-03
+
+
+ 4.8993051052093506e-01 6.6240912675857544e-01
+
+
+ <_>
+
+ 0 -1 1320 1.4910609461367130e-03
+
+
+ 5.2867782115936279e-01 4.0400519967079163e-01
+
+
+ <_>
+
+ 0 -1 1321 -7.5435562757775187e-04
+
+
+ 6.0329902172088623e-01 4.7951200604438782e-01
+
+
+ <_>
+
+ 0 -1 1322 -6.9478838704526424e-03
+
+
+ 4.0844011306762695e-01 5.3735041618347168e-01
+
+
+ <_>
+
+ 0 -1 1323 2.8092920547351241e-04
+
+
+ 4.8460629582405090e-01 5.7593822479248047e-01
+
+
+ <_>
+
+ 0 -1 1324 9.6073717577382922e-04
+
+
+ 5.1647412776947021e-01 3.5549798607826233e-01
+
+
+ <_>
+
+ 0 -1 1325 -2.6883929967880249e-04
+
+
+ 5.6775820255279541e-01 4.7317659854888916e-01
+
+
+ <_>
+
+ 0 -1 1326 2.1599370520561934e-03
+
+
+ 4.7314870357513428e-01 7.0705670118331909e-01
+
+
+ <_>
+
+ 0 -1 1327 5.6235301308333874e-03
+
+
+ 5.2402430772781372e-01 2.7817919850349426e-01
+
+
+ <_>
+
+ 0 -1 1328 -5.0243991427123547e-03
+
+
+ 2.8370139002799988e-01 5.0623041391372681e-01
+
+
+ <_>
+
+ 0 -1 1329 -9.7611639648675919e-03
+
+
+ 7.4007177352905273e-01 4.9345690011978149e-01
+
+
+ <_>
+
+ 0 -1 1330 4.1515100747346878e-03
+
+
+ 5.1191312074661255e-01 3.4070080518722534e-01
+
+
+ <_>
+
+ 0 -1 1331 6.2465080991387367e-03
+
+
+ 4.9237880110740662e-01 6.5790587663650513e-01
+
+
+ <_>
+
+ 0 -1 1332 -7.0597478188574314e-03
+
+
+ 2.4347110092639923e-01 5.0328421592712402e-01
+
+
+ <_>
+
+ 0 -1 1333 -2.0587709732353687e-03
+
+
+ 5.9003108739852905e-01 4.6950870752334595e-01
+
+
+ <_>
+
+ 0 -1 1334 -2.4146060459315777e-03
+
+
+ 3.6473178863525391e-01 5.1892018318176270e-01
+
+
+ <_>
+
+ 0 -1 1335 -1.4817609917372465e-03
+
+
+ 6.0349482297897339e-01 4.9401280283927917e-01
+
+
+ <_>
+
+ 0 -1 1336 -6.3016400672495365e-03
+
+
+ 5.8189898729324341e-01 4.5604279637336731e-01
+
+
+ <_>
+
+ 0 -1 1337 3.4763428848236799e-03
+
+
+ 5.2174758911132812e-01 3.4839931130409241e-01
+
+
+ <_>
+
+ 0 -1 1338 -2.2250870242714882e-02
+
+
+ 2.3607000708580017e-01 5.0320827960968018e-01
+
+
+ <_>
+
+ 0 -1 1339 -3.0612550675868988e-02
+
+
+ 6.4991867542266846e-01 4.9149191379547119e-01
+
+
+ <_>
+
+ 0 -1 1340 1.3057479634881020e-02
+
+
+ 4.4133231043815613e-01 5.6837642192840576e-01
+
+
+ <_>
+
+ 0 -1 1341 -6.0095742810517550e-04
+
+
+ 4.3597310781478882e-01 5.3334832191467285e-01
+
+
+ <_>
+
+ 0 -1 1342 -4.1514250915497541e-04
+
+
+ 5.5040627717971802e-01 4.3260601162910461e-01
+
+
+ <_>
+
+ 0 -1 1343 -1.3776290230453014e-02
+
+
+ 4.0641129016876221e-01 5.2015489339828491e-01
+
+
+ <_>
+
+ 0 -1 1344 -3.2296508550643921e-02
+
+
+ 4.7351971268653870e-02 4.9771949648857117e-01
+
+
+ <_>
+
+ 0 -1 1345 5.3556978702545166e-02
+
+
+ 4.8817330598831177e-01 6.6669392585754395e-01
+
+
+ <_>
+
+ 0 -1 1346 8.1889545544981956e-03
+
+
+ 5.4000371694564819e-01 4.2408201098442078e-01
+
+
+ <_>
+
+ 0 -1 1347 2.1055320394225419e-04
+
+
+ 4.8020479083061218e-01 5.5638527870178223e-01
+
+
+ <_>
+
+ 0 -1 1348 -2.4382730480283499e-03
+
+
+ 7.3877930641174316e-01 4.7736850380897522e-01
+
+
+ <_>
+
+ 0 -1 1349 3.2835570164024830e-03
+
+
+ 5.2885460853576660e-01 3.1712919473648071e-01
+
+
+ <_>
+
+ 0 -1 1350 2.3729570675641298e-03
+
+
+ 4.7508129477500916e-01 7.0601707696914673e-01
+
+
+ <_>
+
+ 0 -1 1351 -1.4541699783876538e-03
+
+
+ 3.8117301464080811e-01 5.3307390213012695e-01
+
+
+
+
+ <_>
+ 177
+ 8.7696029663085938e+01
+
+ <_>
+
+ 0 -1 1352 5.5755238980054855e-02
+
+
+ 4.0191569924354553e-01 6.8060368299484253e-01
+
+
+ <_>
+
+ 0 -1 1353 2.4730248842388391e-03
+
+
+ 3.3511489629745483e-01 5.9657198190689087e-01
+
+
+ <_>
+
+ 0 -1 1354 -3.5031698644161224e-04
+
+
+ 5.5577081441879272e-01 3.4822869300842285e-01
+
+
+ <_>
+
+ 0 -1 1355 5.4167630150914192e-04
+
+
+ 4.2608588933944702e-01 5.6933808326721191e-01
+
+
+ <_>
+
+ 0 -1 1356 7.7193678589537740e-04
+
+
+ 3.4942400455474854e-01 5.4336887598037720e-01
+
+
+ <_>
+
+ 0 -1 1357 -1.5999219613149762e-03
+
+
+ 4.0284991264343262e-01 5.4843592643737793e-01
+
+
+ <_>
+
+ 0 -1 1358 -1.1832080053864047e-04
+
+
+ 3.8069018721580505e-01 5.4254651069641113e-01
+
+
+ <_>
+
+ 0 -1 1359 3.2909031142480671e-04
+
+
+ 2.6201000809669495e-01 5.4295217990875244e-01
+
+
+ <_>
+
+ 0 -1 1360 2.9518108931370080e-04
+
+
+ 3.7997689843177795e-01 5.3992640972137451e-01
+
+
+ <_>
+
+ 0 -1 1361 9.0466710389591753e-05
+
+
+ 4.4336450099945068e-01 5.4402261972427368e-01
+
+
+ <_>
+
+ 0 -1 1362 1.5007190086180344e-05
+
+
+ 3.7196549773216248e-01 5.4091197252273560e-01
+
+
+ <_>
+
+ 0 -1 1363 1.3935610651969910e-01
+
+
+ 5.5253958702087402e-01 4.4790428876876831e-01
+
+
+ <_>
+
+ 0 -1 1364 1.6461990308016539e-03
+
+
+ 4.2645010352134705e-01 5.7721698284149170e-01
+
+
+ <_>
+
+ 0 -1 1365 4.9984431825578213e-04
+
+
+ 4.3595260381698608e-01 5.6858712434768677e-01
+
+
+ <_>
+
+ 0 -1 1366 -1.0971280280500650e-03
+
+
+ 3.3901369571685791e-01 5.2054089307785034e-01
+
+
+ <_>
+
+ 0 -1 1367 6.6919892560690641e-04
+
+
+ 4.5574560761451721e-01 5.9806597232818604e-01
+
+
+ <_>
+
+ 0 -1 1368 8.6471042595803738e-04
+
+
+ 5.1348412036895752e-01 2.9440331459045410e-01
+
+
+ <_>
+
+ 0 -1 1369 -2.7182599296793342e-04
+
+
+ 3.9065781235694885e-01 5.3771811723709106e-01
+
+
+ <_>
+
+ 0 -1 1370 3.0249499104684219e-05
+
+
+ 3.6796098947525024e-01 5.2256888151168823e-01
+
+
+ <_>
+
+ 0 -1 1371 -8.5225896909832954e-03
+
+
+ 7.2931021451950073e-01 4.8923650383949280e-01
+
+
+ <_>
+
+ 0 -1 1372 1.6705560265108943e-03
+
+
+ 4.3453249335289001e-01 5.6961381435394287e-01
+
+
+ <_>
+
+ 0 -1 1373 -7.1433838456869125e-03
+
+
+ 2.5912800431251526e-01 5.2256238460540771e-01
+
+
+ <_>
+
+ 0 -1 1374 -1.6319369897246361e-02
+
+
+ 6.9222790002822876e-01 4.6515759825706482e-01
+
+
+ <_>
+
+ 0 -1 1375 4.8034260980784893e-03
+
+
+ 5.3522628545761108e-01 3.2863029837608337e-01
+
+
+ <_>
+
+ 0 -1 1376 -7.5421929359436035e-03
+
+
+ 2.0405440032482147e-01 5.0345462560653687e-01
+
+
+ <_>
+
+ 0 -1 1377 -1.4363110065460205e-02
+
+
+ 6.8048888444900513e-01 4.8890590667724609e-01
+
+
+ <_>
+
+ 0 -1 1378 8.9063588529825211e-04
+
+
+ 5.3106957674026489e-01 3.8954809308052063e-01
+
+
+ <_>
+
+ 0 -1 1379 -4.4060191139578819e-03
+
+
+ 5.7415628433227539e-01 4.3724268674850464e-01
+
+
+ <_>
+
+ 0 -1 1380 -1.8862540309783071e-04
+
+
+ 2.8317859768867493e-01 5.0982052087783813e-01
+
+
+ <_>
+
+ 0 -1 1381 -3.7979281041771173e-03
+
+
+ 3.3725079894065857e-01 5.2465802431106567e-01
+
+
+ <_>
+
+ 0 -1 1382 1.4627049677073956e-04
+
+
+ 5.3066742420196533e-01 3.9117100834846497e-01
+
+
+ <_>
+
+ 0 -1 1383 -4.9164638767251745e-05
+
+
+ 5.4624962806701660e-01 3.9427208900451660e-01
+
+
+ <_>
+
+ 0 -1 1384 -3.3582501113414764e-02
+
+
+ 2.1578240394592285e-01 5.0482118129730225e-01
+
+
+ <_>
+
+ 0 -1 1385 -3.5339309833943844e-03
+
+
+ 6.4653122425079346e-01 4.8726969957351685e-01
+
+
+ <_>
+
+ 0 -1 1386 5.0144111737608910e-03
+
+
+ 4.6176680922508240e-01 6.2480747699737549e-01
+
+
+ <_>
+
+ 0 -1 1387 1.8817370757460594e-02
+
+
+ 5.2206891775131226e-01 2.0000520348548889e-01
+
+
+ <_>
+
+ 0 -1 1388 -1.3434339780360460e-03
+
+
+ 4.0145379304885864e-01 5.3016197681427002e-01
+
+
+ <_>
+
+ 0 -1 1389 1.7557960236445069e-03
+
+
+ 4.7940391302108765e-01 5.6531697511672974e-01
+
+
+ <_>
+
+ 0 -1 1390 -9.5637463033199310e-02
+
+
+ 2.0341950654983521e-01 5.0067067146301270e-01
+
+
+ <_>
+
+ 0 -1 1391 -2.2241229191422462e-02
+
+
+ 7.6724731922149658e-01 5.0463402271270752e-01
+
+
+ <_>
+
+ 0 -1 1392 -1.5575819648802280e-02
+
+
+ 7.4903422594070435e-01 4.7558510303497314e-01
+
+
+ <_>
+
+ 0 -1 1393 5.3599118255078793e-03
+
+
+ 5.3653037548065186e-01 4.0046709775924683e-01
+
+
+ <_>
+
+ 0 -1 1394 -2.1763499826192856e-02
+
+
+ 7.4015498161315918e-02 4.9641749262809753e-01
+
+
+ <_>
+
+ 0 -1 1395 -1.6561590135097504e-01
+
+
+ 2.8591030836105347e-01 5.2180862426757812e-01
+
+
+ <_>
+
+ 0 -1 1396 1.6461320046801120e-04
+
+
+ 4.1916158795356750e-01 5.3807932138442993e-01
+
+
+ <_>
+
+ 0 -1 1397 -8.9077502489089966e-03
+
+
+ 6.2731927633285522e-01 4.8774048686027527e-01
+
+
+ <_>
+
+ 0 -1 1398 8.6346449097618461e-04
+
+
+ 5.1599407196044922e-01 3.6710259318351746e-01
+
+
+ <_>
+
+ 0 -1 1399 -1.3751760125160217e-03
+
+
+ 5.8843767642974854e-01 4.5790839195251465e-01
+
+
+ <_>
+
+ 0 -1 1400 -1.4081239933148026e-03
+
+
+ 3.5605099797248840e-01 5.1399451494216919e-01
+
+
+ <_>
+
+ 0 -1 1401 -3.9342888630926609e-03
+
+
+ 5.9942889213562012e-01 4.6642720699310303e-01
+
+
+ <_>
+
+ 0 -1 1402 -3.1966928392648697e-02
+
+
+ 3.3454620838165283e-01 5.1441830396652222e-01
+
+
+ <_>
+
+ 0 -1 1403 -1.5089280168467667e-05
+
+
+ 5.5826562643051147e-01 4.4140571355819702e-01
+
+
+ <_>
+
+ 0 -1 1404 5.1994470413774252e-04
+
+
+ 4.6236801147460938e-01 6.1689937114715576e-01
+
+
+ <_>
+
+ 0 -1 1405 -3.4220460802316666e-03
+
+
+ 6.5570747852325439e-01 4.9748051166534424e-01
+
+
+ <_>
+
+ 0 -1 1406 1.7723299970384687e-04
+
+
+ 5.2695018053054810e-01 3.9019080996513367e-01
+
+
+ <_>
+
+ 0 -1 1407 1.5716759953647852e-03
+
+
+ 4.6333730220794678e-01 5.7904577255249023e-01
+
+
+ <_>
+
+ 0 -1 1408 -8.9041329920291901e-03
+
+
+ 2.6896080374717712e-01 5.0535911321640015e-01
+
+
+ <_>
+
+ 0 -1 1409 4.0677518700249493e-04
+
+
+ 5.4566031694412231e-01 4.3298989534378052e-01
+
+
+ <_>
+
+ 0 -1 1410 6.7604780197143555e-03
+
+
+ 4.6489939093589783e-01 6.6897618770599365e-01
+
+
+ <_>
+
+ 0 -1 1411 2.9100088868290186e-03
+
+
+ 5.3097039461135864e-01 3.3778399229049683e-01
+
+
+ <_>
+
+ 0 -1 1412 1.3885459629818797e-03
+
+
+ 4.0747389197349548e-01 5.3491330146789551e-01
+
+
+ <_>
+
+ 0 -1 1413 -7.6764263212680817e-02
+
+
+ 1.9921760261058807e-01 5.2282422780990601e-01
+
+
+ <_>
+
+ 0 -1 1414 -2.2688310127705336e-04
+
+
+ 5.4385018348693848e-01 4.2530721426010132e-01
+
+
+ <_>
+
+ 0 -1 1415 -6.3094152137637138e-03
+
+
+ 4.2591789364814758e-01 5.3789097070693970e-01
+
+
+ <_>
+
+ 0 -1 1416 -1.1007279902696609e-01
+
+
+ 6.9041568040847778e-01 4.7217491269111633e-01
+
+
+ <_>
+
+ 0 -1 1417 2.8619659133255482e-04
+
+
+ 4.5249149203300476e-01 5.5483061075210571e-01
+
+
+ <_>
+
+ 0 -1 1418 2.9425329557852820e-05
+
+
+ 5.3703737258911133e-01 4.2364639043807983e-01
+
+
+ <_>
+
+ 0 -1 1419 -2.4886570870876312e-02
+
+
+ 6.4235579967498779e-01 4.9693039059638977e-01
+
+
+ <_>
+
+ 0 -1 1420 3.3148851245641708e-02
+
+
+ 4.9884751439094543e-01 1.6138119995594025e-01
+
+
+ <_>
+
+ 0 -1 1421 7.8491691965609789e-04
+
+
+ 5.4160261154174805e-01 4.2230090498924255e-01
+
+
+ <_>
+
+ 0 -1 1422 4.7087189741432667e-03
+
+
+ 4.5763289928436279e-01 6.0275578498840332e-01
+
+
+ <_>
+
+ 0 -1 1423 2.4144479539245367e-03
+
+
+ 5.3089731931686401e-01 4.4224989414215088e-01
+
+
+ <_>
+
+ 0 -1 1424 1.9523180089890957e-03
+
+
+ 4.7056341171264648e-01 6.6633248329162598e-01
+
+
+ <_>
+
+ 0 -1 1425 1.3031980488449335e-03
+
+
+ 4.4061261415481567e-01 5.5269622802734375e-01
+
+
+ <_>
+
+ 0 -1 1426 4.4735497795045376e-03
+
+
+ 5.1290237903594971e-01 3.3014988899230957e-01
+
+
+ <_>
+
+ 0 -1 1427 -2.6652868837118149e-03
+
+
+ 3.1354710459709167e-01 5.1750361919403076e-01
+
+
+ <_>
+
+ 0 -1 1428 1.3666770246345550e-04
+
+
+ 4.1193708777427673e-01 5.3068768978118896e-01
+
+
+ <_>
+
+ 0 -1 1429 -1.7126450315117836e-02
+
+
+ 6.1778062582015991e-01 4.8365789651870728e-01
+
+
+ <_>
+
+ 0 -1 1430 -2.6601430727168918e-04
+
+
+ 3.6543309688568115e-01 5.1697367429733276e-01
+
+
+ <_>
+
+ 0 -1 1431 -2.2932380437850952e-02
+
+
+ 3.4909150004386902e-01 5.1639920473098755e-01
+
+
+ <_>
+
+ 0 -1 1432 2.3316550068557262e-03
+
+
+ 5.1662999391555786e-01 3.7093898653984070e-01
+
+
+ <_>
+
+ 0 -1 1433 1.6925660893321037e-02
+
+
+ 5.0147360563278198e-01 8.0539882183074951e-01
+
+
+ <_>
+
+ 0 -1 1434 -8.9858826249837875e-03
+
+
+ 6.4707887172698975e-01 4.6570208668708801e-01
+
+
+ <_>
+
+ 0 -1 1435 -1.1874699965119362e-02
+
+
+ 3.2463788986206055e-01 5.2587550878524780e-01
+
+
+ <_>
+
+ 0 -1 1436 1.9350569345988333e-04
+
+
+ 5.1919418573379517e-01 3.8396438956260681e-01
+
+
+ <_>
+
+ 0 -1 1437 5.8713490143418312e-03
+
+
+ 4.9181339144706726e-01 6.1870431900024414e-01
+
+
+ <_>
+
+ 0 -1 1438 -2.4838790297508240e-01
+
+
+ 1.8368029594421387e-01 4.9881500005722046e-01
+
+
+ <_>
+
+ 0 -1 1439 1.2256000190973282e-02
+
+
+ 5.2270537614822388e-01 3.6320298910140991e-01
+
+
+ <_>
+
+ 0 -1 1440 8.3990179700776935e-04
+
+
+ 4.4902500510215759e-01 5.7741481065750122e-01
+
+
+ <_>
+
+ 0 -1 1441 2.5407369248569012e-03
+
+
+ 4.8047870397567749e-01 5.8582991361618042e-01
+
+
+ <_>
+
+ 0 -1 1442 -1.4822429977357388e-02
+
+
+ 2.5210499763488770e-01 5.0235372781753540e-01
+
+
+ <_>
+
+ 0 -1 1443 -5.7973959483206272e-03
+
+
+ 5.9966957569122314e-01 4.8537150025367737e-01
+
+
+ <_>
+
+ 0 -1 1444 7.2662148158997297e-04
+
+
+ 5.1537168025970459e-01 3.6717799305915833e-01
+
+
+ <_>
+
+ 0 -1 1445 -1.7232580110430717e-02
+
+
+ 6.6217190027236938e-01 4.9946561455726624e-01
+
+
+ <_>
+
+ 0 -1 1446 7.8624086454510689e-03
+
+
+ 4.6333950757980347e-01 6.2561017274856567e-01
+
+
+ <_>
+
+ 0 -1 1447 -4.7343620099127293e-03
+
+
+ 3.6155730485916138e-01 5.2818852663040161e-01
+
+
+ <_>
+
+ 0 -1 1448 8.3048478700220585e-04
+
+
+ 4.4428890943527222e-01 5.5509579181671143e-01
+
+
+ <_>
+
+ 0 -1 1449 7.6602199114859104e-03
+
+
+ 5.1629352569580078e-01 2.6133549213409424e-01
+
+
+ <_>
+
+ 0 -1 1450 -4.1048377752304077e-03
+
+
+ 2.7896320819854736e-01 5.0190317630767822e-01
+
+
+ <_>
+
+ 0 -1 1451 4.8512578941881657e-03
+
+
+ 4.9689841270446777e-01 5.6616681814193726e-01
+
+
+ <_>
+
+ 0 -1 1452 9.9896453320980072e-04
+
+
+ 4.4456079602241516e-01 5.5518132448196411e-01
+
+
+ <_>
+
+ 0 -1 1453 -2.7023631334304810e-01
+
+
+ 2.9388209804892540e-02 5.1513141393661499e-01
+
+
+ <_>
+
+ 0 -1 1454 -1.3090680353343487e-02
+
+
+ 5.6993997097015381e-01 4.4474598765373230e-01
+
+
+ <_>
+
+ 0 -1 1455 -9.4342790544033051e-03
+
+
+ 4.3054661154747009e-01 5.4878950119018555e-01
+
+
+ <_>
+
+ 0 -1 1456 -1.5482039889320731e-03
+
+
+ 3.6803171038627625e-01 5.1280808448791504e-01
+
+
+ <_>
+
+ 0 -1 1457 5.3746132180094719e-03
+
+
+ 4.8389169573783875e-01 6.1015558242797852e-01
+
+
+ <_>
+
+ 0 -1 1458 1.5786769799888134e-03
+
+
+ 5.3252232074737549e-01 4.1185480356216431e-01
+
+
+ <_>
+
+ 0 -1 1459 3.6856050137430429e-03
+
+
+ 4.8109480738639832e-01 6.2523031234741211e-01
+
+
+ <_>
+
+ 0 -1 1460 9.3887019902467728e-03
+
+
+ 5.2002298831939697e-01 3.6294108629226685e-01
+
+
+ <_>
+
+ 0 -1 1461 1.2792630121111870e-02
+
+
+ 4.9617099761962891e-01 6.7380160093307495e-01
+
+
+ <_>
+
+ 0 -1 1462 -3.3661040943115950e-03
+
+
+ 4.0602791309356689e-01 5.2835988998413086e-01
+
+
+ <_>
+
+ 0 -1 1463 3.9771420415490866e-04
+
+
+ 4.6741139888763428e-01 5.9007751941680908e-01
+
+
+ <_>
+
+ 0 -1 1464 1.4868030557408929e-03
+
+
+ 4.5191168785095215e-01 6.0820537805557251e-01
+
+
+ <_>
+
+ 0 -1 1465 -8.8686749339103699e-02
+
+
+ 2.8078991174697876e-01 5.1809918880462646e-01
+
+
+ <_>
+
+ 0 -1 1466 -7.4296112870797515e-05
+
+
+ 5.2955842018127441e-01 4.0876251459121704e-01
+
+
+ <_>
+
+ 0 -1 1467 -1.4932939848222304e-05
+
+
+ 5.4614001512527466e-01 4.5385429263114929e-01
+
+
+ <_>
+
+ 0 -1 1468 5.9162238612771034e-03
+
+
+ 5.3291612863540649e-01 4.1921341419219971e-01
+
+
+ <_>
+
+ 0 -1 1469 1.1141640134155750e-03
+
+
+ 4.5120179653167725e-01 5.7062172889709473e-01
+
+
+ <_>
+
+ 0 -1 1470 8.9249362645205110e-05
+
+
+ 4.5778059959411621e-01 5.8976382017135620e-01
+
+
+ <_>
+
+ 0 -1 1471 2.5319510605186224e-03
+
+
+ 5.2996039390563965e-01 3.3576390147209167e-01
+
+
+ <_>
+
+ 0 -1 1472 1.2426200322806835e-02
+
+
+ 4.9590590596199036e-01 1.3466019928455353e-01
+
+
+ <_>
+
+ 0 -1 1473 2.8335750102996826e-02
+
+
+ 5.1170790195465088e-01 6.1043637106195092e-04
+
+
+ <_>
+
+ 0 -1 1474 6.6165882162749767e-03
+
+
+ 4.7363498806953430e-01 7.0116281509399414e-01
+
+
+ <_>
+
+ 0 -1 1475 8.0468766391277313e-03
+
+
+ 5.2164179086685181e-01 3.2828199863433838e-01
+
+
+ <_>
+
+ 0 -1 1476 -1.1193980462849140e-03
+
+
+ 5.8098608255386353e-01 4.5637390017509460e-01
+
+
+ <_>
+
+ 0 -1 1477 1.3277590274810791e-02
+
+
+ 5.3983622789382935e-01 4.1039010882377625e-01
+
+
+ <_>
+
+ 0 -1 1478 4.8794739996083081e-04
+
+
+ 4.2492860555648804e-01 5.4105907678604126e-01
+
+
+ <_>
+
+ 0 -1 1479 1.1243170127272606e-02
+
+
+ 5.2699637413024902e-01 3.4382158517837524e-01
+
+
+ <_>
+
+ 0 -1 1480 -8.9896668214350939e-04
+
+
+ 5.6330758333206177e-01 4.4566130638122559e-01
+
+
+ <_>
+
+ 0 -1 1481 6.6677159629762173e-03
+
+
+ 5.3128892183303833e-01 4.3626791238784790e-01
+
+
+ <_>
+
+ 0 -1 1482 2.8947299346327782e-02
+
+
+ 4.7017949819564819e-01 6.5757977962493896e-01
+
+
+ <_>
+
+ 0 -1 1483 -2.3400049656629562e-02
+
+
+ 0. 5.1373988389968872e-01
+
+
+ <_>
+
+ 0 -1 1484 -8.9117050170898438e-02
+
+
+ 2.3745279759168625e-02 4.9424308538436890e-01
+
+
+ <_>
+
+ 0 -1 1485 -1.4054600149393082e-02
+
+
+ 3.1273230910301208e-01 5.1175111532211304e-01
+
+
+ <_>
+
+ 0 -1 1486 8.1239398568868637e-03
+
+
+ 5.0090491771697998e-01 2.5200259685516357e-01
+
+
+ <_>
+
+ 0 -1 1487 -4.9964650534093380e-03
+
+
+ 6.3871437311172485e-01 4.9278119206428528e-01
+
+
+ <_>
+
+ 0 -1 1488 3.1253970228135586e-03
+
+
+ 5.1368498802185059e-01 3.6804521083831787e-01
+
+
+ <_>
+
+ 0 -1 1489 6.7669642157852650e-03
+
+
+ 5.5098438262939453e-01 4.3636319041252136e-01
+
+
+ <_>
+
+ 0 -1 1490 -2.3711440153419971e-03
+
+
+ 6.1623352766036987e-01 4.5869469642639160e-01
+
+
+ <_>
+
+ 0 -1 1491 -5.3522791713476181e-03
+
+
+ 6.1854577064514160e-01 4.9204909801483154e-01
+
+
+ <_>
+
+ 0 -1 1492 -1.5968859195709229e-02
+
+
+ 1.3826179504394531e-01 4.9832528829574585e-01
+
+
+ <_>
+
+ 0 -1 1493 4.7676060348749161e-03
+
+
+ 4.6880578994750977e-01 5.4900461435317993e-01
+
+
+ <_>
+
+ 0 -1 1494 -2.4714691098779440e-03
+
+
+ 2.3685149848461151e-01 5.0039529800415039e-01
+
+
+ <_>
+
+ 0 -1 1495 -7.1033788844943047e-04
+
+
+ 5.8563941717147827e-01 4.7215330600738525e-01
+
+
+ <_>
+
+ 0 -1 1496 -1.4117559790611267e-01
+
+
+ 8.6900062859058380e-02 4.9615910649299622e-01
+
+
+ <_>
+
+ 0 -1 1497 1.0651809722185135e-01
+
+
+ 5.1388370990753174e-01 1.7410050332546234e-01
+
+
+ <_>
+
+ 0 -1 1498 -5.2744749933481216e-02
+
+
+ 7.3536360263824463e-01 4.7728818655014038e-01
+
+
+ <_>
+
+ 0 -1 1499 -4.7431760467588902e-03
+
+
+ 3.8844060897827148e-01 5.2927017211914062e-01
+
+
+ <_>
+
+ 0 -1 1500 9.9676765967160463e-04
+
+
+ 5.2234929800033569e-01 4.0034240484237671e-01
+
+
+ <_>
+
+ 0 -1 1501 8.0284131690859795e-03
+
+
+ 4.9591061472892761e-01 7.2129642963409424e-01
+
+
+ <_>
+
+ 0 -1 1502 8.6025858763605356e-04
+
+
+ 4.4448840618133545e-01 5.5384761095046997e-01
+
+
+ <_>
+
+ 0 -1 1503 9.3191501218825579e-04
+
+
+ 5.3983712196350098e-01 4.1632440686225891e-01
+
+
+ <_>
+
+ 0 -1 1504 -2.5082060601562262e-03
+
+
+ 5.8542650938034058e-01 4.5625001192092896e-01
+
+
+ <_>
+
+ 0 -1 1505 -2.1378761157393456e-03
+
+
+ 4.6080690622329712e-01 5.2802592515945435e-01
+
+
+ <_>
+
+ 0 -1 1506 -2.1546049974858761e-03
+
+
+ 3.7911269068717957e-01 5.2559971809387207e-01
+
+
+ <_>
+
+ 0 -1 1507 -7.6214009895920753e-03
+
+
+ 5.9986090660095215e-01 4.9520739912986755e-01
+
+
+ <_>
+
+ 0 -1 1508 2.2055360022932291e-03
+
+
+ 4.4842061400413513e-01 5.5885308980941772e-01
+
+
+ <_>
+
+ 0 -1 1509 1.2586950324475765e-03
+
+
+ 5.4507470130920410e-01 4.4238409399986267e-01
+
+
+ <_>
+
+ 0 -1 1510 -5.0926720723509789e-03
+
+
+ 4.1182750463485718e-01 5.2630358934402466e-01
+
+
+ <_>
+
+ 0 -1 1511 -2.5095739401876926e-03
+
+
+ 5.7879078388214111e-01 4.9984949827194214e-01
+
+
+ <_>
+
+ 0 -1 1512 -7.7327556908130646e-02
+
+
+ 8.3978658914566040e-01 4.8111200332641602e-01
+
+
+ <_>
+
+ 0 -1 1513 -4.1485819965600967e-02
+
+
+ 2.4086110293865204e-01 5.1769930124282837e-01
+
+
+ <_>
+
+ 0 -1 1514 1.0355669655837119e-04
+
+
+ 4.3553608655929565e-01 5.4170542955398560e-01
+
+
+ <_>
+
+ 0 -1 1515 1.3255809899419546e-03
+
+
+ 5.4539710283279419e-01 4.8940950632095337e-01
+
+
+ <_>
+
+ 0 -1 1516 -8.0598732456564903e-03
+
+
+ 5.7710242271423340e-01 4.5779189467430115e-01
+
+
+ <_>
+
+ 0 -1 1517 1.9058620557188988e-02
+
+
+ 5.1698678731918335e-01 3.4004750847816467e-01
+
+
+ <_>
+
+ 0 -1 1518 -3.5057891160249710e-02
+
+
+ 2.2032439708709717e-01 5.0005030632019043e-01
+
+
+ <_>
+
+ 0 -1 1519 5.7296059094369411e-03
+
+
+ 5.0434082746505737e-01 6.5975707769393921e-01
+
+
+ <_>
+
+ 0 -1 1520 -1.1648329906165600e-02
+
+
+ 2.1862849593162537e-01 4.9966529011726379e-01
+
+
+ <_>
+
+ 0 -1 1521 1.4544479781761765e-03
+
+
+ 5.0076818466186523e-01 5.5037277936935425e-01
+
+
+ <_>
+
+ 0 -1 1522 -2.5030909455381334e-04
+
+
+ 4.1298410296440125e-01 5.2416700124740601e-01
+
+
+ <_>
+
+ 0 -1 1523 -8.2907272735610604e-04
+
+
+ 5.4128682613372803e-01 4.9744960665702820e-01
+
+
+ <_>
+
+ 0 -1 1524 1.0862209601327777e-03
+
+
+ 4.6055299043655396e-01 5.8792287111282349e-01
+
+
+ <_>
+
+ 0 -1 1525 2.0000500080641359e-04
+
+
+ 5.2788549661636353e-01 4.7052091360092163e-01
+
+
+ <_>
+
+ 0 -1 1526 2.9212920926511288e-03
+
+
+ 5.1296097040176392e-01 3.7555369734764099e-01
+
+
+ <_>
+
+ 0 -1 1527 2.5387400761246681e-02
+
+
+ 4.8226919770240784e-01 5.7907682657241821e-01
+
+
+ <_>
+
+ 0 -1 1528 -3.1968469265848398e-03
+
+
+ 5.2483952045440674e-01 3.9628401398658752e-01
+
+
+
+
+ <_>
+ 182
+ 9.0253349304199219e+01
+
+ <_>
+
+ 0 -1 1529 5.8031738735735416e-03
+
+
+ 3.4989839792251587e-01 5.9619832038879395e-01
+
+
+ <_>
+
+ 0 -1 1530 -9.0003069490194321e-03
+
+
+ 6.8166369199752808e-01 4.4785520434379578e-01
+
+
+ <_>
+
+ 0 -1 1531 -1.1549659539014101e-03
+
+
+ 5.5857062339782715e-01 3.5782510042190552e-01
+
+
+ <_>
+
+ 0 -1 1532 -1.1069850297644734e-03
+
+
+ 5.3650361299514771e-01 3.0504280328750610e-01
+
+
+ <_>
+
+ 0 -1 1533 1.0308309720130637e-04
+
+
+ 3.6390951275825500e-01 5.3446358442306519e-01
+
+
+ <_>
+
+ 0 -1 1534 -5.0984839908778667e-03
+
+
+ 2.8591570258140564e-01 5.5042648315429688e-01
+
+
+ <_>
+
+ 0 -1 1535 8.2572200335562229e-04
+
+
+ 5.2365237474441528e-01 3.4760418534278870e-01
+
+
+ <_>
+
+ 0 -1 1536 9.9783325567841530e-03
+
+
+ 4.7503221035003662e-01 6.2196469306945801e-01
+
+
+ <_>
+
+ 0 -1 1537 -3.7402529269456863e-02
+
+
+ 3.3433759212493896e-01 5.2780628204345703e-01
+
+
+ <_>
+
+ 0 -1 1538 4.8548257909715176e-03
+
+
+ 5.1921808719635010e-01 3.7004441022872925e-01
+
+
+ <_>
+
+ 0 -1 1539 -1.8664470408111811e-03
+
+
+ 2.9298439621925354e-01 5.0919449329376221e-01
+
+
+ <_>
+
+ 0 -1 1540 1.6888890415430069e-02
+
+
+ 3.6868458986282349e-01 5.4312258958816528e-01
+
+
+ <_>
+
+ 0 -1 1541 -5.8372621424496174e-03
+
+
+ 3.6321839690208435e-01 5.2213358879089355e-01
+
+
+ <_>
+
+ 0 -1 1542 -1.4713739510625601e-03
+
+
+ 5.8706837892532349e-01 4.7006508708000183e-01
+
+
+ <_>
+
+ 0 -1 1543 -1.1522950371727347e-03
+
+
+ 3.1958949565887451e-01 5.1409542560577393e-01
+
+
+ <_>
+
+ 0 -1 1544 -4.2560300789773464e-03
+
+
+ 6.3018590211868286e-01 4.8149210214614868e-01
+
+
+ <_>
+
+ 0 -1 1545 -6.7378291860222816e-03
+
+
+ 1.9770480692386627e-01 5.0258082151412964e-01
+
+
+ <_>
+
+ 0 -1 1546 1.1382670141756535e-02
+
+
+ 4.9541321396827698e-01 6.8670457601547241e-01
+
+
+ <_>
+
+ 0 -1 1547 5.1794708706438541e-03
+
+
+ 5.1644277572631836e-01 3.3506479859352112e-01
+
+
+ <_>
+
+ 0 -1 1548 -1.1743789911270142e-01
+
+
+ 2.3152460157871246e-01 5.2344137430191040e-01
+
+
+ <_>
+
+ 0 -1 1549 2.8703449293971062e-02
+
+
+ 4.6642971038818359e-01 6.7225211858749390e-01
+
+
+ <_>
+
+ 0 -1 1550 4.8231030814349651e-03
+
+
+ 5.2208751440048218e-01 2.7235329151153564e-01
+
+
+ <_>
+
+ 0 -1 1551 2.6798530016094446e-03
+
+
+ 5.0792771577835083e-01 2.9069489240646362e-01
+
+
+ <_>
+
+ 0 -1 1552 8.0504082143306732e-03
+
+
+ 4.8859509825706482e-01 6.3950210809707642e-01
+
+
+ <_>
+
+ 0 -1 1553 4.8054959625005722e-03
+
+
+ 5.1972568035125732e-01 3.6566638946533203e-01
+
+
+ <_>
+
+ 0 -1 1554 -2.2420159075409174e-03
+
+
+ 6.1534678936004639e-01 4.7637018561363220e-01
+
+
+ <_>
+
+ 0 -1 1555 -1.3757710345089436e-02
+
+
+ 2.6373448967933655e-01 5.0309032201766968e-01
+
+
+ <_>
+
+ 0 -1 1556 -1.0338299721479416e-01
+
+
+ 2.2875219583511353e-01 5.1824611425399780e-01
+
+
+ <_>
+
+ 0 -1 1557 -9.4432085752487183e-03
+
+
+ 6.9533038139343262e-01 4.6949490904808044e-01
+
+
+ <_>
+
+ 0 -1 1558 8.0271181650459766e-04
+
+
+ 5.4506552219390869e-01 4.2687839269638062e-01
+
+
+ <_>
+
+ 0 -1 1559 -4.1945669800043106e-03
+
+
+ 6.0913878679275513e-01 4.5716428756713867e-01
+
+
+ <_>
+
+ 0 -1 1560 1.0942210443317890e-02
+
+
+ 5.2410632371902466e-01 3.2845470309257507e-01
+
+
+ <_>
+
+ 0 -1 1561 -5.7841069065034389e-04
+
+
+ 5.3879290819168091e-01 4.1793689131736755e-01
+
+
+ <_>
+
+ 0 -1 1562 -2.0888620056211948e-03
+
+
+ 4.2926910519599915e-01 5.3017157316207886e-01
+
+
+ <_>
+
+ 0 -1 1563 3.2383969519287348e-03
+
+
+ 3.7923479080200195e-01 5.2207440137863159e-01
+
+
+ <_>
+
+ 0 -1 1564 4.9075027927756310e-03
+
+
+ 5.2372831106185913e-01 4.1267579793930054e-01
+
+
+ <_>
+
+ 0 -1 1565 -3.2277941703796387e-02
+
+
+ 1.9476559758186340e-01 4.9945020675659180e-01
+
+
+ <_>
+
+ 0 -1 1566 -8.9711230248212814e-03
+
+
+ 6.0112851858139038e-01 4.9290320277214050e-01
+
+
+ <_>
+
+ 0 -1 1567 1.5321089886128902e-02
+
+
+ 5.0097537040710449e-01 2.0398220419883728e-01
+
+
+ <_>
+
+ 0 -1 1568 2.0855569746345282e-03
+
+
+ 4.8621898889541626e-01 5.7216948270797729e-01
+
+
+ <_>
+
+ 0 -1 1569 5.0615021027624607e-03
+
+
+ 5.0002187490463257e-01 1.8018059432506561e-01
+
+
+ <_>
+
+ 0 -1 1570 -3.7174751050770283e-03
+
+
+ 5.5301171541213989e-01 4.8975929617881775e-01
+
+
+ <_>
+
+ 0 -1 1571 -1.2170500122010708e-02
+
+
+ 4.1786059737205505e-01 5.3837239742279053e-01
+
+
+ <_>
+
+ 0 -1 1572 4.6248398721218109e-03
+
+
+ 4.9971699714660645e-01 5.7613271474838257e-01
+
+
+ <_>
+
+ 0 -1 1573 -2.1040429419372231e-04
+
+
+ 5.3318071365356445e-01 4.0976810455322266e-01
+
+
+ <_>
+
+ 0 -1 1574 -1.4641780406236649e-02
+
+
+ 5.7559251785278320e-01 5.0517761707305908e-01
+
+
+ <_>
+
+ 0 -1 1575 3.3199489116668701e-03
+
+
+ 4.5769768953323364e-01 6.0318058729171753e-01
+
+
+ <_>
+
+ 0 -1 1576 3.7236879579722881e-03
+
+
+ 4.3803969025611877e-01 5.4158830642700195e-01
+
+
+ <_>
+
+ 0 -1 1577 8.2951161311939359e-04
+
+
+ 5.1630318164825439e-01 3.7022191286087036e-01
+
+
+ <_>
+
+ 0 -1 1578 -1.1408490128815174e-02
+
+
+ 6.0729467868804932e-01 4.8625651001930237e-01
+
+
+ <_>
+
+ 0 -1 1579 -4.5320121571421623e-03
+
+
+ 3.2924759387969971e-01 5.0889629125595093e-01
+
+
+ <_>
+
+ 0 -1 1580 5.1276017911732197e-03
+
+
+ 4.8297679424285889e-01 6.1227089166641235e-01
+
+
+ <_>
+
+ 0 -1 1581 9.8583158105611801e-03
+
+
+ 4.6606799960136414e-01 6.5561771392822266e-01
+
+
+ <_>
+
+ 0 -1 1582 3.6985918879508972e-02
+
+
+ 5.2048492431640625e-01 1.6904720664024353e-01
+
+
+ <_>
+
+ 0 -1 1583 4.6491161920130253e-03
+
+
+ 5.1673221588134766e-01 3.7252250313758850e-01
+
+
+ <_>
+
+ 0 -1 1584 -4.2664702050387859e-03
+
+
+ 6.4064931869506836e-01 4.9873429536819458e-01
+
+
+ <_>
+
+ 0 -1 1585 -4.7956590424291790e-04
+
+
+ 5.8972930908203125e-01 4.4648739695549011e-01
+
+
+ <_>
+
+ 0 -1 1586 3.6827160511165857e-03
+
+
+ 5.4415607452392578e-01 3.4726628661155701e-01
+
+
+ <_>
+
+ 0 -1 1587 -1.0059880092740059e-02
+
+
+ 2.1431629359722137e-01 5.0048297643661499e-01
+
+
+ <_>
+
+ 0 -1 1588 -3.0361840617842972e-04
+
+
+ 5.3864240646362305e-01 4.5903238654136658e-01
+
+
+ <_>
+
+ 0 -1 1589 -1.4545479789376259e-03
+
+
+ 5.7511842250823975e-01 4.4970950484275818e-01
+
+
+ <_>
+
+ 0 -1 1590 1.6515209572389722e-03
+
+
+ 5.4219377040863037e-01 4.2385208606719971e-01
+
+
+ <_>
+
+ 0 -1 1591 -7.8468639403581619e-03
+
+
+ 4.0779209136962891e-01 5.2581572532653809e-01
+
+
+ <_>
+
+ 0 -1 1592 -5.1259850151836872e-03
+
+
+ 4.2292758822441101e-01 5.4794532060623169e-01
+
+
+ <_>
+
+ 0 -1 1593 -3.6890961229801178e-02
+
+
+ 6.5963757038116455e-01 4.6746781468391418e-01
+
+
+ <_>
+
+ 0 -1 1594 2.4035639944486320e-04
+
+
+ 4.2511358857154846e-01 5.5732029676437378e-01
+
+
+ <_>
+
+ 0 -1 1595 -1.5150169929256663e-05
+
+
+ 5.2592468261718750e-01 4.0741148591041565e-01
+
+
+ <_>
+
+ 0 -1 1596 2.2108471021056175e-03
+
+
+ 4.6717229485511780e-01 5.8863520622253418e-01
+
+
+ <_>
+
+ 0 -1 1597 -1.1568620102480054e-03
+
+
+ 5.7110661268234253e-01 4.4871619343757629e-01
+
+
+ <_>
+
+ 0 -1 1598 4.9996292218565941e-03
+
+
+ 5.2641981840133667e-01 2.8983271121978760e-01
+
+
+ <_>
+
+ 0 -1 1599 -1.4656189596280456e-03
+
+
+ 3.8917380571365356e-01 5.1978719234466553e-01
+
+
+ <_>
+
+ 0 -1 1600 -1.1975039960816503e-03
+
+
+ 5.7958728075027466e-01 4.9279558658599854e-01
+
+
+ <_>
+
+ 0 -1 1601 -4.4954330660402775e-03
+
+
+ 2.3776030540466309e-01 5.0125551223754883e-01
+
+
+ <_>
+
+ 0 -1 1602 1.4997160178609192e-04
+
+
+ 4.8766261339187622e-01 5.6176078319549561e-01
+
+
+ <_>
+
+ 0 -1 1603 2.6391509454697371e-03
+
+
+ 5.1680880784988403e-01 3.7655091285705566e-01
+
+
+ <_>
+
+ 0 -1 1604 -2.9368131072260439e-04
+
+
+ 5.4466491937637329e-01 4.8746308684349060e-01
+
+
+ <_>
+
+ 0 -1 1605 1.4211760135367513e-03
+
+
+ 4.6878978610038757e-01 6.6913318634033203e-01
+
+
+ <_>
+
+ 0 -1 1606 7.9427637159824371e-02
+
+
+ 5.1934438943862915e-01 2.7329459786415100e-01
+
+
+ <_>
+
+ 0 -1 1607 7.9937502741813660e-02
+
+
+ 4.9717310070991516e-01 1.7820839583873749e-01
+
+
+ <_>
+
+ 0 -1 1608 1.1089259758591652e-02
+
+
+ 5.1659947633743286e-01 3.2094758749008179e-01
+
+
+ <_>
+
+ 0 -1 1609 1.6560709627810866e-04
+
+
+ 4.0584719181060791e-01 5.3072762489318848e-01
+
+
+ <_>
+
+ 0 -1 1610 -5.3354292176663876e-03
+
+
+ 3.4450569748878479e-01 5.1581299304962158e-01
+
+
+ <_>
+
+ 0 -1 1611 1.1287260567769408e-03
+
+
+ 4.5948630571365356e-01 6.0755330324172974e-01
+
+
+ <_>
+
+ 0 -1 1612 -2.1969219669699669e-02
+
+
+ 1.6804009675979614e-01 5.2285957336425781e-01
+
+
+ <_>
+
+ 0 -1 1613 -2.1775320055894554e-04
+
+
+ 3.8615968823432922e-01 5.2156728506088257e-01
+
+
+ <_>
+
+ 0 -1 1614 2.0200149447191507e-04
+
+
+ 5.5179792642593384e-01 4.3630391359329224e-01
+
+
+ <_>
+
+ 0 -1 1615 -2.1733149886131287e-02
+
+
+ 7.9994601011276245e-01 4.7898510098457336e-01
+
+
+ <_>
+
+ 0 -1 1616 -8.4399932529777288e-04
+
+
+ 4.0859758853912354e-01 5.3747731447219849e-01
+
+
+ <_>
+
+ 0 -1 1617 -4.3895249837078154e-04
+
+
+ 5.4704052209854126e-01 4.3661430478096008e-01
+
+
+ <_>
+
+ 0 -1 1618 1.5092400135472417e-03
+
+
+ 4.9889969825744629e-01 5.8421492576599121e-01
+
+
+ <_>
+
+ 0 -1 1619 -3.5547839943319559e-03
+
+
+ 6.7536902427673340e-01 4.7210058569908142e-01
+
+
+ <_>
+
+ 0 -1 1620 4.8191400128416717e-04
+
+
+ 5.4158538579940796e-01 4.3571090698242188e-01
+
+
+ <_>
+
+ 0 -1 1621 -6.0264398343861103e-03
+
+
+ 2.2585099935531616e-01 4.9918809533119202e-01
+
+
+ <_>
+
+ 0 -1 1622 -1.1668140068650246e-02
+
+
+ 6.2565547227859497e-01 4.9274989962577820e-01
+
+
+ <_>
+
+ 0 -1 1623 -2.8718370012938976e-03
+
+
+ 3.9477849006652832e-01 5.2458018064498901e-01
+
+
+ <_>
+
+ 0 -1 1624 1.7051169648766518e-02
+
+
+ 4.7525110840797424e-01 5.7942241430282593e-01
+
+
+ <_>
+
+ 0 -1 1625 -1.3352080248296261e-02
+
+
+ 6.0411047935485840e-01 4.5445358753204346e-01
+
+
+ <_>
+
+ 0 -1 1626 -3.9301801007241011e-04
+
+
+ 4.2582759261131287e-01 5.5449050664901733e-01
+
+
+ <_>
+
+ 0 -1 1627 3.0483349692076445e-03
+
+
+ 5.2334201335906982e-01 3.7802729010581970e-01
+
+
+ <_>
+
+ 0 -1 1628 -4.3579288758337498e-03
+
+
+ 6.3718891143798828e-01 4.8386740684509277e-01
+
+
+ <_>
+
+ 0 -1 1629 5.6661018170416355e-03
+
+
+ 5.3747057914733887e-01 4.1636660695075989e-01
+
+
+ <_>
+
+ 0 -1 1630 6.0677339206449687e-05
+
+
+ 4.6387958526611328e-01 5.3116250038146973e-01
+
+
+ <_>
+
+ 0 -1 1631 3.6738160997629166e-02
+
+
+ 4.6886560320854187e-01 6.4665240049362183e-01
+
+
+ <_>
+
+ 0 -1 1632 8.6528137326240540e-03
+
+
+ 5.2043187618255615e-01 2.1886579692363739e-01
+
+
+ <_>
+
+ 0 -1 1633 -1.5371359884738922e-01
+
+
+ 1.6303719580173492e-01 4.9588400125503540e-01
+
+
+ <_>
+
+ 0 -1 1634 -4.1560421232134104e-04
+
+
+ 5.7744592428207397e-01 4.6964588761329651e-01
+
+
+ <_>
+
+ 0 -1 1635 -1.2640169588848948e-03
+
+
+ 3.9771759510040283e-01 5.2171981334686279e-01
+
+
+ <_>
+
+ 0 -1 1636 -3.5473341122269630e-03
+
+
+ 6.0465282201766968e-01 4.8083150386810303e-01
+
+
+ <_>
+
+ 0 -1 1637 3.0019069527043030e-05
+
+
+ 3.9967238903045654e-01 5.2282011508941650e-01
+
+
+ <_>
+
+ 0 -1 1638 1.3113019522279501e-03
+
+
+ 4.7121581435203552e-01 5.7659977674484253e-01
+
+
+ <_>
+
+ 0 -1 1639 -1.3374709524214268e-03
+
+
+ 4.1095849871635437e-01 5.2531701326370239e-01
+
+
+ <_>
+
+ 0 -1 1640 2.0876709371805191e-02
+
+
+ 5.2029937505722046e-01 1.7579819262027740e-01
+
+
+ <_>
+
+ 0 -1 1641 -7.5497948564589024e-03
+
+
+ 6.5666097402572632e-01 4.6949750185012817e-01
+
+
+ <_>
+
+ 0 -1 1642 2.4188550189137459e-02
+
+
+ 5.1286739110946655e-01 3.3702209591865540e-01
+
+
+ <_>
+
+ 0 -1 1643 -2.9358828905969858e-03
+
+
+ 6.5807867050170898e-01 4.6945410966873169e-01
+
+
+ <_>
+
+ 0 -1 1644 5.7557929307222366e-02
+
+
+ 5.1464450359344482e-01 2.7752599120140076e-01
+
+
+ <_>
+
+ 0 -1 1645 -1.1343370424583554e-03
+
+
+ 3.8366019725799561e-01 5.1926672458648682e-01
+
+
+ <_>
+
+ 0 -1 1646 1.6816999763250351e-02
+
+
+ 5.0855928659439087e-01 6.1772608757019043e-01
+
+
+ <_>
+
+ 0 -1 1647 5.0535178743302822e-03
+
+
+ 5.1387631893157959e-01 3.6847919225692749e-01
+
+
+ <_>
+
+ 0 -1 1648 -4.5874710194766521e-03
+
+
+ 5.9896552562713623e-01 4.8352020978927612e-01
+
+
+ <_>
+
+ 0 -1 1649 1.6882460331544280e-03
+
+
+ 4.5094868540763855e-01 5.7230567932128906e-01
+
+
+ <_>
+
+ 0 -1 1650 -1.6554000321775675e-03
+
+
+ 3.4967708587646484e-01 5.2433192729949951e-01
+
+
+ <_>
+
+ 0 -1 1651 -1.9373800605535507e-02
+
+
+ 1.1205369979143143e-01 4.9687129259109497e-01
+
+
+ <_>
+
+ 0 -1 1652 1.0374450124800205e-02
+
+
+ 5.1481968164443970e-01 4.3952131271362305e-01
+
+
+ <_>
+
+ 0 -1 1653 1.4973050565458834e-04
+
+
+ 4.0849998593330383e-01 5.2698868513107300e-01
+
+
+ <_>
+
+ 0 -1 1654 -4.2981930077075958e-02
+
+
+ 6.3941049575805664e-01 5.0185042619705200e-01
+
+
+ <_>
+
+ 0 -1 1655 8.3065936341881752e-03
+
+
+ 4.7075539827346802e-01 6.6983532905578613e-01
+
+
+ <_>
+
+ 0 -1 1656 -4.1285790503025055e-03
+
+
+ 4.5413690805435181e-01 5.3236472606658936e-01
+
+
+ <_>
+
+ 0 -1 1657 1.7399420030415058e-03
+
+
+ 4.3339619040489197e-01 5.4398661851882935e-01
+
+
+ <_>
+
+ 0 -1 1658 1.1739750334527344e-04
+
+
+ 4.5796871185302734e-01 5.5434262752532959e-01
+
+
+ <_>
+
+ 0 -1 1659 1.8585780344437808e-04
+
+
+ 4.3246439099311829e-01 5.4267549514770508e-01
+
+
+ <_>
+
+ 0 -1 1660 5.5587692186236382e-03
+
+
+ 5.2572208642959595e-01 3.5506111383438110e-01
+
+
+ <_>
+
+ 0 -1 1661 -7.9851560294628143e-03
+
+
+ 6.0430181026458740e-01 4.6306359767913818e-01
+
+
+ <_>
+
+ 0 -1 1662 6.0594122624024749e-04
+
+
+ 4.5982548594474792e-01 5.5331951379776001e-01
+
+
+ <_>
+
+ 0 -1 1663 -2.2983040253166109e-04
+
+
+ 4.1307520866394043e-01 5.3224611282348633e-01
+
+
+ <_>
+
+ 0 -1 1664 4.3740210821852088e-04
+
+
+ 4.0430399775505066e-01 5.4092890024185181e-01
+
+
+ <_>
+
+ 0 -1 1665 2.9482020181603730e-04
+
+
+ 4.4949638843536377e-01 5.6288522481918335e-01
+
+
+ <_>
+
+ 0 -1 1666 1.0312659665942192e-02
+
+
+ 5.1775109767913818e-01 2.7043169736862183e-01
+
+
+ <_>
+
+ 0 -1 1667 -7.7241109684109688e-03
+
+
+ 1.9880190491676331e-01 4.9805539846420288e-01
+
+
+ <_>
+
+ 0 -1 1668 -4.6797208487987518e-03
+
+
+ 6.6447502374649048e-01 5.0182962417602539e-01
+
+
+ <_>
+
+ 0 -1 1669 -5.0755459815263748e-03
+
+
+ 3.8983049988746643e-01 5.1852691173553467e-01
+
+
+ <_>
+
+ 0 -1 1670 2.2479740437120199e-03
+
+
+ 4.8018088936805725e-01 5.6603360176086426e-01
+
+
+ <_>
+
+ 0 -1 1671 8.3327008178457618e-04
+
+
+ 5.2109199762344360e-01 3.9571881294250488e-01
+
+
+ <_>
+
+ 0 -1 1672 -4.1279330849647522e-02
+
+
+ 6.1545419692993164e-01 5.0070542097091675e-01
+
+
+ <_>
+
+ 0 -1 1673 -5.0930189900100231e-04
+
+
+ 3.9759421348571777e-01 5.2284038066864014e-01
+
+
+ <_>
+
+ 0 -1 1674 1.2568780221045017e-03
+
+
+ 4.9791380763053894e-01 5.9391832351684570e-01
+
+
+ <_>
+
+ 0 -1 1675 8.0048497766256332e-03
+
+
+ 4.9844971299171448e-01 1.6333660483360291e-01
+
+
+ <_>
+
+ 0 -1 1676 -1.1879300000146031e-03
+
+
+ 5.9049648046493530e-01 4.9426248669624329e-01
+
+
+ <_>
+
+ 0 -1 1677 6.1948952497914433e-04
+
+
+ 4.1995579004287720e-01 5.3287261724472046e-01
+
+
+ <_>
+
+ 0 -1 1678 6.6829859279096127e-03
+
+
+ 5.4186028242111206e-01 4.9058890342712402e-01
+
+
+ <_>
+
+ 0 -1 1679 -3.7062340416014194e-03
+
+
+ 3.7259390950202942e-01 5.1380002498626709e-01
+
+
+ <_>
+
+ 0 -1 1680 -3.9739411324262619e-02
+
+
+ 6.4789611101150513e-01 5.0503468513488770e-01
+
+
+ <_>
+
+ 0 -1 1681 1.4085009461268783e-03
+
+
+ 4.6823391318321228e-01 6.3778841495513916e-01
+
+
+ <_>
+
+ 0 -1 1682 3.9322688826359808e-04
+
+
+ 5.4585301876068115e-01 4.1504821181297302e-01
+
+
+ <_>
+
+ 0 -1 1683 -1.8979819724336267e-03
+
+
+ 3.6901599168777466e-01 5.1497042179107666e-01
+
+
+ <_>
+
+ 0 -1 1684 -1.3970440253615379e-02
+
+
+ 6.0505628585815430e-01 4.8113578557968140e-01
+
+
+ <_>
+
+ 0 -1 1685 -1.0100819915533066e-01
+
+
+ 2.0170800387859344e-01 4.9923619627952576e-01
+
+
+ <_>
+
+ 0 -1 1686 -1.7346920445561409e-02
+
+
+ 5.7131487131118774e-01 4.8994860053062439e-01
+
+
+ <_>
+
+ 0 -1 1687 1.5619759506080300e-04
+
+
+ 4.2153888940811157e-01 5.3926420211791992e-01
+
+
+ <_>
+
+ 0 -1 1688 1.3438929617404938e-01
+
+
+ 5.1361519098281860e-01 3.7676128745079041e-01
+
+
+ <_>
+
+ 0 -1 1689 -2.4582240730524063e-02
+
+
+ 7.0273578166961670e-01 4.7479069232940674e-01
+
+
+ <_>
+
+ 0 -1 1690 -3.8553720805794001e-03
+
+
+ 4.3174090981483459e-01 5.4277169704437256e-01
+
+
+ <_>
+
+ 0 -1 1691 -2.3165249731391668e-03
+
+
+ 5.9426987171173096e-01 4.6186479926109314e-01
+
+
+ <_>
+
+ 0 -1 1692 -4.8518120311200619e-03
+
+
+ 6.1915689706802368e-01 4.8848950862884521e-01
+
+
+ <_>
+
+ 0 -1 1693 2.4699938949197531e-03
+
+
+ 5.2566647529602051e-01 4.0171998739242554e-01
+
+
+ <_>
+
+ 0 -1 1694 4.5496959239244461e-02
+
+
+ 5.2378678321838379e-01 2.6857739686965942e-01
+
+
+ <_>
+
+ 0 -1 1695 -2.0319599658250809e-02
+
+
+ 2.1304459869861603e-01 4.9797388911247253e-01
+
+
+ <_>
+
+ 0 -1 1696 2.6994998916052282e-04
+
+
+ 4.8140418529510498e-01 5.5431222915649414e-01
+
+
+ <_>
+
+ 0 -1 1697 -1.8232699949294329e-03
+
+
+ 6.4825797080993652e-01 4.7099891304969788e-01
+
+
+ <_>
+
+ 0 -1 1698 -6.3015790656208992e-03
+
+
+ 4.5819279551506042e-01 5.3062361478805542e-01
+
+
+ <_>
+
+ 0 -1 1699 -2.4139499873854220e-04
+
+
+ 5.2320867776870728e-01 4.0517631173133850e-01
+
+
+ <_>
+
+ 0 -1 1700 -1.0330369696021080e-03
+
+
+ 5.5562019348144531e-01 4.7891938686370850e-01
+
+
+ <_>
+
+ 0 -1 1701 1.8041160365100950e-04
+
+
+ 5.2294427156448364e-01 4.0118101239204407e-01
+
+
+ <_>
+
+ 0 -1 1702 -6.1407860368490219e-02
+
+
+ 6.2986820936203003e-01 5.0107032060623169e-01
+
+
+ <_>
+
+ 0 -1 1703 -6.9543913006782532e-02
+
+
+ 7.2282809019088745e-01 4.7731840610504150e-01
+
+
+ <_>
+
+ 0 -1 1704 -7.0542663335800171e-02
+
+
+ 2.2695130109786987e-01 5.1825290918350220e-01
+
+
+ <_>
+
+ 0 -1 1705 2.4423799477517605e-03
+
+
+ 5.2370971441268921e-01 4.0981510281562805e-01
+
+
+ <_>
+
+ 0 -1 1706 1.5494349645450711e-03
+
+
+ 4.7737509012222290e-01 5.4680430889129639e-01
+
+
+ <_>
+
+ 0 -1 1707 -2.3914219811558723e-02
+
+
+ 7.1469759941101074e-01 4.7838249802589417e-01
+
+
+ <_>
+
+ 0 -1 1708 -1.2453690171241760e-02
+
+
+ 2.6352968811988831e-01 5.2411228418350220e-01
+
+
+ <_>
+
+ 0 -1 1709 -2.0760179904755205e-04
+
+
+ 3.6237570643424988e-01 5.1136088371276855e-01
+
+
+ <_>
+
+ 0 -1 1710 2.9781080229440704e-05
+
+
+ 4.7059321403503418e-01 5.4328018426895142e-01
+
+
+
+
+ <_>
+ 211
+ 1.0474919891357422e+02
+
+ <_>
+
+ 0 -1 1711 1.1772749945521355e-02
+
+
+ 3.8605189323425293e-01 6.4211672544479370e-01
+
+
+ <_>
+
+ 0 -1 1712 2.7037570253014565e-02
+
+
+ 4.3856549263000488e-01 6.7540389299392700e-01
+
+
+ <_>
+
+ 0 -1 1713 -3.6419500247575343e-05
+
+
+ 5.4871010780334473e-01 3.4233158826828003e-01
+
+
+ <_>
+
+ 0 -1 1714 1.9995409529656172e-03
+
+
+ 3.2305321097373962e-01 5.4003179073333740e-01
+
+
+ <_>
+
+ 0 -1 1715 4.5278300531208515e-03
+
+
+ 5.0916397571563721e-01 2.9350438714027405e-01
+
+
+ <_>
+
+ 0 -1 1716 4.7890920541249216e-04
+
+
+ 4.1781538724899292e-01 5.3440642356872559e-01
+
+
+ <_>
+
+ 0 -1 1717 1.1720920447260141e-03
+
+
+ 2.8991821408271790e-01 5.1320707798004150e-01
+
+
+ <_>
+
+ 0 -1 1718 9.5305702416226268e-04
+
+
+ 4.2801249027252197e-01 5.5608451366424561e-01
+
+
+ <_>
+
+ 0 -1 1719 1.5099150004971307e-05
+
+
+ 4.0448719263076782e-01 5.4047602415084839e-01
+
+
+ <_>
+
+ 0 -1 1720 -6.0817901976406574e-04
+
+
+ 4.2717689275741577e-01 5.5034661293029785e-01
+
+
+ <_>
+
+ 0 -1 1721 3.3224520739167929e-03
+
+
+ 3.9627239108085632e-01 5.3697347640991211e-01
+
+
+ <_>
+
+ 0 -1 1722 -1.1037490330636501e-03
+
+
+ 4.7271779179573059e-01 5.2377498149871826e-01
+
+
+ <_>
+
+ 0 -1 1723 -1.4350269921123981e-03
+
+
+ 5.6030082702636719e-01 4.2235091328620911e-01
+
+
+ <_>
+
+ 0 -1 1724 2.0767399109899998e-03
+
+
+ 5.2259171009063721e-01 4.7327259182929993e-01
+
+
+ <_>
+
+ 0 -1 1725 -1.6412809782195836e-04
+
+
+ 3.9990758895874023e-01 5.4327398538589478e-01
+
+
+ <_>
+
+ 0 -1 1726 8.8302437216043472e-03
+
+
+ 4.6783858537673950e-01 6.0273271799087524e-01
+
+
+ <_>
+
+ 0 -1 1727 -1.0552070103585720e-02
+
+
+ 3.4939670562744141e-01 5.2139747142791748e-01
+
+
+ <_>
+
+ 0 -1 1728 -2.2731600329279900e-03
+
+
+ 6.1858189105987549e-01 4.7490629553794861e-01
+
+
+ <_>
+
+ 0 -1 1729 -8.4786332445219159e-04
+
+
+ 5.2853411436080933e-01 3.8434821367263794e-01
+
+
+ <_>
+
+ 0 -1 1730 1.2081359745934606e-03
+
+
+ 5.3606408834457397e-01 3.4473359584808350e-01
+
+
+ <_>
+
+ 0 -1 1731 2.6512730401009321e-03
+
+
+ 4.5582920312881470e-01 6.1939620971679688e-01
+
+
+ <_>
+
+ 0 -1 1732 -1.1012479662895203e-03
+
+
+ 3.6802300810813904e-01 5.3276282548904419e-01
+
+
+ <_>
+
+ 0 -1 1733 4.9561518244445324e-04
+
+
+ 3.9605951309204102e-01 5.2749407291412354e-01
+
+
+ <_>
+
+ 0 -1 1734 -4.3901771306991577e-02
+
+
+ 7.0204448699951172e-01 4.9928390979766846e-01
+
+
+ <_>
+
+ 0 -1 1735 3.4690350294113159e-02
+
+
+ 5.0491642951965332e-01 2.7666029334068298e-01
+
+
+ <_>
+
+ 0 -1 1736 -2.7442190330475569e-03
+
+
+ 2.6726329326629639e-01 5.2749711275100708e-01
+
+
+ <_>
+
+ 0 -1 1737 3.3316588960587978e-03
+
+
+ 4.5794829726219177e-01 6.0011017322540283e-01
+
+
+ <_>
+
+ 0 -1 1738 -2.0044570788741112e-02
+
+
+ 3.1715941429138184e-01 5.2357178926467896e-01
+
+
+ <_>
+
+ 0 -1 1739 1.3492030557245016e-03
+
+
+ 5.2653628587722778e-01 4.0343248844146729e-01
+
+
+ <_>
+
+ 0 -1 1740 2.9702018946409225e-03
+
+
+ 5.3324568271636963e-01 4.5719841122627258e-01
+
+
+ <_>
+
+ 0 -1 1741 6.3039981760084629e-03
+
+
+ 4.5933109521865845e-01 6.0346359014511108e-01
+
+
+ <_>
+
+ 0 -1 1742 -1.2936590239405632e-02
+
+
+ 4.4379639625549316e-01 5.3729712963104248e-01
+
+
+ <_>
+
+ 0 -1 1743 4.0148729458451271e-03
+
+
+ 4.6803238987922668e-01 6.4378339052200317e-01
+
+
+ <_>
+
+ 0 -1 1744 -2.6401679497212172e-03
+
+
+ 3.7096318602561951e-01 5.3143328428268433e-01
+
+
+ <_>
+
+ 0 -1 1745 1.3918439857661724e-02
+
+
+ 4.7235551476478577e-01 7.1308088302612305e-01
+
+
+ <_>
+
+ 0 -1 1746 -4.5087869511917233e-04
+
+
+ 4.4923940300941467e-01 5.3704041242599487e-01
+
+
+ <_>
+
+ 0 -1 1747 2.5384349282830954e-04
+
+
+ 4.4068640470504761e-01 5.5144029855728149e-01
+
+
+ <_>
+
+ 0 -1 1748 2.2710000630468130e-03
+
+
+ 4.6824169158935547e-01 5.9679841995239258e-01
+
+
+ <_>
+
+ 0 -1 1749 2.4120779708027840e-03
+
+
+ 5.0793921947479248e-01 3.0185988545417786e-01
+
+
+ <_>
+
+ 0 -1 1750 -3.6025670851813629e-05
+
+
+ 5.6010371446609497e-01 4.4710969924926758e-01
+
+
+ <_>
+
+ 0 -1 1751 -7.4905529618263245e-03
+
+
+ 2.2075350582599640e-01 4.9899441003799438e-01
+
+
+ <_>
+
+ 0 -1 1752 -1.7513120546936989e-02
+
+
+ 6.5312159061431885e-01 5.0176489353179932e-01
+
+
+ <_>
+
+ 0 -1 1753 1.4281630516052246e-01
+
+
+ 4.9679630994796753e-01 1.4820620417594910e-01
+
+
+ <_>
+
+ 0 -1 1754 5.5345268920063972e-03
+
+
+ 4.8989468812942505e-01 5.9542238712310791e-01
+
+
+ <_>
+
+ 0 -1 1755 -9.6323591424152255e-04
+
+
+ 3.9271169900894165e-01 5.1960742473602295e-01
+
+
+ <_>
+
+ 0 -1 1756 -2.0370010752230883e-03
+
+
+ 5.6133252382278442e-01 4.8848581314086914e-01
+
+
+ <_>
+
+ 0 -1 1757 1.6614829655736685e-03
+
+
+ 4.4728800654411316e-01 5.5788809061050415e-01
+
+
+ <_>
+
+ 0 -1 1758 -3.1188090797513723e-03
+
+
+ 3.8405328989028931e-01 5.3974777460098267e-01
+
+
+ <_>
+
+ 0 -1 1759 -6.4000617712736130e-03
+
+
+ 5.8439838886260986e-01 4.5332181453704834e-01
+
+
+ <_>
+
+ 0 -1 1760 3.1319601112045348e-04
+
+
+ 5.4392218589782715e-01 4.2347279191017151e-01
+
+
+ <_>
+
+ 0 -1 1761 -1.8222099170088768e-02
+
+
+ 1.2884649634361267e-01 4.9584048986434937e-01
+
+
+ <_>
+
+ 0 -1 1762 8.7969247251749039e-03
+
+
+ 4.9512979388237000e-01 7.1534800529479980e-01
+
+
+ <_>
+
+ 0 -1 1763 -4.2395070195198059e-03
+
+
+ 3.9465999603271484e-01 5.1949369907379150e-01
+
+
+ <_>
+
+ 0 -1 1764 9.7086271271109581e-03
+
+
+ 4.8975038528442383e-01 6.0649001598358154e-01
+
+
+ <_>
+
+ 0 -1 1765 -3.9934171363711357e-03
+
+
+ 3.2454401254653931e-01 5.0608289241790771e-01
+
+
+ <_>
+
+ 0 -1 1766 -1.6785059124231339e-02
+
+
+ 1.5819530189037323e-01 5.2037787437438965e-01
+
+
+ <_>
+
+ 0 -1 1767 1.8272090703248978e-02
+
+
+ 4.6809351444244385e-01 6.6269791126251221e-01
+
+
+ <_>
+
+ 0 -1 1768 5.6872838176786900e-03
+
+
+ 5.2116978168487549e-01 3.5121849179267883e-01
+
+
+ <_>
+
+ 0 -1 1769 -1.0739039862528443e-03
+
+
+ 5.7683861255645752e-01 4.5298451185226440e-01
+
+
+ <_>
+
+ 0 -1 1770 -3.7093870341777802e-03
+
+
+ 4.5077630877494812e-01 5.3135812282562256e-01
+
+
+ <_>
+
+ 0 -1 1771 -2.1110709349159151e-04
+
+
+ 5.4608201980590820e-01 4.3333768844604492e-01
+
+
+ <_>
+
+ 0 -1 1772 1.0670139454305172e-03
+
+
+ 5.3718560934066772e-01 4.0783908963203430e-01
+
+
+ <_>
+
+ 0 -1 1773 3.5943021066486835e-03
+
+
+ 4.4712871313095093e-01 5.6438362598419189e-01
+
+
+ <_>
+
+ 0 -1 1774 -5.1776031032204628e-03
+
+
+ 4.4993931055068970e-01 5.2803301811218262e-01
+
+
+ <_>
+
+ 0 -1 1775 -2.5414369883947074e-04
+
+
+ 5.5161732435226440e-01 4.4077080488204956e-01
+
+
+ <_>
+
+ 0 -1 1776 6.3522560521960258e-03
+
+
+ 5.1941901445388794e-01 2.4652279913425446e-01
+
+
+ <_>
+
+ 0 -1 1777 -4.4205080484971404e-04
+
+
+ 3.8307058811187744e-01 5.1396822929382324e-01
+
+
+ <_>
+
+ 0 -1 1778 7.4488727841526270e-04
+
+
+ 4.8910909891128540e-01 5.9747868776321411e-01
+
+
+ <_>
+
+ 0 -1 1779 -3.5116379149258137e-03
+
+
+ 7.4136817455291748e-01 4.7687649726867676e-01
+
+
+ <_>
+
+ 0 -1 1780 -1.2540910392999649e-02
+
+
+ 3.6488190293312073e-01 5.2528268098831177e-01
+
+
+ <_>
+
+ 0 -1 1781 9.4931852072477341e-03
+
+
+ 5.1004928350448608e-01 3.6295869946479797e-01
+
+
+ <_>
+
+ 0 -1 1782 1.2961150147020817e-02
+
+
+ 5.2324420213699341e-01 4.3335610628128052e-01
+
+
+ <_>
+
+ 0 -1 1783 4.7209449112415314e-03
+
+
+ 4.6481490135192871e-01 6.3310527801513672e-01
+
+
+ <_>
+
+ 0 -1 1784 -2.3119079414755106e-03
+
+
+ 5.9303098917007446e-01 4.5310580730438232e-01
+
+
+ <_>
+
+ 0 -1 1785 -2.8262299019843340e-03
+
+
+ 3.8704779744148254e-01 5.2571010589599609e-01
+
+
+ <_>
+
+ 0 -1 1786 -1.4311339473351836e-03
+
+
+ 5.5225032567977905e-01 4.5618548989295959e-01
+
+
+ <_>
+
+ 0 -1 1787 1.9378310535103083e-03
+
+
+ 4.5462208986282349e-01 5.7369667291641235e-01
+
+
+ <_>
+
+ 0 -1 1788 2.6343559147790074e-04
+
+
+ 5.3457391262054443e-01 4.5718750357627869e-01
+
+
+ <_>
+
+ 0 -1 1789 7.8257522545754910e-04
+
+
+ 3.9678159356117249e-01 5.2201879024505615e-01
+
+
+ <_>
+
+ 0 -1 1790 -1.9550440832972527e-02
+
+
+ 2.8296428918838501e-01 5.2435082197189331e-01
+
+
+ <_>
+
+ 0 -1 1791 4.3914958951063454e-04
+
+
+ 4.5900669693946838e-01 5.8990901708602905e-01
+
+
+ <_>
+
+ 0 -1 1792 2.1452000364661217e-02
+
+
+ 5.2314108610153198e-01 2.8553789854049683e-01
+
+
+ <_>
+
+ 0 -1 1793 5.8973580598831177e-04
+
+
+ 4.3972569704055786e-01 5.5064219236373901e-01
+
+
+ <_>
+
+ 0 -1 1794 -2.6157610118389130e-02
+
+
+ 3.1350791454315186e-01 5.1891750097274780e-01
+
+
+ <_>
+
+ 0 -1 1795 -1.3959860429167747e-02
+
+
+ 3.2132729887962341e-01 5.0407177209854126e-01
+
+
+ <_>
+
+ 0 -1 1796 -6.3699018210172653e-03
+
+
+ 6.3875448703765869e-01 4.8495069146156311e-01
+
+
+ <_>
+
+ 0 -1 1797 -8.5613820701837540e-03
+
+
+ 2.7591320872306824e-01 5.0320190191268921e-01
+
+
+ <_>
+
+ 0 -1 1798 9.6622901037335396e-04
+
+
+ 4.6856409311294556e-01 5.8348792791366577e-01
+
+
+ <_>
+
+ 0 -1 1799 7.6550268568098545e-04
+
+
+ 5.1752072572708130e-01 3.8964220881462097e-01
+
+
+ <_>
+
+ 0 -1 1800 -8.1833340227603912e-03
+
+
+ 2.0691369473934174e-01 5.2081221342086792e-01
+
+
+ <_>
+
+ 0 -1 1801 -9.3976939097046852e-03
+
+
+ 6.1340910196304321e-01 4.6412229537963867e-01
+
+
+ <_>
+
+ 0 -1 1802 4.8028980381786823e-03
+
+
+ 5.4541081190109253e-01 4.3952199816703796e-01
+
+
+ <_>
+
+ 0 -1 1803 -3.5680569708347321e-03
+
+
+ 6.3444852828979492e-01 4.6810939908027649e-01
+
+
+ <_>
+
+ 0 -1 1804 4.0733120404183865e-03
+
+
+ 5.2926832437515259e-01 4.0156200528144836e-01
+
+
+ <_>
+
+ 0 -1 1805 1.2568129459396005e-03
+
+
+ 4.3929880857467651e-01 5.4528248310089111e-01
+
+
+ <_>
+
+ 0 -1 1806 -2.9065010603517294e-03
+
+
+ 5.8988320827484131e-01 4.8633798956871033e-01
+
+
+ <_>
+
+ 0 -1 1807 -2.4409340694546700e-03
+
+
+ 4.0693649649620056e-01 5.2474218606948853e-01
+
+
+ <_>
+
+ 0 -1 1808 2.4830700829625130e-02
+
+
+ 5.1827257871627808e-01 3.6825248599052429e-01
+
+
+ <_>
+
+ 0 -1 1809 -4.8854008316993713e-02
+
+
+ 1.3075779378414154e-01 4.9612811207771301e-01
+
+
+ <_>
+
+ 0 -1 1810 -1.6110379947349429e-03
+
+
+ 6.4210057258605957e-01 4.8726621270179749e-01
+
+
+ <_>
+
+ 0 -1 1811 -9.7009479999542236e-02
+
+
+ 4.7769349068403244e-02 4.9509888887405396e-01
+
+
+ <_>
+
+ 0 -1 1812 1.1209240183234215e-03
+
+
+ 4.6162670850753784e-01 5.3547459840774536e-01
+
+
+ <_>
+
+ 0 -1 1813 -1.3064090162515640e-03
+
+
+ 6.2618541717529297e-01 4.6388059854507446e-01
+
+
+ <_>
+
+ 0 -1 1814 4.5771620352752507e-04
+
+
+ 5.3844177722930908e-01 4.6466401219367981e-01
+
+
+ <_>
+
+ 0 -1 1815 -6.3149951165542006e-04
+
+
+ 3.8040471076965332e-01 5.1302570104598999e-01
+
+
+ <_>
+
+ 0 -1 1816 1.4505970466416329e-04
+
+
+ 4.5543101429939270e-01 5.6644618511199951e-01
+
+
+ <_>
+
+ 0 -1 1817 -1.6474550589919090e-02
+
+
+ 6.5969580411911011e-01 4.7158598899841309e-01
+
+
+ <_>
+
+ 0 -1 1818 1.3369579799473286e-02
+
+
+ 5.1954662799835205e-01 3.0359649658203125e-01
+
+
+ <_>
+
+ 0 -1 1819 1.0271780047332868e-04
+
+
+ 5.2291762828826904e-01 4.1070660948753357e-01
+
+
+ <_>
+
+ 0 -1 1820 -5.5311559699475765e-03
+
+
+ 6.3528877496719360e-01 4.9609071016311646e-01
+
+
+ <_>
+
+ 0 -1 1821 -2.6187049224972725e-03
+
+
+ 3.8245460391044617e-01 5.1409840583801270e-01
+
+
+ <_>
+
+ 0 -1 1822 5.0834268331527710e-03
+
+
+ 4.9504399299621582e-01 6.2208187580108643e-01
+
+
+ <_>
+
+ 0 -1 1823 7.9818159341812134e-02
+
+
+ 4.9523359537124634e-01 1.3224759697914124e-01
+
+
+ <_>
+
+ 0 -1 1824 -9.9226586520671844e-02
+
+
+ 7.5427287817001343e-01 5.0084167718887329e-01
+
+
+ <_>
+
+ 0 -1 1825 -6.5174017800018191e-04
+
+
+ 3.6993029713630676e-01 5.1301211118698120e-01
+
+
+ <_>
+
+ 0 -1 1826 -1.8996849656105042e-02
+
+
+ 6.6891789436340332e-01 4.9212029576301575e-01
+
+
+ <_>
+
+ 0 -1 1827 1.7346899956464767e-02
+
+
+ 4.9833008646965027e-01 1.8591980636119843e-01
+
+
+ <_>
+
+ 0 -1 1828 5.5082101607695222e-04
+
+
+ 4.5744240283966064e-01 5.5221217870712280e-01
+
+
+ <_>
+
+ 0 -1 1829 2.0056050270795822e-03
+
+
+ 5.1317447423934937e-01 3.8564699888229370e-01
+
+
+ <_>
+
+ 0 -1 1830 -7.7688191086053848e-03
+
+
+ 4.3617001175880432e-01 5.4343092441558838e-01
+
+
+ <_>
+
+ 0 -1 1831 5.0878278911113739e-02
+
+
+ 4.6827208995819092e-01 6.8406397104263306e-01
+
+
+ <_>
+
+ 0 -1 1832 -2.2901780903339386e-03
+
+
+ 4.3292450904846191e-01 5.3060990571975708e-01
+
+
+ <_>
+
+ 0 -1 1833 -1.5715380141045898e-04
+
+
+ 5.3700572252273560e-01 4.3781641125679016e-01
+
+
+ <_>
+
+ 0 -1 1834 1.0519240051507950e-01
+
+
+ 5.1372742652893066e-01 6.7361466586589813e-02
+
+
+ <_>
+
+ 0 -1 1835 2.7198919560760260e-03
+
+
+ 4.1120609641075134e-01 5.2556651830673218e-01
+
+
+ <_>
+
+ 0 -1 1836 4.8337779939174652e-02
+
+
+ 5.4046237468719482e-01 4.4389671087265015e-01
+
+
+ <_>
+
+ 0 -1 1837 9.5703761326149106e-04
+
+
+ 4.3559691309928894e-01 5.3995108604431152e-01
+
+
+ <_>
+
+ 0 -1 1838 -2.5371259078383446e-02
+
+
+ 5.9951752424240112e-01 5.0310248136520386e-01
+
+
+ <_>
+
+ 0 -1 1839 5.2457951009273529e-02
+
+
+ 4.9502879381179810e-01 1.3983510434627533e-01
+
+
+ <_>
+
+ 0 -1 1840 -1.2365629896521568e-02
+
+
+ 6.3972991704940796e-01 4.9641060829162598e-01
+
+
+ <_>
+
+ 0 -1 1841 -1.4589719474315643e-01
+
+
+ 1.0016699880361557e-01 4.9463221430778503e-01
+
+
+ <_>
+
+ 0 -1 1842 -1.5908600762486458e-02
+
+
+ 3.3123299479484558e-01 5.2083408832550049e-01
+
+
+ <_>
+
+ 0 -1 1843 3.9486068999394774e-04
+
+
+ 4.4063639640808105e-01 5.4261028766632080e-01
+
+
+ <_>
+
+ 0 -1 1844 -5.2454001270234585e-03
+
+
+ 2.7995899319648743e-01 5.1899671554565430e-01
+
+
+ <_>
+
+ 0 -1 1845 -5.0421799533069134e-03
+
+
+ 6.9875800609588623e-01 4.7521421313285828e-01
+
+
+ <_>
+
+ 0 -1 1846 2.9812189750373363e-03
+
+
+ 4.9832889437675476e-01 6.3074797391891479e-01
+
+
+ <_>
+
+ 0 -1 1847 -7.2884308174252510e-03
+
+
+ 2.9823330044746399e-01 5.0268697738647461e-01
+
+
+ <_>
+
+ 0 -1 1848 1.5094350092113018e-03
+
+
+ 5.3084421157836914e-01 3.8329708576202393e-01
+
+
+ <_>
+
+ 0 -1 1849 -9.3340799212455750e-03
+
+
+ 2.0379640161991119e-01 4.9698171019554138e-01
+
+
+ <_>
+
+ 0 -1 1850 2.8667140752077103e-02
+
+
+ 5.0256967544555664e-01 6.9280272722244263e-01
+
+
+ <_>
+
+ 0 -1 1851 1.7019680142402649e-01
+
+
+ 4.9600529670715332e-01 1.4764429628849030e-01
+
+
+ <_>
+
+ 0 -1 1852 -3.2614478841423988e-03
+
+
+ 5.6030637025833130e-01 4.8260560631752014e-01
+
+
+ <_>
+
+ 0 -1 1853 5.5769277969375253e-04
+
+
+ 5.2055621147155762e-01 4.1296330094337463e-01
+
+
+ <_>
+
+ 0 -1 1854 3.6258339881896973e-01
+
+
+ 5.2216529846191406e-01 3.7686121463775635e-01
+
+
+ <_>
+
+ 0 -1 1855 -1.1615130119025707e-02
+
+
+ 6.0226827859878540e-01 4.6374899148941040e-01
+
+
+ <_>
+
+ 0 -1 1856 -4.0795197710394859e-03
+
+
+ 4.0704470872879028e-01 5.3374791145324707e-01
+
+
+ <_>
+
+ 0 -1 1857 5.7204300537705421e-04
+
+
+ 4.6018350124359131e-01 5.9003931283950806e-01
+
+
+ <_>
+
+ 0 -1 1858 6.7543348995968699e-04
+
+
+ 5.3982520103454590e-01 4.3454289436340332e-01
+
+
+ <_>
+
+ 0 -1 1859 6.3295697327703238e-04
+
+
+ 5.2015632390975952e-01 4.0513589978218079e-01
+
+
+ <_>
+
+ 0 -1 1860 1.2435320531949401e-03
+
+
+ 4.6423879265785217e-01 5.5474412441253662e-01
+
+
+ <_>
+
+ 0 -1 1861 -4.7363857738673687e-03
+
+
+ 6.1985671520233154e-01 4.6725520491600037e-01
+
+
+ <_>
+
+ 0 -1 1862 -6.4658462069928646e-03
+
+
+ 6.8373328447341919e-01 5.0190007686614990e-01
+
+
+ <_>
+
+ 0 -1 1863 3.5017321351915598e-04
+
+
+ 4.3448030948638916e-01 5.3636229038238525e-01
+
+
+ <_>
+
+ 0 -1 1864 1.5754920605104417e-04
+
+
+ 4.7600790858268738e-01 5.7320207357406616e-01
+
+
+ <_>
+
+ 0 -1 1865 9.9774366244673729e-03
+
+
+ 5.0909858942031860e-01 3.6350399255752563e-01
+
+
+ <_>
+
+ 0 -1 1866 -4.1464529931545258e-04
+
+
+ 5.5700647830963135e-01 4.5938020944595337e-01
+
+
+ <_>
+
+ 0 -1 1867 -3.5888899583369493e-04
+
+
+ 5.3568458557128906e-01 4.3391349911689758e-01
+
+
+ <_>
+
+ 0 -1 1868 4.0463250479660928e-04
+
+
+ 4.4398030638694763e-01 5.4367768764495850e-01
+
+
+ <_>
+
+ 0 -1 1869 -8.2184787606820464e-04
+
+
+ 4.0422949194908142e-01 5.1762992143630981e-01
+
+
+ <_>
+
+ 0 -1 1870 5.9467419050633907e-03
+
+
+ 4.9276518821716309e-01 5.6337797641754150e-01
+
+
+ <_>
+
+ 0 -1 1871 -2.1753389388322830e-02
+
+
+ 8.0062937736511230e-01 4.8008409142494202e-01
+
+
+ <_>
+
+ 0 -1 1872 -1.4540379866957664e-02
+
+
+ 3.9460548758506775e-01 5.1822227239608765e-01
+
+
+ <_>
+
+ 0 -1 1873 -4.0510769933462143e-02
+
+
+ 2.1324990317225456e-02 4.9357929825782776e-01
+
+
+ <_>
+
+ 0 -1 1874 -5.8458268176764250e-04
+
+
+ 4.0127959847450256e-01 5.3140252828598022e-01
+
+
+ <_>
+
+ 0 -1 1875 5.5151800625026226e-03
+
+
+ 4.6424189209938049e-01 5.8962607383728027e-01
+
+
+ <_>
+
+ 0 -1 1876 -6.0626221820712090e-03
+
+
+ 6.5021592378616333e-01 5.0164777040481567e-01
+
+
+ <_>
+
+ 0 -1 1877 9.4535842537879944e-02
+
+
+ 5.2647089958190918e-01 4.1268271207809448e-01
+
+
+ <_>
+
+ 0 -1 1878 4.7315051779150963e-03
+
+
+ 4.8791998624801636e-01 5.8924478292465210e-01
+
+
+ <_>
+
+ 0 -1 1879 -5.2571471314877272e-04
+
+
+ 3.9172801375389099e-01 5.1894128322601318e-01
+
+
+ <_>
+
+ 0 -1 1880 -2.5464049540460110e-03
+
+
+ 5.8375990390777588e-01 4.9857059121131897e-01
+
+
+ <_>
+
+ 0 -1 1881 -2.6075689122080803e-02
+
+
+ 1.2619839608669281e-01 4.9558219313621521e-01
+
+
+ <_>
+
+ 0 -1 1882 -5.4779709316790104e-03
+
+
+ 5.7225137948989868e-01 5.0102657079696655e-01
+
+
+ <_>
+
+ 0 -1 1883 5.1337741315364838e-03
+
+
+ 5.2732622623443604e-01 4.2263761162757874e-01
+
+
+ <_>
+
+ 0 -1 1884 4.7944980906322598e-04
+
+
+ 4.4500669836997986e-01 5.8195871114730835e-01
+
+
+ <_>
+
+ 0 -1 1885 -2.1114079281687737e-03
+
+
+ 5.7576531171798706e-01 4.5117148756980896e-01
+
+
+ <_>
+
+ 0 -1 1886 -1.3179990462958813e-02
+
+
+ 1.8843810260295868e-01 5.1607340574264526e-01
+
+
+ <_>
+
+ 0 -1 1887 -4.7968099825084209e-03
+
+
+ 6.5897899866104126e-01 4.7361189126968384e-01
+
+
+ <_>
+
+ 0 -1 1888 6.7483168095350266e-03
+
+
+ 5.2594298124313354e-01 3.3563950657844543e-01
+
+
+ <_>
+
+ 0 -1 1889 1.4623369788751006e-03
+
+
+ 5.3552711009979248e-01 4.2640921473503113e-01
+
+
+ <_>
+
+ 0 -1 1890 4.7645159065723419e-03
+
+
+ 5.0344067811965942e-01 5.7868278026580811e-01
+
+
+ <_>
+
+ 0 -1 1891 6.8066660314798355e-03
+
+
+ 4.7566050291061401e-01 6.6778290271759033e-01
+
+
+ <_>
+
+ 0 -1 1892 3.6608621012419462e-03
+
+
+ 5.3696119785308838e-01 4.3115469813346863e-01
+
+
+ <_>
+
+ 0 -1 1893 2.1449640393257141e-02
+
+
+ 4.9686419963836670e-01 1.8888160586357117e-01
+
+
+ <_>
+
+ 0 -1 1894 4.1678901761770248e-03
+
+
+ 4.9307331442832947e-01 5.8153688907623291e-01
+
+
+ <_>
+
+ 0 -1 1895 8.6467564105987549e-03
+
+
+ 5.2052050828933716e-01 4.1325950622558594e-01
+
+
+ <_>
+
+ 0 -1 1896 -3.6114078829996288e-04
+
+
+ 5.4835551977157593e-01 4.8009279370307922e-01
+
+
+ <_>
+
+ 0 -1 1897 1.0808729566633701e-03
+
+
+ 4.6899020671844482e-01 6.0414212942123413e-01
+
+
+ <_>
+
+ 0 -1 1898 5.7719959877431393e-03
+
+
+ 5.1711422204971313e-01 3.0532771348953247e-01
+
+
+ <_>
+
+ 0 -1 1899 1.5720770461484790e-03
+
+
+ 5.2199780941009521e-01 4.1788038611412048e-01
+
+
+ <_>
+
+ 0 -1 1900 -1.9307859474793077e-03
+
+
+ 5.8603698015213013e-01 4.8129200935363770e-01
+
+
+ <_>
+
+ 0 -1 1901 -7.8926272690296173e-03
+
+
+ 1.7492769658565521e-01 4.9717339873313904e-01
+
+
+ <_>
+
+ 0 -1 1902 -2.2224679123610258e-03
+
+
+ 4.3425890803337097e-01 5.2128481864929199e-01
+
+
+ <_>
+
+ 0 -1 1903 1.9011989934369922e-03
+
+
+ 4.7651869058609009e-01 6.8920552730560303e-01
+
+
+ <_>
+
+ 0 -1 1904 2.7576119173318148e-03
+
+
+ 5.2621912956237793e-01 4.3374860286712646e-01
+
+
+ <_>
+
+ 0 -1 1905 5.1787449046969414e-03
+
+
+ 4.8040691018104553e-01 7.8437292575836182e-01
+
+
+ <_>
+
+ 0 -1 1906 -9.0273341629654169e-04
+
+
+ 4.1208469867706299e-01 5.3534239530563354e-01
+
+
+ <_>
+
+ 0 -1 1907 5.1797959022223949e-03
+
+
+ 4.7403728961944580e-01 6.4259600639343262e-01
+
+
+ <_>
+
+ 0 -1 1908 -1.0114000178873539e-02
+
+
+ 2.4687920510768890e-01 5.1750177145004272e-01
+
+
+ <_>
+
+ 0 -1 1909 -1.8617060035467148e-02
+
+
+ 5.7562941312789917e-01 4.6289789676666260e-01
+
+
+ <_>
+
+ 0 -1 1910 5.9225959703326225e-03
+
+
+ 5.1696258783340454e-01 3.2142710685729980e-01
+
+
+ <_>
+
+ 0 -1 1911 -6.2945079989731312e-03
+
+
+ 3.8720148801803589e-01 5.1416367292404175e-01
+
+
+ <_>
+
+ 0 -1 1912 6.5353019163012505e-03
+
+
+ 4.8530489206314087e-01 6.3104897737503052e-01
+
+
+ <_>
+
+ 0 -1 1913 1.0878399480134249e-03
+
+
+ 5.1173150539398193e-01 3.7232589721679688e-01
+
+
+ <_>
+
+ 0 -1 1914 -2.2542240098118782e-02
+
+
+ 5.6927400827407837e-01 4.8871129751205444e-01
+
+
+ <_>
+
+ 0 -1 1915 -3.0065660830587149e-03
+
+
+ 2.5560128688812256e-01 5.0039929151535034e-01
+
+
+ <_>
+
+ 0 -1 1916 7.4741272255778313e-03
+
+
+ 4.8108729720115662e-01 5.6759268045425415e-01
+
+
+ <_>
+
+ 0 -1 1917 2.6162320747971535e-02
+
+
+ 4.9711948633193970e-01 1.7772370576858521e-01
+
+
+ <_>
+
+ 0 -1 1918 9.4352738233283162e-04
+
+
+ 4.9400109052658081e-01 5.4912507534027100e-01
+
+
+ <_>
+
+ 0 -1 1919 3.3363241702318192e-02
+
+
+ 5.0076121091842651e-01 2.7907240390777588e-01
+
+
+ <_>
+
+ 0 -1 1920 -1.5118650160729885e-02
+
+
+ 7.0595788955688477e-01 4.9730318784713745e-01
+
+
+ <_>
+
+ 0 -1 1921 9.8648946732282639e-04
+
+
+ 5.1286202669143677e-01 3.7767618894577026e-01
+
+
+
+
+ <_>
+ 213
+ 1.0576110076904297e+02
+
+ <_>
+
+ 0 -1 1922 -9.5150798559188843e-02
+
+
+ 6.4707571268081665e-01 4.0172868967056274e-01
+
+
+ <_>
+
+ 0 -1 1923 6.2702340073883533e-03
+
+
+ 3.9998221397399902e-01 5.7464492321014404e-01
+
+
+ <_>
+
+ 0 -1 1924 3.0018089455552399e-04
+
+
+ 3.5587701201438904e-01 5.5388098955154419e-01
+
+
+ <_>
+
+ 0 -1 1925 1.1757409665733576e-03
+
+
+ 4.2565348744392395e-01 5.3826177120208740e-01
+
+
+ <_>
+
+ 0 -1 1926 4.4235268433112651e-05
+
+
+ 3.6829081177711487e-01 5.5899268388748169e-01
+
+
+ <_>
+
+ 0 -1 1927 -2.9936920327600092e-05
+
+
+ 5.4524701833724976e-01 4.0203678607940674e-01
+
+
+ <_>
+
+ 0 -1 1928 3.0073199886828661e-03
+
+
+ 5.2390581369400024e-01 3.3178439736366272e-01
+
+
+ <_>
+
+ 0 -1 1929 -1.0513889603316784e-02
+
+
+ 4.3206891417503357e-01 5.3079837560653687e-01
+
+
+ <_>
+
+ 0 -1 1930 8.3476826548576355e-03
+
+
+ 4.5046371221542358e-01 6.4532989263534546e-01
+
+
+ <_>
+
+ 0 -1 1931 -3.1492270063608885e-03
+
+
+ 4.3134251236915588e-01 5.3705251216888428e-01
+
+
+ <_>
+
+ 0 -1 1932 -1.4435649973165710e-05
+
+
+ 5.3266030550003052e-01 3.8179719448089600e-01
+
+
+ <_>
+
+ 0 -1 1933 -4.2855090578086674e-04
+
+
+ 4.3051639199256897e-01 5.3820097446441650e-01
+
+
+ <_>
+
+ 0 -1 1934 1.5062429883982986e-04
+
+
+ 4.2359709739685059e-01 5.5449652671813965e-01
+
+
+ <_>
+
+ 0 -1 1935 7.1559831500053406e-02
+
+
+ 5.3030598163604736e-01 2.6788029074668884e-01
+
+
+ <_>
+
+ 0 -1 1936 8.4095180500298738e-04
+
+
+ 3.5571089386940002e-01 5.2054339647293091e-01
+
+
+ <_>
+
+ 0 -1 1937 6.2986500561237335e-02
+
+
+ 5.2253627777099609e-01 2.8613761067390442e-01
+
+
+ <_>
+
+ 0 -1 1938 -3.3798629883676767e-03
+
+
+ 3.6241859197616577e-01 5.2016979455947876e-01
+
+
+ <_>
+
+ 0 -1 1939 -1.1810739670181647e-04
+
+
+ 5.4744768142700195e-01 3.9598938822746277e-01
+
+
+ <_>
+
+ 0 -1 1940 -5.4505601292476058e-04
+
+
+ 3.7404221296310425e-01 5.2157157659530640e-01
+
+
+ <_>
+
+ 0 -1 1941 -1.8454910023137927e-03
+
+
+ 5.8930522203445435e-01 4.5844489336013794e-01
+
+
+ <_>
+
+ 0 -1 1942 -4.3832371011376381e-04
+
+
+ 4.0845820307731628e-01 5.3853511810302734e-01
+
+
+ <_>
+
+ 0 -1 1943 -2.4000830017030239e-03
+
+
+ 3.7774550914764404e-01 5.2935802936553955e-01
+
+
+ <_>
+
+ 0 -1 1944 -9.8795741796493530e-02
+
+
+ 2.9636120796203613e-01 5.0700891017913818e-01
+
+
+ <_>
+
+ 0 -1 1945 3.1798239797353745e-03
+
+
+ 4.8776328563690186e-01 6.7264437675476074e-01
+
+
+ <_>
+
+ 0 -1 1946 3.2406419632025063e-04
+
+
+ 4.3669110536575317e-01 5.5611097812652588e-01
+
+
+ <_>
+
+ 0 -1 1947 -3.2547250390052795e-02
+
+
+ 3.1281578540802002e-01 5.3086161613464355e-01
+
+
+ <_>
+
+ 0 -1 1948 -7.7561130747199059e-03
+
+
+ 6.5602248907089233e-01 4.6398720145225525e-01
+
+
+ <_>
+
+ 0 -1 1949 1.6027249395847321e-02
+
+
+ 5.1726800203323364e-01 3.1418979167938232e-01
+
+
+ <_>
+
+ 0 -1 1950 7.1002350523485802e-06
+
+
+ 4.0844461321830750e-01 5.3362947702407837e-01
+
+
+ <_>
+
+ 0 -1 1951 7.3422808200120926e-03
+
+
+ 4.9669221043586731e-01 6.6034650802612305e-01
+
+
+ <_>
+
+ 0 -1 1952 -1.6970280557870865e-03
+
+
+ 5.9082370996475220e-01 4.5001828670501709e-01
+
+
+ <_>
+
+ 0 -1 1953 2.4118260480463505e-03
+
+
+ 5.3151607513427734e-01 3.5997208952903748e-01
+
+
+ <_>
+
+ 0 -1 1954 -5.5300937965512276e-03
+
+
+ 2.3340409994125366e-01 4.9968141317367554e-01
+
+
+ <_>
+
+ 0 -1 1955 -2.6478730142116547e-03
+
+
+ 5.8809357881546021e-01 4.6847340464591980e-01
+
+
+ <_>
+
+ 0 -1 1956 1.1295629665255547e-02
+
+
+ 4.9837771058082581e-01 1.8845909833908081e-01
+
+
+ <_>
+
+ 0 -1 1957 -6.6952878842130303e-04
+
+
+ 5.8721381425857544e-01 4.7990199923515320e-01
+
+
+ <_>
+
+ 0 -1 1958 1.4410680159926414e-03
+
+
+ 5.1311892271041870e-01 3.5010111331939697e-01
+
+
+ <_>
+
+ 0 -1 1959 2.4637870956212282e-03
+
+
+ 5.3393721580505371e-01 4.1176390647888184e-01
+
+
+ <_>
+
+ 0 -1 1960 3.3114518737420440e-04
+
+
+ 4.3133831024169922e-01 5.3982460498809814e-01
+
+
+ <_>
+
+ 0 -1 1961 -3.3557269722223282e-02
+
+
+ 2.6753368973731995e-01 5.1791548728942871e-01
+
+
+ <_>
+
+ 0 -1 1962 1.8539419397711754e-02
+
+
+ 4.9738699197769165e-01 2.3171770572662354e-01
+
+
+ <_>
+
+ 0 -1 1963 -2.9698139405809343e-04
+
+
+ 5.5297082662582397e-01 4.6436640620231628e-01
+
+
+ <_>
+
+ 0 -1 1964 -4.5577259152196348e-04
+
+
+ 5.6295841932296753e-01 4.4691911339759827e-01
+
+
+ <_>
+
+ 0 -1 1965 -1.0158980265259743e-02
+
+
+ 6.7062127590179443e-01 4.9259188771247864e-01
+
+
+ <_>
+
+ 0 -1 1966 -2.2413829356082715e-05
+
+
+ 5.2394217252731323e-01 3.9129018783569336e-01
+
+
+ <_>
+
+ 0 -1 1967 7.2034963523037732e-05
+
+
+ 4.7994381189346313e-01 5.5017888545989990e-01
+
+
+ <_>
+
+ 0 -1 1968 -6.9267209619283676e-03
+
+
+ 6.9300097227096558e-01 4.6980848908424377e-01
+
+
+ <_>
+
+ 0 -1 1969 -7.6997838914394379e-03
+
+
+ 4.0996238589286804e-01 5.4808831214904785e-01
+
+
+ <_>
+
+ 0 -1 1970 -7.3130549862980843e-03
+
+
+ 3.2834759354591370e-01 5.0578862428665161e-01
+
+
+ <_>
+
+ 0 -1 1971 1.9650589674711227e-03
+
+
+ 4.9780470132827759e-01 6.3982498645782471e-01
+
+
+ <_>
+
+ 0 -1 1972 7.1647600270807743e-03
+
+
+ 4.6611601114273071e-01 6.2221372127532959e-01
+
+
+ <_>
+
+ 0 -1 1973 -2.4078639224171638e-02
+
+
+ 2.3346449434757233e-01 5.2221620082855225e-01
+
+
+ <_>
+
+ 0 -1 1974 -2.1027969196438789e-02
+
+
+ 1.1836539953947067e-01 4.9382260441780090e-01
+
+
+ <_>
+
+ 0 -1 1975 3.6017020465806127e-04
+
+
+ 5.3250199556350708e-01 4.1167110204696655e-01
+
+
+ <_>
+
+ 0 -1 1976 -1.7219729721546173e-02
+
+
+ 6.2787622213363647e-01 4.6642690896987915e-01
+
+
+ <_>
+
+ 0 -1 1977 -7.8672142699360847e-03
+
+
+ 3.4034150838851929e-01 5.2497369050979614e-01
+
+
+ <_>
+
+ 0 -1 1978 -4.4777389848604798e-04
+
+
+ 3.6104118824005127e-01 5.0862592458724976e-01
+
+
+ <_>
+
+ 0 -1 1979 5.5486010387539864e-03
+
+
+ 4.8842659592628479e-01 6.2034982442855835e-01
+
+
+ <_>
+
+ 0 -1 1980 -6.9461148232221603e-03
+
+
+ 2.6259300112724304e-01 5.0110971927642822e-01
+
+
+ <_>
+
+ 0 -1 1981 1.3569870498031378e-04
+
+
+ 4.3407949805259705e-01 5.6283122301101685e-01
+
+
+ <_>
+
+ 0 -1 1982 -4.5880250632762909e-02
+
+
+ 6.5079987049102783e-01 4.6962749958038330e-01
+
+
+ <_>
+
+ 0 -1 1983 -2.1582560613751411e-02
+
+
+ 3.8265028595924377e-01 5.2876168489456177e-01
+
+
+ <_>
+
+ 0 -1 1984 -2.0209539681673050e-02
+
+
+ 3.2333680987358093e-01 5.0744771957397461e-01
+
+
+ <_>
+
+ 0 -1 1985 5.8496710844337940e-03
+
+
+ 5.1776039600372314e-01 4.4896709918975830e-01
+
+
+ <_>
+
+ 0 -1 1986 -5.7476379879517481e-05
+
+
+ 4.0208509564399719e-01 5.2463638782501221e-01
+
+
+ <_>
+
+ 0 -1 1987 -1.1513100471347570e-03
+
+
+ 6.3150721788406372e-01 4.9051541090011597e-01
+
+
+ <_>
+
+ 0 -1 1988 1.9862831104546785e-03
+
+
+ 4.7024598717689514e-01 6.4971512556076050e-01
+
+
+ <_>
+
+ 0 -1 1989 -5.2719512023031712e-03
+
+
+ 3.6503839492797852e-01 5.2276527881622314e-01
+
+
+ <_>
+
+ 0 -1 1990 1.2662699446082115e-03
+
+
+ 5.1661008596420288e-01 3.8776180148124695e-01
+
+
+ <_>
+
+ 0 -1 1991 -6.2919440679252148e-03
+
+
+ 7.3758941888809204e-01 5.0238478183746338e-01
+
+
+ <_>
+
+ 0 -1 1992 6.7360111279413104e-04
+
+
+ 4.4232261180877686e-01 5.4955857992172241e-01
+
+
+ <_>
+
+ 0 -1 1993 -1.0523450328037143e-03
+
+
+ 5.9763962030410767e-01 4.8595830798149109e-01
+
+
+ <_>
+
+ 0 -1 1994 -4.4216238893568516e-04
+
+
+ 5.9559392929077148e-01 4.3989309668540955e-01
+
+
+ <_>
+
+ 0 -1 1995 1.1747940443456173e-03
+
+
+ 5.3498882055282593e-01 4.6050581336021423e-01
+
+
+ <_>
+
+ 0 -1 1996 5.2457437850534916e-03
+
+
+ 5.0491911172866821e-01 2.9415771365165710e-01
+
+
+ <_>
+
+ 0 -1 1997 -2.4539720267057419e-02
+
+
+ 2.5501778721809387e-01 5.2185869216918945e-01
+
+
+ <_>
+
+ 0 -1 1998 7.3793041519820690e-04
+
+
+ 4.4248610734939575e-01 5.4908162355422974e-01
+
+
+ <_>
+
+ 0 -1 1999 1.4233799884095788e-03
+
+
+ 5.3195142745971680e-01 4.0813559293746948e-01
+
+
+ <_>
+
+ 0 -1 2000 -2.4149110540747643e-03
+
+
+ 4.0876591205596924e-01 5.2389502525329590e-01
+
+
+ <_>
+
+ 0 -1 2001 -1.2165299849584699e-03
+
+
+ 5.6745791435241699e-01 4.9080529808998108e-01
+
+
+ <_>
+
+ 0 -1 2002 -1.2438809499144554e-03
+
+
+ 4.1294258832931519e-01 5.2561181783676147e-01
+
+
+ <_>
+
+ 0 -1 2003 6.1942739412188530e-03
+
+
+ 5.0601941347122192e-01 7.3136532306671143e-01
+
+
+ <_>
+
+ 0 -1 2004 -1.6607169527560472e-03
+
+
+ 5.9796321392059326e-01 4.5963698625564575e-01
+
+
+ <_>
+
+ 0 -1 2005 -2.7316259220242500e-02
+
+
+ 4.1743651032447815e-01 5.3088420629501343e-01
+
+
+ <_>
+
+ 0 -1 2006 -1.5845570014789701e-03
+
+
+ 5.6158047914505005e-01 4.5194861292839050e-01
+
+
+ <_>
+
+ 0 -1 2007 -1.5514739789068699e-03
+
+
+ 4.0761870145797729e-01 5.3607851266860962e-01
+
+
+ <_>
+
+ 0 -1 2008 3.8446558755822480e-04
+
+
+ 4.3472939729690552e-01 5.4304420948028564e-01
+
+
+ <_>
+
+ 0 -1 2009 -1.4672259800136089e-02
+
+
+ 1.6593049466609955e-01 5.1460939645767212e-01
+
+
+ <_>
+
+ 0 -1 2010 8.1608882173895836e-03
+
+
+ 4.9618190526962280e-01 1.8847459554672241e-01
+
+
+ <_>
+
+ 0 -1 2011 1.1121659772470593e-03
+
+
+ 4.8682639002799988e-01 6.0938161611557007e-01
+
+
+ <_>
+
+ 0 -1 2012 -7.2603770531713963e-03
+
+
+ 6.2843251228332520e-01 4.6903759241104126e-01
+
+
+ <_>
+
+ 0 -1 2013 -2.4046430189628154e-04
+
+
+ 5.5750000476837158e-01 4.0460440516471863e-01
+
+
+ <_>
+
+ 0 -1 2014 -2.3348190006799996e-04
+
+
+ 4.1157621145248413e-01 5.2528482675552368e-01
+
+
+ <_>
+
+ 0 -1 2015 5.5736480280756950e-03
+
+
+ 4.7300729155540466e-01 5.6901007890701294e-01
+
+
+ <_>
+
+ 0 -1 2016 3.0623769387602806e-02
+
+
+ 4.9718868732452393e-01 1.7400950193405151e-01
+
+
+ <_>
+
+ 0 -1 2017 9.2074798885732889e-04
+
+
+ 5.3721177577972412e-01 4.3548721075057983e-01
+
+
+ <_>
+
+ 0 -1 2018 -4.3550739064812660e-05
+
+
+ 5.3668838739395142e-01 4.3473169207572937e-01
+
+
+ <_>
+
+ 0 -1 2019 -6.6452710889279842e-03
+
+
+ 3.4355181455612183e-01 5.1605331897735596e-01
+
+
+ <_>
+
+ 0 -1 2020 4.3221998959779739e-02
+
+
+ 4.7667920589447021e-01 7.2936528921127319e-01
+
+
+ <_>
+
+ 0 -1 2021 2.2331769578158855e-03
+
+
+ 5.0293159484863281e-01 5.6331712007522583e-01
+
+
+ <_>
+
+ 0 -1 2022 3.1829739455133677e-03
+
+
+ 4.0160921216011047e-01 5.1921367645263672e-01
+
+
+ <_>
+
+ 0 -1 2023 -1.8027749320026487e-04
+
+
+ 4.0883159637451172e-01 5.4179197549819946e-01
+
+
+ <_>
+
+ 0 -1 2024 -5.2934689447283745e-03
+
+
+ 4.0756770968437195e-01 5.2435618638992310e-01
+
+
+ <_>
+
+ 0 -1 2025 1.2750959722325206e-03
+
+
+ 4.9132829904556274e-01 6.3870108127593994e-01
+
+
+ <_>
+
+ 0 -1 2026 4.3385322205722332e-03
+
+
+ 5.0316721200942993e-01 2.9473468661308289e-01
+
+
+ <_>
+
+ 0 -1 2027 8.5250744596123695e-03
+
+
+ 4.9497890472412109e-01 6.3088691234588623e-01
+
+
+ <_>
+
+ 0 -1 2028 -9.4266352243721485e-04
+
+
+ 5.3283667564392090e-01 4.2856499552726746e-01
+
+
+ <_>
+
+ 0 -1 2029 1.3609660090878606e-03
+
+
+ 4.9915251135826111e-01 5.9415012598037720e-01
+
+
+ <_>
+
+ 0 -1 2030 4.4782509212382138e-04
+
+
+ 4.5735040307044983e-01 5.8544808626174927e-01
+
+
+ <_>
+
+ 0 -1 2031 1.3360050506889820e-03
+
+
+ 4.6043589711189270e-01 5.8490520715713501e-01
+
+
+ <_>
+
+ 0 -1 2032 -6.0967548051849008e-04
+
+
+ 3.9693889021873474e-01 5.2294230461120605e-01
+
+
+ <_>
+
+ 0 -1 2033 -2.3656780831515789e-03
+
+
+ 5.8083200454711914e-01 4.8983570933341980e-01
+
+
+ <_>
+
+ 0 -1 2034 1.0734340175986290e-03
+
+
+ 4.3512108922004700e-01 5.4700392484664917e-01
+
+
+ <_>
+
+ 0 -1 2035 2.1923359017819166e-03
+
+
+ 5.3550601005554199e-01 3.8429039716720581e-01
+
+
+ <_>
+
+ 0 -1 2036 5.4968618787825108e-03
+
+
+ 5.0181388854980469e-01 2.8271919488906860e-01
+
+
+ <_>
+
+ 0 -1 2037 -7.5368821620941162e-02
+
+
+ 1.2250760197639465e-01 5.1488268375396729e-01
+
+
+ <_>
+
+ 0 -1 2038 2.5134470313787460e-02
+
+
+ 4.7317668795585632e-01 7.0254462957382202e-01
+
+
+ <_>
+
+ 0 -1 2039 -2.9358599931583740e-05
+
+
+ 5.4305320978164673e-01 4.6560868620872498e-01
+
+
+ <_>
+
+ 0 -1 2040 -5.8355910005047917e-04
+
+
+ 4.0310400724411011e-01 5.1901197433471680e-01
+
+
+ <_>
+
+ 0 -1 2041 -2.6639450807124376e-03
+
+
+ 4.3081268668174744e-01 5.1617711782455444e-01
+
+
+ <_>
+
+ 0 -1 2042 -1.3804089976474643e-03
+
+
+ 6.2198299169540405e-01 4.6955159306526184e-01
+
+
+ <_>
+
+ 0 -1 2043 1.2313219485804439e-03
+
+
+ 5.3793638944625854e-01 4.4258311390876770e-01
+
+
+ <_>
+
+ 0 -1 2044 -1.4644179827882908e-05
+
+
+ 5.2816402912139893e-01 4.2225030064582825e-01
+
+
+ <_>
+
+ 0 -1 2045 -1.2818809598684311e-02
+
+
+ 2.5820928812026978e-01 5.1799327135086060e-01
+
+
+ <_>
+
+ 0 -1 2046 2.2852189838886261e-02
+
+
+ 4.7786930203437805e-01 7.6092642545700073e-01
+
+
+ <_>
+
+ 0 -1 2047 8.2305970136076212e-04
+
+
+ 5.3409922122955322e-01 4.6717241406440735e-01
+
+
+ <_>
+
+ 0 -1 2048 1.2770120054483414e-02
+
+
+ 4.9657610058784485e-01 1.4723660051822662e-01
+
+
+ <_>
+
+ 0 -1 2049 -5.0051510334014893e-02
+
+
+ 6.4149940013885498e-01 5.0165921449661255e-01
+
+
+ <_>
+
+ 0 -1 2050 1.5775270760059357e-02
+
+
+ 4.5223200321197510e-01 5.6853622198104858e-01
+
+
+ <_>
+
+ 0 -1 2051 -1.8501620739698410e-02
+
+
+ 2.7647489309310913e-01 5.1379591226577759e-01
+
+
+ <_>
+
+ 0 -1 2052 2.4626250378787518e-03
+
+
+ 5.1419419050216675e-01 3.7954080104827881e-01
+
+
+ <_>
+
+ 0 -1 2053 6.2916167080402374e-02
+
+
+ 5.0606489181518555e-01 6.5804338455200195e-01
+
+
+ <_>
+
+ 0 -1 2054 -2.1648500478477217e-05
+
+
+ 5.1953881978988647e-01 4.0198868513107300e-01
+
+
+ <_>
+
+ 0 -1 2055 2.1180990152060986e-03
+
+
+ 4.9623650312423706e-01 5.9544587135314941e-01
+
+
+ <_>
+
+ 0 -1 2056 -1.6634890809655190e-02
+
+
+ 3.7579330801963806e-01 5.1754468679428101e-01
+
+
+ <_>
+
+ 0 -1 2057 -2.8899470344185829e-03
+
+
+ 6.6240137815475464e-01 5.0571787357330322e-01
+
+
+ <_>
+
+ 0 -1 2058 7.6783262193202972e-02
+
+
+ 4.7957968711853027e-01 8.0477148294448853e-01
+
+
+ <_>
+
+ 0 -1 2059 3.9170677773654461e-03
+
+
+ 4.9378821253776550e-01 5.7199418544769287e-01
+
+
+ <_>
+
+ 0 -1 2060 -7.2670601308345795e-02
+
+
+ 5.3894560784101486e-02 4.9439039826393127e-01
+
+
+ <_>
+
+ 0 -1 2061 5.4039502143859863e-01
+
+
+ 5.1297742128372192e-01 1.1433389782905579e-01
+
+
+ <_>
+
+ 0 -1 2062 2.9510019812732935e-03
+
+
+ 4.5283439755439758e-01 5.6985741853713989e-01
+
+
+ <_>
+
+ 0 -1 2063 3.4508369863033295e-03
+
+
+ 5.3577268123626709e-01 4.2187309265136719e-01
+
+
+ <_>
+
+ 0 -1 2064 -4.2077939724549651e-04
+
+
+ 5.9161728620529175e-01 4.6379259228706360e-01
+
+
+ <_>
+
+ 0 -1 2065 3.3051050268113613e-03
+
+
+ 5.2733850479125977e-01 4.3820428848266602e-01
+
+
+ <_>
+
+ 0 -1 2066 4.7735060798004270e-04
+
+
+ 4.0465280413627625e-01 5.1818847656250000e-01
+
+
+ <_>
+
+ 0 -1 2067 -2.5928510352969170e-02
+
+
+ 7.4522358179092407e-01 5.0893861055374146e-01
+
+
+ <_>
+
+ 0 -1 2068 -2.9729790985584259e-03
+
+
+ 3.2954359054565430e-01 5.0587952136993408e-01
+
+
+ <_>
+
+ 0 -1 2069 5.8508329093456268e-03
+
+
+ 4.8571440577507019e-01 5.7930248975753784e-01
+
+
+ <_>
+
+ 0 -1 2070 -4.5967519283294678e-02
+
+
+ 4.3127310276031494e-01 5.3806531429290771e-01
+
+
+ <_>
+
+ 0 -1 2071 1.5585960447788239e-01
+
+
+ 5.1961702108383179e-01 1.6847139596939087e-01
+
+
+ <_>
+
+ 0 -1 2072 1.5164829790592194e-02
+
+
+ 4.7357571125030518e-01 6.7350268363952637e-01
+
+
+ <_>
+
+ 0 -1 2073 -1.0604249546304345e-03
+
+
+ 5.8229267597198486e-01 4.7757029533386230e-01
+
+
+ <_>
+
+ 0 -1 2074 6.6476291976869106e-03
+
+
+ 4.9991989135742188e-01 2.3195350170135498e-01
+
+
+ <_>
+
+ 0 -1 2075 -1.2231130152940750e-02
+
+
+ 4.7508931159973145e-01 5.2629822492599487e-01
+
+
+ <_>
+
+ 0 -1 2076 5.6528882123529911e-03
+
+
+ 5.0697678327560425e-01 3.5618188977241516e-01
+
+
+ <_>
+
+ 0 -1 2077 1.2977829901501536e-03
+
+
+ 4.8756939172744751e-01 5.6190627813339233e-01
+
+
+ <_>
+
+ 0 -1 2078 1.0781589895486832e-02
+
+
+ 4.7507700324058533e-01 6.7823082208633423e-01
+
+
+ <_>
+
+ 0 -1 2079 2.8654779307544231e-03
+
+
+ 5.3054618835449219e-01 4.2907360196113586e-01
+
+
+ <_>
+
+ 0 -1 2080 2.8663428965955973e-03
+
+
+ 4.5184791088104248e-01 5.5393511056900024e-01
+
+
+ <_>
+
+ 0 -1 2081 -5.1983320154249668e-03
+
+
+ 4.1491198539733887e-01 5.4341888427734375e-01
+
+
+ <_>
+
+ 0 -1 2082 5.3739990107715130e-03
+
+
+ 4.7178968787193298e-01 6.5076571702957153e-01
+
+
+ <_>
+
+ 0 -1 2083 -1.4641529880464077e-02
+
+
+ 2.1721640229225159e-01 5.1617771387100220e-01
+
+
+ <_>
+
+ 0 -1 2084 -1.5042580344015732e-05
+
+
+ 5.3373837471008301e-01 4.2988368868827820e-01
+
+
+ <_>
+
+ 0 -1 2085 -1.1875660129589960e-04
+
+
+ 4.6045941114425659e-01 5.5824470520019531e-01
+
+
+ <_>
+
+ 0 -1 2086 1.6995530575513840e-02
+
+
+ 4.9458950757980347e-01 7.3880076408386230e-02
+
+
+ <_>
+
+ 0 -1 2087 -3.5095941275358200e-02
+
+
+ 7.0055091381072998e-01 4.9775910377502441e-01
+
+
+ <_>
+
+ 0 -1 2088 2.4217350874096155e-03
+
+
+ 4.4662651419639587e-01 5.4776942729949951e-01
+
+
+ <_>
+
+ 0 -1 2089 -9.6340337768197060e-04
+
+
+ 4.7140988707542419e-01 5.3133380413055420e-01
+
+
+ <_>
+
+ 0 -1 2090 1.6391130338888615e-04
+
+
+ 4.3315461277961731e-01 5.3422421216964722e-01
+
+
+ <_>
+
+ 0 -1 2091 -2.1141460165381432e-02
+
+
+ 2.6447001099586487e-01 5.2044987678527832e-01
+
+
+ <_>
+
+ 0 -1 2092 8.7775202700868249e-04
+
+
+ 5.2083498239517212e-01 4.1527429223060608e-01
+
+
+ <_>
+
+ 0 -1 2093 -2.7943920344114304e-02
+
+
+ 6.3441252708435059e-01 5.0188118219375610e-01
+
+
+ <_>
+
+ 0 -1 2094 6.7297378554940224e-03
+
+
+ 5.0504380464553833e-01 3.5008639097213745e-01
+
+
+ <_>
+
+ 0 -1 2095 2.3281039670109749e-02
+
+
+ 4.9663180112838745e-01 6.9686770439147949e-01
+
+
+ <_>
+
+ 0 -1 2096 -1.1644979938864708e-02
+
+
+ 3.3002600073814392e-01 5.0496298074722290e-01
+
+
+ <_>
+
+ 0 -1 2097 1.5764309093356133e-02
+
+
+ 4.9915981292724609e-01 7.3211538791656494e-01
+
+
+ <_>
+
+ 0 -1 2098 -1.3611479662358761e-03
+
+
+ 3.9117351174354553e-01 5.1606708765029907e-01
+
+
+ <_>
+
+ 0 -1 2099 -8.1522337859496474e-04
+
+
+ 5.6289112567901611e-01 4.9497190117835999e-01
+
+
+ <_>
+
+ 0 -1 2100 -6.0066272271797061e-04
+
+
+ 5.8535951375961304e-01 4.5505958795547485e-01
+
+
+ <_>
+
+ 0 -1 2101 4.9715518252924085e-04
+
+
+ 4.2714700102806091e-01 5.4435992240905762e-01
+
+
+ <_>
+
+ 0 -1 2102 2.3475370835512877e-03
+
+
+ 5.1431107521057129e-01 3.8876569271087646e-01
+
+
+ <_>
+
+ 0 -1 2103 -8.9261569082736969e-03
+
+
+ 6.0445022583007812e-01 4.9717208743095398e-01
+
+
+ <_>
+
+ 0 -1 2104 -1.3919910416007042e-02
+
+
+ 2.5831609964370728e-01 5.0003677606582642e-01
+
+
+ <_>
+
+ 0 -1 2105 1.0209949687123299e-03
+
+
+ 4.8573741316795349e-01 5.5603581666946411e-01
+
+
+ <_>
+
+ 0 -1 2106 -2.7441629208624363e-03
+
+
+ 5.9368848800659180e-01 4.6457770466804504e-01
+
+
+ <_>
+
+ 0 -1 2107 -1.6200130805373192e-02
+
+
+ 3.1630149483680725e-01 5.1934951543807983e-01
+
+
+ <_>
+
+ 0 -1 2108 4.3331980705261230e-03
+
+
+ 5.0612241029739380e-01 3.4588789939880371e-01
+
+
+ <_>
+
+ 0 -1 2109 5.8497930876910686e-04
+
+
+ 4.7790178656578064e-01 5.8701777458190918e-01
+
+
+ <_>
+
+ 0 -1 2110 -2.2466450463980436e-03
+
+
+ 4.2978510260581970e-01 5.3747731447219849e-01
+
+
+ <_>
+
+ 0 -1 2111 2.3146099410951138e-03
+
+
+ 5.4386717081069946e-01 4.6409699320793152e-01
+
+
+ <_>
+
+ 0 -1 2112 8.7679121643304825e-03
+
+
+ 4.7268930077552795e-01 6.7717897891998291e-01
+
+
+ <_>
+
+ 0 -1 2113 -2.2448020172305405e-04
+
+
+ 4.2291730642318726e-01 5.4280489683151245e-01
+
+
+ <_>
+
+ 0 -1 2114 -7.4336021207273006e-03
+
+
+ 6.0988807678222656e-01 4.6836739778518677e-01
+
+
+ <_>
+
+ 0 -1 2115 -2.3189240600913763e-03
+
+
+ 5.6894367933273315e-01 4.4242420792579651e-01
+
+
+ <_>
+
+ 0 -1 2116 -2.1042178850620985e-03
+
+
+ 3.7622210383415222e-01 5.1870870590209961e-01
+
+
+ <_>
+
+ 0 -1 2117 4.6034841216169298e-04
+
+
+ 4.6994051337242126e-01 5.7712072134017944e-01
+
+
+ <_>
+
+ 0 -1 2118 1.0547629790380597e-03
+
+
+ 4.4652169942855835e-01 5.6017017364501953e-01
+
+
+ <_>
+
+ 0 -1 2119 8.7148818420246243e-04
+
+
+ 5.4498052597045898e-01 3.9147090911865234e-01
+
+
+ <_>
+
+ 0 -1 2120 3.3364820410497487e-04
+
+
+ 4.5640090107917786e-01 5.6457388401031494e-01
+
+
+ <_>
+
+ 0 -1 2121 -1.4853250468149781e-03
+
+
+ 5.7473778724670410e-01 4.6927788853645325e-01
+
+
+ <_>
+
+ 0 -1 2122 3.0251620337367058e-03
+
+
+ 5.1661968231201172e-01 3.7628141045570374e-01
+
+
+ <_>
+
+ 0 -1 2123 5.0280741415917873e-03
+
+
+ 5.0021117925643921e-01 6.1515271663665771e-01
+
+
+ <_>
+
+ 0 -1 2124 -5.8164511574432254e-04
+
+
+ 5.3945982456207275e-01 4.3907511234283447e-01
+
+
+ <_>
+
+ 0 -1 2125 4.5141529291868210e-02
+
+
+ 5.1883268356323242e-01 2.0630359649658203e-01
+
+
+ <_>
+
+ 0 -1 2126 -1.0795620037242770e-03
+
+
+ 3.9046850800514221e-01 5.1379072666168213e-01
+
+
+ <_>
+
+ 0 -1 2127 1.5995999274309725e-04
+
+
+ 4.8953229188919067e-01 5.4275041818618774e-01
+
+
+ <_>
+
+ 0 -1 2128 -1.9359270110726357e-02
+
+
+ 6.9752287864685059e-01 4.7735071182250977e-01
+
+
+ <_>
+
+ 0 -1 2129 2.0725509524345398e-01
+
+
+ 5.2336359024047852e-01 3.0349919199943542e-01
+
+
+ <_>
+
+ 0 -1 2130 -4.1953290929086506e-04
+
+
+ 5.4193967580795288e-01 4.4601860642433167e-01
+
+
+ <_>
+
+ 0 -1 2131 2.2582069505006075e-03
+
+
+ 4.8157641291618347e-01 6.0274088382720947e-01
+
+
+ <_>
+
+ 0 -1 2132 -6.7811207845807076e-03
+
+
+ 3.9802789688110352e-01 5.1833057403564453e-01
+
+
+ <_>
+
+ 0 -1 2133 1.1154309846460819e-02
+
+
+ 5.4312318563461304e-01 4.1887599229812622e-01
+
+
+ <_>
+
+ 0 -1 2134 4.3162431567907333e-02
+
+
+ 4.7382280230522156e-01 6.5229612588882446e-01
+
+
+
+
+
+
+ <_>
+
+ <_>
+ 3 7 14 4 -1.
+
+ <_>
+ 3 9 14 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 18 4 -1.
+
+ <_>
+ 7 2 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 7 15 9 -1.
+
+ <_>
+ 1 10 15 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 2 6 -1.
+
+ <_>
+ 5 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 3 -1.
+
+ <_>
+ 9 5 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 0 12 9 -1.
+
+ <_>
+ 4 3 12 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 9 10 8 -1.
+
+ <_>
+ 6 13 10 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 14 8 -1.
+
+ <_>
+ 3 10 14 4 2.
+
+
+
+ <_>
+
+ <_>
+ 14 1 6 10 -1.
+
+ <_>
+ 14 1 3 10 2.
+
+
+
+ <_>
+
+ <_>
+ 7 8 5 12 -1.
+
+ <_>
+ 7 12 5 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 1 18 3 -1.
+
+ <_>
+ 7 1 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 1 8 17 2 -1.
+
+ <_>
+ 1 9 17 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 6 4 2 -1.
+
+ <_>
+ 16 7 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 17 2 2 -1.
+
+ <_>
+ 5 18 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 12 -1.
+
+ <_>
+ 14 2 3 12 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 4 12 -1.
+
+ <_>
+ 4 0 2 6 2.
+
+ <_>
+ 6 6 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 18 8 -1.
+
+ <_>
+ 8 11 6 8 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 2 -1.
+
+ <_>
+ 5 8 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 11 5 3 -1.
+
+ <_>
+ 15 12 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 3 10 9 -1.
+
+ <_>
+ 5 6 10 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 14 -1.
+
+ <_>
+ 9 11 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 3 5 4 12 -1.
+
+ <_>
+ 3 9 4 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 5 -1.
+
+ <_>
+ 8 5 4 5 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 10 8 -1.
+
+ <_>
+ 5 10 10 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 0 6 9 -1.
+
+ <_>
+ 8 3 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 1 8 -1.
+
+ <_>
+ 9 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 20 6 -1.
+
+ <_>
+ 0 9 20 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 17 -1.
+
+ <_>
+ 9 0 2 17 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 6 4 -1.
+
+ <_>
+ 11 0 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 1 6 4 -1.
+
+ <_>
+ 7 1 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 12 1 6 16 -1.
+
+ <_>
+ 14 1 2 16 3.
+
+
+
+ <_>
+
+ <_>
+ 0 5 18 8 -1.
+
+ <_>
+ 0 5 9 4 2.
+
+ <_>
+ 9 9 9 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 10 4 -1.
+
+ <_>
+ 13 15 5 2 2.
+
+ <_>
+ 8 17 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 1 4 8 -1.
+
+ <_>
+ 3 1 2 4 2.
+
+ <_>
+ 5 5 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 14 10 -1.
+
+ <_>
+ 10 6 7 5 2.
+
+ <_>
+ 3 11 7 5 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 6 16 -1.
+
+ <_>
+ 4 1 2 16 3.
+
+
+
+ <_>
+
+ <_>
+ 0 18 20 2 -1.
+
+ <_>
+ 0 19 20 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 3 -1.
+
+ <_>
+ 8 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 12 9 6 -1.
+
+ <_>
+ 0 14 9 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 3 4 -1.
+
+ <_>
+ 5 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 2 16 -1.
+
+ <_>
+ 9 11 2 8 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 13 8 -1.
+
+ <_>
+ 3 10 13 4 2.
+
+
+
+ <_>
+
+ <_>
+ 12 3 8 2 -1.
+
+ <_>
+ 12 3 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 8 4 12 -1.
+
+ <_>
+ 8 12 4 4 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 8 6 -1.
+
+ <_>
+ 15 3 4 3 2.
+
+ <_>
+ 11 6 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 19 -1.
+
+ <_>
+ 9 1 2 19 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 6 4 -1.
+
+ <_>
+ 11 0 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 3 1 9 3 -1.
+
+ <_>
+ 6 1 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 10 4 -1.
+
+ <_>
+ 13 15 5 2 2.
+
+ <_>
+ 8 17 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 6 10 -1.
+
+ <_>
+ 3 3 3 10 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 15 15 -1.
+
+ <_>
+ 3 9 15 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 5 8 6 -1.
+
+ <_>
+ 6 7 8 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 12 10 -1.
+
+ <_>
+ 10 4 6 5 2.
+
+ <_>
+ 4 9 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 4 4 4 -1.
+
+ <_>
+ 8 4 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 15 11 1 2 -1.
+
+ <_>
+ 15 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 11 2 2 -1.
+
+ <_>
+ 3 12 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 11 1 3 -1.
+
+ <_>
+ 16 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 15 6 4 -1.
+
+ <_>
+ 3 15 3 2 2.
+
+ <_>
+ 6 17 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 7 8 2 -1.
+
+ <_>
+ 6 8 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 11 1 3 -1.
+
+ <_>
+ 3 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 12 2 -1.
+
+ <_>
+ 6 1 12 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 15 6 2 -1.
+
+ <_>
+ 7 16 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 4 6 -1.
+
+ <_>
+ 0 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 12 12 2 -1.
+
+ <_>
+ 8 12 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 3 1 9 -1.
+
+ <_>
+ 6 6 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 10 17 3 2 -1.
+
+ <_>
+ 11 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 4 -1.
+
+ <_>
+ 9 6 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 17 3 2 -1.
+
+ <_>
+ 8 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 17 3 3 -1.
+
+ <_>
+ 11 17 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 3 2 -1.
+
+ <_>
+ 8 13 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 6 2 -1.
+
+ <_>
+ 11 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 11 14 4 -1.
+
+ <_>
+ 3 13 14 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 10 18 4 -1.
+
+ <_>
+ 10 10 9 2 2.
+
+ <_>
+ 1 12 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 10 3 3 -1.
+
+ <_>
+ 0 11 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 1 6 6 -1.
+
+ <_>
+ 11 1 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 6 -1.
+
+ <_>
+ 9 7 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 9 -1.
+
+ <_>
+ 1 3 18 3 3.
+
+
+
+ <_>
+
+ <_>
+ 12 10 2 6 -1.
+
+ <_>
+ 12 13 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 19 8 -1.
+
+ <_>
+ 0 9 19 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 9 -1.
+
+ <_>
+ 9 0 2 9 3.
+
+
+
+ <_>
+
+ <_>
+ 5 3 6 1 -1.
+
+ <_>
+ 7 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 6 1 -1.
+
+ <_>
+ 13 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 10 4 6 -1.
+
+ <_>
+ 5 13 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 3 6 1 -1.
+
+ <_>
+ 13 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 12 6 -1.
+
+ <_>
+ 4 6 12 2 3.
+
+
+
+ <_>
+
+ <_>
+ 15 12 2 6 -1.
+
+ <_>
+ 15 14 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 3 2 2 -1.
+
+ <_>
+ 10 3 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 3 1 -1.
+
+ <_>
+ 10 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 1 4 14 -1.
+
+ <_>
+ 3 1 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 9 0 4 4 -1.
+
+ <_>
+ 11 0 2 2 2.
+
+ <_>
+ 9 2 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 1 14 -1.
+
+ <_>
+ 7 12 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 19 0 1 4 -1.
+
+ <_>
+ 19 2 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 6 4 -1.
+
+ <_>
+ 8 5 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 18 3 2 -1.
+
+ <_>
+ 10 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 18 3 2 -1.
+
+ <_>
+ 9 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 6 -1.
+
+ <_>
+ 4 7 12 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 2 6 -1.
+
+ <_>
+ 3 14 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 8 2 12 -1.
+
+ <_>
+ 10 12 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 18 3 2 -1.
+
+ <_>
+ 8 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 6 2 -1.
+
+ <_>
+ 11 0 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 9 3 -1.
+
+ <_>
+ 5 12 9 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 6 2 -1.
+
+ <_>
+ 11 0 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 1 18 5 -1.
+
+ <_>
+ 7 1 6 5 3.
+
+
+
+ <_>
+
+ <_>
+ 8 0 4 4 -1.
+
+ <_>
+ 10 0 2 2 2.
+
+ <_>
+ 8 2 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 12 1 3 -1.
+
+ <_>
+ 3 13 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 5 3 -1.
+
+ <_>
+ 8 15 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 10 12 -1.
+
+ <_>
+ 5 4 5 6 2.
+
+ <_>
+ 10 10 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 9 12 -1.
+
+ <_>
+ 9 10 9 4 3.
+
+
+
+ <_>
+
+ <_>
+ 2 2 12 14 -1.
+
+ <_>
+ 2 2 6 7 2.
+
+ <_>
+ 8 9 6 7 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 12 2 -1.
+
+ <_>
+ 8 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 6 4 -1.
+
+ <_>
+ 7 6 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 11 8 -1.
+
+ <_>
+ 4 9 11 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 10 16 4 -1.
+
+ <_>
+ 3 12 16 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 16 2 -1.
+
+ <_>
+ 0 1 16 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 2 -1.
+
+ <_>
+ 9 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 6 10 -1.
+
+ <_>
+ 3 2 3 5 2.
+
+ <_>
+ 6 7 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 10 5 8 15 -1.
+
+ <_>
+ 10 10 8 5 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 8 6 -1.
+
+ <_>
+ 3 14 4 3 2.
+
+ <_>
+ 7 17 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 2 2 2 -1.
+
+ <_>
+ 14 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 10 7 6 -1.
+
+ <_>
+ 1 13 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 15 4 4 3 -1.
+
+ <_>
+ 15 4 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 9 14 6 -1.
+
+ <_>
+ 2 9 7 3 2.
+
+ <_>
+ 9 12 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 4 -1.
+
+ <_>
+ 5 9 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 9 8 8 -1.
+
+ <_>
+ 6 9 4 4 2.
+
+ <_>
+ 10 13 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 14 1 3 2 -1.
+
+ <_>
+ 14 2 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 4 4 2 -1.
+
+ <_>
+ 3 4 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 10 2 8 -1.
+
+ <_>
+ 11 14 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 5 3 -1.
+
+ <_>
+ 0 1 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 5 18 8 -1.
+
+ <_>
+ 11 5 9 4 2.
+
+ <_>
+ 2 9 9 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 1 6 -1.
+
+ <_>
+ 6 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 19 1 1 3 -1.
+
+ <_>
+ 19 2 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 6 -1.
+
+ <_>
+ 9 6 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 19 1 1 3 -1.
+
+ <_>
+ 19 2 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 2 3 -1.
+
+ <_>
+ 3 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 4 8 12 -1.
+
+ <_>
+ 12 4 4 6 2.
+
+ <_>
+ 8 10 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 6 3 -1.
+
+ <_>
+ 7 2 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 1 9 10 -1.
+
+ <_>
+ 6 6 9 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 6 12 -1.
+
+ <_>
+ 2 4 2 12 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 3 -1.
+
+ <_>
+ 15 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 14 5 3 -1.
+
+ <_>
+ 7 15 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 3 3 -1.
+
+ <_>
+ 15 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 14 8 3 -1.
+
+ <_>
+ 6 15 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 3 3 -1.
+
+ <_>
+ 15 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 13 3 3 -1.
+
+ <_>
+ 2 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 7 12 12 -1.
+
+ <_>
+ 10 7 6 6 2.
+
+ <_>
+ 4 13 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 6 -1.
+
+ <_>
+ 10 7 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 8 9 5 2 -1.
+
+ <_>
+ 8 10 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 3 4 -1.
+
+ <_>
+ 9 6 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 8 -1.
+
+ <_>
+ 9 10 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 3 6 -1.
+
+ <_>
+ 8 7 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 3 3 -1.
+
+ <_>
+ 12 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 6 1 -1.
+
+ <_>
+ 7 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 10 3 -1.
+
+ <_>
+ 5 7 10 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 3 6 9 -1.
+
+ <_>
+ 7 6 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 7 9 1 -1.
+
+ <_>
+ 9 7 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 8 16 8 -1.
+
+ <_>
+ 2 12 16 4 2.
+
+
+
+ <_>
+
+ <_>
+ 14 6 2 6 -1.
+
+ <_>
+ 14 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 5 6 15 -1.
+
+ <_>
+ 1 10 6 5 3.
+
+
+
+ <_>
+
+ <_>
+ 10 0 6 9 -1.
+
+ <_>
+ 10 3 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 7 14 -1.
+
+ <_>
+ 6 13 7 7 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 3 6 -1.
+
+ <_>
+ 13 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 8 15 4 -1.
+
+ <_>
+ 6 8 5 4 3.
+
+
+
+ <_>
+
+ <_>
+ 11 2 3 10 -1.
+
+ <_>
+ 11 7 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 3 7 4 6 -1.
+
+ <_>
+ 3 9 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 3 6 10 -1.
+
+ <_>
+ 15 3 2 10 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 8 10 -1.
+
+ <_>
+ 5 7 4 5 2.
+
+ <_>
+ 9 12 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 4 12 12 -1.
+
+ <_>
+ 10 4 6 6 2.
+
+ <_>
+ 4 10 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 4 6 9 -1.
+
+ <_>
+ 3 4 2 9 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 2 5 -1.
+
+ <_>
+ 11 3 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 3 2 5 -1.
+
+ <_>
+ 8 3 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 10 14 2 3 -1.
+
+ <_>
+ 10 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 12 6 2 -1.
+
+ <_>
+ 8 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 12 6 -1.
+
+ <_>
+ 4 14 12 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 5 9 -1.
+
+ <_>
+ 11 14 5 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 15 3 2 -1.
+
+ <_>
+ 6 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 5 -1.
+
+ <_>
+ 12 0 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 6 7 -1.
+
+ <_>
+ 8 5 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 1 9 -1.
+
+ <_>
+ 13 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 4 8 -1.
+
+ <_>
+ 3 2 2 4 2.
+
+ <_>
+ 5 6 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 13 12 4 6 -1.
+
+ <_>
+ 13 14 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 4 6 -1.
+
+ <_>
+ 3 14 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 11 3 4 -1.
+
+ <_>
+ 13 13 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 4 4 3 -1.
+
+ <_>
+ 4 5 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 5 11 8 -1.
+
+ <_>
+ 7 9 11 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 8 3 4 -1.
+
+ <_>
+ 8 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 1 6 1 -1.
+
+ <_>
+ 11 1 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 3 3 -1.
+
+ <_>
+ 5 6 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 20 6 -1.
+
+ <_>
+ 10 9 10 3 2.
+
+ <_>
+ 0 12 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 3 5 -1.
+
+ <_>
+ 9 6 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 1 3 -1.
+
+ <_>
+ 11 1 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 4 2 -1.
+
+ <_>
+ 4 3 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 6 4 3 -1.
+
+ <_>
+ 12 7 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 6 4 -1.
+
+ <_>
+ 7 0 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 8 -1.
+
+ <_>
+ 10 7 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 2 -1.
+
+ <_>
+ 10 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 7 14 4 -1.
+
+ <_>
+ 13 7 7 2 2.
+
+ <_>
+ 6 9 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 3 6 -1.
+
+ <_>
+ 0 7 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 11 3 4 -1.
+
+ <_>
+ 13 13 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 3 4 -1.
+
+ <_>
+ 4 13 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 12 8 -1.
+
+ <_>
+ 11 9 6 4 2.
+
+ <_>
+ 5 13 6 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 1 3 -1.
+
+ <_>
+ 9 13 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 15 2 4 -1.
+
+ <_>
+ 10 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 1 -1.
+
+ <_>
+ 9 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 6 6 -1.
+
+ <_>
+ 15 3 3 3 2.
+
+ <_>
+ 12 6 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 10 6 -1.
+
+ <_>
+ 0 6 10 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 3 8 14 -1.
+
+ <_>
+ 12 3 4 7 2.
+
+ <_>
+ 8 10 4 7 2.
+
+
+
+ <_>
+
+ <_>
+ 4 4 7 15 -1.
+
+ <_>
+ 4 9 7 5 3.
+
+
+
+ <_>
+
+ <_>
+ 12 2 6 8 -1.
+
+ <_>
+ 15 2 3 4 2.
+
+ <_>
+ 12 6 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 6 8 -1.
+
+ <_>
+ 2 2 3 4 2.
+
+ <_>
+ 5 6 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 13 18 7 -1.
+
+ <_>
+ 8 13 6 7 3.
+
+
+
+ <_>
+
+ <_>
+ 4 3 8 14 -1.
+
+ <_>
+ 4 3 4 7 2.
+
+ <_>
+ 8 10 4 7 2.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 6 -1.
+
+ <_>
+ 18 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 6 -1.
+
+ <_>
+ 18 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 2 6 -1.
+
+ <_>
+ 0 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 5 18 6 -1.
+
+ <_>
+ 1 7 18 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 6 7 -1.
+
+ <_>
+ 3 2 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 7 3 6 14 -1.
+
+ <_>
+ 7 10 6 7 2.
+
+
+
+ <_>
+
+ <_>
+ 3 7 13 10 -1.
+
+ <_>
+ 3 12 13 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 15 2 2 -1.
+
+ <_>
+ 11 16 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 16 4 -1.
+
+ <_>
+ 2 11 8 2 2.
+
+ <_>
+ 10 13 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 6 4 -1.
+
+ <_>
+ 16 7 3 2 2.
+
+ <_>
+ 13 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 3 9 -1.
+
+ <_>
+ 6 13 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 14 6 1 6 -1.
+
+ <_>
+ 14 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 10 4 1 -1.
+
+ <_>
+ 7 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 8 15 5 -1.
+
+ <_>
+ 8 8 5 5 3.
+
+
+
+ <_>
+
+ <_>
+ 1 6 5 4 -1.
+
+ <_>
+ 1 8 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 1 17 6 -1.
+
+ <_>
+ 3 3 17 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 7 8 2 -1.
+
+ <_>
+ 10 7 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 2 -1.
+
+ <_>
+ 10 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 2 -1.
+
+ <_>
+ 9 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 9 4 2 -1.
+
+ <_>
+ 8 10 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 8 4 3 -1.
+
+ <_>
+ 8 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 4 -1.
+
+ <_>
+ 9 5 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 3 -1.
+
+ <_>
+ 8 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 7 12 6 -1.
+
+ <_>
+ 10 7 6 3 2.
+
+ <_>
+ 4 10 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 9 8 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 3 8 -1.
+
+ <_>
+ 8 4 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 10 0 3 6 -1.
+
+ <_>
+ 11 0 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 6 3 4 8 -1.
+
+ <_>
+ 8 3 2 8 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 6 13 -1.
+
+ <_>
+ 14 3 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 3 6 -1.
+
+ <_>
+ 8 16 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 6 13 -1.
+
+ <_>
+ 14 3 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 10 4 -1.
+
+ <_>
+ 0 7 5 2 2.
+
+ <_>
+ 5 9 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 6 13 -1.
+
+ <_>
+ 14 3 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 6 13 -1.
+
+ <_>
+ 3 3 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 9 1 4 1 -1.
+
+ <_>
+ 9 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 0 2 1 -1.
+
+ <_>
+ 9 0 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 16 4 4 -1.
+
+ <_>
+ 12 16 2 2 2.
+
+ <_>
+ 10 18 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 3 -1.
+
+ <_>
+ 10 6 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 2 -1.
+
+ <_>
+ 8 5 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 5 -1.
+
+ <_>
+ 9 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 8 6 -1.
+
+ <_>
+ 6 6 8 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 12 -1.
+
+ <_>
+ 9 11 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 6 6 8 -1.
+
+ <_>
+ 4 10 6 4 2.
+
+
+
+ <_>
+
+ <_>
+ 12 2 8 5 -1.
+
+ <_>
+ 12 2 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 18 3 -1.
+
+ <_>
+ 0 9 18 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 8 -1.
+
+ <_>
+ 8 16 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 8 5 -1.
+
+ <_>
+ 4 2 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 13 11 3 4 -1.
+
+ <_>
+ 13 13 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 11 6 1 -1.
+
+ <_>
+ 7 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 3 1 -1.
+
+ <_>
+ 12 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 5 3 -1.
+
+ <_>
+ 7 14 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 11 7 6 -1.
+
+ <_>
+ 11 14 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 7 6 -1.
+
+ <_>
+ 2 14 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 14 2 6 -1.
+
+ <_>
+ 12 16 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 3 3 -1.
+
+ <_>
+ 8 15 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 5 -1.
+
+ <_>
+ 12 0 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 1 4 9 -1.
+
+ <_>
+ 8 1 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 10 3 6 1 -1.
+
+ <_>
+ 12 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 3 4 -1.
+
+ <_>
+ 8 10 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 8 13 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 18 4 2 -1.
+
+ <_>
+ 5 19 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 18 6 -1.
+
+ <_>
+ 2 3 18 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 2 -1.
+
+ <_>
+ 7 0 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 8 6 2 -1.
+
+ <_>
+ 16 8 3 1 2.
+
+ <_>
+ 13 9 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 3 6 -1.
+
+ <_>
+ 6 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 13 20 4 -1.
+
+ <_>
+ 10 13 10 2 2.
+
+ <_>
+ 0 15 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 5 -1.
+
+ <_>
+ 9 7 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 2 2 -1.
+
+ <_>
+ 11 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 8 6 2 -1.
+
+ <_>
+ 1 8 3 1 2.
+
+ <_>
+ 4 9 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 20 2 -1.
+
+ <_>
+ 10 2 10 1 2.
+
+ <_>
+ 0 3 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 5 3 -1.
+
+ <_>
+ 7 15 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 6 -1.
+
+ <_>
+ 10 13 3 3 2.
+
+ <_>
+ 7 16 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 3 -1.
+
+ <_>
+ 9 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 11 1 6 -1.
+
+ <_>
+ 16 13 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 11 1 6 -1.
+
+ <_>
+ 3 13 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 14 12 -1.
+
+ <_>
+ 11 4 7 6 2.
+
+ <_>
+ 4 10 7 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 3 3 -1.
+
+ <_>
+ 13 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 8 3 -1.
+
+ <_>
+ 6 7 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 3 3 -1.
+
+ <_>
+ 13 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 3 1 4 10 -1.
+
+ <_>
+ 3 1 2 5 2.
+
+ <_>
+ 5 6 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 2 -1.
+
+ <_>
+ 5 7 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 9 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 15 12 2 3 -1.
+
+ <_>
+ 15 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 8 3 4 -1.
+
+ <_>
+ 8 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 13 4 1 12 -1.
+
+ <_>
+ 13 10 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 12 -1.
+
+ <_>
+ 4 5 6 6 2.
+
+ <_>
+ 10 11 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 7 3 -1.
+
+ <_>
+ 7 15 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 2 3 -1.
+
+ <_>
+ 3 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 14 2 -1.
+
+ <_>
+ 10 2 7 1 2.
+
+ <_>
+ 3 3 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 3 10 -1.
+
+ <_>
+ 1 1 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 6 5 -1.
+
+ <_>
+ 11 0 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 6 2 -1.
+
+ <_>
+ 8 7 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 10 -1.
+
+ <_>
+ 7 6 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 18 3 -1.
+
+ <_>
+ 7 1 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 6 -1.
+
+ <_>
+ 16 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 3 7 6 -1.
+
+ <_>
+ 6 6 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 12 2 -1.
+
+ <_>
+ 8 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 17 10 -1.
+
+ <_>
+ 0 9 17 5 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 15 16 -1.
+
+ <_>
+ 3 12 15 8 2.
+
+
+
+ <_>
+
+ <_>
+ 7 15 6 4 -1.
+
+ <_>
+ 7 17 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 15 2 4 9 -1.
+
+ <_>
+ 15 2 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 2 3 3 2 -1.
+
+ <_>
+ 2 4 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 6 7 9 -1.
+
+ <_>
+ 13 9 7 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 3 -1.
+
+ <_>
+ 8 12 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 20 6 -1.
+
+ <_>
+ 10 2 10 3 2.
+
+ <_>
+ 0 5 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 2 6 10 -1.
+
+ <_>
+ 3 2 3 5 2.
+
+ <_>
+ 6 7 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 13 10 3 4 -1.
+
+ <_>
+ 13 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 10 3 4 -1.
+
+ <_>
+ 4 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 3 -1.
+
+ <_>
+ 9 5 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 8 -1.
+
+ <_>
+ 7 10 6 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 11 20 6 -1.
+
+ <_>
+ 0 14 20 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 13 4 6 -1.
+
+ <_>
+ 4 13 2 3 2.
+
+ <_>
+ 6 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 12 -1.
+
+ <_>
+ 10 0 4 6 2.
+
+ <_>
+ 6 6 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 2 0 15 2 -1.
+
+ <_>
+ 2 1 15 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 3 -1.
+
+ <_>
+ 9 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 1 2 -1.
+
+ <_>
+ 3 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 3 3 1 -1.
+
+ <_>
+ 8 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 17 7 3 6 -1.
+
+ <_>
+ 17 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 2 3 2 -1.
+
+ <_>
+ 8 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 4 5 3 -1.
+
+ <_>
+ 11 5 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 5 3 -1.
+
+ <_>
+ 4 5 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 19 3 1 2 -1.
+
+ <_>
+ 19 4 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 4 3 -1.
+
+ <_>
+ 5 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 17 7 3 6 -1.
+
+ <_>
+ 17 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 7 3 6 -1.
+
+ <_>
+ 0 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 9 -1.
+
+ <_>
+ 14 5 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 5 6 -1.
+
+ <_>
+ 0 6 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 5 6 2 -1.
+
+ <_>
+ 12 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 6 2 -1.
+
+ <_>
+ 6 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 1 4 6 -1.
+
+ <_>
+ 8 3 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 6 -1.
+
+ <_>
+ 0 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 8 3 -1.
+
+ <_>
+ 6 7 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 5 9 -1.
+
+ <_>
+ 0 4 5 3 3.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 15 -1.
+
+ <_>
+ 16 0 2 15 2.
+
+
+
+ <_>
+
+ <_>
+ 1 10 3 2 -1.
+
+ <_>
+ 1 11 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 4 1 10 -1.
+
+ <_>
+ 14 9 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 4 12 -1.
+
+ <_>
+ 2 1 2 12 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 4 2 -1.
+
+ <_>
+ 11 11 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 11 4 2 -1.
+
+ <_>
+ 7 11 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 8 15 5 -1.
+
+ <_>
+ 8 8 5 5 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 6 10 -1.
+
+ <_>
+ 3 0 3 10 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 3 2 -1.
+
+ <_>
+ 12 4 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 3 8 -1.
+
+ <_>
+ 8 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 5 3 -1.
+
+ <_>
+ 8 15 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 14 4 3 -1.
+
+ <_>
+ 7 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 4 3 2 -1.
+
+ <_>
+ 12 4 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 15 14 4 -1.
+
+ <_>
+ 3 15 7 2 2.
+
+ <_>
+ 10 17 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 16 4 -1.
+
+ <_>
+ 10 2 8 2 2.
+
+ <_>
+ 2 4 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 6 12 -1.
+
+ <_>
+ 3 8 3 12 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 2 -1.
+
+ <_>
+ 5 7 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 5 -1.
+
+ <_>
+ 10 7 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 6 4 -1.
+
+ <_>
+ 16 7 3 2 2.
+
+ <_>
+ 13 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 13 8 2 -1.
+
+ <_>
+ 0 14 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 6 4 -1.
+
+ <_>
+ 16 7 3 2 2.
+
+ <_>
+ 13 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 6 4 -1.
+
+ <_>
+ 1 7 3 2 2.
+
+ <_>
+ 4 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 12 6 1 12 -1.
+
+ <_>
+ 12 12 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 6 -1.
+
+ <_>
+ 10 5 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 14 12 2 3 -1.
+
+ <_>
+ 14 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 3 -1.
+
+ <_>
+ 4 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 3 -1.
+
+ <_>
+ 8 13 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 2 4 -1.
+
+ <_>
+ 5 2 1 2 2.
+
+ <_>
+ 6 4 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 11 3 -1.
+
+ <_>
+ 5 6 11 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 4 12 -1.
+
+ <_>
+ 7 12 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 12 13 8 5 -1.
+
+ <_>
+ 12 13 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 1 12 -1.
+
+ <_>
+ 7 12 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 6 3 -1.
+
+ <_>
+ 4 2 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 10 -1.
+
+ <_>
+ 12 5 3 5 2.
+
+ <_>
+ 9 10 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 8 12 -1.
+
+ <_>
+ 5 5 4 6 2.
+
+ <_>
+ 9 11 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 20 6 -1.
+
+ <_>
+ 0 9 20 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 2 2 -1.
+
+ <_>
+ 4 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 18 12 2 -1.
+
+ <_>
+ 8 18 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 4 16 -1.
+
+ <_>
+ 7 12 4 8 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 7 8 -1.
+
+ <_>
+ 7 10 7 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 3 3 1 -1.
+
+ <_>
+ 7 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 15 2 4 -1.
+
+ <_>
+ 11 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 5 4 8 -1.
+
+ <_>
+ 3 9 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 12 -1.
+
+ <_>
+ 7 7 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 6 6 2 -1.
+
+ <_>
+ 6 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 16 4 4 6 -1.
+
+ <_>
+ 16 6 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 3 5 2 -1.
+
+ <_>
+ 3 4 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 16 4 2 -1.
+
+ <_>
+ 2 17 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 6 -1.
+
+ <_>
+ 10 13 3 3 2.
+
+ <_>
+ 7 16 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 3 4 -1.
+
+ <_>
+ 8 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 4 6 -1.
+
+ <_>
+ 0 6 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 12 3 -1.
+
+ <_>
+ 9 6 4 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 14 -1.
+
+ <_>
+ 9 6 2 14 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 10 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 12 2 4 -1.
+
+ <_>
+ 6 14 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 7 6 -1.
+
+ <_>
+ 10 14 7 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 15 2 -1.
+
+ <_>
+ 1 1 15 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 6 -1.
+
+ <_>
+ 14 0 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 3 1 -1.
+
+ <_>
+ 6 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 6 -1.
+
+ <_>
+ 14 0 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 20 10 -1.
+
+ <_>
+ 0 8 20 5 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 6 -1.
+
+ <_>
+ 14 0 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 6 6 -1.
+
+ <_>
+ 3 0 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 19 15 1 2 -1.
+
+ <_>
+ 19 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 4 8 -1.
+
+ <_>
+ 2 2 2 8 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 18 4 -1.
+
+ <_>
+ 11 1 9 2 2.
+
+ <_>
+ 2 3 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 1 2 -1.
+
+ <_>
+ 8 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 10 6 -1.
+
+ <_>
+ 10 2 5 3 2.
+
+ <_>
+ 5 5 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 4 -1.
+
+ <_>
+ 10 7 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 10 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 8 -1.
+
+ <_>
+ 8 5 4 8 3.
+
+
+
+ <_>
+
+ <_>
+ 15 15 4 3 -1.
+
+ <_>
+ 15 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 18 3 1 -1.
+
+ <_>
+ 9 18 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 13 4 3 -1.
+
+ <_>
+ 9 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 4 3 -1.
+
+ <_>
+ 7 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 19 15 1 2 -1.
+
+ <_>
+ 19 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 15 8 4 -1.
+
+ <_>
+ 0 17 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 6 4 -1.
+
+ <_>
+ 11 3 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 14 6 -1.
+
+ <_>
+ 3 16 14 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 3 6 6 -1.
+
+ <_>
+ 6 6 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 11 10 6 -1.
+
+ <_>
+ 5 14 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 10 3 4 -1.
+
+ <_>
+ 4 10 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 13 9 2 2 -1.
+
+ <_>
+ 13 9 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 6 4 -1.
+
+ <_>
+ 7 3 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 10 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 2 12 2 3 -1.
+
+ <_>
+ 2 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 3 12 -1.
+
+ <_>
+ 9 12 3 4 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 4 6 -1.
+
+ <_>
+ 3 14 2 3 2.
+
+ <_>
+ 5 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 16 15 2 2 -1.
+
+ <_>
+ 16 16 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 15 2 2 -1.
+
+ <_>
+ 2 16 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 3 -1.
+
+ <_>
+ 8 13 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 7 20 1 -1.
+
+ <_>
+ 10 7 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 8 3 -1.
+
+ <_>
+ 7 6 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 8 2 -1.
+
+ <_>
+ 9 7 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 5 -1.
+
+ <_>
+ 10 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 5 -1.
+
+ <_>
+ 9 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 11 1 3 5 -1.
+
+ <_>
+ 12 1 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 2 3 6 -1.
+
+ <_>
+ 7 2 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 14 14 6 5 -1.
+
+ <_>
+ 14 14 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 2 -1.
+
+ <_>
+ 9 9 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 1 3 -1.
+
+ <_>
+ 10 8 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 2 2 -1.
+
+ <_>
+ 6 6 1 1 2.
+
+ <_>
+ 7 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 18 4 -1.
+
+ <_>
+ 11 11 9 2 2.
+
+ <_>
+ 2 13 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 2 2 -1.
+
+ <_>
+ 6 6 1 1 2.
+
+ <_>
+ 7 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 15 20 2 -1.
+
+ <_>
+ 0 16 20 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 14 2 3 -1.
+
+ <_>
+ 4 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 2 3 -1.
+
+ <_>
+ 8 8 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 10 2 3 -1.
+
+ <_>
+ 9 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 10 4 -1.
+
+ <_>
+ 5 6 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 6 4 -1.
+
+ <_>
+ 12 7 3 2 2.
+
+ <_>
+ 9 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 3 6 -1.
+
+ <_>
+ 4 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 15 4 4 -1.
+
+ <_>
+ 13 15 2 2 2.
+
+ <_>
+ 11 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 8 4 2 -1.
+
+ <_>
+ 7 9 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 1 4 3 -1.
+
+ <_>
+ 13 1 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 15 4 4 -1.
+
+ <_>
+ 5 15 2 2 2.
+
+ <_>
+ 7 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 4 7 -1.
+
+ <_>
+ 9 5 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 8 3 -1.
+
+ <_>
+ 9 6 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 15 5 3 -1.
+
+ <_>
+ 7 16 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 4 3 -1.
+
+ <_>
+ 11 10 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 9 8 10 -1.
+
+ <_>
+ 6 14 8 5 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 6 2 -1.
+
+ <_>
+ 10 11 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 6 2 -1.
+
+ <_>
+ 7 11 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 3 8 1 -1.
+
+ <_>
+ 11 3 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 3 3 2 -1.
+
+ <_>
+ 7 3 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 5 6 5 -1.
+
+ <_>
+ 14 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 2 12 -1.
+
+ <_>
+ 7 11 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 3 -1.
+
+ <_>
+ 8 12 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 1 2 3 -1.
+
+ <_>
+ 5 1 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 18 3 2 6 -1.
+
+ <_>
+ 18 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 3 2 6 -1.
+
+ <_>
+ 0 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 3 -1.
+
+ <_>
+ 9 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 4 3 -1.
+
+ <_>
+ 7 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 0 2 6 -1.
+
+ <_>
+ 18 2 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 2 6 -1.
+
+ <_>
+ 0 2 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 6 3 -1.
+
+ <_>
+ 8 15 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 2 4 -1.
+
+ <_>
+ 8 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 5 4 6 -1.
+
+ <_>
+ 8 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 2 2 -1.
+
+ <_>
+ 7 4 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 14 14 4 -1.
+
+ <_>
+ 10 14 7 2 2.
+
+ <_>
+ 3 16 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 15 6 2 -1.
+
+ <_>
+ 6 15 3 1 2.
+
+ <_>
+ 9 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 15 6 2 -1.
+
+ <_>
+ 14 16 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 12 12 8 -1.
+
+ <_>
+ 2 16 12 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 7 2 -1.
+
+ <_>
+ 7 8 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 18 2 -1.
+
+ <_>
+ 0 3 18 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 5 -1.
+
+ <_>
+ 9 6 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 3 8 -1.
+
+ <_>
+ 8 5 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 3 4 -1.
+
+ <_>
+ 10 6 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 13 3 2 -1.
+
+ <_>
+ 4 14 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 4 6 3 -1.
+
+ <_>
+ 11 4 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 6 3 -1.
+
+ <_>
+ 7 4 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 14 11 5 2 -1.
+
+ <_>
+ 14 12 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 6 9 -1.
+
+ <_>
+ 3 2 2 9 3.
+
+
+
+ <_>
+
+ <_>
+ 14 6 6 13 -1.
+
+ <_>
+ 14 6 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 14 8 -1.
+
+ <_>
+ 3 6 7 4 2.
+
+ <_>
+ 10 10 7 4 2.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 11 -1.
+
+ <_>
+ 16 0 2 11 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 12 12 -1.
+
+ <_>
+ 3 4 6 6 2.
+
+ <_>
+ 9 10 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 5 3 -1.
+
+ <_>
+ 11 5 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 4 2 -1.
+
+ <_>
+ 4 12 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 2 2 -1.
+
+ <_>
+ 10 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 2 2 -1.
+
+ <_>
+ 9 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 2 -1.
+
+ <_>
+ 10 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 3 3 -1.
+
+ <_>
+ 5 7 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 0 3 3 -1.
+
+ <_>
+ 11 0 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 6 2 -1.
+
+ <_>
+ 5 6 3 1 2.
+
+ <_>
+ 8 7 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 16 4 3 -1.
+
+ <_>
+ 12 17 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 3 2 -1.
+
+ <_>
+ 3 13 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 3 2 -1.
+
+ <_>
+ 9 13 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 11 16 4 -1.
+
+ <_>
+ 1 11 8 2 2.
+
+ <_>
+ 9 13 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 5 3 -1.
+
+ <_>
+ 4 5 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 16 4 3 -1.
+
+ <_>
+ 12 17 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 2 2 -1.
+
+ <_>
+ 9 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 9 4 2 -1.
+
+ <_>
+ 8 10 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 8 4 3 -1.
+
+ <_>
+ 8 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 13 6 3 -1.
+
+ <_>
+ 2 13 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 16 14 3 2 -1.
+
+ <_>
+ 16 15 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 18 18 2 -1.
+
+ <_>
+ 7 18 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 16 14 3 2 -1.
+
+ <_>
+ 16 15 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 14 3 2 -1.
+
+ <_>
+ 1 15 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 6 3 -1.
+
+ <_>
+ 7 15 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 14 8 3 -1.
+
+ <_>
+ 5 15 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 6 4 14 -1.
+
+ <_>
+ 10 6 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 4 14 -1.
+
+ <_>
+ 8 6 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 3 -1.
+
+ <_>
+ 13 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 16 6 1 -1.
+
+ <_>
+ 9 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 3 3 -1.
+
+ <_>
+ 9 13 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 0 3 3 -1.
+
+ <_>
+ 8 0 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 0 16 18 -1.
+
+ <_>
+ 4 9 16 9 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 16 14 -1.
+
+ <_>
+ 1 8 16 7 2.
+
+
+
+ <_>
+
+ <_>
+ 3 9 15 4 -1.
+
+ <_>
+ 8 9 5 4 3.
+
+
+
+ <_>
+
+ <_>
+ 6 12 7 3 -1.
+
+ <_>
+ 6 13 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 15 2 3 -1.
+
+ <_>
+ 14 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 3 16 14 -1.
+
+ <_>
+ 2 3 8 7 2.
+
+ <_>
+ 10 10 8 7 2.
+
+
+
+ <_>
+
+ <_>
+ 16 2 4 18 -1.
+
+ <_>
+ 18 2 2 9 2.
+
+ <_>
+ 16 11 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 4 15 2 3 -1.
+
+ <_>
+ 4 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 2 4 18 -1.
+
+ <_>
+ 18 2 2 9 2.
+
+ <_>
+ 16 11 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 8 3 -1.
+
+ <_>
+ 1 2 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 3 -1.
+
+ <_>
+ 8 12 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 5 9 -1.
+
+ <_>
+ 5 14 5 3 3.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 11 -1.
+
+ <_>
+ 16 0 2 11 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 1 -1.
+
+ <_>
+ 9 0 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 7 -1.
+
+ <_>
+ 17 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 1 3 3 7 -1.
+
+ <_>
+ 2 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 7 8 6 12 -1.
+
+ <_>
+ 7 12 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 4 11 -1.
+
+ <_>
+ 2 0 2 11 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 20 -1.
+
+ <_>
+ 14 0 3 20 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 1 2 -1.
+
+ <_>
+ 0 4 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 10 8 -1.
+
+ <_>
+ 10 5 5 4 2.
+
+ <_>
+ 5 9 5 4 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 12 4 -1.
+
+ <_>
+ 4 7 6 2 2.
+
+ <_>
+ 10 9 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 6 4 -1.
+
+ <_>
+ 5 1 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 6 4 -1.
+
+ <_>
+ 12 7 3 2 2.
+
+ <_>
+ 9 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 2 6 -1.
+
+ <_>
+ 5 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 16 6 4 -1.
+
+ <_>
+ 12 16 3 2 2.
+
+ <_>
+ 9 18 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 12 -1.
+
+ <_>
+ 9 10 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 18 -1.
+
+ <_>
+ 9 1 2 18 3.
+
+
+
+ <_>
+
+ <_>
+ 4 12 12 2 -1.
+
+ <_>
+ 8 12 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 6 2 -1.
+
+ <_>
+ 8 9 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 0 3 6 -1.
+
+ <_>
+ 9 0 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 11 18 3 2 -1.
+
+ <_>
+ 11 19 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 17 4 -1.
+
+ <_>
+ 1 3 17 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 8 4 12 -1.
+
+ <_>
+ 11 8 2 12 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 2 17 -1.
+
+ <_>
+ 12 3 1 17 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 6 1 -1.
+
+ <_>
+ 6 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 3 2 3 -1.
+
+ <_>
+ 18 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 4 3 4 -1.
+
+ <_>
+ 8 6 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 10 -1.
+
+ <_>
+ 4 10 12 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 18 4 2 -1.
+
+ <_>
+ 7 18 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 17 2 3 6 -1.
+
+ <_>
+ 17 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 6 -1.
+
+ <_>
+ 9 7 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 17 2 3 6 -1.
+
+ <_>
+ 17 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 0 3 4 -1.
+
+ <_>
+ 9 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 12 6 3 -1.
+
+ <_>
+ 0 13 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 2 3 -1.
+
+ <_>
+ 3 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 12 7 -1.
+
+ <_>
+ 9 6 4 7 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 6 -1.
+
+ <_>
+ 0 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 6 1 3 -1.
+
+ <_>
+ 14 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 0 3 14 -1.
+
+ <_>
+ 3 0 1 14 3.
+
+
+
+ <_>
+
+ <_>
+ 12 14 5 6 -1.
+
+ <_>
+ 12 16 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 14 5 6 -1.
+
+ <_>
+ 4 16 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 2 2 -1.
+
+ <_>
+ 12 10 1 1 2.
+
+ <_>
+ 11 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 0 3 14 -1.
+
+ <_>
+ 6 0 1 14 3.
+
+
+
+ <_>
+
+ <_>
+ 10 15 2 3 -1.
+
+ <_>
+ 10 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 2 3 -1.
+
+ <_>
+ 0 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 12 6 -1.
+
+ <_>
+ 5 14 12 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 3 9 -1.
+
+ <_>
+ 6 14 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 2 2 -1.
+
+ <_>
+ 12 10 1 1 2.
+
+ <_>
+ 11 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 1 3 -1.
+
+ <_>
+ 5 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 9 13 3 -1.
+
+ <_>
+ 4 10 13 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 7 15 6 -1.
+
+ <_>
+ 6 7 5 6 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 6 -1.
+
+ <_>
+ 8 5 4 6 3.
+
+
+
+ <_>
+
+ <_>
+ 8 10 4 3 -1.
+
+ <_>
+ 8 11 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 14 1 3 -1.
+
+ <_>
+ 15 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 11 5 3 -1.
+
+ <_>
+ 1 12 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 7 12 -1.
+
+ <_>
+ 7 7 7 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 6 10 -1.
+
+ <_>
+ 0 1 3 5 2.
+
+ <_>
+ 3 6 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 16 1 4 3 -1.
+
+ <_>
+ 16 2 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 3 -1.
+
+ <_>
+ 5 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 2 3 5 -1.
+
+ <_>
+ 13 2 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 0 3 4 6 -1.
+
+ <_>
+ 0 5 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 8 13 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 18 3 1 -1.
+
+ <_>
+ 9 18 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 2 2 -1.
+
+ <_>
+ 12 10 1 1 2.
+
+ <_>
+ 11 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 10 2 2 -1.
+
+ <_>
+ 7 10 1 1 2.
+
+ <_>
+ 8 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 4 4 -1.
+
+ <_>
+ 11 13 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 3 8 -1.
+
+ <_>
+ 9 12 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 13 0 6 3 -1.
+
+ <_>
+ 13 1 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 3 4 -1.
+
+ <_>
+ 9 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 10 -1.
+
+ <_>
+ 10 7 5 5 2.
+
+ <_>
+ 5 12 5 5 2.
+
+
+
+ <_>
+
+ <_>
+ 3 18 8 2 -1.
+
+ <_>
+ 3 18 4 1 2.
+
+ <_>
+ 7 19 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 2 6 8 -1.
+
+ <_>
+ 12 2 2 8 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 6 8 -1.
+
+ <_>
+ 6 2 2 8 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 7 -1.
+
+ <_>
+ 12 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 7 11 2 1 -1.
+
+ <_>
+ 8 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 14 1 3 -1.
+
+ <_>
+ 15 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 15 2 2 -1.
+
+ <_>
+ 7 15 1 1 2.
+
+ <_>
+ 8 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 14 1 3 -1.
+
+ <_>
+ 15 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 7 -1.
+
+ <_>
+ 7 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 7 -1.
+
+ <_>
+ 18 1 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 2 0 8 20 -1.
+
+ <_>
+ 2 10 8 10 2.
+
+
+
+ <_>
+
+ <_>
+ 3 0 15 6 -1.
+
+ <_>
+ 3 2 15 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 3 12 2 -1.
+
+ <_>
+ 4 4 12 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 5 -1.
+
+ <_>
+ 16 0 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 3 4 -1.
+
+ <_>
+ 8 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 5 -1.
+
+ <_>
+ 16 0 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 6 13 -1.
+
+ <_>
+ 3 7 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 5 -1.
+
+ <_>
+ 16 0 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 4 5 -1.
+
+ <_>
+ 2 0 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 14 12 3 6 -1.
+
+ <_>
+ 14 14 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 3 6 -1.
+
+ <_>
+ 3 14 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 16 1 4 3 -1.
+
+ <_>
+ 16 2 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 2 10 -1.
+
+ <_>
+ 8 7 1 5 2.
+
+ <_>
+ 9 12 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 4 4 -1.
+
+ <_>
+ 11 13 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 4 3 -1.
+
+ <_>
+ 0 2 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 4 1 3 -1.
+
+ <_>
+ 13 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 15 3 5 -1.
+
+ <_>
+ 8 15 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 5 -1.
+
+ <_>
+ 10 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 5 -1.
+
+ <_>
+ 9 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 10 6 4 14 -1.
+
+ <_>
+ 10 6 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 5 6 -1.
+
+ <_>
+ 0 7 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 4 -1.
+
+ <_>
+ 9 5 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 18 10 -1.
+
+ <_>
+ 6 0 6 10 3.
+
+
+
+ <_>
+
+ <_>
+ 10 6 4 14 -1.
+
+ <_>
+ 10 6 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 4 14 -1.
+
+ <_>
+ 8 6 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 13 4 1 3 -1.
+
+ <_>
+ 13 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 1 2 3 -1.
+
+ <_>
+ 6 1 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 18 -1.
+
+ <_>
+ 19 1 1 9 2.
+
+ <_>
+ 18 10 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 4 3 -1.
+
+ <_>
+ 2 2 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 18 -1.
+
+ <_>
+ 19 1 1 9 2.
+
+ <_>
+ 18 10 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 1 14 4 6 -1.
+
+ <_>
+ 1 14 2 3 2.
+
+ <_>
+ 3 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 7 6 -1.
+
+ <_>
+ 10 13 7 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 10 6 10 -1.
+
+ <_>
+ 0 10 3 5 2.
+
+ <_>
+ 3 15 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 4 -1.
+
+ <_>
+ 12 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 10 5 6 -1.
+
+ <_>
+ 5 13 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 6 1 8 -1.
+
+ <_>
+ 14 10 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 18 6 -1.
+
+ <_>
+ 1 7 9 3 2.
+
+ <_>
+ 10 10 9 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 2 -1.
+
+ <_>
+ 9 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 4 5 -1.
+
+ <_>
+ 7 9 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 3 -1.
+
+ <_>
+ 9 6 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 4 -1.
+
+ <_>
+ 7 0 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 15 2 4 -1.
+
+ <_>
+ 7 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 19 9 -1.
+
+ <_>
+ 1 3 19 3 3.
+
+
+
+ <_>
+
+ <_>
+ 3 7 3 6 -1.
+
+ <_>
+ 3 9 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 7 4 4 -1.
+
+ <_>
+ 15 7 2 2 2.
+
+ <_>
+ 13 9 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 7 4 4 -1.
+
+ <_>
+ 3 7 2 2 2.
+
+ <_>
+ 5 9 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 10 8 -1.
+
+ <_>
+ 9 10 10 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 8 14 12 -1.
+
+ <_>
+ 3 14 14 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 5 10 12 -1.
+
+ <_>
+ 11 5 5 6 2.
+
+ <_>
+ 6 11 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 5 -1.
+
+ <_>
+ 9 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 4 -1.
+
+ <_>
+ 9 6 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 5 -1.
+
+ <_>
+ 9 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 6 5 -1.
+
+ <_>
+ 8 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 2 6 1 -1.
+
+ <_>
+ 13 2 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 6 1 -1.
+
+ <_>
+ 5 2 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 3 -1.
+
+ <_>
+ 13 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 10 1 4 -1.
+
+ <_>
+ 0 12 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 3 -1.
+
+ <_>
+ 13 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 18 3 2 -1.
+
+ <_>
+ 9 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 15 9 2 -1.
+
+ <_>
+ 6 16 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 4 -1.
+
+ <_>
+ 18 6 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 3 -1.
+
+ <_>
+ 5 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 16 3 2 -1.
+
+ <_>
+ 15 17 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 3 9 -1.
+
+ <_>
+ 0 3 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 9 8 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 8 8 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 6 -1.
+
+ <_>
+ 9 5 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 3 4 -1.
+
+ <_>
+ 9 6 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 8 12 -1.
+
+ <_>
+ 11 6 4 6 2.
+
+ <_>
+ 7 12 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 8 12 -1.
+
+ <_>
+ 5 6 4 6 2.
+
+ <_>
+ 9 12 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 16 3 2 -1.
+
+ <_>
+ 2 17 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 12 6 6 -1.
+
+ <_>
+ 2 14 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 3 -1.
+
+ <_>
+ 7 14 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 14 6 3 -1.
+
+ <_>
+ 6 15 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 15 5 3 -1.
+
+ <_>
+ 14 16 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 15 5 3 -1.
+
+ <_>
+ 14 16 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 3 6 2 -1.
+
+ <_>
+ 7 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 15 5 3 -1.
+
+ <_>
+ 1 16 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 6 -1.
+
+ <_>
+ 10 13 2 3 2.
+
+ <_>
+ 8 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 8 3 3 -1.
+
+ <_>
+ 8 8 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 12 0 5 4 -1.
+
+ <_>
+ 12 2 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 20 2 -1.
+
+ <_>
+ 0 2 10 1 2.
+
+ <_>
+ 10 3 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 4 -1.
+
+ <_>
+ 7 0 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 3 6 1 -1.
+
+ <_>
+ 6 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 18 13 2 -1.
+
+ <_>
+ 4 19 13 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 10 3 6 -1.
+
+ <_>
+ 2 12 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 6 8 -1.
+
+ <_>
+ 17 12 3 4 2.
+
+ <_>
+ 14 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 4 13 10 6 -1.
+
+ <_>
+ 4 13 5 3 2.
+
+ <_>
+ 9 16 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 12 1 2 -1.
+
+ <_>
+ 14 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 3 -1.
+
+ <_>
+ 8 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 2 2 -1.
+
+ <_>
+ 14 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 2 -1.
+
+ <_>
+ 4 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 9 2 -1.
+
+ <_>
+ 8 13 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 3 6 -1.
+
+ <_>
+ 11 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 9 12 -1.
+
+ <_>
+ 5 12 9 6 2.
+
+
+
+ <_>
+
+ <_>
+ 11 10 3 6 -1.
+
+ <_>
+ 11 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 3 6 -1.
+
+ <_>
+ 6 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 11 3 -1.
+
+ <_>
+ 5 5 11 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 5 10 -1.
+
+ <_>
+ 7 6 5 5 2.
+
+
+
+ <_>
+
+ <_>
+ 2 8 18 2 -1.
+
+ <_>
+ 2 9 18 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 17 5 3 -1.
+
+ <_>
+ 7 18 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 9 12 1 -1.
+
+ <_>
+ 9 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 14 6 6 -1.
+
+ <_>
+ 0 14 3 3 2.
+
+ <_>
+ 3 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 12 1 -1.
+
+ <_>
+ 9 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 9 12 1 -1.
+
+ <_>
+ 7 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 10 6 7 -1.
+
+ <_>
+ 14 10 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 16 2 -1.
+
+ <_>
+ 1 1 16 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 9 10 9 -1.
+
+ <_>
+ 10 12 10 3 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 10 2 -1.
+
+ <_>
+ 5 1 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 17 3 2 3 -1.
+
+ <_>
+ 17 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 3 2 3 -1.
+
+ <_>
+ 1 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 6 -1.
+
+ <_>
+ 10 7 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 6 5 4 3 -1.
+
+ <_>
+ 8 5 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 6 -1.
+
+ <_>
+ 9 5 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 3 4 12 12 -1.
+
+ <_>
+ 3 4 6 6 2.
+
+ <_>
+ 9 10 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 2 6 15 -1.
+
+ <_>
+ 11 2 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 2 2 6 17 -1.
+
+ <_>
+ 4 2 2 17 3.
+
+
+
+ <_>
+
+ <_>
+ 14 10 6 7 -1.
+
+ <_>
+ 14 10 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 0 10 6 7 -1.
+
+ <_>
+ 3 10 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 9 2 6 15 -1.
+
+ <_>
+ 11 2 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 6 15 -1.
+
+ <_>
+ 7 2 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 17 9 3 6 -1.
+
+ <_>
+ 17 11 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 7 6 6 -1.
+
+ <_>
+ 8 7 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 1 10 18 6 -1.
+
+ <_>
+ 10 10 9 3 2.
+
+ <_>
+ 1 13 9 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 9 10 9 -1.
+
+ <_>
+ 0 12 10 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 12 3 4 -1.
+
+ <_>
+ 5 14 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 3 16 12 -1.
+
+ <_>
+ 3 9 16 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 12 12 -1.
+
+ <_>
+ 1 1 6 6 2.
+
+ <_>
+ 7 7 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 10 4 2 4 -1.
+
+ <_>
+ 11 4 1 2 2.
+
+ <_>
+ 10 6 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 9 10 2 -1.
+
+ <_>
+ 0 9 5 1 2.
+
+ <_>
+ 5 10 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 3 3 -1.
+
+ <_>
+ 9 12 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 12 9 2 -1.
+
+ <_>
+ 3 13 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 13 6 -1.
+
+ <_>
+ 3 6 13 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 6 4 -1.
+
+ <_>
+ 12 7 3 2 2.
+
+ <_>
+ 9 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 6 8 -1.
+
+ <_>
+ 4 0 3 8 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 12 -1.
+
+ <_>
+ 9 11 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 4 3 10 -1.
+
+ <_>
+ 4 9 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 17 8 3 -1.
+
+ <_>
+ 6 18 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 5 10 6 -1.
+
+ <_>
+ 0 7 10 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 2 3 2 -1.
+
+ <_>
+ 13 3 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 4 5 -1.
+
+ <_>
+ 9 5 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 12 14 3 6 -1.
+
+ <_>
+ 12 16 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 11 8 2 -1.
+
+ <_>
+ 1 12 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 3 -1.
+
+ <_>
+ 7 14 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 5 3 6 -1.
+
+ <_>
+ 0 7 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 2 3 2 -1.
+
+ <_>
+ 13 3 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 14 4 6 -1.
+
+ <_>
+ 4 14 2 3 2.
+
+ <_>
+ 6 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 13 2 3 2 -1.
+
+ <_>
+ 13 3 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 2 4 12 -1.
+
+ <_>
+ 8 6 4 4 3.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 8 -1.
+
+ <_>
+ 17 0 3 4 2.
+
+ <_>
+ 14 4 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 17 3 2 -1.
+
+ <_>
+ 8 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 8 13 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 12 -1.
+
+ <_>
+ 6 0 4 6 2.
+
+ <_>
+ 10 6 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 2 10 -1.
+
+ <_>
+ 15 0 1 5 2.
+
+ <_>
+ 14 5 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 8 6 -1.
+
+ <_>
+ 5 3 4 3 2.
+
+ <_>
+ 9 6 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 10 -1.
+
+ <_>
+ 17 0 3 5 2.
+
+ <_>
+ 14 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 1 2 -1.
+
+ <_>
+ 9 15 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 10 4 3 -1.
+
+ <_>
+ 15 11 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 2 3 -1.
+
+ <_>
+ 8 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 14 4 -1.
+
+ <_>
+ 10 13 7 2 2.
+
+ <_>
+ 3 15 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 10 4 3 -1.
+
+ <_>
+ 1 11 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 6 1 -1.
+
+ <_>
+ 11 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 6 1 -1.
+
+ <_>
+ 7 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 5 16 15 -1.
+
+ <_>
+ 3 10 16 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 12 4 2 -1.
+
+ <_>
+ 8 12 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 4 12 10 -1.
+
+ <_>
+ 10 4 6 5 2.
+
+ <_>
+ 4 9 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 3 4 -1.
+
+ <_>
+ 9 6 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 8 -1.
+
+ <_>
+ 10 12 2 4 2.
+
+ <_>
+ 8 16 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 2 3 2 -1.
+
+ <_>
+ 13 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 3 2 -1.
+
+ <_>
+ 8 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 9 14 -1.
+
+ <_>
+ 9 0 3 14 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 3 -1.
+
+ <_>
+ 10 6 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 8 2 3 -1.
+
+ <_>
+ 10 9 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 4 6 -1.
+
+ <_>
+ 0 11 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 2 -1.
+
+ <_>
+ 6 1 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 14 7 3 -1.
+
+ <_>
+ 6 15 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 10 8 9 -1.
+
+ <_>
+ 8 13 8 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 3 2 -1.
+
+ <_>
+ 6 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 1 6 8 -1.
+
+ <_>
+ 17 1 3 4 2.
+
+ <_>
+ 14 5 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 6 8 -1.
+
+ <_>
+ 0 1 3 4 2.
+
+ <_>
+ 3 5 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 18 6 -1.
+
+ <_>
+ 10 2 9 3 2.
+
+ <_>
+ 1 5 9 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 2 1 -1.
+
+ <_>
+ 10 3 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 2 4 6 -1.
+
+ <_>
+ 15 2 2 3 2.
+
+ <_>
+ 13 5 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 5 1 3 -1.
+
+ <_>
+ 13 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 16 5 3 -1.
+
+ <_>
+ 2 17 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 2 4 6 -1.
+
+ <_>
+ 15 2 2 3 2.
+
+ <_>
+ 13 5 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 2 4 6 -1.
+
+ <_>
+ 3 2 2 3 2.
+
+ <_>
+ 5 5 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 1 2 -1.
+
+ <_>
+ 13 6 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 2 -1.
+
+ <_>
+ 5 6 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 9 2 2 -1.
+
+ <_>
+ 13 9 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 2 2 -1.
+
+ <_>
+ 6 9 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 17 3 2 -1.
+
+ <_>
+ 13 18 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 16 4 4 -1.
+
+ <_>
+ 6 16 2 2 2.
+
+ <_>
+ 8 18 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 16 2 3 -1.
+
+ <_>
+ 9 17 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 13 9 6 -1.
+
+ <_>
+ 0 15 9 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 6 -1.
+
+ <_>
+ 9 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 15 2 3 -1.
+
+ <_>
+ 9 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 10 18 6 -1.
+
+ <_>
+ 1 12 18 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 2 -1.
+
+ <_>
+ 8 12 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 9 6 2 -1.
+
+ <_>
+ 7 10 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 8 2 3 -1.
+
+ <_>
+ 8 9 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 17 5 3 4 -1.
+
+ <_>
+ 18 5 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 19 18 1 -1.
+
+ <_>
+ 7 19 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 3 2 -1.
+
+ <_>
+ 10 0 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 8 1 6 -1.
+
+ <_>
+ 1 10 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 17 8 3 -1.
+
+ <_>
+ 12 17 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 3 4 -1.
+
+ <_>
+ 1 5 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 3 -1.
+
+ <_>
+ 9 8 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 11 2 2 -1.
+
+ <_>
+ 7 11 1 1 2.
+
+ <_>
+ 8 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 3 2 5 -1.
+
+ <_>
+ 11 3 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 7 3 2 5 -1.
+
+ <_>
+ 8 3 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 3 -1.
+
+ <_>
+ 15 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 2 3 -1.
+
+ <_>
+ 5 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 19 15 1 -1.
+
+ <_>
+ 9 19 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 19 15 1 -1.
+
+ <_>
+ 6 19 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 3 -1.
+
+ <_>
+ 15 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 4 15 -1.
+
+ <_>
+ 7 0 2 15 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 5 -1.
+
+ <_>
+ 9 6 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 7 -1.
+
+ <_>
+ 10 5 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 16 11 3 3 -1.
+
+ <_>
+ 16 12 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 11 3 3 -1.
+
+ <_>
+ 1 12 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 8 3 -1.
+
+ <_>
+ 6 7 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 15 6 2 -1.
+
+ <_>
+ 0 16 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 6 -1.
+
+ <_>
+ 7 0 6 6 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 4 -1.
+
+ <_>
+ 7 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 14 10 4 10 -1.
+
+ <_>
+ 16 10 2 5 2.
+
+ <_>
+ 14 15 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 3 2 3 2 -1.
+
+ <_>
+ 4 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 2 2 2 -1.
+
+ <_>
+ 11 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 10 4 10 -1.
+
+ <_>
+ 2 10 2 5 2.
+
+ <_>
+ 4 15 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 13 20 6 -1.
+
+ <_>
+ 10 13 10 3 2.
+
+ <_>
+ 0 16 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 2 15 -1.
+
+ <_>
+ 1 5 1 15 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 18 4 -1.
+
+ <_>
+ 10 7 9 2 2.
+
+ <_>
+ 1 9 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 2 17 -1.
+
+ <_>
+ 1 0 1 17 2.
+
+
+
+ <_>
+
+ <_>
+ 2 6 16 6 -1.
+
+ <_>
+ 10 6 8 3 2.
+
+ <_>
+ 2 9 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 1 3 -1.
+
+ <_>
+ 8 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 2 -1.
+
+ <_>
+ 8 16 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 8 2 -1.
+
+ <_>
+ 5 2 4 1 2.
+
+ <_>
+ 9 3 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 8 6 -1.
+
+ <_>
+ 6 14 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 13 2 2 -1.
+
+ <_>
+ 9 14 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 6 -1.
+
+ <_>
+ 18 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 2 -1.
+
+ <_>
+ 9 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 6 -1.
+
+ <_>
+ 18 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 13 1 3 -1.
+
+ <_>
+ 9 14 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 6 -1.
+
+ <_>
+ 18 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 2 6 -1.
+
+ <_>
+ 0 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 3 3 -1.
+
+ <_>
+ 9 13 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 2 3 -1.
+
+ <_>
+ 3 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 13 4 3 -1.
+
+ <_>
+ 13 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 10 6 -1.
+
+ <_>
+ 5 4 10 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 4 3 -1.
+
+ <_>
+ 3 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 7 15 5 -1.
+
+ <_>
+ 8 7 5 5 3.
+
+
+
+ <_>
+
+ <_>
+ 3 7 12 2 -1.
+
+ <_>
+ 7 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 3 3 9 -1.
+
+ <_>
+ 11 3 1 9 3.
+
+
+
+ <_>
+
+ <_>
+ 8 6 4 6 -1.
+
+ <_>
+ 10 6 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 4 3 -1.
+
+ <_>
+ 9 8 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 4 9 -1.
+
+ <_>
+ 2 9 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 9 13 3 5 -1.
+
+ <_>
+ 10 13 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 3 -1.
+
+ <_>
+ 9 7 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 5 -1.
+
+ <_>
+ 10 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 8 2 -1.
+
+ <_>
+ 9 7 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 12 2 -1.
+
+ <_>
+ 9 9 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 10 3 -1.
+
+ <_>
+ 10 6 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 3 1 -1.
+
+ <_>
+ 11 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 11 15 -1.
+
+ <_>
+ 0 6 11 5 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 6 -1.
+
+ <_>
+ 7 0 6 6 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 1 -1.
+
+ <_>
+ 9 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 16 6 4 -1.
+
+ <_>
+ 5 16 3 2 2.
+
+ <_>
+ 8 18 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 5 9 8 -1.
+
+ <_>
+ 6 9 9 4 2.
+
+
+
+ <_>
+
+ <_>
+ 5 10 2 6 -1.
+
+ <_>
+ 5 13 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 8 10 -1.
+
+ <_>
+ 11 6 4 5 2.
+
+ <_>
+ 7 11 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 8 10 -1.
+
+ <_>
+ 5 6 4 5 2.
+
+ <_>
+ 9 11 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 2 -1.
+
+ <_>
+ 9 6 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 12 8 2 -1.
+
+ <_>
+ 5 13 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 2 8 2 -1.
+
+ <_>
+ 10 3 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 2 10 -1.
+
+ <_>
+ 4 0 1 5 2.
+
+ <_>
+ 5 5 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 10 2 2 -1.
+
+ <_>
+ 9 11 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 8 15 3 -1.
+
+ <_>
+ 2 9 15 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 3 -1.
+
+ <_>
+ 8 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 2 3 2 -1.
+
+ <_>
+ 8 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 3 -1.
+
+ <_>
+ 7 14 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 17 2 3 6 -1.
+
+ <_>
+ 17 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 5 3 4 -1.
+
+ <_>
+ 2 5 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 14 8 4 6 -1.
+
+ <_>
+ 14 10 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 4 3 8 -1.
+
+ <_>
+ 2 4 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 6 -1.
+
+ <_>
+ 8 16 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 14 2 2 -1.
+
+ <_>
+ 3 15 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 8 4 6 -1.
+
+ <_>
+ 14 10 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 2 8 4 6 -1.
+
+ <_>
+ 2 10 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 14 1 6 -1.
+
+ <_>
+ 10 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 3 6 -1.
+
+ <_>
+ 8 5 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 11 2 2 6 -1.
+
+ <_>
+ 12 2 1 3 2.
+
+ <_>
+ 11 5 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 6 5 -1.
+
+ <_>
+ 8 6 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 17 1 3 6 -1.
+
+ <_>
+ 17 3 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 5 -1.
+
+ <_>
+ 9 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 9 18 3 2 -1.
+
+ <_>
+ 10 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 18 3 2 -1.
+
+ <_>
+ 9 18 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 5 2 -1.
+
+ <_>
+ 12 4 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 1 5 12 -1.
+
+ <_>
+ 7 7 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 4 -1.
+
+ <_>
+ 7 0 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 2 2 -1.
+
+ <_>
+ 4 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 14 4 2 -1.
+
+ <_>
+ 13 14 2 1 2.
+
+ <_>
+ 11 15 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 6 -1.
+
+ <_>
+ 0 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 3 -1.
+
+ <_>
+ 9 8 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 1 3 -1.
+
+ <_>
+ 5 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 10 6 1 -1.
+
+ <_>
+ 10 10 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 10 6 1 -1.
+
+ <_>
+ 7 10 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 3 -1.
+
+ <_>
+ 9 18 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 14 1 3 -1.
+
+ <_>
+ 4 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 3 3 -1.
+
+ <_>
+ 12 6 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 3 -1.
+
+ <_>
+ 4 6 12 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 3 -1.
+
+ <_>
+ 9 9 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 9 3 3 -1.
+
+ <_>
+ 5 9 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 9 17 -1.
+
+ <_>
+ 9 0 3 17 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 1 3 -1.
+
+ <_>
+ 9 13 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 15 -1.
+
+ <_>
+ 9 10 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 2 3 -1.
+
+ <_>
+ 8 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 14 1 3 -1.
+
+ <_>
+ 10 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 5 -1.
+
+ <_>
+ 9 1 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 20 2 -1.
+
+ <_>
+ 0 0 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 2 13 5 3 -1.
+
+ <_>
+ 2 14 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 5 9 15 -1.
+
+ <_>
+ 2 10 9 5 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 12 10 -1.
+
+ <_>
+ 11 0 6 5 2.
+
+ <_>
+ 5 5 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 1 2 3 -1.
+
+ <_>
+ 6 1 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 6 1 -1.
+
+ <_>
+ 12 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 1 2 10 -1.
+
+ <_>
+ 3 1 1 5 2.
+
+ <_>
+ 4 6 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 2 1 -1.
+
+ <_>
+ 13 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 13 4 6 -1.
+
+ <_>
+ 4 15 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 7 2 1 -1.
+
+ <_>
+ 13 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 2 1 -1.
+
+ <_>
+ 6 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 12 18 4 -1.
+
+ <_>
+ 11 12 9 2 2.
+
+ <_>
+ 2 14 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 2 2 -1.
+
+ <_>
+ 5 7 1 1 2.
+
+ <_>
+ 6 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 3 4 2 -1.
+
+ <_>
+ 16 4 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 2 18 -1.
+
+ <_>
+ 0 2 1 9 2.
+
+ <_>
+ 1 11 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 18 4 -1.
+
+ <_>
+ 10 2 9 2 2.
+
+ <_>
+ 1 4 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 1 3 -1.
+
+ <_>
+ 9 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 12 18 4 -1.
+
+ <_>
+ 11 12 9 2 2.
+
+ <_>
+ 2 14 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 12 18 4 -1.
+
+ <_>
+ 0 12 9 2 2.
+
+ <_>
+ 9 14 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 5 3 -1.
+
+ <_>
+ 11 5 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 7 3 -1.
+
+ <_>
+ 6 5 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 17 3 3 -1.
+
+ <_>
+ 13 18 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 1 3 4 -1.
+
+ <_>
+ 9 1 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 11 4 2 4 -1.
+
+ <_>
+ 11 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 17 9 3 -1.
+
+ <_>
+ 3 17 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 2 8 -1.
+
+ <_>
+ 12 0 1 4 2.
+
+ <_>
+ 11 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 6 12 -1.
+
+ <_>
+ 0 8 3 6 2.
+
+ <_>
+ 3 14 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 4 12 -1.
+
+ <_>
+ 10 13 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 8 14 -1.
+
+ <_>
+ 5 10 8 7 2.
+
+
+
+ <_>
+
+ <_>
+ 14 10 6 1 -1.
+
+ <_>
+ 14 10 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 10 4 -1.
+
+ <_>
+ 0 6 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 0 5 8 -1.
+
+ <_>
+ 10 4 5 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 1 4 8 -1.
+
+ <_>
+ 8 1 2 4 2.
+
+ <_>
+ 10 5 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 6 1 -1.
+
+ <_>
+ 11 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 9 3 4 -1.
+
+ <_>
+ 9 9 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 6 -1.
+
+ <_>
+ 18 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 3 4 -1.
+
+ <_>
+ 9 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 13 3 -1.
+
+ <_>
+ 7 2 13 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 1 -1.
+
+ <_>
+ 9 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 11 3 6 -1.
+
+ <_>
+ 12 13 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 6 1 -1.
+
+ <_>
+ 7 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 4 18 10 -1.
+
+ <_>
+ 10 4 9 5 2.
+
+ <_>
+ 1 9 9 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 4 9 -1.
+
+ <_>
+ 8 9 4 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 6 4 3 -1.
+
+ <_>
+ 8 7 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 9 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 14 15 4 3 -1.
+
+ <_>
+ 14 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 10 3 10 -1.
+
+ <_>
+ 6 10 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 8 1 6 -1.
+
+ <_>
+ 0 10 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 15 1 3 -1.
+
+ <_>
+ 10 16 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 15 4 3 -1.
+
+ <_>
+ 2 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 3 2 8 -1.
+
+ <_>
+ 19 3 1 4 2.
+
+ <_>
+ 18 7 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 2 8 -1.
+
+ <_>
+ 0 3 1 4 2.
+
+ <_>
+ 1 7 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 7 14 10 -1.
+
+ <_>
+ 10 7 7 5 2.
+
+ <_>
+ 3 12 7 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 19 3 -1.
+
+ <_>
+ 0 8 19 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 6 3 3 -1.
+
+ <_>
+ 12 7 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 6 1 3 -1.
+
+ <_>
+ 0 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 6 3 3 -1.
+
+ <_>
+ 12 7 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 3 3 -1.
+
+ <_>
+ 5 7 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 2 4 2 -1.
+
+ <_>
+ 8 3 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 3 4 12 -1.
+
+ <_>
+ 8 3 2 12 2.
+
+
+
+ <_>
+
+ <_>
+ 13 6 2 3 -1.
+
+ <_>
+ 13 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 10 20 4 -1.
+
+ <_>
+ 0 12 20 2 2.
+
+
+
+ <_>
+
+ <_>
+ 2 0 17 14 -1.
+
+ <_>
+ 2 7 17 7 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 6 10 -1.
+
+ <_>
+ 0 0 3 5 2.
+
+ <_>
+ 3 5 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 14 6 6 4 -1.
+
+ <_>
+ 14 6 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 6 6 4 -1.
+
+ <_>
+ 3 6 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 13 2 7 2 -1.
+
+ <_>
+ 13 3 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 7 2 -1.
+
+ <_>
+ 0 3 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 14 2 -1.
+
+ <_>
+ 13 11 7 1 2.
+
+ <_>
+ 6 12 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 5 2 2 -1.
+
+ <_>
+ 8 5 1 1 2.
+
+ <_>
+ 9 6 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 9 2 3 -1.
+
+ <_>
+ 13 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 3 12 -1.
+
+ <_>
+ 2 1 1 12 3.
+
+
+
+ <_>
+
+ <_>
+ 17 4 1 3 -1.
+
+ <_>
+ 17 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 4 1 3 -1.
+
+ <_>
+ 2 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 5 1 3 -1.
+
+ <_>
+ 14 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 16 2 3 -1.
+
+ <_>
+ 7 17 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 6 -1.
+
+ <_>
+ 10 13 2 3 2.
+
+ <_>
+ 8 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 1 3 -1.
+
+ <_>
+ 5 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 20 -1.
+
+ <_>
+ 16 0 2 20 2.
+
+
+
+ <_>
+
+ <_>
+ 5 1 2 6 -1.
+
+ <_>
+ 5 1 1 3 2.
+
+ <_>
+ 6 4 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 10 4 -1.
+
+ <_>
+ 5 6 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 15 2 4 12 -1.
+
+ <_>
+ 15 2 2 12 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 4 12 -1.
+
+ <_>
+ 7 12 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 14 5 1 8 -1.
+
+ <_>
+ 14 9 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 4 14 10 -1.
+
+ <_>
+ 1 4 7 5 2.
+
+ <_>
+ 8 9 7 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 6 6 14 -1.
+
+ <_>
+ 14 6 3 7 2.
+
+ <_>
+ 11 13 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 6 14 -1.
+
+ <_>
+ 3 6 3 7 2.
+
+ <_>
+ 6 13 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 15 2 -1.
+
+ <_>
+ 9 9 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 14 6 3 -1.
+
+ <_>
+ 7 15 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 3 14 4 -1.
+
+ <_>
+ 13 3 7 2 2.
+
+ <_>
+ 6 5 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 9 15 2 -1.
+
+ <_>
+ 6 9 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 11 8 9 -1.
+
+ <_>
+ 6 14 8 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 3 8 -1.
+
+ <_>
+ 8 4 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 14 6 2 6 -1.
+
+ <_>
+ 14 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 6 4 -1.
+
+ <_>
+ 5 7 3 2 2.
+
+ <_>
+ 8 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 18 19 -1.
+
+ <_>
+ 7 1 6 19 3.
+
+
+
+ <_>
+
+ <_>
+ 1 2 6 5 -1.
+
+ <_>
+ 4 2 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 12 17 6 2 -1.
+
+ <_>
+ 12 18 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 17 6 2 -1.
+
+ <_>
+ 2 18 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 17 3 3 6 -1.
+
+ <_>
+ 17 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 17 3 3 -1.
+
+ <_>
+ 8 18 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 13 2 6 -1.
+
+ <_>
+ 10 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 13 6 3 -1.
+
+ <_>
+ 7 14 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 17 3 3 6 -1.
+
+ <_>
+ 17 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 2 3 -1.
+
+ <_>
+ 8 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 3 6 2 -1.
+
+ <_>
+ 11 3 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 3 3 6 -1.
+
+ <_>
+ 0 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 5 4 6 -1.
+
+ <_>
+ 8 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 3 2 -1.
+
+ <_>
+ 5 6 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 1 3 4 -1.
+
+ <_>
+ 11 1 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 2 5 9 -1.
+
+ <_>
+ 1 5 5 3 3.
+
+
+
+ <_>
+
+ <_>
+ 13 6 2 3 -1.
+
+ <_>
+ 13 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 6 14 3 -1.
+
+ <_>
+ 7 6 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 18 8 -1.
+
+ <_>
+ 2 15 18 4 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 2 3 -1.
+
+ <_>
+ 5 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 6 4 2 -1.
+
+ <_>
+ 12 6 2 1 2.
+
+ <_>
+ 10 7 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 4 2 -1.
+
+ <_>
+ 6 6 2 1 2.
+
+ <_>
+ 8 7 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 1 3 4 -1.
+
+ <_>
+ 11 1 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 2 7 -1.
+
+ <_>
+ 8 1 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 4 2 15 14 -1.
+
+ <_>
+ 4 9 15 7 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 2 -1.
+
+ <_>
+ 9 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 2 3 18 4 -1.
+
+ <_>
+ 11 3 9 2 2.
+
+ <_>
+ 2 5 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 2 -1.
+
+ <_>
+ 10 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 9 2 3 -1.
+
+ <_>
+ 13 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 6 2 -1.
+
+ <_>
+ 7 2 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 7 -1.
+
+ <_>
+ 9 5 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 2 3 -1.
+
+ <_>
+ 6 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 14 18 -1.
+
+ <_>
+ 6 9 14 9 2.
+
+
+
+ <_>
+
+ <_>
+ 2 16 6 3 -1.
+
+ <_>
+ 2 17 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 6 -1.
+
+ <_>
+ 10 7 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 7 8 4 3 -1.
+
+ <_>
+ 7 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 12 6 3 -1.
+
+ <_>
+ 7 13 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 3 -1.
+
+ <_>
+ 9 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 12 6 2 -1.
+
+ <_>
+ 9 12 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 4 6 -1.
+
+ <_>
+ 5 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 12 7 2 -1.
+
+ <_>
+ 11 13 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 8 6 -1.
+
+ <_>
+ 6 10 4 3 2.
+
+ <_>
+ 10 13 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 10 3 4 -1.
+
+ <_>
+ 11 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 16 2 3 -1.
+
+ <_>
+ 9 17 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 3 1 9 -1.
+
+ <_>
+ 13 6 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 1 13 14 6 -1.
+
+ <_>
+ 1 15 14 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 6 1 6 -1.
+
+ <_>
+ 13 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 3 8 -1.
+
+ <_>
+ 1 4 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 18 0 2 18 -1.
+
+ <_>
+ 18 0 1 18 2.
+
+
+
+ <_>
+
+ <_>
+ 2 3 6 2 -1.
+
+ <_>
+ 2 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 0 8 6 -1.
+
+ <_>
+ 9 2 8 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 1 6 -1.
+
+ <_>
+ 6 9 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 8 6 3 -1.
+
+ <_>
+ 14 9 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 2 18 -1.
+
+ <_>
+ 1 0 1 18 2.
+
+
+
+ <_>
+
+ <_>
+ 1 18 18 2 -1.
+
+ <_>
+ 10 18 9 1 2.
+
+ <_>
+ 1 19 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 15 2 2 -1.
+
+ <_>
+ 3 16 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 5 3 -1.
+
+ <_>
+ 8 15 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 2 3 -1.
+
+ <_>
+ 8 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 3 3 -1.
+
+ <_>
+ 13 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 2 -1.
+
+ <_>
+ 9 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 15 5 5 2 -1.
+
+ <_>
+ 15 6 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 5 2 -1.
+
+ <_>
+ 0 6 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 17 14 1 6 -1.
+
+ <_>
+ 17 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 9 9 3 -1.
+
+ <_>
+ 5 9 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 3 3 -1.
+
+ <_>
+ 13 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 4 18 -1.
+
+ <_>
+ 2 0 2 18 2.
+
+
+
+ <_>
+
+ <_>
+ 17 6 1 3 -1.
+
+ <_>
+ 17 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 14 1 6 -1.
+
+ <_>
+ 2 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 19 8 1 2 -1.
+
+ <_>
+ 19 9 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 3 3 -1.
+
+ <_>
+ 6 3 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 16 2 3 -1.
+
+ <_>
+ 9 17 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 6 1 3 -1.
+
+ <_>
+ 2 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 4 8 2 -1.
+
+ <_>
+ 16 4 4 1 2.
+
+ <_>
+ 12 5 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 8 2 -1.
+
+ <_>
+ 0 4 4 1 2.
+
+ <_>
+ 4 5 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 16 18 4 -1.
+
+ <_>
+ 2 18 18 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 15 2 4 -1.
+
+ <_>
+ 7 17 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 14 3 -1.
+
+ <_>
+ 4 1 14 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 4 20 -1.
+
+ <_>
+ 2 0 2 20 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 4 8 -1.
+
+ <_>
+ 14 4 2 4 2.
+
+ <_>
+ 12 8 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 7 2 2 -1.
+
+ <_>
+ 6 7 1 1 2.
+
+ <_>
+ 7 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 6 2 3 -1.
+
+ <_>
+ 10 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 2 -1.
+
+ <_>
+ 8 8 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 2 6 12 -1.
+
+ <_>
+ 8 8 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 11 12 -1.
+
+ <_>
+ 4 4 11 4 3.
+
+
+
+ <_>
+
+ <_>
+ 14 9 6 11 -1.
+
+ <_>
+ 16 9 2 11 3.
+
+
+
+ <_>
+
+ <_>
+ 0 14 4 3 -1.
+
+ <_>
+ 0 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 10 2 3 -1.
+
+ <_>
+ 9 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 3 2 -1.
+
+ <_>
+ 5 12 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 15 3 3 -1.
+
+ <_>
+ 10 15 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 3 4 -1.
+
+ <_>
+ 9 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 15 3 3 -1.
+
+ <_>
+ 10 15 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 3 2 -1.
+
+ <_>
+ 8 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 2 10 16 4 -1.
+
+ <_>
+ 10 10 8 2 2.
+
+ <_>
+ 2 12 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 2 3 4 17 -1.
+
+ <_>
+ 4 3 2 17 2.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 7 -1.
+
+ <_>
+ 15 13 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 6 1 -1.
+
+ <_>
+ 5 2 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 12 4 -1.
+
+ <_>
+ 9 2 4 4 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 12 -1.
+
+ <_>
+ 6 0 4 6 2.
+
+ <_>
+ 10 6 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 13 7 2 2 -1.
+
+ <_>
+ 14 7 1 1 2.
+
+ <_>
+ 13 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 12 20 6 -1.
+
+ <_>
+ 0 14 20 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 7 2 3 -1.
+
+ <_>
+ 14 7 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 9 12 -1.
+
+ <_>
+ 3 8 3 12 3.
+
+
+
+ <_>
+
+ <_>
+ 3 0 16 2 -1.
+
+ <_>
+ 3 0 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 15 3 3 -1.
+
+ <_>
+ 6 16 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 6 3 -1.
+
+ <_>
+ 8 16 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 10 1 6 -1.
+
+ <_>
+ 0 12 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 9 4 3 -1.
+
+ <_>
+ 10 10 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 15 2 3 -1.
+
+ <_>
+ 9 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 1 -1.
+
+ <_>
+ 5 7 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 12 19 -1.
+
+ <_>
+ 10 0 6 19 2.
+
+
+
+ <_>
+
+ <_>
+ 0 6 20 6 -1.
+
+ <_>
+ 10 6 10 3 2.
+
+ <_>
+ 0 9 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 2 2 -1.
+
+ <_>
+ 3 6 1 1 2.
+
+ <_>
+ 4 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 6 2 2 -1.
+
+ <_>
+ 16 6 1 1 2.
+
+ <_>
+ 15 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 2 2 -1.
+
+ <_>
+ 3 6 1 1 2.
+
+ <_>
+ 4 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 4 1 12 -1.
+
+ <_>
+ 14 10 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 2 5 16 10 -1.
+
+ <_>
+ 2 5 8 5 2.
+
+ <_>
+ 10 10 8 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 2 -1.
+
+ <_>
+ 10 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 4 2 2 -1.
+
+ <_>
+ 1 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 0 15 5 -1.
+
+ <_>
+ 10 0 5 5 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 15 5 -1.
+
+ <_>
+ 5 0 5 5 3.
+
+
+
+ <_>
+
+ <_>
+ 11 2 2 17 -1.
+
+ <_>
+ 11 2 1 17 2.
+
+
+
+ <_>
+
+ <_>
+ 7 2 2 17 -1.
+
+ <_>
+ 8 2 1 17 2.
+
+
+
+ <_>
+
+ <_>
+ 15 11 2 9 -1.
+
+ <_>
+ 15 11 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 3 11 2 9 -1.
+
+ <_>
+ 4 11 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 5 16 14 4 -1.
+
+ <_>
+ 5 16 7 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 4 18 1 -1.
+
+ <_>
+ 7 4 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 7 6 4 -1.
+
+ <_>
+ 16 7 3 2 2.
+
+ <_>
+ 13 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 12 -1.
+
+ <_>
+ 9 12 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 12 1 6 6 -1.
+
+ <_>
+ 12 3 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 6 6 -1.
+
+ <_>
+ 5 2 3 3 2.
+
+ <_>
+ 8 5 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 16 6 4 -1.
+
+ <_>
+ 12 16 3 2 2.
+
+ <_>
+ 9 18 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 18 3 -1.
+
+ <_>
+ 7 2 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 9 10 -1.
+
+ <_>
+ 7 9 9 5 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 4 4 -1.
+
+ <_>
+ 7 9 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 11 10 3 6 -1.
+
+ <_>
+ 11 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 11 5 3 -1.
+
+ <_>
+ 7 12 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 11 6 6 -1.
+
+ <_>
+ 10 11 3 3 2.
+
+ <_>
+ 7 14 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 10 9 -1.
+
+ <_>
+ 0 3 10 3 3.
+
+
+
+ <_>
+
+ <_>
+ 13 14 1 6 -1.
+
+ <_>
+ 13 16 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 6 -1.
+
+ <_>
+ 0 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 14 1 6 -1.
+
+ <_>
+ 6 16 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 15 2 3 -1.
+
+ <_>
+ 9 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 3 3 -1.
+
+ <_>
+ 7 4 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 0 11 3 -1.
+
+ <_>
+ 9 1 11 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 6 20 3 -1.
+
+ <_>
+ 0 7 20 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 1 1 2 -1.
+
+ <_>
+ 10 2 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 6 -1.
+
+ <_>
+ 10 6 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 5 8 12 1 -1.
+
+ <_>
+ 9 8 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 8 12 1 -1.
+
+ <_>
+ 7 8 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 5 -1.
+
+ <_>
+ 10 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 3 9 6 2 -1.
+
+ <_>
+ 6 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 1 -1.
+
+ <_>
+ 9 0 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 10 2 1 -1.
+
+ <_>
+ 8 10 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 4 9 13 -1.
+
+ <_>
+ 9 4 3 13 3.
+
+
+
+ <_>
+
+ <_>
+ 6 8 4 2 -1.
+
+ <_>
+ 6 9 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 2 4 6 -1.
+
+ <_>
+ 16 2 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 17 6 3 -1.
+
+ <_>
+ 0 18 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 10 3 10 -1.
+
+ <_>
+ 10 15 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 5 -1.
+
+ <_>
+ 9 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 10 4 4 3 -1.
+
+ <_>
+ 10 4 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 4 3 8 -1.
+
+ <_>
+ 9 4 1 8 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 9 13 -1.
+
+ <_>
+ 9 6 3 13 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 12 -1.
+
+ <_>
+ 6 0 4 6 2.
+
+ <_>
+ 10 6 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 8 -1.
+
+ <_>
+ 16 2 2 8 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 6 -1.
+
+ <_>
+ 7 0 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 8 -1.
+
+ <_>
+ 16 2 2 8 3.
+
+
+
+ <_>
+
+ <_>
+ 0 5 6 6 -1.
+
+ <_>
+ 0 8 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 6 2 -1.
+
+ <_>
+ 12 12 3 1 2.
+
+ <_>
+ 9 13 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 17 3 2 -1.
+
+ <_>
+ 9 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 6 2 2 -1.
+
+ <_>
+ 12 6 1 1 2.
+
+ <_>
+ 11 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 9 18 2 -1.
+
+ <_>
+ 7 9 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 6 2 2 -1.
+
+ <_>
+ 12 6 1 1 2.
+
+ <_>
+ 11 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 12 8 -1.
+
+ <_>
+ 7 4 4 8 3.
+
+
+
+ <_>
+
+ <_>
+ 13 11 5 3 -1.
+
+ <_>
+ 13 12 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 10 2 3 -1.
+
+ <_>
+ 9 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 7 2 3 -1.
+
+ <_>
+ 14 7 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 1 3 -1.
+
+ <_>
+ 5 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 4 2 3 -1.
+
+ <_>
+ 13 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 2 3 -1.
+
+ <_>
+ 5 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 3 -1.
+
+ <_>
+ 9 9 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 9 2 2 -1.
+
+ <_>
+ 8 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 14 1 4 -1.
+
+ <_>
+ 15 16 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 12 2 2 -1.
+
+ <_>
+ 3 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 15 2 2 -1.
+
+ <_>
+ 13 15 1 1 2.
+
+ <_>
+ 12 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 13 2 2 -1.
+
+ <_>
+ 9 14 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 14 9 -1.
+
+ <_>
+ 4 14 14 3 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 4 3 -1.
+
+ <_>
+ 7 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 14 1 4 -1.
+
+ <_>
+ 15 16 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 14 1 4 -1.
+
+ <_>
+ 4 16 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 13 -1.
+
+ <_>
+ 16 0 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 4 1 2 12 -1.
+
+ <_>
+ 4 1 1 6 2.
+
+ <_>
+ 5 7 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 11 14 6 6 -1.
+
+ <_>
+ 14 14 3 3 2.
+
+ <_>
+ 11 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 14 6 6 -1.
+
+ <_>
+ 3 14 3 3 2.
+
+ <_>
+ 6 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 17 3 2 -1.
+
+ <_>
+ 14 18 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 17 3 2 -1.
+
+ <_>
+ 3 18 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 13 -1.
+
+ <_>
+ 16 0 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 6 13 -1.
+
+ <_>
+ 2 0 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 10 10 7 6 -1.
+
+ <_>
+ 10 12 7 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 15 2 2 -1.
+
+ <_>
+ 6 15 1 1 2.
+
+ <_>
+ 7 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 8 6 -1.
+
+ <_>
+ 10 11 4 3 2.
+
+ <_>
+ 6 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 2 2 -1.
+
+ <_>
+ 7 6 1 1 2.
+
+ <_>
+ 8 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 16 6 -1.
+
+ <_>
+ 10 2 8 3 2.
+
+ <_>
+ 2 5 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 7 3 10 -1.
+
+ <_>
+ 11 12 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 7 3 10 -1.
+
+ <_>
+ 6 12 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 3 2 -1.
+
+ <_>
+ 11 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 8 13 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 1 1 3 -1.
+
+ <_>
+ 10 2 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 2 4 18 -1.
+
+ <_>
+ 1 2 2 9 2.
+
+ <_>
+ 3 11 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 4 12 -1.
+
+ <_>
+ 12 10 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 1 6 -1.
+
+ <_>
+ 0 2 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 4 3 -1.
+
+ <_>
+ 8 8 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 7 3 2 -1.
+
+ <_>
+ 11 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 3 2 -1.
+
+ <_>
+ 8 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 4 6 1 -1.
+
+ <_>
+ 11 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 2 3 -1.
+
+ <_>
+ 9 7 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 7 8 6 -1.
+
+ <_>
+ 16 7 4 3 2.
+
+ <_>
+ 12 10 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 8 6 -1.
+
+ <_>
+ 0 7 4 3 2.
+
+ <_>
+ 4 10 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 18 2 2 10 -1.
+
+ <_>
+ 19 2 1 5 2.
+
+ <_>
+ 18 7 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 6 4 -1.
+
+ <_>
+ 3 2 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 4 6 1 -1.
+
+ <_>
+ 11 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 15 2 2 -1.
+
+ <_>
+ 7 15 1 1 2.
+
+ <_>
+ 8 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 13 1 6 -1.
+
+ <_>
+ 11 16 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 1 6 -1.
+
+ <_>
+ 8 16 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 2 1 -1.
+
+ <_>
+ 14 3 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 2 3 -1.
+
+ <_>
+ 8 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 15 7 4 -1.
+
+ <_>
+ 12 17 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 14 12 3 -1.
+
+ <_>
+ 4 15 12 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 3 3 2 -1.
+
+ <_>
+ 11 3 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 2 -1.
+
+ <_>
+ 4 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 4 6 -1.
+
+ <_>
+ 10 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 13 2 2 -1.
+
+ <_>
+ 7 13 1 1 2.
+
+ <_>
+ 8 14 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 14 4 -1.
+
+ <_>
+ 11 11 7 2 2.
+
+ <_>
+ 4 13 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 18 18 2 -1.
+
+ <_>
+ 7 18 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 18 2 2 -1.
+
+ <_>
+ 12 18 1 1 2.
+
+ <_>
+ 11 19 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 18 2 2 -1.
+
+ <_>
+ 7 18 1 1 2.
+
+ <_>
+ 8 19 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 18 8 2 -1.
+
+ <_>
+ 12 19 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 6 2 -1.
+
+ <_>
+ 7 15 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 8 -1.
+
+ <_>
+ 10 12 2 4 2.
+
+ <_>
+ 8 16 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 3 3 -1.
+
+ <_>
+ 4 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 10 6 2 -1.
+
+ <_>
+ 9 10 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 4 15 -1.
+
+ <_>
+ 7 0 2 15 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 12 14 -1.
+
+ <_>
+ 12 6 4 14 3.
+
+
+
+ <_>
+
+ <_>
+ 5 16 3 3 -1.
+
+ <_>
+ 5 17 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 1 12 19 -1.
+
+ <_>
+ 12 1 4 19 3.
+
+
+
+ <_>
+
+ <_>
+ 3 0 3 2 -1.
+
+ <_>
+ 3 1 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 4 5 -1.
+
+ <_>
+ 10 12 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 12 4 5 -1.
+
+ <_>
+ 8 12 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 2 2 -1.
+
+ <_>
+ 12 11 1 1 2.
+
+ <_>
+ 11 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 6 -1.
+
+ <_>
+ 0 4 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 11 2 2 -1.
+
+ <_>
+ 12 11 1 1 2.
+
+ <_>
+ 11 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 4 10 -1.
+
+ <_>
+ 7 11 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 2 2 -1.
+
+ <_>
+ 12 11 1 1 2.
+
+ <_>
+ 11 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 13 5 2 -1.
+
+ <_>
+ 2 14 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 11 2 2 -1.
+
+ <_>
+ 12 11 1 1 2.
+
+ <_>
+ 11 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 11 2 2 -1.
+
+ <_>
+ 7 11 1 1 2.
+
+ <_>
+ 8 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 13 3 3 -1.
+
+ <_>
+ 14 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 3 3 -1.
+
+ <_>
+ 3 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 8 8 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 5 3 3 -1.
+
+ <_>
+ 13 6 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 5 3 -1.
+
+ <_>
+ 0 10 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 5 3 3 -1.
+
+ <_>
+ 13 6 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 8 -1.
+
+ <_>
+ 9 12 1 4 2.
+
+ <_>
+ 10 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 11 7 2 2 -1.
+
+ <_>
+ 12 7 1 1 2.
+
+ <_>
+ 11 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 16 6 4 -1.
+
+ <_>
+ 3 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 10 6 2 3 -1.
+
+ <_>
+ 10 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 6 -1.
+
+ <_>
+ 9 7 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 15 8 4 -1.
+
+ <_>
+ 12 15 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 14 8 6 -1.
+
+ <_>
+ 4 14 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 0 3 2 -1.
+
+ <_>
+ 10 0 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 15 4 2 -1.
+
+ <_>
+ 6 15 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 12 7 3 13 -1.
+
+ <_>
+ 13 7 1 13 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 3 13 -1.
+
+ <_>
+ 6 7 1 13 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 3 9 -1.
+
+ <_>
+ 9 9 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 7 12 -1.
+
+ <_>
+ 4 10 7 6 2.
+
+
+
+ <_>
+
+ <_>
+ 12 12 2 2 -1.
+
+ <_>
+ 13 12 1 1 2.
+
+ <_>
+ 12 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 12 2 2 -1.
+
+ <_>
+ 6 12 1 1 2.
+
+ <_>
+ 7 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 9 4 2 -1.
+
+ <_>
+ 10 9 2 1 2.
+
+ <_>
+ 8 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 6 2 2 -1.
+
+ <_>
+ 3 6 1 1 2.
+
+ <_>
+ 4 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 6 3 2 -1.
+
+ <_>
+ 16 7 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 19 4 -1.
+
+ <_>
+ 0 9 19 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 2 10 1 -1.
+
+ <_>
+ 10 2 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 12 -1.
+
+ <_>
+ 9 10 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 12 18 4 1 -1.
+
+ <_>
+ 12 18 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 6 4 -1.
+
+ <_>
+ 1 7 3 2 2.
+
+ <_>
+ 4 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 12 0 6 13 -1.
+
+ <_>
+ 14 0 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 2 0 6 13 -1.
+
+ <_>
+ 4 0 2 13 3.
+
+
+
+ <_>
+
+ <_>
+ 10 5 8 8 -1.
+
+ <_>
+ 10 9 8 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 3 2 5 -1.
+
+ <_>
+ 9 3 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 4 9 1 -1.
+
+ <_>
+ 11 4 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 4 9 1 -1.
+
+ <_>
+ 6 4 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 10 -1.
+
+ <_>
+ 7 0 6 10 3.
+
+
+
+ <_>
+
+ <_>
+ 7 17 5 3 -1.
+
+ <_>
+ 7 18 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 11 6 1 -1.
+
+ <_>
+ 9 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 2 3 2 -1.
+
+ <_>
+ 2 3 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 8 13 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 3 6 -1.
+
+ <_>
+ 6 13 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 2 4 -1.
+
+ <_>
+ 11 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 4 2 4 -1.
+
+ <_>
+ 8 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 4 -1.
+
+ <_>
+ 9 6 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 13 8 3 -1.
+
+ <_>
+ 6 14 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 15 3 4 -1.
+
+ <_>
+ 10 15 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 2 2 17 -1.
+
+ <_>
+ 10 2 1 17 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 1 -1.
+
+ <_>
+ 9 0 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 3 4 -1.
+
+ <_>
+ 9 15 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 7 3 -1.
+
+ <_>
+ 7 14 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 16 3 3 -1.
+
+ <_>
+ 9 16 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 2 8 10 -1.
+
+ <_>
+ 6 7 8 5 2.
+
+
+
+ <_>
+
+ <_>
+ 2 5 8 8 -1.
+
+ <_>
+ 2 9 8 4 2.
+
+
+
+ <_>
+
+ <_>
+ 14 16 2 2 -1.
+
+ <_>
+ 14 17 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 16 2 2 -1.
+
+ <_>
+ 4 17 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 4 6 -1.
+
+ <_>
+ 10 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 4 6 -1.
+
+ <_>
+ 6 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 14 1 3 -1.
+
+ <_>
+ 10 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 0 4 6 -1.
+
+ <_>
+ 12 0 2 3 2.
+
+ <_>
+ 10 3 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 20 2 -1.
+
+ <_>
+ 0 4 20 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 0 8 2 -1.
+
+ <_>
+ 16 0 4 1 2.
+
+ <_>
+ 12 1 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 12 10 8 -1.
+
+ <_>
+ 2 16 10 4 2.
+
+
+
+ <_>
+
+ <_>
+ 17 7 2 10 -1.
+
+ <_>
+ 18 7 1 5 2.
+
+ <_>
+ 17 12 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 2 10 -1.
+
+ <_>
+ 1 7 1 5 2.
+
+ <_>
+ 2 12 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 15 10 3 6 -1.
+
+ <_>
+ 15 12 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 4 6 2 -1.
+
+ <_>
+ 6 4 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 5 20 6 -1.
+
+ <_>
+ 0 7 20 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 8 2 -1.
+
+ <_>
+ 0 0 4 1 2.
+
+ <_>
+ 4 1 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 4 -1.
+
+ <_>
+ 7 0 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 13 6 2 -1.
+
+ <_>
+ 1 14 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 8 3 4 -1.
+
+ <_>
+ 11 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 6 1 6 1 -1.
+
+ <_>
+ 8 1 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 6 18 2 -1.
+
+ <_>
+ 10 6 9 2 2.
+
+
+
+ <_>
+
+ <_>
+ 15 11 1 2 -1.
+
+ <_>
+ 15 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 5 1 2 -1.
+
+ <_>
+ 6 6 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 4 1 3 -1.
+
+ <_>
+ 13 5 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 15 1 2 -1.
+
+ <_>
+ 2 16 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 4 3 -1.
+
+ <_>
+ 12 5 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 7 3 -1.
+
+ <_>
+ 0 1 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 6 2 -1.
+
+ <_>
+ 9 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 2 3 -1.
+
+ <_>
+ 5 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 4 2 3 -1.
+
+ <_>
+ 18 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 0 8 6 -1.
+
+ <_>
+ 3 2 8 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 20 6 -1.
+
+ <_>
+ 10 2 10 3 2.
+
+ <_>
+ 0 5 10 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 7 2 4 -1.
+
+ <_>
+ 5 7 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 10 15 2 -1.
+
+ <_>
+ 8 10 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 0 12 11 -1.
+
+ <_>
+ 9 0 6 11 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 2 6 -1.
+
+ <_>
+ 13 0 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 19 2 1 -1.
+
+ <_>
+ 1 19 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 10 4 10 -1.
+
+ <_>
+ 18 10 2 5 2.
+
+ <_>
+ 16 15 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 8 10 3 -1.
+
+ <_>
+ 4 9 10 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 3 3 -1.
+
+ <_>
+ 14 13 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 10 4 10 -1.
+
+ <_>
+ 0 10 2 5 2.
+
+ <_>
+ 2 15 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 18 3 2 6 -1.
+
+ <_>
+ 18 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 1 3 -1.
+
+ <_>
+ 6 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 7 2 -1.
+
+ <_>
+ 7 8 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 2 6 -1.
+
+ <_>
+ 0 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 1 3 1 -1.
+
+ <_>
+ 12 1 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 2 6 -1.
+
+ <_>
+ 6 0 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 1 18 14 -1.
+
+ <_>
+ 7 1 6 14 3.
+
+
+
+ <_>
+
+ <_>
+ 4 6 8 3 -1.
+
+ <_>
+ 8 6 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 6 2 -1.
+
+ <_>
+ 9 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 12 6 2 -1.
+
+ <_>
+ 8 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 3 5 -1.
+
+ <_>
+ 11 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 3 5 -1.
+
+ <_>
+ 8 7 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 13 0 3 10 -1.
+
+ <_>
+ 14 0 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 3 2 -1.
+
+ <_>
+ 4 12 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 17 3 3 6 -1.
+
+ <_>
+ 18 3 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 1 8 18 10 -1.
+
+ <_>
+ 1 13 18 5 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 3 10 -1.
+
+ <_>
+ 14 0 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 7 -1.
+
+ <_>
+ 17 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 4 0 3 10 -1.
+
+ <_>
+ 5 0 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 7 -1.
+
+ <_>
+ 17 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 1 2 -1.
+
+ <_>
+ 0 10 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 18 1 2 10 -1.
+
+ <_>
+ 18 1 1 10 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 2 10 -1.
+
+ <_>
+ 1 1 1 10 2.
+
+
+
+ <_>
+
+ <_>
+ 10 16 3 4 -1.
+
+ <_>
+ 11 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 2 8 3 3 -1.
+
+ <_>
+ 3 8 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 11 0 2 6 -1.
+
+ <_>
+ 12 0 1 3 2.
+
+ <_>
+ 11 3 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 2 6 -1.
+
+ <_>
+ 7 0 1 3 2.
+
+ <_>
+ 8 3 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 7 -1.
+
+ <_>
+ 17 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 1 3 3 7 -1.
+
+ <_>
+ 2 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 14 1 6 16 -1.
+
+ <_>
+ 16 1 2 16 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 6 16 -1.
+
+ <_>
+ 2 1 2 16 3.
+
+
+
+ <_>
+
+ <_>
+ 2 0 16 8 -1.
+
+ <_>
+ 10 0 8 4 2.
+
+ <_>
+ 2 4 8 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 8 5 3 -1.
+
+ <_>
+ 6 9 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 3 -1.
+
+ <_>
+ 10 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 4 3 -1.
+
+ <_>
+ 8 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 4 -1.
+
+ <_>
+ 9 6 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 7 15 1 -1.
+
+ <_>
+ 5 7 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 2 7 9 -1.
+
+ <_>
+ 8 5 7 3 3.
+
+
+
+ <_>
+
+ <_>
+ 1 7 16 4 -1.
+
+ <_>
+ 1 7 8 2 2.
+
+ <_>
+ 9 9 8 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 12 8 2 -1.
+
+ <_>
+ 6 13 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 11 3 3 -1.
+
+ <_>
+ 8 12 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 14 10 -1.
+
+ <_>
+ 11 5 7 5 2.
+
+ <_>
+ 4 10 7 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 12 3 2 -1.
+
+ <_>
+ 4 13 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 6 1 -1.
+
+ <_>
+ 11 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 9 7 6 -1.
+
+ <_>
+ 4 11 7 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 10 6 3 -1.
+
+ <_>
+ 7 11 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 2 -1.
+
+ <_>
+ 9 12 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 20 6 -1.
+
+ <_>
+ 0 7 20 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 6 1 -1.
+
+ <_>
+ 8 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 11 6 1 -1.
+
+ <_>
+ 11 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 6 1 -1.
+
+ <_>
+ 7 11 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 16 3 4 -1.
+
+ <_>
+ 11 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 9 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 2 12 16 8 -1.
+
+ <_>
+ 2 16 16 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 15 15 2 -1.
+
+ <_>
+ 0 16 15 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 4 5 6 -1.
+
+ <_>
+ 15 6 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 4 -1.
+
+ <_>
+ 10 5 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 10 9 6 -1.
+
+ <_>
+ 8 12 9 2 3.
+
+
+
+ <_>
+
+ <_>
+ 2 19 15 1 -1.
+
+ <_>
+ 7 19 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 16 3 4 -1.
+
+ <_>
+ 11 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 0 15 20 4 -1.
+
+ <_>
+ 0 17 20 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 16 3 4 -1.
+
+ <_>
+ 11 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 16 3 4 -1.
+
+ <_>
+ 8 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 16 3 3 -1.
+
+ <_>
+ 9 17 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 6 -1.
+
+ <_>
+ 8 14 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 12 -1.
+
+ <_>
+ 9 10 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 17 4 3 -1.
+
+ <_>
+ 8 18 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 18 8 2 -1.
+
+ <_>
+ 13 18 4 1 2.
+
+ <_>
+ 9 19 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 18 8 2 -1.
+
+ <_>
+ 1 19 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 6 15 -1.
+
+ <_>
+ 15 5 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 2 -1.
+
+ <_>
+ 9 9 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 3 -1.
+
+ <_>
+ 9 5 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 5 6 15 -1.
+
+ <_>
+ 3 5 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 4 1 14 8 -1.
+
+ <_>
+ 11 1 7 4 2.
+
+ <_>
+ 4 5 7 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 4 4 16 -1.
+
+ <_>
+ 2 4 2 8 2.
+
+ <_>
+ 4 12 2 8 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 12 -1.
+
+ <_>
+ 12 10 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 10 12 -1.
+
+ <_>
+ 4 5 5 6 2.
+
+ <_>
+ 9 11 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 2 3 -1.
+
+ <_>
+ 5 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 2 4 10 -1.
+
+ <_>
+ 14 2 2 5 2.
+
+ <_>
+ 12 7 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 4 7 3 -1.
+
+ <_>
+ 6 5 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 0 18 2 -1.
+
+ <_>
+ 11 0 9 1 2.
+
+ <_>
+ 2 1 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 18 2 -1.
+
+ <_>
+ 0 0 9 1 2.
+
+ <_>
+ 9 1 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 13 4 6 -1.
+
+ <_>
+ 15 13 2 3 2.
+
+ <_>
+ 13 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 3 13 4 6 -1.
+
+ <_>
+ 3 13 2 3 2.
+
+ <_>
+ 5 16 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 2 6 -1.
+
+ <_>
+ 10 15 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 10 10 -1.
+
+ <_>
+ 5 9 5 5 2.
+
+ <_>
+ 10 14 5 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 4 2 -1.
+
+ <_>
+ 13 4 2 1 2.
+
+ <_>
+ 11 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 12 6 8 -1.
+
+ <_>
+ 10 12 3 8 2.
+
+
+
+ <_>
+
+ <_>
+ 12 2 4 10 -1.
+
+ <_>
+ 14 2 2 5 2.
+
+ <_>
+ 12 7 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 11 2 1 -1.
+
+ <_>
+ 9 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 5 1 12 -1.
+
+ <_>
+ 10 9 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 0 11 6 9 -1.
+
+ <_>
+ 3 11 3 9 2.
+
+
+
+ <_>
+
+ <_>
+ 12 2 4 10 -1.
+
+ <_>
+ 14 2 2 5 2.
+
+ <_>
+ 12 7 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 2 4 10 -1.
+
+ <_>
+ 4 2 2 5 2.
+
+ <_>
+ 6 7 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 4 2 -1.
+
+ <_>
+ 13 4 2 1 2.
+
+ <_>
+ 11 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 14 6 3 -1.
+
+ <_>
+ 0 15 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 4 4 2 -1.
+
+ <_>
+ 13 4 2 1 2.
+
+ <_>
+ 11 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 1 3 2 -1.
+
+ <_>
+ 7 1 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 4 4 2 -1.
+
+ <_>
+ 13 4 2 1 2.
+
+ <_>
+ 11 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 4 4 2 -1.
+
+ <_>
+ 5 4 2 1 2.
+
+ <_>
+ 7 5 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 2 12 -1.
+
+ <_>
+ 14 0 1 6 2.
+
+ <_>
+ 13 6 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 10 -1.
+
+ <_>
+ 7 0 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 3 0 17 8 -1.
+
+ <_>
+ 3 4 17 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 20 4 -1.
+
+ <_>
+ 0 6 20 2 2.
+
+
+
+ <_>
+
+ <_>
+ 0 3 8 2 -1.
+
+ <_>
+ 4 3 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 3 -1.
+
+ <_>
+ 8 12 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 6 4 -1.
+
+ <_>
+ 5 7 3 2 2.
+
+ <_>
+ 8 9 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 3 4 9 -1.
+
+ <_>
+ 8 6 4 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 15 1 4 -1.
+
+ <_>
+ 8 17 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 7 -1.
+
+ <_>
+ 8 5 4 7 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 4 10 -1.
+
+ <_>
+ 4 2 2 5 2.
+
+ <_>
+ 6 7 2 5 2.
+
+
+
+ <_>
+
+ <_>
+ 3 0 17 2 -1.
+
+ <_>
+ 3 1 17 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 16 15 -1.
+
+ <_>
+ 2 7 16 5 3.
+
+
+
+ <_>
+
+ <_>
+ 15 2 5 2 -1.
+
+ <_>
+ 15 3 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 2 2 -1.
+
+ <_>
+ 10 3 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 16 15 -1.
+
+ <_>
+ 4 10 16 5 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 5 6 -1.
+
+ <_>
+ 7 16 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 3 2 -1.
+
+ <_>
+ 11 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 3 3 1 -1.
+
+ <_>
+ 9 3 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 16 3 3 -1.
+
+ <_>
+ 9 17 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 5 2 -1.
+
+ <_>
+ 0 3 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 7 12 1 -1.
+
+ <_>
+ 5 7 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 14 -1.
+
+ <_>
+ 7 12 6 7 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 8 10 -1.
+
+ <_>
+ 0 0 4 5 2.
+
+ <_>
+ 4 5 4 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 1 3 2 -1.
+
+ <_>
+ 10 1 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 1 3 2 -1.
+
+ <_>
+ 9 1 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 6 16 -1.
+
+ <_>
+ 7 12 6 8 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 3 2 6 -1.
+
+ <_>
+ 2 5 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 9 -1.
+
+ <_>
+ 14 5 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 2 -1.
+
+ <_>
+ 10 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 3 -1.
+
+ <_>
+ 5 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 11 3 6 -1.
+
+ <_>
+ 13 13 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 2 6 -1.
+
+ <_>
+ 3 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 6 2 -1.
+
+ <_>
+ 14 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 16 2 -1.
+
+ <_>
+ 0 9 16 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 3 6 2 -1.
+
+ <_>
+ 14 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 5 6 -1.
+
+ <_>
+ 0 2 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 3 6 -1.
+
+ <_>
+ 4 13 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 1 3 -1.
+
+ <_>
+ 9 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 8 12 -1.
+
+ <_>
+ 6 12 8 6 2.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 12 9 2 -1.
+
+ <_>
+ 8 12 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 4 3 -1.
+
+ <_>
+ 4 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 9 2 -1.
+
+ <_>
+ 9 6 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 1 3 -1.
+
+ <_>
+ 4 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 6 6 -1.
+
+ <_>
+ 14 12 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 3 7 -1.
+
+ <_>
+ 8 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 3 3 -1.
+
+ <_>
+ 10 8 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 3 3 -1.
+
+ <_>
+ 9 8 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 10 11 3 -1.
+
+ <_>
+ 5 11 11 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 10 1 -1.
+
+ <_>
+ 10 7 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 2 -1.
+
+ <_>
+ 10 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 2 -1.
+
+ <_>
+ 9 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 9 4 2 -1.
+
+ <_>
+ 11 9 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 9 4 2 -1.
+
+ <_>
+ 7 9 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 14 10 2 4 -1.
+
+ <_>
+ 14 12 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 3 2 -1.
+
+ <_>
+ 8 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 17 6 3 -1.
+
+ <_>
+ 14 18 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 12 12 -1.
+
+ <_>
+ 4 5 6 6 2.
+
+ <_>
+ 10 11 6 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 9 8 8 -1.
+
+ <_>
+ 10 9 4 4 2.
+
+ <_>
+ 6 13 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 15 4 -1.
+
+ <_>
+ 5 4 5 4 3.
+
+
+
+ <_>
+
+ <_>
+ 13 2 4 1 -1.
+
+ <_>
+ 13 2 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 2 -1.
+
+ <_>
+ 4 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 3 -1.
+
+ <_>
+ 8 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 13 2 3 -1.
+
+ <_>
+ 9 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 11 2 3 -1.
+
+ <_>
+ 13 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 12 4 4 -1.
+
+ <_>
+ 7 12 2 2 2.
+
+ <_>
+ 9 14 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 2 2 -1.
+
+ <_>
+ 11 11 1 1 2.
+
+ <_>
+ 10 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 17 3 2 -1.
+
+ <_>
+ 9 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 11 2 2 -1.
+
+ <_>
+ 11 11 1 1 2.
+
+ <_>
+ 10 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 17 6 3 -1.
+
+ <_>
+ 0 18 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 11 2 2 -1.
+
+ <_>
+ 11 11 1 1 2.
+
+ <_>
+ 10 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 11 2 2 -1.
+
+ <_>
+ 8 11 1 1 2.
+
+ <_>
+ 9 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 5 8 4 -1.
+
+ <_>
+ 12 5 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 5 8 4 -1.
+
+ <_>
+ 4 5 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 13 2 4 1 -1.
+
+ <_>
+ 13 2 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 2 4 1 -1.
+
+ <_>
+ 5 2 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 0 4 2 -1.
+
+ <_>
+ 12 0 2 1 2.
+
+ <_>
+ 10 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 12 3 1 -1.
+
+ <_>
+ 8 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 11 4 8 -1.
+
+ <_>
+ 10 11 2 4 2.
+
+ <_>
+ 8 15 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 18 15 2 -1.
+
+ <_>
+ 3 19 15 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 6 2 12 -1.
+
+ <_>
+ 2 6 1 6 2.
+
+ <_>
+ 3 12 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 3 -1.
+
+ <_>
+ 9 9 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 10 3 2 -1.
+
+ <_>
+ 8 10 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 11 3 1 -1.
+
+ <_>
+ 12 11 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 11 3 1 -1.
+
+ <_>
+ 7 11 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 2 4 2 -1.
+
+ <_>
+ 11 2 2 1 2.
+
+ <_>
+ 9 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 3 -1.
+
+ <_>
+ 4 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 1 18 3 -1.
+
+ <_>
+ 8 1 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 1 4 14 -1.
+
+ <_>
+ 7 1 2 14 2.
+
+
+
+ <_>
+
+ <_>
+ 8 16 12 3 -1.
+
+ <_>
+ 8 16 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 17 18 3 -1.
+
+ <_>
+ 7 17 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 6 -1.
+
+ <_>
+ 9 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 1 8 -1.
+
+ <_>
+ 9 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 12 -1.
+
+ <_>
+ 9 10 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 1 4 8 -1.
+
+ <_>
+ 2 1 2 8 2.
+
+
+
+ <_>
+
+ <_>
+ 9 1 6 2 -1.
+
+ <_>
+ 12 1 3 1 2.
+
+ <_>
+ 9 2 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 3 12 14 -1.
+
+ <_>
+ 1 10 12 7 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 2 -1.
+
+ <_>
+ 10 12 2 1 2.
+
+ <_>
+ 8 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 9 10 2 -1.
+
+ <_>
+ 1 9 5 1 2.
+
+ <_>
+ 6 10 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 8 8 3 -1.
+
+ <_>
+ 6 9 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 15 5 3 -1.
+
+ <_>
+ 9 16 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 4 3 -1.
+
+ <_>
+ 8 8 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 2 -1.
+
+ <_>
+ 7 8 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 8 2 -1.
+
+ <_>
+ 5 7 4 1 2.
+
+ <_>
+ 9 8 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 7 4 2 -1.
+
+ <_>
+ 4 8 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 9 -1.
+
+ <_>
+ 14 5 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 9 3 3 -1.
+
+ <_>
+ 5 9 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 6 9 -1.
+
+ <_>
+ 0 5 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 17 3 3 6 -1.
+
+ <_>
+ 18 3 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 0 3 3 6 -1.
+
+ <_>
+ 1 3 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 17 14 1 2 -1.
+
+ <_>
+ 17 15 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 4 3 -1.
+
+ <_>
+ 6 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 9 3 3 -1.
+
+ <_>
+ 12 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 9 3 3 -1.
+
+ <_>
+ 5 10 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 8 -1.
+
+ <_>
+ 12 5 3 4 2.
+
+ <_>
+ 9 9 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 6 8 -1.
+
+ <_>
+ 5 5 3 4 2.
+
+ <_>
+ 8 9 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 16 1 4 6 -1.
+
+ <_>
+ 16 4 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 6 20 -1.
+
+ <_>
+ 3 0 2 20 3.
+
+
+
+ <_>
+
+ <_>
+ 12 11 3 2 -1.
+
+ <_>
+ 13 11 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 3 2 -1.
+
+ <_>
+ 6 11 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 4 6 1 -1.
+
+ <_>
+ 11 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 0 8 3 -1.
+
+ <_>
+ 4 0 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 15 0 2 5 -1.
+
+ <_>
+ 15 0 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 1 3 2 -1.
+
+ <_>
+ 5 1 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 15 -1.
+
+ <_>
+ 9 0 2 15 3.
+
+
+
+ <_>
+
+ <_>
+ 6 11 3 1 -1.
+
+ <_>
+ 7 11 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 0 3 4 -1.
+
+ <_>
+ 13 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 6 1 -1.
+
+ <_>
+ 7 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 7 3 2 -1.
+
+ <_>
+ 12 8 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 4 6 -1.
+
+ <_>
+ 0 4 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 7 3 2 -1.
+
+ <_>
+ 12 8 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 16 3 3 -1.
+
+ <_>
+ 2 17 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 8 6 10 -1.
+
+ <_>
+ 16 8 3 5 2.
+
+ <_>
+ 13 13 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 9 5 2 -1.
+
+ <_>
+ 0 10 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 11 2 2 -1.
+
+ <_>
+ 13 11 1 1 2.
+
+ <_>
+ 12 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 15 3 3 -1.
+
+ <_>
+ 3 16 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 7 3 2 -1.
+
+ <_>
+ 12 8 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 3 2 -1.
+
+ <_>
+ 5 8 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 9 9 -1.
+
+ <_>
+ 9 8 9 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 3 7 -1.
+
+ <_>
+ 6 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 12 5 -1.
+
+ <_>
+ 9 2 4 5 3.
+
+
+
+ <_>
+
+ <_>
+ 6 11 2 2 -1.
+
+ <_>
+ 6 11 1 1 2.
+
+ <_>
+ 7 12 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 15 3 2 -1.
+
+ <_>
+ 15 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 15 3 2 -1.
+
+ <_>
+ 2 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 12 6 8 -1.
+
+ <_>
+ 17 12 3 4 2.
+
+ <_>
+ 14 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 8 15 6 -1.
+
+ <_>
+ 7 8 5 6 3.
+
+
+
+ <_>
+
+ <_>
+ 2 2 18 17 -1.
+
+ <_>
+ 8 2 6 17 3.
+
+
+
+ <_>
+
+ <_>
+ 5 1 4 1 -1.
+
+ <_>
+ 7 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 2 12 5 -1.
+
+ <_>
+ 9 2 4 5 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 12 5 -1.
+
+ <_>
+ 7 2 4 5 3.
+
+
+
+ <_>
+
+ <_>
+ 4 9 12 4 -1.
+
+ <_>
+ 10 9 6 2 2.
+
+ <_>
+ 4 11 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 15 6 2 -1.
+
+ <_>
+ 5 15 3 1 2.
+
+ <_>
+ 8 16 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 14 2 3 -1.
+
+ <_>
+ 10 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 13 20 2 -1.
+
+ <_>
+ 0 13 10 1 2.
+
+ <_>
+ 10 14 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 12 8 -1.
+
+ <_>
+ 10 9 6 4 2.
+
+ <_>
+ 4 13 6 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 3 6 -1.
+
+ <_>
+ 8 16 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 2 2 -1.
+
+ <_>
+ 10 13 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 12 2 2 -1.
+
+ <_>
+ 9 12 1 1 2.
+
+ <_>
+ 10 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 14 4 -1.
+
+ <_>
+ 11 11 7 2 2.
+
+ <_>
+ 4 13 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 5 4 2 -1.
+
+ <_>
+ 8 6 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 10 6 3 -1.
+
+ <_>
+ 12 10 2 3 3.
+
+
+
+ <_>
+
+ <_>
+ 2 14 1 2 -1.
+
+ <_>
+ 2 15 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 8 6 12 -1.
+
+ <_>
+ 16 8 3 6 2.
+
+ <_>
+ 13 14 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 1 8 6 12 -1.
+
+ <_>
+ 1 8 3 6 2.
+
+ <_>
+ 4 14 3 6 2.
+
+
+
+ <_>
+
+ <_>
+ 10 0 6 10 -1.
+
+ <_>
+ 12 0 2 10 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 8 4 -1.
+
+ <_>
+ 5 11 4 2 2.
+
+ <_>
+ 9 13 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 16 8 4 -1.
+
+ <_>
+ 14 16 4 2 2.
+
+ <_>
+ 10 18 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 6 -1.
+
+ <_>
+ 9 7 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 10 2 4 10 -1.
+
+ <_>
+ 10 2 2 10 2.
+
+
+
+ <_>
+
+ <_>
+ 6 1 4 9 -1.
+
+ <_>
+ 8 1 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 12 19 2 1 -1.
+
+ <_>
+ 12 19 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 4 9 -1.
+
+ <_>
+ 3 2 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 7 5 6 4 -1.
+
+ <_>
+ 9 5 2 4 3.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 4 -1.
+
+ <_>
+ 9 6 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 14 5 2 8 -1.
+
+ <_>
+ 14 9 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 5 12 -1.
+
+ <_>
+ 7 12 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 14 6 2 6 -1.
+
+ <_>
+ 14 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 6 2 6 -1.
+
+ <_>
+ 4 9 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 10 4 -1.
+
+ <_>
+ 13 15 5 2 2.
+
+ <_>
+ 8 17 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 18 2 2 -1.
+
+ <_>
+ 7 18 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 3 6 2 -1.
+
+ <_>
+ 11 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 0 16 6 -1.
+
+ <_>
+ 2 2 16 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 6 2 -1.
+
+ <_>
+ 11 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 11 10 3 -1.
+
+ <_>
+ 4 12 10 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 6 2 -1.
+
+ <_>
+ 11 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 3 6 2 -1.
+
+ <_>
+ 3 4 6 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 7 -1.
+
+ <_>
+ 16 0 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 0 14 9 6 -1.
+
+ <_>
+ 0 16 9 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 16 3 3 -1.
+
+ <_>
+ 9 17 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 6 6 2 -1.
+
+ <_>
+ 6 6 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 15 11 1 3 -1.
+
+ <_>
+ 15 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 3 -1.
+
+ <_>
+ 5 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 9 2 2 -1.
+
+ <_>
+ 10 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 1 4 3 -1.
+
+ <_>
+ 5 1 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 16 0 4 7 -1.
+
+ <_>
+ 16 0 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 20 1 -1.
+
+ <_>
+ 10 0 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 15 11 1 3 -1.
+
+ <_>
+ 15 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 3 4 -1.
+
+ <_>
+ 1 4 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 16 3 3 6 -1.
+
+ <_>
+ 16 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 3 3 6 -1.
+
+ <_>
+ 1 5 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 2 12 6 -1.
+
+ <_>
+ 12 2 6 3 2.
+
+ <_>
+ 6 5 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 10 4 3 -1.
+
+ <_>
+ 8 11 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 14 6 -1.
+
+ <_>
+ 11 2 7 3 2.
+
+ <_>
+ 4 5 7 3 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 3 -1.
+
+ <_>
+ 15 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 3 -1.
+
+ <_>
+ 8 13 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 11 1 3 -1.
+
+ <_>
+ 15 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 13 5 2 -1.
+
+ <_>
+ 7 14 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 12 6 3 -1.
+
+ <_>
+ 7 13 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 11 4 4 -1.
+
+ <_>
+ 5 13 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 3 3 -1.
+
+ <_>
+ 12 4 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 6 4 3 3 -1.
+
+ <_>
+ 7 4 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 16 5 3 6 -1.
+
+ <_>
+ 17 5 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 3 6 12 7 -1.
+
+ <_>
+ 7 6 4 7 3.
+
+
+
+ <_>
+
+ <_>
+ 16 5 3 6 -1.
+
+ <_>
+ 17 5 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 2 3 -1.
+
+ <_>
+ 3 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 5 3 6 -1.
+
+ <_>
+ 17 5 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 1 5 3 6 -1.
+
+ <_>
+ 2 5 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 1 9 18 1 -1.
+
+ <_>
+ 7 9 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 9 8 7 -1.
+
+ <_>
+ 4 9 4 7 2.
+
+
+
+ <_>
+
+ <_>
+ 12 11 8 2 -1.
+
+ <_>
+ 12 12 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 11 8 2 -1.
+
+ <_>
+ 0 12 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 13 2 3 -1.
+
+ <_>
+ 9 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 10 12 4 -1.
+
+ <_>
+ 4 10 6 2 2.
+
+ <_>
+ 10 12 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 3 7 -1.
+
+ <_>
+ 10 3 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 7 2 3 5 -1.
+
+ <_>
+ 8 2 1 5 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 4 6 -1.
+
+ <_>
+ 11 12 2 3 2.
+
+ <_>
+ 9 15 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 6 -1.
+
+ <_>
+ 9 7 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 15 4 4 2 -1.
+
+ <_>
+ 15 5 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 9 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 14 2 6 4 -1.
+
+ <_>
+ 14 4 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 7 16 6 1 -1.
+
+ <_>
+ 9 16 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 13 2 3 -1.
+
+ <_>
+ 15 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 10 -1.
+
+ <_>
+ 9 7 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 11 10 2 6 -1.
+
+ <_>
+ 11 12 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 10 4 1 -1.
+
+ <_>
+ 8 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 9 2 2 -1.
+
+ <_>
+ 10 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 9 2 2 -1.
+
+ <_>
+ 8 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 7 2 2 -1.
+
+ <_>
+ 13 7 1 1 2.
+
+ <_>
+ 12 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 2 2 -1.
+
+ <_>
+ 5 7 1 1 2.
+
+ <_>
+ 6 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 3 14 -1.
+
+ <_>
+ 14 0 1 14 3.
+
+
+
+ <_>
+
+ <_>
+ 4 0 3 14 -1.
+
+ <_>
+ 5 0 1 14 3.
+
+
+
+ <_>
+
+ <_>
+ 13 4 3 14 -1.
+
+ <_>
+ 14 4 1 14 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 3 -1.
+
+ <_>
+ 9 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 2 3 16 -1.
+
+ <_>
+ 5 2 1 16 3.
+
+
+
+ <_>
+
+ <_>
+ 7 2 8 10 -1.
+
+ <_>
+ 7 7 8 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 14 7 3 -1.
+
+ <_>
+ 6 15 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 2 10 12 -1.
+
+ <_>
+ 14 2 5 6 2.
+
+ <_>
+ 9 8 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 7 8 2 -1.
+
+ <_>
+ 6 8 8 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 4 6 -1.
+
+ <_>
+ 8 16 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 1 3 -1.
+
+ <_>
+ 6 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 2 4 6 -1.
+
+ <_>
+ 16 4 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 4 2 -1.
+
+ <_>
+ 6 6 2 1 2.
+
+ <_>
+ 8 7 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 16 2 4 6 -1.
+
+ <_>
+ 16 4 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 4 6 -1.
+
+ <_>
+ 0 4 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 6 2 6 -1.
+
+ <_>
+ 9 6 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 3 4 6 10 -1.
+
+ <_>
+ 3 9 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 6 -1.
+
+ <_>
+ 9 5 1 6 2.
+
+
+
+ <_>
+
+ <_>
+ 3 13 2 3 -1.
+
+ <_>
+ 3 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 13 3 2 -1.
+
+ <_>
+ 13 14 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 16 10 4 -1.
+
+ <_>
+ 2 16 5 2 2.
+
+ <_>
+ 7 18 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 5 6 10 6 -1.
+
+ <_>
+ 10 6 5 3 2.
+
+ <_>
+ 5 9 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 1 3 -1.
+
+ <_>
+ 7 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 16 6 3 -1.
+
+ <_>
+ 14 17 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 4 10 3 -1.
+
+ <_>
+ 7 5 10 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 5 4 -1.
+
+ <_>
+ 0 6 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 11 3 9 -1.
+
+ <_>
+ 13 14 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 3 9 -1.
+
+ <_>
+ 4 14 3 3 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 1 -1.
+
+ <_>
+ 9 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 0 6 17 -1.
+
+ <_>
+ 7 0 2 17 3.
+
+
+
+ <_>
+
+ <_>
+ 10 3 6 3 -1.
+
+ <_>
+ 10 3 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 2 15 4 -1.
+
+ <_>
+ 7 2 5 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 2 8 2 -1.
+
+ <_>
+ 12 2 4 1 2.
+
+ <_>
+ 8 3 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 1 3 6 -1.
+
+ <_>
+ 8 3 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 17 2 2 -1.
+
+ <_>
+ 9 18 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 2 14 -1.
+
+ <_>
+ 1 0 1 14 2.
+
+
+
+ <_>
+
+ <_>
+ 12 0 7 3 -1.
+
+ <_>
+ 12 1 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 14 1 2 -1.
+
+ <_>
+ 1 15 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 12 2 8 -1.
+
+ <_>
+ 15 12 1 4 2.
+
+ <_>
+ 14 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 7 3 -1.
+
+ <_>
+ 1 1 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 2 8 -1.
+
+ <_>
+ 15 12 1 4 2.
+
+ <_>
+ 14 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 12 -1.
+
+ <_>
+ 6 0 4 6 2.
+
+ <_>
+ 10 6 4 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 1 8 9 -1.
+
+ <_>
+ 6 4 8 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 2 2 -1.
+
+ <_>
+ 5 3 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 14 6 6 -1.
+
+ <_>
+ 16 14 3 3 2.
+
+ <_>
+ 13 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 17 20 2 -1.
+
+ <_>
+ 0 17 10 1 2.
+
+ <_>
+ 10 18 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 3 2 6 -1.
+
+ <_>
+ 11 3 1 3 2.
+
+ <_>
+ 10 6 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 12 6 2 -1.
+
+ <_>
+ 8 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 6 13 -1.
+
+ <_>
+ 10 7 3 13 2.
+
+
+
+ <_>
+
+ <_>
+ 5 15 10 5 -1.
+
+ <_>
+ 10 15 5 5 2.
+
+
+
+ <_>
+
+ <_>
+ 10 4 4 10 -1.
+
+ <_>
+ 10 4 2 10 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 2 1 -1.
+
+ <_>
+ 6 7 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 3 6 7 -1.
+
+ <_>
+ 10 3 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 4 3 6 7 -1.
+
+ <_>
+ 7 3 3 7 2.
+
+
+
+ <_>
+
+ <_>
+ 1 7 18 5 -1.
+
+ <_>
+ 7 7 6 5 3.
+
+
+
+ <_>
+
+ <_>
+ 3 17 4 3 -1.
+
+ <_>
+ 5 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 12 6 -1.
+
+ <_>
+ 14 14 6 3 2.
+
+ <_>
+ 8 17 6 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 13 20 4 -1.
+
+ <_>
+ 0 13 10 2 2.
+
+ <_>
+ 10 15 10 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 5 14 2 -1.
+
+ <_>
+ 11 5 7 1 2.
+
+ <_>
+ 4 6 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 10 12 -1.
+
+ <_>
+ 1 2 5 6 2.
+
+ <_>
+ 6 8 5 6 2.
+
+
+
+ <_>
+
+ <_>
+ 6 1 14 3 -1.
+
+ <_>
+ 6 2 14 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 16 2 3 -1.
+
+ <_>
+ 8 17 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 2 -1.
+
+ <_>
+ 10 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 15 4 2 -1.
+
+ <_>
+ 5 15 2 1 2.
+
+ <_>
+ 7 16 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 15 1 3 -1.
+
+ <_>
+ 10 16 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 16 4 4 -1.
+
+ <_>
+ 8 16 2 2 2.
+
+ <_>
+ 10 18 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 11 8 6 -1.
+
+ <_>
+ 6 14 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 2 13 5 2 -1.
+
+ <_>
+ 2 14 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 14 6 6 -1.
+
+ <_>
+ 16 14 3 3 2.
+
+ <_>
+ 13 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 9 18 4 -1.
+
+ <_>
+ 7 9 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 13 14 6 6 -1.
+
+ <_>
+ 16 14 3 3 2.
+
+ <_>
+ 13 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 1 6 -1.
+
+ <_>
+ 0 4 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 15 20 -1.
+
+ <_>
+ 5 10 15 10 2.
+
+
+
+ <_>
+
+ <_>
+ 1 14 6 6 -1.
+
+ <_>
+ 1 14 3 3 2.
+
+ <_>
+ 4 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 6 -1.
+
+ <_>
+ 10 14 2 3 2.
+
+ <_>
+ 8 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 11 2 1 -1.
+
+ <_>
+ 8 11 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 17 3 2 -1.
+
+ <_>
+ 10 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 17 3 2 -1.
+
+ <_>
+ 9 17 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 14 4 6 -1.
+
+ <_>
+ 14 14 2 3 2.
+
+ <_>
+ 12 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 14 4 6 -1.
+
+ <_>
+ 4 14 2 3 2.
+
+ <_>
+ 6 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 13 14 2 6 -1.
+
+ <_>
+ 14 14 1 3 2.
+
+ <_>
+ 13 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 14 2 6 -1.
+
+ <_>
+ 5 14 1 3 2.
+
+ <_>
+ 6 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 0 6 12 -1.
+
+ <_>
+ 7 4 6 4 3.
+
+
+
+ <_>
+
+ <_>
+ 0 7 12 2 -1.
+
+ <_>
+ 4 7 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 10 3 3 13 -1.
+
+ <_>
+ 11 3 1 13 3.
+
+
+
+ <_>
+
+ <_>
+ 7 3 3 13 -1.
+
+ <_>
+ 8 3 1 13 3.
+
+
+
+ <_>
+
+ <_>
+ 10 8 6 3 -1.
+
+ <_>
+ 10 9 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 11 3 2 -1.
+
+ <_>
+ 4 11 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 13 12 6 8 -1.
+
+ <_>
+ 16 12 3 4 2.
+
+ <_>
+ 13 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 5 -1.
+
+ <_>
+ 9 6 2 5 3.
+
+
+
+ <_>
+
+ <_>
+ 17 11 2 7 -1.
+
+ <_>
+ 17 11 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 3 13 8 2 -1.
+
+ <_>
+ 7 13 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 9 8 3 -1.
+
+ <_>
+ 6 10 8 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 3 4 3 -1.
+
+ <_>
+ 4 4 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 4 3 -1.
+
+ <_>
+ 11 4 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 4 17 12 -1.
+
+ <_>
+ 1 8 17 4 3.
+
+
+
+ <_>
+
+ <_>
+ 11 3 4 3 -1.
+
+ <_>
+ 11 4 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 8 6 3 -1.
+
+ <_>
+ 4 9 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 3 5 3 -1.
+
+ <_>
+ 12 4 5 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 11 2 7 -1.
+
+ <_>
+ 2 11 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 15 12 2 8 -1.
+
+ <_>
+ 16 12 1 4 2.
+
+ <_>
+ 15 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 4 8 11 3 -1.
+
+ <_>
+ 4 9 11 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 13 6 2 -1.
+
+ <_>
+ 12 13 3 1 2.
+
+ <_>
+ 9 14 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 13 4 3 -1.
+
+ <_>
+ 6 14 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 3 3 -1.
+
+ <_>
+ 10 12 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 3 3 3 -1.
+
+ <_>
+ 5 4 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 4 2 3 -1.
+
+ <_>
+ 9 5 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 16 3 -1.
+
+ <_>
+ 0 3 16 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 12 2 8 -1.
+
+ <_>
+ 16 12 1 4 2.
+
+ <_>
+ 15 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 3 12 2 8 -1.
+
+ <_>
+ 3 12 1 4 2.
+
+ <_>
+ 4 16 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 14 13 3 6 -1.
+
+ <_>
+ 14 15 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 3 13 3 6 -1.
+
+ <_>
+ 3 15 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 6 5 10 2 -1.
+
+ <_>
+ 11 5 5 1 2.
+
+ <_>
+ 6 6 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 14 14 6 -1.
+
+ <_>
+ 2 17 14 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 14 1 3 -1.
+
+ <_>
+ 10 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 16 2 2 -1.
+
+ <_>
+ 4 16 1 1 2.
+
+ <_>
+ 5 17 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 6 2 3 -1.
+
+ <_>
+ 10 7 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 17 20 2 -1.
+
+ <_>
+ 0 17 10 1 2.
+
+ <_>
+ 10 18 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 6 1 3 -1.
+
+ <_>
+ 13 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 3 2 -1.
+
+ <_>
+ 9 13 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 2 3 3 -1.
+
+ <_>
+ 13 2 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 3 18 2 2 -1.
+
+ <_>
+ 3 18 1 1 2.
+
+ <_>
+ 4 19 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 16 3 4 -1.
+
+ <_>
+ 10 16 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 6 6 1 3 -1.
+
+ <_>
+ 6 7 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 1 5 2 -1.
+
+ <_>
+ 13 2 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 14 6 2 -1.
+
+ <_>
+ 7 14 3 1 2.
+
+ <_>
+ 10 15 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 3 3 4 -1.
+
+ <_>
+ 12 3 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 1 13 12 6 -1.
+
+ <_>
+ 5 13 4 6 3.
+
+
+
+ <_>
+
+ <_>
+ 14 11 5 2 -1.
+
+ <_>
+ 14 12 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 15 14 4 -1.
+
+ <_>
+ 2 15 7 2 2.
+
+ <_>
+ 9 17 7 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 7 14 2 -1.
+
+ <_>
+ 10 7 7 1 2.
+
+ <_>
+ 3 8 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 11 4 2 -1.
+
+ <_>
+ 1 12 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 14 -1.
+
+ <_>
+ 16 0 2 14 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 1 3 -1.
+
+ <_>
+ 4 12 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 0 6 14 -1.
+
+ <_>
+ 16 0 2 14 3.
+
+
+
+ <_>
+
+ <_>
+ 1 10 3 7 -1.
+
+ <_>
+ 2 10 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 8 12 9 2 -1.
+
+ <_>
+ 8 13 9 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 6 20 1 -1.
+
+ <_>
+ 10 6 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 4 4 4 -1.
+
+ <_>
+ 8 4 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 2 2 -1.
+
+ <_>
+ 0 1 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 3 10 9 -1.
+
+ <_>
+ 5 6 10 3 3.
+
+
+
+ <_>
+
+ <_>
+ 15 2 4 10 -1.
+
+ <_>
+ 15 2 2 10 2.
+
+
+
+ <_>
+
+ <_>
+ 8 2 2 7 -1.
+
+ <_>
+ 9 2 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 7 4 12 1 -1.
+
+ <_>
+ 11 4 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 4 9 1 -1.
+
+ <_>
+ 6 4 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 15 10 1 4 -1.
+
+ <_>
+ 15 12 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 4 10 6 4 -1.
+
+ <_>
+ 7 10 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 15 9 1 6 -1.
+
+ <_>
+ 15 12 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 17 6 3 -1.
+
+ <_>
+ 7 18 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 3 2 16 -1.
+
+ <_>
+ 15 3 1 8 2.
+
+ <_>
+ 14 11 1 8 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 1 6 -1.
+
+ <_>
+ 4 12 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 1 5 2 -1.
+
+ <_>
+ 12 2 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 18 4 2 -1.
+
+ <_>
+ 6 18 2 1 2.
+
+ <_>
+ 8 19 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 4 16 10 -1.
+
+ <_>
+ 10 4 8 5 2.
+
+ <_>
+ 2 9 8 5 2.
+
+
+
+ <_>
+
+ <_>
+ 6 5 1 10 -1.
+
+ <_>
+ 6 10 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 4 8 15 2 -1.
+
+ <_>
+ 9 8 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 1 8 15 2 -1.
+
+ <_>
+ 6 8 5 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 3 6 -1.
+
+ <_>
+ 9 7 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 8 2 -1.
+
+ <_>
+ 9 7 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 11 2 3 -1.
+
+ <_>
+ 9 12 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 1 0 16 3 -1.
+
+ <_>
+ 1 1 16 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 2 7 2 -1.
+
+ <_>
+ 11 3 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 1 10 18 -1.
+
+ <_>
+ 5 7 10 6 3.
+
+
+
+ <_>
+
+ <_>
+ 17 4 3 2 -1.
+
+ <_>
+ 18 4 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 1 3 -1.
+
+ <_>
+ 8 14 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 14 6 -1.
+
+ <_>
+ 3 16 14 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 2 3 4 -1.
+
+ <_>
+ 1 2 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 12 1 5 2 -1.
+
+ <_>
+ 12 2 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 3 1 5 2 -1.
+
+ <_>
+ 3 2 5 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 13 2 3 -1.
+
+ <_>
+ 10 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 13 2 3 -1.
+
+ <_>
+ 8 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 12 2 3 -1.
+
+ <_>
+ 14 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 2 2 3 -1.
+
+ <_>
+ 7 3 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 10 4 -1.
+
+ <_>
+ 10 6 5 2 2.
+
+ <_>
+ 5 8 5 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 13 1 6 -1.
+
+ <_>
+ 9 16 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 12 2 2 -1.
+
+ <_>
+ 11 12 1 1 2.
+
+ <_>
+ 10 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 12 2 3 -1.
+
+ <_>
+ 4 13 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 4 6 6 -1.
+
+ <_>
+ 14 6 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 17 2 3 -1.
+
+ <_>
+ 8 18 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 16 4 4 6 -1.
+
+ <_>
+ 16 6 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 4 6 -1.
+
+ <_>
+ 0 6 4 2 3.
+
+
+
+ <_>
+
+ <_>
+ 14 6 2 3 -1.
+
+ <_>
+ 14 6 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 9 8 1 -1.
+
+ <_>
+ 8 9 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 12 4 3 -1.
+
+ <_>
+ 8 13 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 12 10 6 -1.
+
+ <_>
+ 5 14 10 2 3.
+
+
+
+ <_>
+
+ <_>
+ 11 12 1 2 -1.
+
+ <_>
+ 11 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 2 -1.
+
+ <_>
+ 8 16 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 6 9 8 8 -1.
+
+ <_>
+ 10 9 4 4 2.
+
+ <_>
+ 6 13 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 7 12 4 6 -1.
+
+ <_>
+ 7 12 2 3 2.
+
+ <_>
+ 9 15 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 11 3 1 -1.
+
+ <_>
+ 11 11 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 10 -1.
+
+ <_>
+ 9 7 1 5 2.
+
+ <_>
+ 10 12 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 8 0 6 6 -1.
+
+ <_>
+ 10 0 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 3 11 2 6 -1.
+
+ <_>
+ 3 13 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 16 12 1 2 -1.
+
+ <_>
+ 16 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 1 14 6 6 -1.
+
+ <_>
+ 1 14 3 3 2.
+
+ <_>
+ 4 17 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 13 1 3 6 -1.
+
+ <_>
+ 14 1 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 8 8 2 2 -1.
+
+ <_>
+ 8 9 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 9 3 3 -1.
+
+ <_>
+ 10 9 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 8 7 3 3 -1.
+
+ <_>
+ 8 8 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 0 2 3 -1.
+
+ <_>
+ 14 0 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 18 9 -1.
+
+ <_>
+ 7 0 6 9 3.
+
+
+
+ <_>
+
+ <_>
+ 11 5 4 15 -1.
+
+ <_>
+ 11 5 2 15 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 4 15 -1.
+
+ <_>
+ 7 5 2 15 2.
+
+
+
+ <_>
+
+ <_>
+ 14 0 2 3 -1.
+
+ <_>
+ 14 0 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 4 0 2 3 -1.
+
+ <_>
+ 5 0 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 11 12 2 2 -1.
+
+ <_>
+ 12 12 1 1 2.
+
+ <_>
+ 11 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 12 2 2 -1.
+
+ <_>
+ 7 12 1 1 2.
+
+ <_>
+ 8 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 0 3 4 -1.
+
+ <_>
+ 13 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 11 3 3 -1.
+
+ <_>
+ 4 12 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 7 4 2 -1.
+
+ <_>
+ 12 8 4 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 10 3 2 -1.
+
+ <_>
+ 9 10 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 9 3 2 -1.
+
+ <_>
+ 10 9 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 9 3 2 -1.
+
+ <_>
+ 9 9 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 0 3 4 -1.
+
+ <_>
+ 13 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 0 3 4 -1.
+
+ <_>
+ 6 0 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 4 14 12 4 -1.
+
+ <_>
+ 10 14 6 2 2.
+
+ <_>
+ 4 16 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 13 2 3 -1.
+
+ <_>
+ 8 14 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 10 3 8 -1.
+
+ <_>
+ 10 14 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 8 10 4 8 -1.
+
+ <_>
+ 8 10 2 4 2.
+
+ <_>
+ 10 14 2 4 2.
+
+
+
+ <_>
+
+ <_>
+ 10 8 3 1 -1.
+
+ <_>
+ 11 8 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 12 1 6 -1.
+
+ <_>
+ 9 15 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 8 3 1 -1.
+
+ <_>
+ 11 8 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 8 3 1 -1.
+
+ <_>
+ 8 8 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 2 15 14 -1.
+
+ <_>
+ 5 9 15 7 2.
+
+
+
+ <_>
+
+ <_>
+ 2 1 2 10 -1.
+
+ <_>
+ 2 1 1 5 2.
+
+ <_>
+ 3 6 1 5 2.
+
+
+
+ <_>
+
+ <_>
+ 14 14 2 3 -1.
+
+ <_>
+ 14 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 2 7 3 3 -1.
+
+ <_>
+ 3 7 1 3 3.
+
+
+
+ <_>
+
+ <_>
+ 17 4 3 3 -1.
+
+ <_>
+ 17 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 3 3 -1.
+
+ <_>
+ 0 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 5 6 2 -1.
+
+ <_>
+ 16 5 3 1 2.
+
+ <_>
+ 13 6 3 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 19 12 1 -1.
+
+ <_>
+ 8 19 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 12 2 4 -1.
+
+ <_>
+ 12 14 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 3 15 1 3 -1.
+
+ <_>
+ 3 16 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 16 6 4 -1.
+
+ <_>
+ 11 16 3 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 10 3 10 -1.
+
+ <_>
+ 3 10 1 10 3.
+
+
+
+ <_>
+
+ <_>
+ 12 8 2 4 -1.
+
+ <_>
+ 12 8 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 8 2 4 -1.
+
+ <_>
+ 7 8 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 10 14 2 3 -1.
+
+ <_>
+ 10 14 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 5 1 10 3 -1.
+
+ <_>
+ 10 1 5 3 2.
+
+
+
+ <_>
+
+ <_>
+ 10 7 3 2 -1.
+
+ <_>
+ 11 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 6 9 2 -1.
+
+ <_>
+ 8 6 3 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 8 2 2 -1.
+
+ <_>
+ 9 9 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 2 11 16 6 -1.
+
+ <_>
+ 2 11 8 3 2.
+
+ <_>
+ 10 14 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 12 7 2 2 -1.
+
+ <_>
+ 13 7 1 1 2.
+
+ <_>
+ 12 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 2 3 -1.
+
+ <_>
+ 9 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 2 -1.
+
+ <_>
+ 10 7 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 1 8 12 -1.
+
+ <_>
+ 5 7 8 6 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 2 -1.
+
+ <_>
+ 13 6 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 2 -1.
+
+ <_>
+ 5 6 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 14 2 3 -1.
+
+ <_>
+ 4 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 12 4 3 3 -1.
+
+ <_>
+ 12 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 3 3 -1.
+
+ <_>
+ 5 5 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 6 -1.
+
+ <_>
+ 10 14 1 3 2.
+
+ <_>
+ 9 17 1 3 2.
+
+
+
+ <_>
+
+ <_>
+ 8 14 3 2 -1.
+
+ <_>
+ 9 14 1 2 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 6 6 -1.
+
+ <_>
+ 11 5 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 5 5 6 6 -1.
+
+ <_>
+ 7 5 2 6 3.
+
+
+
+ <_>
+
+ <_>
+ 13 13 1 2 -1.
+
+ <_>
+ 13 14 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 10 2 -1.
+
+ <_>
+ 0 3 10 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 13 1 2 -1.
+
+ <_>
+ 13 14 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 5 7 2 2 -1.
+
+ <_>
+ 5 7 1 1 2.
+
+ <_>
+ 6 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 7 -1.
+
+ <_>
+ 13 5 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 6 13 1 2 -1.
+
+ <_>
+ 6 14 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 7 -1.
+
+ <_>
+ 12 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 0 3 2 16 -1.
+
+ <_>
+ 0 3 1 8 2.
+
+ <_>
+ 1 11 1 8 2.
+
+
+
+ <_>
+
+ <_>
+ 11 0 3 7 -1.
+
+ <_>
+ 12 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 3 7 -1.
+
+ <_>
+ 7 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 11 16 8 4 -1.
+
+ <_>
+ 11 16 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 1 16 8 4 -1.
+
+ <_>
+ 5 16 4 4 2.
+
+
+
+ <_>
+
+ <_>
+ 13 5 2 7 -1.
+
+ <_>
+ 13 5 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 5 5 2 7 -1.
+
+ <_>
+ 6 5 1 7 2.
+
+
+
+ <_>
+
+ <_>
+ 18 6 2 14 -1.
+
+ <_>
+ 18 13 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 6 10 3 4 -1.
+
+ <_>
+ 6 12 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 14 7 1 2 -1.
+
+ <_>
+ 14 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 18 6 -1.
+
+ <_>
+ 0 1 9 3 2.
+
+ <_>
+ 9 4 9 3 2.
+
+
+
+ <_>
+
+ <_>
+ 14 7 1 2 -1.
+
+ <_>
+ 14 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 6 2 14 -1.
+
+ <_>
+ 0 13 2 7 2.
+
+
+
+ <_>
+
+ <_>
+ 17 0 3 12 -1.
+
+ <_>
+ 18 0 1 12 3.
+
+
+
+ <_>
+
+ <_>
+ 0 6 18 3 -1.
+
+ <_>
+ 0 7 18 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 14 16 -1.
+
+ <_>
+ 6 8 14 8 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 3 12 -1.
+
+ <_>
+ 1 0 1 12 3.
+
+
+
+ <_>
+
+ <_>
+ 13 0 3 7 -1.
+
+ <_>
+ 14 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 1 2 -1.
+
+ <_>
+ 5 8 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 14 4 6 6 -1.
+
+ <_>
+ 14 6 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 5 7 7 2 -1.
+
+ <_>
+ 5 8 7 1 2.
+
+
+
+ <_>
+
+ <_>
+ 8 6 6 9 -1.
+
+ <_>
+ 8 9 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 5 4 6 1 -1.
+
+ <_>
+ 7 4 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 13 0 6 4 -1.
+
+ <_>
+ 16 0 3 2 2.
+
+ <_>
+ 13 2 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 2 18 12 -1.
+
+ <_>
+ 1 6 18 4 3.
+
+
+
+ <_>
+
+ <_>
+ 3 2 17 12 -1.
+
+ <_>
+ 3 6 17 4 3.
+
+
+
+ <_>
+
+ <_>
+ 5 14 7 3 -1.
+
+ <_>
+ 5 15 7 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 14 1 3 -1.
+
+ <_>
+ 10 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 3 14 3 3 -1.
+
+ <_>
+ 3 15 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 14 4 6 6 -1.
+
+ <_>
+ 14 6 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 0 4 6 6 -1.
+
+ <_>
+ 0 6 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 12 5 4 3 -1.
+
+ <_>
+ 12 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 4 5 4 3 -1.
+
+ <_>
+ 4 6 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 18 0 2 6 -1.
+
+ <_>
+ 18 2 2 2 3.
+
+
+
+ <_>
+
+ <_>
+ 8 1 4 9 -1.
+
+ <_>
+ 10 1 2 9 2.
+
+
+
+ <_>
+
+ <_>
+ 6 6 8 2 -1.
+
+ <_>
+ 6 6 4 2 2.
+
+
+
+ <_>
+
+ <_>
+ 6 5 4 2 -1.
+
+ <_>
+ 6 5 2 1 2.
+
+ <_>
+ 8 6 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 10 5 2 3 -1.
+
+ <_>
+ 10 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 1 3 -1.
+
+ <_>
+ 9 6 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 10 2 2 -1.
+
+ <_>
+ 9 11 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 8 4 3 -1.
+
+ <_>
+ 0 9 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 6 0 8 6 -1.
+
+ <_>
+ 6 3 8 3 2.
+
+
+
+ <_>
+
+ <_>
+ 1 0 6 4 -1.
+
+ <_>
+ 1 0 3 2 2.
+
+ <_>
+ 4 2 3 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 0 3 7 -1.
+
+ <_>
+ 14 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 9 16 2 2 -1.
+
+ <_>
+ 9 17 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 11 4 6 10 -1.
+
+ <_>
+ 11 9 6 5 2.
+
+
+
+ <_>
+
+ <_>
+ 0 10 19 2 -1.
+
+ <_>
+ 0 11 19 1 2.
+
+
+
+ <_>
+
+ <_>
+ 9 5 8 9 -1.
+
+ <_>
+ 9 8 8 3 3.
+
+
+
+ <_>
+
+ <_>
+ 4 0 3 7 -1.
+
+ <_>
+ 5 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 8 6 4 12 -1.
+
+ <_>
+ 10 6 2 6 2.
+
+ <_>
+ 8 12 2 6 2.
+
+
+
+ <_>
+
+ <_>
+ 0 2 6 4 -1.
+
+ <_>
+ 0 4 6 2 2.
+
+
+
+ <_>
+
+ <_>
+ 8 15 4 3 -1.
+
+ <_>
+ 8 16 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 0 3 7 -1.
+
+ <_>
+ 9 0 1 7 3.
+
+
+
+ <_>
+
+ <_>
+ 9 5 3 4 -1.
+
+ <_>
+ 10 5 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 8 5 3 4 -1.
+
+ <_>
+ 9 5 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 6 6 1 -1.
+
+ <_>
+ 9 6 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 14 4 4 -1.
+
+ <_>
+ 7 14 2 2 2.
+
+ <_>
+ 9 16 2 2 2.
+
+
+
+ <_>
+
+ <_>
+ 13 14 4 6 -1.
+
+ <_>
+ 15 14 2 3 2.
+
+ <_>
+ 13 17 2 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 8 1 8 -1.
+
+ <_>
+ 7 12 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 16 0 2 8 -1.
+
+ <_>
+ 17 0 1 4 2.
+
+ <_>
+ 16 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 2 0 2 8 -1.
+
+ <_>
+ 2 0 1 4 2.
+
+ <_>
+ 3 4 1 4 2.
+
+
+
+ <_>
+
+ <_>
+ 6 1 14 3 -1.
+
+ <_>
+ 6 2 14 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 9 3 10 -1.
+
+ <_>
+ 7 14 3 5 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 2 2 -1.
+
+ <_>
+ 9 15 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 7 7 6 8 -1.
+
+ <_>
+ 7 11 6 4 2.
+
+
+
+ <_>
+
+ <_>
+ 9 7 3 6 -1.
+
+ <_>
+ 9 10 3 3 2.
+
+
+
+ <_>
+
+ <_>
+ 7 13 3 3 -1.
+
+ <_>
+ 7 14 3 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 9 2 2 -1.
+
+ <_>
+ 9 10 2 1 2.
+
+
+
+ <_>
+
+ <_>
+ 0 1 18 2 -1.
+
+ <_>
+ 6 1 6 2 3.
+
+
+
+ <_>
+
+ <_>
+ 7 1 6 14 -1.
+
+ <_>
+ 7 8 6 7 2.
+
+
+
+ <_>
+
+ <_>
+ 1 9 18 1 -1.
+
+ <_>
+ 7 9 6 1 3.
+
+
+
+ <_>
+
+ <_>
+ 9 7 2 2 -1.
+
+ <_>
+ 9 7 1 2 2.
+
+
+
+ <_>
+
+ <_>
+ 9 3 2 9 -1.
+
+ <_>
+ 10 3 1 9 2.
+
+
+
+ <_>
+
+ <_>
+ 18 14 2 3 -1.
+
+ <_>
+ 18 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 7 11 3 1 -1.
+
+ <_>
+ 8 11 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 10 8 3 4 -1.
+
+ <_>
+ 11 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 14 3 6 -1.
+
+ <_>
+ 8 14 1 6 3.
+
+
+
+ <_>
+
+ <_>
+ 10 8 3 4 -1.
+
+ <_>
+ 11 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 8 3 4 -1.
+
+ <_>
+ 8 8 1 4 3.
+
+
+
+ <_>
+
+ <_>
+ 7 9 6 9 -1.
+
+ <_>
+ 7 12 6 3 3.
+
+
+
+ <_>
+
+ <_>
+ 0 14 2 3 -1.
+
+ <_>
+ 0 15 2 1 3.
+
+
+
+ <_>
+
+ <_>
+ 11 12 1 2 -1.
+
+ <_>
+ 11 13 1 1 2.
+
+
+
+ <_>
+
+ <_>
+ 4 3 8 3 -1.
+
+ <_>
+ 8 3 4 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 4 20 6 -1.
+
+ <_>
+ 0 4 10 6 2.
+
+
+
+ <_>
+
+ <_>
+ 9 14 1 3 -1.
+
+ <_>
+ 9 15 1 1 3.
+
+
+
+ <_>
+
+ <_>
+ 8 14 4 3 -1.
+
+ <_>
+ 8 15 4 1 3.
+
+
+
+ <_>
+
+ <_>
+ 0 15 14 4 -1.
+
+ <_>
+ 0 17 14 2 2.
+
+
+
+ <_>
+
+ <_>
+ 1 14 18 6 -1.
+
+ <_>
+ 1 17 18 3 2.
+
+
+
+ <_>
+
+ <_>
+ 0 0 10 6 -1.
+
+ <_>
+ 0 0 5 3 2.
+
+ <_>
+ 5 3 5 3 2.
+
+
+
+
+
+
diff --git a/joints_detectors/openpose/models/face/pose_deploy.prototxt b/joints_detectors/openpose/models/face/pose_deploy.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..d347748cac0bd03b09a45bfe4772654145c7dda5
--- /dev/null
+++ b/joints_detectors/openpose/models/face/pose_deploy.prototxt
@@ -0,0 +1,1728 @@
+input: "image"
+input_dim: 1
+input_dim: 3
+input_dim: 1 # Original: 368
+input_dim: 1 # Original: 368
+# input: "weights"
+# input_dim: 1
+# input_dim: 71
+# input_dim: 184
+# input_dim: 184
+# input: "labels"
+# input_dim: 1
+# input_dim: 71
+# input_dim: 184
+# input_dim: 184
+
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv1_1_re"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv1_2_re"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1"
+ top: "conv2_1"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv2_1_re"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv2_2_re"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2"
+ top: "conv3_1"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv3_1_re"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv3_2_re"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv3_3_re"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv3_4_re"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3"
+ top: "conv4_1"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv4_1_re"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv4_2_re"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv4_3_re"
+ type: "ReLU"
+ bottom: "conv4_3"
+ top: "conv4_3"
+}
+layer {
+ name: "conv4_4"
+ type: "Convolution"
+ bottom: "conv4_3"
+ top: "conv4_4"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv4_4_re"
+ type: "ReLU"
+ bottom: "conv4_4"
+ top: "conv4_4"
+}
+layer {
+ name: "conv5_1"
+ type: "Convolution"
+ bottom: "conv4_4"
+ top: "conv5_1"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_1_re"
+ type: "ReLU"
+ bottom: "conv5_1"
+ top: "conv5_1"
+}
+layer {
+ name: "conv5_2"
+ type: "Convolution"
+ bottom: "conv5_1"
+ top: "conv5_2"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_2_re"
+ type: "ReLU"
+ bottom: "conv5_2"
+ top: "conv5_2"
+}
+layer {
+ name: "conv5_3_CPM"
+ type: "Convolution"
+ bottom: "conv5_2"
+ top: "conv5_3_CPM"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_3_CPM_re"
+ type: "ReLU"
+ bottom: "conv5_3_CPM"
+ top: "conv5_3_CPM"
+}
+layer {
+ name: "conv6_1_CPM"
+ type: "Convolution"
+ bottom: "conv5_3_CPM"
+ top: "conv6_1_CPM"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv6_1_CPM_re"
+ type: "ReLU"
+ bottom: "conv6_1_CPM"
+ top: "conv6_1_CPM"
+}
+layer {
+ name: "conv6_2_CPM"
+ type: "Convolution"
+ bottom: "conv6_1_CPM"
+ top: "conv6_2_CPM"
+ param {
+ lr_mult: 1
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "features_in_stage_2"
+ type: "Concat"
+ bottom: "conv6_2_CPM"
+ bottom: "conv5_3_CPM"
+ top: "features_in_stage_2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage2"
+ type: "Convolution"
+ bottom: "features_in_stage_2"
+ top: "Mconv1_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv1_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv1_stage2"
+ top: "Mconv1_stage2"
+}
+layer {
+ name: "Mconv2_stage2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2"
+ top: "Mconv2_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv2_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv2_stage2"
+ top: "Mconv2_stage2"
+}
+layer {
+ name: "Mconv3_stage2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2"
+ top: "Mconv3_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv3_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv3_stage2"
+ top: "Mconv3_stage2"
+}
+layer {
+ name: "Mconv4_stage2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2"
+ top: "Mconv4_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv4_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv4_stage2"
+ top: "Mconv4_stage2"
+}
+layer {
+ name: "Mconv5_stage2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2"
+ top: "Mconv5_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv5_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv5_stage2"
+ top: "Mconv5_stage2"
+}
+layer {
+ name: "Mconv6_stage2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2"
+ top: "Mconv6_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv6_stage2_re"
+ type: "ReLU"
+ bottom: "Mconv6_stage2"
+ top: "Mconv6_stage2"
+}
+layer {
+ name: "Mconv7_stage2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2"
+ top: "Mconv7_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "features_in_stage_3"
+ type: "Concat"
+ bottom: "Mconv7_stage2"
+ bottom: "conv5_3_CPM"
+ top: "features_in_stage_3"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3"
+ type: "Convolution"
+ bottom: "features_in_stage_3"
+ top: "Mconv1_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv1_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv1_stage3"
+ top: "Mconv1_stage3"
+}
+layer {
+ name: "Mconv2_stage3"
+ type: "Convolution"
+ bottom: "Mconv1_stage3"
+ top: "Mconv2_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv2_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv2_stage3"
+ top: "Mconv2_stage3"
+}
+layer {
+ name: "Mconv3_stage3"
+ type: "Convolution"
+ bottom: "Mconv2_stage3"
+ top: "Mconv3_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv3_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv3_stage3"
+ top: "Mconv3_stage3"
+}
+layer {
+ name: "Mconv4_stage3"
+ type: "Convolution"
+ bottom: "Mconv3_stage3"
+ top: "Mconv4_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv4_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv4_stage3"
+ top: "Mconv4_stage3"
+}
+layer {
+ name: "Mconv5_stage3"
+ type: "Convolution"
+ bottom: "Mconv4_stage3"
+ top: "Mconv5_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv5_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv5_stage3"
+ top: "Mconv5_stage3"
+}
+layer {
+ name: "Mconv6_stage3"
+ type: "Convolution"
+ bottom: "Mconv5_stage3"
+ top: "Mconv6_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv6_stage3_re"
+ type: "ReLU"
+ bottom: "Mconv6_stage3"
+ top: "Mconv6_stage3"
+}
+layer {
+ name: "Mconv7_stage3"
+ type: "Convolution"
+ bottom: "Mconv6_stage3"
+ top: "Mconv7_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "features_in_stage_4"
+ type: "Concat"
+ bottom: "Mconv7_stage3"
+ bottom: "conv5_3_CPM"
+ top: "features_in_stage_4"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage4"
+ type: "Convolution"
+ bottom: "features_in_stage_4"
+ top: "Mconv1_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv1_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv1_stage4"
+ top: "Mconv1_stage4"
+}
+layer {
+ name: "Mconv2_stage4"
+ type: "Convolution"
+ bottom: "Mconv1_stage4"
+ top: "Mconv2_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv2_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv2_stage4"
+ top: "Mconv2_stage4"
+}
+layer {
+ name: "Mconv3_stage4"
+ type: "Convolution"
+ bottom: "Mconv2_stage4"
+ top: "Mconv3_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv3_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv3_stage4"
+ top: "Mconv3_stage4"
+}
+layer {
+ name: "Mconv4_stage4"
+ type: "Convolution"
+ bottom: "Mconv3_stage4"
+ top: "Mconv4_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv4_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv4_stage4"
+ top: "Mconv4_stage4"
+}
+layer {
+ name: "Mconv5_stage4"
+ type: "Convolution"
+ bottom: "Mconv4_stage4"
+ top: "Mconv5_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv5_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv5_stage4"
+ top: "Mconv5_stage4"
+}
+layer {
+ name: "Mconv6_stage4"
+ type: "Convolution"
+ bottom: "Mconv5_stage4"
+ top: "Mconv6_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv6_stage4_re"
+ type: "ReLU"
+ bottom: "Mconv6_stage4"
+ top: "Mconv6_stage4"
+}
+layer {
+ name: "Mconv7_stage4"
+ type: "Convolution"
+ bottom: "Mconv6_stage4"
+ top: "Mconv7_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "features_in_stage_5"
+ type: "Concat"
+ bottom: "Mconv7_stage4"
+ bottom: "conv5_3_CPM"
+ top: "features_in_stage_5"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage5"
+ type: "Convolution"
+ bottom: "features_in_stage_5"
+ top: "Mconv1_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv1_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv1_stage5"
+ top: "Mconv1_stage5"
+}
+layer {
+ name: "Mconv2_stage5"
+ type: "Convolution"
+ bottom: "Mconv1_stage5"
+ top: "Mconv2_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv2_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv2_stage5"
+ top: "Mconv2_stage5"
+}
+layer {
+ name: "Mconv3_stage5"
+ type: "Convolution"
+ bottom: "Mconv2_stage5"
+ top: "Mconv3_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv3_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv3_stage5"
+ top: "Mconv3_stage5"
+}
+layer {
+ name: "Mconv4_stage5"
+ type: "Convolution"
+ bottom: "Mconv3_stage5"
+ top: "Mconv4_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv4_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv4_stage5"
+ top: "Mconv4_stage5"
+}
+layer {
+ name: "Mconv5_stage5"
+ type: "Convolution"
+ bottom: "Mconv4_stage5"
+ top: "Mconv5_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv5_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv5_stage5"
+ top: "Mconv5_stage5"
+}
+layer {
+ name: "Mconv6_stage5"
+ type: "Convolution"
+ bottom: "Mconv5_stage5"
+ top: "Mconv6_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv6_stage5_re"
+ type: "ReLU"
+ bottom: "Mconv6_stage5"
+ top: "Mconv6_stage5"
+}
+layer {
+ name: "Mconv7_stage5"
+ type: "Convolution"
+ bottom: "Mconv6_stage5"
+ top: "Mconv7_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "features_in_stage_6"
+ type: "Concat"
+ bottom: "Mconv7_stage5"
+ bottom: "conv5_3_CPM"
+ top: "features_in_stage_6"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage6"
+ type: "Convolution"
+ bottom: "features_in_stage_6"
+ top: "Mconv1_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv1_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv1_stage6"
+ top: "Mconv1_stage6"
+}
+layer {
+ name: "Mconv2_stage6"
+ type: "Convolution"
+ bottom: "Mconv1_stage6"
+ top: "Mconv2_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv2_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv2_stage6"
+ top: "Mconv2_stage6"
+}
+layer {
+ name: "Mconv3_stage6"
+ type: "Convolution"
+ bottom: "Mconv2_stage6"
+ top: "Mconv3_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv3_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv3_stage6"
+ top: "Mconv3_stage6"
+}
+layer {
+ name: "Mconv4_stage6"
+ type: "Convolution"
+ bottom: "Mconv3_stage6"
+ top: "Mconv4_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv4_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv4_stage6"
+ top: "Mconv4_stage6"
+}
+layer {
+ name: "Mconv5_stage6"
+ type: "Convolution"
+ bottom: "Mconv4_stage6"
+ top: "Mconv5_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv5_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv5_stage6"
+ top: "Mconv5_stage6"
+}
+layer {
+ name: "Mconv6_stage6"
+ type: "Convolution"
+ bottom: "Mconv5_stage6"
+ top: "Mconv6_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv6_stage6_re"
+ type: "ReLU"
+ bottom: "Mconv6_stage6"
+ top: "Mconv6_stage6"
+}
+layer {
+ name: "Mconv7_stage6"
+ type: "Convolution"
+ bottom: "Mconv6_stage6"
+# top: "Mconv7_stage6"
+ top: "net_output"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 71
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
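
The network above ends in `net_output`, a 71-channel stack of belief maps at 1/8 of the input resolution (after the three 2x2 max-poolings); the 71 channels match OpenPose's face model, i.e. 70 facial keypoints plus one background map. A minimal sketch of running this deploy file with OpenCV's DNN module follows, assuming the matching `pose_iter_116000.caffemodel` has been fetched with the download scripts below; the 368x368 input size follows the `# Original: 368` comments in the hand prototxt further down and the zero-mean, 1/255 scaling follows OpenCV's OpenPose sample, none of which is pinned down by this prototxt itself:

```python
import cv2

# Paths are assumptions; the caffemodel comes from getModels.sh / getModels.bat.
net = cv2.dnn.readNetFromCaffe("face/pose_deploy.prototxt",
                               "face/pose_iter_116000.caffemodel")
img = cv2.imread("face.jpg")
blob = cv2.dnn.blobFromImage(img, scalefactor=1.0 / 255, size=(368, 368),
                             mean=(0, 0, 0), swapRB=False, crop=False)
net.setInput(blob)
heatmaps = net.forward()  # shape (1, 71, 46, 46): 70 keypoints + 1 background map
for c in range(heatmaps.shape[1] - 1):
    # The peak of each belief map is the keypoint location at 1/8 resolution.
    _, conf, _, (x, y) = cv2.minMaxLoc(heatmaps[0, c])
    print(f"keypoint {c}: ({x * 8}, {y * 8})  confidence {conf:.2f}")
```
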
diff --git a/joints_detectors/openpose/models/getModels.bat b/joints_detectors/openpose/models/getModels.bat
new file mode 100644
index 0000000000000000000000000000000000000000..2eccb5a147732e8606d6cac9692288e4959be78c
--- /dev/null
+++ b/joints_detectors/openpose/models/getModels.bat
@@ -0,0 +1,43 @@
+:: Avoid printing all the comments in the Windows cmd window
+@echo off
+
+echo ------------------------- BODY, FOOT, FACE, AND HAND MODELS -------------------------
+echo ----- Downloading body pose (COCO and MPI), face and hand models -----
+SET WGET_EXE=..\3rdparty\windows\wget\wget.exe
+SET OPENPOSE_URL=http://posefs1.perception.cs.cmu.edu/OpenPose/models/
+SET POSE_FOLDER=pose/
+SET FACE_FOLDER=face/
+SET HAND_FOLDER=hand/
+
+echo:
+echo ------------------------- POSE (BODY+FOOT) MODELS -------------------------
+echo Body (BODY_25)
+set BODY_25_FOLDER=%POSE_FOLDER%body_25/
+set BODY_25_MODEL=%BODY_25_FOLDER%pose_iter_584000.caffemodel
+%WGET_EXE% -c %OPENPOSE_URL%%BODY_25_MODEL% -P %BODY_25_FOLDER%
+
+echo Body (COCO)
+SET COCO_FOLDER=%POSE_FOLDER%coco/
+SET COCO_MODEL=%COCO_FOLDER%pose_iter_440000.caffemodel
+%WGET_EXE% -c %OPENPOSE_URL%%COCO_MODEL% -P %COCO_FOLDER%
+
+echo:
+echo Body (MPI)
+SET MPI_FOLDER=%POSE_FOLDER%mpi/
+SET MPI_MODEL=%MPI_FOLDER%pose_iter_160000.caffemodel
+%WGET_EXE% -c %OPENPOSE_URL%%MPI_MODEL% -P %MPI_FOLDER%
+echo ----------------------- POSE DOWNLOADED -----------------------
+
+echo:
+echo ------------------------- FACE MODELS -------------------------
+echo Face
+SET FACE_MODEL=%FACE_FOLDER%pose_iter_116000.caffemodel
+%WGET_EXE% -c %OPENPOSE_URL%%FACE_MODEL% -P %FACE_FOLDER%
+echo ----------------------- FACE DOWNLOADED -----------------------
+
+echo:
+echo ------------------------- HAND MODELS -------------------------
+echo Hand
+SET HAND_MODEL=%HAND_FOLDER%pose_iter_102000.caffemodel
+%WGET_EXE% -c %OPENPOSE_URL%%HAND_MODEL% -P %HAND_FOLDER%
+echo ----------------------- HAND DOWNLOADED -----------------------
diff --git a/joints_detectors/openpose/models/getModels.sh b/joints_detectors/openpose/models/getModels.sh
new file mode 100644
index 0000000000000000000000000000000000000000..63aef4d552905b312b95f2a4e367067f28eba3d7
--- /dev/null
+++ b/joints_detectors/openpose/models/getModels.sh
@@ -0,0 +1,36 @@
+# ------------------------- BODY, FOOT, FACE, AND HAND MODELS -------------------------
+# Downloading body pose (COCO and MPI), face and hand models
+OPENPOSE_URL="http://posefs1.perception.cs.cmu.edu/OpenPose/models/"
+POSE_FOLDER="pose/"
+FACE_FOLDER="face/"
+HAND_FOLDER="hand/"
+
+# ------------------------- POSE (BODY+FOOT) MODELS -------------------------
+# Body (BODY_25)
+BODY_25_FOLDER=${POSE_FOLDER}"body_25/"
+BODY_25_MODEL=${BODY_25_FOLDER}"pose_iter_584000.caffemodel"
+wget -c ${OPENPOSE_URL}${BODY_25_MODEL} -P ${BODY_25_FOLDER}
+
+# Body (COCO)
+COCO_FOLDER=${POSE_FOLDER}"coco/"
+COCO_MODEL=${COCO_FOLDER}"pose_iter_440000.caffemodel"
+wget -c ${OPENPOSE_URL}${COCO_MODEL} -P ${COCO_FOLDER}
+# Alternative: this will not check whether the file was fully downloaded
+# if [ ! -f $COCO_MODEL ]; then
+# wget ${OPENPOSE_URL}$COCO_MODEL -P $COCO_FOLDER
+# fi
+
+# Body (MPI)
+MPI_FOLDER=${POSE_FOLDER}"mpi/"
+MPI_MODEL=${MPI_FOLDER}"pose_iter_160000.caffemodel"
+wget -c ${OPENPOSE_URL}${MPI_MODEL} -P ${MPI_FOLDER}
+
+# "------------------------- FACE MODELS -------------------------"
+# Face
+FACE_MODEL=${FACE_FOLDER}"pose_iter_116000.caffemodel"
+wget -c ${OPENPOSE_URL}${FACE_MODEL} -P ${FACE_FOLDER}
+
+# "------------------------- HAND MODELS -------------------------"
+# Hand
+HAND_MODEL=${HAND_FOLDER}"pose_iter_102000.caffemodel"
+wget -c ${OPENPOSE_URL}${HAND_MODEL} -P ${HAND_FOLDER}
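
Both the batch and shell scripts rely on `wget -c`, which resumes a partially downloaded caffemodel instead of restarting it from scratch (the commented-out alternative above is simpler but skips that check). As a rough Python equivalent of the resume behavior, here is a sketch using the requests library; this is an assumption for illustration only, since the repo ships just the shell and batch versions:

```python
# Sketch of what "wget -c" does: resume a partial download via an HTTP Range request.
import os
import requests

def download_resume(url: str, dest: str, chunk: int = 1 << 20) -> None:
    done = os.path.getsize(dest) if os.path.exists(dest) else 0
    headers = {"Range": f"bytes={done}-"} if done else {}
    with requests.get(url, headers=headers, stream=True, timeout=60) as r:
        if r.status_code == 416:  # requested range beyond file size: already complete
            return
        r.raise_for_status()
        mode = "ab" if r.status_code == 206 else "wb"  # 206 = server honored the Range
        with open(dest, mode) as f:
            for part in r.iter_content(chunk):
                f.write(part)

download_resume(
    "http://posefs1.perception.cs.cmu.edu/OpenPose/models/pose/coco/pose_iter_440000.caffemodel",
    "pose/coco/pose_iter_440000.caffemodel",
)
```
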
diff --git a/joints_detectors/openpose/models/hand/pose_deploy.prototxt b/joints_detectors/openpose/models/hand/pose_deploy.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..3554c3a02e3dc545b3da4f32a0c954b452289706
--- /dev/null
+++ b/joints_detectors/openpose/models/hand/pose_deploy.prototxt
@@ -0,0 +1,1756 @@
+input: "image"
+input_dim: 1 # Original: 2
+input_dim: 3 # It crashes if this is not left at 3
+input_dim: 1 # Original: 368
+input_dim: 1 # Original: 368
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1_stage1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1_stage1"
+ top: "conv2_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2_stage1"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2_stage1"
+ top: "conv3_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu3_4"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3_stage1"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3_stage1"
+ top: "conv4_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu4_2"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu4_3"
+ type: "ReLU"
+ bottom: "conv4_3"
+ top: "conv4_3"
+}
+layer {
+ name: "conv4_4"
+ type: "Convolution"
+ bottom: "conv4_3"
+ top: "conv4_4"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu4_4"
+ type: "ReLU"
+ bottom: "conv4_4"
+ top: "conv4_4"
+}
+layer {
+ name: "conv5_1"
+ type: "Convolution"
+ bottom: "conv4_4"
+ top: "conv5_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu5_1"
+ type: "ReLU"
+ bottom: "conv5_1"
+ top: "conv5_1"
+}
+layer {
+ name: "conv5_2"
+ type: "Convolution"
+ bottom: "conv5_1"
+ top: "conv5_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "xavier"
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu5_2"
+ type: "ReLU"
+ bottom: "conv5_2"
+ top: "conv5_2"
+}
+layer {
+ name: "conv5_3_CPM"
+ type: "Convolution"
+ bottom: "conv5_2"
+ top: "conv5_3_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu5_4_stage1_3"
+ type: "ReLU"
+ bottom: "conv5_3_CPM"
+ top: "conv5_3_CPM"
+}
+layer {
+ name: "conv6_1_CPM"
+ type: "Convolution"
+ bottom: "conv5_3_CPM"
+ top: "conv6_1_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "relu6_4_stage1_1"
+ type: "ReLU"
+ bottom: "conv6_1_CPM"
+ top: "conv6_1_CPM"
+}
+layer {
+ name: "conv6_2_CPM"
+ type: "Convolution"
+ bottom: "conv6_1_CPM"
+ top: "conv6_2_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "concat_stage2"
+ type: "Concat"
+ bottom: "conv6_2_CPM"
+ bottom: "conv5_3_CPM"
+ top: "concat_stage2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage2"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_2_stage2_1"
+ type: "ReLU"
+ bottom: "Mconv1_stage2"
+ top: "Mconv1_stage2"
+}
+layer {
+ name: "Mconv2_stage2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2"
+ top: "Mconv2_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_3_stage2_2"
+ type: "ReLU"
+ bottom: "Mconv2_stage2"
+ top: "Mconv2_stage2"
+}
+layer {
+ name: "Mconv3_stage2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2"
+ top: "Mconv3_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_4_stage2_3"
+ type: "ReLU"
+ bottom: "Mconv3_stage2"
+ top: "Mconv3_stage2"
+}
+layer {
+ name: "Mconv4_stage2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2"
+ top: "Mconv4_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_5_stage2_4"
+ type: "ReLU"
+ bottom: "Mconv4_stage2"
+ top: "Mconv4_stage2"
+}
+layer {
+ name: "Mconv5_stage2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2"
+ top: "Mconv5_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_6_stage2_5"
+ type: "ReLU"
+ bottom: "Mconv5_stage2"
+ top: "Mconv5_stage2"
+}
+layer {
+ name: "Mconv6_stage2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2"
+ top: "Mconv6_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_7_stage2_6"
+ type: "ReLU"
+ bottom: "Mconv6_stage2"
+ top: "Mconv6_stage2"
+}
+layer {
+ name: "Mconv7_stage2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2"
+ top: "Mconv7_stage2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "concat_stage3"
+ type: "Concat"
+ bottom: "Mconv7_stage2"
+ bottom: "conv5_3_CPM"
+ top: "concat_stage3"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_2_stage3_1"
+ type: "ReLU"
+ bottom: "Mconv1_stage3"
+ top: "Mconv1_stage3"
+}
+layer {
+ name: "Mconv2_stage3"
+ type: "Convolution"
+ bottom: "Mconv1_stage3"
+ top: "Mconv2_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_3_stage3_2"
+ type: "ReLU"
+ bottom: "Mconv2_stage3"
+ top: "Mconv2_stage3"
+}
+layer {
+ name: "Mconv3_stage3"
+ type: "Convolution"
+ bottom: "Mconv2_stage3"
+ top: "Mconv3_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_4_stage3_3"
+ type: "ReLU"
+ bottom: "Mconv3_stage3"
+ top: "Mconv3_stage3"
+}
+layer {
+ name: "Mconv4_stage3"
+ type: "Convolution"
+ bottom: "Mconv3_stage3"
+ top: "Mconv4_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_5_stage3_4"
+ type: "ReLU"
+ bottom: "Mconv4_stage3"
+ top: "Mconv4_stage3"
+}
+layer {
+ name: "Mconv5_stage3"
+ type: "Convolution"
+ bottom: "Mconv4_stage3"
+ top: "Mconv5_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_6_stage3_5"
+ type: "ReLU"
+ bottom: "Mconv5_stage3"
+ top: "Mconv5_stage3"
+}
+layer {
+ name: "Mconv6_stage3"
+ type: "Convolution"
+ bottom: "Mconv5_stage3"
+ top: "Mconv6_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_7_stage3_6"
+ type: "ReLU"
+ bottom: "Mconv6_stage3"
+ top: "Mconv6_stage3"
+}
+layer {
+ name: "Mconv7_stage3"
+ type: "Convolution"
+ bottom: "Mconv6_stage3"
+ top: "Mconv7_stage3"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "concat_stage4"
+ type: "Concat"
+ bottom: "Mconv7_stage3"
+ bottom: "conv5_3_CPM"
+ top: "concat_stage4"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage4"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_2_stage4_1"
+ type: "ReLU"
+ bottom: "Mconv1_stage4"
+ top: "Mconv1_stage4"
+}
+layer {
+ name: "Mconv2_stage4"
+ type: "Convolution"
+ bottom: "Mconv1_stage4"
+ top: "Mconv2_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_3_stage4_2"
+ type: "ReLU"
+ bottom: "Mconv2_stage4"
+ top: "Mconv2_stage4"
+}
+layer {
+ name: "Mconv3_stage4"
+ type: "Convolution"
+ bottom: "Mconv2_stage4"
+ top: "Mconv3_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_4_stage4_3"
+ type: "ReLU"
+ bottom: "Mconv3_stage4"
+ top: "Mconv3_stage4"
+}
+layer {
+ name: "Mconv4_stage4"
+ type: "Convolution"
+ bottom: "Mconv3_stage4"
+ top: "Mconv4_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_5_stage4_4"
+ type: "ReLU"
+ bottom: "Mconv4_stage4"
+ top: "Mconv4_stage4"
+}
+layer {
+ name: "Mconv5_stage4"
+ type: "Convolution"
+ bottom: "Mconv4_stage4"
+ top: "Mconv5_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_6_stage4_5"
+ type: "ReLU"
+ bottom: "Mconv5_stage4"
+ top: "Mconv5_stage4"
+}
+layer {
+ name: "Mconv6_stage4"
+ type: "Convolution"
+ bottom: "Mconv5_stage4"
+ top: "Mconv6_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_7_stage4_6"
+ type: "ReLU"
+ bottom: "Mconv6_stage4"
+ top: "Mconv6_stage4"
+}
+layer {
+ name: "Mconv7_stage4"
+ type: "Convolution"
+ bottom: "Mconv6_stage4"
+ top: "Mconv7_stage4"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "concat_stage5"
+ type: "Concat"
+ bottom: "Mconv7_stage4"
+ bottom: "conv5_3_CPM"
+ top: "concat_stage5"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage5"
+ type: "Convolution"
+ bottom: "concat_stage5"
+ top: "Mconv1_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_2_stage5_1"
+ type: "ReLU"
+ bottom: "Mconv1_stage5"
+ top: "Mconv1_stage5"
+}
+layer {
+ name: "Mconv2_stage5"
+ type: "Convolution"
+ bottom: "Mconv1_stage5"
+ top: "Mconv2_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_3_stage5_2"
+ type: "ReLU"
+ bottom: "Mconv2_stage5"
+ top: "Mconv2_stage5"
+}
+layer {
+ name: "Mconv3_stage5"
+ type: "Convolution"
+ bottom: "Mconv2_stage5"
+ top: "Mconv3_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_4_stage5_3"
+ type: "ReLU"
+ bottom: "Mconv3_stage5"
+ top: "Mconv3_stage5"
+}
+layer {
+ name: "Mconv4_stage5"
+ type: "Convolution"
+ bottom: "Mconv3_stage5"
+ top: "Mconv4_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_5_stage5_4"
+ type: "ReLU"
+ bottom: "Mconv4_stage5"
+ top: "Mconv4_stage5"
+}
+layer {
+ name: "Mconv5_stage5"
+ type: "Convolution"
+ bottom: "Mconv4_stage5"
+ top: "Mconv5_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_6_stage5_5"
+ type: "ReLU"
+ bottom: "Mconv5_stage5"
+ top: "Mconv5_stage5"
+}
+layer {
+ name: "Mconv6_stage5"
+ type: "Convolution"
+ bottom: "Mconv5_stage5"
+ top: "Mconv6_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_7_stage5_6"
+ type: "ReLU"
+ bottom: "Mconv6_stage5"
+ top: "Mconv6_stage5"
+}
+layer {
+ name: "Mconv7_stage5"
+ type: "Convolution"
+ bottom: "Mconv6_stage5"
+ top: "Mconv7_stage5"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "concat_stage6"
+ type: "Concat"
+ bottom: "Mconv7_stage5"
+ bottom: "conv5_3_CPM"
+ top: "concat_stage6"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage6"
+ type: "Convolution"
+ bottom: "concat_stage6"
+ top: "Mconv1_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_2_stage6_1"
+ type: "ReLU"
+ bottom: "Mconv1_stage6"
+ top: "Mconv1_stage6"
+}
+layer {
+ name: "Mconv2_stage6"
+ type: "Convolution"
+ bottom: "Mconv1_stage6"
+ top: "Mconv2_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_3_stage6_2"
+ type: "ReLU"
+ bottom: "Mconv2_stage6"
+ top: "Mconv2_stage6"
+}
+layer {
+ name: "Mconv3_stage6"
+ type: "Convolution"
+ bottom: "Mconv2_stage6"
+ top: "Mconv3_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_4_stage6_3"
+ type: "ReLU"
+ bottom: "Mconv3_stage6"
+ top: "Mconv3_stage6"
+}
+layer {
+ name: "Mconv4_stage6"
+ type: "Convolution"
+ bottom: "Mconv3_stage6"
+ top: "Mconv4_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_5_stage6_4"
+ type: "ReLU"
+ bottom: "Mconv4_stage6"
+ top: "Mconv4_stage6"
+}
+layer {
+ name: "Mconv5_stage6"
+ type: "Convolution"
+ bottom: "Mconv4_stage6"
+ top: "Mconv5_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_6_stage6_5"
+ type: "ReLU"
+ bottom: "Mconv5_stage6"
+ top: "Mconv5_stage6"
+}
+layer {
+ name: "Mconv6_stage6"
+ type: "Convolution"
+ bottom: "Mconv5_stage6"
+ top: "Mconv6_stage6"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+layer {
+ name: "Mrelu1_7_stage6_6"
+ type: "ReLU"
+ bottom: "Mconv6_stage6"
+ top: "Mconv6_stage6"
+}
+layer {
+ name: "Mconv7_stage6"
+ type: "Convolution"
+ bottom: "Mconv6_stage6"
+# top: "Mconv7_stage6"
+ top: "net_output"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 22
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ dilation: 1
+ }
+}
+
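The hand model repeats the face model's pattern with 22 output channels (21 hand keypoints plus background): each refinement stage concatenates the previous stage's belief maps with the shared 128-channel conv5_3_CPM features, then runs five 7x7 convolutions followed by two 1x1 convolutions back down to 22 maps. A hedged PyTorch sketch of one such stage, written only to make the prototxt's structure explicit (it is not the repo's inference path):

```python
import torch
import torch.nn as nn

class RefinementStage(nn.Module):
    """One CPM refinement stage as laid out in the prototxt above."""
    def __init__(self, feat_ch: int = 128, maps_ch: int = 22):
        super().__init__()
        layers, in_ch = [], feat_ch + maps_ch       # Concat along channel axis 1
        for _ in range(5):                          # Mconv1..Mconv5: 7x7, pad 3, 128 ch
            layers += [nn.Conv2d(in_ch, 128, 7, padding=3), nn.ReLU(inplace=True)]
            in_ch = 128
        layers += [nn.Conv2d(128, 128, 1), nn.ReLU(inplace=True)]  # Mconv6: 1x1
        layers += [nn.Conv2d(128, maps_ch, 1)]                     # Mconv7: 1x1, no ReLU
        self.net = nn.Sequential(*layers)

    def forward(self, prev_maps: torch.Tensor, features: torch.Tensor) -> torch.Tensor:
        # prev_maps: (N, 22, H, W) from the previous stage; features: (N, 128, H, W)
        return self.net(torch.cat([prev_maps, features], dim=1))
```
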
diff --git a/joints_detectors/openpose/models/pose/body_25/pose_deploy.prototxt b/joints_detectors/openpose/models/pose/body_25/pose_deploy.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..bd18b9d3a225cf64c172a51cc7b03d9c481ca604
--- /dev/null
+++ b/joints_detectors/openpose/models/pose/body_25/pose_deploy.prototxt
@@ -0,0 +1,2332 @@
+name: "OpenPose - BODY_25"
+input: "image"
+input_dim: 1 # This value will be defined at runtime
+input_dim: 3
+input_dim: 16 # This value will be defined at runtime
+input_dim: 16 # This value will be defined at runtime
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1_stage1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1_stage1"
+ top: "conv2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2_stage1"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2_stage1"
+ top: "conv3_1"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu3_4"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3_stage1"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3_stage1"
+ top: "conv4_1"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "prelu4_2"
+ type: "PReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3_CPM"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3_CPM"
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "prelu4_3_CPM"
+ type: "PReLU"
+ bottom: "conv4_3_CPM"
+ top: "conv4_3_CPM"
+}
+layer {
+ name: "conv4_4_CPM"
+ type: "Convolution"
+ bottom: "conv4_3_CPM"
+ top: "conv4_4_CPM"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "prelu4_4_CPM"
+ type: "PReLU"
+ bottom: "conv4_4_CPM"
+ top: "conv4_4_CPM"
+}
+layer {
+ name: "Mconv1_stage0_L2_0"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "Mconv1_stage0_L2_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L2_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L2_0"
+ top: "Mconv1_stage0_L2_0"
+}
+layer {
+ name: "Mconv1_stage0_L2_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L2_0"
+ top: "Mconv1_stage0_L2_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L2_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L2_1"
+ top: "Mconv1_stage0_L2_1"
+}
+layer {
+ name: "Mconv1_stage0_L2_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L2_1"
+ top: "Mconv1_stage0_L2_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L2_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L2_2"
+ top: "Mconv1_stage0_L2_2"
+}
+layer {
+ name: "Mconv1_stage0_L2_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage0_L2_0"
+ bottom: "Mconv1_stage0_L2_1"
+ bottom: "Mconv1_stage0_L2_2"
+ top: "Mconv1_stage0_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage0_L2_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L2_concat"
+ top: "Mconv2_stage0_L2_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L2_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L2_0"
+ top: "Mconv2_stage0_L2_0"
+}
+layer {
+ name: "Mconv2_stage0_L2_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L2_0"
+ top: "Mconv2_stage0_L2_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L2_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L2_1"
+ top: "Mconv2_stage0_L2_1"
+}
+layer {
+ name: "Mconv2_stage0_L2_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L2_1"
+ top: "Mconv2_stage0_L2_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L2_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L2_2"
+ top: "Mconv2_stage0_L2_2"
+}
+layer {
+ name: "Mconv2_stage0_L2_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage0_L2_0"
+ bottom: "Mconv2_stage0_L2_1"
+ bottom: "Mconv2_stage0_L2_2"
+ top: "Mconv2_stage0_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage0_L2_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L2_concat"
+ top: "Mconv3_stage0_L2_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L2_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L2_0"
+ top: "Mconv3_stage0_L2_0"
+}
+layer {
+ name: "Mconv3_stage0_L2_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L2_0"
+ top: "Mconv3_stage0_L2_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L2_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L2_1"
+ top: "Mconv3_stage0_L2_1"
+}
+layer {
+ name: "Mconv3_stage0_L2_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L2_1"
+ top: "Mconv3_stage0_L2_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L2_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L2_2"
+ top: "Mconv3_stage0_L2_2"
+}
+layer {
+ name: "Mconv3_stage0_L2_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage0_L2_0"
+ bottom: "Mconv3_stage0_L2_1"
+ bottom: "Mconv3_stage0_L2_2"
+ top: "Mconv3_stage0_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage0_L2_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L2_concat"
+ top: "Mconv4_stage0_L2_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L2_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L2_0"
+ top: "Mconv4_stage0_L2_0"
+}
+layer {
+ name: "Mconv4_stage0_L2_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L2_0"
+ top: "Mconv4_stage0_L2_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L2_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L2_1"
+ top: "Mconv4_stage0_L2_1"
+}
+layer {
+ name: "Mconv4_stage0_L2_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L2_1"
+ top: "Mconv4_stage0_L2_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L2_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L2_2"
+ top: "Mconv4_stage0_L2_2"
+}
+layer {
+ name: "Mconv4_stage0_L2_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage0_L2_0"
+ bottom: "Mconv4_stage0_L2_1"
+ bottom: "Mconv4_stage0_L2_2"
+ top: "Mconv4_stage0_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage0_L2_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L2_concat"
+ top: "Mconv5_stage0_L2_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L2_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L2_0"
+ top: "Mconv5_stage0_L2_0"
+}
+layer {
+ name: "Mconv5_stage0_L2_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L2_0"
+ top: "Mconv5_stage0_L2_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L2_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L2_1"
+ top: "Mconv5_stage0_L2_1"
+}
+layer {
+ name: "Mconv5_stage0_L2_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L2_1"
+ top: "Mconv5_stage0_L2_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L2_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L2_2"
+ top: "Mconv5_stage0_L2_2"
+}
+layer {
+ name: "Mconv5_stage0_L2_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage0_L2_0"
+ bottom: "Mconv5_stage0_L2_1"
+ bottom: "Mconv5_stage0_L2_2"
+ top: "Mconv5_stage0_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage0_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L2_concat"
+ top: "Mconv6_stage0_L2"
+ convolution_param {
+ num_output: 256
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage0_L2"
+ type: "PReLU"
+ bottom: "Mconv6_stage0_L2"
+ top: "Mconv6_stage0_L2"
+}
+layer {
+ name: "Mconv7_stage0_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage0_L2"
+ top: "Mconv7_stage0_L2"
+ convolution_param {
+ num_output: 52
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "concat_stage1_L2"
+ type: "Concat"
+ bottom: "conv4_4_CPM"
+ bottom: "Mconv7_stage0_L2"
+ top: "concat_stage1_L2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage1_L2_0"
+ type: "Convolution"
+ bottom: "concat_stage1_L2"
+ top: "Mconv1_stage1_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L2_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L2_0"
+ top: "Mconv1_stage1_L2_0"
+}
+layer {
+ name: "Mconv1_stage1_L2_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L2_0"
+ top: "Mconv1_stage1_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L2_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L2_1"
+ top: "Mconv1_stage1_L2_1"
+}
+layer {
+ name: "Mconv1_stage1_L2_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L2_1"
+ top: "Mconv1_stage1_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L2_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L2_2"
+ top: "Mconv1_stage1_L2_2"
+}
+layer {
+ name: "Mconv1_stage1_L2_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage1_L2_0"
+ bottom: "Mconv1_stage1_L2_1"
+ bottom: "Mconv1_stage1_L2_2"
+ top: "Mconv1_stage1_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage1_L2_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L2_concat"
+ top: "Mconv2_stage1_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L2_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L2_0"
+ top: "Mconv2_stage1_L2_0"
+}
+layer {
+ name: "Mconv2_stage1_L2_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L2_0"
+ top: "Mconv2_stage1_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L2_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L2_1"
+ top: "Mconv2_stage1_L2_1"
+}
+layer {
+ name: "Mconv2_stage1_L2_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L2_1"
+ top: "Mconv2_stage1_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L2_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L2_2"
+ top: "Mconv2_stage1_L2_2"
+}
+layer {
+ name: "Mconv2_stage1_L2_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage1_L2_0"
+ bottom: "Mconv2_stage1_L2_1"
+ bottom: "Mconv2_stage1_L2_2"
+ top: "Mconv2_stage1_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage1_L2_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L2_concat"
+ top: "Mconv3_stage1_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L2_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L2_0"
+ top: "Mconv3_stage1_L2_0"
+}
+layer {
+ name: "Mconv3_stage1_L2_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L2_0"
+ top: "Mconv3_stage1_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L2_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L2_1"
+ top: "Mconv3_stage1_L2_1"
+}
+layer {
+ name: "Mconv3_stage1_L2_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L2_1"
+ top: "Mconv3_stage1_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L2_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L2_2"
+ top: "Mconv3_stage1_L2_2"
+}
+layer {
+ name: "Mconv3_stage1_L2_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage1_L2_0"
+ bottom: "Mconv3_stage1_L2_1"
+ bottom: "Mconv3_stage1_L2_2"
+ top: "Mconv3_stage1_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage1_L2_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L2_concat"
+ top: "Mconv4_stage1_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L2_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L2_0"
+ top: "Mconv4_stage1_L2_0"
+}
+layer {
+ name: "Mconv4_stage1_L2_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L2_0"
+ top: "Mconv4_stage1_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L2_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L2_1"
+ top: "Mconv4_stage1_L2_1"
+}
+layer {
+ name: "Mconv4_stage1_L2_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L2_1"
+ top: "Mconv4_stage1_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L2_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L2_2"
+ top: "Mconv4_stage1_L2_2"
+}
+layer {
+ name: "Mconv4_stage1_L2_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage1_L2_0"
+ bottom: "Mconv4_stage1_L2_1"
+ bottom: "Mconv4_stage1_L2_2"
+ top: "Mconv4_stage1_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage1_L2_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L2_concat"
+ top: "Mconv5_stage1_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L2_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L2_0"
+ top: "Mconv5_stage1_L2_0"
+}
+layer {
+ name: "Mconv5_stage1_L2_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L2_0"
+ top: "Mconv5_stage1_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L2_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L2_1"
+ top: "Mconv5_stage1_L2_1"
+}
+layer {
+ name: "Mconv5_stage1_L2_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L2_1"
+ top: "Mconv5_stage1_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L2_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L2_2"
+ top: "Mconv5_stage1_L2_2"
+}
+layer {
+ name: "Mconv5_stage1_L2_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage1_L2_0"
+ bottom: "Mconv5_stage1_L2_1"
+ bottom: "Mconv5_stage1_L2_2"
+ top: "Mconv5_stage1_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage1_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L2_concat"
+ top: "Mconv6_stage1_L2"
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage1_L2"
+ type: "PReLU"
+ bottom: "Mconv6_stage1_L2"
+ top: "Mconv6_stage1_L2"
+}
+layer {
+ name: "Mconv7_stage1_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage1_L2"
+ top: "Mconv7_stage1_L2"
+ convolution_param {
+ num_output: 52
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "concat_stage2_L2"
+ type: "Concat"
+ bottom: "conv4_4_CPM"
+ bottom: "Mconv7_stage1_L2"
+ top: "concat_stage2_L2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage2_L2_0"
+ type: "Convolution"
+ bottom: "concat_stage2_L2"
+ top: "Mconv1_stage2_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage2_L2_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage2_L2_0"
+ top: "Mconv1_stage2_L2_0"
+}
+layer {
+ name: "Mconv1_stage2_L2_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2_0"
+ top: "Mconv1_stage2_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage2_L2_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage2_L2_1"
+ top: "Mconv1_stage2_L2_1"
+}
+layer {
+ name: "Mconv1_stage2_L2_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2_1"
+ top: "Mconv1_stage2_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage2_L2_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage2_L2_2"
+ top: "Mconv1_stage2_L2_2"
+}
+layer {
+ name: "Mconv1_stage2_L2_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage2_L2_0"
+ bottom: "Mconv1_stage2_L2_1"
+ bottom: "Mconv1_stage2_L2_2"
+ top: "Mconv1_stage2_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage2_L2_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2_concat"
+ top: "Mconv2_stage2_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage2_L2_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage2_L2_0"
+ top: "Mconv2_stage2_L2_0"
+}
+layer {
+ name: "Mconv2_stage2_L2_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2_0"
+ top: "Mconv2_stage2_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage2_L2_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage2_L2_1"
+ top: "Mconv2_stage2_L2_1"
+}
+layer {
+ name: "Mconv2_stage2_L2_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2_1"
+ top: "Mconv2_stage2_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage2_L2_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage2_L2_2"
+ top: "Mconv2_stage2_L2_2"
+}
+layer {
+ name: "Mconv2_stage2_L2_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage2_L2_0"
+ bottom: "Mconv2_stage2_L2_1"
+ bottom: "Mconv2_stage2_L2_2"
+ top: "Mconv2_stage2_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage2_L2_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2_concat"
+ top: "Mconv3_stage2_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage2_L2_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage2_L2_0"
+ top: "Mconv3_stage2_L2_0"
+}
+layer {
+ name: "Mconv3_stage2_L2_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2_0"
+ top: "Mconv3_stage2_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage2_L2_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage2_L2_1"
+ top: "Mconv3_stage2_L2_1"
+}
+layer {
+ name: "Mconv3_stage2_L2_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2_1"
+ top: "Mconv3_stage2_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage2_L2_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage2_L2_2"
+ top: "Mconv3_stage2_L2_2"
+}
+layer {
+ name: "Mconv3_stage2_L2_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage2_L2_0"
+ bottom: "Mconv3_stage2_L2_1"
+ bottom: "Mconv3_stage2_L2_2"
+ top: "Mconv3_stage2_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage2_L2_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2_concat"
+ top: "Mconv4_stage2_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage2_L2_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage2_L2_0"
+ top: "Mconv4_stage2_L2_0"
+}
+layer {
+ name: "Mconv4_stage2_L2_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2_0"
+ top: "Mconv4_stage2_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage2_L2_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage2_L2_1"
+ top: "Mconv4_stage2_L2_1"
+}
+layer {
+ name: "Mconv4_stage2_L2_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2_1"
+ top: "Mconv4_stage2_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage2_L2_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage2_L2_2"
+ top: "Mconv4_stage2_L2_2"
+}
+layer {
+ name: "Mconv4_stage2_L2_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage2_L2_0"
+ bottom: "Mconv4_stage2_L2_1"
+ bottom: "Mconv4_stage2_L2_2"
+ top: "Mconv4_stage2_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage2_L2_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2_concat"
+ top: "Mconv5_stage2_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage2_L2_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage2_L2_0"
+ top: "Mconv5_stage2_L2_0"
+}
+layer {
+ name: "Mconv5_stage2_L2_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2_0"
+ top: "Mconv5_stage2_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage2_L2_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage2_L2_1"
+ top: "Mconv5_stage2_L2_1"
+}
+layer {
+ name: "Mconv5_stage2_L2_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2_1"
+ top: "Mconv5_stage2_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage2_L2_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage2_L2_2"
+ top: "Mconv5_stage2_L2_2"
+}
+layer {
+ name: "Mconv5_stage2_L2_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage2_L2_0"
+ bottom: "Mconv5_stage2_L2_1"
+ bottom: "Mconv5_stage2_L2_2"
+ top: "Mconv5_stage2_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2_concat"
+ top: "Mconv6_stage2_L2"
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage2_L2"
+ type: "PReLU"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv6_stage2_L2"
+}
+layer {
+ name: "Mconv7_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv7_stage2_L2"
+ convolution_param {
+ num_output: 52
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "concat_stage3_L2"
+ type: "Concat"
+ bottom: "conv4_4_CPM"
+ bottom: "Mconv7_stage2_L2"
+ top: "concat_stage3_L2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3_L2_0"
+ type: "Convolution"
+ bottom: "concat_stage3_L2"
+ top: "Mconv1_stage3_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage3_L2_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage3_L2_0"
+ top: "Mconv1_stage3_L2_0"
+}
+layer {
+ name: "Mconv1_stage3_L2_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2_0"
+ top: "Mconv1_stage3_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage3_L2_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage3_L2_1"
+ top: "Mconv1_stage3_L2_1"
+}
+layer {
+ name: "Mconv1_stage3_L2_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2_1"
+ top: "Mconv1_stage3_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage3_L2_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage3_L2_2"
+ top: "Mconv1_stage3_L2_2"
+}
+layer {
+ name: "Mconv1_stage3_L2_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage3_L2_0"
+ bottom: "Mconv1_stage3_L2_1"
+ bottom: "Mconv1_stage3_L2_2"
+ top: "Mconv1_stage3_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage3_L2_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2_concat"
+ top: "Mconv2_stage3_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage3_L2_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage3_L2_0"
+ top: "Mconv2_stage3_L2_0"
+}
+layer {
+ name: "Mconv2_stage3_L2_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2_0"
+ top: "Mconv2_stage3_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage3_L2_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage3_L2_1"
+ top: "Mconv2_stage3_L2_1"
+}
+layer {
+ name: "Mconv2_stage3_L2_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2_1"
+ top: "Mconv2_stage3_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage3_L2_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage3_L2_2"
+ top: "Mconv2_stage3_L2_2"
+}
+layer {
+ name: "Mconv2_stage3_L2_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage3_L2_0"
+ bottom: "Mconv2_stage3_L2_1"
+ bottom: "Mconv2_stage3_L2_2"
+ top: "Mconv2_stage3_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage3_L2_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2_concat"
+ top: "Mconv3_stage3_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage3_L2_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage3_L2_0"
+ top: "Mconv3_stage3_L2_0"
+}
+layer {
+ name: "Mconv3_stage3_L2_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2_0"
+ top: "Mconv3_stage3_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage3_L2_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage3_L2_1"
+ top: "Mconv3_stage3_L2_1"
+}
+layer {
+ name: "Mconv3_stage3_L2_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2_1"
+ top: "Mconv3_stage3_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage3_L2_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage3_L2_2"
+ top: "Mconv3_stage3_L2_2"
+}
+layer {
+ name: "Mconv3_stage3_L2_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage3_L2_0"
+ bottom: "Mconv3_stage3_L2_1"
+ bottom: "Mconv3_stage3_L2_2"
+ top: "Mconv3_stage3_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage3_L2_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2_concat"
+ top: "Mconv4_stage3_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage3_L2_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage3_L2_0"
+ top: "Mconv4_stage3_L2_0"
+}
+layer {
+ name: "Mconv4_stage3_L2_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2_0"
+ top: "Mconv4_stage3_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage3_L2_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage3_L2_1"
+ top: "Mconv4_stage3_L2_1"
+}
+layer {
+ name: "Mconv4_stage3_L2_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2_1"
+ top: "Mconv4_stage3_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage3_L2_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage3_L2_2"
+ top: "Mconv4_stage3_L2_2"
+}
+layer {
+ name: "Mconv4_stage3_L2_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage3_L2_0"
+ bottom: "Mconv4_stage3_L2_1"
+ bottom: "Mconv4_stage3_L2_2"
+ top: "Mconv4_stage3_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage3_L2_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2_concat"
+ top: "Mconv5_stage3_L2_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage3_L2_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage3_L2_0"
+ top: "Mconv5_stage3_L2_0"
+}
+layer {
+ name: "Mconv5_stage3_L2_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2_0"
+ top: "Mconv5_stage3_L2_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage3_L2_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage3_L2_1"
+ top: "Mconv5_stage3_L2_1"
+}
+layer {
+ name: "Mconv5_stage3_L2_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2_1"
+ top: "Mconv5_stage3_L2_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage3_L2_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage3_L2_2"
+ top: "Mconv5_stage3_L2_2"
+}
+layer {
+ name: "Mconv5_stage3_L2_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage3_L2_0"
+ bottom: "Mconv5_stage3_L2_1"
+ bottom: "Mconv5_stage3_L2_2"
+ top: "Mconv5_stage3_L2_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2_concat"
+ top: "Mconv6_stage3_L2"
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage3_L2"
+ type: "PReLU"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv6_stage3_L2"
+}
+layer {
+ name: "Mconv7_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv7_stage3_L2"
+ convolution_param {
+ num_output: 52
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "concat_stage0_L1"
+ type: "Concat"
+ bottom: "conv4_4_CPM"
+ bottom: "Mconv7_stage3_L2"
+ top: "concat_stage0_L1"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage0_L1_0"
+ type: "Convolution"
+ bottom: "concat_stage0_L1"
+ top: "Mconv1_stage0_L1_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L1_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L1_0"
+ top: "Mconv1_stage0_L1_0"
+}
+layer {
+ name: "Mconv1_stage0_L1_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L1_0"
+ top: "Mconv1_stage0_L1_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L1_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L1_1"
+ top: "Mconv1_stage0_L1_1"
+}
+layer {
+ name: "Mconv1_stage0_L1_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L1_1"
+ top: "Mconv1_stage0_L1_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage0_L1_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage0_L1_2"
+ top: "Mconv1_stage0_L1_2"
+}
+layer {
+ name: "Mconv1_stage0_L1_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage0_L1_0"
+ bottom: "Mconv1_stage0_L1_1"
+ bottom: "Mconv1_stage0_L1_2"
+ top: "Mconv1_stage0_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage0_L1_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage0_L1_concat"
+ top: "Mconv2_stage0_L1_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L1_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L1_0"
+ top: "Mconv2_stage0_L1_0"
+}
+layer {
+ name: "Mconv2_stage0_L1_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L1_0"
+ top: "Mconv2_stage0_L1_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L1_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L1_1"
+ top: "Mconv2_stage0_L1_1"
+}
+layer {
+ name: "Mconv2_stage0_L1_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L1_1"
+ top: "Mconv2_stage0_L1_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage0_L1_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage0_L1_2"
+ top: "Mconv2_stage0_L1_2"
+}
+layer {
+ name: "Mconv2_stage0_L1_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage0_L1_0"
+ bottom: "Mconv2_stage0_L1_1"
+ bottom: "Mconv2_stage0_L1_2"
+ top: "Mconv2_stage0_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage0_L1_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage0_L1_concat"
+ top: "Mconv3_stage0_L1_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L1_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L1_0"
+ top: "Mconv3_stage0_L1_0"
+}
+layer {
+ name: "Mconv3_stage0_L1_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L1_0"
+ top: "Mconv3_stage0_L1_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L1_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L1_1"
+ top: "Mconv3_stage0_L1_1"
+}
+layer {
+ name: "Mconv3_stage0_L1_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L1_1"
+ top: "Mconv3_stage0_L1_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage0_L1_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage0_L1_2"
+ top: "Mconv3_stage0_L1_2"
+}
+layer {
+ name: "Mconv3_stage0_L1_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage0_L1_0"
+ bottom: "Mconv3_stage0_L1_1"
+ bottom: "Mconv3_stage0_L1_2"
+ top: "Mconv3_stage0_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage0_L1_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage0_L1_concat"
+ top: "Mconv4_stage0_L1_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L1_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L1_0"
+ top: "Mconv4_stage0_L1_0"
+}
+layer {
+ name: "Mconv4_stage0_L1_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L1_0"
+ top: "Mconv4_stage0_L1_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L1_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L1_1"
+ top: "Mconv4_stage0_L1_1"
+}
+layer {
+ name: "Mconv4_stage0_L1_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L1_1"
+ top: "Mconv4_stage0_L1_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage0_L1_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage0_L1_2"
+ top: "Mconv4_stage0_L1_2"
+}
+layer {
+ name: "Mconv4_stage0_L1_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage0_L1_0"
+ bottom: "Mconv4_stage0_L1_1"
+ bottom: "Mconv4_stage0_L1_2"
+ top: "Mconv4_stage0_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage0_L1_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage0_L1_concat"
+ top: "Mconv5_stage0_L1_0"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L1_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L1_0"
+ top: "Mconv5_stage0_L1_0"
+}
+layer {
+ name: "Mconv5_stage0_L1_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L1_0"
+ top: "Mconv5_stage0_L1_1"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L1_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L1_1"
+ top: "Mconv5_stage0_L1_1"
+}
+layer {
+ name: "Mconv5_stage0_L1_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L1_1"
+ top: "Mconv5_stage0_L1_2"
+ convolution_param {
+ num_output: 96
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage0_L1_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage0_L1_2"
+ top: "Mconv5_stage0_L1_2"
+}
+layer {
+ name: "Mconv5_stage0_L1_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage0_L1_0"
+ bottom: "Mconv5_stage0_L1_1"
+ bottom: "Mconv5_stage0_L1_2"
+ top: "Mconv5_stage0_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage0_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage0_L1_concat"
+ top: "Mconv6_stage0_L1"
+ convolution_param {
+ num_output: 256
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage0_L1"
+ type: "PReLU"
+ bottom: "Mconv6_stage0_L1"
+ top: "Mconv6_stage0_L1"
+}
+layer {
+ name: "Mconv7_stage0_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage0_L1"
+ top: "Mconv7_stage0_L1"
+ convolution_param {
+ num_output: 26
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "concat_stage1_L1"
+ type: "Concat"
+ bottom: "conv4_4_CPM"
+ bottom: "Mconv7_stage0_L1"
+ bottom: "Mconv7_stage3_L2"
+ top: "concat_stage1_L1"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage1_L1_0"
+ type: "Convolution"
+ bottom: "concat_stage1_L1"
+ top: "Mconv1_stage1_L1_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L1_0"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L1_0"
+ top: "Mconv1_stage1_L1_0"
+}
+layer {
+ name: "Mconv1_stage1_L1_1"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L1_0"
+ top: "Mconv1_stage1_L1_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L1_1"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L1_1"
+ top: "Mconv1_stage1_L1_1"
+}
+layer {
+ name: "Mconv1_stage1_L1_2"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L1_1"
+ top: "Mconv1_stage1_L1_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu1_stage1_L1_2"
+ type: "PReLU"
+ bottom: "Mconv1_stage1_L1_2"
+ top: "Mconv1_stage1_L1_2"
+}
+layer {
+ name: "Mconv1_stage1_L1_concat"
+ type: "Concat"
+ bottom: "Mconv1_stage1_L1_0"
+ bottom: "Mconv1_stage1_L1_1"
+ bottom: "Mconv1_stage1_L1_2"
+ top: "Mconv1_stage1_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv2_stage1_L1_0"
+ type: "Convolution"
+ bottom: "Mconv1_stage1_L1_concat"
+ top: "Mconv2_stage1_L1_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L1_0"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L1_0"
+ top: "Mconv2_stage1_L1_0"
+}
+layer {
+ name: "Mconv2_stage1_L1_1"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L1_0"
+ top: "Mconv2_stage1_L1_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L1_1"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L1_1"
+ top: "Mconv2_stage1_L1_1"
+}
+layer {
+ name: "Mconv2_stage1_L1_2"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L1_1"
+ top: "Mconv2_stage1_L1_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu2_stage1_L1_2"
+ type: "PReLU"
+ bottom: "Mconv2_stage1_L1_2"
+ top: "Mconv2_stage1_L1_2"
+}
+layer {
+ name: "Mconv2_stage1_L1_concat"
+ type: "Concat"
+ bottom: "Mconv2_stage1_L1_0"
+ bottom: "Mconv2_stage1_L1_1"
+ bottom: "Mconv2_stage1_L1_2"
+ top: "Mconv2_stage1_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv3_stage1_L1_0"
+ type: "Convolution"
+ bottom: "Mconv2_stage1_L1_concat"
+ top: "Mconv3_stage1_L1_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L1_0"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L1_0"
+ top: "Mconv3_stage1_L1_0"
+}
+layer {
+ name: "Mconv3_stage1_L1_1"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L1_0"
+ top: "Mconv3_stage1_L1_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L1_1"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L1_1"
+ top: "Mconv3_stage1_L1_1"
+}
+layer {
+ name: "Mconv3_stage1_L1_2"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L1_1"
+ top: "Mconv3_stage1_L1_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu3_stage1_L1_2"
+ type: "PReLU"
+ bottom: "Mconv3_stage1_L1_2"
+ top: "Mconv3_stage1_L1_2"
+}
+layer {
+ name: "Mconv3_stage1_L1_concat"
+ type: "Concat"
+ bottom: "Mconv3_stage1_L1_0"
+ bottom: "Mconv3_stage1_L1_1"
+ bottom: "Mconv3_stage1_L1_2"
+ top: "Mconv3_stage1_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv4_stage1_L1_0"
+ type: "Convolution"
+ bottom: "Mconv3_stage1_L1_concat"
+ top: "Mconv4_stage1_L1_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L1_0"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L1_0"
+ top: "Mconv4_stage1_L1_0"
+}
+layer {
+ name: "Mconv4_stage1_L1_1"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L1_0"
+ top: "Mconv4_stage1_L1_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L1_1"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L1_1"
+ top: "Mconv4_stage1_L1_1"
+}
+layer {
+ name: "Mconv4_stage1_L1_2"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L1_1"
+ top: "Mconv4_stage1_L1_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu4_stage1_L1_2"
+ type: "PReLU"
+ bottom: "Mconv4_stage1_L1_2"
+ top: "Mconv4_stage1_L1_2"
+}
+layer {
+ name: "Mconv4_stage1_L1_concat"
+ type: "Concat"
+ bottom: "Mconv4_stage1_L1_0"
+ bottom: "Mconv4_stage1_L1_1"
+ bottom: "Mconv4_stage1_L1_2"
+ top: "Mconv4_stage1_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv5_stage1_L1_0"
+ type: "Convolution"
+ bottom: "Mconv4_stage1_L1_concat"
+ top: "Mconv5_stage1_L1_0"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L1_0"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L1_0"
+ top: "Mconv5_stage1_L1_0"
+}
+layer {
+ name: "Mconv5_stage1_L1_1"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L1_0"
+ top: "Mconv5_stage1_L1_1"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L1_1"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L1_1"
+ top: "Mconv5_stage1_L1_1"
+}
+layer {
+ name: "Mconv5_stage1_L1_2"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L1_1"
+ top: "Mconv5_stage1_L1_2"
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ }
+}
+layer {
+ name: "Mprelu5_stage1_L1_2"
+ type: "PReLU"
+ bottom: "Mconv5_stage1_L1_2"
+ top: "Mconv5_stage1_L1_2"
+}
+layer {
+ name: "Mconv5_stage1_L1_concat"
+ type: "Concat"
+ bottom: "Mconv5_stage1_L1_0"
+ bottom: "Mconv5_stage1_L1_1"
+ bottom: "Mconv5_stage1_L1_2"
+ top: "Mconv5_stage1_L1_concat"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv6_stage1_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage1_L1_concat"
+ top: "Mconv6_stage1_L1"
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ }
+}
+layer {
+ name: "Mprelu6_stage1_L1"
+ type: "PReLU"
+ bottom: "Mconv6_stage1_L1"
+ top: "Mconv6_stage1_L1"
+}
+layer {
+ name: "Mconv7_stage1_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage1_L1"
+ top: "Mconv7_stage1_L1"
+ convolution_param {
+ num_output: 26
+ pad: 0
+ kernel_size: 1
+ }
+}
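+# Note (annotation, not in the original file): net_output below concatenates
+# the stage-1 L1 prediction (26 channels, keypoint heatmaps) with the stage-3
+# L2 prediction (52 channels, part-affinity fields), 78 channels in total.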
+layer {
+ name: "net_output"
+ type: "Concat"
+ bottom: "Mconv7_stage1_L1"
+ bottom: "Mconv7_stage3_L2"
+ top: "net_output"
+ concat_param {
+ axis: 1
+ }
+}
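
The two "defined at runtime" input dims in these deploy files are filled in by whatever runtime loads the network. As a minimal sketch (not part of this repo), OpenCV's DNN module can drive a file like the one above; the weight path, input resolution, and image path below are illustrative assumptions.

import cv2

# Load the deploy definition plus trained weights (paths are assumptions).
net = cv2.dnn.readNetFromCaffe("pose_deploy.prototxt", "pose.caffemodel")

img = cv2.imread("person.jpg")
# blobFromImage fixes the two runtime input_dim slots (height, width);
# 368x368 is a commonly used OpenPose input resolution.
blob = cv2.dnn.blobFromImage(img, 1.0 / 255, (368, 368), (0, 0, 0),
                             swapRB=False, crop=False)
net.setInput(blob)
out = net.forward()
# For the model above this would be (1, 78, 46, 46): 26 heatmaps + 52 PAF
# channels at 1/8 of the input resolution (three stride-2 poolings).
print(out.shape)
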
diff --git a/joints_detectors/openpose/models/pose/coco/pose_deploy_linevec.prototxt b/joints_detectors/openpose/models/pose/coco/pose_deploy_linevec.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..fbe0c824588d61f079de03b06ab428e817b99819
--- /dev/null
+++ b/joints_detectors/openpose/models/pose/coco/pose_deploy_linevec.prototxt
@@ -0,0 +1,2976 @@
+input: "image"
+input_dim: 1
+input_dim: 3
+input_dim: 1 # This value will be defined at runtime
+input_dim: 1 # This value will be defined at runtime
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1_stage1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1_stage1"
+ top: "conv2_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2_stage1"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2_stage1"
+ top: "conv3_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_4"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3_stage1"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3_stage1"
+ top: "conv4_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_2"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3_CPM"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_3_CPM"
+ type: "ReLU"
+ bottom: "conv4_3_CPM"
+ top: "conv4_3_CPM"
+}
+layer {
+ name: "conv4_4_CPM"
+ type: "Convolution"
+ bottom: "conv4_3_CPM"
+ top: "conv4_4_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_4_CPM"
+ type: "ReLU"
+ bottom: "conv4_4_CPM"
+ top: "conv4_4_CPM"
+}
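+# Note (annotation, not in the original file): everything up to conv4_4_CPM is
+# the shared feature extractor -- a VGG-19-style stack truncated after conv4,
+# followed by two CPM-specific 3x3 convolutions. Every stage below consumes
+# its 128-channel output.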
+layer {
+ name: "conv5_1_CPM_L1"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_1_CPM_L1"
+}
+layer {
+ name: "conv5_1_CPM_L2"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_1_CPM_L2"
+}
+layer {
+ name: "conv5_2_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_2_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_2_CPM_L1"
+}
+layer {
+ name: "conv5_2_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_2_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_2_CPM_L2"
+}
+layer {
+ name: "conv5_3_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_3_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_3_CPM_L1"
+}
+layer {
+ name: "conv5_3_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_3_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_3_CPM_L2"
+}
+layer {
+ name: "conv5_4_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_4_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_4_CPM_L1"
+}
+layer {
+ name: "conv5_4_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_4_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_4_CPM_L2"
+}
+layer {
+ name: "conv5_5_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_5_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_5_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_5_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage2"
+ type: "Concat"
+ bottom: "conv5_5_CPM_L1"
+ bottom: "conv5_5_CPM_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage2"
+ concat_param {
+ axis: 1
+ }
+}
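+# Note (annotation, not in the original file): stage 2 refines on 185 input
+# channels -- the stage-1 PAF prediction (38) and heatmap prediction (19)
+# concatenated with the shared conv4_4_CPM features (128). The later stages
+# repeat this same pattern.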
+layer {
+ name: "Mconv1_stage2_L1"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv1_stage2_L1"
+}
+layer {
+ name: "Mconv1_stage2_L2"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv1_stage2_L2"
+}
+layer {
+ name: "Mconv2_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv2_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv2_stage2_L1"
+}
+layer {
+ name: "Mconv2_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv2_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv2_stage2_L2"
+}
+layer {
+ name: "Mconv3_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv3_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv3_stage2_L1"
+}
+layer {
+ name: "Mconv3_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv3_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv3_stage2_L2"
+}
+layer {
+ name: "Mconv4_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv4_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv4_stage2_L1"
+}
+layer {
+ name: "Mconv4_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv4_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv4_stage2_L2"
+}
+layer {
+ name: "Mconv5_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv5_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv5_stage2_L1"
+}
+layer {
+ name: "Mconv5_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv5_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv5_stage2_L2"
+}
+layer {
+ name: "Mconv6_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv6_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv6_stage2_L1"
+}
+layer {
+ name: "Mconv6_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv6_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv6_stage2_L2"
+}
+layer {
+ name: "Mconv7_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv7_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv7_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage3"
+ type: "Concat"
+ bottom: "Mconv7_stage2_L1"
+ bottom: "Mconv7_stage2_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage3"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3_L1"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv1_stage3_L1"
+}
+layer {
+ name: "Mconv1_stage3_L2"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv1_stage3_L2"
+}
+layer {
+ name: "Mconv2_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv2_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv2_stage3_L1"
+}
+layer {
+ name: "Mconv2_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv2_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv2_stage3_L2"
+}
+layer {
+ name: "Mconv3_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv3_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv3_stage3_L1"
+}
+layer {
+ name: "Mconv3_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv3_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv3_stage3_L2"
+}
+layer {
+ name: "Mconv4_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv4_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv4_stage3_L1"
+}
+layer {
+ name: "Mconv4_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv4_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv4_stage3_L2"
+}
+layer {
+ name: "Mconv5_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv5_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv5_stage3_L1"
+}
+layer {
+ name: "Mconv5_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv5_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv5_stage3_L2"
+}
+layer {
+ name: "Mconv6_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv6_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv6_stage3_L1"
+}
+layer {
+ name: "Mconv6_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv6_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv6_stage3_L2"
+}
+layer {
+ name: "Mconv7_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv7_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv7_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage4"
+ type: "Concat"
+ bottom: "Mconv7_stage3_L1"
+ bottom: "Mconv7_stage3_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage4"
+ concat_param {
+ axis: 1
+ }
+}
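+# Annotation (added comment, not part of the upstream OpenPose release):
+# every refinement stage from here on repeats the same recipe -- concatenate
+# the previous stage's two branch outputs (L1: 38 PAF channels, L2: 19
+# heatmap channels) with the shared 128-channel conv4_4_CPM features
+# (38 + 19 + 128 = 185 input channels), then run five 7x7/128 convolutions
+# and two 1x1 convolutions per branch.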
+layer {
+ name: "Mconv1_stage4_L1"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv1_stage4_L1"
+}
+layer {
+ name: "Mconv1_stage4_L2"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv1_stage4_L2"
+}
+layer {
+ name: "Mconv2_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv2_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv2_stage4_L1"
+}
+layer {
+ name: "Mconv2_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv2_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv2_stage4_L2"
+}
+layer {
+ name: "Mconv3_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv3_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv3_stage4_L1"
+}
+layer {
+ name: "Mconv3_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv3_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv3_stage4_L2"
+}
+layer {
+ name: "Mconv4_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv4_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv4_stage4_L1"
+}
+layer {
+ name: "Mconv4_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv4_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv4_stage4_L2"
+}
+layer {
+ name: "Mconv5_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv5_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv5_stage4_L1"
+}
+layer {
+ name: "Mconv5_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv5_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv5_stage4_L2"
+}
+layer {
+ name: "Mconv6_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv6_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv6_stage4_L1"
+}
+layer {
+ name: "Mconv6_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv6_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv6_stage4_L2"
+}
+layer {
+ name: "Mconv7_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv7_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv7_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage5"
+ type: "Concat"
+ bottom: "Mconv7_stage4_L1"
+ bottom: "Mconv7_stage4_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage5"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage5_L1"
+ type: "Convolution"
+ bottom: "concat_stage5"
+ top: "Mconv1_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage5_L1"
+ top: "Mconv1_stage5_L1"
+}
+layer {
+ name: "Mconv1_stage5_L2"
+ type: "Convolution"
+ bottom: "concat_stage5"
+ top: "Mconv1_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage5_L2"
+ top: "Mconv1_stage5_L2"
+}
+layer {
+ name: "Mconv2_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage5_L1"
+ top: "Mconv2_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage5_L1"
+ top: "Mconv2_stage5_L1"
+}
+layer {
+ name: "Mconv2_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage5_L2"
+ top: "Mconv2_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage5_L2"
+ top: "Mconv2_stage5_L2"
+}
+layer {
+ name: "Mconv3_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage5_L1"
+ top: "Mconv3_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage5_L1"
+ top: "Mconv3_stage5_L1"
+}
+layer {
+ name: "Mconv3_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage5_L2"
+ top: "Mconv3_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage5_L2"
+ top: "Mconv3_stage5_L2"
+}
+layer {
+ name: "Mconv4_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage5_L1"
+ top: "Mconv4_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage5_L1"
+ top: "Mconv4_stage5_L1"
+}
+layer {
+ name: "Mconv4_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage5_L2"
+ top: "Mconv4_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage5_L2"
+ top: "Mconv4_stage5_L2"
+}
+layer {
+ name: "Mconv5_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage5_L1"
+ top: "Mconv5_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage5_L1"
+ top: "Mconv5_stage5_L1"
+}
+layer {
+ name: "Mconv5_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage5_L2"
+ top: "Mconv5_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage5_L2"
+ top: "Mconv5_stage5_L2"
+}
+layer {
+ name: "Mconv6_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage5_L1"
+ top: "Mconv6_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage5_L1"
+ top: "Mconv6_stage5_L1"
+}
+layer {
+ name: "Mconv6_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage5_L2"
+ top: "Mconv6_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage5_L2"
+ top: "Mconv6_stage5_L2"
+}
+layer {
+ name: "Mconv7_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage5_L1"
+ top: "Mconv7_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage5_L2"
+ top: "Mconv7_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage6"
+ type: "Concat"
+ bottom: "Mconv7_stage5_L1"
+ bottom: "Mconv7_stage5_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage6"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage6_L1"
+ type: "Convolution"
+ bottom: "concat_stage6"
+ top: "Mconv1_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage6_L1"
+ top: "Mconv1_stage6_L1"
+}
+layer {
+ name: "Mconv1_stage6_L2"
+ type: "Convolution"
+ bottom: "concat_stage6"
+ top: "Mconv1_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage6_L2"
+ top: "Mconv1_stage6_L2"
+}
+layer {
+ name: "Mconv2_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage6_L1"
+ top: "Mconv2_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage6_L1"
+ top: "Mconv2_stage6_L1"
+}
+layer {
+ name: "Mconv2_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage6_L2"
+ top: "Mconv2_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage6_L2"
+ top: "Mconv2_stage6_L2"
+}
+layer {
+ name: "Mconv3_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage6_L1"
+ top: "Mconv3_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage6_L1"
+ top: "Mconv3_stage6_L1"
+}
+layer {
+ name: "Mconv3_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage6_L2"
+ top: "Mconv3_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage6_L2"
+ top: "Mconv3_stage6_L2"
+}
+layer {
+ name: "Mconv4_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage6_L1"
+ top: "Mconv4_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage6_L1"
+ top: "Mconv4_stage6_L1"
+}
+layer {
+ name: "Mconv4_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage6_L2"
+ top: "Mconv4_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage6_L2"
+ top: "Mconv4_stage6_L2"
+}
+layer {
+ name: "Mconv5_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage6_L1"
+ top: "Mconv5_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage6_L1"
+ top: "Mconv5_stage6_L1"
+}
+layer {
+ name: "Mconv5_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage6_L2"
+ top: "Mconv5_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage6_L2"
+ top: "Mconv5_stage6_L2"
+}
+layer {
+ name: "Mconv6_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage6_L1"
+ top: "Mconv6_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage6_L1"
+ top: "Mconv6_stage6_L1"
+}
+layer {
+ name: "Mconv6_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage6_L2"
+ top: "Mconv6_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage6_L2"
+ top: "Mconv6_stage6_L2"
+}
+layer {
+ name: "Mconv7_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage6_L1"
+ top: "Mconv7_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 38
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage6_L2"
+ top: "Mconv7_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 19
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
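+# Annotation (added comment, not in the upstream file): the final concat
+# below lists L2 before L1, so along axis 1 "net_output" carries the 19
+# keypoint heatmaps first, then the 38 PAF channels (57 channels total);
+# downstream code is assumed to slice it in that order.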
+layer {
+ name: "concat_stage7"
+ type: "Concat"
+ bottom: "Mconv7_stage6_L2"
+ bottom: "Mconv7_stage6_L1"
+ # top: "concat_stage7"
+ top: "net_output"
+ concat_param {
+ axis: 1
+ }
+}
diff --git a/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec.prototxt b/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..54492d68a185cc50661078f370c7aa0041d8761a
--- /dev/null
+++ b/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec.prototxt
@@ -0,0 +1,2975 @@
+input: "image"
+input_dim: 1
+input_dim: 3
+input_dim: 1 # This value will be defined at runtime
+input_dim: 1 # This value will be defined at runtime
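+# Annotation (added comment, not in the upstream prototxt): this is the MPI
+# variant of the same two-branch architecture -- branch L1 predicts 28 PAF
+# channels (14 limbs x 2) and branch L2 predicts 16 heatmaps (15 MPI
+# keypoints + background), versus 38/19 in the COCO model above. The two
+# spatial input_dim placeholders are reshaped at runtime to the input
+# image height and width.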
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1_stage1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1_stage1"
+ top: "conv2_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2_stage1"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2_stage1"
+ top: "conv3_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_4"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3_stage1"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3_stage1"
+ top: "conv4_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_2"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3_CPM"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_3_CPM"
+ type: "ReLU"
+ bottom: "conv4_3_CPM"
+ top: "conv4_3_CPM"
+}
+layer {
+ name: "conv4_4_CPM"
+ type: "Convolution"
+ bottom: "conv4_3_CPM"
+ top: "conv4_4_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_4_CPM"
+ type: "ReLU"
+ bottom: "conv4_4_CPM"
+ top: "conv4_4_CPM"
+}
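+# Annotation (added comment): the VGG-style backbone ends here;
+# conv4_3_CPM/conv4_4_CPM compress the features to 128 channels, which feed
+# both stage-1 branches below (L1 for PAFs, L2 for heatmaps) and are re-fed
+# to every later refinement stage through the concat layers.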
+layer {
+ name: "conv5_1_CPM_L1"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_1_CPM_L1"
+}
+layer {
+ name: "conv5_1_CPM_L2"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_1_CPM_L2"
+}
+layer {
+ name: "conv5_2_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_2_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_2_CPM_L1"
+}
+layer {
+ name: "conv5_2_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_2_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_2_CPM_L2"
+}
+layer {
+ name: "conv5_3_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_3_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_3_CPM_L1"
+}
+layer {
+ name: "conv5_3_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_3_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_3_CPM_L2"
+}
+layer {
+ name: "conv5_4_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_4_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_4_CPM_L1"
+}
+layer {
+ name: "conv5_4_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_4_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_4_CPM_L2"
+}
+layer {
+ name: "conv5_5_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_5_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_5_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_5_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage2"
+ type: "Concat"
+ bottom: "conv5_5_CPM_L1"
+ bottom: "conv5_5_CPM_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage2"
+ concat_param {
+ axis: 1
+ }
+}
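+# Annotation (added comment): stage-2 input is 28 + 16 + 128 = 172 channels
+# (stage-1 L1 PAFs, stage-1 L2 heatmaps, shared conv4_4_CPM features);
+# stages 2 through 6 below are structurally identical and differ only in
+# their bottom blobs.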
+layer {
+ name: "Mconv1_stage2_L1"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv1_stage2_L1"
+}
+layer {
+ name: "Mconv1_stage2_L2"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv1_stage2_L2"
+}
+layer {
+ name: "Mconv2_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv2_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv2_stage2_L1"
+}
+layer {
+ name: "Mconv2_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv2_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv2_stage2_L2"
+}
+layer {
+ name: "Mconv3_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv3_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv3_stage2_L1"
+}
+layer {
+ name: "Mconv3_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv3_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv3_stage2_L2"
+}
+layer {
+ name: "Mconv4_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv4_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv4_stage2_L1"
+}
+layer {
+ name: "Mconv4_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv4_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv4_stage2_L2"
+}
+layer {
+ name: "Mconv5_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv5_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv5_stage2_L1"
+}
+layer {
+ name: "Mconv5_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv5_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv5_stage2_L2"
+}
+layer {
+ name: "Mconv6_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv6_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv6_stage2_L1"
+}
+layer {
+ name: "Mconv6_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv6_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv6_stage2_L2"
+}
+layer {
+ name: "Mconv7_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv7_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv7_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage3"
+ type: "Concat"
+ bottom: "Mconv7_stage2_L1"
+ bottom: "Mconv7_stage2_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage3"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3_L1"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv1_stage3_L1"
+}
+layer {
+ name: "Mconv1_stage3_L2"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv1_stage3_L2"
+}
+layer {
+ name: "Mconv2_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv2_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv2_stage3_L1"
+}
+layer {
+ name: "Mconv2_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv2_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv2_stage3_L2"
+}
+layer {
+ name: "Mconv3_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv3_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv3_stage3_L1"
+}
+layer {
+ name: "Mconv3_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv3_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv3_stage3_L2"
+}
+layer {
+ name: "Mconv4_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv4_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv4_stage3_L1"
+}
+layer {
+ name: "Mconv4_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv4_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv4_stage3_L2"
+}
+layer {
+ name: "Mconv5_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv5_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv5_stage3_L1"
+}
+layer {
+ name: "Mconv5_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv5_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv5_stage3_L2"
+}
+layer {
+ name: "Mconv6_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv6_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv6_stage3_L1"
+}
+layer {
+ name: "Mconv6_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv6_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv6_stage3_L2"
+}
+layer {
+ name: "Mconv7_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv7_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv7_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage4"
+ type: "Concat"
+ bottom: "Mconv7_stage3_L1"
+ bottom: "Mconv7_stage3_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage4"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage4_L1"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv1_stage4_L1"
+}
+layer {
+ name: "Mconv1_stage4_L2"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv1_stage4_L2"
+}
+layer {
+ name: "Mconv2_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv2_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv2_stage4_L1"
+}
+layer {
+ name: "Mconv2_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv2_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv2_stage4_L2"
+}
+layer {
+ name: "Mconv3_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv3_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv3_stage4_L1"
+}
+layer {
+ name: "Mconv3_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv3_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv3_stage4_L2"
+}
+layer {
+ name: "Mconv4_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv4_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv4_stage4_L1"
+}
+layer {
+ name: "Mconv4_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv4_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv4_stage4_L2"
+}
+layer {
+ name: "Mconv5_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv5_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv5_stage4_L1"
+}
+layer {
+ name: "Mconv5_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv5_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv5_stage4_L2"
+}
+layer {
+ name: "Mconv6_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv6_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv6_stage4_L1"
+}
+layer {
+ name: "Mconv6_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv6_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv6_stage4_L2"
+}
+layer {
+ name: "Mconv7_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv7_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv7_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage5"
+ type: "Concat"
+ bottom: "Mconv7_stage4_L1"
+ bottom: "Mconv7_stage4_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage5"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage5_L1"
+ type: "Convolution"
+ bottom: "concat_stage5"
+ top: "Mconv1_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage5_L1"
+ top: "Mconv1_stage5_L1"
+}
+layer {
+ name: "Mconv1_stage5_L2"
+ type: "Convolution"
+ bottom: "concat_stage5"
+ top: "Mconv1_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage5_L2"
+ top: "Mconv1_stage5_L2"
+}
+layer {
+ name: "Mconv2_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage5_L1"
+ top: "Mconv2_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage5_L1"
+ top: "Mconv2_stage5_L1"
+}
+layer {
+ name: "Mconv2_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage5_L2"
+ top: "Mconv2_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage5_L2"
+ top: "Mconv2_stage5_L2"
+}
+layer {
+ name: "Mconv3_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage5_L1"
+ top: "Mconv3_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage5_L1"
+ top: "Mconv3_stage5_L1"
+}
+layer {
+ name: "Mconv3_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage5_L2"
+ top: "Mconv3_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage5_L2"
+ top: "Mconv3_stage5_L2"
+}
+layer {
+ name: "Mconv4_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage5_L1"
+ top: "Mconv4_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage5_L1"
+ top: "Mconv4_stage5_L1"
+}
+layer {
+ name: "Mconv4_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage5_L2"
+ top: "Mconv4_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage5_L2"
+ top: "Mconv4_stage5_L2"
+}
+layer {
+ name: "Mconv5_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage5_L1"
+ top: "Mconv5_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage5_L1"
+ top: "Mconv5_stage5_L1"
+}
+layer {
+ name: "Mconv5_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage5_L2"
+ top: "Mconv5_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage5_L2"
+ top: "Mconv5_stage5_L2"
+}
+layer {
+ name: "Mconv6_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage5_L1"
+ top: "Mconv6_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage5_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage5_L1"
+ top: "Mconv6_stage5_L1"
+}
+layer {
+ name: "Mconv6_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage5_L2"
+ top: "Mconv6_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage5_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage5_L2"
+ top: "Mconv6_stage5_L2"
+}
+layer {
+ name: "Mconv7_stage5_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage5_L1"
+ top: "Mconv7_stage5_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage5_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage5_L2"
+ top: "Mconv7_stage5_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage6"
+ type: "Concat"
+ bottom: "Mconv7_stage5_L1"
+ bottom: "Mconv7_stage5_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage6"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage6_L1"
+ type: "Convolution"
+ bottom: "concat_stage6"
+ top: "Mconv1_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage6_L1"
+ top: "Mconv1_stage6_L1"
+}
+layer {
+ name: "Mconv1_stage6_L2"
+ type: "Convolution"
+ bottom: "concat_stage6"
+ top: "Mconv1_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage6_L2"
+ top: "Mconv1_stage6_L2"
+}
+layer {
+ name: "Mconv2_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage6_L1"
+ top: "Mconv2_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage6_L1"
+ top: "Mconv2_stage6_L1"
+}
+layer {
+ name: "Mconv2_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage6_L2"
+ top: "Mconv2_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage6_L2"
+ top: "Mconv2_stage6_L2"
+}
+layer {
+ name: "Mconv3_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage6_L1"
+ top: "Mconv3_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage6_L1"
+ top: "Mconv3_stage6_L1"
+}
+layer {
+ name: "Mconv3_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage6_L2"
+ top: "Mconv3_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage6_L2"
+ top: "Mconv3_stage6_L2"
+}
+layer {
+ name: "Mconv4_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage6_L1"
+ top: "Mconv4_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage6_L1"
+ top: "Mconv4_stage6_L1"
+}
+layer {
+ name: "Mconv4_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage6_L2"
+ top: "Mconv4_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage6_L2"
+ top: "Mconv4_stage6_L2"
+}
+layer {
+ name: "Mconv5_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage6_L1"
+ top: "Mconv5_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage6_L1"
+ top: "Mconv5_stage6_L1"
+}
+layer {
+ name: "Mconv5_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage6_L2"
+ top: "Mconv5_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage6_L2"
+ top: "Mconv5_stage6_L2"
+}
+layer {
+ name: "Mconv6_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage6_L1"
+ top: "Mconv6_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage6_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage6_L1"
+ top: "Mconv6_stage6_L1"
+}
+layer {
+ name: "Mconv6_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage6_L2"
+ top: "Mconv6_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage6_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage6_L2"
+ top: "Mconv6_stage6_L2"
+}
+layer {
+ name: "Mconv7_stage6_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage6_L1"
+ top: "Mconv7_stage6_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage6_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage6_L2"
+ top: "Mconv7_stage6_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage7"
+ type: "Concat"
+ bottom: "Mconv7_stage6_L2"
+ bottom: "Mconv7_stage6_L1"
+ top: "net_output"
+ concat_param {
+ axis: 1
+ }
+}
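
The network definition above ends by concatenating the stage-6 outputs into a single net_output blob: the 16 heatmap channels (branch L2) come first, followed by the 28 part-affinity-field channels (branch L1). A minimal, hedged sketch of running such a deploy file through OpenCV's DNN module (the file names are assumptions; the .caffemodel weights ship separately from the prototxt):

    import cv2

    net = cv2.dnn.readNetFromCaffe(
        'pose_deploy_linevec.prototxt',    # assumed name of the definition above
        'pose_iter_160000.caffemodel')     # assumed companion weights file
    frame = cv2.imread('frame.jpg')
    # 368x368 is a common CPM input size; the input dimensions follow the blob
    blob = cv2.dnn.blobFromImage(frame, 1.0 / 255, (368, 368), (0, 0, 0),
                                 swapRB=False, crop=False)
    net.setInput(blob)
    out = net.forward()                    # (1, 44, 46, 46): 16 heatmaps + 28 PAF channels at stride 8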
diff --git a/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt b/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt
new file mode 100644
index 0000000000000000000000000000000000000000..02ec183300ecbf5509d897e73c7663f16fb3b1a6
--- /dev/null
+++ b/joints_detectors/openpose/models/pose/mpi/pose_deploy_linevec_faster_4_stages.prototxt
@@ -0,0 +1,2081 @@
+input: "image"
+input_dim: 1
+input_dim: 3
+input_dim: 1 # This value will be defined at runtime
+input_dim: 1 # This value will be defined at runtime
+layer {
+ name: "conv1_1"
+ type: "Convolution"
+ bottom: "image"
+ top: "conv1_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_1"
+ type: "ReLU"
+ bottom: "conv1_1"
+ top: "conv1_1"
+}
+layer {
+ name: "conv1_2"
+ type: "Convolution"
+ bottom: "conv1_1"
+ top: "conv1_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 64
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu1_2"
+ type: "ReLU"
+ bottom: "conv1_2"
+ top: "conv1_2"
+}
+layer {
+ name: "pool1_stage1"
+ type: "Pooling"
+ bottom: "conv1_2"
+ top: "pool1_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv2_1"
+ type: "Convolution"
+ bottom: "pool1_stage1"
+ top: "conv2_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_1"
+ type: "ReLU"
+ bottom: "conv2_1"
+ top: "conv2_1"
+}
+layer {
+ name: "conv2_2"
+ type: "Convolution"
+ bottom: "conv2_1"
+ top: "conv2_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu2_2"
+ type: "ReLU"
+ bottom: "conv2_2"
+ top: "conv2_2"
+}
+layer {
+ name: "pool2_stage1"
+ type: "Pooling"
+ bottom: "conv2_2"
+ top: "pool2_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv3_1"
+ type: "Convolution"
+ bottom: "pool2_stage1"
+ top: "conv3_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_1"
+ type: "ReLU"
+ bottom: "conv3_1"
+ top: "conv3_1"
+}
+layer {
+ name: "conv3_2"
+ type: "Convolution"
+ bottom: "conv3_1"
+ top: "conv3_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_2"
+ type: "ReLU"
+ bottom: "conv3_2"
+ top: "conv3_2"
+}
+layer {
+ name: "conv3_3"
+ type: "Convolution"
+ bottom: "conv3_2"
+ top: "conv3_3"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_3"
+ type: "ReLU"
+ bottom: "conv3_3"
+ top: "conv3_3"
+}
+layer {
+ name: "conv3_4"
+ type: "Convolution"
+ bottom: "conv3_3"
+ top: "conv3_4"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu3_4"
+ type: "ReLU"
+ bottom: "conv3_4"
+ top: "conv3_4"
+}
+layer {
+ name: "pool3_stage1"
+ type: "Pooling"
+ bottom: "conv3_4"
+ top: "pool3_stage1"
+ pooling_param {
+ pool: MAX
+ kernel_size: 2
+ stride: 2
+ }
+}
+layer {
+ name: "conv4_1"
+ type: "Convolution"
+ bottom: "pool3_stage1"
+ top: "conv4_1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_1"
+ type: "ReLU"
+ bottom: "conv4_1"
+ top: "conv4_1"
+}
+layer {
+ name: "conv4_2"
+ type: "Convolution"
+ bottom: "conv4_1"
+ top: "conv4_2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_2"
+ type: "ReLU"
+ bottom: "conv4_2"
+ top: "conv4_2"
+}
+layer {
+ name: "conv4_3_CPM"
+ type: "Convolution"
+ bottom: "conv4_2"
+ top: "conv4_3_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 256
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_3_CPM"
+ type: "ReLU"
+ bottom: "conv4_3_CPM"
+ top: "conv4_3_CPM"
+}
+layer {
+ name: "conv4_4_CPM"
+ type: "Convolution"
+ bottom: "conv4_3_CPM"
+ top: "conv4_4_CPM"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu4_4_CPM"
+ type: "ReLU"
+ bottom: "conv4_4_CPM"
+ top: "conv4_4_CPM"
+}
+layer {
+ name: "conv5_1_CPM_L1"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_1_CPM_L1"
+}
+layer {
+ name: "conv5_1_CPM_L2"
+ type: "Convolution"
+ bottom: "conv4_4_CPM"
+ top: "conv5_1_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_1_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_1_CPM_L2"
+}
+layer {
+ name: "conv5_2_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L1"
+ top: "conv5_2_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_2_CPM_L1"
+}
+layer {
+ name: "conv5_2_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_1_CPM_L2"
+ top: "conv5_2_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_2_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_2_CPM_L2"
+}
+layer {
+ name: "conv5_3_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L1"
+ top: "conv5_3_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_3_CPM_L1"
+}
+layer {
+ name: "conv5_3_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_2_CPM_L2"
+ top: "conv5_3_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 1
+ kernel_size: 3
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_3_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_3_CPM_L2"
+}
+layer {
+ name: "conv5_4_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L1"
+ top: "conv5_4_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L1"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_4_CPM_L1"
+}
+layer {
+ name: "conv5_4_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_3_CPM_L2"
+ top: "conv5_4_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 512
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "relu5_4_CPM_L2"
+ type: "ReLU"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_4_CPM_L2"
+}
+layer {
+ name: "conv5_5_CPM_L1"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L1"
+ top: "conv5_5_CPM_L1"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "conv5_5_CPM_L2"
+ type: "Convolution"
+ bottom: "conv5_4_CPM_L2"
+ top: "conv5_5_CPM_L2"
+ param {
+ lr_mult: 1.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 2.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage2"
+ type: "Concat"
+ bottom: "conv5_5_CPM_L1"
+ bottom: "conv5_5_CPM_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage2"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage2_L1"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv1_stage2_L1"
+}
+layer {
+ name: "Mconv1_stage2_L2"
+ type: "Convolution"
+ bottom: "concat_stage2"
+ top: "Mconv1_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv1_stage2_L2"
+}
+layer {
+ name: "Mconv2_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L1"
+ top: "Mconv2_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv2_stage2_L1"
+}
+layer {
+ name: "Mconv2_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage2_L2"
+ top: "Mconv2_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv2_stage2_L2"
+}
+layer {
+ name: "Mconv3_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L1"
+ top: "Mconv3_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv3_stage2_L1"
+}
+layer {
+ name: "Mconv3_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage2_L2"
+ top: "Mconv3_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv3_stage2_L2"
+}
+layer {
+ name: "Mconv4_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L1"
+ top: "Mconv4_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv4_stage2_L1"
+}
+layer {
+ name: "Mconv4_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage2_L2"
+ top: "Mconv4_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv4_stage2_L2"
+}
+layer {
+ name: "Mconv5_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L1"
+ top: "Mconv5_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv5_stage2_L1"
+}
+layer {
+ name: "Mconv5_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage2_L2"
+ top: "Mconv5_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv5_stage2_L2"
+}
+layer {
+ name: "Mconv6_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L1"
+ top: "Mconv6_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv6_stage2_L1"
+}
+layer {
+ name: "Mconv6_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage2_L2"
+ top: "Mconv6_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage2_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv6_stage2_L2"
+}
+layer {
+ name: "Mconv7_stage2_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L1"
+ top: "Mconv7_stage2_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage2_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage2_L2"
+ top: "Mconv7_stage2_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage3"
+ type: "Concat"
+ bottom: "Mconv7_stage2_L1"
+ bottom: "Mconv7_stage2_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage3"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage3_L1"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv1_stage3_L1"
+}
+layer {
+ name: "Mconv1_stage3_L2"
+ type: "Convolution"
+ bottom: "concat_stage3"
+ top: "Mconv1_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv1_stage3_L2"
+}
+layer {
+ name: "Mconv2_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L1"
+ top: "Mconv2_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv2_stage3_L1"
+}
+layer {
+ name: "Mconv2_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage3_L2"
+ top: "Mconv2_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv2_stage3_L2"
+}
+layer {
+ name: "Mconv3_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L1"
+ top: "Mconv3_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv3_stage3_L1"
+}
+layer {
+ name: "Mconv3_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage3_L2"
+ top: "Mconv3_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv3_stage3_L2"
+}
+layer {
+ name: "Mconv4_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L1"
+ top: "Mconv4_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv4_stage3_L1"
+}
+layer {
+ name: "Mconv4_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage3_L2"
+ top: "Mconv4_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv4_stage3_L2"
+}
+layer {
+ name: "Mconv5_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L1"
+ top: "Mconv5_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv5_stage3_L1"
+}
+layer {
+ name: "Mconv5_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage3_L2"
+ top: "Mconv5_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv5_stage3_L2"
+}
+layer {
+ name: "Mconv6_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L1"
+ top: "Mconv6_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv6_stage3_L1"
+}
+layer {
+ name: "Mconv6_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage3_L2"
+ top: "Mconv6_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage3_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv6_stage3_L2"
+}
+layer {
+ name: "Mconv7_stage3_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L1"
+ top: "Mconv7_stage3_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage3_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage3_L2"
+ top: "Mconv7_stage3_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage4"
+ type: "Concat"
+ bottom: "Mconv7_stage3_L1"
+ bottom: "Mconv7_stage3_L2"
+ bottom: "conv4_4_CPM"
+ top: "concat_stage4"
+ concat_param {
+ axis: 1
+ }
+}
+layer {
+ name: "Mconv1_stage4_L1"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv1_stage4_L1"
+}
+layer {
+ name: "Mconv1_stage4_L2"
+ type: "Convolution"
+ bottom: "concat_stage4"
+ top: "Mconv1_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu1_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv1_stage4_L2"
+}
+layer {
+ name: "Mconv2_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L1"
+ top: "Mconv2_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv2_stage4_L1"
+}
+layer {
+ name: "Mconv2_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv1_stage4_L2"
+ top: "Mconv2_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu2_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv2_stage4_L2"
+}
+layer {
+ name: "Mconv3_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L1"
+ top: "Mconv3_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv3_stage4_L1"
+}
+layer {
+ name: "Mconv3_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv2_stage4_L2"
+ top: "Mconv3_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu3_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv3_stage4_L2"
+}
+layer {
+ name: "Mconv4_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L1"
+ top: "Mconv4_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv4_stage4_L1"
+}
+layer {
+ name: "Mconv4_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv3_stage4_L2"
+ top: "Mconv4_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu4_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv4_stage4_L2"
+}
+layer {
+ name: "Mconv5_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L1"
+ top: "Mconv5_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv5_stage4_L1"
+}
+layer {
+ name: "Mconv5_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv4_stage4_L2"
+ top: "Mconv5_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 3
+ kernel_size: 7
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu5_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv5_stage4_L2"
+}
+layer {
+ name: "Mconv6_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L1"
+ top: "Mconv6_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L1"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv6_stage4_L1"
+}
+layer {
+ name: "Mconv6_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv5_stage4_L2"
+ top: "Mconv6_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 128
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mrelu6_stage4_L2"
+ type: "ReLU"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv6_stage4_L2"
+}
+layer {
+ name: "Mconv7_stage4_L1"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L1"
+ top: "Mconv7_stage4_L1"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 28
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "Mconv7_stage4_L2"
+ type: "Convolution"
+ bottom: "Mconv6_stage4_L2"
+ top: "Mconv7_stage4_L2"
+ param {
+ lr_mult: 4.0
+ decay_mult: 1
+ }
+ param {
+ lr_mult: 8.0
+ decay_mult: 0
+ }
+ convolution_param {
+ num_output: 16
+ pad: 0
+ kernel_size: 1
+ weight_filler {
+ type: "gaussian"
+ std: 0.01
+ }
+ bias_filler {
+ type: "constant"
+ }
+ }
+}
+layer {
+ name: "concat_stage7"
+ type: "Concat"
+ bottom: "Mconv7_stage4_L2"
+ bottom: "Mconv7_stage4_L1"
+ top: "net_output"
+ concat_param {
+ axis: 1
+ }
+}
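
As in the six-stage file, the final Concat places the 16 heatmap channels ahead of the 28 PAF channels in net_output. A short sketch of splitting that output and reading one peak per part, assuming out is the (1, 44, h, w) array returned by a forward pass like the one sketched earlier (single-person shortcut only; multi-person decoding needs per-map NMS plus PAF matching):

    import numpy as np

    def split_mpi_output(out):
        # channels 0..15: 15 MPI part heatmaps + background; channels 16..43: PAFs
        return out[0, :16], out[0, 16:]

    def peak_per_part(heatmaps, thresh=0.1):
        points = []
        for hm in heatmaps[:15]:           # skip the background channel
            y, x = np.unravel_index(np.argmax(hm), hm.shape)
            points.append((int(x), int(y)) if hm[y, x] > thresh else None)
        return points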
diff --git a/joints_detectors/openpose/utils.py b/joints_detectors/openpose/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..e75dac339a4464edc4379721996e2d3ff74020a5
--- /dev/null
+++ b/joints_detectors/openpose/utils.py
@@ -0,0 +1,64 @@
+import cv2
+
+joint_pairs = [[0, 1], [1, 3], [0, 2], [2, 4],
+ [5, 6], [5, 7], [7, 9], [6, 8], [8, 10],
+ [5, 11], [6, 12], [11, 12],
+ [11, 13], [12, 14], [13, 15], [14, 16]]
+
+colors = [[255, 0, 0], [255, 85, 0], [255, 170, 0], [255, 255, 0], [170, 255, 0], [85, 255, 0], [0, 255, 0], \
+ [0, 255, 85], [0, 255, 170], [0, 255, 255], [0, 170, 255], [0, 85, 255], [0, 0, 255], [85, 0, 255], \
+ [170, 0, 255], [255, 0, 255], [255, 85, 255]]
+
+
+def plot_keypoint(image, keypoints, keypoint_thresh=0.1):
+ confidence = keypoints[:, :, 2:]
+ coordinates = keypoints[:, :, :2]
+ joint_visible = confidence[:, :, 0] > keypoint_thresh
+
+ # draw a dot for each keypoint whose score clears the threshold
+ for people in keypoints:
+ for i in range(len(people)):
+ x, y, p = people[i]
+ if p < keypoint_thresh:
+ continue
+ x = int(x)
+ y = int(y)
+ cv2.circle(image, (x, y), 4, colors[i], thickness=-1)
+
+ for i in range(coordinates.shape[0]):
+ pts = coordinates[i]
+ for color_i, jp in zip(colors, joint_pairs):
+ if joint_visible[i, jp[0]] and joint_visible[i, jp[1]]:
+ # pts[jp, 0] holds the x coordinates of the joint pair, pts[jp, 1] the y coordinates
+ pt_x = pts[jp, 0]
+ pt_y = pts[jp, 1]
+ cv2.line(image, (int(pt_x[0]), int(pt_y[0])), (int(pt_x[1]), int(pt_y[1])), color_i, 2)
+ return image
+
+
+# convert openpose keypoints(25) format to coco keypoints(17) format
+def convert(op_kpts):
+ '''
+ 0-16 map to 0,16,15,18,17,5,2,6,3,7,4,12,9,13,10,14,11
+ '''
+ coco_kpts = []
+ for i, j in enumerate([0, 16, 15, 18, 17, 5, 2, 6, 3, 7, 4, 12, 9, 13, 10, 14, 11]):
+ score = op_kpts[j][-1]
+ # if an eye/ear keypoint has a low score, fall back to the nose keypoint (index 0)
+ if score < 0.2 and j in [15, 16, 17, 18]:
+ coco_kpts.append(op_kpts[0])
+ else:
+ coco_kpts.append(op_kpts[j])
+
+ return coco_kpts
+
+
+# convert openpose keypoints(25) format to keypoints(18) format
+def convert_18(op_kpts):
+ coco_kpts = []
+ for i, j in enumerate(range(0, 18)):
+ if i < 8:
+ coco_kpts.append(op_kpts[j])
+ else:
+ # indices 8+ shift by one, skipping BODY_25's MidHip (index 8)
+ coco_kpts.append(op_kpts[j + 1])
+ return coco_kpts
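
A quick usage sketch for the converters above, using a random array in BODY_25 shape as a stand-in for a real OpenPose detection:

    import numpy as np

    op_kpts = np.random.rand(25, 3)           # 25 keypoints as (x, y, score)
    coco17 = np.asarray(convert(op_kpts))     # (17, 3), COCO joint order
    body18 = np.asarray(convert_18(op_kpts))  # (18, 3), BODY_25 minus MidHip
    assert coco17.shape == (17, 3) and body18.shape == (18, 3)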
diff --git a/model/block/refine.py b/model/block/refine.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0baa4d81302894868cd8435c795b7160bead369
--- /dev/null
+++ b/model/block/refine.py
@@ -0,0 +1,36 @@
+import torch
+import torch.nn as nn
+
+fc_unit = 1024  # hidden width of the refinement MLP
+
+class refine(nn.Module):
+ def __init__(self, opt):
+ super().__init__()
+
+ out_seqlen = 1
+ fc_in = opt.out_channels*2*out_seqlen*opt.n_joints
+ fc_out = opt.in_channels * opt.n_joints
+
+ self.post_refine = nn.Sequential(
+ nn.Linear(fc_in, fc_unit),
+ nn.ReLU(),
+ nn.Dropout(0.5, inplace=True),
+ nn.Linear(fc_unit, fc_out),
+ nn.Sigmoid()
+ )
+
+ def forward(self, x, x_1):
+ N, T, V, _ = x.size()
+ x_in = torch.cat((x, x_1), -1)
+ x_in = x_in.view(N, -1)
+
+ # per-joint gate in [0, 1]; blend the two xy estimates with complementary weights
+ score = self.post_refine(x_in).view(N, T, V, 2)
+ score_cm = torch.ones_like(score) - score # ones_like keeps device/dtype, replacing Variable(...).cuda()
+ x_out = x.clone()
+ x_out[:, :, :, :2] = score * x[:, :, :, :2] + score_cm * x_1[:, :, :, :2]
+
+ return x_out
+
+
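
A minimal smoke test for the refinement head; the opt namespace is hypothetical and only supplies the three fields the constructor reads:

    from types import SimpleNamespace
    import torch

    opt = SimpleNamespace(out_channels=3, in_channels=2, n_joints=17)  # hypothetical config
    net = refine(opt)
    x   = torch.randn(4, 1, 17, 3)   # (N, T, V, C) pose estimate
    x_1 = torch.randn(4, 1, 17, 3)   # alternative estimate to blend with
    out = net(x, x_1)                # sigmoid gate blends the two xy estimates per joint
    assert out.shape == (4, 1, 17, 3)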
diff --git a/model/block/strided_transformer_encoder.py b/model/block/strided_transformer_encoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fbcb2c608ad05f2ffd1be44662c2c75f1628cd8
--- /dev/null
+++ b/model/block/strided_transformer_encoder.py
@@ -0,0 +1,171 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import math
+import copy
+
+def clones(module, N):
+ return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
+
+class Encoder(nn.Module):
+ def __init__(self, layer, N, length, d_model):
+ super(Encoder, self).__init__()
+ self.layers = layer # already an nn.ModuleList of per-depth layers (not cloned, unlike the vanilla encoder)
+ self.norm = LayerNorm(d_model) # defined but never applied in forward
+
+ self.pos_embedding_1 = nn.Parameter(torch.randn(1, length, d_model))
+ self.pos_embedding_2 = nn.Parameter(torch.randn(1, length, d_model))
+ self.pos_embedding_3 = nn.Parameter(torch.randn(1, length, d_model))
+
+ def forward(self, x, mask):
+ for i, layer in enumerate(self.layers):
+ if i == 0:
+ x += self.pos_embedding_1[:, :x.shape[1]]
+ elif i == 1:
+ x += self.pos_embedding_2[:, :x.shape[1]]
+ elif i == 2:
+ x += self.pos_embedding_3[:, :x.shape[1]]
+
+ x = layer(x, mask, i)
+
+ return x
+
+class LayerNorm(nn.Module):
+ def __init__(self, features, eps=1e-6):
+ super(LayerNorm, self).__init__()
+ self.a_2 = nn.Parameter(torch.ones(features))
+ self.b_2 = nn.Parameter(torch.zeros(features))
+ self.eps = eps
+
+ def forward(self, x):
+ mean = x.mean(-1, keepdim=True)
+ std = x.std(-1, keepdim=True)
+ return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
+
+def attention(query, key, value, mask=None, dropout=None):
+ d_k = query.size(-1)
+ scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)
+ if mask is not None:
+ scores = scores.masked_fill(mask == 0, -1e9)
+ p_attn = F.softmax(scores, dim=-1)
+
+ if dropout is not None:
+ p_attn = dropout(p_attn)
+ return torch.matmul(p_attn, value), p_attn
+
+
+class SublayerConnection(nn.Module):
+ def __init__(self, size, dropout, stride_num, i):
+ super(SublayerConnection, self).__init__()
+ self.norm = LayerNorm(size)
+ self.dropout = nn.Dropout(dropout)
+ self.pooling = nn.MaxPool1d(1, stride_num[i])
+
+ def forward(self, x, sublayer, i=-1, stride_num=-1):
+ if i != -1 and stride_num[i] != 1:
+ # strided branch: pool the residual so its length matches the downsampled sublayer output
+ res = self.pooling(x.permute(0, 2, 1))
+ res = res.permute(0, 2, 1)
+
+ return res + self.dropout(sublayer(self.norm(x)))
+ return x + self.dropout(sublayer(self.norm(x)))
+
+
+class EncoderLayer(nn.Module):
+ def __init__(self, size, self_attn, feed_forward, dropout, stride_num, i):
+ super(EncoderLayer, self).__init__()
+ self.self_attn = self_attn
+ self.feed_forward = feed_forward
+ self.stride_num = stride_num
+ self.sublayer = clones(SublayerConnection(size, dropout, stride_num, i), 2)
+ self.size = size
+
+ def forward(self, x, mask, i):
+ x = self.sublayer[0](x, lambda x: self.self_attn(x, x, x, mask))
+ x = self.sublayer[1](x, self.feed_forward, i, self.stride_num)
+ return x
+
+
+class MultiHeadedAttention(nn.Module):
+ def __init__(self, h, d_model, dropout=0.1):
+ super(MultiHeadedAttention, self).__init__()
+ assert d_model % h == 0
+ self.d_k = d_model // h
+ self.h = h
+ self.linears = clones(nn.Linear(d_model, d_model), 4)
+ self.attn = None
+ self.dropout = nn.Dropout(p=dropout)
+
+ def forward(self, query, key, value, mask=None):
+ if mask is not None:
+ mask = mask.unsqueeze(1)
+ nbatches = query.size(0)
+
+ query, key, value = [l(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2)
+ for l, x in zip(self.linears, (query, key, value))]
+
+ x, self.attn = attention(query, key, value, mask=mask,
+ dropout=self.dropout)
+
+ x = x.transpose(1, 2).contiguous().view(nbatches, -1, self.h * self.d_k)
+ return self.linears[-1](x)
+
+
+class PositionwiseFeedForward(nn.Module):
+ def __init__(self, d_model, d_ff, dropout=0.1, number=-1, stride_num=-1):
+ super(PositionwiseFeedForward, self).__init__()
+ self.w_1 = nn.Conv1d(d_model, d_ff, kernel_size=1, stride=1)
+ # the second conv downsamples the sequence length by stride_num[number]
+ self.w_2 = nn.Conv1d(d_ff, d_model, kernel_size=3, stride=stride_num[number], padding=1)
+
+ self.activation = nn.ReLU() # plain ReLU; the attribute was misleadingly named "gelu" in the original
+
+ self.dropout = nn.Dropout(dropout)
+
+ def forward(self, x):
+ # (N, T, C) -> (N, C, T) for Conv1d, then back
+ x = x.permute(0, 2, 1)
+ x = self.w_2(self.dropout(self.activation(self.w_1(x))))
+ x = x.permute(0, 2, 1)
+
+ return x
+
+class Transformer(nn.Module):
+ def __init__(self, n_layers=3, d_model=256, d_ff=512, h=8, length=27, stride_num=None, dropout=0.1):
+ super(Transformer, self).__init__()
+
+ self.length = length
+
+ self.stride_num = stride_num
+ self.model = self.make_model(N=n_layers, d_model=d_model, d_ff=d_ff, h=h, dropout=dropout, length = self.length)
+
+ def forward(self, x, mask=None):
+ x = self.model(x, mask)
+
+ return x
+
+ def make_model(self, N=3, d_model=256, d_ff=512, h=8, dropout=0.1, length=27):
+ c = copy.deepcopy
+ attn = MultiHeadedAttention(h, d_model)
+
+ model_EncoderLayer = []
+ for i in range(N):
+ ff = PositionwiseFeedForward(d_model, d_ff, dropout, i, self.stride_num)
+ model_EncoderLayer.append(EncoderLayer(d_model, c(attn), c(ff), dropout, self.stride_num, i))
+
+ model_EncoderLayer = nn.ModuleList(model_EncoderLayer)
+
+ model = Encoder(model_EncoderLayer, N, length, d_model)
+
+ return model
+
+
+
+
+
+
+
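
A hedged shape check for the strided encoder above: with stride_num = [3, 3, 3] (an assumption mirroring common 27-frame configs), each of the three layers downsamples time by 3, so 27 frames reduce to one:

    import torch

    model = Transformer(n_layers=3, d_model=256, d_ff=512, h=8,
                        length=27, stride_num=[3, 3, 3])
    x = torch.randn(2, 27, 256)      # (batch, frames, channels)
    y = model(x)
    assert y.shape == (2, 1, 256)    # 27 -> 9 -> 3 -> 1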
diff --git a/model/block/vanilla_transformer_encoder.py b/model/block/vanilla_transformer_encoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b03d4869ab447f9465e78791b9321462c4f0e7e
--- /dev/null
+++ b/model/block/vanilla_transformer_encoder.py
@@ -0,0 +1,133 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import math
+import copy
+
+def clones(module, N):
+ return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
+
+class Encoder(nn.Module):
+ def __init__(self, layer, N):
+ super(Encoder, self).__init__()
+ self.layers = clones(layer, N)
+ self.norm = LayerNorm(layer.size)
+
+ def forward(self, x, mask):
+ for layer in self.layers:
+ x = layer(x, mask)
+ return x
+
+class LayerNorm(nn.Module):
+ def __init__(self, features, eps=1e-6):
+ super(LayerNorm, self).__init__()
+ self.a_2 = nn.Parameter(torch.ones(features))
+ self.b_2 = nn.Parameter(torch.zeros(features))
+ self.eps = eps
+
+ def forward(self, x):
+ mean = x.mean(-1, keepdim=True)
+ std = x.std(-1, keepdim=True)
+ return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
+
+def attention(query, key, value, mask=None, dropout=None):
+ d_k = query.size(-1)
+ scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)
+
+ if mask is not None:
+ scores = scores.masked_fill(mask == 0, -1e9)
+ p_attn = F.softmax(scores, dim=-1)
+
+ if dropout is not None:
+ p_attn = dropout(p_attn)
+ return torch.matmul(p_attn, value), p_attn
+
+
+class SublayerConnection(nn.Module):
+ def __init__(self, size, dropout):
+ super(SublayerConnection, self).__init__()
+ self.norm = LayerNorm(size)
+ self.dropout = nn.Dropout(dropout)
+
+ def forward(self, x, sublayer):
+ return x + self.dropout(sublayer(self.norm(x)))
+
+
+class EncoderLayer(nn.Module):
+ def __init__(self, size, self_attn, feed_forward, dropout):
+ super(EncoderLayer, self).__init__()
+ self.self_attn = self_attn
+ self.feed_forward = feed_forward
+ self.sublayer = clones(SublayerConnection(size, dropout), 2)
+ self.size = size
+
+ def forward(self, x, mask):
+ x = self.sublayer[0](x, lambda x: self.self_attn(x, x, x, mask))
+ return self.sublayer[1](x, self.feed_forward)
+
+
+class MultiHeadedAttention(nn.Module):
+ def __init__(self, h, d_model, dropout=0.1):
+ super(MultiHeadedAttention, self).__init__()
+ assert d_model % h == 0
+ self.d_k = d_model // h
+ self.h = h
+ self.linears = clones(nn.Linear(d_model, d_model), 4)
+ self.attn = None
+ self.dropout = nn.Dropout(p=dropout)
+
+ def forward(self, query, key, value, mask=None):
+ if mask is not None:
+ mask = mask.unsqueeze(1)
+ nbatches = query.size(0)
+
+ query, key, value = \
+ [l(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2)
+ for l, x in zip(self.linears, (query, key, value))]
+
+ x, self.attn = attention(query, key, value, mask=mask, dropout=self.dropout)
+
+ x = x.transpose(1, 2).contiguous().view(nbatches, -1, self.h * self.d_k)
+ return self.linears[-1](x)
+
+
+class PositionwiseFeedForward(nn.Module):
+ def __init__(self, d_model, d_ff, dropout=0.1):
+ super(PositionwiseFeedForward, self).__init__()
+ self.w_1 = nn.Linear(d_model, d_ff)
+ self.w_2 = nn.Linear(d_ff, d_model)
+ self.activation = nn.ReLU() # plain ReLU despite the original "gelu" name
+ self.dropout = nn.Dropout(dropout)
+
+ def forward(self, x):
+ return self.w_2(self.dropout(self.activation(self.w_1(x))))
+
+class Transformer(nn.Module):
+ def __init__(self, n_layers=3, d_model=256, d_ff=512, h=8, dropout=0.1, length=27):
+ super(Transformer, self).__init__()
+
+ self.pos_embedding = nn.Parameter(torch.randn(1, length, d_model))
+ self.model = self.make_model(N=n_layers, d_model=d_model, d_ff=d_ff, h=h, dropout=dropout)
+
+ def forward(self, x, mask=None):
+
+ x += self.pos_embedding
+
+ x = self.model(x, mask)
+
+ return x
+
+ def make_model(self, N=3, d_model=256, d_ff=512, h=8, dropout=0.1):
+ c = copy.deepcopy
+ attn = MultiHeadedAttention(h, d_model)
+ ff = PositionwiseFeedForward(d_model, d_ff, dropout)
+ model = Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N)
+ return model
+
+
+
+
+
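
The vanilla encoder keeps the sequence length fixed, so the input length must match the positional embedding. A short sanity check under the default sizes:

    import torch

    model = Transformer(n_layers=3, d_model=256, d_ff=512, h=8, length=27)
    x = torch.randn(2, 27, 256)
    y = model(x)                     # mask=None: full self-attention over all 27 frames
    assert y.shape == (2, 27, 256)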
diff --git a/model/block/vanilla_transformer_encoder_pretrain.py b/model/block/vanilla_transformer_encoder_pretrain.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb748b16f89c8aae557fa5d35dba0053c3b312c5
--- /dev/null
+++ b/model/block/vanilla_transformer_encoder_pretrain.py
@@ -0,0 +1,158 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import math
+import copy
+
+def clones(module, N):
+ return nn.ModuleList([copy.deepcopy(module) for _ in range(N)])
+
+class Encoder(nn.Module):
+ def __init__(self, layer, N):
+ super(Encoder, self).__init__()
+ self.layers = clones(layer, N)
+ self.norm = LayerNorm(layer.size)
+
+ def forward(self, x, mask):
+ for layer in self.layers:
+ x = layer(x, mask)
+ return x
+
+class LayerNorm(nn.Module):
+ def __init__(self, features, eps=1e-6):
+ super(LayerNorm, self).__init__()
+ self.a_2 = nn.Parameter(torch.ones(features))
+ self.b_2 = nn.Parameter(torch.zeros(features))
+ self.eps = eps
+
+ def forward(self, x):
+ mean = x.mean(-1, keepdim=True)
+ std = x.std(-1, keepdim=True)
+ return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
+
+def attention(query, key, value, mask=None, dropout=None):
+ d_k = query.size(-1)
+ scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)
+
+ if mask is not None:
+ scores = scores.masked_fill(mask == 0, -1e9)
+ p_attn = F.softmax(scores, dim=-1)
+
+ if dropout is not None:
+ p_attn = dropout(p_attn)
+ return torch.matmul(p_attn, value), p_attn
+
+
+class SublayerConnection(nn.Module):
+ def __init__(self, size, dropout):
+ super(SublayerConnection, self).__init__()
+ self.norm = LayerNorm(size)
+ self.dropout = nn.Dropout(dropout)
+
+ def forward(self, x, sublayer):
+ return x + self.dropout(sublayer(self.norm(x)))
+
+
+class EncoderLayer(nn.Module):
+ def __init__(self, size, self_attn, feed_forward, dropout):
+ super(EncoderLayer, self).__init__()
+ self.self_attn = self_attn
+ self.feed_forward = feed_forward
+ self.sublayer = clones(SublayerConnection(size, dropout), 2)
+ self.size = size
+
+ def forward(self, x, mask):
+ x = self.sublayer[0](x, lambda x: self.self_attn(x, x, x, mask))
+ return self.sublayer[1](x, self.feed_forward)
+
+
+class MultiHeadedAttention(nn.Module):
+ def __init__(self, h, d_model, dropout=0.1):
+ super(MultiHeadedAttention, self).__init__()
+ assert d_model % h == 0
+ self.d_k = d_model // h
+ self.h = h
+ self.linears = clones(nn.Linear(d_model, d_model), 4)
+ self.attn = None
+ self.dropout = nn.Dropout(p=dropout)
+
+ def forward(self, query, key, value, mask=None):
+ if mask is not None:
+ mask = mask.unsqueeze(1)
+ nbatches = query.size(0)
+
+ query, key, value = \
+ [l(x).view(nbatches, -1, self.h, self.d_k).transpose(1, 2)
+ for l, x in zip(self.linears, (query, key, value))]
+
+ x, self.attn = attention(query, key, value, mask=mask, dropout=self.dropout)
+
+ x = x.transpose(1, 2).contiguous().view(nbatches, -1, self.h * self.d_k)
+ return self.linears[-1](x)
+
+
+class PositionwiseFeedForward(nn.Module):
+ def __init__(self, d_model, d_ff, dropout=0.1):
+ super(PositionwiseFeedForward, self).__init__()
+ self.w_1 = nn.Linear(d_model, d_ff)
+ self.w_2 = nn.Linear(d_ff, d_model)
+ self.activation = nn.ReLU() # ReLU, renamed from the misleading "gelu"
+ self.dropout = nn.Dropout(dropout)
+
+ def forward(self, x):
+ return self.w_2(self.dropout(self.activation(self.w_1(x))))
+
+class Transformer(nn.Module):
+ def __init__(self, n_layers=3, d_model=256, d_ff=512, h=8, dropout=0.1, length=27):
+ super(Transformer, self).__init__()
+
+ self.pos_embedding = nn.Parameter(torch.randn(1, length, d_model))
+ self.model = self.make_model(N=n_layers, d_model=d_model, d_ff=d_ff, h=h, dropout=dropout)
+
+ def forward(self, x, mask_MAE=None, mask=None):
+ x += self.pos_embedding
+ if mask_MAE is not None:
+ # keep only the visible tokens: ~mask_MAE selects the unmasked positions
+ B, _, C = x.shape
+ x_vis = x[:, ~mask_MAE].reshape(B, -1, C)
+
+ x = self.model(x_vis, mask)
+ else:
+ x = self.model(x, mask)
+
+ return x
+
+ def make_model(self, N=3, d_model=256, d_ff=512, h=8, dropout=0.1):
+ c = copy.deepcopy
+ attn = MultiHeadedAttention(h, d_model)
+ ff = PositionwiseFeedForward(d_model, d_ff, dropout)
+ model = Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N)
+ return model
+
+
+class Transformer_dec(nn.Module):
+ def __init__(self, n_layers=3, d_model=256, d_ff=512, h=8, dropout=0.1, length=27):
+ super(Transformer_dec, self).__init__()
+
+ self.model = self.make_model(N=n_layers, d_model=d_model, d_ff=d_ff, h=h, dropout=dropout)
+
+
+ def forward(self, x, return_token_num, mask=None):
+ # return_token_num is accepted for interface compatibility but unused; the full sequence is returned
+ x = self.model(x, mask)
+
+ return x
+
+ def make_model(self, N=3, d_model=256, d_ff=512, h=8, dropout=0.1):
+ c = copy.deepcopy
+ attn = MultiHeadedAttention(h, d_model)
+ ff = PositionwiseFeedForward(d_model, d_ff, dropout)
+ model = Encoder(EncoderLayer(d_model, c(attn), c(ff), dropout), N)
+ return model
+
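+
+if __name__ == "__main__":
+    # Minimal smoke test (a sketch, not part of the training pipeline): encode a
+    # batch of 27-frame, 256-dim sequences while masking 20 frames MAE-style,
+    # so only the 7 visible tokens pass through the encoder.
+    enc = Transformer(n_layers=3, d_model=256, d_ff=512, h=8, length=27)
+    tokens = torch.randn(2, 27, 256)
+    mask_MAE = torch.zeros(27, dtype=torch.bool)
+    mask_MAE[torch.randperm(27)[:20]] = True
+    out = enc(tokens, mask_MAE=mask_MAE)
+    print(out.shape)  # expected: torch.Size([2, 7, 256])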
+
+
+
diff --git a/model/stmo.py b/model/stmo.py
new file mode 100644
index 0000000000000000000000000000000000000000..7da885e5e54b810906f7c1ffcd81ee916c2e2835
--- /dev/null
+++ b/model/stmo.py
@@ -0,0 +1,126 @@
+import torch
+import torch.nn as nn
+from model.block.vanilla_transformer_encoder import Transformer
+from model.block.strided_transformer_encoder import Transformer as Transformer_reduce
+
+class Linear(nn.Module):
+ def __init__(self, linear_size, p_dropout=0.25):
+ super(Linear, self).__init__()
+ self.l_size = linear_size
+
+ self.relu = nn.LeakyReLU(0.2, inplace=True)
+ self.dropout = nn.Dropout(p_dropout)
+
+ #self.w1 = nn.Linear(self.l_size, self.l_size)
+ self.w1 = nn.Conv1d(self.l_size, self.l_size, kernel_size=1)
+ self.batch_norm1 = nn.BatchNorm1d(self.l_size)
+
+ #self.w2 = nn.Linear(self.l_size, self.l_size)
+ self.w2 = nn.Conv1d(self.l_size, self.l_size, kernel_size=1)
+ self.batch_norm2 = nn.BatchNorm1d(self.l_size)
+
+ def forward(self, x):
+ y = self.w1(x)
+ y = self.batch_norm1(y)
+ y = self.relu(y)
+ y = self.dropout(y)
+
+ y = self.w2(y)
+ y = self.batch_norm2(y)
+ y = self.relu(y)
+ y = self.dropout(y)
+
+ out = x + y
+
+ return out
+
+class FCBlock(nn.Module):
+
+ def __init__(self, channel_in, channel_out, linear_size, block_num):
+ super(FCBlock, self).__init__()
+
+ self.linear_size = linear_size
+ self.block_num = block_num
+ self.layers = []
+ self.channel_in = channel_in
+ self.stage_num = 3
+ self.p_dropout = 0.1
+ #self.fc_1 = nn.Linear(self.channel_in, self.linear_size)
+ self.fc_1 = nn.Conv1d(self.channel_in, self.linear_size, kernel_size=1)
+ self.bn_1 = nn.BatchNorm1d(self.linear_size)
+ for i in range(block_num):
+ self.layers.append(Linear(self.linear_size, self.p_dropout))
+ #self.fc_2 = nn.Linear(self.linear_size, channel_out)
+ self.fc_2 = nn.Conv1d(self.linear_size, channel_out, kernel_size=1)
+
+ self.layers = nn.ModuleList(self.layers)
+ self.relu = nn.LeakyReLU(0.2, inplace=True)
+ self.dropout = nn.Dropout(self.p_dropout)
+
+ def forward(self, x):
+
+ x = self.fc_1(x)
+ x = self.bn_1(x)
+ x = self.relu(x)
+ x = self.dropout(x)
+ for i in range(self.block_num):
+ x = self.layers[i](x)
+ x = self.fc_2(x)
+
+ return x
+
+class Model(nn.Module):
+ def __init__(self, args):
+ super().__init__()
+
+ layers, channel, d_hid, length = args.layers, args.channel, args.d_hid, args.frames
+ stride_num = args.stride_num
+ self.num_joints_in, self.num_joints_out = args.n_joints, args.out_joints
+
+ self.encoder = FCBlock(2*self.num_joints_in, channel, 2*channel, 1)
+
+ self.Transformer = Transformer(layers, channel, d_hid, length=length)
+ self.Transformer_reduce = Transformer_reduce(len(stride_num), channel, d_hid, \
+ length=length, stride_num=stride_num)
+
+ self.fcn = nn.Sequential(
+ nn.BatchNorm1d(channel, momentum=0.1),
+ nn.Conv1d(channel, 3*self.num_joints_out, kernel_size=1)
+ )
+
+ self.fcn_1 = nn.Sequential(
+ nn.BatchNorm1d(channel, momentum=0.1),
+ nn.Conv1d(channel, 3*self.num_joints_out, kernel_size=1)
+ )
+
+ def forward(self, x):
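+        # x: (batch, 2, frames, joints, persons); only person 0 is used.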
+ x = x[:, :, :, :, 0].permute(0, 2, 3, 1).contiguous()
+ x_shape = x.shape
+
+ x = x.view(x.shape[0], x.shape[1], -1)
+ x = x.permute(0, 2, 1).contiguous()
+
+ x = self.encoder(x)
+
+ x = x.permute(0, 2, 1).contiguous()
+ x = self.Transformer(x)
+
+ x_VTE = x
+ x_VTE = x_VTE.permute(0, 2, 1).contiguous()
+ x_VTE = self.fcn_1(x_VTE)
+
+ x_VTE = x_VTE.view(x_shape[0], self.num_joints_out, -1, x_VTE.shape[2])
+ x_VTE = x_VTE.permute(0, 2, 3, 1).contiguous().unsqueeze(dim=-1)
+
+ x = self.Transformer_reduce(x)
+ x = x.permute(0, 2, 1).contiguous()
+ x = self.fcn(x)
+
+ x = x.view(x_shape[0], self.num_joints_out, -1, x.shape[2])
+ x = x.permute(0, 2, 3, 1).contiguous().unsqueeze(dim=-1)
+
+ return x, x_VTE
+
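+if __name__ == "__main__":
+    # Rough smoke test under assumed hyper-parameters (not the released config);
+    # it relies on the strided encoder reducing 27 frames to a single frame
+    # with stride_num=[3, 3, 3].
+    from argparse import Namespace
+    cfg = Namespace(layers=3, channel=256, d_hid=512, frames=27,
+                    stride_num=[3, 3, 3], n_joints=17, out_joints=17)
+    net = Model(cfg)
+    x = torch.randn(2, 2, 27, 17, 1)  # (batch, xy, frames, joints, persons)
+    out, out_VTE = net(x)
+    print(out.shape, out_VTE.shape)  # roughly (2, 3, 1, 17, 1) and (2, 3, 27, 17, 1)
+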
+
+
+
diff --git a/model/stmo_pretrain.py b/model/stmo_pretrain.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc19d8ea5de3e44b841890596cd4e93062a8deb2
--- /dev/null
+++ b/model/stmo_pretrain.py
@@ -0,0 +1,163 @@
+import torch
+import torch.nn as nn
+from model.block.vanilla_transformer_encoder_pretrain import Transformer, Transformer_dec
+from model.block.strided_transformer_encoder import Transformer as Transformer_reduce
+import numpy as np
+
+class LayerNorm(nn.Module):
+ def __init__(self, features, eps=1e-6):
+ super(LayerNorm, self).__init__()
+ self.a_2 = nn.Parameter(torch.ones(features))
+ self.b_2 = nn.Parameter(torch.zeros(features))
+ self.eps = eps
+
+ def forward(self, x):
+ mean = x.mean(-1, keepdim=True)
+ std = x.std(-1, keepdim=True)
+ return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
+
+class Linear(nn.Module):
+ def __init__(self, linear_size, p_dropout=0.25):
+ super(Linear, self).__init__()
+ self.l_size = linear_size
+
+ self.relu = nn.LeakyReLU(0.2, inplace=True)
+ self.dropout = nn.Dropout(p_dropout)
+
+ #self.w1 = nn.Linear(self.l_size, self.l_size)
+ self.w1 = nn.Conv1d(self.l_size, self.l_size, kernel_size=1)
+ self.batch_norm1 = nn.BatchNorm1d(self.l_size)
+
+ #self.w2 = nn.Linear(self.l_size, self.l_size)
+ self.w2 = nn.Conv1d(self.l_size, self.l_size, kernel_size=1)
+ self.batch_norm2 = nn.BatchNorm1d(self.l_size)
+
+ def forward(self, x):
+ y = self.w1(x)
+ y = self.batch_norm1(y)
+ y = self.relu(y)
+ y = self.dropout(y)
+
+ y = self.w2(y)
+ y = self.batch_norm2(y)
+ y = self.relu(y)
+ y = self.dropout(y)
+
+ out = x + y
+
+ return out
+
+class FCBlock(nn.Module):
+
+ def __init__(self, channel_in, channel_out, linear_size, block_num):
+ super(FCBlock, self).__init__()
+
+ self.linear_size = linear_size
+ self.block_num = block_num
+ self.layers = []
+ self.channel_in = channel_in
+ self.stage_num = 3
+ self.p_dropout = 0.1
+ #self.fc_1 = nn.Linear(self.channel_in, self.linear_size)
+ self.fc_1 = nn.Conv1d(self.channel_in, self.linear_size, kernel_size=1)
+ self.bn_1 = nn.BatchNorm1d(self.linear_size)
+ for i in range(block_num):
+ self.layers.append(Linear(self.linear_size, self.p_dropout))
+ #self.fc_2 = nn.Linear(self.linear_size, channel_out)
+ self.fc_2 = nn.Conv1d(self.linear_size, channel_out, kernel_size=1)
+
+ self.layers = nn.ModuleList(self.layers)
+ self.relu = nn.LeakyReLU(0.2, inplace=True)
+ self.dropout = nn.Dropout(self.p_dropout)
+
+ def forward(self, x):
+
+ x = self.fc_1(x)
+ x = self.bn_1(x)
+ x = self.relu(x)
+ x = self.dropout(x)
+ for i in range(self.block_num):
+ x = self.layers[i](x)
+ x = self.fc_2(x)
+
+ return x
+
+class Model_MAE(nn.Module):
+ def __init__(self, args):
+ super().__init__()
+
+ layers, channel, d_hid, length = args.layers, args.channel, args.d_hid, args.frames
+ stride_num = args.stride_num
+ self.spatial_mask_num = args.spatial_mask_num
+ self.num_joints_in, self.num_joints_out = args.n_joints, args.out_joints
+
+ self.length = length
+ dec_dim_shrink = 2
+
+ self.encoder = FCBlock(2*self.num_joints_in, channel, 2*channel, 1)
+
+ self.Transformer = Transformer(layers, channel, d_hid, length=length)
+ self.Transformer_dec = Transformer_dec(layers-1, channel//dec_dim_shrink, d_hid//dec_dim_shrink, length=length)
+
+ self.encoder_to_decoder = nn.Linear(channel, channel//dec_dim_shrink, bias=False)
+ self.encoder_LN = LayerNorm(channel)
+
+ self.fcn_dec = nn.Sequential(
+ nn.BatchNorm1d(channel//dec_dim_shrink, momentum=0.1),
+ nn.Conv1d(channel//dec_dim_shrink, 2*self.num_joints_out, kernel_size=1)
+ )
+
+ # self.fcn_1 = nn.Sequential(
+ # nn.BatchNorm1d(channel, momentum=0.1),
+ # nn.Conv1d(channel, 3*self.num_joints_out, kernel_size=1)
+ # )
+
+ self.dec_pos_embedding = nn.Parameter(torch.randn(1, length, channel//dec_dim_shrink))
+ self.mask_token = nn.Parameter(torch.randn(1, 1, channel//dec_dim_shrink))
+
+ self.spatial_mask_token = nn.Parameter(torch.randn(1, 1, 2))
+
+ def forward(self, x_in, mask, spatial_mask):
+ x_in = x_in[:, :, :, :, 0].permute(0, 2, 3, 1).contiguous()
+ b,f,_,_ = x_in.shape
+
+        # Spatial masking: overwrite the selected (frame, joint) entries with the
+        # learned spatial mask token; `spatial_mask` is expected to mark
+        # spatial_mask_num joints in each of the f frames.
+        x = x_in.clone()
+        x[:, spatial_mask] = self.spatial_mask_token.expand(b, self.spatial_mask_num * f, 2)
+
+ x = x.view(b, f, -1)
+
+ x = x.permute(0, 2, 1).contiguous()
+
+ x = self.encoder(x)
+
+ x = x.permute(0, 2, 1).contiguous()
+ feas = self.Transformer(x, mask_MAE=mask)
+
+ feas = self.encoder_LN(feas)
+ feas = self.encoder_to_decoder(feas)
+
+ B, N, C = feas.shape
+
+        # We do not restore the original order of the visible tokens; instead the
+        # positional embeddings are shuffled accordingly.
+ expand_pos_embed = self.dec_pos_embedding.expand(B, -1, -1).clone()
+ pos_emd_vis = expand_pos_embed[:, ~mask].reshape(B, -1, C)
+ pos_emd_mask = expand_pos_embed[:, mask].reshape(B, -1, C)
+ x_full = torch.cat([feas + pos_emd_vis, self.mask_token + pos_emd_mask], dim=1)
+
+ x_out = self.Transformer_dec(x_full, pos_emd_mask.shape[1])
+
+ x_out = x_out.permute(0, 2, 1).contiguous()
+ x_out = self.fcn_dec(x_out)
+
+ x_out = x_out.view(b, self.num_joints_out, 2, -1)
+ x_out = x_out.permute(0, 2, 3, 1).contiguous().unsqueeze(dim=-1)
+
+ return x_out
+
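+if __name__ == "__main__":
+    # Sketch of the masking protocol this model expects (assumed settings, not
+    # the released config): `mask` hides whole frames MAE-style, while
+    # `spatial_mask` hides spatial_mask_num joints in every frame.
+    from argparse import Namespace
+    cfg = Namespace(layers=3, channel=256, d_hid=512, frames=27,
+                    stride_num=[3, 3, 3], spatial_mask_num=2,
+                    n_joints=17, out_joints=17)
+    net = Model_MAE(cfg)
+    x = torch.randn(2, 2, 27, 17, 1)  # (batch, xy, frames, joints, persons)
+    mask = torch.zeros(27, dtype=torch.bool)
+    mask[torch.randperm(27)[:20]] = True  # hide 20 of the 27 frames
+    spatial_mask = torch.zeros(27, 17, dtype=torch.bool)
+    for t in range(27):
+        spatial_mask[t, torch.randperm(17)[:2]] = True  # hide 2 joints per frame
+    recon_2d = net(x, mask, spatial_mask)
+    print(recon_2d.shape)  # expected: torch.Size([2, 2, 27, 17, 1])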
+
+
+
diff --git a/skeleton_num_visualization.py b/skeleton_num_visualization.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c1914b53c7675fe18107bbe28281824607e331e
--- /dev/null
+++ b/skeleton_num_visualization.py
@@ -0,0 +1,46 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+# Load the data
+import pickle
+with open("output_3Dpose_npy/kun_1280x720_30fps_0-14_0-32.npy", 'rb') as file:
+    data = np.load(file)
+with open("skeleton.npy", 'rb') as file:
+    skeleton = pickle.load(file)
+
+# Extract the coordinates of frame 0
+xyz_0 = data[0]
+
+# Create the 3D axes
+fig = plt.figure(figsize=(10, 8))
+ax = fig.add_subplot(111, projection='3d')
+# Set the elevation and azimuth of the view
+ax.view_init(elev=0., azim=70)
+
+# Plot the 3D joints
+radius = 1.7
+ax.scatter(xyz_0[:, 0], xyz_0[:, 1], xyz_0[:, 2])
+# Label each joint with its index
+for i in range(xyz_0.shape[0]):
+    ax.text(xyz_0[i, 0], xyz_0[i, 1], xyz_0[i, 2], str(i), fontsize=10)
+
+# Draw a bone between each joint and its parent joint
+for num1 in range(xyz_0.shape[0]):
+ parent = skeleton._parents
+ num2 = parent[num1]
+ if num2 != -1:
+ x1, y1, z1 = xyz_0[num1, :]
+ x2, y2, z2 = xyz_0[num2, :]
+ ax.plot([x1, x2], [y1, y2], [z1, z2])
+
+ax.set_xlabel('X')
+ax.set_ylabel('Y')
+ax.set_zlabel('Z')
+# ax.set_xlim3d([-radius/2, radius/2])
+# ax.set_ylim3d([-radius/2, radius/2])
+# ax.set_xticklabels([])
+# ax.set_yticklabels([])
+# ax.set_zticklabels([])
+
+# Save the figure
+plt.savefig('plot.png')
diff --git a/test/dignose.py b/test/dignose.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf5270f5e595da02251d846eadc55999b34207dd
--- /dev/null
+++ b/test/dignose.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Diagnose script for checking OS/hardware/python/pip/mxnet/network.
+The output of this script can be a very good hint to issue/problem.
+"""
+import os
+import platform
+import socket
+import subprocess
+import sys
+import time
+
+try:
+ from urllib.request import urlopen
+ from urllib.parse import urlparse
+except ImportError:
+ from urlparse import urlparse
+ from urllib2 import urlopen
+import argparse
+
+
+def parse_args():
+ """Parse arguments."""
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description='Diagnose script for checking the current system.')
+ choices = ['python', 'pip', 'mxnet', 'os', 'hardware', 'network']
+ for choice in choices:
+ parser.add_argument('--' + choice, default=1, type=int,
+ help='Diagnose {}.'.format(choice))
+ parser.add_argument('--region', default='', type=str,
+ help="Additional sites in which region(s) to test. \
+ Specify 'cn' for example to test mirror sites in China.")
+ parser.add_argument('--timeout', default=10, type=int,
+ help="Connection test timeout threshold, 0 to disable.")
+ args = parser.parse_args()
+ return args
+
+
+URLS = {
+ 'MXNet': 'https://github.com/apache/incubator-mxnet',
+ 'Gluon Tutorial(en)': 'http://gluon.mxnet.io',
+ 'Gluon Tutorial(cn)': 'https://zh.gluon.ai',
+ 'FashionMNIST': 'https://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/gluon/dataset/fashion-mnist/train-labels-idx1-ubyte.gz',
+ 'PYPI': 'https://pypi.python.org/pypi/pip',
+ 'Conda': 'https://repo.continuum.io/pkgs/free/',
+}
+REGIONAL_URLS = {
+ 'cn': {
+ 'PYPI(douban)': 'https://pypi.douban.com/',
+ 'Conda(tsinghua)': 'https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/',
+ }
+}
+
+
+def connection_test(name, url, timeout=10):
+ """Simple connection test"""
+ urlinfo = urlparse(url)
+ start = time.time()
+ try:
+ ip = socket.gethostbyname(urlinfo.netloc)
+ except Exception as e:
+ print('Error resolving DNS for {}: {}, {}'.format(name, url, e))
+ return
+ dns_elapsed = time.time() - start
+ start = time.time()
+ try:
+ _ = urlopen(url, timeout=timeout)
+ except Exception as e:
+ print("Error open {}: {}, {}, DNS finished in {} sec.".format(name, url, e, dns_elapsed))
+ return
+ load_elapsed = time.time() - start
+ print("Timing for {}: {}, DNS: {:.4f} sec, LOAD: {:.4f} sec.".format(name, url, dns_elapsed, load_elapsed))
+
+
+def check_python():
+ print('----------Python Info----------')
+ print('Version :', platform.python_version())
+ print('Compiler :', platform.python_compiler())
+ print('Build :', platform.python_build())
+ print('Arch :', platform.architecture())
+
+
+def check_pip():
+ print('------------Pip Info-----------')
+ try:
+ import pip
+ print('Version :', pip.__version__)
+ print('Directory :', os.path.dirname(pip.__file__))
+ except ImportError:
+        print('No pip installed for the current Python.')
+
+
+def check_mxnet():
+ print('----------MXNet Info-----------')
+ try:
+ import mxnet
+ print('Version :', mxnet.__version__)
+ mx_dir = os.path.dirname(mxnet.__file__)
+ print('Directory :', mx_dir)
+ commit_hash = os.path.join(mx_dir, 'COMMIT_HASH')
+ with open(commit_hash, 'r') as f:
+ ch = f.read().strip()
+ print('Commit Hash :', ch)
+ except ImportError:
+ print('No MXNet installed.')
+ except IOError:
+ print('Hashtag not found. Not installed from pre-built package.')
+ except Exception as e:
+ import traceback
+        if not isinstance(e, IOError):
+            print("An error occurred trying to import mxnet.")
+            print("This is very likely due to missing or incompatible library files.")
+ print(traceback.format_exc())
+
+
+def check_os():
+ print('----------System Info----------')
+ print('Platform :', platform.platform())
+ print('system :', platform.system())
+ print('node :', platform.node())
+ print('release :', platform.release())
+ print('version :', platform.version())
+
+
+def check_hardware():
+ print('----------Hardware Info----------')
+ print('machine :', platform.machine())
+ print('processor :', platform.processor())
+ if sys.platform.startswith('darwin'):
+ pipe = subprocess.Popen(('sysctl', '-a'), stdout=subprocess.PIPE)
+ output = pipe.communicate()[0]
+ for line in output.split(b'\n'):
+ if b'brand_string' in line or b'features' in line:
+ print(line.strip())
+ elif sys.platform.startswith('linux'):
+ subprocess.call(['lscpu'])
+ elif sys.platform.startswith('win32'):
+ subprocess.call(['wmic', 'cpu', 'get', 'name'])
+
+
+def check_network(args):
+ print('----------Network Test----------')
+ if args.timeout > 0:
+ print('Setting timeout: {}'.format(args.timeout))
+        socket.setdefaulttimeout(args.timeout)
+ for region in args.region.strip().split(','):
+ r = region.strip().lower()
+ if not r:
+ continue
+ if r in REGIONAL_URLS:
+ URLS.update(REGIONAL_URLS[r])
+ else:
+ import warnings
+            warnings.warn('Region {} has no region-specific test URLs; only the global sites will be tested.'.format(r))
+ for name, url in URLS.items():
+ connection_test(name, url, args.timeout)
+
+
+if __name__ == '__main__':
+ args = parse_args()
+ if args.python:
+ check_python()
+
+ if args.pip:
+ check_pip()
+
+ if args.mxnet:
+ check_mxnet()
+
+ if args.os:
+ check_os()
+
+ if args.hardware:
+ check_hardware()
+
+ if args.network:
+ check_network(args)
diff --git a/test/opencv_capture_test.py b/test/opencv_capture_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..18dbb6315ae9ebb11b8430f9f01937f091343906
--- /dev/null
+++ b/test/opencv_capture_test.py
@@ -0,0 +1,26 @@
+import cv2
+
+from tqdm import tqdm
+
+path = '../outputs/nba2k.mp4'
+stream = cv2.VideoCapture(path)
+assert stream.isOpened(), 'Cannot capture source'
+
+video_length = int(stream.get(cv2.CAP_PROP_FRAME_COUNT))
+video_fps = stream.get(cv2.CAP_PROP_FPS)
+video_size = (int(stream.get(cv2.CAP_PROP_FRAME_WIDTH)), int(stream.get(cv2.CAP_PROP_FRAME_HEIGHT)))
+writer = cv2.VideoWriter('out.mp4', cv2.VideoWriter_fourcc(*'mp4v'), video_fps, video_size)
+
+for i in tqdm(range(video_length)):
+    grabbed, frame = stream.read()
+
+    # if the `grabbed` boolean is `False`, we have reached the end of the video file
+    if not grabbed:
+        print('\n===========================> This video has ' + str(i) + ' frames in total.')
+        break
+
+    writer.write(frame)
+
+writer.release()
diff --git a/tools/amination.py b/tools/amination.py
new file mode 100644
index 0000000000000000000000000000000000000000..21e1fde9442dff6b9fb6e879dff538a27d4c1ca3
--- /dev/null
+++ b/tools/amination.py
@@ -0,0 +1,167 @@
+'''
+Real-time display of 3D human reconstruction.
+3D drawing is done with pyqtgraph on top of OpenGL;
+runs at roughly 25 FPS.
+'''
+import math
+import sys
+from argparse import ArgumentParser
+
+import cv2
+import numpy as np
+import pyqtgraph as pg
+import pyqtgraph.opengl as gl
+from pyqtgraph.Qt import QtCore, QtGui
+from pyqtgraph.opengl import *
+
+from joints_detectors.openpose.main import load_model as Model2Dload
+
+model2D = Model2Dload()
+from joints_detectors.openpose.main import generate_frame_kpt as OpenPoseInterface
+
+interface2D = OpenPoseInterface
+from tools.utils import videopose_model_load as Model3Dload
+
+model3D = Model3Dload()
+from tools.utils import interface as VideoPoseInterface
+
+interface3D = VideoPoseInterface
+from tools.utils import draw_2Dimg, resize_img, common
+
+common = common()
+item = 0
+item_num = 0
+pos_init = np.zeros(shape=(17, 3))
+
+
+class Visualizer(object):
+ def __init__(self, input_video):
+ self.traces = dict()
+ self.app = QtGui.QApplication(sys.argv)
+ self.w = gl.GLViewWidget()
+        self.w.opts['distance'] = 45.0  # distance of camera from center
+        self.w.opts['fov'] = 60  # horizontal field of view in degrees
+        self.w.opts['elevation'] = 10  # camera elevation angle in degrees
+        self.w.opts['azimuth'] = 90  # camera azimuthal angle in degrees
+        self.w.setWindowTitle('pyqtgraph example: GLLinePlotItem')
+        self.w.setGeometry(450, 700, 980, 700)  # origin is at the top-left corner
+ self.w.show()
+
+ # create the background grids
+ gx = gl.GLGridItem()
+ gx.rotate(90, 0, 1, 0)
+ gx.translate(-10, 0, 0)
+ self.w.addItem(gx)
+ gy = gl.GLGridItem()
+ gy.rotate(90, 1, 0, 0)
+ gy.translate(0, -10, 0)
+ self.w.addItem(gy)
+ gz = gl.GLGridItem()
+ gz.translate(0, 0, -10)
+ self.w.addItem(gz)
+
+ # special setting
+ self.cap = cv2.VideoCapture(input_video)
+ self.video_name = input_video.split('/')[-1].split('.')[0]
+ self.kpt2Ds = []
+ pos = pos_init
+
+ for j, j_parent in enumerate(common.skeleton_parents):
+ if j_parent == -1:
+ continue
+
+ x = np.array([pos[j, 0], pos[j_parent, 0]]) * 10
+ y = np.array([pos[j, 1], pos[j_parent, 1]]) * 10
+ z = np.array([pos[j, 2], pos[j_parent, 2]]) * 10 - 10
+ pos_total = np.vstack([x, y, z]).transpose()
+ self.traces[j] = gl.GLLinePlotItem(pos=pos_total, color=pg.glColor((j, 10)), width=6, antialias=True)
+ self.w.addItem(self.traces[j])
+
+ def start(self):
+ if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
+ QtGui.QApplication.instance().exec_()
+
+ def set_plotdata(self, name, points, color, width):
+ self.traces[name].setData(pos=points, color=color, width=width)
+
+ def update(self):
+ global item
+ global item_num
+        num = item / 2
+        # Sweep the camera azimuth back and forth between 0 and 120 degrees
+        azimuth_value = abs(num % 120 + math.pow(-1, int((num / 120))) * 120) % 120
+        self.w.opts['azimuth'] = azimuth_value
+        print(item)
+ _, frame = self.cap.read()
+ if item % 2 != 1:
+ frame, W, H = resize_img(frame)
+ joint2D = interface2D(frame, model2D)
+ img2D = draw_2Dimg(frame, joint2D, 1)
+ if item == 0:
+ for _ in range(30):
+ self.kpt2Ds.append(joint2D)
+ elif item < 30:
+ self.kpt2Ds.append(joint2D)
+ self.kpt2Ds.pop(0)
+ else:
+ self.kpt2Ds.append(joint2D)
+ self.kpt2Ds.pop(0)
+
+ item += 1
+ joint3D = interface3D(model3D, np.array(self.kpt2Ds), W, H)
+ pos = joint3D[-1] # (17, 3)
+
+ for j, j_parent in enumerate(common.skeleton_parents):
+ if j_parent == -1:
+ continue
+ x = np.array([pos[j, 0], pos[j_parent, 0]]) * 10
+ y = np.array([pos[j, 1], pos[j_parent, 1]]) * 10
+ z = np.array([pos[j, 2], pos[j_parent, 2]]) * 10 - 10
+ pos_total = np.vstack([x, y, z]).transpose()
+ self.set_plotdata(
+ name=j, points=pos_total,
+ color=pg.glColor((j, 10)),
+ width=6)
+
+            # save; zero-pad the frame index to 4 digits
+            name = str(item_num).zfill(4)
+ im3Dname = 'VideoSave/' + '3D_' + name + '.png'
+ d = self.w.renderToArray((img2D.shape[1], img2D.shape[0])) # (W, H)
+ print('Save 3D image: ', im3Dname)
+ pg.makeQImage(d).save(im3Dname)
+
+ im2Dname = 'VideoSave/' + '2D_' + name + '.png'
+ print('Save 2D image: ', im2Dname)
+ cv2.imwrite(im2Dname, img2D)
+
+ item_num += 1
+ else:
+ item += 1
+
+ def animation(self):
+ timer = QtCore.QTimer()
+ timer.timeout.connect(self.update)
+ timer.start(1)
+ self.start()
+
+
+def main():
+ parser = ArgumentParser()
+ parser.add_argument("-i", "--video", help="input video file name", default="/home/xyliu/Videos/sports/dance.mp4")
+ args = parser.parse_args()
+ print(args.video)
+ v = Visualizer(args.video)
+ v.animation()
+ cv2.destroyAllWindows()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/utils.py b/tools/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..8752f922157e3697eb79b149d04cc9ac2da47a67
--- /dev/null
+++ b/tools/utils.py
@@ -0,0 +1,165 @@
+import os
+
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+import torch
+
+from common.utils import evaluate
+
+path = os.path.split(os.path.realpath(__file__))[0]
+main_path = os.path.join(path, '..')
+
+
+class common:
+ keypoints_symmetry = [[1, 3, 5, 7, 9, 11, 13, 15], [2, 4, 6, 8, 10, 12, 14, 16]]
+ rot = np.array([0.14070565, -0.15007018, -0.7552408, 0.62232804], dtype=np.float32)
+ skeleton_parents = np.array([-1, 0, 1, 2, 0, 4, 5, 0, 7, 8, 9, 8, 11, 12, 8, 14, 15])
+ pairs = [(1, 2), (5, 4), (6, 5), (8, 7), (8, 9), (10, 1), (11, 10), (12, 11), (13, 1), (14, 13), (15, 14), (16, 2), (16, 3), (16, 4), (16, 7)]
+
+ kps_left, kps_right = list(keypoints_symmetry[0]), list(keypoints_symmetry[1])
+ joints_left, joints_right = list([4, 5, 6, 11, 12, 13]), list([1, 2, 3, 14, 15, 16])
+ pad = (243 - 1) // 2 # Padding on each side
+ causal_shift = 0
+ joint_pairs = [[0, 1], [1, 3], [0, 2], [2, 4],
+ [5, 6], [5, 7], [7, 9], [6, 8], [8, 10],
+ [5, 11], [6, 12], [11, 12],
+ [11, 13], [12, 14], [13, 15], [14, 16]]
+
+
+def resize_img(frame, max_length=640):
+ H, W = frame.shape[:2]
+ if max(W, H) > max_length:
+ if W > H:
+ W_resize = max_length
+ H_resize = int(H * max_length / W)
+ else:
+ H_resize = max_length
+ W_resize = int(W * max_length / H)
+ frame = cv2.resize(frame, (W_resize, H_resize), interpolation=cv2.INTER_AREA)
+ return frame, W_resize, H_resize
+
+ else:
+ return frame, W, H
+
+
+def rotate_bound(image, angle):
+ (h, w) = image.shape[:2]
+ (cX, cY) = (w // 2, h // 2)
+
+ M = cv2.getRotationMatrix2D((cX, cY), -angle, 1.0)
+ cos = np.abs(M[0, 0])
+ sin = np.abs(M[0, 1])
+
+ nW = int((h * sin) + (w * cos))
+ nH = int((h * cos) + (w * sin))
+
+ M[0, 2] += (nW / 2) - cX
+ M[1, 2] += (nH / 2) - cY
+
+ return cv2.warpAffine(image, M, (nW, nH))
+
+
+def draw_2Dimg(img, kpt, display=None):
+ # kpts : (17, 3) 3-->(x, y, score)
+ im = img.copy()
+ joint_pairs = common.joint_pairs
+ for item in kpt:
+ score = item[-1]
+ if score > 0.1:
+ x, y = int(item[0]), int(item[1])
+ cv2.circle(im, (x, y), 1, (255, 5, 0), 5)
+ for pair in joint_pairs:
+ j, j_parent = pair
+ pt1 = (int(kpt[j][0]), int(kpt[j][1]))
+ pt2 = (int(kpt[j_parent][0]), int(kpt[j_parent][1]))
+ cv2.line(im, pt1, pt2, (0, 255, 0), 2)
+
+ if display:
+ cv2.imshow('im', im)
+ cv2.waitKey(3)
+ return im
+
+
+def draw_3Dimg(pos, image, display=None, kpt2D=None):
+ from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
+ fig = plt.figure(figsize=(12, 6))
+ canvas = FigureCanvas(fig)
+
+ # 2D
+ fig.add_subplot(121)
+    if kpt2D is not None:
+ plt.imshow(draw_2Dimg(image, kpt2D))
+ else:
+ plt.imshow(image)
+
+ # 3D
+ ax = fig.add_subplot(122, projection='3d')
+ radius = 1.7
+ ax.view_init(elev=15., azim=70.)
+ ax.set_xlim3d([-radius / 2, radius / 2])
+ ax.set_zlim3d([0, radius])
+ ax.set_ylim3d([-radius / 2, radius / 2])
+ ax.set_aspect('equal')
+    # hide the axis tick labels
+ ax.set_xticklabels([])
+ ax.set_yticklabels([])
+ ax.set_zticklabels([])
+ ax.dist = 7.5
+ parents = common.skeleton_parents
+ joints_right = common.joints_right
+
+ for j, j_parent in enumerate(parents):
+ if j_parent == -1:
+ continue
+
+ col = 'red' if j in joints_right else 'black'
+        # draw the 3D bone from joint j to its parent
+ ax.plot([pos[j, 0], pos[j_parent, 0]],
+ [pos[j, 1], pos[j_parent, 1]],
+ [pos[j, 2], pos[j_parent, 2]], zdir='z', c=col)
+ width, height = fig.get_size_inches() * fig.get_dpi()
+ canvas.draw() # draw the canvas, cache the renderer
+    image = np.frombuffer(canvas.tostring_rgb(), dtype='uint8').reshape(int(height), int(width), 3)
+ if display:
+ cv2.imshow('im', image)
+ cv2.waitKey(3)
+
+ return image
+
+
+def videoInfo(VideoName):
+ cap = cv2.VideoCapture(VideoName)
+ length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+ return cap, length
+
+
+def videopose_model_load():
+ # load trained model
+ from common.model import TemporalModel
+ chk_filename = main_path + '/checkpoint/pretrained_h36m_detectron_coco.bin'
+    checkpoint = torch.load(chk_filename, map_location=lambda storage, loc: storage)  # map tensors to CPU storage
+ model_pos = TemporalModel(17, 2, 17, filter_widths=[3, 3, 3, 3, 3], causal=False, dropout=False, channels=1024, dense=False)
+ model_pos = model_pos.cuda()
+ model_pos.load_state_dict(checkpoint['model_pos'])
+ receptive_field = model_pos.receptive_field()
+ return model_pos
+
+
+def interface(model_pos, keypoints, W, H):
+ # input (N, 17, 2) return (N, 17, 3)
+ if not isinstance(keypoints, np.ndarray):
+ keypoints = np.array(keypoints)
+
+ from common.camera import camera_to_world, normalize_screen_coordinates
+ # keypoints = normalize_screen_coordinates_new(keypoints[..., :2], w=W, h=H)
+ keypoints = normalize_screen_coordinates(keypoints[..., :2], w=1000, h=1002)
+ input_keypoints = keypoints.copy()
+ # test_time_augmentation True
+ from common.generators import UnchunkedGenerator
+ gen = UnchunkedGenerator(None, None, [input_keypoints], pad=common.pad, causal_shift=common.causal_shift, augment=True, kps_left=common.kps_left,
+ kps_right=common.kps_right, joints_left=common.joints_left, joints_right=common.joints_right)
+ prediction = evaluate(gen, model_pos, return_predictions=True)
+ prediction = camera_to_world(prediction, R=common.rot, t=0)
+ prediction[:, :, 2] -= np.min(prediction[:, :, 2])
+ return prediction
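+
+
+if __name__ == "__main__":
+    # Rough end-to-end usage (a sketch: assumes the pretrained checkpoint and a
+    # CUDA device are available; the zero array stands in for detector output).
+    model_pos = videopose_model_load()
+    kpts_2d = np.zeros((300, 17, 2), dtype=np.float32)
+    pose_3d = interface(model_pos, kpts_2d, W=640, H=360)
+    print(pose_3d.shape)  # (300, 17, 3)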
diff --git a/tools/videoThenAmination.py b/tools/videoThenAmination.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7f0d29f4ccc4e869422d72214b9b18f66a47691
--- /dev/null
+++ b/tools/videoThenAmination.py
@@ -0,0 +1,146 @@
+'''
+First run 2D detection over the whole video,
+then lift all frames to 3D,
+then draw the result; this pipeline is not real-time.
+'''
+import sys
+import time
+from argparse import ArgumentParser
+
+import cv2
+import numpy as np
+import pyqtgraph as pg
+import pyqtgraph.opengl as gl
+from pyqtgraph.Qt import QtCore, QtGui
+from pyqtgraph.opengl import *
+from tqdm import tqdm
+
+from joints_detectors.openpose.main import load_model as Model2Dload
+
+model2D = Model2Dload()
+from joints_detectors.openpose.main import generate_frame_kpt as OpenPoseInterface
+
+interface2D = OpenPoseInterface
+from tools.utils import videopose_model_load as Model3Dload
+
+model3D = Model3Dload()
+from tools.utils import interface as VideoPoseInterface
+
+interface3D = VideoPoseInterface
+from tools.utils import draw_2Dimg, videoInfo, resize_img, common
+
+common = common()
+
+
+# Get 2D keypoints for the whole video first, then lift them all to 3D in one pass
+def VideoPoseJoints(VideoName):
+ cap, cap_length = videoInfo(VideoName)
+ kpt2Ds = []
+ for i in tqdm(range(cap_length)):
+ _, frame = cap.read()
+ frame, W, H = resize_img(frame)
+
+ try:
+ joint2D = interface2D(frame, model2D)
+ except Exception as e:
+ print(e)
+ continue
+ draw_2Dimg(frame, joint2D, 1)
+ kpt2Ds.append(joint2D)
+
+ joint3D = interface3D(model3D, np.array(kpt2Ds), W, H)
+ return joint3D
+
+
+item = 0
+pos_init = np.zeros(shape=(17, 3))
+
+
+class Visualizer(object):
+ def __init__(self, skeletons_3d):
+ self.traces = dict()
+ self.app = QtGui.QApplication(sys.argv)
+ self.w = gl.GLViewWidget()
+        self.w.opts['distance'] = 45.0  # distance of camera from center
+        self.w.opts['fov'] = 60  # horizontal field of view in degrees
+        self.w.opts['elevation'] = 10  # camera elevation angle in degrees
+        self.w.opts['azimuth'] = 90  # camera azimuthal angle in degrees
+        self.w.setWindowTitle('pyqtgraph example: GLLinePlotItem')
+        self.w.setGeometry(450, 700, 980, 700)  # origin is at the top-left corner
+ self.w.show()
+
+ # create the background grids
+ gx = gl.GLGridItem()
+ gx.rotate(90, 0, 1, 0)
+ gx.translate(-10, 0, 0)
+ self.w.addItem(gx)
+ gy = gl.GLGridItem()
+ gy.rotate(90, 1, 0, 0)
+ gy.translate(0, -10, 0)
+ self.w.addItem(gy)
+ gz = gl.GLGridItem()
+ gz.translate(0, 0, -10)
+ self.w.addItem(gz)
+
+ # special setting
+ pos = pos_init
+ self.skeleton_parents = common.skeleton_parents
+ self.skeletons_3d = skeletons_3d
+
+ for j, j_parent in enumerate(self.skeleton_parents):
+ if j_parent == -1:
+ continue
+ x = np.array([pos[j, 0], pos[j_parent, 0]]) * 10
+ y = np.array([pos[j, 1], pos[j_parent, 1]]) * 10
+ z = np.array([pos[j, 2], pos[j_parent, 2]]) * 10 - 10
+ pos_total = np.vstack([x, y, z]).transpose()
+ self.traces[j] = gl.GLLinePlotItem(pos=pos_total, color=pg.glColor((j, 10)), width=6, antialias=True)
+ self.w.addItem(self.traces[j])
+
+ def start(self):
+ if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
+ QtGui.QApplication.instance().exec_()
+
+ def set_plotdata(self, name, points, color, width):
+ self.traces[name].setData(pos=points, color=color, width=width)
+
+ def update(self):
+ time.sleep(0.03)
+ global item
+ pos = self.skeletons_3d[item]
+        print(item)
+ item += 1
+
+ for j, j_parent in enumerate(self.skeleton_parents):
+ if j_parent == -1:
+ continue
+
+ x = np.array([pos[j, 0], pos[j_parent, 0]]) * 10
+ y = np.array([pos[j, 1], pos[j_parent, 1]]) * 10
+ z = np.array([pos[j, 2], pos[j_parent, 2]]) * 10 - 10
+ pos_total = np.vstack([x, y, z]).transpose()
+ self.set_plotdata(
+ name=j, points=pos_total,
+ color=pg.glColor((j, 10)),
+ width=6)
+
+ def animation(self):
+ timer = QtCore.QTimer()
+ timer.timeout.connect(self.update)
+ timer.start(1)
+ self.start()
+
+
+def main(VideoName):
+ print(VideoName)
+ joint3D = VideoPoseJoints(VideoName)
+ v = Visualizer(joint3D)
+ v.animation()
+ cv2.destroyAllWindows()
+
+
+if __name__ == '__main__':
+ parser = ArgumentParser()
+ parser.add_argument("-video", "--video_input", help="input video file name", default="/home/xyliu/Videos/sports/dance.mp4")
+ args = parser.parse_args()
+ VideoName = args.video_input
+ main(VideoName)
diff --git a/videopose_PSTMO.py b/videopose_PSTMO.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b7cabd0f1f4ae101df322473af49f5c39c62f38
--- /dev/null
+++ b/videopose_PSTMO.py
@@ -0,0 +1,209 @@
+import os
+import time
+
+from common.arguments import parse_args
+from common.camera import *
+from common.generators import *
+from common.loss import *
+from common.model import *
+from common.utils import Timer, evaluate, add_path
+from common.inference_3d import *
+
+from model.block.refine import refine
+from model.stmo import Model
+
+import HPE2keyframes as Hk
+
+# from joints_detectors.openpose.main import generate_kpts as open_pose
+
+
+os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
+os.environ["CUDA_VISIBLE_DEVICES"] = "0"
+
+metadata = {'layout_name': 'coco', 'num_joints': 17, 'keypoints_symmetry': [[1, 3, 5, 7, 9, 11, 13, 15], [2, 4, 6, 8, 10, 12, 14, 16]]}
+
+add_path()
+
+
+# record time
+def ckpt_time(ckpt=None):
+    """Return the current time, or (elapsed seconds, current time) when a
+    previous timestamp is given."""
+    if not ckpt:
+        return time.time()
+    else:
+        return time.time() - float(ckpt), time.time()
+
+
+time0 = ckpt_time()
+
+
+def get_detector_2d(detector_name):
+ def get_alpha_pose():
+ from joints_detectors.Alphapose.gene_npz import generate_kpts as alpha_pose
+ return alpha_pose
+
+ def get_hr_pose():
+ from joints_detectors.hrnet.pose_estimation.video import generate_kpts as hr_pose
+ return hr_pose
+
+ detector_map = {
+ 'alpha_pose': get_alpha_pose,
+ 'hr_pose': get_hr_pose,
+ # 'open_pose': open_pose
+ }
+
+ assert detector_name in detector_map, f'2D detector: {detector_name} not implemented yet!'
+
+ return detector_map[detector_name]()
+
+
+class Skeleton:
+ def parents(self):
+ return np.array([-1, 0, 1, 2, 0, 4, 5, 0, 7, 8, 9, 8, 11, 12, 8, 14, 15])
+
+ def joints_right(self):
+ return [1, 2, 3, 14, 15, 16]
+
+
+def main(args):
+ detector_2d = get_detector_2d(args.detector_2d)
+
+ assert detector_2d, 'detector_2d should be in ({alpha, hr, open}_pose)'
+
+ # 2D kpts loads or generate
+ #args.input_npz = './outputs/alpha_pose_skiing_cut/skiing_cut.npz'
+ if not args.input_npz:
+ video_name = args.viz_video
+ keypoints = detector_2d(video_name)
+ else:
+ npz = np.load(args.input_npz)
+ keypoints = npz['kpts'] # (N, 17, 2)
+
+ keypoints_symmetry = metadata['keypoints_symmetry']
+ kps_left, kps_right = list(keypoints_symmetry[0]), list(keypoints_symmetry[1])
+ joints_left, joints_right = list([4, 5, 6, 11, 12, 13]), list([1, 2, 3, 14, 15, 16])
+
+    # Normalize keypoints; a fixed camera resolution (w=1000, h=1002) is assumed
+    keypoints = normalize_screen_coordinates(keypoints[..., :2], w=1000, h=1002)
+
+ # model_pos = TemporalModel(17, 2, 17, filter_widths=[3, 3, 3, 3, 3], causal=args.causal, dropout=args.dropout, channels=args.channels,
+ # dense=args.dense)
+
+ model = {}
+ model['trans'] = Model(args).cuda()
+
+
+ # if torch.cuda.is_available():
+ # model_pos = model_pos.cuda()
+
+ ckpt, time1 = ckpt_time(time0)
+ print('-------------- load data spends {:.2f} seconds'.format(ckpt))
+
+ # load trained model
+ # chk_filename = os.path.join(args.checkpoint, args.resume if args.resume else args.evaluate)
+ # print('Loading checkpoint', chk_filename)
+    # checkpoint = torch.load(chk_filename, map_location=lambda storage, loc: storage)  # map tensors to CPU storage
+ # model_pos.load_state_dict(checkpoint['model_pos'])
+
+ model_dict = model['trans'].state_dict()
+
+    no_refine_path = "checkpoint/PSTMOS_no_refine_48_5137_in_the_wild.pth"
+    pre_dict = torch.load(no_refine_path)
+    for key, value in pre_dict.items():
+        name = key[7:]  # strip the 'module.' prefix left by nn.DataParallel
+        model_dict[name] = pre_dict[key]
+    model['trans'].load_state_dict(model_dict)
+
+
+ ckpt, time2 = ckpt_time(time1)
+ print('-------------- load 3D model spends {:.2f} seconds'.format(ckpt))
+
+ # Receptive field: 243 frames for args.arc [3, 3, 3, 3, 3]
+ receptive_field = args.frames
+ pad = (receptive_field - 1) // 2 # Padding on each side
+ causal_shift = 0
+
+ print('Rendering...')
+ input_keypoints = keypoints.copy()
+ print(input_keypoints.shape)
+ # gen = UnchunkedGenerator(None, None, [input_keypoints],
+ # pad=pad, causal_shift=causal_shift, augment=args.test_time_augmentation,
+ # kps_left=kps_left, kps_right=kps_right, joints_left=joints_left, joints_right=joints_right)
+ # test_data = Fusion(opt=args, train=False, dataset=dataset, root_path=root_path, MAE=opt.MAE)
+ # test_dataloader = torch.utils.data.DataLoader(test_data, batch_size=1,
+ # shuffle=False, num_workers=0, pin_memory=True)
+ #prediction = evaluate(gen, model_pos, return_predictions=True)
+
+ gen = Evaluate_Generator(128, None, None, [input_keypoints], args.stride,
+ pad=pad, causal_shift=causal_shift, augment=args.test_time_augmentation, shuffle=False,
+ kps_left=kps_left, kps_right=kps_right, joints_left=joints_left, joints_right=joints_right)
+
+ prediction = val(args, gen, model)
+
+ # save 3D joint points
+ # np.save(f'outputs/test_3d_{args.video_name}_output.npy', prediction, allow_pickle=True)
+
+ rot = np.array([0.14070565, -0.15007018, -0.7552408, 0.62232804], dtype=np.float32)
+ prediction = camera_to_world(prediction, R=rot, t=0)
+
+ # We don't have the trajectory, but at least we can rebase the height
+ prediction[:, :, 2] -= np.min(prediction[:, :, 2])
+
+ output_dir_dict = {}
+ npy_filename = f'output_3Dpose_npy/{args.video_name}.npy'
+ output_dir_dict['npy'] = npy_filename
+ np.save(npy_filename, prediction, allow_pickle=True)
+
+ anim_output = {'Ours': prediction}
+ input_keypoints = image_coordinates(input_keypoints[..., :2], w=1000, h=1002)
+
+ ckpt, time3 = ckpt_time(time2)
+ print('-------------- generate reconstruction 3D data spends {:.2f} seconds'.format(ckpt))
+
+ if not args.viz_output:
+ args.viz_output = 'outputs/alpha_result.mp4'
+
+ from common.visualization import render_animation
+ render_animation(input_keypoints, anim_output,
+ Skeleton(), 25, args.viz_bitrate, np.array(70., dtype=np.float32), args.viz_output,
+ limit=args.viz_limit, downsample=args.viz_downsample, size=args.viz_size,
+ input_video_path=args.viz_video, viewport=(1000, 1002),
+ input_video_skip=args.viz_skip)
+
+    ckpt, time4 = ckpt_time(time3)
+    print('-------------- total time: {:.2f} seconds'.format(time4 - time0))
+
+ return output_dir_dict
+
+
+def inference_video(video_path, detector_2d):
+    """
+    Full pipeline: video -> 2D keypoints -> 3D pose -> rendered video.
+    :param video_path: path to the input video
+    :param detector_2d: 2D joint detector to use, one of {alpha_pose, hr_pose}
+    :return: dict with the output npy/video paths
+    """
+ args = parse_args()
+
+ args.detector_2d = detector_2d
+ dir_name = os.path.dirname(video_path)
+ basename = os.path.basename(video_path)
+ args.video_name = basename[:basename.rfind('.')]
+ args.viz_video = video_path
+ args.viz_output = f'output_videos/{args.video_name}.mp4'
+ args.evaluate = 'pretrained_h36m_detectron_coco.bin'
+
+ with Timer(video_path):
+ output_dir_dict = main(args)
+
+ output_dir_dict["output_videos"] = args.viz_output
+ output_dir_dict["video_name"] = args.video_name
+ return output_dir_dict
+
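+# Example (illustrative paths): lift a single clip and export Mine-imator keyframes:
+#   out = inference_video('input_videos/dance.mp4', 'alpha_pose')
+#   Hk.hpe2keyframes(out['npy'], 30, f"output_miframes/{out['video_name']}.miframes")
+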
+
+if __name__ == '__main__':
+
+ files = os.listdir('./input_videos')
+ FPS_mine_imator = 30
+ for file in files:
+ output_dir_dict = inference_video(os.path.join('input_videos', file), 'alpha_pose')
+ Hk.hpe2keyframes(output_dir_dict['npy'], FPS_mine_imator, f"output_miframes/{output_dir_dict['video_name']}.miframes")