pengc02 commited on
Commit
ec9a6bc
1 Parent(s): 3ecb1ee
This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +20 -0
  2. AnimatableGaussians +0 -1
  3. AnimatableGaussians/.DS_Store +0 -0
  4. AnimatableGaussians/AVATARREX_DATASET.md +168 -0
  5. AnimatableGaussians/LICENSE +39 -0
  6. AnimatableGaussians/PREPROCESSED_DATASET.md +48 -0
  7. AnimatableGaussians/PRETRAINED_MODEL.md +48 -0
  8. AnimatableGaussians/README.md +117 -0
  9. AnimatableGaussians/__pycache__/config.cpython-310.pyc +0 -0
  10. AnimatableGaussians/assets/avatarrex.jpg +0 -0
  11. AnimatableGaussians/assets/avatarrex_dataset_demo.gif +3 -0
  12. AnimatableGaussians/assets/avatarrex_lbn1.jpg +0 -0
  13. AnimatableGaussians/assets/avatarrex_lbn2.jpg +0 -0
  14. AnimatableGaussians/assets/avatarrex_zzr.jpg +0 -0
  15. AnimatableGaussians/assets/ball.obj +2214 -0
  16. AnimatableGaussians/assets/cylinder.obj +198 -0
  17. AnimatableGaussians/base_trainer.py +258 -0
  18. AnimatableGaussians/cat.sh +0 -0
  19. AnimatableGaussians/config.py +35 -0
  20. AnimatableGaussians/configs/awesome_amass_poses.yaml +25 -0
  21. AnimatableGaussians/configs/huawei_0425/avatar.yaml +75 -0
  22. AnimatableGaussians/configs/huawei_0425/avatar1.yaml +75 -0
  23. AnimatableGaussians/configs/huawei_0425/avatar2.yaml +75 -0
  24. AnimatableGaussians/configs/huawei_0425/nzc.yaml +77 -0
  25. AnimatableGaussians/configs/huawei_0425/nzc_new.yaml +77 -0
  26. AnimatableGaussians/configs/new0829/avatar.yaml +75 -0
  27. AnimatableGaussians/configs/pengcheng/0921_nzc_ckpt_ys.yaml +77 -0
  28. AnimatableGaussians/configs/pengcheng/0923_cys.yaml +77 -0
  29. AnimatableGaussians/configs/pengcheng/0924_nzc_new_pose.yaml +77 -0
  30. AnimatableGaussians/configs/pengcheng/0925_nzc_new_pose.yaml +77 -0
  31. AnimatableGaussians/configs/pengcheng/0926_nzc_new_pose.yaml +78 -0
  32. AnimatableGaussians/configs/pengcheng/0929_lodge.yaml +78 -0
  33. AnimatableGaussians/configs/pengcheng/0930_sing.yaml +78 -0
  34. AnimatableGaussians/configs/pengcheng/1002_nzc_new_pose.yaml +79 -0
  35. AnimatableGaussians/configs/pengcheng/1002_train_pose.yaml +79 -0
  36. AnimatableGaussians/configs/pengcheng/1003_cat_pose.yaml +79 -0
  37. AnimatableGaussians/configs/pengcheng/1004_smooth_train_pose.yaml +79 -0
  38. AnimatableGaussians/configs/pengcheng/1007_slow10.yaml +79 -0
  39. AnimatableGaussians/dataset/__pycache__/commons.cpython-310.pyc +0 -0
  40. AnimatableGaussians/dataset/__pycache__/commons.cpython-38.pyc +0 -0
  41. AnimatableGaussians/dataset/__pycache__/dataset_mv_rgb.cpython-310.pyc +0 -0
  42. AnimatableGaussians/dataset/__pycache__/dataset_mv_rgb.cpython-38.pyc +0 -0
  43. AnimatableGaussians/dataset/__pycache__/dataset_pose.cpython-310.pyc +0 -0
  44. AnimatableGaussians/dataset/__pycache__/dataset_pose.cpython-38.pyc +0 -0
  45. AnimatableGaussians/dataset/commons.py +31 -0
  46. AnimatableGaussians/dataset/dataset_mv_rgb.py +506 -0
  47. AnimatableGaussians/dataset/dataset_pose.py +573 -0
  48. AnimatableGaussians/eval/comparison_body_only_avatars.py +114 -0
  49. AnimatableGaussians/eval/score.py +108 -0
  50. AnimatableGaussians/gaussians/__pycache__/gaussian_model.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -32,6 +32,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.xz filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
+ *.so filter=lfs diff=lfs merge=lfs -text
+ *.o filter=lfs diff=lfs merge=lfs -text
+ *.obj filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
  checkpoints/face_0929/gaussianhead_latest filter=lfs diff=lfs merge=lfs -text
  checkpoints/face_0929/supres_latest filter=lfs diff=lfs merge=lfs -text
@@ -40,3 +43,20 @@ checkpoints/pos_map_ys/body_mix/smpl_pos_map/cano_smpl_nml_map.exr filter=lfs di
  checkpoints/pos_map_ys/body_mix/smpl_pos_map/cano_smpl_pos_map.exr filter=lfs diff=lfs merge=lfs -text
  checkpoints/ref_gaussian/head/000000.ply filter=lfs diff=lfs merge=lfs -text
  checkpoints/ filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/lib.linux-x86_64-cpython-310/fused.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/lib.linux-x86_64-cpython-310/upfirdn2d.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.linux-x86_64-cpython-310/fused_bias_act.o filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.linux-x86_64-cpython-310/upfirdn2d.o filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.win-amd64-cpython-38/Release/fused_bias_act.obj filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.win-amd64-cpython-38/Release/upfirdn2d.obj filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/posevocab_custom_ops/build/lib.linux-x86_64-cpython-38/posevocab_custom_ops.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/posevocab_custom_ops/build/temp.linux-x86_64-cpython-38/bind.o filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/root_finding/build/lib.linux-x86_64-cpython-38/root_finding.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/root_finding/build/temp.linux-x86_64-cpython-38/bind.o filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/assets/avatarrex_dataset_demo.gif filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.win-amd64-cpython-38/Release/fused_bias_act_kernel.obj filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/build/temp.win-amd64-cpython-38/Release/upfirdn2d_kernel.obj filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/dist/fused-0.0.0-py3.10-linux-x86_64.egg filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/network/styleunet/dist/upfirdn2d-0.0.0-py3.10-linux-x86_64.egg filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/posevocab_custom_ops/dist/posevocab_custom_ops-0.0.0-py3.8-linux-x86_64.egg filter=lfs diff=lfs merge=lfs -text
+ AnimatableGaussians/utils/root_finding/dist/root_finding-0.0.0-py3.8-linux-x86_64.egg filter=lfs diff=lfs merge=lfs -text
AnimatableGaussians DELETED
@@ -1 +0,0 @@
1
- Subproject commit f92794c6aaaf7ee69de63afce579b5fddc9e5467
 
 
AnimatableGaussians/.DS_Store ADDED
Binary file (8.2 kB).
 
AnimatableGaussians/AVATARREX_DATASET.md ADDED
@@ -0,0 +1,168 @@
1
+ # AvatarReX Dataset
2
+
3
+ ### AvatarReX: Real-time Expressive Full-body Avatars
4
+ Zerong Zheng, Xiaochen Zhao, Hongwen Zhang, Boning Liu, Yebin Liu. SIGGRAPH 2023
5
+
6
+ [[Project Page]](https://liuyebin.com/AvatarRex/)
7
+
8
+ ![teaser](./assets/avatarrex.jpg)
9
+
10
+ This dataset contains four multi-view image sequences used in our paper "AvatarReX: Real-time Expressive Full-body Avatars". They are captured with 16 well-calibrated RGB cameras at 30 fps, with a resolution of 1500×2048 and lengths ranging from 1800 to 2000 frames. We use the data to evaluate our method for building animatable human body avatars.
11
+
12
+ We also provide the SMPL-X fitting in the dataset.
13
+
14
+
15
+ ## Agreement
16
+ 1. The AvatarReX dataset (the "Dataset") is available for **non-commercial** research purposes only. Any other use, in particular any use for commercial purposes, is prohibited. This includes, without limitation, incorporation in a commercial product, use in a commercial service, as training data for a commercial product, for commercial ergonomic analysis (e.g. product design, architectural design, etc.), or production of other artifacts for commercial purposes including, for example, web services, movies, television programs, mobile applications, or video games. The dataset may not be used for pornographic purposes or to generate pornographic material whether commercial or not. The Dataset may not be reproduced, modified and/or made available in any form to any third party without Tsinghua University’s prior written permission.
17
+
18
+ 2. You agree **not to** reproduce, modify, duplicate, copy, sell, trade, resell or exploit any portion of the images and any portion of derived data in any form to any third party without Tsinghua University’s prior written permission.
19
+
20
+ 3. You agree **not to** further copy, publish or distribute any portion of the Dataset, except that making copies of the Dataset for internal use at a single site within the same organization is allowed.
21
+
22
+ 4. Tsinghua University reserves the right to terminate your access to the Dataset at any time.
23
+
24
+
25
+ ## Download Instructions
26
+ The dataset can be directly downloaded from the following links.
27
+
28
+ * avatarrex_zzr: [this link](https://drive.google.com/file/d/1sCQJ3YU-F3lY9p_HYNIQbT7QyfVKy0HT/view?usp=sharing), 2001 frames in total, ~21 GB
29
+ * avatarrex_zxc: [this link](https://drive.google.com/file/d/1pY1qRj2n6b2YOCmZRVM1D--CXKR02qXU/view?usp=sharing), 1801 frames in total, ~12 GB
30
+ * avatarrex_lbn1: [this link](https://drive.google.com/file/d/1DuESdA5YwvJKapyo7i_KoQxKHHFWzi-w/view?usp=sharing), 1901 frames in total, ~11 GB
31
+ * avatarrex_lbn2: [this link](https://drive.google.com/file/d/1J7ITsYhuWlqhoIkmYni8dL2KJw-wmcy_/view?usp=sharing), 1871 frames in total, ~16 GB
32
+
33
+ Note again that by downloading the dataset you acknowledge that you have read the agreement, understand it, and agree to be bound by it. If you do not agree with these terms and conditions, you must not download and/or use the Dataset.
34
+
35
+
36
+ ## Data Explanation
37
+ For each subject, we provide the multi-view images (```./avatarrex_zzr/********/```) as well as the foreground segmentation (```./avatarrex_zzr/********/mask/pha```), which is obtained using [BackgroundMattingV2](https://github.com/PeterL1n/BackgroundMattingV2). The calibration data is provided in ```calibration_full.json```, and the SMPL-X fitting in ```smpl_params.npz```. Some frames were lost during the capture process, and we list their filenames in ```missing_img_files.txt```.
38
+
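For orientation, the per-subject layout implied by the paths used in the snippet below is roughly the following; the camera folder names are the device serials listed in ```calibration_full.json```, and ```<camera_serial>``` is a placeholder, not a literal folder name:
```
avatarrex_zzr/
├── calibration_full.json        # per-camera extrinsics R, T and intrinsics K
├── smpl_params.npz              # per-frame SMPL-X parameters (global_orient, body_pose, betas, ...)
├── missing_img_files.txt        # filenames of frames lost during capture
└── <camera_serial>/
    ├── 00000000.jpg             # RGB frame
    ├── 00000001.jpg
    └── mask/pha/
        ├── 00000000.jpg         # foreground alpha matte
        └── 00000001.jpg
```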
39
+ Here we provide a code snippet to show how to parse and visualize the data:
40
+ ```python
41
+ import os
42
+ import json
43
+ import numpy as np
44
+ import cv2 as cv
45
+ import torch
46
+ import smplx # (please setup the official SMPL-X model according to: https://pypi.org/project/smplx/)
47
+
48
+ subject = './avatarrex_zzr'
49
+ # subject = './avatarrex_zxc'
50
+ # subject = './avatarrex_lbn1'
51
+ # subject = './avatarrex_lbn2'
52
+
53
+ # initialize smpl model
54
+ smpl = smplx.SMPLX(model_path = './smplx', gender = 'neutral', use_pca = False, num_pca_comps = 45, flat_hand_mean = True, batch_size = 1)
55
+
56
+ # load camera data
57
+ with open(os.path.join(subject, 'calibration_full.json'), 'r') as fp:
58
+ cam_data = json.load(fp)
59
+
60
+ # load smpl data
61
+ smpl_data = np.load(os.path.join(subject, 'smpl_params.npz'), allow_pickle = True)
62
+ smpl_data = dict(smpl_data)
63
+ smpl_data = {k: torch.from_numpy(v.astype(np.float32)) for k, v in smpl_data.items()}
64
+
65
+ frame_num = smpl_data['body_pose'].shape[0]
66
+ for frame_id in range(0, frame_num, 30):
67
+ smpl_out = smpl.forward(
68
+ global_orient = smpl_data['global_orient'][frame_id].unsqueeze(0),
69
+ transl = smpl_data['transl'][frame_id].unsqueeze(0),
70
+ body_pose = smpl_data['body_pose'][frame_id].unsqueeze(0),
71
+ jaw_pose = smpl_data['jaw_pose'][frame_id].unsqueeze(0),
72
+ betas = smpl_data['betas'][0].unsqueeze(0),
73
+ expression = smpl_data['expression'][frame_id].unsqueeze(0),
74
+ left_hand_pose = smpl_data['left_hand_pose'][frame_id].unsqueeze(0),
75
+ right_hand_pose = smpl_data['right_hand_pose'][frame_id].unsqueeze(0),
76
+ )
77
+ smpl_verts = smpl_out.vertices # smpl vertices in live poses
78
+ smpl_verts = smpl_verts.detach().cpu().numpy().squeeze(0)
79
+
80
+ smpl_proj_vis = []
81
+ for cam_id in range(0, len(cam_data), 3):
82
+ cam_sn = list(cam_data.keys())[cam_id]
83
+
84
+ img_fpath = os.path.join(subject, '%s/%08d.jpg' % (cam_sn, frame_id))
85
+ msk_fpath = os.path.join(subject, '%s/mask/pha/%08d.jpg' % (cam_sn, frame_id))
86
+
87
+ if (not os.path.isfile(img_fpath)) or (not os.path.isfile(msk_fpath)):
88
+ break
89
+
90
+ img = cv.imread(img_fpath, cv.IMREAD_UNCHANGED)
91
+ msk = cv.imread(msk_fpath, cv.IMREAD_GRAYSCALE)
92
+ img = img * np.uint8(msk > 128)[:, :, np.newaxis] # remove background
93
+ img_ = cv.resize(img, (img.shape[1] // 2, img.shape[0] // 2))
94
+
95
+ # transform smpl from world to camera
96
+ cam_R = np.array(cam_data[cam_sn]['R']).astype(np.float32).reshape((3, 3))
97
+ cam_t = np.array(cam_data[cam_sn]['T']).astype(np.float32).reshape((3,))
98
+ smpl_verts_cam = np.matmul(smpl_verts, cam_R.transpose()) + cam_t.reshape(1, 3)
99
+
100
+ # project smpl vertices to the image
101
+ cam_K = np.array(cam_data[cam_sn]['K']).astype(np.float32).reshape((3, 3))
102
+ cam_K *= np.array([img_.shape[1] / img.shape[1], img_.shape[0] / img.shape[0], 1.0], dtype = np.float32).reshape(3, 1)
103
+ smpl_verts_proj = np.matmul(smpl_verts_cam / smpl_verts_cam[:, 2:], cam_K.transpose())
104
+
105
+ # visualize the projection
106
+ smpl_verts_proj = np.round(smpl_verts_proj).astype(np.int32)
107
+ smpl_verts_proj[:, 0] = np.clip(smpl_verts_proj[:, 0], 0, img_.shape[1] - 1)
108
+ smpl_verts_proj[:, 1] = np.clip(smpl_verts_proj[:, 1], 0, img_.shape[0] - 1)
109
+
110
+ for v in smpl_verts_proj:
111
+ img_[v[1], v[0], :] = np.array([255, 255, 255], dtype = np.uint8)
112
+ smpl_proj_vis.append(img_)
113
+
114
+ if len(smpl_proj_vis) != 6:
115
+ continue
116
+
117
+ vis = np.concatenate([
118
+ np.concatenate(smpl_proj_vis[:3], axis = 1),
119
+ np.concatenate(smpl_proj_vis[3:], axis = 1),
120
+ ], axis = 0)
121
+ vis = cv.resize(vis, (0, 0), fx = 0.5, fy = 0.5)
122
+ cv.imshow('vis', vis)
123
+ cv.waitKey(1)
124
+ ```
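For clarity, the transform and projection in the snippet above follow the standard pinhole convention: ```R``` and ```T``` map world coordinates to camera coordinates, and the intrinsics ```K``` (rescaled to the working resolution) map camera coordinates to pixels. Here $f_x, f_y, c_x, c_y$ denote the usual focal lengths and principal point stored in ```K```:
```latex
\mathbf{x}_{\mathrm{cam}} = R\,\mathbf{x}_{\mathrm{world}} + \mathbf{t},
\qquad
\begin{pmatrix} u \\ v \\ 1 \end{pmatrix}
= K \,\frac{\mathbf{x}_{\mathrm{cam}}}{z_{\mathrm{cam}}},
\qquad
K = \begin{pmatrix} f_x & 0 & c_x \\ 0 & f_y & c_y \\ 0 & 0 & 1 \end{pmatrix}.
```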
125
+ If everything is set up properly, you will see an animation like this:
126
+ <p align="center">
127
+ <img src="./assets/avatarrex_dataset_demo.gif">
128
+ </p>
129
+
130
+
131
+ ## Related Datasets from THU3DV Lab [[Link]](https://liuyebin.com/dataset.html)
132
+ [[THuman4.0 Dataset]](https://github.com/ZhengZerong/THUman4.0-Dataset/) Containing 3 multi-view RGB sequences captured with 24 well-calibrated cameras as well as corresponding SMPL-X registration.
133
+
134
+ [[THuman3.0 Dataset]](https://github.com/fwbx529/THuman3.0-Dataset) Containing 20 human-garment combinations, where each combination has 15 to 35 high-quality human scans captured by a dense DSLR rig.
135
+
136
+ [[MultiHuman Dataset]](https://github.com/y-zheng18/MultiHuman-Dataset/) Containing 453 high-quality scans, each containing 1-3 persons. The dataset can be used to train and evaluate multi-person reconstruction algorithms.
137
+
138
+ [[THuman2.0 Dataset]](https://github.com/ytrock/THuman2.0-Dataset) Containing 500 high-quality human scans captured by a dense DSLR rig, with SMPL annotations.
139
+
140
+
141
+
142
+
143
+ ## Citation
144
+ If you use this dataset for your research, please consider citing:
145
+ ```bibtex
146
+ @article{zheng2023avatarrex,
147
+ title={AvatarReX: Real-time Expressive Full-body Avatars},
148
+ author={Zheng, Zerong and Zhao, Xiaochen and Zhang, Hongwen and Liu, Boning and Liu, Yebin},
149
+ journal={ACM Transactions on Graphics (TOG)},
150
+ volume={42},
151
+ number={4},
152
+ articleno={},
153
+ year={2023},
154
+ publisher={ACM New York, NY, USA}
155
+ }
156
+
157
+ @inproceedings{li2023animatablegaussians,
158
+ title={Animatable Gaussians: Learning Pose-dependent Gaussian Maps for High-fidelity Human Avatar Modeling},
159
+ author={Li, Zhe and Zheng, Zerong and Wang, Lizhen and Liu, Yebin},
160
+ booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
161
+ year={2024}
162
+ }
163
+ ```
164
+
165
+ ## Contact
166
+ - Zerong Zheng [([email protected])](mailto:[email protected])
167
+ - Zhe Li [([email protected])](mailto:[email protected])
168
+ - Yebin Liu [([email protected])](mailto:[email protected])
AnimatableGaussians/LICENSE ADDED
@@ -0,0 +1,39 @@
1
+ Please read carefully the following terms and conditions and any accompanying documentation before you download and/or use Animatable Gaussians Software/Code/Data (the "Software"). By downloading and/or using the Software, you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Software.
2
+
3
+ Ownership
4
+
5
+ The Software has been developed at the Tsinghua University and is owned by and proprietary material of the Tsinghua University.
6
+
7
+ License Grant
8
+
9
+ Tsinghua University grants you a non-exclusive, non-transferable, free of charge right:
10
+
11
+ To download the Software and use it on computers owned, leased or otherwise controlled by you and/or your organisation;
12
+
13
+ To use the Software for the sole purpose of performing non-commercial scientific research, non-commercial education, or non-commercial artistic projects.
14
+
15
+ Any other use, in particular any use for commercial purposes, is prohibited. This includes, without limitation, incorporation in a commercial product, use in a commercial service, as training data for a commercial product, for commercial ergonomic analysis (e.g. product design, architectural design, etc.), or production of other artifacts for commercial purposes including, for example, web services, movies, television programs, mobile applications, or video games. The Software may not be used for pornographic purposes or to generate pornographic material whether commercial or not. This license also prohibits the use of the Software to train methods/algorithms/neural networks/etc. for commercial use of any kind. The Software may not be reproduced, modified and/or made available in any form to any third party without Tsinghua University’s prior written permission. By downloading the Software, you agree not to reverse engineer it.
16
+
17
+ Disclaimer of Representations and Warranties
18
+
19
+ You expressly acknowledge and agree that the Software results from basic research, is provided “AS IS”, may contain errors, and that any use of the Software is at your sole risk. TSINGHUA UNIVERSITY MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE SOFTWARE, NEITHER EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY LEGAL OR ACTUAL DEFECTS, WHETHER DISCOVERABLE OR NOT. Specifically, and not to limit the foregoing, Tsinghua University makes no representations or warranties (i) regarding the merchantability or fitness for a particular purpose of the Software, (ii) that the use of the Software will not infringe any patents, copyrights or other intellectual property rights of a third party, and (iii) that the use of the Software will not cause any damage of any kind to you or a third party.
20
+
21
+ Limitation of Liability
22
+
23
+ Under no circumstances shall Tsinghua University be liable for any incidental, special, indirect or consequential damages arising out of or relating to this license, including but not limited to, any lost profits, business interruption, loss of programs or other data, or all other commercial damages or losses, even if advised of the possibility thereof.
24
+
25
+ No Maintenance Services
26
+
27
+ You understand and agree that Tsinghua University is under no obligation to provide either maintenance services, update services, notices of latent defects, or corrections of defects with regard to the Software. Tsinghua University nevertheless reserves the right to update, modify, or discontinue the Software at any time.
28
+
29
+ Publication with the Software
30
+
31
+ You agree to cite the paper describing the software and algorithm as specified on the download website.
32
+
33
+ Media Projects with the Software
34
+
35
+ When using the Software in a media project please give credit to Tsinghua University. For example: the Software was used for performance capture courtesy of the Tsinghua University.
36
+
37
+ Commercial Licensing Opportunities
38
+
39
+ For commercial use and commercial license please contact: [email protected].
AnimatableGaussians/PREPROCESSED_DATASET.md ADDED
@@ -0,0 +1,48 @@
1
+ <div align="center">
2
+
3
+ # Preprocessed Dataset
4
+
5
+ </div>
6
+
7
+ ## AvatarReX Dataset
8
+
9
+ <div>
10
+ <table style="width:100%;border-spacing:0px;border-collapse:separate;margin-right:auto;margin-left:auto;font-size: large">
11
+ <tr>
12
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
13
+ Figure
14
+ </td>
15
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
16
+ <img width="350" src="assets/avatarrex_zzr.jpg"/>
17
+ </td>
18
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
19
+ <img width="350" src="assets/avatarrex_lbn1.jpg"/>
20
+ </td>
21
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
22
+ <img width="350" src="assets/avatarrex_lbn2.jpg"/>
23
+ </td>
24
+ </tr>
25
+ <tr>
26
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
27
+ Character
28
+ </td>
29
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
30
+ <a href="https://drive.google.com/file/d/1o5tIisBAhYxCl81SUZ4HGaEKyslCBD16/view?usp=sharing">avatarrex_zzr</a>
31
+ </td>
32
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
33
+ <a href="https://drive.google.com/file/d/1RDM3v5P4XF6Sp88EusDvokw-yHg6Je0C/view?usp=sharing">avatarrex_lbn1</a>
34
+ </td>
35
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
36
+ <a href="https://drive.google.com/file/d/1AuITI1KDHG4MbaNplnzmkcYDwii_Q419/view?usp=sharing">avatarrex_lbn2</a>
37
+ </td>
38
+ </tr>
39
+ </table>
40
+ </div>
41
+
42
+ ## ActorsHQ Dataset
43
+
44
+ Stay tuned.
45
+
46
+ ## THuman4.0 Dataset
47
+
48
+ Stay tuned.
AnimatableGaussians/PRETRAINED_MODEL.md ADDED
@@ -0,0 +1,48 @@
1
+ <div align="center">
2
+
3
+ # Pretrained Model
4
+
5
+ </div>
6
+
7
+ ## AvatarReX Dataset
8
+
9
+ <div>
10
+ <table style="width:100%;border-spacing:0px;border-collapse:separate;margin-right:auto;margin-left:auto;font-size: large">
11
+ <tr>
12
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
13
+ Figure
14
+ </td>
15
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
16
+ <img width="350" src="assets/avatarrex_zzr.jpg"/>
17
+ </td>
18
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
19
+ <img width="350" src="assets/avatarrex_lbn1.jpg"/>
20
+ </td>
21
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
22
+ <img width="350" src="assets/avatarrex_lbn2.jpg"/>
23
+ </td>
24
+ </tr>
25
+ <tr>
26
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
27
+ Character
28
+ </td>
29
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
30
+ <a href="https://drive.google.com/file/d/1lR_O9m0J_lwc8POA_UtCDM9LsTWOIu4m/view?usp=sharing">avatarrex_zzr</a>
31
+ </td>
32
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
33
+ <a href="https://drive.google.com/file/d/1P-s-RcJ5_Z7ZVSzjjl-xhPCExqN8td7S/view?usp=sharing">avatarrex_lbn1</a>
34
+ </td>
35
+ <td style="padding:20px;width:20%;vertical-align:middle;border:none" align="center">
36
+ <a href="https://drive.google.com/file/d/1KakiePoLpV3Wa0QGtnzrt8MAhZbNQi6n/view?usp=sharing">avatarrex_lbn2</a>
37
+ </td>
38
+ </tr>
39
+ </table>
40
+ </div>
41
+
42
+ ## ActorsHQ Dataset
43
+
44
+ Stay tuned.
45
+
46
+ ## THuman4.0 Dataset
47
+
48
+ Stay tuned.
AnimatableGaussians/README.md ADDED
@@ -0,0 +1,117 @@
1
+ News
2
+ - `05/22/2024` :loudspeaker: <font color='magenta'><b> An extension of Animatable Gaussians for human avatar relighting is available [here](https://animatable-gaussians.github.io/relight). Welcome to check it out!</b></font>
3
+ - `03/11/2024` The code has been released. Feel free to give it a try!
4
+ - `03/11/2024` [AvatarReX](AVATARREX_DATASET.md) dataset, a high-resolution multi-view video dataset for avatar modeling, has been released.
5
+ - `02/27/2024` Animatable Gaussians is accepted to CVPR 2024!
6
+
7
+ Todo
8
+ - [x] Release the code.
9
+ - [x] Release AvatarReX dataset.
10
+ - [ ] Release all the checkpoints and preprocessed dataset.
11
+
12
+ <div align="center">
13
+
14
+ # <b>Animatable Gaussians</b>: Learning Pose-dependent Gaussian Maps for High-fidelity Human Avatar Modeling
15
+
16
+ <h2>CVPR 2024</h2>
17
+
18
+ [Zhe Li](https://lizhe00.github.io/) <sup>1</sup>, [Zerong Zheng](https://zhengzerong.github.io/) <sup>2</sup>, [Lizhen Wang](https://lizhenwangt.github.io/) <sup>1</sup>, [Yebin Liu](https://www.liuyebin.com) <sup>1</sup>
19
+
20
+ <sup>1</sup>Tsinghua University <sup>2</sup>NNKosmos Technology
21
+
22
+ ### [Project Page](https://animatable-gaussians.github.io/) · [Paper](https://arxiv.org/pdf/2311.16096.pdf) · [Video](https://www.youtube.com/watch?v=kOmZxD0HxZI)
23
+
24
+ </div>
25
+
26
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/484e1263-06ed-409b-b9a1-790f5b514832
27
+
28
+ ***Abstract**: Modeling animatable human avatars from RGB videos is a long-standing and challenging problem. Recent works usually adopt MLP-based neural radiance fields (NeRF) to represent 3D humans, but it remains difficult for pure MLPs to regress pose-dependent garment details. To this end, we introduce Animatable Gaussians, a new avatar representation that leverages powerful 2D CNNs and 3D Gaussian splatting to create high-fidelity avatars. To associate 3D Gaussians with the animatable avatar, we learn a parametric template from the input videos, and then parameterize the template on two front & back canonical Gaussian maps where each pixel represents a 3D Gaussian. The learned template is adaptive to the wearing garments for modeling looser clothes like dresses. Such template-guided 2D parameterization enables us to employ a powerful StyleGAN-based CNN to learn the pose-dependent Gaussian maps for modeling detailed dynamic appearances. Furthermore, we introduce a pose projection strategy for better generalization given novel poses. Overall, our method can create lifelike avatars with dynamic, realistic and generalized appearances. Experiments show that our method outperforms other state-of-the-art approaches.*
29
+
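To make the map-based representation described above concrete, below is a minimal, hypothetical PyTorch sketch of the core idea: a pose-conditioned 2D CNN predicts front/back attribute maps, and one 3D Gaussian is read from every valid template pixel. It is an illustration only; the module, channel layout, and tensor names are assumptions for this sketch, not the actual networks in this repository.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

# Assumed per-pixel channel layout (illustrative only):
# 3 position offset + 3 log-scale + 4 rotation quaternion + 1 opacity + 3 color = 14
NUM_ATTR = 14

class PoseToGaussianMaps(nn.Module):
    """Toy stand-in for a pose-conditioned CNN that outputs Gaussian attribute maps."""
    def __init__(self, in_channels=6, hidden=64):
        super().__init__()
        self.net = nn.Sequential(
            nn.Conv2d(in_channels, hidden, 3, padding=1), nn.ReLU(),
            nn.Conv2d(hidden, NUM_ATTR, 3, padding=1),
        )

    def forward(self, pose_maps):
        # pose_maps: (2, in_channels, H, W) posed position/normal maps for the front & back views
        return self.net(pose_maps)  # (2, NUM_ATTR, H, W) attribute maps

def maps_to_gaussians(attr_maps, template_pos_maps, valid_mask):
    """Read one 3D Gaussian per valid pixel of the front/back canonical maps.

    attr_maps:          (2, NUM_ATTR, H, W) predicted attribute maps
    template_pos_maps:  (2, 3, H, W) canonical template position per pixel
    valid_mask:         (2, H, W) bool mask of pixels covered by the template
    """
    attrs = attr_maps.permute(0, 2, 3, 1)[valid_mask]           # (N, NUM_ATTR)
    base = template_pos_maps.permute(0, 2, 3, 1)[valid_mask]    # (N, 3)
    positions = base + attrs[:, 0:3]                 # pose-dependent offsets on the template
    scales = attrs[:, 3:6].exp()                     # positive scales
    rotations = F.normalize(attrs[:, 6:10], dim=-1)  # unit quaternions
    opacities = attrs[:, 10:11].sigmoid()
    colors = attrs[:, 11:14].sigmoid()
    return positions, scales, rotations, opacities, colors
```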
30
+ ## Demo Results
31
+ We show avatars animated by challenging motions from [AMASS](https://amass.is.tue.mpg.de/) dataset.
32
+
33
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/123b026a-3fac-473c-a263-c3dcdd2ecc4c
34
+ <details><summary>More results (click to expand)</summary>
35
+
36
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/9abfa02f-65ec-46b3-9690-ac26191a5a7e
37
+
38
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/c4f1e499-9bea-419c-916b-8d9ec4169ac3
39
+
40
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/47b08e6f-a1f2-4597-bb75-d85e784cd97c
41
+ </details>
42
+
43
+ # Installation
44
+ 0. Clone this repo.
45
+ ```
46
+ git clone https://github.com/lizhe00/AnimatableGaussians.git
47
+ # or
48
+ git clone [email protected]:lizhe00/AnimatableGaussians.git
49
+ ```
50
+ 1. Install environments.
51
+ ```
52
+ # install requirements
53
+ pip install -r requirements.txt
54
+
55
+ # install diff-gaussian-rasterization-depth-alpha
56
+ cd gaussians/diff_gaussian_rasterization_depth_alpha
57
+ python setup.py install
58
+ cd ../..
59
+
60
+ # install styleunet
61
+ cd network/styleunet
62
+ python setup.py install
63
+ cd ../..
64
+ ```
65
+ 2. Download the [SMPL-X](https://smpl-x.is.tue.mpg.de/download.php) model, and place the pkl files in ```./smpl_files/smplx``` (a quick loading check is sketched below).
66
+
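As an optional sanity check for step 2 (a minimal sketch, not part of the official setup), you can verify that the SMPL-X files are in place by loading the model once:

```python
import torch
import smplx  # same package as used in the dataset parsing example

# Expects the downloaded SMPL-X model files under ./smpl_files/smplx
smpl = smplx.SMPLX(model_path='./smpl_files/smplx', gender='neutral',
                   use_pca=False, flat_hand_mean=True, batch_size=1)
with torch.no_grad():
    out = smpl()  # forward pass with the default (rest) pose
print(out.vertices.shape)  # expected: torch.Size([1, 10475, 3]) for SMPL-X
```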
67
+ # Data Preparation
68
+ ## AvatarReX, ActorsHQ or THuman4.0 Dataset
69
+ 1. Download [AvatarReX](./AVATARREX_DATASET.md), [ActorsHQ](https://www.actors-hq.com/dataset), or [THuman4.0](https://github.com/ZhengZerong/THUman4.0-Dataset) datasets.
70
+ 2. Data preprocessing. We provide two options below. The first is recommended if you plan to use our pretrained models, because the renderer used in preprocessing may cause slight differences.
71
+ 1. (Recommended) Download our preprocessed files from [PREPROCESSED_DATASET.md](PREPROCESSED_DATASET.md), and unzip them to the root path of each character.
72
+ 2. Follow the instructions in [gen_data/GEN_DATA.md](gen_data/GEN_DATA.md#Preprocessing) to preprocess the dataset.
73
+
74
+ *Note for the ActorsHQ dataset: 1) **DATA PATH.** A subject from the ActorsHQ dataset may include more than one sequence, but we only use the first sequence, i.e., ```Sequence1```. The root path is ```ActorsHQ/Actor0*/Sequence1```. 2) **SMPL-X Registration.** We provide SMPL-X fittings for the ActorsHQ dataset. You can download them from [here](https://drive.google.com/file/d/1DVk3k-eNbVqVCkLhGJhD_e9ILLCwhspR/view?usp=sharing), and place `smpl_params.npz` at the corresponding root path of each subject.*
75
+
76
+ ## Customized Dataset
77
+ Please refer to [gen_data/GEN_DATA.md](gen_data/GEN_DATA.md) to run on your own data.
78
+
79
+ # Avatar Training
80
+ Taking `avatarrex_zzr` from the AvatarReX dataset as an example, run:
81
+ ```
82
+ python main_avatar.py -c configs/avatarrex_zzr/avatar.yaml --mode=train
83
+ ```
84
+ After training, the checkpoint will be saved in `./results/avatarrex_zzr/avatar`.
85
+
86
+ # Avatar Animation
87
+ 1. Download the pretrained checkpoint from [PRETRAINED_MODEL.md](./PRETRAINED_MODEL.md) and unzip it to `./results/avatarrex_zzr/avatar`, or train the network from scratch.
88
+ 2. Download [THuman4.0_POSE](https://drive.google.com/file/d/1pbToBV6klq6-dXCorwjjsmnINXZCG8n9/view?usp=sharing) or [AMASS](https://amass.is.tue.mpg.de/) dataset for acquiring driving pose sequences.
89
+ We list some awesome pose sequences from AMASS dataset in [configs/awesome_amass_poses.yaml](configs/awesome_amass_poses.yaml).
90
+ Specify the testing pose path in [configs/avatarrex_zzr/avatar.yaml#L57](configs/avatarrex_zzr/avatar.yaml#L57).
91
+ 3. Run:
92
+ ```
93
+ python main_avatar.py -c configs/avatarrex_zzr/avatar.yaml --mode=test
94
+ ```
95
+ You will see animation results like the one below in `./test_results/avatarrex_zzr/avatar`.
96
+
97
+ https://github.com/lizhe00/AnimatableGaussians/assets/61936670/5aad39d2-2adb-4b7b-ab90-dea46240344a
98
+
99
+ # Evaluation
100
+ We provide evaluation metrics and example codes of comparison with body-only avatars in [eval/comparison_body_only_avatars.py](eval/comparison_body_only_avatars.py).
101
+
102
+ # Acknowledgement
103
+ Our code is based on these wonderful repos:
104
+ - [3D Gaussian Splatting](https://github.com/graphdeco-inria/diff-gaussian-rasterization) and its [adapted version](https://github.com/ashawkey/diff-gaussian-rasterization)
105
+ - [StyleAvatar](https://github.com/LizhenWangT/StyleAvatar)
106
+
107
+ # Citation
108
+ If you find our code or data helpful to your research, please consider citing our paper.
109
+ ```bibtex
110
+ @inproceedings{li2024animatablegaussians,
111
+ title={Animatable Gaussians: Learning Pose-dependent Gaussian Maps for High-fidelity Human Avatar Modeling},
112
+ author={Li, Zhe and Zheng, Zerong and Wang, Lizhen and Liu, Yebin},
113
+ booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
114
+ year={2024}
115
+ }
116
+ ```
117
+
AnimatableGaussians/__pycache__/config.cpython-310.pyc ADDED
Binary file (1.82 kB).
 
AnimatableGaussians/assets/avatarrex.jpg ADDED
AnimatableGaussians/assets/avatarrex_dataset_demo.gif ADDED
Git LFS Details
  • SHA256: c0473f05f976dac2e4900102ff08a328c53a44a7b0f3c9cfacd80420fcb822e4
  • Pointer size: 132 Bytes
  • Size of remote file: 1.69 MB
AnimatableGaussians/assets/avatarrex_lbn1.jpg ADDED
AnimatableGaussians/assets/avatarrex_lbn2.jpg ADDED
AnimatableGaussians/assets/avatarrex_zzr.jpg ADDED
AnimatableGaussians/assets/ball.obj ADDED
@@ -0,0 +1,2214 @@
1
+ # Blender v2.74 (sub 0) OBJ File: ''
2
+ # www.blender.org
3
+ v 0.000000 0.500000 -0.000000
4
+ v 0.065300 0.495700 -0.000000
5
+ v 0.129400 0.483000 -0.000000
6
+ v 0.191300 0.461900 -0.000000
7
+ v 0.250000 0.433000 -0.000000
8
+ v 0.304400 0.396700 -0.000000
9
+ v 0.353600 0.353600 -0.000000
10
+ v 0.396700 0.304400 -0.000000
11
+ v 0.433000 0.250000 -0.000000
12
+ v 0.461900 0.191300 -0.000000
13
+ v 0.483000 0.129400 -0.000000
14
+ v 0.495700 0.065300 -0.000000
15
+ v 0.500000 0.000000 0.000000
16
+ v 0.495700 -0.065300 0.000000
17
+ v 0.483000 -0.129400 0.000000
18
+ v 0.461900 -0.191300 0.000000
19
+ v 0.433000 -0.250000 0.000000
20
+ v 0.396700 -0.304400 0.000000
21
+ v 0.353600 -0.353600 0.000000
22
+ v 0.304400 -0.396700 0.000000
23
+ v 0.250000 -0.433000 0.000000
24
+ v 0.191300 -0.461900 0.000000
25
+ v 0.129400 -0.483000 0.000000
26
+ v 0.065300 -0.495700 0.000000
27
+ v 0.000000 -0.500000 0.000000
28
+ v 0.063000 0.495700 0.016900
29
+ v 0.125000 0.483000 0.033500
30
+ v 0.184800 0.461900 0.049500
31
+ v 0.241500 0.433000 0.064700
32
+ v 0.294000 0.396700 0.078800
33
+ v 0.341500 0.353600 0.091500
34
+ v 0.383200 0.304400 0.102700
35
+ v 0.418300 0.250000 0.112100
36
+ v 0.446200 0.191300 0.119600
37
+ v 0.466500 0.129400 0.125000
38
+ v 0.478800 0.065300 0.128300
39
+ v 0.483000 0.000000 0.129400
40
+ v 0.478800 -0.065300 0.128300
41
+ v 0.466500 -0.129400 0.125000
42
+ v 0.446200 -0.191300 0.119600
43
+ v 0.418300 -0.250000 0.112100
44
+ v 0.383200 -0.304400 0.102700
45
+ v 0.341500 -0.353600 0.091500
46
+ v 0.294000 -0.396700 0.078800
47
+ v 0.241500 -0.433000 0.064700
48
+ v 0.184800 -0.461900 0.049500
49
+ v 0.125000 -0.483000 0.033500
50
+ v 0.063000 -0.495700 0.016900
51
+ v 0.056500 0.495700 0.032600
52
+ v 0.112100 0.483000 0.064700
53
+ v 0.165700 0.461900 0.095700
54
+ v 0.216500 0.433000 0.125000
55
+ v 0.263600 0.396700 0.152200
56
+ v 0.306200 0.353600 0.176800
57
+ v 0.343500 0.304400 0.198300
58
+ v 0.375000 0.250000 0.216500
59
+ v 0.400100 0.191300 0.231000
60
+ v 0.418300 0.129400 0.241500
61
+ v 0.429300 0.065300 0.247900
62
+ v 0.433000 0.000000 0.250000
63
+ v 0.429300 -0.065300 0.247900
64
+ v 0.418300 -0.129400 0.241500
65
+ v 0.400100 -0.191300 0.231000
66
+ v 0.375000 -0.250000 0.216500
67
+ v 0.343500 -0.304400 0.198300
68
+ v 0.306200 -0.353600 0.176800
69
+ v 0.263600 -0.396700 0.152200
70
+ v 0.216500 -0.433000 0.125000
71
+ v 0.165700 -0.461900 0.095700
72
+ v 0.112100 -0.483000 0.064700
73
+ v 0.056500 -0.495700 0.032600
74
+ v 0.046100 0.495700 0.046100
75
+ v 0.091500 0.483000 0.091500
76
+ v 0.135300 0.461900 0.135300
77
+ v 0.176800 0.433000 0.176800
78
+ v 0.215200 0.396700 0.215200
79
+ v 0.250000 0.353600 0.250000
80
+ v 0.280500 0.304400 0.280500
81
+ v 0.306200 0.250000 0.306200
82
+ v 0.326600 0.191300 0.326600
83
+ v 0.341500 0.129400 0.341500
84
+ v 0.350500 0.065300 0.350500
85
+ v 0.353600 0.000000 0.353600
86
+ v 0.350500 -0.065300 0.350500
87
+ v 0.341500 -0.129400 0.341500
88
+ v 0.326600 -0.191300 0.326600
89
+ v 0.306200 -0.250000 0.306200
90
+ v 0.280500 -0.304400 0.280500
91
+ v 0.250000 -0.353600 0.250000
92
+ v 0.215200 -0.396700 0.215200
93
+ v 0.176800 -0.433000 0.176800
94
+ v 0.135300 -0.461900 0.135300
95
+ v 0.091500 -0.483000 0.091500
96
+ v 0.046100 -0.495700 0.046100
97
+ v 0.032600 0.495700 0.056500
98
+ v 0.064700 0.483000 0.112100
99
+ v 0.095700 0.461900 0.165700
100
+ v 0.125000 0.433000 0.216500
101
+ v 0.152200 0.396700 0.263600
102
+ v 0.176800 0.353600 0.306200
103
+ v 0.198300 0.304400 0.343500
104
+ v 0.216500 0.250000 0.375000
105
+ v 0.231000 0.191300 0.400100
106
+ v 0.241500 0.129400 0.418300
107
+ v 0.247900 0.065300 0.429300
108
+ v 0.250000 0.000000 0.433000
109
+ v 0.247900 -0.065300 0.429300
110
+ v 0.241500 -0.129400 0.418300
111
+ v 0.231000 -0.191300 0.400100
112
+ v 0.216500 -0.250000 0.375000
113
+ v 0.198300 -0.304400 0.343500
114
+ v 0.176800 -0.353600 0.306200
115
+ v 0.152200 -0.396700 0.263600
116
+ v 0.125000 -0.433000 0.216500
117
+ v 0.095700 -0.461900 0.165700
118
+ v 0.064700 -0.483000 0.112100
119
+ v 0.032600 -0.495700 0.056500
120
+ v 0.016900 0.495700 0.063000
121
+ v 0.033500 0.483000 0.125000
122
+ v 0.049500 0.461900 0.184800
123
+ v 0.064700 0.433000 0.241500
124
+ v 0.078800 0.396700 0.294000
125
+ v 0.091500 0.353600 0.341500
126
+ v 0.102700 0.304400 0.383200
127
+ v 0.112100 0.250000 0.418300
128
+ v 0.119600 0.191300 0.446200
129
+ v 0.125000 0.129400 0.466500
130
+ v 0.128300 0.065300 0.478800
131
+ v 0.129400 0.000000 0.483000
132
+ v 0.128300 -0.065300 0.478800
133
+ v 0.125000 -0.129400 0.466500
134
+ v 0.119600 -0.191300 0.446200
135
+ v 0.112100 -0.250000 0.418300
136
+ v 0.102700 -0.304400 0.383200
137
+ v 0.091500 -0.353600 0.341500
138
+ v 0.078800 -0.396700 0.294000
139
+ v 0.064700 -0.433000 0.241500
140
+ v 0.049500 -0.461900 0.184800
141
+ v 0.033500 -0.483000 0.125000
142
+ v 0.016900 -0.495700 0.063000
143
+ v 0.000000 0.495700 0.065300
144
+ v 0.000000 0.483000 0.129400
145
+ v 0.000000 0.461900 0.191300
146
+ v 0.000000 0.433000 0.250000
147
+ v 0.000000 0.396700 0.304400
148
+ v 0.000000 0.353600 0.353600
149
+ v 0.000000 0.304400 0.396700
150
+ v 0.000000 0.250000 0.433000
151
+ v 0.000000 0.191300 0.461900
152
+ v 0.000000 0.129400 0.483000
153
+ v 0.000000 0.065300 0.495700
154
+ v 0.000000 0.000000 0.500000
155
+ v 0.000000 -0.065300 0.495700
156
+ v 0.000000 -0.129400 0.483000
157
+ v 0.000000 -0.191300 0.461900
158
+ v 0.000000 -0.250000 0.433000
159
+ v 0.000000 -0.304400 0.396700
160
+ v 0.000000 -0.353600 0.353600
161
+ v 0.000000 -0.396700 0.304400
162
+ v 0.000000 -0.433000 0.250000
163
+ v 0.000000 -0.461900 0.191300
164
+ v 0.000000 -0.483000 0.129400
165
+ v 0.000000 -0.495700 0.065300
166
+ v -0.016900 0.495700 0.063000
167
+ v -0.033500 0.483000 0.125000
168
+ v -0.049500 0.461900 0.184800
169
+ v -0.064700 0.433000 0.241500
170
+ v -0.078800 0.396700 0.294000
171
+ v -0.091500 0.353600 0.341500
172
+ v -0.102700 0.304400 0.383200
173
+ v -0.112100 0.250000 0.418300
174
+ v -0.119600 0.191300 0.446200
175
+ v -0.125000 0.129400 0.466500
176
+ v -0.128300 0.065300 0.478800
177
+ v -0.129400 0.000000 0.483000
178
+ v -0.128300 -0.065300 0.478800
179
+ v -0.125000 -0.129400 0.466500
180
+ v -0.119600 -0.191300 0.446200
181
+ v -0.112100 -0.250000 0.418300
182
+ v -0.102700 -0.304400 0.383200
183
+ v -0.091500 -0.353600 0.341500
184
+ v -0.078800 -0.396700 0.294000
185
+ v -0.064700 -0.433000 0.241500
186
+ v -0.049500 -0.461900 0.184800
187
+ v -0.033500 -0.483000 0.125000
188
+ v -0.016900 -0.495700 0.063000
189
+ v -0.032600 0.495700 0.056500
190
+ v -0.064700 0.483000 0.112100
191
+ v -0.095700 0.461900 0.165700
192
+ v -0.125000 0.433000 0.216500
193
+ v -0.152200 0.396700 0.263600
194
+ v -0.176800 0.353600 0.306200
195
+ v -0.198300 0.304400 0.343500
196
+ v -0.216500 0.250000 0.375000
197
+ v -0.231000 0.191300 0.400100
198
+ v -0.241500 0.129400 0.418300
199
+ v -0.247900 0.065300 0.429300
200
+ v -0.250000 0.000000 0.433000
201
+ v -0.247900 -0.065300 0.429300
202
+ v -0.241500 -0.129400 0.418300
203
+ v -0.231000 -0.191300 0.400100
204
+ v -0.216500 -0.250000 0.375000
205
+ v -0.198300 -0.304400 0.343500
206
+ v -0.176800 -0.353600 0.306200
207
+ v -0.152200 -0.396700 0.263600
208
+ v -0.125000 -0.433000 0.216500
209
+ v -0.095700 -0.461900 0.165700
210
+ v -0.064700 -0.483000 0.112100
211
+ v -0.032600 -0.495700 0.056500
212
+ v -0.046100 0.495700 0.046100
213
+ v -0.091500 0.483000 0.091500
214
+ v -0.135300 0.461900 0.135300
215
+ v -0.176800 0.433000 0.176800
216
+ v -0.215200 0.396700 0.215200
217
+ v -0.250000 0.353600 0.250000
218
+ v -0.280500 0.304400 0.280500
219
+ v -0.306200 0.250000 0.306200
220
+ v -0.326600 0.191300 0.326600
221
+ v -0.341500 0.129400 0.341500
222
+ v -0.350500 0.065300 0.350500
223
+ v -0.353600 0.000000 0.353600
224
+ v -0.350500 -0.065300 0.350500
225
+ v -0.341500 -0.129400 0.341500
226
+ v -0.326600 -0.191300 0.326600
227
+ v -0.306200 -0.250000 0.306200
228
+ v -0.280500 -0.304400 0.280500
229
+ v -0.250000 -0.353600 0.250000
230
+ v -0.215200 -0.396700 0.215200
231
+ v -0.176800 -0.433000 0.176800
232
+ v -0.135300 -0.461900 0.135300
233
+ v -0.091500 -0.483000 0.091500
234
+ v -0.046100 -0.495700 0.046100
235
+ v -0.056500 0.495700 0.032600
236
+ v -0.112100 0.483000 0.064700
237
+ v -0.165700 0.461900 0.095700
238
+ v -0.216500 0.433000 0.125000
239
+ v -0.263600 0.396700 0.152200
240
+ v -0.306200 0.353600 0.176800
241
+ v -0.343500 0.304400 0.198300
242
+ v -0.375000 0.250000 0.216500
243
+ v -0.400100 0.191300 0.231000
244
+ v -0.418300 0.129400 0.241500
245
+ v -0.429300 0.065300 0.247900
246
+ v -0.433000 0.000000 0.250000
247
+ v -0.429300 -0.065300 0.247900
248
+ v -0.418300 -0.129400 0.241500
249
+ v -0.400100 -0.191300 0.231000
250
+ v -0.375000 -0.250000 0.216500
251
+ v -0.343500 -0.304400 0.198300
252
+ v -0.306200 -0.353600 0.176800
253
+ v -0.263600 -0.396700 0.152200
254
+ v -0.216500 -0.433000 0.125000
255
+ v -0.165700 -0.461900 0.095700
256
+ v -0.112100 -0.483000 0.064700
257
+ v -0.056500 -0.495700 0.032600
258
+ v -0.063000 0.495700 0.016900
259
+ v -0.125000 0.483000 0.033500
260
+ v -0.184800 0.461900 0.049500
261
+ v -0.241500 0.433000 0.064700
262
+ v -0.294000 0.396700 0.078800
263
+ v -0.341500 0.353600 0.091500
264
+ v -0.383200 0.304400 0.102700
265
+ v -0.418300 0.250000 0.112100
266
+ v -0.446200 0.191300 0.119600
267
+ v -0.466500 0.129400 0.125000
268
+ v -0.478800 0.065300 0.128300
269
+ v -0.483000 0.000000 0.129400
270
+ v -0.478800 -0.065300 0.128300
271
+ v -0.466500 -0.129400 0.125000
272
+ v -0.446200 -0.191300 0.119600
273
+ v -0.418300 -0.250000 0.112100
274
+ v -0.383200 -0.304400 0.102700
275
+ v -0.341500 -0.353600 0.091500
276
+ v -0.294000 -0.396700 0.078800
277
+ v -0.241500 -0.433000 0.064700
278
+ v -0.184800 -0.461900 0.049500
279
+ v -0.125000 -0.483000 0.033500
280
+ v -0.063000 -0.495700 0.016900
281
+ v -0.065300 0.495700 -0.000000
282
+ v -0.129400 0.483000 -0.000000
283
+ v -0.191300 0.461900 -0.000000
284
+ v -0.250000 0.433000 -0.000000
285
+ v -0.304400 0.396700 -0.000000
286
+ v -0.353600 0.353600 -0.000000
287
+ v -0.396700 0.304400 -0.000000
288
+ v -0.433000 0.250000 -0.000000
289
+ v -0.461900 0.191300 -0.000000
290
+ v -0.483000 0.129400 -0.000000
291
+ v -0.495700 0.065300 -0.000000
292
+ v -0.500000 0.000000 0.000000
293
+ v -0.495700 -0.065300 0.000000
294
+ v -0.483000 -0.129400 0.000000
295
+ v -0.461900 -0.191300 0.000000
296
+ v -0.433000 -0.250000 0.000000
297
+ v -0.396700 -0.304400 0.000000
298
+ v -0.353600 -0.353600 0.000000
299
+ v -0.304400 -0.396700 0.000000
300
+ v -0.250000 -0.433000 0.000000
301
+ v -0.191300 -0.461900 0.000000
302
+ v -0.129400 -0.483000 0.000000
303
+ v -0.065300 -0.495700 0.000000
304
+ v -0.063000 0.495700 -0.016900
305
+ v -0.125000 0.483000 -0.033500
306
+ v -0.184800 0.461900 -0.049500
307
+ v -0.241500 0.433000 -0.064700
308
+ v -0.294000 0.396700 -0.078800
309
+ v -0.341500 0.353600 -0.091500
310
+ v -0.383200 0.304400 -0.102700
311
+ v -0.418300 0.250000 -0.112100
312
+ v -0.446200 0.191300 -0.119600
313
+ v -0.466500 0.129400 -0.125000
314
+ v -0.478800 0.065300 -0.128300
315
+ v -0.483000 -0.000000 -0.129400
316
+ v -0.478800 -0.065300 -0.128300
317
+ v -0.466500 -0.129400 -0.125000
318
+ v -0.446200 -0.191300 -0.119600
319
+ v -0.418300 -0.250000 -0.112100
320
+ v -0.383200 -0.304400 -0.102700
321
+ v -0.341500 -0.353600 -0.091500
322
+ v -0.294000 -0.396700 -0.078800
323
+ v -0.241500 -0.433000 -0.064700
324
+ v -0.184800 -0.461900 -0.049500
325
+ v -0.125000 -0.483000 -0.033500
326
+ v -0.063000 -0.495700 -0.016900
327
+ v -0.056500 0.495700 -0.032600
328
+ v -0.112100 0.483000 -0.064700
329
+ v -0.165700 0.461900 -0.095700
330
+ v -0.216500 0.433000 -0.125000
331
+ v -0.263600 0.396700 -0.152200
332
+ v -0.306200 0.353600 -0.176800
333
+ v -0.343500 0.304400 -0.198300
334
+ v -0.375000 0.250000 -0.216500
335
+ v -0.400100 0.191300 -0.231000
336
+ v -0.418300 0.129400 -0.241500
337
+ v -0.429300 0.065300 -0.247900
338
+ v -0.433000 -0.000000 -0.250000
339
+ v -0.429300 -0.065300 -0.247900
340
+ v -0.418300 -0.129400 -0.241500
341
+ v -0.400100 -0.191300 -0.231000
342
+ v -0.375000 -0.250000 -0.216500
343
+ v -0.343500 -0.304400 -0.198300
344
+ v -0.306200 -0.353600 -0.176800
345
+ v -0.263600 -0.396700 -0.152200
346
+ v -0.216500 -0.433000 -0.125000
347
+ v -0.165700 -0.461900 -0.095700
348
+ v -0.112100 -0.483000 -0.064700
349
+ v -0.056500 -0.495700 -0.032600
350
+ v -0.046100 0.495700 -0.046100
351
+ v -0.091500 0.483000 -0.091500
352
+ v -0.135300 0.461900 -0.135300
353
+ v -0.176800 0.433000 -0.176800
354
+ v -0.215200 0.396700 -0.215200
355
+ v -0.250000 0.353600 -0.250000
356
+ v -0.280500 0.304400 -0.280500
357
+ v -0.306200 0.250000 -0.306200
358
+ v -0.326600 0.191300 -0.326600
359
+ v -0.341500 0.129400 -0.341500
360
+ v -0.350500 0.065300 -0.350500
361
+ v -0.353600 -0.000000 -0.353600
362
+ v -0.350500 -0.065300 -0.350500
363
+ v -0.341500 -0.129400 -0.341500
364
+ v -0.326600 -0.191300 -0.326600
365
+ v -0.306200 -0.250000 -0.306200
366
+ v -0.280500 -0.304400 -0.280500
367
+ v -0.250000 -0.353600 -0.250000
368
+ v -0.215200 -0.396700 -0.215200
369
+ v -0.176800 -0.433000 -0.176800
370
+ v -0.135300 -0.461900 -0.135300
371
+ v -0.091500 -0.483000 -0.091500
372
+ v -0.046100 -0.495700 -0.046100
373
+ v -0.032600 0.495700 -0.056500
374
+ v -0.064700 0.483000 -0.112100
375
+ v -0.095700 0.461900 -0.165700
376
+ v -0.125000 0.433000 -0.216500
377
+ v -0.152200 0.396700 -0.263600
378
+ v -0.176800 0.353600 -0.306200
379
+ v -0.198300 0.304400 -0.343500
380
+ v -0.216500 0.250000 -0.375000
381
+ v -0.231000 0.191300 -0.400100
382
+ v -0.241500 0.129400 -0.418300
383
+ v -0.247900 0.065300 -0.429300
384
+ v -0.250000 -0.000000 -0.433000
385
+ v -0.247900 -0.065300 -0.429300
386
+ v -0.241500 -0.129400 -0.418300
387
+ v -0.231000 -0.191300 -0.400100
388
+ v -0.216500 -0.250000 -0.375000
389
+ v -0.198300 -0.304400 -0.343500
390
+ v -0.176800 -0.353600 -0.306200
391
+ v -0.152200 -0.396700 -0.263600
392
+ v -0.125000 -0.433000 -0.216500
393
+ v -0.095700 -0.461900 -0.165700
394
+ v -0.064700 -0.483000 -0.112100
395
+ v -0.032600 -0.495700 -0.056500
396
+ v -0.016900 0.495700 -0.063000
397
+ v -0.033500 0.483000 -0.125000
398
+ v -0.049500 0.461900 -0.184800
399
+ v -0.064700 0.433000 -0.241500
400
+ v -0.078800 0.396700 -0.294000
401
+ v -0.091500 0.353600 -0.341500
402
+ v -0.102700 0.304400 -0.383200
403
+ v -0.112100 0.250000 -0.418300
404
+ v -0.119600 0.191300 -0.446200
405
+ v -0.125000 0.129400 -0.466500
406
+ v -0.128300 0.065300 -0.478800
407
+ v -0.129400 -0.000000 -0.483000
408
+ v -0.128300 -0.065300 -0.478800
409
+ v -0.125000 -0.129400 -0.466500
410
+ v -0.119600 -0.191300 -0.446200
411
+ v -0.112100 -0.250000 -0.418300
412
+ v -0.102700 -0.304400 -0.383200
413
+ v -0.091500 -0.353600 -0.341500
414
+ v -0.078800 -0.396700 -0.294000
415
+ v -0.064700 -0.433000 -0.241500
416
+ v -0.049500 -0.461900 -0.184800
417
+ v -0.033500 -0.483000 -0.125000
418
+ v -0.016900 -0.495700 -0.063000
419
+ v 0.000000 0.495700 -0.065300
420
+ v 0.000000 0.483000 -0.129400
421
+ v 0.000000 0.461900 -0.191300
422
+ v 0.000000 0.433000 -0.250000
423
+ v 0.000000 0.396700 -0.304400
424
+ v 0.000000 0.353600 -0.353600
425
+ v 0.000000 0.304400 -0.396700
426
+ v 0.000000 0.250000 -0.433000
427
+ v 0.000000 0.191300 -0.461900
428
+ v 0.000000 0.129400 -0.483000
429
+ v 0.000000 0.065300 -0.495700
430
+ v 0.000000 -0.000000 -0.500000
431
+ v 0.000000 -0.065300 -0.495700
432
+ v 0.000000 -0.129400 -0.483000
433
+ v 0.000000 -0.191300 -0.461900
434
+ v 0.000000 -0.250000 -0.433000
435
+ v 0.000000 -0.304400 -0.396700
436
+ v 0.000000 -0.353600 -0.353600
437
+ v 0.000000 -0.396700 -0.304400
438
+ v 0.000000 -0.433000 -0.250000
439
+ v 0.000000 -0.461900 -0.191300
440
+ v 0.000000 -0.483000 -0.129400
441
+ v 0.000000 -0.495700 -0.065300
442
+ v 0.016900 0.495700 -0.063000
443
+ v 0.033500 0.483000 -0.125000
444
+ v 0.049500 0.461900 -0.184800
445
+ v 0.064700 0.433000 -0.241500
446
+ v 0.078800 0.396700 -0.294000
447
+ v 0.091500 0.353600 -0.341500
448
+ v 0.102700 0.304400 -0.383200
449
+ v 0.112100 0.250000 -0.418300
450
+ v 0.119600 0.191300 -0.446200
451
+ v 0.125000 0.129400 -0.466500
452
+ v 0.128300 0.065300 -0.478800
453
+ v 0.129400 -0.000000 -0.483000
454
+ v 0.128300 -0.065300 -0.478800
455
+ v 0.125000 -0.129400 -0.466500
456
+ v 0.119600 -0.191300 -0.446200
457
+ v 0.112100 -0.250000 -0.418300
458
+ v 0.102700 -0.304400 -0.383200
459
+ v 0.091500 -0.353600 -0.341500
460
+ v 0.078800 -0.396700 -0.294000
461
+ v 0.064700 -0.433000 -0.241500
462
+ v 0.049500 -0.461900 -0.184800
463
+ v 0.033500 -0.483000 -0.125000
464
+ v 0.016900 -0.495700 -0.063000
465
+ v 0.032600 0.495700 -0.056500
466
+ v 0.064700 0.483000 -0.112100
467
+ v 0.095700 0.461900 -0.165700
468
+ v 0.125000 0.433000 -0.216500
469
+ v 0.152200 0.396700 -0.263600
470
+ v 0.176800 0.353600 -0.306200
471
+ v 0.198300 0.304400 -0.343500
472
+ v 0.216500 0.250000 -0.375000
473
+ v 0.231000 0.191300 -0.400100
474
+ v 0.241500 0.129400 -0.418300
475
+ v 0.247900 0.065300 -0.429300
476
+ v 0.250000 -0.000000 -0.433000
477
+ v 0.247900 -0.065300 -0.429300
478
+ v 0.241500 -0.129400 -0.418300
479
+ v 0.231000 -0.191300 -0.400100
480
+ v 0.216500 -0.250000 -0.375000
481
+ v 0.198300 -0.304400 -0.343500
482
+ v 0.176800 -0.353600 -0.306200
483
+ v 0.152200 -0.396700 -0.263600
484
+ v 0.125000 -0.433000 -0.216500
485
+ v 0.095700 -0.461900 -0.165700
486
+ v 0.064700 -0.483000 -0.112100
487
+ v 0.032600 -0.495700 -0.056500
488
+ v 0.046100 0.495700 -0.046100
489
+ v 0.091500 0.483000 -0.091500
490
+ v 0.135300 0.461900 -0.135300
491
+ v 0.176800 0.433000 -0.176800
492
+ v 0.215200 0.396700 -0.215200
493
+ v 0.250000 0.353600 -0.250000
494
+ v 0.280500 0.304400 -0.280500
495
+ v 0.306200 0.250000 -0.306200
496
+ v 0.326600 0.191300 -0.326600
497
+ v 0.341500 0.129400 -0.341500
498
+ v 0.350500 0.065300 -0.350500
499
+ v 0.353600 -0.000000 -0.353600
500
+ v 0.350500 -0.065300 -0.350500
501
+ v 0.341500 -0.129400 -0.341500
502
+ v 0.326600 -0.191300 -0.326600
503
+ v 0.306200 -0.250000 -0.306200
504
+ v 0.280500 -0.304400 -0.280500
505
+ v 0.250000 -0.353600 -0.250000
506
+ v 0.215200 -0.396700 -0.215200
507
+ v 0.176800 -0.433000 -0.176800
508
+ v 0.135300 -0.461900 -0.135300
509
+ v 0.091500 -0.483000 -0.091500
510
+ v 0.046100 -0.495700 -0.046100
511
+ v 0.056500 0.495700 -0.032600
512
+ v 0.112100 0.483000 -0.064700
513
+ v 0.165700 0.461900 -0.095700
514
+ v 0.216500 0.433000 -0.125000
515
+ v 0.263600 0.396700 -0.152200
516
+ v 0.306200 0.353600 -0.176800
517
+ v 0.343500 0.304400 -0.198300
518
+ v 0.375000 0.250000 -0.216500
519
+ v 0.400100 0.191300 -0.231000
520
+ v 0.418300 0.129400 -0.241500
521
+ v 0.429300 0.065300 -0.247900
522
+ v 0.433000 -0.000000 -0.250000
523
+ v 0.429300 -0.065300 -0.247900
524
+ v 0.418300 -0.129400 -0.241500
525
+ v 0.400100 -0.191300 -0.231000
526
+ v 0.375000 -0.250000 -0.216500
527
+ v 0.343500 -0.304400 -0.198300
528
+ v 0.306200 -0.353600 -0.176800
529
+ v 0.263600 -0.396700 -0.152200
530
+ v 0.216500 -0.433000 -0.125000
531
+ v 0.165700 -0.461900 -0.095700
532
+ v 0.112100 -0.483000 -0.064700
533
+ v 0.056500 -0.495700 -0.032600
534
+ v 0.063000 0.495700 -0.016900
535
+ v 0.125000 0.483000 -0.033500
536
+ v 0.184800 0.461900 -0.049500
537
+ v 0.241500 0.433000 -0.064700
538
+ v 0.294000 0.396700 -0.078800
539
+ v 0.341500 0.353600 -0.091500
540
+ v 0.383200 0.304400 -0.102700
541
+ v 0.418300 0.250000 -0.112100
542
+ v 0.446200 0.191300 -0.119600
543
+ v 0.466500 0.129400 -0.125000
544
+ v 0.478800 0.065300 -0.128300
545
+ v 0.483000 -0.000000 -0.129400
546
+ v 0.478800 -0.065300 -0.128300
547
+ v 0.466500 -0.129400 -0.125000
548
+ v 0.446200 -0.191300 -0.119600
549
+ v 0.418300 -0.250000 -0.112100
550
+ v 0.383200 -0.304400 -0.102700
551
+ v 0.341500 -0.353600 -0.091500
552
+ v 0.294000 -0.396700 -0.078800
553
+ v 0.241500 -0.433000 -0.064700
554
+ v 0.184800 -0.461900 -0.049500
555
+ v 0.125000 -0.483000 -0.033500
556
+ v 0.063000 -0.495700 -0.016900
557
+ vn 0.000000 1.000000 0.000000
558
+ vn 0.130700 0.990800 0.035300
559
+ vn 0.135700 0.990700 -0.000100
560
+ vn 0.255100 0.964500 0.068300
561
+ vn 0.264100 0.964500 -0.000000
562
+ vn 0.374400 0.921800 0.100200
563
+ vn 0.387500 0.921800 0.000000
564
+ vn 0.486500 0.863900 0.130300
565
+ vn 0.503500 0.863900 0.000000
566
+ vn 0.591000 0.790900 0.158400
567
+ vn 0.611700 0.791000 0.000100
568
+ vn 0.685400 0.704500 0.183900
569
+ vn 0.709800 0.704400 -0.000100
570
+ vn 0.768100 0.606200 0.206100
571
+ vn 0.795600 0.605800 -0.000100
572
+ vn 0.838000 0.497300 0.224400
573
+ vn 0.867700 0.497100 0.000000
574
+ vn 0.893400 0.380400 0.239000
575
+ vn 0.924600 0.381000 0.000100
576
+ vn 0.933400 0.257400 0.249900
577
+ vn 0.966200 0.257600 0.000000
578
+ vn 0.957700 0.130000 0.256700
579
+ vn 0.991500 0.129700 -0.000100
580
+ vn 0.965900 0.000000 0.258800
581
+ vn 1.000000 0.000000 0.000000
582
+ vn 0.957700 -0.130100 0.256400
583
+ vn 0.991500 -0.129700 0.000100
584
+ vn 0.933400 -0.257300 0.250000
585
+ vn 0.966200 -0.257600 -0.000000
586
+ vn 0.893200 -0.380600 0.239300
587
+ vn 0.924600 -0.381000 -0.000100
588
+ vn 0.838000 -0.497300 0.224600
589
+ vn 0.867700 -0.497100 0.000000
590
+ vn 0.768200 -0.606100 0.206000
591
+ vn 0.795600 -0.605800 0.000100
592
+ vn 0.685400 -0.704600 0.183600
593
+ vn 0.709800 -0.704400 0.000100
594
+ vn 0.591000 -0.790900 0.158500
595
+ vn 0.611700 -0.791000 -0.000100
596
+ vn 0.486500 -0.863900 0.130400
597
+ vn 0.503500 -0.863900 -0.000000
598
+ vn 0.374300 -0.921800 0.100100
599
+ vn 0.387500 -0.921800 0.000000
600
+ vn 0.255100 -0.964500 0.068200
601
+ vn 0.264100 -0.964500 0.000000
602
+ vn 0.130800 -0.990800 0.035300
603
+ vn 0.135700 -0.990700 0.000100
604
+ vn 0.000000 -1.000000 0.000000
605
+ vn 0.117300 0.990800 0.067800
606
+ vn 0.228600 0.964500 0.132000
607
+ vn 0.335800 0.921800 0.193700
608
+ vn 0.436200 0.863900 0.251800
609
+ vn 0.529700 0.791000 0.306000
610
+ vn 0.614900 0.704200 0.354900
611
+ vn 0.688900 0.606000 0.397800
612
+ vn 0.750800 0.497900 0.433900
613
+ vn 0.800500 0.381000 0.462500
614
+ vn 0.836900 0.257300 0.483100
615
+ vn 0.858700 0.129600 0.495700
616
+ vn 0.866100 -0.000000 0.499900
617
+ vn 0.858600 -0.129500 0.496000
618
+ vn 0.836800 -0.257400 0.483200
619
+ vn 0.800700 -0.380800 0.462400
620
+ vn 0.751000 -0.497900 0.433500
621
+ vn 0.688900 -0.605900 0.397800
622
+ vn 0.614700 -0.704200 0.355100
623
+ vn 0.529800 -0.790900 0.306000
624
+ vn 0.436200 -0.863900 0.251700
625
+ vn 0.335700 -0.921800 0.193800
626
+ vn 0.228600 -0.964500 0.132000
627
+ vn 0.117300 -0.990800 0.067800
628
+ vn 0.095700 0.990800 0.095800
629
+ vn 0.186600 0.964500 0.186700
630
+ vn 0.274000 0.921800 0.274100
631
+ vn 0.356300 0.863800 0.356200
632
+ vn 0.432700 0.790900 0.432600
633
+ vn 0.501700 0.704500 0.501900
634
+ vn 0.562400 0.606000 0.562500
635
+ vn 0.613700 0.497200 0.613300
636
+ vn 0.653900 0.380700 0.653800
637
+ vn 0.683200 0.257600 0.683300
638
+ vn 0.700900 0.130300 0.701200
639
+ vn 0.707100 0.000000 0.707100
640
+ vn 0.701200 -0.130300 0.700900
641
+ vn 0.683300 -0.257600 0.683200
642
+ vn 0.653800 -0.380700 0.653900
643
+ vn 0.613300 -0.497200 0.613700
644
+ vn 0.562500 -0.606000 0.562400
645
+ vn 0.501900 -0.704500 0.501700
646
+ vn 0.432600 -0.790900 0.432700
647
+ vn 0.356200 -0.863800 0.356300
648
+ vn 0.274100 -0.921800 0.274000
649
+ vn 0.186700 -0.964500 0.186600
650
+ vn 0.095800 -0.990800 0.095700
651
+ vn 0.067800 0.990800 0.117300
652
+ vn 0.132000 0.964500 0.228600
653
+ vn 0.193800 0.921800 0.335700
654
+ vn 0.251700 0.863900 0.436200
655
+ vn 0.306000 0.790900 0.529800
656
+ vn 0.355100 0.704200 0.614700
657
+ vn 0.397800 0.605900 0.688900
658
+ vn 0.433500 0.497900 0.751000
659
+ vn 0.462400 0.380800 0.800700
660
+ vn 0.483200 0.257400 0.836800
661
+ vn 0.496000 0.129500 0.858600
662
+ vn 0.499900 0.000000 0.866100
663
+ vn 0.495700 -0.129600 0.858700
664
+ vn 0.483100 -0.257300 0.836900
665
+ vn 0.462500 -0.381000 0.800500
666
+ vn 0.433900 -0.497900 0.750800
667
+ vn 0.397800 -0.606000 0.688900
668
+ vn 0.354900 -0.704200 0.614900
669
+ vn 0.306000 -0.791000 0.529700
670
+ vn 0.251800 -0.863900 0.436200
671
+ vn 0.193700 -0.921800 0.335800
672
+ vn 0.132000 -0.964500 0.228600
673
+ vn 0.067800 -0.990800 0.117300
674
+ vn 0.035300 0.990800 0.130800
675
+ vn 0.068200 0.964500 0.255100
676
+ vn 0.100100 0.921800 0.374300
677
+ vn 0.130400 0.863900 0.486500
678
+ vn 0.158500 0.790900 0.591000
679
+ vn 0.183600 0.704600 0.685400
680
+ vn 0.206000 0.606100 0.768200
681
+ vn 0.224600 0.497300 0.838000
682
+ vn 0.239300 0.380600 0.893200
683
+ vn 0.250000 0.257300 0.933400
684
+ vn 0.256400 0.130100 0.957700
685
+ vn 0.258800 -0.000000 0.965900
686
+ vn 0.256700 -0.130000 0.957700
687
+ vn 0.249900 -0.257400 0.933400
688
+ vn 0.239000 -0.380400 0.893400
689
+ vn 0.224400 -0.497300 0.838000
690
+ vn 0.206100 -0.606200 0.768100
691
+ vn 0.183900 -0.704500 0.685400
692
+ vn 0.158400 -0.790900 0.591000
693
+ vn 0.130300 -0.863900 0.486500
694
+ vn 0.100200 -0.921800 0.374400
695
+ vn 0.068200 -0.964500 0.255100
696
+ vn 0.035300 -0.990800 0.130700
697
+ vn 0.000100 0.990700 0.135700
698
+ vn 0.000000 0.964500 0.264100
699
+ vn 0.000000 0.921800 0.387500
700
+ vn -0.000000 0.863900 0.503500
701
+ vn -0.000100 0.791000 0.611700
702
+ vn 0.000100 0.704400 0.709800
703
+ vn 0.000100 0.605800 0.795600
704
+ vn 0.000000 0.497100 0.867700
705
+ vn -0.000100 0.381000 0.924600
706
+ vn -0.000000 0.257600 0.966200
707
+ vn 0.000100 0.129700 0.991500
708
+ vn 0.000000 0.000000 1.000000
709
+ vn -0.000100 -0.129700 0.991500
710
+ vn 0.000000 -0.257600 0.966200
711
+ vn 0.000100 -0.381000 0.924600
712
+ vn 0.000000 -0.497100 0.867700
713
+ vn -0.000100 -0.605800 0.795600
714
+ vn -0.000100 -0.704400 0.709800
715
+ vn 0.000100 -0.791000 0.611700
716
+ vn 0.000000 -0.863900 0.503500
717
+ vn 0.000000 -0.921800 0.387500
718
+ vn -0.000000 -0.964500 0.264100
719
+ vn -0.000100 -0.990700 0.135700
720
+ vn -0.035300 0.990800 0.130700
721
+ vn -0.068200 0.964500 0.255100
722
+ vn -0.100200 0.921800 0.374400
723
+ vn -0.130300 0.863900 0.486500
724
+ vn -0.158400 0.790900 0.591000
725
+ vn -0.183900 0.704500 0.685400
726
+ vn -0.206100 0.606200 0.768100
727
+ vn -0.224400 0.497300 0.838000
728
+ vn -0.239000 0.380400 0.893400
729
+ vn -0.249900 0.257400 0.933400
730
+ vn -0.256700 0.130000 0.957700
731
+ vn -0.258800 0.000000 0.965900
732
+ vn -0.256400 -0.130100 0.957700
733
+ vn -0.250000 -0.257300 0.933400
734
+ vn -0.239300 -0.380600 0.893200
735
+ vn -0.224600 -0.497300 0.838000
736
+ vn -0.206000 -0.606100 0.768200
737
+ vn -0.183600 -0.704600 0.685400
738
+ vn -0.158500 -0.790900 0.591000
739
+ vn -0.130400 -0.863900 0.486500
740
+ vn -0.100100 -0.921800 0.374300
741
+ vn -0.068200 -0.964500 0.255100
742
+ vn -0.035300 -0.990800 0.130800
743
+ vn -0.067800 0.990800 0.117300
744
+ vn -0.132000 0.964500 0.228600
745
+ vn -0.193700 0.921800 0.335800
746
+ vn -0.251800 0.863900 0.436200
747
+ vn -0.306000 0.791000 0.529700
748
+ vn -0.354900 0.704200 0.614900
749
+ vn -0.397800 0.606000 0.688900
750
+ vn -0.433900 0.497900 0.750800
751
+ vn -0.462500 0.381000 0.800500
752
+ vn -0.483100 0.257300 0.836900
753
+ vn -0.495700 0.129600 0.858700
754
+ vn -0.499900 -0.000000 0.866100
755
+ vn -0.496000 -0.129500 0.858600
756
+ vn -0.483200 -0.257400 0.836800
757
+ vn -0.462400 -0.380800 0.800700
758
+ vn -0.433500 -0.497900 0.751000
759
+ vn -0.397800 -0.605900 0.688900
760
+ vn -0.355100 -0.704200 0.614700
761
+ vn -0.306000 -0.790900 0.529800
762
+ vn -0.251700 -0.863900 0.436200
763
+ vn -0.193800 -0.921800 0.335700
764
+ vn -0.132000 -0.964500 0.228600
765
+ vn -0.067800 -0.990800 0.117300
766
+ vn -0.095800 0.990800 0.095700
767
+ vn -0.186700 0.964500 0.186600
768
+ vn -0.274100 0.921800 0.274000
769
+ vn -0.356200 0.863800 0.356300
770
+ vn -0.432600 0.790900 0.432700
771
+ vn -0.501900 0.704500 0.501700
772
+ vn -0.562500 0.606000 0.562400
773
+ vn -0.613300 0.497200 0.613700
774
+ vn -0.653800 0.380700 0.653900
775
+ vn -0.683300 0.257600 0.683200
776
+ vn -0.701200 0.130300 0.700900
777
+ vn -0.707100 0.000000 0.707100
778
+ vn -0.700900 -0.130300 0.701200
779
+ vn -0.683200 -0.257600 0.683300
780
+ vn -0.653900 -0.380700 0.653800
781
+ vn -0.613700 -0.497200 0.613300
782
+ vn -0.562400 -0.606000 0.562500
783
+ vn -0.501700 -0.704500 0.501900
784
+ vn -0.432700 -0.790900 0.432600
785
+ vn -0.356300 -0.863800 0.356200
786
+ vn -0.274000 -0.921800 0.274100
787
+ vn -0.186600 -0.964500 0.186700
788
+ vn -0.095700 -0.990800 0.095800
789
+ vn -0.117300 0.990800 0.067800
790
+ vn -0.228600 0.964500 0.132000
791
+ vn -0.335700 0.921800 0.193800
792
+ vn -0.436200 0.863900 0.251700
793
+ vn -0.529800 0.790900 0.306000
794
+ vn -0.614700 0.704200 0.355100
795
+ vn -0.688900 0.605900 0.397800
796
+ vn -0.751000 0.497900 0.433500
797
+ vn -0.800700 0.380800 0.462400
798
+ vn -0.836800 0.257400 0.483200
799
+ vn -0.858600 0.129500 0.496000
800
+ vn -0.866100 0.000000 0.499900
801
+ vn -0.858700 -0.129600 0.495700
802
+ vn -0.836900 -0.257300 0.483100
803
+ vn -0.800500 -0.381000 0.462500
804
+ vn -0.750800 -0.497900 0.433900
805
+ vn -0.688900 -0.606000 0.397800
806
+ vn -0.614900 -0.704200 0.354900
807
+ vn -0.529700 -0.791000 0.306000
808
+ vn -0.436200 -0.863900 0.251800
809
+ vn -0.335800 -0.921800 0.193700
810
+ vn -0.228600 -0.964500 0.132000
811
+ vn -0.117300 -0.990800 0.067800
812
+ vn -0.130800 0.990800 0.035300
813
+ vn -0.255100 0.964500 0.068200
814
+ vn -0.374300 0.921800 0.100100
815
+ vn -0.486500 0.863900 0.130400
816
+ vn -0.591000 0.790900 0.158500
817
+ vn -0.685400 0.704600 0.183600
818
+ vn -0.768200 0.606100 0.206000
819
+ vn -0.838000 0.497300 0.224600
820
+ vn -0.893200 0.380600 0.239300
821
+ vn -0.933400 0.257300 0.250000
822
+ vn -0.957700 0.130100 0.256400
823
+ vn -0.965900 -0.000000 0.258800
824
+ vn -0.957700 -0.130000 0.256700
825
+ vn -0.933400 -0.257400 0.249900
826
+ vn -0.893400 -0.380400 0.239000
827
+ vn -0.838000 -0.497300 0.224400
828
+ vn -0.768100 -0.606200 0.206100
829
+ vn -0.685400 -0.704500 0.183900
830
+ vn -0.591000 -0.790900 0.158400
831
+ vn -0.486500 -0.863900 0.130300
832
+ vn -0.374400 -0.921800 0.100200
833
+ vn -0.255100 -0.964500 0.068300
834
+ vn -0.130700 -0.990800 0.035300
835
+ vn -0.135700 0.990700 0.000100
836
+ vn -0.264100 0.964500 0.000000
837
+ vn -0.387500 0.921800 0.000000
838
+ vn -0.503500 0.863900 -0.000000
839
+ vn -0.611700 0.791000 -0.000100
840
+ vn -0.709800 0.704400 0.000100
841
+ vn -0.795600 0.605800 0.000100
842
+ vn -0.867700 0.497100 0.000000
843
+ vn -0.924600 0.381000 -0.000100
844
+ vn -0.966200 0.257600 -0.000000
845
+ vn -0.991500 0.129700 0.000100
846
+ vn -1.000000 0.000000 0.000000
847
+ vn -0.991500 -0.129700 -0.000100
848
+ vn -0.966200 -0.257600 0.000000
849
+ vn -0.924600 -0.381000 0.000100
850
+ vn -0.867700 -0.497100 0.000000
851
+ vn -0.795600 -0.605800 -0.000100
852
+ vn -0.709800 -0.704400 -0.000100
853
+ vn -0.611700 -0.791000 0.000100
854
+ vn -0.503500 -0.863900 0.000000
855
+ vn -0.387500 -0.921800 0.000000
856
+ vn -0.264100 -0.964500 -0.000000
857
+ vn -0.135700 -0.990700 -0.000100
858
+ vn -0.130700 0.990800 -0.035300
859
+ vn -0.255100 0.964500 -0.068300
860
+ vn -0.374400 0.921800 -0.100200
861
+ vn -0.486500 0.863900 -0.130300
862
+ vn -0.591000 0.790900 -0.158400
863
+ vn -0.685400 0.704500 -0.183900
864
+ vn -0.768100 0.606200 -0.206100
865
+ vn -0.838000 0.497300 -0.224400
866
+ vn -0.893400 0.380400 -0.239000
867
+ vn -0.933400 0.257400 -0.249900
868
+ vn -0.957700 0.130000 -0.256700
869
+ vn -0.965900 0.000000 -0.258800
870
+ vn -0.957700 -0.130100 -0.256400
871
+ vn -0.933400 -0.257300 -0.250000
872
+ vn -0.893200 -0.380600 -0.239300
873
+ vn -0.838000 -0.497300 -0.224600
874
+ vn -0.768200 -0.606100 -0.206000
875
+ vn -0.685400 -0.704600 -0.183600
876
+ vn -0.591000 -0.790900 -0.158500
877
+ vn -0.486500 -0.863900 -0.130400
878
+ vn -0.374300 -0.921800 -0.100100
879
+ vn -0.255100 -0.964500 -0.068200
880
+ vn -0.130800 -0.990800 -0.035300
881
+ vn -0.117300 0.990800 -0.067800
882
+ vn -0.228600 0.964500 -0.132000
883
+ vn -0.335800 0.921800 -0.193700
884
+ vn -0.436200 0.863900 -0.251800
885
+ vn -0.529700 0.791000 -0.306000
886
+ vn -0.614900 0.704200 -0.354900
887
+ vn -0.688900 0.606000 -0.397800
888
+ vn -0.750800 0.497900 -0.433900
889
+ vn -0.800500 0.381000 -0.462500
890
+ vn -0.836900 0.257300 -0.483100
891
+ vn -0.858700 0.129600 -0.495700
892
+ vn -0.866100 -0.000000 -0.499900
893
+ vn -0.858600 -0.129500 -0.496000
894
+ vn -0.836800 -0.257400 -0.483200
895
+ vn -0.800700 -0.380800 -0.462400
896
+ vn -0.751000 -0.497900 -0.433500
897
+ vn -0.688900 -0.605900 -0.397800
898
+ vn -0.614700 -0.704200 -0.355100
899
+ vn -0.529800 -0.790900 -0.306000
900
+ vn -0.436200 -0.863900 -0.251700
901
+ vn -0.335700 -0.921800 -0.193800
902
+ vn -0.228600 -0.964500 -0.132000
903
+ vn -0.117300 -0.990800 -0.067800
904
+ vn -0.095700 0.990800 -0.095800
905
+ vn -0.186600 0.964500 -0.186700
906
+ vn -0.274000 0.921800 -0.274100
907
+ vn -0.356300 0.863800 -0.356200
908
+ vn -0.432700 0.790900 -0.432600
909
+ vn -0.501700 0.704500 -0.501900
910
+ vn -0.562400 0.606000 -0.562500
911
+ vn -0.613700 0.497200 -0.613300
912
+ vn -0.653900 0.380700 -0.653800
913
+ vn -0.683200 0.257600 -0.683300
914
+ vn -0.700900 0.130300 -0.701200
915
+ vn -0.707100 0.000000 -0.707100
916
+ vn -0.701200 -0.130300 -0.700900
917
+ vn -0.683300 -0.257600 -0.683200
918
+ vn -0.653800 -0.380700 -0.653900
919
+ vn -0.613300 -0.497200 -0.613700
920
+ vn -0.562500 -0.606000 -0.562400
921
+ vn -0.501900 -0.704500 -0.501700
922
+ vn -0.432600 -0.790900 -0.432700
923
+ vn -0.356200 -0.863800 -0.356300
924
+ vn -0.274100 -0.921800 -0.274000
925
+ vn -0.186700 -0.964500 -0.186600
926
+ vn -0.095800 -0.990800 -0.095700
927
+ vn -0.067800 0.990800 -0.117300
928
+ vn -0.132000 0.964500 -0.228600
929
+ vn -0.193800 0.921800 -0.335700
930
+ vn -0.251700 0.863900 -0.436200
931
+ vn -0.306000 0.790900 -0.529800
932
+ vn -0.355100 0.704200 -0.614700
933
+ vn -0.397800 0.605900 -0.688900
934
+ vn -0.433500 0.497900 -0.751000
935
+ vn -0.462400 0.380800 -0.800700
936
+ vn -0.483200 0.257400 -0.836800
937
+ vn -0.496000 0.129500 -0.858600
938
+ vn -0.499900 0.000000 -0.866100
939
+ vn -0.495700 -0.129600 -0.858700
940
+ vn -0.483100 -0.257300 -0.836900
941
+ vn -0.462500 -0.381000 -0.800500
942
+ vn -0.433900 -0.497900 -0.750800
943
+ vn -0.397800 -0.606000 -0.688900
944
+ vn -0.354900 -0.704200 -0.614900
945
+ vn -0.306000 -0.791000 -0.529700
946
+ vn -0.251800 -0.863900 -0.436200
947
+ vn -0.193700 -0.921800 -0.335800
948
+ vn -0.132000 -0.964500 -0.228600
949
+ vn -0.067800 -0.990800 -0.117300
950
+ vn -0.035300 0.990800 -0.130800
951
+ vn -0.068200 0.964500 -0.255100
952
+ vn -0.100100 0.921800 -0.374300
953
+ vn -0.130400 0.863900 -0.486500
954
+ vn -0.158500 0.790900 -0.591000
955
+ vn -0.183600 0.704600 -0.685400
956
+ vn -0.206000 0.606100 -0.768200
957
+ vn -0.224600 0.497300 -0.838000
958
+ vn -0.239300 0.380600 -0.893200
959
+ vn -0.250000 0.257300 -0.933400
960
+ vn -0.256400 0.130100 -0.957700
961
+ vn -0.258800 -0.000000 -0.965900
962
+ vn -0.256700 -0.130000 -0.957700
963
+ vn -0.249900 -0.257400 -0.933400
964
+ vn -0.239000 -0.380400 -0.893400
965
+ vn -0.224400 -0.497300 -0.838000
966
+ vn -0.206100 -0.606200 -0.768100
967
+ vn -0.183900 -0.704500 -0.685400
968
+ vn -0.158400 -0.790900 -0.591000
969
+ vn -0.130300 -0.863900 -0.486500
970
+ vn -0.100200 -0.921800 -0.374400
971
+ vn -0.068200 -0.964500 -0.255100
972
+ vn -0.035300 -0.990800 -0.130700
973
+ vn -0.000100 0.990700 -0.135700
974
+ vn -0.000000 0.964500 -0.264100
975
+ vn 0.000000 0.921800 -0.387500
976
+ vn 0.000000 0.863900 -0.503500
977
+ vn 0.000100 0.791000 -0.611700
978
+ vn -0.000100 0.704400 -0.709800
979
+ vn -0.000100 0.605800 -0.795600
980
+ vn 0.000000 0.497100 -0.867700
981
+ vn 0.000100 0.381000 -0.924600
982
+ vn 0.000000 0.257600 -0.966200
983
+ vn -0.000100 0.129700 -0.991500
984
+ vn 0.000000 0.000000 -1.000000
985
+ vn 0.000100 -0.129700 -0.991500
986
+ vn -0.000000 -0.257600 -0.966200
987
+ vn -0.000100 -0.381000 -0.924600
988
+ vn 0.000000 -0.497100 -0.867700
989
+ vn 0.000100 -0.605800 -0.795600
990
+ vn 0.000100 -0.704400 -0.709800
991
+ vn -0.000100 -0.791000 -0.611700
992
+ vn -0.000000 -0.863900 -0.503500
993
+ vn 0.000000 -0.921800 -0.387500
994
+ vn 0.000000 -0.964500 -0.264100
995
+ vn 0.000100 -0.990700 -0.135700
996
+ vn 0.035300 0.990800 -0.130700
997
+ vn 0.068200 0.964500 -0.255100
998
+ vn 0.100200 0.921800 -0.374400
999
+ vn 0.130300 0.863900 -0.486500
1000
+ vn 0.158400 0.790900 -0.591000
1001
+ vn 0.183900 0.704500 -0.685400
1002
+ vn 0.206100 0.606200 -0.768100
1003
+ vn 0.224400 0.497300 -0.838000
1004
+ vn 0.239000 0.380400 -0.893400
1005
+ vn 0.249900 0.257400 -0.933400
1006
+ vn 0.256700 0.130000 -0.957700
1007
+ vn 0.258800 0.000000 -0.965900
1008
+ vn 0.256400 -0.130100 -0.957700
1009
+ vn 0.250000 -0.257300 -0.933400
1010
+ vn 0.239300 -0.380600 -0.893200
1011
+ vn 0.224600 -0.497300 -0.838000
1012
+ vn 0.206000 -0.606100 -0.768200
1013
+ vn 0.183600 -0.704600 -0.685400
1014
+ vn 0.158500 -0.790900 -0.591000
1015
+ vn 0.130400 -0.863900 -0.486500
1016
+ vn 0.100100 -0.921800 -0.374300
1017
+ vn 0.068200 -0.964500 -0.255100
1018
+ vn 0.035300 -0.990800 -0.130800
1019
+ vn 0.067800 0.990800 -0.117300
1020
+ vn 0.132000 0.964500 -0.228600
1021
+ vn 0.193700 0.921800 -0.335800
1022
+ vn 0.251800 0.863900 -0.436200
1023
+ vn 0.306000 0.791000 -0.529700
1024
+ vn 0.354900 0.704200 -0.614900
1025
+ vn 0.397800 0.606000 -0.688900
1026
+ vn 0.433900 0.497900 -0.750800
1027
+ vn 0.462500 0.381000 -0.800500
1028
+ vn 0.483100 0.257300 -0.836900
1029
+ vn 0.495700 0.129600 -0.858700
1030
+ vn 0.499900 -0.000000 -0.866100
1031
+ vn 0.496000 -0.129500 -0.858600
1032
+ vn 0.483200 -0.257400 -0.836800
1033
+ vn 0.462400 -0.380800 -0.800700
1034
+ vn 0.433500 -0.497900 -0.751000
1035
+ vn 0.397800 -0.605900 -0.688900
1036
+ vn 0.355100 -0.704200 -0.614700
1037
+ vn 0.306000 -0.790900 -0.529800
1038
+ vn 0.251700 -0.863900 -0.436200
1039
+ vn 0.193800 -0.921800 -0.335700
1040
+ vn 0.132000 -0.964500 -0.228600
1041
+ vn 0.067800 -0.990800 -0.117300
1042
+ vn 0.095800 0.990800 -0.095700
1043
+ vn 0.186700 0.964500 -0.186600
1044
+ vn 0.274100 0.921800 -0.274000
1045
+ vn 0.356200 0.863800 -0.356300
1046
+ vn 0.432600 0.790900 -0.432700
1047
+ vn 0.501900 0.704500 -0.501700
1048
+ vn 0.562500 0.606000 -0.562400
1049
+ vn 0.613300 0.497200 -0.613700
1050
+ vn 0.653800 0.380700 -0.653900
1051
+ vn 0.683300 0.257600 -0.683200
1052
+ vn 0.701200 0.130300 -0.700900
1053
+ vn 0.707100 0.000000 -0.707100
1054
+ vn 0.700900 -0.130300 -0.701200
1055
+ vn 0.683200 -0.257600 -0.683300
1056
+ vn 0.653900 -0.380700 -0.653800
1057
+ vn 0.613700 -0.497200 -0.613300
1058
+ vn 0.562400 -0.606000 -0.562500
1059
+ vn 0.501700 -0.704500 -0.501900
1060
+ vn 0.432700 -0.790900 -0.432600
1061
+ vn 0.356300 -0.863800 -0.356200
1062
+ vn 0.274000 -0.921800 -0.274100
1063
+ vn 0.186600 -0.964500 -0.186700
1064
+ vn 0.095700 -0.990800 -0.095800
1065
+ vn 0.117300 0.990800 -0.067800
1066
+ vn 0.228600 0.964500 -0.132000
1067
+ vn 0.335700 0.921800 -0.193800
1068
+ vn 0.436200 0.863900 -0.251700
1069
+ vn 0.529800 0.790900 -0.306000
1070
+ vn 0.614700 0.704200 -0.355100
1071
+ vn 0.688900 0.605900 -0.397800
1072
+ vn 0.751000 0.497900 -0.433500
1073
+ vn 0.800700 0.380800 -0.462400
1074
+ vn 0.836800 0.257400 -0.483200
1075
+ vn 0.858600 0.129500 -0.496000
1076
+ vn 0.866100 0.000000 -0.499900
1077
+ vn 0.858700 -0.129600 -0.495700
1078
+ vn 0.836900 -0.257300 -0.483100
1079
+ vn 0.800500 -0.381000 -0.462500
1080
+ vn 0.750800 -0.497900 -0.433900
1081
+ vn 0.688900 -0.606000 -0.397800
1082
+ vn 0.614900 -0.704200 -0.354900
1083
+ vn 0.529700 -0.791000 -0.306000
1084
+ vn 0.436200 -0.863900 -0.251800
1085
+ vn 0.335800 -0.921800 -0.193700
1086
+ vn 0.228600 -0.964500 -0.132000
1087
+ vn 0.117300 -0.990800 -0.067800
1088
+ vn 0.130800 0.990800 -0.035300
1089
+ vn 0.255100 0.964500 -0.068200
1090
+ vn 0.374300 0.921800 -0.100100
1091
+ vn 0.486500 0.863900 -0.130400
1092
+ vn 0.591000 0.790900 -0.158500
1093
+ vn 0.685400 0.704600 -0.183600
1094
+ vn 0.768200 0.606100 -0.206000
1095
+ vn 0.838000 0.497300 -0.224600
1096
+ vn 0.893200 0.380600 -0.239300
1097
+ vn 0.933400 0.257300 -0.250000
1098
+ vn 0.957700 0.130100 -0.256400
1099
+ vn 0.965900 -0.000000 -0.258800
1100
+ vn 0.957700 -0.130000 -0.256700
1101
+ vn 0.933400 -0.257400 -0.249900
1102
+ vn 0.893400 -0.380400 -0.239000
1103
+ vn 0.838000 -0.497300 -0.224400
1104
+ vn 0.768100 -0.606200 -0.206100
1105
+ vn 0.685400 -0.704500 -0.183900
1106
+ vn 0.591000 -0.790900 -0.158400
1107
+ vn 0.486500 -0.863900 -0.130300
1108
+ vn 0.374400 -0.921800 -0.100200
1109
+ vn 0.255100 -0.964500 -0.068300
1110
+ vn 0.130700 -0.990800 -0.035300
1111
+ f 1//1 26//2 2//3
1112
+ f 2//3 26//2 27//4
1113
+ f 2//3 27//4 3//5
1114
+ f 3//5 27//4 28//6
1115
+ f 3//5 28//6 4//7
1116
+ f 4//7 28//6 29//8
1117
+ f 4//7 29//8 5//9
1118
+ f 5//9 29//8 30//10
1119
+ f 5//9 30//10 6//11
1120
+ f 6//11 30//10 31//12
1121
+ f 6//11 31//12 7//13
1122
+ f 7//13 31//12 32//14
1123
+ f 7//13 32//14 8//15
1124
+ f 8//15 32//14 33//16
1125
+ f 8//15 33//16 9//17
1126
+ f 9//17 33//16 34//18
1127
+ f 9//17 34//18 10//19
1128
+ f 10//19 34//18 35//20
1129
+ f 10//19 35//20 11//21
1130
+ f 11//21 35//20 36//22
1131
+ f 11//21 36//22 12//23
1132
+ f 12//23 36//22 37//24
1133
+ f 12//23 37//24 13//25
1134
+ f 13//25 37//24 38//26
1135
+ f 13//25 38//26 14//27
1136
+ f 14//27 38//26 39//28
1137
+ f 14//27 39//28 15//29
1138
+ f 15//29 39//28 40//30
1139
+ f 15//29 40//30 16//31
1140
+ f 16//31 40//30 41//32
1141
+ f 16//31 41//32 17//33
1142
+ f 17//33 41//32 42//34
1143
+ f 17//33 42//34 18//35
1144
+ f 18//35 42//34 43//36
1145
+ f 18//35 43//36 19//37
1146
+ f 19//37 43//36 44//38
1147
+ f 19//37 44//38 20//39
1148
+ f 20//39 44//38 45//40
1149
+ f 20//39 45//40 21//41
1150
+ f 21//41 45//40 46//42
1151
+ f 21//41 46//42 22//43
1152
+ f 22//43 46//42 47//44
1153
+ f 22//43 47//44 23//45
1154
+ f 23//45 47//44 48//46
1155
+ f 23//45 48//46 24//47
1156
+ f 24//47 48//46 25//48
1157
+ f 1//1 49//49 26//2
1158
+ f 26//2 49//49 50//50
1159
+ f 26//2 50//50 27//4
1160
+ f 27//4 50//50 51//51
1161
+ f 27//4 51//51 28//6
1162
+ f 28//6 51//51 52//52
1163
+ f 28//6 52//52 29//8
1164
+ f 29//8 52//52 53//53
1165
+ f 29//8 53//53 30//10
1166
+ f 30//10 53//53 54//54
1167
+ f 30//10 54//54 31//12
1168
+ f 31//12 54//54 55//55
1169
+ f 31//12 55//55 32//14
1170
+ f 32//14 55//55 56//56
1171
+ f 32//14 56//56 33//16
1172
+ f 33//16 56//56 57//57
1173
+ f 33//16 57//57 34//18
1174
+ f 34//18 57//57 58//58
1175
+ f 34//18 58//58 35//20
1176
+ f 35//20 58//58 59//59
1177
+ f 35//20 59//59 36//22
1178
+ f 36//22 59//59 60//60
1179
+ f 36//22 60//60 37//24
1180
+ f 37//24 60//60 61//61
1181
+ f 37//24 61//61 38//26
1182
+ f 38//26 61//61 62//62
1183
+ f 38//26 62//62 39//28
1184
+ f 39//28 62//62 63//63
1185
+ f 39//28 63//63 40//30
1186
+ f 40//30 63//63 64//64
1187
+ f 40//30 64//64 41//32
1188
+ f 41//32 64//64 65//65
1189
+ f 41//32 65//65 42//34
1190
+ f 42//34 65//65 66//66
1191
+ f 42//34 66//66 43//36
1192
+ f 43//36 66//66 67//67
1193
+ f 43//36 67//67 44//38
1194
+ f 44//38 67//67 68//68
1195
+ f 44//38 68//68 45//40
1196
+ f 45//40 68//68 69//69
1197
+ f 45//40 69//69 46//42
1198
+ f 46//42 69//69 70//70
1199
+ f 46//42 70//70 47//44
1200
+ f 47//44 70//70 71//71
1201
+ f 47//44 71//71 48//46
1202
+ f 48//46 71//71 25//48
1203
+ f 1//1 72//72 49//49
1204
+ f 49//49 72//72 73//73
1205
+ f 49//49 73//73 50//50
1206
+ f 50//50 73//73 74//74
1207
+ f 50//50 74//74 51//51
1208
+ f 51//51 74//74 75//75
1209
+ f 51//51 75//75 52//52
1210
+ f 52//52 75//75 76//76
1211
+ f 52//52 76//76 53//53
1212
+ f 53//53 76//76 77//77
1213
+ f 53//53 77//77 54//54
1214
+ f 54//54 77//77 78//78
1215
+ f 54//54 78//78 55//55
1216
+ f 55//55 78//78 79//79
1217
+ f 55//55 79//79 56//56
1218
+ f 56//56 79//79 80//80
1219
+ f 56//56 80//80 57//57
1220
+ f 57//57 80//80 81//81
1221
+ f 57//57 81//81 58//58
1222
+ f 58//58 81//81 82//82
1223
+ f 58//58 82//82 59//59
1224
+ f 59//59 82//82 83//83
1225
+ f 59//59 83//83 60//60
1226
+ f 60//60 83//83 84//84
1227
+ f 60//60 84//84 61//61
1228
+ f 61//61 84//84 85//85
1229
+ f 61//61 85//85 62//62
1230
+ f 62//62 85//85 86//86
1231
+ f 62//62 86//86 63//63
1232
+ f 63//63 86//86 87//87
1233
+ f 63//63 87//87 64//64
1234
+ f 64//64 87//87 88//88
1235
+ f 64//64 88//88 65//65
1236
+ f 65//65 88//88 89//89
1237
+ f 65//65 89//89 66//66
1238
+ f 66//66 89//89 90//90
1239
+ f 66//66 90//90 67//67
1240
+ f 67//67 90//90 91//91
1241
+ f 67//67 91//91 68//68
1242
+ f 68//68 91//91 92//92
1243
+ f 68//68 92//92 69//69
1244
+ f 69//69 92//92 93//93
1245
+ f 69//69 93//93 70//70
1246
+ f 70//70 93//93 94//94
1247
+ f 70//70 94//94 71//71
1248
+ f 71//71 94//94 25//48
1249
+ f 1//1 95//95 72//72
1250
+ f 72//72 95//95 96//96
1251
+ f 72//72 96//96 73//73
1252
+ f 73//73 96//96 97//97
1253
+ f 73//73 97//97 74//74
1254
+ f 74//74 97//97 98//98
1255
+ f 74//74 98//98 75//75
1256
+ f 75//75 98//98 99//99
1257
+ f 75//75 99//99 76//76
1258
+ f 76//76 99//99 100//100
1259
+ f 76//76 100//100 77//77
1260
+ f 77//77 100//100 101//101
1261
+ f 77//77 101//101 78//78
1262
+ f 78//78 101//101 102//102
1263
+ f 78//78 102//102 79//79
1264
+ f 79//79 102//102 103//103
1265
+ f 79//79 103//103 80//80
1266
+ f 80//80 103//103 104//104
1267
+ f 80//80 104//104 81//81
1268
+ f 81//81 104//104 105//105
1269
+ f 81//81 105//105 82//82
1270
+ f 82//82 105//105 106//106
1271
+ f 82//82 106//106 83//83
1272
+ f 83//83 106//106 107//107
1273
+ f 83//83 107//107 84//84
1274
+ f 84//84 107//107 108//108
1275
+ f 84//84 108//108 85//85
1276
+ f 85//85 108//108 109//109
1277
+ f 85//85 109//109 86//86
1278
+ f 86//86 109//109 110//110
1279
+ f 86//86 110//110 87//87
1280
+ f 87//87 110//110 111//111
1281
+ f 87//87 111//111 88//88
1282
+ f 88//88 111//111 112//112
1283
+ f 88//88 112//112 89//89
1284
+ f 89//89 112//112 113//113
1285
+ f 89//89 113//113 90//90
1286
+ f 90//90 113//113 114//114
1287
+ f 90//90 114//114 91//91
1288
+ f 91//91 114//114 115//115
1289
+ f 91//91 115//115 92//92
1290
+ f 92//92 115//115 116//116
1291
+ f 92//92 116//116 93//93
1292
+ f 93//93 116//116 117//117
1293
+ f 93//93 117//117 94//94
1294
+ f 94//94 117//117 25//48
1295
+ f 1//1 118//118 95//95
1296
+ f 95//95 118//118 119//119
1297
+ f 95//95 119//119 96//96
1298
+ f 96//96 119//119 120//120
1299
+ f 96//96 120//120 97//97
1300
+ f 97//97 120//120 121//121
1301
+ f 97//97 121//121 98//98
1302
+ f 98//98 121//121 122//122
1303
+ f 98//98 122//122 99//99
1304
+ f 99//99 122//122 123//123
1305
+ f 99//99 123//123 100//100
1306
+ f 100//100 123//123 124//124
1307
+ f 100//100 124//124 101//101
1308
+ f 101//101 124//124 125//125
1309
+ f 101//101 125//125 102//102
1310
+ f 102//102 125//125 126//126
1311
+ f 102//102 126//126 103//103
1312
+ f 103//103 126//126 127//127
1313
+ f 103//103 127//127 104//104
1314
+ f 104//104 127//127 128//128
1315
+ f 104//104 128//128 105//105
1316
+ f 105//105 128//128 129//129
1317
+ f 105//105 129//129 106//106
1318
+ f 106//106 129//129 130//130
1319
+ f 106//106 130//130 107//107
1320
+ f 107//107 130//130 131//131
1321
+ f 107//107 131//131 108//108
1322
+ f 108//108 131//131 132//132
1323
+ f 108//108 132//132 109//109
1324
+ f 109//109 132//132 133//133
1325
+ f 109//109 133//133 110//110
1326
+ f 110//110 133//133 134//134
1327
+ f 110//110 134//134 111//111
1328
+ f 111//111 134//134 135//135
1329
+ f 111//111 135//135 112//112
1330
+ f 112//112 135//135 136//136
1331
+ f 112//112 136//136 113//113
1332
+ f 113//113 136//136 137//137
1333
+ f 113//113 137//137 114//114
1334
+ f 114//114 137//137 138//138
1335
+ f 114//114 138//138 115//115
1336
+ f 115//115 138//138 139//139
1337
+ f 115//115 139//139 116//116
1338
+ f 116//116 139//139 140//140
1339
+ f 116//116 140//140 117//117
1340
+ f 117//117 140//140 25//48
1341
+ f 1//1 141//141 118//118
1342
+ f 118//118 141//141 142//142
1343
+ f 118//118 142//142 119//119
1344
+ f 119//119 142//142 143//143
1345
+ f 119//119 143//143 120//120
1346
+ f 120//120 143//143 144//144
1347
+ f 120//120 144//144 121//121
1348
+ f 121//121 144//144 145//145
1349
+ f 121//121 145//145 122//122
1350
+ f 122//122 145//145 146//146
1351
+ f 122//122 146//146 123//123
1352
+ f 123//123 146//146 147//147
1353
+ f 123//123 147//147 124//124
1354
+ f 124//124 147//147 148//148
1355
+ f 124//124 148//148 125//125
1356
+ f 125//125 148//148 149//149
1357
+ f 125//125 149//149 126//126
1358
+ f 126//126 149//149 150//150
1359
+ f 126//126 150//150 127//127
1360
+ f 127//127 150//150 151//151
1361
+ f 127//127 151//151 128//128
1362
+ f 128//128 151//151 152//152
1363
+ f 128//128 152//152 129//129
1364
+ f 129//129 152//152 153//153
1365
+ f 129//129 153//153 130//130
1366
+ f 130//130 153//153 154//154
1367
+ f 130//130 154//154 131//131
1368
+ f 131//131 154//154 155//155
1369
+ f 131//131 155//155 132//132
1370
+ f 132//132 155//155 156//156
1371
+ f 132//132 156//156 133//133
1372
+ f 133//133 156//156 157//157
1373
+ f 133//133 157//157 134//134
1374
+ f 134//134 157//157 158//158
1375
+ f 134//134 158//158 135//135
1376
+ f 135//135 158//158 159//159
1377
+ f 135//135 159//159 136//136
1378
+ f 136//136 159//159 160//160
1379
+ f 136//136 160//160 137//137
1380
+ f 137//137 160//160 161//161
1381
+ f 137//137 161//161 138//138
1382
+ f 138//138 161//161 162//162
1383
+ f 138//138 162//162 139//139
1384
+ f 139//139 162//162 163//163
1385
+ f 139//139 163//163 140//140
1386
+ f 140//140 163//163 25//48
1387
+ f 1//1 164//164 141//141
1388
+ f 141//141 164//164 165//165
1389
+ f 141//141 165//165 142//142
1390
+ f 142//142 165//165 166//166
1391
+ f 142//142 166//166 143//143
1392
+ f 143//143 166//166 167//167
1393
+ f 143//143 167//167 144//144
1394
+ f 144//144 167//167 168//168
1395
+ f 144//144 168//168 145//145
1396
+ f 145//145 168//168 169//169
1397
+ f 145//145 169//169 146//146
1398
+ f 146//146 169//169 170//170
1399
+ f 146//146 170//170 147//147
1400
+ f 147//147 170//170 171//171
1401
+ f 147//147 171//171 148//148
1402
+ f 148//148 171//171 172//172
1403
+ f 148//148 172//172 149//149
1404
+ f 149//149 172//172 173//173
1405
+ f 149//149 173//173 150//150
1406
+ f 150//150 173//173 174//174
1407
+ f 150//150 174//174 151//151
1408
+ f 151//151 174//174 175//175
1409
+ f 151//151 175//175 152//152
1410
+ f 152//152 175//175 176//176
1411
+ f 152//152 176//176 153//153
1412
+ f 153//153 176//176 177//177
1413
+ f 153//153 177//177 154//154
1414
+ f 154//154 177//177 178//178
1415
+ f 154//154 178//178 155//155
1416
+ f 155//155 178//178 179//179
1417
+ f 155//155 179//179 156//156
1418
+ f 156//156 179//179 180//180
1419
+ f 156//156 180//180 157//157
1420
+ f 157//157 180//180 181//181
1421
+ f 157//157 181//181 158//158
1422
+ f 158//158 181//181 182//182
1423
+ f 158//158 182//182 159//159
1424
+ f 159//159 182//182 183//183
1425
+ f 159//159 183//183 160//160
1426
+ f 160//160 183//183 184//184
1427
+ f 160//160 184//184 161//161
1428
+ f 161//161 184//184 185//185
1429
+ f 161//161 185//185 162//162
1430
+ f 162//162 185//185 186//186
1431
+ f 162//162 186//186 163//163
1432
+ f 163//163 186//186 25//48
1433
+ f 1//1 187//187 164//164
1434
+ f 164//164 187//187 188//188
1435
+ f 164//164 188//188 165//165
1436
+ f 165//165 188//188 189//189
1437
+ f 165//165 189//189 166//166
1438
+ f 166//166 189//189 190//190
1439
+ f 166//166 190//190 167//167
1440
+ f 167//167 190//190 191//191
1441
+ f 167//167 191//191 168//168
1442
+ f 168//168 191//191 192//192
1443
+ f 168//168 192//192 169//169
1444
+ f 169//169 192//192 193//193
1445
+ f 169//169 193//193 170//170
1446
+ f 170//170 193//193 194//194
1447
+ f 170//170 194//194 171//171
1448
+ f 171//171 194//194 195//195
1449
+ f 171//171 195//195 172//172
1450
+ f 172//172 195//195 196//196
1451
+ f 172//172 196//196 173//173
1452
+ f 173//173 196//196 197//197
1453
+ f 173//173 197//197 174//174
1454
+ f 174//174 197//197 198//198
1455
+ f 174//174 198//198 175//175
1456
+ f 175//175 198//198 199//199
1457
+ f 175//175 199//199 176//176
1458
+ f 176//176 199//199 200//200
1459
+ f 176//176 200//200 177//177
1460
+ f 177//177 200//200 201//201
1461
+ f 177//177 201//201 178//178
1462
+ f 178//178 201//201 202//202
1463
+ f 178//178 202//202 179//179
1464
+ f 179//179 202//202 203//203
1465
+ f 179//179 203//203 180//180
1466
+ f 180//180 203//203 204//204
1467
+ f 180//180 204//204 181//181
1468
+ f 181//181 204//204 205//205
1469
+ f 181//181 205//205 182//182
1470
+ f 182//182 205//205 206//206
1471
+ f 182//182 206//206 183//183
1472
+ f 183//183 206//206 207//207
1473
+ f 183//183 207//207 184//184
1474
+ f 184//184 207//207 208//208
1475
+ f 184//184 208//208 185//185
1476
+ f 185//185 208//208 209//209
1477
+ f 185//185 209//209 186//186
1478
+ f 186//186 209//209 25//48
1479
+ f 1//1 210//210 187//187
1480
+ f 187//187 210//210 211//211
1481
+ f 187//187 211//211 188//188
1482
+ f 188//188 211//211 212//212
1483
+ f 188//188 212//212 189//189
1484
+ f 189//189 212//212 213//213
1485
+ f 189//189 213//213 190//190
1486
+ f 190//190 213//213 214//214
1487
+ f 190//190 214//214 191//191
1488
+ f 191//191 214//214 215//215
1489
+ f 191//191 215//215 192//192
1490
+ f 192//192 215//215 216//216
1491
+ f 192//192 216//216 193//193
1492
+ f 193//193 216//216 217//217
1493
+ f 193//193 217//217 194//194
1494
+ f 194//194 217//217 218//218
1495
+ f 194//194 218//218 195//195
1496
+ f 195//195 218//218 219//219
1497
+ f 195//195 219//219 196//196
1498
+ f 196//196 219//219 220//220
1499
+ f 196//196 220//220 197//197
1500
+ f 197//197 220//220 221//221
1501
+ f 197//197 221//221 198//198
1502
+ f 198//198 221//221 222//222
1503
+ f 198//198 222//222 199//199
1504
+ f 199//199 222//222 223//223
1505
+ f 199//199 223//223 200//200
1506
+ f 200//200 223//223 224//224
1507
+ f 200//200 224//224 201//201
1508
+ f 201//201 224//224 225//225
1509
+ f 201//201 225//225 202//202
1510
+ f 202//202 225//225 226//226
1511
+ f 202//202 226//226 203//203
1512
+ f 203//203 226//226 227//227
1513
+ f 203//203 227//227 204//204
1514
+ f 204//204 227//227 228//228
1515
+ f 204//204 228//228 205//205
1516
+ f 205//205 228//228 229//229
1517
+ f 205//205 229//229 206//206
1518
+ f 206//206 229//229 230//230
1519
+ f 206//206 230//230 207//207
1520
+ f 207//207 230//230 231//231
1521
+ f 207//207 231//231 208//208
1522
+ f 208//208 231//231 232//232
1523
+ f 208//208 232//232 209//209
1524
+ f 209//209 232//232 25//48
1525
+ f 1//1 233//233 210//210
1526
+ f 210//210 233//233 234//234
1527
+ f 210//210 234//234 211//211
1528
+ f 211//211 234//234 235//235
1529
+ f 211//211 235//235 212//212
1530
+ f 212//212 235//235 236//236
1531
+ f 212//212 236//236 213//213
1532
+ f 213//213 236//236 237//237
1533
+ f 213//213 237//237 214//214
1534
+ f 214//214 237//237 238//238
1535
+ f 214//214 238//238 215//215
1536
+ f 215//215 238//238 239//239
1537
+ f 215//215 239//239 216//216
1538
+ f 216//216 239//239 240//240
1539
+ f 216//216 240//240 217//217
1540
+ f 217//217 240//240 241//241
1541
+ f 217//217 241//241 218//218
1542
+ f 218//218 241//241 242//242
1543
+ f 218//218 242//242 219//219
1544
+ f 219//219 242//242 243//243
1545
+ f 219//219 243//243 220//220
1546
+ f 220//220 243//243 244//244
1547
+ f 220//220 244//244 221//221
1548
+ f 221//221 244//244 245//245
1549
+ f 221//221 245//245 222//222
1550
+ f 222//222 245//245 246//246
1551
+ f 222//222 246//246 223//223
1552
+ f 223//223 246//246 247//247
1553
+ f 223//223 247//247 224//224
1554
+ f 224//224 247//247 248//248
1555
+ f 224//224 248//248 225//225
1556
+ f 225//225 248//248 249//249
1557
+ f 225//225 249//249 226//226
1558
+ f 226//226 249//249 250//250
1559
+ f 226//226 250//250 227//227
1560
+ f 227//227 250//250 251//251
1561
+ f 227//227 251//251 228//228
1562
+ f 228//228 251//251 252//252
1563
+ f 228//228 252//252 229//229
1564
+ f 229//229 252//252 253//253
1565
+ f 229//229 253//253 230//230
1566
+ f 230//230 253//253 254//254
1567
+ f 230//230 254//254 231//231
1568
+ f 231//231 254//254 255//255
1569
+ f 231//231 255//255 232//232
1570
+ f 232//232 255//255 25//48
1571
+ f 1//1 256//256 233//233
1572
+ f 233//233 256//256 257//257
1573
+ f 233//233 257//257 234//234
1574
+ f 234//234 257//257 258//258
1575
+ f 234//234 258//258 235//235
1576
+ f 235//235 258//258 259//259
1577
+ f 235//235 259//259 236//236
1578
+ f 236//236 259//259 260//260
1579
+ f 236//236 260//260 237//237
1580
+ f 237//237 260//260 261//261
1581
+ f 237//237 261//261 238//238
1582
+ f 238//238 261//261 262//262
1583
+ f 238//238 262//262 239//239
1584
+ f 239//239 262//262 263//263
1585
+ f 239//239 263//263 240//240
1586
+ f 240//240 263//263 264//264
1587
+ f 240//240 264//264 241//241
1588
+ f 241//241 264//264 265//265
1589
+ f 241//241 265//265 242//242
1590
+ f 242//242 265//265 266//266
1591
+ f 242//242 266//266 243//243
1592
+ f 243//243 266//266 267//267
1593
+ f 243//243 267//267 244//244
1594
+ f 244//244 267//267 268//268
1595
+ f 244//244 268//268 245//245
1596
+ f 245//245 268//268 269//269
1597
+ f 245//245 269//269 246//246
1598
+ f 246//246 269//269 270//270
1599
+ f 246//246 270//270 247//247
1600
+ f 247//247 270//270 271//271
1601
+ f 247//247 271//271 248//248
1602
+ f 248//248 271//271 272//272
1603
+ f 248//248 272//272 249//249
1604
+ f 249//249 272//272 273//273
1605
+ f 249//249 273//273 250//250
1606
+ f 250//250 273//273 274//274
1607
+ f 250//250 274//274 251//251
1608
+ f 251//251 274//274 275//275
1609
+ f 251//251 275//275 252//252
1610
+ f 252//252 275//275 276//276
1611
+ f 252//252 276//276 253//253
1612
+ f 253//253 276//276 277//277
1613
+ f 253//253 277//277 254//254
1614
+ f 254//254 277//277 278//278
1615
+ f 254//254 278//278 255//255
1616
+ f 255//255 278//278 25//48
1617
+ f 1//1 279//279 256//256
1618
+ f 256//256 279//279 280//280
1619
+ f 256//256 280//280 257//257
1620
+ f 257//257 280//280 281//281
1621
+ f 257//257 281//281 258//258
1622
+ f 258//258 281//281 282//282
1623
+ f 258//258 282//282 259//259
1624
+ f 259//259 282//282 283//283
1625
+ f 259//259 283//283 260//260
1626
+ f 260//260 283//283 284//284
1627
+ f 260//260 284//284 261//261
1628
+ f 261//261 284//284 285//285
1629
+ f 261//261 285//285 262//262
1630
+ f 262//262 285//285 286//286
1631
+ f 262//262 286//286 263//263
1632
+ f 263//263 286//286 287//287
1633
+ f 263//263 287//287 264//264
1634
+ f 264//264 287//287 288//288
1635
+ f 264//264 288//288 265//265
1636
+ f 265//265 288//288 289//289
1637
+ f 265//265 289//289 266//266
1638
+ f 266//266 289//289 290//290
1639
+ f 266//266 290//290 267//267
1640
+ f 267//267 290//290 291//291
1641
+ f 267//267 291//291 268//268
1642
+ f 268//268 291//291 292//292
1643
+ f 268//268 292//292 269//269
1644
+ f 269//269 292//292 293//293
1645
+ f 269//269 293//293 270//270
1646
+ f 270//270 293//293 294//294
1647
+ f 270//270 294//294 271//271
1648
+ f 271//271 294//294 295//295
1649
+ f 271//271 295//295 272//272
1650
+ f 272//272 295//295 296//296
1651
+ f 272//272 296//296 273//273
1652
+ f 273//273 296//296 297//297
1653
+ f 273//273 297//297 274//274
1654
+ f 274//274 297//297 298//298
1655
+ f 274//274 298//298 275//275
1656
+ f 275//275 298//298 299//299
1657
+ f 275//275 299//299 276//276
1658
+ f 276//276 299//299 300//300
1659
+ f 276//276 300//300 277//277
1660
+ f 277//277 300//300 301//301
1661
+ f 277//277 301//301 278//278
1662
+ f 278//278 301//301 25//48
1663
+ f 1//1 302//302 279//279
1664
+ f 279//279 302//302 303//303
1665
+ f 279//279 303//303 280//280
1666
+ f 280//280 303//303 304//304
1667
+ f 280//280 304//304 281//281
1668
+ f 281//281 304//304 305//305
1669
+ f 281//281 305//305 282//282
1670
+ f 282//282 305//305 306//306
1671
+ f 282//282 306//306 283//283
1672
+ f 283//283 306//306 307//307
1673
+ f 283//283 307//307 284//284
1674
+ f 284//284 307//307 308//308
1675
+ f 284//284 308//308 285//285
1676
+ f 285//285 308//308 309//309
1677
+ f 285//285 309//309 286//286
1678
+ f 286//286 309//309 310//310
1679
+ f 286//286 310//310 287//287
1680
+ f 287//287 310//310 311//311
1681
+ f 287//287 311//311 288//288
1682
+ f 288//288 311//311 312//312
1683
+ f 288//288 312//312 289//289
1684
+ f 289//289 312//312 313//313
1685
+ f 289//289 313//313 290//290
1686
+ f 290//290 313//313 314//314
1687
+ f 290//290 314//314 291//291
1688
+ f 291//291 314//314 315//315
1689
+ f 291//291 315//315 292//292
1690
+ f 292//292 315//315 316//316
1691
+ f 292//292 316//316 293//293
1692
+ f 293//293 316//316 317//317
1693
+ f 293//293 317//317 294//294
1694
+ f 294//294 317//317 318//318
1695
+ f 294//294 318//318 295//295
1696
+ f 295//295 318//318 319//319
1697
+ f 295//295 319//319 296//296
1698
+ f 296//296 319//319 320//320
1699
+ f 296//296 320//320 297//297
1700
+ f 297//297 320//320 321//321
1701
+ f 297//297 321//321 298//298
1702
+ f 298//298 321//321 322//322
1703
+ f 298//298 322//322 299//299
1704
+ f 299//299 322//322 323//323
1705
+ f 299//299 323//323 300//300
1706
+ f 300//300 323//323 324//324
1707
+ f 300//300 324//324 301//301
1708
+ f 301//301 324//324 25//48
1709
+ f 1//1 325//325 302//302
1710
+ f 302//302 325//325 326//326
1711
+ f 302//302 326//326 303//303
1712
+ f 303//303 326//326 327//327
1713
+ f 303//303 327//327 304//304
1714
+ f 304//304 327//327 328//328
1715
+ f 304//304 328//328 305//305
1716
+ f 305//305 328//328 329//329
1717
+ f 305//305 329//329 306//306
1718
+ f 306//306 329//329 330//330
1719
+ f 306//306 330//330 307//307
1720
+ f 307//307 330//330 331//331
1721
+ f 307//307 331//331 308//308
1722
+ f 308//308 331//331 332//332
1723
+ f 308//308 332//332 309//309
1724
+ f 309//309 332//332 333//333
1725
+ f 309//309 333//333 310//310
1726
+ f 310//310 333//333 334//334
1727
+ f 310//310 334//334 311//311
1728
+ f 311//311 334//334 335//335
1729
+ f 311//311 335//335 312//312
1730
+ f 312//312 335//335 336//336
1731
+ f 312//312 336//336 313//313
1732
+ f 313//313 336//336 337//337
1733
+ f 313//313 337//337 314//314
1734
+ f 314//314 337//337 338//338
1735
+ f 314//314 338//338 315//315
1736
+ f 315//315 338//338 339//339
1737
+ f 315//315 339//339 316//316
1738
+ f 316//316 339//339 340//340
1739
+ f 316//316 340//340 317//317
1740
+ f 317//317 340//340 341//341
1741
+ f 317//317 341//341 318//318
1742
+ f 318//318 341//341 342//342
1743
+ f 318//318 342//342 319//319
1744
+ f 319//319 342//342 343//343
1745
+ f 319//319 343//343 320//320
1746
+ f 320//320 343//343 344//344
1747
+ f 320//320 344//344 321//321
1748
+ f 321//321 344//344 345//345
1749
+ f 321//321 345//345 322//322
1750
+ f 322//322 345//345 346//346
1751
+ f 322//322 346//346 323//323
1752
+ f 323//323 346//346 347//347
1753
+ f 323//323 347//347 324//324
1754
+ f 324//324 347//347 25//48
1755
+ f 1//1 348//348 325//325
1756
+ f 325//325 348//348 349//349
1757
+ f 325//325 349//349 326//326
1758
+ f 326//326 349//349 350//350
1759
+ f 326//326 350//350 327//327
1760
+ f 327//327 350//350 351//351
1761
+ f 327//327 351//351 328//328
1762
+ f 328//328 351//351 352//352
1763
+ f 328//328 352//352 329//329
1764
+ f 329//329 352//352 353//353
1765
+ f 329//329 353//353 330//330
1766
+ f 330//330 353//353 354//354
1767
+ f 330//330 354//354 331//331
1768
+ f 331//331 354//354 355//355
1769
+ f 331//331 355//355 332//332
1770
+ f 332//332 355//355 356//356
1771
+ f 332//332 356//356 333//333
1772
+ f 333//333 356//356 357//357
1773
+ f 333//333 357//357 334//334
1774
+ f 334//334 357//357 358//358
1775
+ f 334//334 358//358 335//335
1776
+ f 335//335 358//358 359//359
1777
+ f 335//335 359//359 336//336
1778
+ f 336//336 359//359 360//360
1779
+ f 336//336 360//360 337//337
1780
+ f 337//337 360//360 361//361
1781
+ f 337//337 361//361 338//338
1782
+ f 338//338 361//361 362//362
1783
+ f 338//338 362//362 339//339
1784
+ f 339//339 362//362 363//363
1785
+ f 339//339 363//363 340//340
1786
+ f 340//340 363//363 364//364
1787
+ f 340//340 364//364 341//341
1788
+ f 341//341 364//364 365//365
1789
+ f 341//341 365//365 342//342
1790
+ f 342//342 365//365 366//366
1791
+ f 342//342 366//366 343//343
1792
+ f 343//343 366//366 367//367
1793
+ f 343//343 367//367 344//344
1794
+ f 344//344 367//367 368//368
1795
+ f 344//344 368//368 345//345
1796
+ f 345//345 368//368 369//369
1797
+ f 345//345 369//369 346//346
1798
+ f 346//346 369//369 370//370
1799
+ f 346//346 370//370 347//347
1800
+ f 347//347 370//370 25//48
1801
+ f 1//1 371//371 348//348
1802
+ f 348//348 371//371 372//372
1803
+ f 348//348 372//372 349//349
1804
+ f 349//349 372//372 373//373
1805
+ f 349//349 373//373 350//350
1806
+ f 350//350 373//373 374//374
1807
+ f 350//350 374//374 351//351
1808
+ f 351//351 374//374 375//375
1809
+ f 351//351 375//375 352//352
1810
+ f 352//352 375//375 376//376
1811
+ f 352//352 376//376 353//353
1812
+ f 353//353 376//376 377//377
1813
+ f 353//353 377//377 354//354
1814
+ f 354//354 377//377 378//378
1815
+ f 354//354 378//378 355//355
1816
+ f 355//355 378//378 379//379
1817
+ f 355//355 379//379 356//356
1818
+ f 356//356 379//379 380//380
1819
+ f 356//356 380//380 357//357
1820
+ f 357//357 380//380 381//381
1821
+ f 357//357 381//381 358//358
1822
+ f 358//358 381//381 382//382
1823
+ f 358//358 382//382 359//359
1824
+ f 359//359 382//382 383//383
1825
+ f 359//359 383//383 360//360
1826
+ f 360//360 383//383 384//384
1827
+ f 360//360 384//384 361//361
1828
+ f 361//361 384//384 385//385
1829
+ f 361//361 385//385 362//362
1830
+ f 362//362 385//385 386//386
1831
+ f 362//362 386//386 363//363
1832
+ f 363//363 386//386 387//387
1833
+ f 363//363 387//387 364//364
1834
+ f 364//364 387//387 388//388
1835
+ f 364//364 388//388 365//365
1836
+ f 365//365 388//388 389//389
1837
+ f 365//365 389//389 366//366
1838
+ f 366//366 389//389 390//390
1839
+ f 366//366 390//390 367//367
1840
+ f 367//367 390//390 391//391
1841
+ f 367//367 391//391 368//368
1842
+ f 368//368 391//391 392//392
1843
+ f 368//368 392//392 369//369
1844
+ f 369//369 392//392 393//393
1845
+ f 369//369 393//393 370//370
1846
+ f 370//370 393//393 25//48
1847
+ f 1//1 394//394 371//371
1848
+ f 371//371 394//394 395//395
1849
+ f 371//371 395//395 372//372
1850
+ f 372//372 395//395 396//396
1851
+ f 372//372 396//396 373//373
1852
+ f 373//373 396//396 397//397
1853
+ f 373//373 397//397 374//374
1854
+ f 374//374 397//397 398//398
1855
+ f 374//374 398//398 375//375
1856
+ f 375//375 398//398 399//399
1857
+ f 375//375 399//399 376//376
1858
+ f 376//376 399//399 400//400
1859
+ f 376//376 400//400 377//377
1860
+ f 377//377 400//400 401//401
1861
+ f 377//377 401//401 378//378
1862
+ f 378//378 401//401 402//402
1863
+ f 378//378 402//402 379//379
1864
+ f 379//379 402//402 403//403
1865
+ f 379//379 403//403 380//380
1866
+ f 380//380 403//403 404//404
1867
+ f 380//380 404//404 381//381
1868
+ f 381//381 404//404 405//405
1869
+ f 381//381 405//405 382//382
1870
+ f 382//382 405//405 406//406
1871
+ f 382//382 406//406 383//383
1872
+ f 383//383 406//406 407//407
1873
+ f 383//383 407//407 384//384
1874
+ f 384//384 407//407 408//408
1875
+ f 384//384 408//408 385//385
1876
+ f 385//385 408//408 409//409
1877
+ f 385//385 409//409 386//386
1878
+ f 386//386 409//409 410//410
1879
+ f 386//386 410//410 387//387
1880
+ f 387//387 410//410 411//411
1881
+ f 387//387 411//411 388//388
1882
+ f 388//388 411//411 412//412
1883
+ f 388//388 412//412 389//389
1884
+ f 389//389 412//412 413//413
1885
+ f 389//389 413//413 390//390
1886
+ f 390//390 413//413 414//414
1887
+ f 390//390 414//414 391//391
1888
+ f 391//391 414//414 415//415
1889
+ f 391//391 415//415 392//392
1890
+ f 392//392 415//415 416//416
1891
+ f 392//392 416//416 393//393
1892
+ f 393//393 416//416 25//48
1893
+ f 1//1 417//417 394//394
1894
+ f 394//394 417//417 418//418
1895
+ f 394//394 418//418 395//395
1896
+ f 395//395 418//418 419//419
1897
+ f 395//395 419//419 396//396
1898
+ f 396//396 419//419 420//420
1899
+ f 396//396 420//420 397//397
1900
+ f 397//397 420//420 421//421
1901
+ f 397//397 421//421 398//398
1902
+ f 398//398 421//421 422//422
1903
+ f 398//398 422//422 399//399
1904
+ f 399//399 422//422 423//423
1905
+ f 399//399 423//423 400//400
1906
+ f 400//400 423//423 424//424
1907
+ f 400//400 424//424 401//401
1908
+ f 401//401 424//424 425//425
1909
+ f 401//401 425//425 402//402
1910
+ f 402//402 425//425 426//426
1911
+ f 402//402 426//426 403//403
1912
+ f 403//403 426//426 427//427
1913
+ f 403//403 427//427 404//404
1914
+ f 404//404 427//427 428//428
1915
+ f 404//404 428//428 405//405
1916
+ f 405//405 428//428 429//429
1917
+ f 405//405 429//429 406//406
1918
+ f 406//406 429//429 430//430
1919
+ f 406//406 430//430 407//407
1920
+ f 407//407 430//430 431//431
1921
+ f 407//407 431//431 408//408
1922
+ f 408//408 431//431 432//432
1923
+ f 408//408 432//432 409//409
1924
+ f 409//409 432//432 433//433
1925
+ f 409//409 433//433 410//410
1926
+ f 410//410 433//433 434//434
1927
+ f 410//410 434//434 411//411
1928
+ f 411//411 434//434 435//435
1929
+ f 411//411 435//435 412//412
1930
+ f 412//412 435//435 436//436
1931
+ f 412//412 436//436 413//413
1932
+ f 413//413 436//436 437//437
1933
+ f 413//413 437//437 414//414
1934
+ f 414//414 437//437 438//438
1935
+ f 414//414 438//438 415//415
1936
+ f 415//415 438//438 439//439
1937
+ f 415//415 439//439 416//416
1938
+ f 416//416 439//439 25//48
1939
+ f 1//1 440//440 417//417
1940
+ f 417//417 440//440 441//441
1941
+ f 417//417 441//441 418//418
1942
+ f 418//418 441//441 442//442
1943
+ f 418//418 442//442 419//419
1944
+ f 419//419 442//442 443//443
1945
+ f 419//419 443//443 420//420
1946
+ f 420//420 443//443 444//444
1947
+ f 420//420 444//444 421//421
1948
+ f 421//421 444//444 445//445
1949
+ f 421//421 445//445 422//422
1950
+ f 422//422 445//445 446//446
1951
+ f 422//422 446//446 423//423
1952
+ f 423//423 446//446 447//447
1953
+ f 423//423 447//447 424//424
1954
+ f 424//424 447//447 448//448
1955
+ f 424//424 448//448 425//425
1956
+ f 425//425 448//448 449//449
1957
+ f 425//425 449//449 426//426
1958
+ f 426//426 449//449 450//450
1959
+ f 426//426 450//450 427//427
1960
+ f 427//427 450//450 451//451
1961
+ f 427//427 451//451 428//428
1962
+ f 428//428 451//451 452//452
1963
+ f 428//428 452//452 429//429
1964
+ f 429//429 452//452 453//453
1965
+ f 429//429 453//453 430//430
1966
+ f 430//430 453//453 454//454
1967
+ f 430//430 454//454 431//431
1968
+ f 431//431 454//454 455//455
1969
+ f 431//431 455//455 432//432
1970
+ f 432//432 455//455 456//456
1971
+ f 432//432 456//456 433//433
1972
+ f 433//433 456//456 457//457
1973
+ f 433//433 457//457 434//434
1974
+ f 434//434 457//457 458//458
1975
+ f 434//434 458//458 435//435
1976
+ f 435//435 458//458 459//459
1977
+ f 435//435 459//459 436//436
1978
+ f 436//436 459//459 460//460
1979
+ f 436//436 460//460 437//437
1980
+ f 437//437 460//460 461//461
1981
+ f 437//437 461//461 438//438
1982
+ f 438//438 461//461 462//462
1983
+ f 438//438 462//462 439//439
1984
+ f 439//439 462//462 25//48
1985
+ f 1//1 463//463 440//440
1986
+ f 440//440 463//463 464//464
1987
+ f 440//440 464//464 441//441
1988
+ f 441//441 464//464 465//465
1989
+ f 441//441 465//465 442//442
1990
+ f 442//442 465//465 466//466
1991
+ f 442//442 466//466 443//443
1992
+ f 443//443 466//466 467//467
1993
+ f 443//443 467//467 444//444
1994
+ f 444//444 467//467 468//468
1995
+ f 444//444 468//468 445//445
1996
+ f 445//445 468//468 469//469
1997
+ f 445//445 469//469 446//446
1998
+ f 446//446 469//469 470//470
1999
+ f 446//446 470//470 447//447
2000
+ f 447//447 470//470 471//471
2001
+ f 447//447 471//471 448//448
2002
+ f 448//448 471//471 472//472
2003
+ f 448//448 472//472 449//449
2004
+ f 449//449 472//472 473//473
2005
+ f 449//449 473//473 450//450
2006
+ f 450//450 473//473 474//474
2007
+ f 450//450 474//474 451//451
2008
+ f 451//451 474//474 475//475
2009
+ f 451//451 475//475 452//452
2010
+ f 452//452 475//475 476//476
2011
+ f 452//452 476//476 453//453
2012
+ f 453//453 476//476 477//477
2013
+ f 453//453 477//477 454//454
2014
+ f 454//454 477//477 478//478
2015
+ f 454//454 478//478 455//455
2016
+ f 455//455 478//478 479//479
2017
+ f 455//455 479//479 456//456
2018
+ f 456//456 479//479 480//480
2019
+ f 456//456 480//480 457//457
2020
+ f 457//457 480//480 481//481
2021
+ f 457//457 481//481 458//458
2022
+ f 458//458 481//481 482//482
2023
+ f 458//458 482//482 459//459
2024
+ f 459//459 482//482 483//483
2025
+ f 459//459 483//483 460//460
2026
+ f 460//460 483//483 484//484
2027
+ f 460//460 484//484 461//461
2028
+ f 461//461 484//484 485//485
2029
+ f 461//461 485//485 462//462
2030
+ f 462//462 485//485 25//48
2031
+ f 1//1 486//486 463//463
2032
+ f 463//463 486//486 487//487
2033
+ f 463//463 487//487 464//464
2034
+ f 464//464 487//487 488//488
2035
+ f 464//464 488//488 465//465
2036
+ f 465//465 488//488 489//489
2037
+ f 465//465 489//489 466//466
2038
+ f 466//466 489//489 490//490
2039
+ f 466//466 490//490 467//467
2040
+ f 467//467 490//490 491//491
2041
+ f 467//467 491//491 468//468
2042
+ f 468//468 491//491 492//492
2043
+ f 468//468 492//492 469//469
2044
+ f 469//469 492//492 493//493
2045
+ f 469//469 493//493 470//470
2046
+ f 470//470 493//493 494//494
2047
+ f 470//470 494//494 471//471
2048
+ f 471//471 494//494 495//495
2049
+ f 471//471 495//495 472//472
2050
+ f 472//472 495//495 496//496
2051
+ f 472//472 496//496 473//473
2052
+ f 473//473 496//496 497//497
2053
+ f 473//473 497//497 474//474
2054
+ f 474//474 497//497 498//498
2055
+ f 474//474 498//498 475//475
2056
+ f 475//475 498//498 499//499
2057
+ f 475//475 499//499 476//476
2058
+ f 476//476 499//499 500//500
2059
+ f 476//476 500//500 477//477
2060
+ f 477//477 500//500 501//501
2061
+ f 477//477 501//501 478//478
2062
+ f 478//478 501//501 502//502
2063
+ f 478//478 502//502 479//479
2064
+ f 479//479 502//502 503//503
2065
+ f 479//479 503//503 480//480
2066
+ f 480//480 503//503 504//504
2067
+ f 480//480 504//504 481//481
2068
+ f 481//481 504//504 505//505
2069
+ f 481//481 505//505 482//482
2070
+ f 482//482 505//505 506//506
2071
+ f 482//482 506//506 483//483
2072
+ f 483//483 506//506 507//507
2073
+ f 483//483 507//507 484//484
2074
+ f 484//484 507//507 508//508
2075
+ f 484//484 508//508 485//485
2076
+ f 485//485 508//508 25//48
2077
+ f 1//1 509//509 486//486
2078
+ f 486//486 509//509 510//510
2079
+ f 486//486 510//510 487//487
2080
+ f 487//487 510//510 511//511
2081
+ f 487//487 511//511 488//488
2082
+ f 488//488 511//511 512//512
2083
+ f 488//488 512//512 489//489
2084
+ f 489//489 512//512 513//513
2085
+ f 489//489 513//513 490//490
2086
+ f 490//490 513//513 514//514
2087
+ f 490//490 514//514 491//491
2088
+ f 491//491 514//514 515//515
2089
+ f 491//491 515//515 492//492
2090
+ f 492//492 515//515 516//516
2091
+ f 492//492 516//516 493//493
2092
+ f 493//493 516//516 517//517
2093
+ f 493//493 517//517 494//494
2094
+ f 494//494 517//517 518//518
2095
+ f 494//494 518//518 495//495
2096
+ f 495//495 518//518 519//519
2097
+ f 495//495 519//519 496//496
2098
+ f 496//496 519//519 520//520
2099
+ f 496//496 520//520 497//497
2100
+ f 497//497 520//520 521//521
2101
+ f 497//497 521//521 498//498
2102
+ f 498//498 521//521 522//522
2103
+ f 498//498 522//522 499//499
2104
+ f 499//499 522//522 523//523
2105
+ f 499//499 523//523 500//500
2106
+ f 500//500 523//523 524//524
2107
+ f 500//500 524//524 501//501
2108
+ f 501//501 524//524 525//525
2109
+ f 501//501 525//525 502//502
2110
+ f 502//502 525//525 526//526
2111
+ f 502//502 526//526 503//503
2112
+ f 503//503 526//526 527//527
2113
+ f 503//503 527//527 504//504
2114
+ f 504//504 527//527 528//528
2115
+ f 504//504 528//528 505//505
2116
+ f 505//505 528//528 529//529
2117
+ f 505//505 529//529 506//506
2118
+ f 506//506 529//529 530//530
2119
+ f 506//506 530//530 507//507
2120
+ f 507//507 530//530 531//531
2121
+ f 507//507 531//531 508//508
2122
+ f 508//508 531//531 25//48
2123
+ f 1//1 532//532 509//509
2124
+ f 509//509 532//532 533//533
2125
+ f 509//509 533//533 510//510
2126
+ f 510//510 533//533 534//534
2127
+ f 510//510 534//534 511//511
2128
+ f 511//511 534//534 535//535
2129
+ f 511//511 535//535 512//512
2130
+ f 512//512 535//535 536//536
2131
+ f 512//512 536//536 513//513
2132
+ f 513//513 536//536 537//537
2133
+ f 513//513 537//537 514//514
2134
+ f 514//514 537//537 538//538
2135
+ f 514//514 538//538 515//515
2136
+ f 515//515 538//538 539//539
2137
+ f 515//515 539//539 516//516
2138
+ f 516//516 539//539 540//540
2139
+ f 516//516 540//540 517//517
2140
+ f 517//517 540//540 541//541
2141
+ f 517//517 541//541 518//518
2142
+ f 518//518 541//541 542//542
2143
+ f 518//518 542//542 519//519
2144
+ f 519//519 542//542 543//543
2145
+ f 519//519 543//543 520//520
2146
+ f 520//520 543//543 544//544
2147
+ f 520//520 544//544 521//521
2148
+ f 521//521 544//544 545//545
2149
+ f 521//521 545//545 522//522
2150
+ f 522//522 545//545 546//546
2151
+ f 522//522 546//546 523//523
2152
+ f 523//523 546//546 547//547
2153
+ f 523//523 547//547 524//524
2154
+ f 524//524 547//547 548//548
2155
+ f 524//524 548//548 525//525
2156
+ f 525//525 548//548 549//549
2157
+ f 525//525 549//549 526//526
2158
+ f 526//526 549//549 550//550
2159
+ f 526//526 550//550 527//527
2160
+ f 527//527 550//550 551//551
2161
+ f 527//527 551//551 528//528
2162
+ f 528//528 551//551 552//552
2163
+ f 528//528 552//552 529//529
2164
+ f 529//529 552//552 553//553
2165
+ f 529//529 553//553 530//530
2166
+ f 530//530 553//553 554//554
2167
+ f 530//530 554//554 531//531
2168
+ f 531//531 554//554 25//48
2169
+ f 1//1 2//3 532//532
2170
+ f 532//532 2//3 3//5
2171
+ f 532//532 3//5 533//533
2172
+ f 533//533 3//5 4//7
2173
+ f 533//533 4//7 534//534
2174
+ f 534//534 4//7 5//9
2175
+ f 534//534 5//9 535//535
2176
+ f 535//535 5//9 6//11
2177
+ f 535//535 6//11 536//536
2178
+ f 536//536 6//11 7//13
2179
+ f 536//536 7//13 537//537
2180
+ f 537//537 7//13 8//15
2181
+ f 537//537 8//15 538//538
2182
+ f 538//538 8//15 9//17
2183
+ f 538//538 9//17 539//539
2184
+ f 539//539 9//17 10//19
2185
+ f 539//539 10//19 540//540
2186
+ f 540//540 10//19 11//21
2187
+ f 540//540 11//21 541//541
2188
+ f 541//541 11//21 12//23
2189
+ f 541//541 12//23 542//542
2190
+ f 542//542 12//23 13//25
2191
+ f 542//542 13//25 543//543
2192
+ f 543//543 13//25 14//27
2193
+ f 543//543 14//27 544//544
2194
+ f 544//544 14//27 15//29
2195
+ f 544//544 15//29 545//545
2196
+ f 545//545 15//29 16//31
2197
+ f 545//545 16//31 546//546
2198
+ f 546//546 16//31 17//33
2199
+ f 546//546 17//33 547//547
2200
+ f 547//547 17//33 18//35
2201
+ f 547//547 18//35 548//548
2202
+ f 548//548 18//35 19//37
2203
+ f 548//548 19//37 549//549
2204
+ f 549//549 19//37 20//39
2205
+ f 549//549 20//39 550//550
2206
+ f 550//550 20//39 21//41
2207
+ f 550//550 21//41 551//551
2208
+ f 551//551 21//41 22//43
2209
+ f 551//551 22//43 552//552
2210
+ f 552//552 22//43 23//45
2211
+ f 552//552 23//45 553//553
2212
+ f 553//553 23//45 24//47
2213
+ f 553//553 24//47 554//554
2214
+ f 554//554 24//47 25//48
AnimatableGaussians/assets/cylinder.obj ADDED
@@ -0,0 +1,198 @@
1
+ # Blender v2.74 (sub 0) OBJ File: ''
2
+ # www.blender.org
3
+ v 0.000000 0.500000 0.500000
4
+ v 0.129400 0.500000 0.483000
5
+ v 0.250000 0.500000 0.433000
6
+ v 0.353600 0.500000 0.353600
7
+ v 0.433000 0.500000 0.250000
8
+ v 0.483000 0.500000 0.129400
9
+ v 0.500000 0.500000 0.000000
10
+ v 0.483000 0.500000 -0.129400
11
+ v 0.433000 0.500000 -0.250000
12
+ v 0.353600 0.500000 -0.353600
13
+ v 0.250000 0.500000 -0.433000
14
+ v 0.129400 0.500000 -0.483000
15
+ v 0.000000 0.500000 -0.500000
16
+ v -0.129400 0.500000 -0.483000
17
+ v -0.250000 0.500000 -0.433000
18
+ v -0.353600 0.500000 -0.353600
19
+ v -0.433000 0.500000 -0.250000
20
+ v -0.483000 0.500000 -0.129400
21
+ v -0.500000 0.500000 0.000000
22
+ v -0.483000 0.500000 0.129400
23
+ v -0.433000 0.500000 0.250000
24
+ v -0.353600 0.500000 0.353600
25
+ v -0.250000 0.500000 0.433000
26
+ v -0.129400 0.500000 0.483000
27
+ v 0.000000 -0.500000 0.500000
28
+ v 0.129400 -0.500000 0.483000
29
+ v 0.250000 -0.500000 0.433000
30
+ v 0.353600 -0.500000 0.353600
31
+ v 0.433000 -0.500000 0.250000
32
+ v 0.483000 -0.500000 0.129400
33
+ v 0.500000 -0.500000 0.000000
34
+ v 0.483000 -0.500000 -0.129400
35
+ v 0.433000 -0.500000 -0.250000
36
+ v 0.353600 -0.500000 -0.353600
37
+ v 0.250000 -0.500000 -0.433000
38
+ v 0.129400 -0.500000 -0.483000
39
+ v 0.000000 -0.500000 -0.500000
40
+ v -0.129400 -0.500000 -0.483000
41
+ v -0.250000 -0.500000 -0.433000
42
+ v -0.353600 -0.500000 -0.353600
43
+ v -0.433000 -0.500000 -0.250000
44
+ v -0.483000 -0.500000 -0.129400
45
+ v -0.500000 -0.500000 0.000000
46
+ v -0.483000 -0.500000 0.129400
47
+ v -0.433000 -0.500000 0.250000
48
+ v -0.353600 -0.500000 0.353600
49
+ v -0.250000 -0.500000 0.433000
50
+ v -0.129400 -0.500000 0.483000
51
+ v 0.000000 0.500000 0.000000
52
+ v 0.000000 -0.500000 0.000000
53
+ vn 0.000000 0.678900 0.734200
54
+ vn 0.000000 -0.678900 0.734200
55
+ vn 0.190100 -0.678800 0.709300
56
+ vn 0.190100 0.678800 0.709300
57
+ vn 0.367000 -0.678900 0.635900
58
+ vn 0.367000 0.678900 0.635900
59
+ vn 0.519300 -0.678700 0.519300
60
+ vn 0.519300 0.678700 0.519300
61
+ vn 0.635900 -0.678900 0.367000
62
+ vn 0.635900 0.678900 0.367000
63
+ vn 0.709300 -0.678800 0.190100
64
+ vn 0.709300 0.678800 0.190100
65
+ vn 0.734200 -0.678900 0.000000
66
+ vn 0.734200 0.678900 0.000000
67
+ vn 0.709300 -0.678800 -0.190100
68
+ vn 0.709300 0.678800 -0.190100
69
+ vn 0.635900 -0.678900 -0.367000
70
+ vn 0.635900 0.678900 -0.367000
71
+ vn 0.519300 -0.678700 -0.519300
72
+ vn 0.519300 0.678700 -0.519300
73
+ vn 0.367000 -0.678900 -0.635900
74
+ vn 0.367000 0.678900 -0.635900
75
+ vn 0.190100 -0.678800 -0.709300
76
+ vn 0.190100 0.678800 -0.709300
77
+ vn 0.000000 -0.678900 -0.734200
78
+ vn 0.000000 0.678900 -0.734200
79
+ vn -0.190100 -0.678800 -0.709300
80
+ vn -0.190100 0.678800 -0.709300
81
+ vn -0.367000 -0.678900 -0.635900
82
+ vn -0.367000 0.678900 -0.635900
83
+ vn -0.519300 -0.678700 -0.519300
84
+ vn -0.519300 0.678700 -0.519300
85
+ vn -0.635900 -0.678900 -0.367000
86
+ vn -0.635900 0.678900 -0.367000
87
+ vn -0.709300 -0.678800 -0.190100
88
+ vn -0.709300 0.678800 -0.190100
89
+ vn -0.734200 -0.678900 0.000000
90
+ vn -0.734200 0.678900 0.000000
91
+ vn -0.709300 -0.678800 0.190100
92
+ vn -0.709300 0.678800 0.190100
93
+ vn -0.635900 -0.678900 0.367000
94
+ vn -0.635900 0.678900 0.367000
95
+ vn -0.519300 -0.678700 0.519300
96
+ vn -0.519300 0.678700 0.519300
97
+ vn -0.367000 -0.678900 0.635900
98
+ vn -0.367000 0.678900 0.635900
99
+ vn -0.190100 -0.678800 0.709300
100
+ vn -0.190100 0.678800 0.709300
101
+ vn 0.000000 1.000000 0.000000
102
+ vn 0.000000 -1.000000 0.000000
103
+ f 1//1 25//2 26//3
104
+ f 1//1 26//3 2//4
105
+ f 2//4 26//3 27//5
106
+ f 2//4 27//5 3//6
107
+ f 3//6 27//5 28//7
108
+ f 3//6 28//7 4//8
109
+ f 4//8 28//7 29//9
110
+ f 4//8 29//9 5//10
111
+ f 5//10 29//9 30//11
112
+ f 5//10 30//11 6//12
113
+ f 6//12 30//11 31//13
114
+ f 6//12 31//13 7//14
115
+ f 7//14 31//13 32//15
116
+ f 7//14 32//15 8//16
117
+ f 8//16 32//15 33//17
118
+ f 8//16 33//17 9//18
119
+ f 9//18 33//17 34//19
120
+ f 9//18 34//19 10//20
121
+ f 10//20 34//19 35//21
122
+ f 10//20 35//21 11//22
123
+ f 11//22 35//21 36//23
124
+ f 11//22 36//23 12//24
125
+ f 12//24 36//23 37//25
126
+ f 12//24 37//25 13//26
127
+ f 13//26 37//25 38//27
128
+ f 13//26 38//27 14//28
129
+ f 14//28 38//27 39//29
130
+ f 14//28 39//29 15//30
131
+ f 15//30 39//29 40//31
132
+ f 15//30 40//31 16//32
133
+ f 16//32 40//31 41//33
134
+ f 16//32 41//33 17//34
135
+ f 17//34 41//33 42//35
136
+ f 17//34 42//35 18//36
137
+ f 18//36 42//35 43//37
138
+ f 18//36 43//37 19//38
139
+ f 19//38 43//37 44//39
140
+ f 19//38 44//39 20//40
141
+ f 20//40 44//39 45//41
142
+ f 20//40 45//41 21//42
143
+ f 21//42 45//41 46//43
144
+ f 21//42 46//43 22//44
145
+ f 22//44 46//43 47//45
146
+ f 22//44 47//45 23//46
147
+ f 23//46 47//45 48//47
148
+ f 23//46 48//47 24//48
149
+ f 24//48 48//47 25//2
150
+ f 24//48 25//2 1//1
151
+ f 1//1 2//4 49//49
152
+ f 2//4 3//6 49//49
153
+ f 3//6 4//8 49//49
154
+ f 4//8 5//10 49//49
155
+ f 5//10 6//12 49//49
156
+ f 6//12 7//14 49//49
157
+ f 7//14 8//16 49//49
158
+ f 8//16 9//18 49//49
159
+ f 9//18 10//20 49//49
160
+ f 10//20 11//22 49//49
161
+ f 11//22 12//24 49//49
162
+ f 12//24 13//26 49//49
163
+ f 13//26 14//28 49//49
164
+ f 14//28 15//30 49//49
165
+ f 15//30 16//32 49//49
166
+ f 16//32 17//34 49//49
167
+ f 17//34 18//36 49//49
168
+ f 18//36 19//38 49//49
169
+ f 19//38 20//40 49//49
170
+ f 20//40 21//42 49//49
171
+ f 21//42 22//44 49//49
172
+ f 22//44 23//46 49//49
173
+ f 23//46 24//48 49//49
174
+ f 24//48 1//1 49//49
175
+ f 26//3 25//2 50//50
176
+ f 27//5 26//3 50//50
177
+ f 28//7 27//5 50//50
178
+ f 29//9 28//7 50//50
179
+ f 30//11 29//9 50//50
180
+ f 31//13 30//11 50//50
181
+ f 32//15 31//13 50//50
182
+ f 33//17 32//15 50//50
183
+ f 34//19 33//17 50//50
184
+ f 35//21 34//19 50//50
185
+ f 36//23 35//21 50//50
186
+ f 37//25 36//23 50//50
187
+ f 38//27 37//25 50//50
188
+ f 39//29 38//27 50//50
189
+ f 40//31 39//29 50//50
190
+ f 41//33 40//31 50//50
191
+ f 42//35 41//33 50//50
192
+ f 43//37 42//35 50//50
193
+ f 44//39 43//37 50//50
194
+ f 45//41 44//39 50//50
195
+ f 46//43 45//41 50//50
196
+ f 47//45 46//43 50//50
197
+ f 48//47 47//45 50//50
198
+ f 25//2 48//47 50//50
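The two primitive meshes added above (assets/ball.obj and assets/cylinder.obj) are plain Blender-exported OBJ files: `v` lines are vertex positions, `vn` lines are per-vertex normals, and each `f` entry references them as `vertex//normal` with no texture coordinates. Below is a minimal sketch of loading and sanity-checking them with trimesh (already used elsewhere in this commit); that they serve as ball/stick primitives for skeleton visualization (e.g. when `render_skeleton` is enabled in the configs) is an assumption, not something stated in the diff.

```python
import trimesh

# process=False keeps the raw vertex layout instead of merging duplicates.
ball = trimesh.load('AnimatableGaussians/assets/ball.obj', process=False)
cylinder = trimesh.load('AnimatableGaussians/assets/cylinder.obj', process=False)

# The cylinder is a 24-segment unit prism closed by two cap-center vertices
# (50 vertices, 96 triangles); the ball is a sphere of similar ring construction.
# Both are unit-scale and meant to be transformed per joint or bone when drawn.
print('ball:', ball.vertices.shape, ball.faces.shape)
print('cylinder:', cylinder.vertices.shape, cylinder.faces.shape)  # (50, 3), (96, 3) expected
```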
AnimatableGaussians/base_trainer.py ADDED
@@ -0,0 +1,258 @@
1
+ import os
2
+ import platform
3
+ import time
4
+ import yaml
5
+ import torch
6
+ import datetime
7
+ from torch.utils.tensorboard import SummaryWriter
8
+ import torch.utils.data
9
+ import numpy as np
10
+ import glob
11
+ import shutil
12
+
13
+ from utils.net_util import to_cuda
14
+
15
+
16
+ def worker_init_fn(worker_id): # set numpy's random seed
17
+ seed = torch.initial_seed()
18
+ seed = seed % (2 ** 32)
19
+ np.random.seed(seed + worker_id)
20
+
21
+
22
+ class BaseTrainer:
23
+ def __init__(self, opt):
24
+ self.opt = opt
25
+
26
+ self.dataset = None
27
+ self.network = None
28
+ self.net_dict = {}
29
+ self.optm_dict = {}
30
+ self.update_keys = None
31
+ self.lr_schedule_dict = {}
32
+ self.iter_idx = 0
33
+ self.epoch_idx = 0
34
+ self.iter_num = 9999999999
35
+
36
+ self.loss_weight = self.opt['train']['loss_weight']
37
+
38
+ @staticmethod
39
+ def load_pretrained(path, dict_):
40
+ data = torch.load(path)
41
+ for k in dict_:
42
+ if k in data:
43
+ print('# Loading %s...' % k)
44
+ dict_[k].load_state_dict(data[k])
45
+ else:
46
+ print('# %s not found!' % k)
47
+ return data.get('epoch_idx', None)
48
+
49
+ def load_ckpt(self, path, load_optm = True):
50
+ epoch_idx = self.load_pretrained(path + '/net.pt', self.net_dict)
51
+ if load_optm:
52
+ if os.path.exists(path + '/optm.pt'):
53
+ self.load_pretrained(path + '/optm.pt', self.optm_dict)
54
+ else:
55
+ print('# Optimizer not found!')
56
+ return epoch_idx
57
+
58
+ # @staticmethod
59
+ def save_trained(self, path, dict_):
60
+ data = {}
61
+ for k in dict_:
62
+ data[k] = dict_[k].state_dict()
63
+ data.update({
64
+ 'epoch_idx': self.epoch_idx,
65
+ })
66
+ torch.save(data, path)
67
+
68
+ def save_ckpt(self, path, save_optm = True):
69
+ self.save_trained(path + '/net.pt', self.net_dict)
70
+ if save_optm:
71
+ self.save_trained(path + '/optm.pt', self.optm_dict)
72
+
73
+ def zero_grad(self):
74
+ if self.update_keys is None:
75
+ update_keys = self.optm_dict.keys()
76
+ else:
77
+ update_keys = self.update_keys
78
+ for k in update_keys:
79
+ self.optm_dict[k].zero_grad()
80
+
81
+ def step(self):
82
+ if self.update_keys is None:
83
+ update_keys = self.optm_dict.keys()
84
+ else:
85
+ update_keys = self.update_keys
86
+ for k in update_keys:
87
+ self.optm_dict[k].step()
88
+
89
+ def update_lr(self, iter_idx):
90
+ lr_dict = {}
91
+ if self.update_keys is None:
92
+ update_keys = self.optm_dict.keys()
93
+ else:
94
+ update_keys = self.update_keys
95
+ for k in update_keys:
96
+ lr = self.lr_schedule_dict[k].get_learning_rate(iter_idx)
97
+ for param_group in self.optm_dict[k].param_groups:
98
+ param_group['lr'] = lr
99
+ lr_dict[k] = lr
100
+ return lr_dict
101
+
102
+ def set_dataset(self, dataset):
103
+ self.dataset = dataset
104
+
105
+ def set_network(self, network):
106
+ self.network = network
107
+
108
+ def set_net_dict(self, net_dict):
109
+ self.net_dict = net_dict
110
+
111
+ def set_optm_dict(self, optm_dict):
112
+ self.optm_dict = optm_dict
113
+
114
+ def set_update_keys(self, update_keys):
115
+ self.update_keys = update_keys
116
+
117
+ def set_lr_schedule_dict(self, lr_schedule_dict):
118
+ self.lr_schedule_dict = lr_schedule_dict
119
+
120
+ def set_train(self, flag = True):
121
+ if flag:
122
+ for k, net in self.net_dict.items():
123
+ if k in self.update_keys:
124
+ net.train()
125
+ else:
126
+ net.eval()
127
+ else:
128
+ for k, net in self.net_dict.items():
129
+ net.eval()
130
+
131
+ def train(self):
132
+ # log
133
+ os.makedirs(self.opt['train']['net_ckpt_dir'], exist_ok = True)
134
+ log_dir = self.opt['train']['net_ckpt_dir'] + '/' + datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
135
+ os.makedirs(log_dir, exist_ok = True)
136
+ writer = SummaryWriter(log_dir)
137
+ yaml.dump(self.opt, open(log_dir + '/config_bk.yaml', 'w'), sort_keys = False)
138
+
139
+ self.set_train()
140
+ self.dataset.training = True
141
+ batch_size = self.opt['train'].get('batch_size', 1)
142
+ num_workers = self.opt['train'].get('num_workers', 0)
143
+ dataloader = torch.utils.data.DataLoader(self.dataset,
144
+ batch_size = batch_size,
145
+ shuffle = True,
146
+ num_workers = num_workers,
147
+ worker_init_fn = worker_init_fn,
148
+ drop_last = True)
149
+ self.batch_num = len(self.dataset) // batch_size
150
+
151
+ if self.opt['train'].get('save_init_ckpt', False) and self.opt['train'].get('start_epoch', 0) == 0:
152
+ init_folder = self.opt['train']['net_ckpt_dir'] + '/init_ckpt'
153
+ if not os.path.exists(init_folder) or self.opt['train']['start_epoch'] == 0:
154
+ os.makedirs(init_folder, exist_ok = True)
155
+ self.save_ckpt(init_folder, False)
156
+ else:
157
+ print('# Init checkpoint has been saved!')
158
+
159
+ if self.opt['train']['prev_ckpt'] is not None:
160
+ start_epoch = self.load_ckpt(self.opt['train']['prev_ckpt']) + 1
161
+ else:
162
+ prev_ckpt_path = self.opt['train']['net_ckpt_dir'] + '/epoch_latest'
163
+ if os.path.exists(prev_ckpt_path):
164
+ start_epoch = self.load_ckpt(prev_ckpt_path) + 1
165
+ else:
166
+ start_epoch = None
167
+
168
+ if start_epoch is None:
169
+ start_epoch = self.opt['train'].get('start_epoch', 0)
170
+ end_epoch = self.opt['train'].get('end_epoch', 999)
171
+
172
+ forward_one_pass = self.forward_one_pass
173
+
174
+ for epoch_idx in range(start_epoch, end_epoch):
175
+ self.epoch_idx = epoch_idx
176
+ self.update_config_before_epoch(epoch_idx)
177
+ epoch_losses = dict()
178
+
179
+ time0 = time.time()
180
+ for batch_idx, items in enumerate(dataloader):
181
+ iter_idx = batch_idx + self.batch_num * epoch_idx
182
+ self.iter_idx = iter_idx
183
+ lr_dict = self.update_lr(iter_idx)
184
+ items = to_cuda(items)
185
+
186
+ loss, batch_losses = forward_one_pass(items)
187
+ # self.zero_grad()
188
+ # loss.backward()
189
+ # self.step()
190
+
191
+ # record batch loss
192
+ log_info = 'epoch %d, batch %d, ' % (epoch_idx, batch_idx)
193
+ log_info += 'lr: '
194
+ for k in lr_dict.keys():
195
+ log_info += '%s %e, ' % (k, lr_dict[k])
196
+ for key in batch_losses.keys():
197
+ log_info = log_info + ('%s: %f, ' % (key, batch_losses[key]))
198
+ writer.add_scalar('%s/Batch' % key, batch_losses[key], iter_idx)
199
+ if key in epoch_losses:
200
+ epoch_losses[key] += batch_losses[key]
201
+ else:
202
+ epoch_losses[key] = batch_losses[key]
203
+ print(log_info)
204
+
205
+ with open(os.path.join(log_dir, 'loss.txt'), 'a') as fp:
206
+ # record loss weight
207
+ if batch_idx == 0:
208
+ loss_weights_info = ''
209
+ for k in self.opt['train']['loss_weight'].keys():
210
+ loss_weights_info += '%s: %f, ' % (k, self.opt['train']['loss_weight'][k])
211
+ fp.write('# Loss weights: \n' + loss_weights_info + '\n')
212
+ fp.write(log_info + '\n')
213
+
214
+ if iter_idx % self.opt['train']['ckpt_interval']['batch'] == 0 and iter_idx != 0:
215
+ for folder in glob.glob(self.opt['train']['net_ckpt_dir'] + '/batch_*'):
216
+ shutil.rmtree(folder)
217
+ model_folder = self.opt['train']['net_ckpt_dir'] + '/batch_%d' % iter_idx
218
+ os.makedirs(model_folder, exist_ok = True)
219
+ self.save_ckpt(model_folder, save_optm = False)
220
+
221
+ if iter_idx % self.opt['train']['eval_interval'] == 0 and iter_idx != 0:
222
+ # if True:
223
+ self.mini_test()
224
+ self.set_train()
225
+ time1 = time.time()
226
+ print('One iteration costs %f secs' % (time1 - time0))
227
+ time0 = time1
228
+
229
+ if iter_idx == self.iter_num:
230
+ return
231
+
232
+ """ EPOCH """
233
+ # record epoch loss
234
+ for key in epoch_losses.keys():
235
+ epoch_losses[key] /= self.batch_num
236
+ writer.add_scalar('%s/Epoch' % key, epoch_losses[key], epoch_idx)
237
+
238
+ if epoch_idx % self.opt['train']['ckpt_interval']['epoch'] == 0:
239
+ model_folder = self.opt['train']['net_ckpt_dir'] + '/epoch_%d' % epoch_idx
240
+ os.makedirs(model_folder, exist_ok = True)
241
+ self.save_ckpt(model_folder)
242
+
243
+ if self.batch_num > 50:
244
+ latest_folder = self.opt['train']['net_ckpt_dir'] + '/epoch_latest'
245
+ os.makedirs(latest_folder, exist_ok = True)
246
+ self.save_ckpt(latest_folder)
247
+ writer.close()
248
+
249
+ @torch.no_grad()
250
+ def mini_test(self):
251
+ """ Test during training """
252
+ pass
253
+
254
+ def forward_one_pass(self, items):
255
+ raise NotImplementedError('"forward_one_pass" method is not implemented!')
256
+
257
+ def update_config_before_epoch(self, epoch_idx):
258
+ pass
AnimatableGaussians/cat.sh ADDED
File without changes
AnimatableGaussians/config.py ADDED
@@ -0,0 +1,35 @@
1
+ import torch
2
+ import numpy as np
3
+ import math
4
+ import os
5
+
6
+ device = torch.device('cuda:0')
7
+
8
+ # SMPL related
9
+ cano_smpl_pose = np.zeros(75, dtype = np.float32)
10
+ cano_smpl_pose[3+3*1+2] = math.radians(25)
11
+ cano_smpl_pose[3+3*2+2] = math.radians(-25)
12
+ cano_smpl_pose = torch.from_numpy(cano_smpl_pose)
13
+ cano_smpl_transl = cano_smpl_pose[:3]
14
+ cano_smpl_global_orient = cano_smpl_pose[3:6]
15
+ cano_smpl_body_pose = cano_smpl_pose[6:69]
16
+
17
+ # fist pose
18
+ left_hand_pose = torch.tensor([0.09001956135034561, 0.1604590266942978, -0.3295670449733734, 0.12445037066936493, -0.11897698789834976, -1.5051144361495972, -0.1194705069065094, -0.16281449794769287, -0.6292539834976196, -0.27713727951049805, 0.035170216113328934, -0.5893177390098572, -0.20759613811969757, 0.07492011040449142, -1.4485805034637451, -0.017797302454710007, -0.12478633224964142, -0.7844052314758301, -0.4157009720802307, -0.5140947103500366, -0.2961726784706116, -0.7421528100967407, -0.11505582183599472, -0.7972996830940247, -0.29345276951789856, -0.18898937106132507, -0.6230823397636414, -0.18764786422252655, -0.2696149945259094, -0.5542467832565308, -0.47717514634132385, -0.12663133442401886, -1.2747308015823364, -0.23940050601959229, -0.1586960405111313, -0.7655659914016724, 0.8745182156562805, 0.5848557353019714, -0.07204405218362808, -0.5052485466003418, 0.1797526329755783, 0.3281439244747162, 0.5276764035224915, -0.008714836090803146, -0.4373648762702942], dtype = torch.float32)
19
+ right_hand_pose = torch.tensor([0.034751810133457184, -0.12605343759059906, 0.5510415434837341, 0.19454114139080048, 0.11147838830947876, 1.4676157236099243, -0.14799435436725616, 0.17293521761894226, 0.4679432511329651, -0.3042353689670563, 0.007868679240345955, 0.8570928573608398, -0.1827319711446762, -0.07225851714611053, 1.307037591934204, -0.02989627793431282, 0.1208646297454834, 0.7142824530601501, -0.3403030335903168, 0.5368582606315613, 0.3839572072029114, -0.9722614884376526, 0.17358140647411346, 0.911861002445221, -0.29665058851242065, 0.21779759228229523, 0.7269846796989441, -0.15343312919139862, 0.3083758056163788, 0.7146623730659485, -0.5153037309646606, 0.1721675992012024, 1.2982604503631592, -0.2590428292751312, 0.12812566757202148, 0.7502076029777527, 0.8694817423820496, -0.5263001322746277, 0.06934576481580734, -0.4630220830440521, -0.19237111508846283, -0.25436165928840637, 0.5972414612770081, -0.08250168710947037, 0.5013565421104431], dtype = torch.float32)
20
+
21
+
22
+ # project
23
+ PROJ_DIR = os.path.dirname(os.path.realpath(__file__))
24
+
25
+ opt = dict()
26
+
27
+
28
+ def load_global_opt(path):
29
+ import yaml
30
+ global opt
31
+ opt = yaml.load(open(path, encoding = 'UTF-8'), Loader = yaml.FullLoader)
32
+
33
+ def set_opt(new_opt):
34
+ global opt
35
+ opt = new_opt
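config.py is a small global-configuration module: the canonical SMPL pose (an A-pose with the hips rotated by ±25 degrees) and the fist hand poses live as module-level tensors, PROJ_DIR points at the package directory, and `opt` is a global dict that entry scripts fill via `load_global_opt` or `set_opt`. A minimal usage sketch with an illustrative config path:

```python
import AnimatableGaussians.config as config

# Fill the global option dict from one of the YAML files under ./configs.
config.load_global_opt('AnimatableGaussians/configs/huawei_0425/avatar.yaml')  # illustrative path
print(config.opt['mode'])                            # 'train'
print(config.opt['train']['loss_weight']['lpips'])   # 0.1

# The 75-dim canonical pose splits into translation, global orientation and
# body pose exactly as sliced above; the remaining entries are untouched here.
print(config.cano_smpl_transl.shape)         # torch.Size([3])
print(config.cano_smpl_global_orient.shape)  # torch.Size([3])
print(config.cano_smpl_body_pose.shape)      # torch.Size([63])
```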
AnimatableGaussians/configs/awesome_amass_poses.yaml ADDED
@@ -0,0 +1,25 @@
1
+ # CMU sub-dataset
2
+ basketball:
3
+ - ./CMU/06/06_13_poses.npz
4
+ - ./CMU/06/06_14_poses.npz
5
+ tennis:
6
+ - ./CMU/02/02_08_poses.npz
7
+ - ./CMU/02/02_09_poses.npz
8
+ football:
9
+ - ./CMU/10/10_05_poses.npz
10
+ - ./CMU/11/11_01_poses.npz
11
+ punch:
12
+ - ./CMU/15/15_13_poses.npz
13
+ kick:
14
+ - ./CMU/144/144_05_poses.npz
15
+ others:
16
+ - ./CMU/144/144_28_poses.npz
17
+ dancing:
18
+ - ./CMU/131/131_03_poses.npz
19
+
20
+ # MPI_mosh sub-dataset
21
+ dancing2:
22
+ - ./MPI_mosh/00059/misc_poses.npz
23
+ - ./MPI_mosh/00093/irish_dance_poses.npz
24
+ - ./MPI_mosh/00093/misc_poses.npz
25
+ - ./MPI_mosh/50004/misc_poses.npz
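configs/awesome_amass_poses.yaml is simply a mapping from a motion category to AMASS `.npz` sequence paths, relative to an AMASS root directory. A short sketch of flattening it into one list of sequences; the root path and the flattening itself are assumptions about how the list is consumed:

```python
import os
import yaml

amass_root = '../data/AMASS'  # assumed root, matching the paths used by the other configs

with open('AnimatableGaussians/configs/awesome_amass_poses.yaml', encoding='UTF-8') as f:
    pose_sets = yaml.load(f, Loader=yaml.FullLoader)  # {category: [relative .npz paths]}

sequences = [os.path.normpath(os.path.join(amass_root, p))
             for paths in pose_sets.values() for p in paths]
print(len(sequences), 'AMASS sequences, e.g.', sequences[0])
```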
AnimatableGaussians/configs/huawei_0425/avatar.yaml ADDED
@@ -0,0 +1,75 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: huawei0425
6
+ data_dir: ../data/body_data
7
+ frame_range: &id001
8
+ - 124
9
+ - 144
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: ../data/body_data
54
+ frame_range: [0, 500]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/test_poses_ours.npz
58
+ frame_range: [0, 1000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 1.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ n_pca: 20
71
+ sigma_pca: 2.0
72
+ prev_ckpt: ../checkpoints/body_avatar
73
+ model:
74
+ with_viewdirs: true
75
+ random_style: false
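This avatar.yaml is the template that all the per-experiment configs below follow: a `train` section (data directory, `frame_range` as `[start, end, step]`, `used_cam_ids`, loss weights, checkpoint/eval intervals) and a `test` section (driving `pose_data`, `view_setting`, export flags, `prev_ckpt`). A small sketch of expanding the training frames and views from these fields; whether the dataset enumerates the full frame-by-camera cross product is an assumption about MvRgbDatasetAvatarReX, not something visible in this diff:

```python
import yaml

with open('AnimatableGaussians/configs/huawei_0425/avatar.yaml', encoding='UTF-8') as f:
    opt = yaml.load(f, Loader=yaml.FullLoader)

train_data = opt['train']['data']
start, end, step = train_data['frame_range']   # [124, 144, 1] in this config
frame_ids = list(range(start, end, step))      # 20 training frames
cam_ids = train_data['used_cam_ids']           # 14 camera ids (7 and 13 are left out)

# One (frame, camera) pair per sample is a common multi-view training layout.
samples = [(f, c) for f in frame_ids for c in cam_ids]
print(f'{len(frame_ids)} frames x {len(cam_ids)} views = {len(samples)} samples')
```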
AnimatableGaussians/configs/huawei_0425/avatar1.yaml ADDED
@@ -0,0 +1,75 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: dx1test
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
7
+ frame_range: &id001
8
+ - 124
9
+ - 144
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar1
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
54
+ frame_range: [0, 500]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_pred_new.npz
58
+ frame_range: [0, 128]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 1.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ n_pca: 20
71
+ sigma_pca: 2.0
72
+ prev_ckpt: ../checkpoints_new/body
73
+ model:
74
+ with_viewdirs: true
75
+ random_style: false
AnimatableGaussians/configs/huawei_0425/avatar2.yaml ADDED
@@ -0,0 +1,75 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: dx_long_1_debug
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
7
+ frame_range: &id001
8
+ - 124
9
+ - 144
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
54
+ frame_range: [0, 500]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_long_1_debug.npz
58
+ frame_range: [0, 270]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 1.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ n_pca: 20
71
+ sigma_pca: 2.0
72
+ prev_ckpt: ../checkpoints_new/body
73
+ model:
74
+ with_viewdirs: true
75
+ random_style: false
AnimatableGaussians/configs/huawei_0425/nzc.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: nzc_test_data_0916_comb_v2
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam_new
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam_new
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0916_comb_v2.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints_new_v2/body
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/huawei_0425/nzc_new.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0921_nzc_lz
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0916_comb_v2.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints_new/body
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/new0829/avatar.yaml ADDED
@@ -0,0 +1,75 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: new0829
6
+ data_dir: ../data/body_data
7
+ frame_range: &id001
8
+ - 124
9
+ - 144
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: ../data/body_data
54
+ frame_range: [0, 500]
55
+ subject_name: new0829
56
+ pose_data:
57
+ data_path: ../data/AMASS/test_poses_ours.npz
58
+ frame_range: [0, 1000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 1.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ n_pca: 20
71
+ sigma_pca: 2.0
72
+ prev_ckpt: ../checkpoints_new/body
73
+ model:
74
+ with_viewdirs: true
75
+ random_style: false
AnimatableGaussians/configs/pengcheng/0921_nzc_ckpt_ys.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0921_nzc_lz
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam_new
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/ag_gha/smplparam_lz
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0916_comb_v2.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints_cys/body12
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/pengcheng/0923_cys.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0923_cys
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0916_comb_v2.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints/body_ys
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/pengcheng/0924_nzc_new_pose.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0924_new_pose
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0924.npz
58
+ frame_range: [0, 200]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints/body_ys
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/pengcheng/0925_nzc_new_pose.yaml ADDED
@@ -0,0 +1,77 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0926_new_pose
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/0926_dx_happy.npz
58
+ frame_range: [0, 200]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ view_setting: front
63
+ render_view_idx: 13
64
+ global_orient: true
65
+ img_scale: 2.0
66
+ save_mesh: false
67
+ render_skeleton: false
68
+ save_tex_map: false
69
+ save_ply: true
70
+ fix_hand: true
71
+ fix_hand_id: 23
72
+ n_pca: 20
73
+ sigma_pca: 2.0
74
+ prev_ckpt: ../checkpoints/body_ys
75
+ model:
76
+ with_viewdirs: true
77
+ random_style: false
AnimatableGaussians/configs/pengcheng/0926_nzc_new_pose.yaml ADDED
@@ -0,0 +1,78 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0926_pose_long
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0926_long_v1.npz
58
+ frame_range: [0, 2000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: degree120
64
+ render_view_idx: 13
65
+ global_orient: true
66
+ img_scale: 2.0
67
+ save_mesh: false
68
+ render_skeleton: false
69
+ save_tex_map: false
70
+ save_ply: true
71
+ fix_hand: true
72
+ fix_hand_id: 23
73
+ n_pca: 20
74
+ sigma_pca: 2.0
75
+ prev_ckpt: ../checkpoints/body_ys
76
+ model:
77
+ with_viewdirs: true
78
+ random_style: false
AnimatableGaussians/configs/pengcheng/0929_lodge.yaml ADDED
@@ -0,0 +1,78 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0929_lodge_012
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/012.npz
58
+ frame_range: [0, 2000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: front
64
+ render_view_idx: 13
65
+ global_orient: true
66
+ img_scale: 2.0
67
+ save_mesh: false
68
+ render_skeleton: false
69
+ save_tex_map: false
70
+ save_ply: true
71
+ fix_hand: true
72
+ fix_hand_id: 23
73
+ n_pca: 20
74
+ sigma_pca: 2.0
75
+ prev_ckpt: ../checkpoints/body_ys
76
+ model:
77
+ with_viewdirs: true
78
+ random_style: false
AnimatableGaussians/configs/pengcheng/0930_sing.yaml ADDED
@@ -0,0 +1,78 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 0930_sing_free
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/0930_sing.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: free
64
+ render_view_idx: 13
65
+ global_orient: true
66
+ img_scale: 2.0
67
+ save_mesh: false
68
+ render_skeleton: false
69
+ save_tex_map: false
70
+ save_ply: true
71
+ fix_hand: true
72
+ fix_hand_id: 23
73
+ n_pca: 20
74
+ sigma_pca: 2.0
75
+ prev_ckpt: ../checkpoints/body_ys
76
+ model:
77
+ with_viewdirs: true
78
+ random_style: false
AnimatableGaussians/configs/pengcheng/1002_nzc_new_pose.yaml ADDED
@@ -0,0 +1,79 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 1002_nzc_360_no_global
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/dx_0926_long_v1.npz
58
+ frame_range: [0, 360]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ # view_setting: degree120
64
+ view_setting: free
65
+ render_view_idx: 13
66
+ global_orient: false
67
+ img_scale: 2.0
68
+ save_mesh: false
69
+ render_skeleton: false
70
+ save_tex_map: false
71
+ save_ply: true
72
+ fix_hand: true
73
+ fix_hand_id: 23
74
+ n_pca: 20
75
+ sigma_pca: 2.0
76
+ prev_ckpt: ../checkpoints/body_ys
77
+ model:
78
+ with_viewdirs: true
79
+ random_style: false
AnimatableGaussians/configs/pengcheng/1002_train_pose.yaml ADDED
@@ -0,0 +1,79 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 1002_train_pose
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/train_data_v4.npz
58
+ frame_range: [0, 300]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ # view_setting: degree120
64
+ view_setting: free
65
+ render_view_idx: 13
66
+ global_orient: true
67
+ img_scale: 2.0
68
+ save_mesh: false
69
+ render_skeleton: false
70
+ save_tex_map: false
71
+ save_ply: true
72
+ fix_hand: true
73
+ fix_hand_id: 23
74
+ n_pca: 20
75
+ sigma_pca: 2.0
76
+ prev_ckpt: ../checkpoints/body_ys
77
+ model:
78
+ with_viewdirs: true
79
+ random_style: false
AnimatableGaussians/configs/pengcheng/1003_cat_pose.yaml ADDED
@@ -0,0 +1,79 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 1003_cat_pose_false
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/1003_cat_data.npz
58
+ frame_range: [0, 2000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: degree120
64
+ # view_setting: free
65
+ render_view_idx: 13
66
+ global_orient: true
67
+ img_scale: 2.0
68
+ save_mesh: false
69
+ render_skeleton: false
70
+ save_tex_map: false
71
+ save_ply: true
72
+ fix_hand: true
73
+ fix_hand_id: 23
74
+ n_pca: 20
75
+ sigma_pca: 2.0
76
+ prev_ckpt: ../checkpoints/body_ys
77
+ model:
78
+ with_viewdirs: true
79
+ random_style: false
AnimatableGaussians/configs/pengcheng/1004_smooth_train_pose.yaml ADDED
@@ -0,0 +1,79 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 1006_smooth_train_pose
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/1004_smooth_train_data.npz
58
+ frame_range: [0, 2000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: degree120
64
+ # view_setting: free
65
+ render_view_idx: 13
66
+ global_orient: true
67
+ img_scale: 2.0
68
+ save_mesh: false
69
+ render_skeleton: false
70
+ save_tex_map: false
71
+ save_ply: true
72
+ fix_hand: true
73
+ fix_hand_id: 23
74
+ n_pca: 20
75
+ sigma_pca: 2.0
76
+ prev_ckpt: ../checkpoints/body_ys
77
+ model:
78
+ with_viewdirs: true
79
+ random_style: false
AnimatableGaussians/configs/pengcheng/1007_slow10.yaml ADDED
@@ -0,0 +1,79 @@
1
+ mode: train
2
+ train:
3
+ dataset: MvRgbDatasetAvatarReX
4
+ data:
5
+ subject_name: 1007_slow10
6
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
7
+ frame_range: &id001
8
+ - 0
9
+ - 200
10
+ - 1
11
+ used_cam_ids:
12
+ - 0
13
+ - 1
14
+ - 2
15
+ - 3
16
+ - 4
17
+ - 5
18
+ - 6
19
+ - 8
20
+ - 9
21
+ - 10
22
+ - 11
23
+ - 12
24
+ - 14
25
+ - 15
26
+ load_smpl_pos_map: true
27
+ pretrained_dir: null
28
+ net_ckpt_dir: ./results/huawei0425/avatar2
29
+ prev_ckpt: null
30
+ ckpt_interval:
31
+ epoch: 10
32
+ batch: 50000
33
+ eval_interval: 1000
34
+ eval_training_ids:
35
+ - 190
36
+ - 7
37
+ eval_testing_ids:
38
+ - 354
39
+ - 7
40
+ eval_img_factor: 1.0
41
+ lr_init: 0.0005
42
+ loss_weight:
43
+ l1: 1.0
44
+ lpips: 0.1
45
+ offset: 0.005
46
+ finetune_color: false
47
+ batch_size: 1
48
+ num_workers: 8
49
+ random_bg_color: true
50
+ test:
51
+ dataset: MvRgbDatasetAvatarReX
52
+ data:
53
+ data_dir: /home/pengc02/pengcheng/projects/gaussian_avatar/avatar_final/data/pos_map_ys/body_mix
54
+ frame_range: [0, 800]
55
+ subject_name: huawei0425
56
+ pose_data:
57
+ data_path: ../data/AMASS/1007_train_data_slow10.npz
58
+ frame_range: [0, 2000]
59
+ # data_path: Z:/Data/Pose/AMASS/CMU/06/06_13_poses.npz
60
+ # data_path: Z:/Data/Pose/AMASS/CMU/10/10_05_poses.npz
61
+ # frame_interval: 4
62
+ # view_setting: front
63
+ view_setting: degree90
64
+ # view_setting: free
65
+ render_view_idx: 13
66
+ global_orient: true
67
+ img_scale: 2.0
68
+ save_mesh: false
69
+ render_skeleton: false
70
+ save_tex_map: false
71
+ save_ply: true
72
+ fix_hand: true
73
+ fix_hand_id: 23
74
+ n_pca: 20
75
+ sigma_pca: 2.0
76
+ prev_ckpt: ../checkpoints/body_ys
77
+ model:
78
+ with_viewdirs: true
79
+ random_style: false
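Each test.pose_data.data_path above points at an AMASS-style .npz file. Judging from how PoseDataset (added below in dataset/dataset_pose.py) slices it, the file is expected to contain a per-frame axis-angle vector 'poses' and a root translation 'trans'; the 156-column layout and the file name below are assumptions inferred from that slicing, not a documented format:

    import numpy as np

    n_frames = 2000
    # [:3] global_orient, [3:66] body, [66:111] left hand, [111:156] right hand
    poses = np.zeros((n_frames, 156), np.float32)
    trans = np.zeros((n_frames, 3), np.float32)
    np.savez('../data/AMASS/my_driving_poses.npz', poses = poses, trans = trans)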
AnimatableGaussians/dataset/__pycache__/commons.cpython-310.pyc ADDED
Binary file (1.55 kB).
AnimatableGaussians/dataset/__pycache__/commons.cpython-38.pyc ADDED
Binary file (1.56 kB).
AnimatableGaussians/dataset/__pycache__/dataset_mv_rgb.cpython-310.pyc ADDED
Binary file (16.4 kB).
AnimatableGaussians/dataset/__pycache__/dataset_mv_rgb.cpython-38.pyc ADDED
Binary file (16.2 kB).
AnimatableGaussians/dataset/__pycache__/dataset_pose.cpython-310.pyc ADDED
Binary file (14 kB).
AnimatableGaussians/dataset/__pycache__/dataset_pose.cpython-38.pyc ADDED
Binary file (15.4 kB).
AnimatableGaussians/dataset/commons.py ADDED
@@ -0,0 +1,31 @@
1
+ import numpy as np
2
+ import torch
3
+ import trimesh
4
+
5
+ import AnimatableGaussians.config as config
6
+
7
+
8
+ def _initialize_hands(self):
9
+ smplx_lhand_to_mano_rhand_data = np.load(config.PROJ_DIR + '/smpl_files/mano/smplx_lhand_to_mano_rhand.npz', allow_pickle = True)
10
+ smplx_rhand_to_mano_rhand_data = np.load(config.PROJ_DIR + '/smpl_files/mano/smplx_rhand_to_mano_rhand.npz', allow_pickle = True)
11
+ smpl_lhand_vert_id = np.copy(smplx_lhand_to_mano_rhand_data['smpl_vert_id_to_mano'])
12
+ smpl_rhand_vert_id = np.copy(smplx_rhand_to_mano_rhand_data['smpl_vert_id_to_mano'])
13
+ self.smpl_lhand_vert_id = torch.from_numpy(smpl_lhand_vert_id)
14
+ self.smpl_rhand_vert_id = torch.from_numpy(smpl_rhand_vert_id)
15
+ self.smpl_hands_vert_id = torch.cat([self.smpl_lhand_vert_id, self.smpl_rhand_vert_id], 0)
16
+ mano_face_closed = np.loadtxt(config.PROJ_DIR + '/smpl_files/mano/mano_face_close.txt').astype(np.int64)
17
+ self.mano_face_closed = torch.from_numpy(mano_face_closed)
18
+ self.mano_face_closed_turned = self.mano_face_closed[:, [2, 1, 0]]
19
+ self.mano_face_closed_2hand = torch.cat([self.mano_face_closed[:, [2, 1, 0]], self.mano_face_closed + self.smpl_lhand_vert_id.shape[0]], 0)
20
+
21
+
22
+ def generate_two_manos(self, smplx_verts: torch.Tensor):
23
+ left_mano_v = smplx_verts[self.smpl_lhand_vert_id].cpu().numpy()
24
+ left_mano_trimesh = trimesh.Trimesh(left_mano_v, self.mano_face_closed_turned, process = False)
25
+ left_mano_n = left_mano_trimesh.vertex_normals.astype(np.float32)
26
+
27
+ right_mano_v = smplx_verts[self.smpl_rhand_vert_id].cpu().numpy()
28
+ right_mano_trimesh = trimesh.Trimesh(right_mano_v, self.mano_face_closed, process = False)
29
+ right_mano_n = right_mano_trimesh.vertex_normals.astype(np.float32)
30
+
31
+ return left_mano_v, left_mano_n, right_mano_v, right_mano_n
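Both helpers above take an explicit self and are called as plain module functions rather than methods, so any object that exposes the attributes they set up can reuse them; the dataset classes added below do exactly that. A stripped-down illustration of the pattern (the class name is hypothetical):

    import torch
    import AnimatableGaussians.dataset.commons as commons

    class SomeAvatarDataset:
        def __init__(self, cano_smpl_vertices: torch.Tensor):
            # populates self.smpl_lhand_vert_id, self.smpl_rhand_vert_id, self.mano_face_closed, ...
            commons._initialize_hands(self)
            # builds left/right MANO vertices and vertex normals from the canonical SMPL-X vertices
            (self.left_cano_mano_v, self.left_cano_mano_n,
             self.right_cano_mano_v, self.right_cano_mano_n) = commons.generate_two_manos(self, cano_smpl_vertices)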
AnimatableGaussians/dataset/dataset_mv_rgb.py ADDED
@@ -0,0 +1,506 @@
1
+ import glob
2
+ import os
3
+ import numpy as np
4
+ import cv2 as cv
5
6
+ import torch
7
+ from torch.utils.data import Dataset
8
+
9
+ import AnimatableGaussians.smplx as smplx
10
+ import AnimatableGaussians.config as config
11
+ import AnimatableGaussians.utils.nerf_util as nerf_util
12
+ import AnimatableGaussians.utils.visualize_util as visualize_util
13
+ import AnimatableGaussians.dataset.commons as commons
14
+
15
+
16
+ class MvRgbDatasetBase(Dataset):
17
+ @torch.no_grad()
18
+ def __init__(
19
+ self,
20
+ data_dir,
21
+ frame_range = None,
22
+ used_cam_ids = None,
23
+ training = True,
24
+ subject_name = None,
25
+ load_smpl_pos_map = False,
26
+ load_smpl_nml_map = False,
27
+ mode = '3dgs'
28
+ ):
29
+ super(MvRgbDatasetBase, self).__init__()
30
+
31
+ self.data_dir = data_dir
32
+ self.training = training
33
+ self.subject_name = subject_name
34
+ if self.subject_name is None:
35
+ self.subject_name = os.path.basename(self.data_dir)
36
+ self.load_smpl_pos_map = load_smpl_pos_map
37
+ self.load_smpl_nml_map = load_smpl_nml_map
38
+ self.mode = mode # '3dgs' or 'nerf'
39
+
40
+ self.load_cam_data()
41
+ self.load_smpl_data()
42
+
43
+ self.smpl_model = smplx.SMPLX(model_path = config.PROJ_DIR + '/smpl_files/smplx', gender = 'neutral', use_pca = False, num_pca_comps = 45, flat_hand_mean = True, batch_size = 1)
44
+ pose_list = list(range(self.smpl_data['body_pose'].shape[0]))
45
+ if frame_range is not None:
46
+ # print('# Selected frame range: ', frame_range)
47
+ # print(isinstance(frame_range, list))
48
+ # print(type(frame_range))
49
+ # to list
50
+ frame_range = list(frame_range)
51
+ if isinstance(frame_range, list):
52
+ if len(frame_range) == 2:
53
+ print(f'# Selected frame indices: range({frame_range[0]}, {frame_range[1]})')
54
+ frame_range = range(frame_range[0], frame_range[1])
55
+ elif len(frame_range) == 3:
56
+ print(f'# Selected frame indices: range({frame_range[0]}, {frame_range[1]}, {frame_range[2]})')
57
+ frame_range = range(frame_range[0], frame_range[1], frame_range[2])
58
+ elif isinstance(frame_range, str):
59
+ frame_range = np.loadtxt(self.data_dir + '/' + frame_range).astype(np.int64).tolist()
60
+ print(f'# Selected frame indices: {frame_range}')
61
+ else:
62
+ raise TypeError('Invalid frame_range!')
63
+ self.pose_list = list(frame_range)
64
+ else:
65
+ self.pose_list = pose_list
66
+
67
+ if self.training:
68
+ if used_cam_ids is None:
69
+ self.used_cam_ids = list(range(self.view_num))
70
+ else:
71
+ self.used_cam_ids = used_cam_ids
72
+ print('# Used camera ids: ', self.used_cam_ids)
73
+ self.data_list = []
74
+ for pose_idx in self.pose_list:
75
+ for view_idx in self.used_cam_ids:
76
+ self.data_list.append((pose_idx, view_idx))
77
+ # filter missing files
78
+ self.filter_missing_files()
79
+
80
+ print('# Dataset contains %d items' % len(self))
81
+
82
+ # SMPL related
83
+ ret = self.smpl_model.forward(betas = self.smpl_data['betas'][0][None],
84
+ global_orient = config.cano_smpl_global_orient[None],
85
+ transl = config.cano_smpl_transl[None],
86
+ body_pose = config.cano_smpl_body_pose[None])
87
+
88
+ self.cano_smpl = {k: v[0] for k, v in ret.items() if isinstance(v, torch.Tensor)}
89
+ self.inv_cano_jnt_mats = torch.linalg.inv(self.cano_smpl['A'])
90
+ min_xyz = self.cano_smpl['vertices'].min(0)[0]
91
+ max_xyz = self.cano_smpl['vertices'].max(0)[0]
92
+ self.cano_smpl_center = 0.5 * (min_xyz + max_xyz)
93
+ min_xyz[:2] -= 0.05
94
+ max_xyz[:2] += 0.05
95
+ min_xyz[2] -= 0.15
96
+ max_xyz[2] += 0.15
97
+ self.cano_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).numpy()
98
+ self.smpl_faces = self.smpl_model.faces.astype(np.int32)
99
+
100
+ commons._initialize_hands(self)
101
+
102
+ def __len__(self):
103
+ if self.training:
104
+ return len(self.data_list)
105
+ else:
106
+ return len(self.pose_list)
107
+
108
+ def __getitem__(self, index):
109
+ return self.getitem(index, self.training)
110
+
111
+ def getitem(self, index, training = True, **kwargs):
112
+ if training or kwargs.get('eval', False): # training or evaluation
113
+ pose_idx, view_idx = self.data_list[index]
114
+ pose_idx = kwargs['pose_idx'] if 'pose_idx' in kwargs else pose_idx
115
+ view_idx = kwargs['view_idx'] if 'view_idx' in kwargs else view_idx
116
+ data_idx = (pose_idx, view_idx)
117
+ if not training:
118
+ print('data index: (%d, %d)' % (pose_idx, view_idx))
119
+ else: # testing
120
+ pose_idx = self.pose_list[index]
121
+ data_idx = pose_idx
122
+ print('data index: %d' % pose_idx)
123
+
124
+ # SMPL
125
+ with torch.no_grad():
126
+ live_smpl = self.smpl_model.forward(
127
+ betas = self.smpl_data['betas'][0][None],
128
+ global_orient = self.smpl_data['global_orient'][pose_idx][None],
129
+ transl = self.smpl_data['transl'][pose_idx][None],
130
+ body_pose = self.smpl_data['body_pose'][pose_idx][None],
131
+ jaw_pose = self.smpl_data['jaw_pose'][pose_idx][None],
132
+ expression = self.smpl_data['expression'][pose_idx][None],
133
+ left_hand_pose = self.smpl_data['left_hand_pose'][pose_idx][None],
134
+ right_hand_pose = self.smpl_data['right_hand_pose'][pose_idx][None]
135
+ )
136
+ cano_smpl = self.smpl_model.forward(
137
+ betas = self.smpl_data['betas'][0][None],
138
+ global_orient = config.cano_smpl_global_orient[None],
139
+ transl = config.cano_smpl_transl[None],
140
+ body_pose = config.cano_smpl_body_pose[None],
141
+ jaw_pose = self.smpl_data['jaw_pose'][pose_idx][None],
142
+ expression = self.smpl_data['expression'][pose_idx][None],
143
+ )
144
+ live_smpl_woRoot = self.smpl_model.forward(
145
+ betas = self.smpl_data['betas'][0][None],
146
+ body_pose = self.smpl_data['body_pose'][pose_idx][None],
147
+ jaw_pose = self.smpl_data['jaw_pose'][pose_idx][None],
148
+ expression = self.smpl_data['expression'][pose_idx][None],
149
+ )
150
+
151
+ data_item = dict()
152
+ if self.load_smpl_pos_map:
153
+ smpl_pos_map = cv.imread(self.data_dir + '/smpl_pos_map/%08d.exr' % pose_idx, cv.IMREAD_UNCHANGED)
154
+ pos_map_size = smpl_pos_map.shape[1] // 2
155
+ smpl_pos_map = np.concatenate([smpl_pos_map[:, :pos_map_size], smpl_pos_map[:, pos_map_size:]], 2)
156
+ smpl_pos_map = smpl_pos_map.transpose((2, 0, 1))
157
+ data_item['smpl_pos_map'] = smpl_pos_map
158
+
159
+ if self.load_smpl_nml_map:
160
+ smpl_nml_map = cv.imread(self.data_dir + '/smpl_nml_map/%08d.jpg' % pose_idx, cv.IMREAD_UNCHANGED)
161
+ smpl_nml_map = (smpl_nml_map / 255.).astype(np.float32)
162
+ nml_map_size = smpl_nml_map.shape[1] // 2
163
+ smpl_nml_map = np.concatenate([smpl_nml_map[:, :nml_map_size], smpl_nml_map[:, nml_map_size:]], 2)
164
+ smpl_nml_map = smpl_nml_map.transpose((2, 0, 1))
165
+ data_item['smpl_nml_map'] = smpl_nml_map
166
+
167
+ data_item['joints'] = live_smpl.joints[0, :22]
168
+ data_item['kin_parent'] = self.smpl_model.parents[:22].to(torch.long)
169
+ data_item['item_idx'] = index
170
+ data_item['data_idx'] = data_idx
171
+ data_item['time_stamp'] = np.array(pose_idx, np.float32)
172
+ data_item['global_orient'] = self.smpl_data['global_orient'][pose_idx]
173
+ data_item['transl'] = self.smpl_data['transl'][pose_idx]
174
+ data_item['live_smpl_v'] = live_smpl.vertices[0]
175
+ data_item['live_smpl_v_woRoot'] = live_smpl_woRoot.vertices[0]
176
+ data_item['cano_smpl_v'] = cano_smpl.vertices[0]
177
+ data_item['cano_jnts'] = cano_smpl.joints[0]
178
+ data_item['cano2live_jnt_mats'] = torch.matmul(live_smpl.A[0], torch.linalg.inv(cano_smpl.A[0]))
179
+ data_item['cano2live_jnt_mats_woRoot'] = torch.matmul(live_smpl_woRoot.A[0], torch.linalg.inv(cano_smpl.A[0]))
180
+ data_item['cano_smpl_center'] = self.cano_smpl_center
181
+ data_item['cano_bounds'] = self.cano_bounds
182
+ data_item['smpl_faces'] = self.smpl_faces
183
+ min_xyz = live_smpl.vertices[0].min(0)[0] - 0.15
184
+ max_xyz = live_smpl.vertices[0].max(0)[0] + 0.15
185
+ live_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).numpy()
186
+ data_item['live_bounds'] = live_bounds
187
+
188
+ if training:
189
+ color_img, mask_img = self.load_color_mask_images(pose_idx, view_idx)
190
+
191
+ color_img = (color_img / 255.).astype(np.float32)
192
+
193
+ boundary_mask_img, mask_img = self.get_boundary_mask(mask_img)
194
+
195
+ if self.mode == '3dgs':
196
+ data_item.update({
197
+ 'img_h': color_img.shape[0],
198
+ 'img_w': color_img.shape[1],
199
+ 'extr': self.extr_mats[view_idx],
200
+ 'intr': self.intr_mats[view_idx],
201
+ 'color_img': color_img,
202
+ 'mask_img': mask_img,
203
+ 'boundary_mask_img': boundary_mask_img
204
+ })
205
+ elif self.mode == 'nerf':
206
+ depth_img = np.zeros(color_img.shape[:2], np.float32)
207
+ nerf_random = nerf_util.sample_randomly_for_nerf_rendering(
208
+ color_img, mask_img, depth_img,
209
+ self.extr_mats[view_idx], self.intr_mats[view_idx],
210
+ live_bounds,
211
+ unsample_region_mask = boundary_mask_img
212
+ )
213
+ data_item.update({
214
+ 'nerf_random': nerf_random,
215
+ 'extr': self.extr_mats[view_idx],
216
+ 'intr': self.intr_mats[view_idx]
217
+ })
218
+ else:
219
+ raise ValueError('Invalid dataset mode!')
220
+ else:
221
+ """ synthesis config """
222
+ img_h = 512 if 'img_h' not in kwargs else kwargs['img_h']
223
+ img_w = 512 if 'img_w' not in kwargs else kwargs['img_w']
224
+ intr = np.array([[550, 0, 256], [0, 550, 256], [0, 0, 1]], np.float32) if 'intr' not in kwargs else kwargs['intr']
225
+ if 'extr' not in kwargs:
226
+ extr = visualize_util.calc_front_mv(live_bounds.mean(0), tar_pos = np.array([0, 0, 2.5]))
227
+ else:
228
+ extr = kwargs['extr']
229
+
230
+ data_item.update({
231
+ 'img_h': img_h,
232
+ 'img_w': img_w,
233
+ 'extr': extr,
234
+ 'intr': intr
235
+ })
236
+
237
+ if self.mode == 'nerf' or (self.mode == '3dgs' and not training):
238
+ # mano
239
+ data_item['left_cano_mano_v'], data_item['left_cano_mano_n'], data_item['right_cano_mano_v'], data_item['right_cano_mano_n'] \
240
+ = commons.generate_two_manos(self, self.cano_smpl['vertices'])
241
+ data_item['left_live_mano_v'], data_item['left_live_mano_n'], data_item['right_live_mano_v'], data_item['right_live_mano_n'] \
242
+ = commons.generate_two_manos(self, live_smpl.vertices[0])
243
+
244
+ return data_item
245
+
246
+ def load_cam_data(self):
247
+ """
248
+ Initialize:
249
+ self.cam_names, self.view_num, self.extr_mats, self.intr_mats,
250
+ self.img_widths, self.img_heights
251
+ """
252
+ raise NotImplementedError
253
+
254
+ def load_smpl_data(self):
255
+ """
256
+ Initialize:
257
+ self.smpl_data, a dict including ['body_pose', 'global_orient', 'transl', 'betas', ...]
258
+ """
259
+ smpl_data = np.load(self.data_dir + '/smpl_params.npz', allow_pickle = True)
260
+ smpl_data = dict(smpl_data)
261
+ self.smpl_data = {k: torch.from_numpy(v.astype(np.float32)) for k, v in smpl_data.items()}
262
+
263
+ def filter_missing_files(self):
264
+ pass
265
+
266
+ def load_color_mask_images(self, pose_idx, view_idx):
267
+ raise NotImplementedError
268
+
269
+ @staticmethod
270
+ def get_boundary_mask(mask, kernel_size = 5):
271
+ """
272
+ :param mask: np.uint8
273
+ :param kernel_size:
274
+ :return:
275
+ """
276
+ mask_bk = mask.copy()
277
+ thres = 128
278
+ mask[mask < thres] = 0
279
+ mask[mask >= thres] = 1
280
+ kernel = np.ones((kernel_size, kernel_size), np.uint8)
281
+ mask_erode = cv.erode(mask.copy(), kernel)
282
+ mask_dilate = cv.dilate(mask.copy(), kernel)
283
+ boundary_mask = (mask_dilate - mask_erode) == 1
284
+ boundary_mask = np.logical_or(boundary_mask,
285
+ np.logical_and(mask_bk > 5, mask_bk < 250))
286
+
287
+ # boundary_mask_resized = cv.resize(boundary_mask.astype(np.uint8), (0, 0), fx = 0.5, fy = 0.5)
288
+ # cv.imshow('boundary_mask', boundary_mask_resized.astype(np.uint8) * 255)
289
+ # cv.waitKey(0)
290
+
291
+ return boundary_mask, mask == 1
292
+
293
+ def compute_pca(self, n_components = 10):
294
+ from sklearn.decomposition import PCA
295
+ from tqdm import tqdm
296
+ import joblib
297
+
298
+ if not os.path.exists(self.data_dir + '/smpl_pos_map/pca_%d.ckpt' % n_components):
299
+ pose_conds = []
300
+ mask = None
301
+ for pose_idx in tqdm(self.pose_list, desc = 'Loading position maps...'):
302
+ pose_map = cv.imread(self.data_dir + '/smpl_pos_map/%08d.exr' % pose_idx, cv.IMREAD_UNCHANGED)
303
+ pose_map = pose_map[:, :pose_map.shape[1] // 2]
304
+ if mask is None:
305
+ mask = np.linalg.norm(pose_map, axis = -1) > 1e-6
306
+ pose_conds.append(pose_map[mask])
307
+ pose_conds = np.stack(pose_conds, 0)
308
+ pose_conds = pose_conds.reshape(pose_conds.shape[0], -1)
309
+ self.pca = PCA(n_components = n_components)
310
+ self.pca.fit(pose_conds)
311
+ joblib.dump(self.pca, self.data_dir + '/smpl_pos_map/pca_%d.ckpt' % n_components)
312
+ self.pos_map_mask = mask
313
+ else:
314
+ self.pca = joblib.load(self.data_dir + '/smpl_pos_map/pca_%d.ckpt' % n_components)
315
+ pose_map = cv.imread(sorted(glob.glob(self.data_dir + '/smpl_pos_map/0*.exr'))[0], cv.IMREAD_UNCHANGED)
316
+ pose_map = pose_map[:, :pose_map.shape[1] // 2]
317
+ self.pos_map_mask = np.linalg.norm(pose_map, axis = -1) > 1e-6
318
+
319
+ def transform_pca(self, pose_conds, sigma_pca = 2.):
320
+ pose_conds = pose_conds.reshape(1, -1)
321
+ lowdim_pose_conds = self.pca.transform(pose_conds)
322
+ std = np.sqrt(self.pca.explained_variance_)
323
+ lowdim_pose_conds = np.maximum(lowdim_pose_conds, -sigma_pca * std)
324
+ lowdim_pose_conds = np.minimum(lowdim_pose_conds, sigma_pca * std)
325
+ new_pose_conds = self.pca.inverse_transform(lowdim_pose_conds)
326
+ new_pose_conds = new_pose_conds.reshape(-1, 3)
327
+ return new_pose_conds
328
+
329
+
330
+ class MvRgbDatasetTHuman4(MvRgbDatasetBase):
331
+ def __init__(
332
+ self,
333
+ data_dir,
334
+ frame_range = None,
335
+ used_cam_ids = None,
336
+ training = True,
337
+ subject_name = None,
338
+ load_smpl_pos_map = False,
339
+ load_smpl_nml_map = False,
340
+ mode = '3dgs'
341
+ ):
342
+ super(MvRgbDatasetTHuman4, self).__init__(
343
+ data_dir,
344
+ frame_range,
345
+ used_cam_ids,
346
+ training,
347
+ subject_name,
348
+ load_smpl_pos_map,
349
+ load_smpl_nml_map,
350
+ mode
351
+ )
352
+
353
+ def load_cam_data(self):
354
+ import json
355
+ cam_data = json.load(open(self.data_dir + '/calibration.json', 'r'))
356
+ self.view_num = len(cam_data)
357
+ self.extr_mats = []
358
+ self.cam_names = ['cam%02d' % view_idx for view_idx in range(self.view_num)]
359
+ for view_idx in range(self.view_num):
360
+ extr_mat = np.identity(4, np.float32)
361
+ extr_mat[:3, :3] = np.array(cam_data['cam%02d' % view_idx]['R'], np.float32).reshape(3, 3)
362
+ extr_mat[:3, 3] = np.array(cam_data['cam%02d' % view_idx]['T'], np.float32)
363
+ self.extr_mats.append(extr_mat)
364
+ self.intr_mats = [np.array(cam_data['cam%02d' % view_idx]['K'], np.float32).reshape(3, 3) for view_idx in range(self.view_num)]
365
+ self.img_heights = [cam_data['cam%02d' % view_idx]['imgSize'][1] for view_idx in range(self.view_num)]
366
+ self.img_widths = [cam_data['cam%02d' % view_idx]['imgSize'][0] for view_idx in range(self.view_num)]
367
+
368
+ def filter_missing_files(self):
369
+ missing_data_list = []
370
+ with open(self.data_dir + '/missing_img_files.txt', 'r') as fp:
371
+ lines = fp.readlines()
372
+ for line in lines:
373
+ line = line.replace('\\', '/') # handle both Windows and Unix path separators
374
+ frame_idx = int(os.path.basename(line).replace('.jpg', ''))
375
+ view_idx = int(os.path.basename(os.path.dirname(line)).replace('cam', ''))
376
+ missing_data_list.append((frame_idx, view_idx))
377
+ for missing_data_idx in missing_data_list:
378
+ if missing_data_idx in self.data_list:
379
+ self.data_list.remove(missing_data_idx)
380
+
381
+ def load_color_mask_images(self, pose_idx, view_idx):
382
+ color_img = cv.imread(self.data_dir + '/images/cam%02d/%08d.jpg' % (view_idx, pose_idx), cv.IMREAD_UNCHANGED)
383
+ mask_img = cv.imread(self.data_dir + '/masks/cam%02d/%08d.jpg' % (view_idx, pose_idx), cv.IMREAD_UNCHANGED)
384
+ return color_img, mask_img
385
+
386
+
387
+ class MvRgbDatasetAvatarReX(MvRgbDatasetBase):
388
+ def __init__(
389
+ self,
390
+ data_dir,
391
+ frame_range = None,
392
+ used_cam_ids = None,
393
+ training = True,
394
+ subject_name = None,
395
+ load_smpl_pos_map = False,
396
+ load_smpl_nml_map = False,
397
+ mode = '3dgs'
398
+ ):
399
+ super(MvRgbDatasetAvatarReX, self).__init__(
400
+ data_dir,
401
+ frame_range,
402
+ used_cam_ids,
403
+ training,
404
+ subject_name,
405
+ load_smpl_pos_map,
406
+ load_smpl_nml_map,
407
+ mode
408
+ )
409
+
410
+ def load_cam_data(self):
411
+ import json
412
+ cam_data = json.load(open(self.data_dir + '/calibration_full.json', 'r'))
413
+ self.cam_names = list(cam_data.keys())
414
+ self.view_num = len(self.cam_names)
415
+ self.extr_mats = []
416
+ for view_idx in range(self.view_num):
417
+ extr_mat = np.identity(4, np.float32)
418
+ extr_mat[:3, :3] = np.array(cam_data[self.cam_names[view_idx]]['R'], np.float32).reshape(3, 3)
419
+ extr_mat[:3, 3] = np.array(cam_data[self.cam_names[view_idx]]['T'], np.float32)
420
+ self.extr_mats.append(extr_mat)
421
+ self.intr_mats = [np.array(cam_data[self.cam_names[view_idx]]['K'], np.float32).reshape(3, 3) for view_idx in range(self.view_num)]
422
+ self.img_heights = [cam_data[self.cam_names[view_idx]]['imgSize'][1] for view_idx in range(self.view_num)]
423
+ self.img_widths = [cam_data[self.cam_names[view_idx]]['imgSize'][0] for view_idx in range(self.view_num)]
424
+
425
+ def filter_missing_files(self):
426
+ if os.path.exists(self.data_dir + '/missing_img_files.txt'):
427
+ missing_data_list = []
428
+ with open(self.data_dir + '/missing_img_files.txt', 'r') as fp:
429
+ lines = fp.readlines()
430
+ for line in lines:
431
+ line = line.replace('\\', '/') # handle both Windows and Unix path separators
432
+ frame_idx = int(os.path.basename(line).replace('.jpg', ''))
433
+ view_idx = self.cam_names.index(os.path.basename(os.path.dirname(line)))
434
+ missing_data_list.append((frame_idx, view_idx))
435
+ for missing_data_idx in missing_data_list:
436
+ if missing_data_idx in self.data_list:
437
+ self.data_list.remove(missing_data_idx)
438
+
439
+ def load_color_mask_images(self, pose_idx, view_idx):
440
+ cam_name = self.cam_names[view_idx]
441
+ color_img = cv.imread(self.data_dir + '/%s/%08d.jpg' % (cam_name, pose_idx), cv.IMREAD_UNCHANGED)
442
+ mask_img = cv.imread(self.data_dir + '/%s/mask/pha/%08d.jpg' % (cam_name, pose_idx), cv.IMREAD_UNCHANGED)
443
+ return color_img, mask_img
444
+
445
+
446
+ class MvRgbDatasetActorsHQ(MvRgbDatasetBase):
447
+ def __init__(
448
+ self,
449
+ data_dir,
450
+ frame_range = None,
451
+ used_cam_ids = None,
452
+ training = True,
453
+ subject_name = None,
454
+ load_smpl_pos_map = False,
455
+ load_smpl_nml_map = False,
456
+ mode = '3dgs'
457
+ ):
458
+ super(MvRgbDatasetActorsHQ, self).__init__(
459
+ data_dir,
460
+ frame_range,
461
+ used_cam_ids,
462
+ training,
463
+ subject_name,
464
+ load_smpl_pos_map,
465
+ load_smpl_nml_map,
466
+ mode
467
+ )
468
+
469
+ if subject_name is None:
470
+ self.subject_name = os.path.basename(os.path.dirname(self.data_dir))
471
+
472
+ def load_cam_data(self):
473
+ import csv
474
+ cam_names = []
475
+ extr_mats = []
476
+ intr_mats = []
477
+ img_widths = []
478
+ img_heights = []
479
+ with open(self.data_dir + '/4x/calibration.csv', "r", newline = "", encoding = 'utf-8') as fp:
480
+ reader = csv.DictReader(fp)
481
+ for row in reader:
482
+ cam_names.append(row['name'])
483
+ img_widths.append(int(row['w']))
484
+ img_heights.append(int(row['h']))
485
+
486
+ extr_mat = np.identity(4, np.float32)
487
+ extr_mat[:3, :3] = cv.Rodrigues(np.array([float(row['rx']), float(row['ry']), float(row['rz'])], np.float32))[0]
488
+ extr_mat[:3, 3] = np.array([float(row['tx']), float(row['ty']), float(row['tz'])])
489
+ extr_mat = np.linalg.inv(extr_mat)
490
+ extr_mats.append(extr_mat)
491
+
492
+ intr_mat = np.identity(3, np.float32)
493
+ intr_mat[0, 0] = float(row['fx']) * float(row['w'])
494
+ intr_mat[0, 2] = float(row['px']) * float(row['w'])
495
+ intr_mat[1, 1] = float(row['fy']) * float(row['h'])
496
+ intr_mat[1, 2] = float(row['py']) * float(row['h'])
497
+ intr_mats.append(intr_mat)
498
+
499
+ self.cam_names, self.img_widths, self.img_heights, self.extr_mats, self.intr_mats \
500
+ = cam_names, img_widths, img_heights, extr_mats, intr_mats
501
+
502
+ def load_color_mask_images(self, pose_idx, view_idx):
503
+ cam_name = self.cam_names[view_idx]
504
+ color_img = cv.imread(self.data_dir + '/4x/rgbs/%s/%s_rgb%06d.jpg' % (cam_name, cam_name, pose_idx), cv.IMREAD_UNCHANGED)
505
+ mask_img = cv.imread(self.data_dir + '/4x/masks/%s/%s_mask%06d.png' % (cam_name, cam_name, pose_idx), cv.IMREAD_UNCHANGED)
506
+ return color_img, mask_img
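The compute_pca / transform_pca pair defined above is what the n_pca: 20 and sigma_pca: 2.0 entries in the test configs feed into: a PCA basis is fitted (or loaded) on the foreground pixels of the training position maps, and a new pose map can then be projected into that basis, its coefficients clamped to ±sigma_pca standard deviations, and reconstructed. A rough usage sketch only; the actual call site lives elsewhere in the repository, and pose_map here stands for an (H, W, 3) SMPL position map loaded the same way as in compute_pca:

    dataset.compute_pca(n_components = 20)          # fits or loads smpl_pos_map/pca_20.ckpt
    fg = dataset.pos_map_mask                       # boolean foreground mask of the position map
    pose_map[fg] = dataset.transform_pca(pose_map[fg], sigma_pca = 2.0)   # clamp out-of-distribution poses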
AnimatableGaussians/dataset/dataset_pose.py ADDED
@@ -0,0 +1,573 @@
1
+ import glob
2
+ import os
3
+ import pickle
4
+ import numpy as np
5
+ import cv2 as cv
6
+ import torch
7
+ import trimesh
8
+ from torch.utils.data import Dataset
9
+ import yaml
10
+ import json
11
+ import AnimatableGaussians.smplx as smplx
12
+
13
+ import AnimatableGaussians.dataset.commons as commons
14
+ import AnimatableGaussians.utils.nerf_util as nerf_util
15
+ import AnimatableGaussians.utils.visualize_util as visualize_util
16
+ import AnimatableGaussians.config as config
17
+
18
+
19
+ class PoseDataset(Dataset):
20
+ @torch.no_grad()
21
+ def __init__(
22
+ self,
23
+ data_path,
24
+ frame_range = None,
25
+ frame_interval = 1,
26
+ smpl_shape = None,
27
+ gender = 'neutral',
28
+ frame_win = 0,
29
+ fix_head_pose = True,
30
+ fix_hand_pose = True,
31
+ denoise = False,
32
+ hand_pose_type = 'ori',
33
+ constrain_leg_pose = False,
34
+ device = 'cuda:0'
35
+ ):
36
+ super(PoseDataset, self).__init__()
37
+
38
+ self.data_path = data_path
39
+ self.training = False
40
+
41
+ self.gender = gender
42
+
43
+ data_name, ext = os.path.splitext(os.path.basename(data_path))
44
+ print(data_name)
45
+ if ext == '.pkl':
46
+ smpl_data = pickle.load(open(data_path, 'rb'))
47
+ smpl_data = dict(smpl_data)
48
+ self.body_poses = torch.from_numpy(smpl_data['smpl_poses']).to(torch.float32)
49
+ self.transl = torch.from_numpy(smpl_data['smpl_trans']).to(torch.float32) * 1e-3
50
+ self.dataset_name = 'aist++'
51
+ self.seq_name = data_name
52
+ elif ext == '.npz':
53
+ potential_datasets = ['thuman4', 'actorshq', 'avatarrex', 'AMASS']
54
+ for i, potential_dataset in enumerate(potential_datasets):
55
+ start_pos = data_path.find(potential_dataset)
56
+ if start_pos == -1:
57
+ if i < len(potential_datasets) - 1:
58
+ continue
59
+ else:
60
+ raise ValueError('Invalid data_path!')
61
+ self.dataset_name = potential_dataset
62
+ self.seq_name = data_path[start_pos:].replace(self.dataset_name, '').replace('/', '_').replace('\\', '_').replace('.npz', '')
63
+ break
64
+ # print(self.dataset_name)
65
+ # print(f'# Dataset name: {self.dataset_name}, sequence name: {self.seq_name}')
66
+ if self.dataset_name == 'thuman4' or self.dataset_name == 'actorshq' or self.dataset_name == 'avatarrex':
67
+ smpl_data = np.load(data_path)
68
+ # if smpl_data.shape[1] == 156:
69
+ # # build dict
70
+ # smpl_data = {
71
+ # 'betas': smpl_data[:, :10],
72
+ # 'global_orient': smpl_data[:, 10:13],
73
+ # 'transl': smpl_data[:, 13:16],
74
+ # 'body_pose': smpl_data[:, 16:88],
75
+ # 'left_hand_pose': smpl_data[:, 88:133],
76
+ # 'right_hand_pose': smpl_data[:, 133:]
77
+ # }
78
+ smpl_data = dict(smpl_data)
79
+ for k in smpl_data.keys():
80
+ print(k, smpl_data[k].shape)
81
+ else: # AMASS dataset
82
+ pose_file = np.load(data_path)
83
+ smpl_data = {
84
+ 'betas': np.zeros((1, 10), np.float32),
85
+ 'global_orient': pose_file['poses'][:, :3],
86
+ 'transl': pose_file['trans'],
87
+ 'body_pose': pose_file['poses'][:, 3: 22 * 3],
88
+ 'left_hand_pose': pose_file['poses'][:, 22 * 3: 37 * 3],
89
+ 'right_hand_pose': pose_file['poses'][:, 37 * 3:]
90
+ }
91
+
92
+ # smpl_data['body_pose'][:, 13 * 3 + 2] -= 0.3
93
+ # smpl_data['body_pose'][:, 12 * 3 + 2] += 0.3
94
+ # # smpl_data['body_pose'][:, 16 * 3 + 2] -= 0.1
95
+ # # smpl_data['body_pose'][:, 15 * 3 + 2] += 0.1
96
+ # smpl_data['body_pose'][:, 19 * 3: 20 * 3] = 0.
97
+ # smpl_data['body_pose'][:, 20 * 3: 21 * 3] = 0.
98
+ # smpl_data['body_pose'][:, 14 * 3] = 0.
99
+ # print(smpl_data['body_pose'].shape)
100
+ if self.seq_name == '_actor01':
101
+ smpl_data['body_pose'][:, 6*3: 7*3] = 0.
102
+ smpl_data['body_pose'][:, 7*3: 8*3] = 0.
103
+
104
+ smpl_data = {k: torch.from_numpy(v).to(torch.float32) for k, v in smpl_data.items()}
105
+ frame_num = smpl_data['body_pose'].shape[0]
106
+ self.body_poses = torch.zeros((frame_num, 72), dtype = torch.float32)
107
+ self.body_poses[:, :3] = smpl_data['global_orient']
108
+ self.body_poses[:, 3:3+21*3] = smpl_data['body_pose']
109
+ self.transl = smpl_data['transl']
110
+ # print(self.body_poses)
111
+
112
+ data_dir = os.path.dirname(data_path)
113
+ calib_path = os.path.basename(data_path).replace('.npz', '.json').replace('pose', 'calibration')
114
+ calib_path = data_dir + '/' + calib_path
115
+ if os.path.exists(calib_path):
116
+ cam_data = json.load(open(calib_path, 'r'))
117
+ self.view_num = len(cam_data)
118
+ self.extr_mats = []
119
+ self.cam_names = list(cam_data.keys())
120
+ for view_idx in range(self.view_num):
121
+ extr_mat = np.identity(4, np.float32)
122
+ extr_mat[:3, :3] = np.array(cam_data[self.cam_names[view_idx]]['R'], np.float32).reshape(3, 3)
123
+ extr_mat[:3, 3] = np.array(cam_data[self.cam_names[view_idx]]['T'], np.float32)
124
+ self.extr_mats.append(extr_mat)
125
+ self.intr_mats = [np.array(cam_data[self.cam_names[view_idx]]['K'], np.float32).reshape(3, 3) for view_idx in range(self.view_num)]
126
+ self.img_heights = [cam_data[self.cam_names[view_idx]]['imgSize'][1] for view_idx in range(self.view_num)]
127
+ self.img_widths = [cam_data[self.cam_names[view_idx]]['imgSize'][0] for view_idx in range(self.view_num)]
128
+ else:
129
+ raise AssertionError('Invalid data_path!')
130
+
131
+ if 'left_hand_pose' in smpl_data:
132
+ self.left_hand_pose = smpl_data['left_hand_pose']
133
+ else:
134
+ self.left_hand_pose = config.left_hand_pose[None].expand(self.body_poses.shape[0], -1)
135
+ if 'right_hand_pose' in smpl_data:
136
+ self.right_hand_pose = smpl_data['right_hand_pose']
137
+ else:
138
+ self.right_hand_pose = config.right_hand_pose[None].expand(self.body_poses.shape[0], -1)
139
+
140
+ self.body_poses = self.body_poses.to(device)
141
+ self.transl = self.transl.to(device)
142
+
143
+ self.fix_head_pose = fix_head_pose
144
+ self.fix_hand_pose = fix_hand_pose
145
+
146
+ self.smpl_model = smplx.SMPLX(model_path = config.PROJ_DIR + '/smpl_files/smplx', gender = self.gender, use_pca = False, num_pca_comps = 45, flat_hand_mean = True, batch_size = 1).to(device)
147
+
148
+ pose_list = list(range(0, self.body_poses.shape[0], frame_interval))
149
+ if frame_range is not None:
150
+ frame_range = list(frame_range)
151
+ if isinstance(frame_range, list):
152
+ if isinstance(frame_range[0], list):
153
+ self.pose_list = []
154
+ for interval in frame_range:
155
+ if len(interval) == 2 or len(interval) == 3:
156
+ self.pose_list += list(range(*interval))
157
+ else:
158
+ for i in range(interval[3]):
159
+ self.pose_list += list(range(interval[0], interval[1], interval[2]))
160
+ else:
161
+ if len(frame_range) == 2:
162
+ print(f'# Selected frame indices: range({frame_range[0]}, {frame_range[1]})')
163
+ frame_range = range(frame_range[0], frame_range[1])
164
+ elif len(frame_range) == 3:
165
+ print(f'# Selected frame indices: range({frame_range[0]}, {frame_range[1]}, {frame_range[2]})')
166
+ frame_range = range(frame_range[0], frame_range[1], frame_range[2])
167
+ self.pose_list = list(frame_range)
168
+ else:
169
+ self.pose_list = pose_list
170
+
171
+ print('# Pose list: ', self.pose_list)
172
+ print('# Dataset contains %d items' % len(self))
173
+
174
+ # SMPL related
175
+ self.smpl_shape = smpl_shape.to(torch.float32).to(device) if smpl_shape is not None else torch.zeros(10, dtype = torch.float32, device = device)
176
+ ret = self.smpl_model.forward(betas = self.smpl_shape[None],
177
+ global_orient = config.cano_smpl_global_orient[None].to(device),
178
+ transl = config.cano_smpl_transl[None].to(device),
179
+ body_pose = config.cano_smpl_body_pose[None].to(device),
180
+ # left_hand_pose = config.left_hand_pose[None],
181
+ # right_hand_pose = config.right_hand_pose[None]
182
+ )
183
+ self.cano_smpl = {k: v[0] for k, v in ret.items() if isinstance(v, torch.Tensor)}
184
+ self.inv_cano_jnt_mats = torch.linalg.inv(self.cano_smpl['A'])
185
+ min_xyz = self.cano_smpl['vertices'].min(0)[0]
186
+ max_xyz = self.cano_smpl['vertices'].max(0)[0]
187
+ self.cano_smpl_center = 0.5 * (min_xyz + max_xyz)
188
+ min_xyz[:2] -= 0.05
189
+ max_xyz[:2] += 0.05
190
+ min_xyz[2] -= 0.15
191
+ max_xyz[2] += 0.15
192
+ self.cano_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).cpu().numpy()
193
+ self.smpl_faces = self.smpl_model.faces.astype(np.int32)
194
+
195
+ self.frame_win = int(frame_win)
196
+ self.denoise = denoise
197
+ if self.denoise:
198
+ win_size = 1
199
+ body_poses_clone = self.body_poses.clone()
200
+ transl_clone = self.transl.clone()
201
+ frame_num = body_poses_clone.shape[0]
202
+ self.body_poses[win_size: frame_num-win_size] = 0
203
+ self.transl[win_size: frame_num-win_size] = 0
204
+ for i in range(-win_size, win_size + 1):
205
+ self.body_poses[win_size: frame_num-win_size] += body_poses_clone[win_size+i: frame_num-win_size+i]
206
+ self.transl[win_size: frame_num-win_size] += transl_clone[win_size+i: frame_num-win_size+i]
207
+ self.body_poses[win_size: frame_num-win_size] /= (2 * win_size + 1)
208
+ self.transl[win_size: frame_num-win_size] /= (2 * win_size + 1)
209
+
210
+ self.hand_pose_type = hand_pose_type
211
+
212
+ self.device = device
213
+ self.last_data_idx = 0
214
+
215
+ commons._initialize_hands(self)
216
+ self.left_cano_mano_v, self.left_cano_mano_n, self.right_cano_mano_v, self.right_cano_mano_n \
217
+ = commons.generate_two_manos(self, self.cano_smpl['vertices'])
218
+
219
+ if constrain_leg_pose:
220
+ # a = 14.
221
+ # # print(self.body_poses[284, 1*3:2*3])
222
+ # # print(self.body_poses[284, 2*3:3*3])
223
+ # self.body_poses[:, 1*3] = torch.clip(self.body_poses[:, 1 * 3], -np.pi / a, np.pi / a)
224
+ # self.body_poses[:, 2*3] = torch.clip(self.body_poses[:, 2 * 3], -np.pi / a, np.pi / a)
225
+ # self.body_poses[:, 1 * 3+2] = torch.clip(self.body_poses[:, 1 * 3+2], -np.pi / a, np.pi / a)
226
+ # self.body_poses[:, 2 * 3+2] = torch.clip(self.body_poses[:, 2 * 3+2], -np.pi / a, np.pi / a)
227
+ # exit(1)
228
+
229
+ self.body_poses[:, 4*3] = torch.clip(self.body_poses[:, 4*3], -0.3, 0.3)
230
+ self.body_poses[:, 5*3] = torch.clip(self.body_poses[:, 5*3], -0.3, 0.3)
231
+
232
+ def __len__(self):
233
+ return len(self.pose_list)
234
+
235
+ def __getitem__(self, index):
236
+ return self.getitem(index)
237
+
238
+ @torch.no_grad()
239
+ def getitem(self, index, **kwargs):
240
+ pose_idx = self.pose_list[index]
241
+ if pose_idx == 0 or pose_idx > self.pose_list[min(index - 1, 0)]:
242
+ data_idx = pose_idx
243
+ else:
244
+ data_idx = self.last_data_idx + 1
245
+ # print('data index: %d, pose index: %d' % (data_idx, pose_idx))
246
+
247
+ if self.hand_pose_type == 'fist':
248
+ left_hand_pose = config.left_hand_pose.to(self.device).clone()
249
+ right_hand_pose = config.right_hand_pose.to(self.device).clone()
250
+ left_hand_pose[:3] = 0.
251
+ right_hand_pose[:3] = 0.
252
+ elif self.hand_pose_type == 'normal':
253
+ left_hand_pose = torch.tensor([0.10859203338623047, 0.10181399434804916, -0.2822268009185791, 0.10211331397294998, -0.09689036756753922, -0.4484838545322418, -0.11360692232847214, -0.023141659796237946, 0.10571160167455673, -0.08793719857931137, -0.026760095730423927, -0.41390693187713623, -0.0923849567770958, 0.10266668349504471, -0.36039748787879944, 0.02140655182301998, -0.07156527787446976, -0.04903153330087662, -0.22358819842338562, -0.3716682195663452, -0.2683027982711792, -0.1506909281015396, 0.07079305499792099, -0.34404537081718445, -0.168443500995636, -0.014021224342286587, 0.09489774703979492, -0.050323735922575, -0.18992969393730164, -0.43895423412323, -0.1806418001651764, 0.0198075994849205, -0.25444355607032776, -0.10171788930892944, -0.10680688172578812, -0.09953738003969193, 0.8094075918197632, 0.5156061053276062, -0.07900168001651764, -0.45094889402389526, 0.24947893619537354, 0.23369410634040833, 0.45277315378189087, -0.17375235259532928, -0.3077943027019501], dtype = torch.float32, device = self.device)
254
+ right_hand_pose = torch.tensor([0.06415501981973648, -0.06942438334226608, 0.282951682806015, 0.09073827415704727, 0.0775153785943985, 0.2961004376411438, -0.07659692317247391, 0.004730052314698696, -0.12084470689296722, 0.007974660955369473, 0.05222926288843155, 0.32775357365608215, -0.10166633129119873, -0.06862349808216095, 0.174485981464386, -0.0023323255591094494, 0.04998664930462837, -0.03490559384226799, 0.12949667870998383, 0.26883721351623535, 0.06881044059991837, -0.18259745836257935, -0.08183271437883377, 0.17669665813446045, -0.08099694550037384, 0.04115655645728111, -0.17928685247898102, 0.07734024524688721, 0.13419172167778015, 0.2600148022174835, -0.151871919631958, -0.01772170141339302, 0.1267814189195633, -0.08800505846738815, 0.09480107575654984, 0.0016392067773267627, 0.6149336695671082, -0.32634419202804565, 0.02278662845492363, -0.39148610830307007, -0.22757330536842346, -0.07884717732667923, 0.38199105858802795, 0.13064607977867126, 0.20154500007629395], dtype = torch.float32, device = self.device)
255
+ elif self.hand_pose_type == 'zero':
256
+ left_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
257
+ right_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
258
+ elif self.hand_pose_type == 'ori':
259
+ left_hand_pose = self.left_hand_pose[pose_idx].to(self.device)
260
+ right_hand_pose = self.right_hand_pose[pose_idx].to(self.device)
261
+ else:
262
+ raise ValueError('Invalid hand_pose_type!')
263
+
264
+ # SMPL
265
+ live_smpl = self.smpl_model.forward(betas = self.smpl_shape[None],
266
+ global_orient = self.body_poses[pose_idx, :3][None],
267
+ transl = self.transl[pose_idx][None],
268
+ body_pose = self.body_poses[pose_idx, 3: 66][None],
269
+ left_hand_pose = left_hand_pose[None],
270
+ right_hand_pose = right_hand_pose[None]
271
+ )
272
+
273
+ # live_smpl_trimesh = trimesh.Trimesh(vertices = live_smpl.vertices[0].cpu().numpy(), faces = self.smpl_model.faces, process = False)
274
+ # live_smpl_trimesh.export('./debug/smpl_amass.ply')
275
+ # exit(1)
276
+
277
+ live_smpl_woRoot = self.smpl_model.forward(betas = self.smpl_shape[None],
278
+ # global_orient = self.body_poses[pose_idx, :3][None],
279
+ # transl = self.transl[pose_idx][None],
280
+ body_pose = self.body_poses[pose_idx, 3: 66][None],
281
+ # left_hand_pose = config.left_hand_pose[None],
282
+ # right_hand_pose = config.right_hand_pose[None]
283
+ )
284
+
285
+ # cano_smpl = self.smpl_model.forward(betas=self.smpl_shape[None],
286
+ # global_orient=config.cano_smpl_global_orient[None],
287
+ # transl=config.cano_smpl_transl[None],
288
+ # body_pose=config.cano_smpl_body_pose[None],
289
+ # # left_hand_pose = left_hand_pose[None],
290
+ # # right_hand_pose = right_hand_pose[None]
291
+ # )
292
+
293
+ data_item = dict()
294
+ data_item['item_idx'] = index
295
+ data_item['data_idx'] = data_idx
296
+ data_item['global_orient'] = self.body_poses[pose_idx, :3]
297
+ data_item['transl'] = self.transl[pose_idx]
298
+ data_item['joints'] = live_smpl.joints[0, :22]
299
+ data_item['kin_parent'] = self.smpl_model.parents[:22].to(torch.long)
300
+ data_item['pose_1st'] = self.body_poses[0, 3: 66]
301
+ if self.frame_win > 0:
302
+ total_frame_num = len(self.pose_list)
303
+ selected_frames = self.pose_list[max(0, index - self.frame_win): min(total_frame_num, index + self.frame_win + 1)]
304
+ data_item['pose'] = self.body_poses[selected_frames, 3: 66].clone()
305
+ else:
306
+ data_item['pose'] = self.body_poses[pose_idx, 3: 66].clone()
307
+
308
+ if self.fix_head_pose:
309
+ data_item['pose'][..., 3 * 11: 3 * 11 + 3] = 0.
310
+ data_item['pose'][..., 3 * 14: 3 * 14 + 3] = 0.
311
+ if self.fix_hand_pose:
312
+ data_item['pose'][..., 3 * 19: 3 * 19 + 3] = 0.
313
+ data_item['pose'][..., 3 * 20: 3 * 20 + 3] = 0.
314
+ data_item['lhand_pose'] = torch.zeros_like(config.left_hand_pose)
315
+ data_item['rhand_pose'] = torch.zeros_like(config.right_hand_pose)
316
+ data_item['time_stamp'] = np.array(pose_idx, np.float32)
317
+ data_item['live_smpl_v'] = live_smpl.vertices[0]
318
+ data_item['live_smpl_v_woRoot'] = live_smpl_woRoot.vertices[0]
319
+ data_item['cano_smpl_v'] = self.cano_smpl['vertices']
320
+ data_item['cano_jnts'] = self.cano_smpl['joints']
321
+ inv_cano_jnt_mats = torch.linalg.inv(self.cano_smpl['A'])
322
+ data_item['cano2live_jnt_mats'] = torch.matmul(live_smpl.A[0], inv_cano_jnt_mats)
323
+ data_item['cano2live_jnt_mats_woRoot'] = torch.matmul(live_smpl_woRoot.A[0], inv_cano_jnt_mats)
324
+ data_item['cano_smpl_center'] = self.cano_smpl_center
325
+ data_item['cano_bounds'] = self.cano_bounds
326
+ data_item['smpl_faces'] = self.smpl_faces
327
+ min_xyz = live_smpl.vertices[0].min(0)[0] - 0.15
328
+ max_xyz = live_smpl.vertices[0].max(0)[0] + 0.15
329
+ live_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).cpu().numpy()
330
+ data_item['live_bounds'] = live_bounds
331
+
332
+ # # mano
333
+ # data_item['left_cano_mano_v'], data_item['left_cano_mano_n'], data_item['right_cano_mano_v'], data_item['right_cano_mano_n']\
334
+ # = commons.generate_two_manos(self, self.cano_smpl['vertices'])
335
+ # data_item['left_live_mano_v'], data_item['left_live_mano_n'], data_item['right_live_mano_v'], data_item['right_live_mano_n'] \
336
+ # = commons.generate_two_manos(self, live_smpl.vertices[0])
337
+
338
+ """ synthesis config """
339
+ img_h = 512 if 'img_h' not in kwargs else kwargs['img_h']
340
+ img_w = 512 if 'img_w' not in kwargs else kwargs['img_w']
341
+ intr = np.array([[550, 0, 256], [0, 550, 256], [0, 0, 1]], np.float32) if 'intr' not in kwargs else kwargs['intr']
342
+ if 'extr' not in kwargs:
343
+ extr = visualize_util.calc_front_mv(live_bounds.mean(0), tar_pos = np.array([0, 0, 2.5]))
344
+ else:
345
+ extr = kwargs['extr']
346
+
347
+ """ training data config of view_idx """
348
+ # view_idx = 0
349
+ # img_h = self.img_heights[view_idx]
350
+ # img_w = self.img_widths[view_idx]
351
+ # intr = self.intr_mats[view_idx]
352
+ # extr = self.extr_mats[view_idx]
353
+
354
+ uv = self.gen_uv(img_w, img_h)
355
+ uv = uv.reshape(-1, 2)
356
+ ray_d, ray_o = nerf_util.get_rays(uv, extr, intr)
357
+ near, far, mask_at_bound = nerf_util.get_near_far(live_bounds, ray_o, ray_d)
358
+ uv = uv[mask_at_bound]
359
+ ray_o = ray_o[mask_at_bound]
360
+ ray_d = ray_d[mask_at_bound]
361
+
362
+ data_item.update({
363
+ 'uv': uv,
364
+ 'ray_o': ray_o,
365
+ 'ray_d': ray_d,
366
+ 'near': near,
367
+ 'far': far,
368
+ 'dist': np.zeros_like(near),
369
+ 'img_h': img_h,
370
+ 'img_w': img_w,
371
+ 'extr': extr,
372
+ 'intr': intr
373
+ })
374
+
375
+ return data_item
376
+
377
+ def getitem_fast(self, index, **kwargs):
378
+ pose_idx = self.pose_list[index]
379
+ if pose_idx == 0 or pose_idx > self.last_data_idx:
380
+ data_idx = pose_idx
381
+ else:
382
+ data_idx = self.last_data_idx + 1
383
+ # print('data index: %d, pose index: %d' % (data_idx, pose_idx))
384
+
385
+ if self.hand_pose_type == 'fist':
386
+ left_hand_pose = config.left_hand_pose.to(self.device)
387
+ right_hand_pose = config.right_hand_pose.to(self.device)
388
+ elif self.hand_pose_type == 'normal':
389
+ left_hand_pose = torch.tensor(
390
+ [0.10859203338623047, 0.10181399434804916, -0.2822268009185791, 0.10211331397294998, -0.09689036756753922, -0.4484838545322418, -0.11360692232847214, -0.023141659796237946, 0.10571160167455673, -0.08793719857931137, -0.026760095730423927, -0.41390693187713623, -0.0923849567770958, 0.10266668349504471, -0.36039748787879944, 0.02140655182301998, -0.07156527787446976, -0.04903153330087662, -0.22358819842338562, -0.3716682195663452, -0.2683027982711792, -0.1506909281015396,
391
+ 0.07079305499792099, -0.34404537081718445, -0.168443500995636, -0.014021224342286587, 0.09489774703979492, -0.050323735922575, -0.18992969393730164, -0.43895423412323, -0.1806418001651764, 0.0198075994849205, -0.25444355607032776, -0.10171788930892944, -0.10680688172578812, -0.09953738003969193, 0.8094075918197632, 0.5156061053276062, -0.07900168001651764, -0.45094889402389526, 0.24947893619537354, 0.23369410634040833, 0.45277315378189087, -0.17375235259532928,
392
+ -0.3077943027019501], dtype = torch.float32, device = self.device)
393
+ right_hand_pose = torch.tensor(
394
+ [0.06415501981973648, -0.06942438334226608, 0.282951682806015, 0.09073827415704727, 0.0775153785943985, 0.2961004376411438, -0.07659692317247391, 0.004730052314698696, -0.12084470689296722, 0.007974660955369473, 0.05222926288843155, 0.32775357365608215, -0.10166633129119873, -0.06862349808216095, 0.174485981464386, -0.0023323255591094494, 0.04998664930462837, -0.03490559384226799, 0.12949667870998383, 0.26883721351623535, 0.06881044059991837, -0.18259745836257935,
395
+ -0.08183271437883377, 0.17669665813446045, -0.08099694550037384, 0.04115655645728111, -0.17928685247898102, 0.07734024524688721, 0.13419172167778015, 0.2600148022174835, -0.151871919631958, -0.01772170141339302, 0.1267814189195633, -0.08800505846738815, 0.09480107575654984, 0.0016392067773267627, 0.6149336695671082, -0.32634419202804565, 0.02278662845492363, -0.39148610830307007, -0.22757330536842346, -0.07884717732667923, 0.38199105858802795, 0.13064607977867126,
396
+ 0.20154500007629395], dtype = torch.float32, device = self.device)
397
+ elif self.hand_pose_type == 'zero':
398
+ left_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
399
+ right_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
400
+ elif self.hand_pose_type == 'ori':
401
+ left_hand_pose = self.left_hand_pose[pose_idx].to(self.device)
402
+ right_hand_pose = self.right_hand_pose[pose_idx].to(self.device)
403
+ else:
404
+ raise ValueError('Invalid hand_pose_type!')
405
+
406
+ # SMPL
407
+ live_smpl = self.smpl_model.forward(betas = self.smpl_shape[None],
408
+ global_orient = self.body_poses[pose_idx, :3][None],
409
+ transl = self.transl[pose_idx][None],
410
+ body_pose = self.body_poses[pose_idx, 3: 66][None],
411
+ left_hand_pose = left_hand_pose[None],
412
+ right_hand_pose = right_hand_pose[None]
413
+ )
414
+
415
+ live_smpl_woRoot = self.smpl_model.forward(betas = self.smpl_shape[None],
416
+ # global_orient = self.body_poses[pose_idx, :3][None],
417
+ # transl = self.transl[pose_idx][None],
418
+ body_pose = self.body_poses[pose_idx, 3: 66][None],
419
+ # left_hand_pose = config.left_hand_pose[None],
420
+ # right_hand_pose = config.right_hand_pose[None]
421
+ )
422
+
423
+ # cano_smpl = self.smpl_model.forward(betas = self.smpl_shape[None],
424
+ # global_orient = config.cano_smpl_global_orient[None],
425
+ # transl = config.cano_smpl_transl[None],
426
+ # body_pose = config.cano_smpl_body_pose[None],
427
+ # # left_hand_pose = left_hand_pose[None],
428
+ # # right_hand_pose = right_hand_pose[None]
429
+ # )
430
+
431
+ data_item = dict()
432
+ data_item['item_idx'] = index
433
+ data_item['data_idx'] = data_idx
434
+ data_item['global_orient'] = self.body_poses[pose_idx, :3]
435
+ data_item['body_pose'] = self.body_poses[pose_idx, 3:66]
436
+ data_item['transl'] = self.transl[pose_idx]
437
+ data_item['joints'] = live_smpl.joints[0, :22]
438
+ data_item['kin_parent'] = self.smpl_model.parents[:22].to(torch.long)
439
+ data_item['live_smpl_v'] = live_smpl.vertices[0]
440
+ data_item['live_smpl_v_woRoot'] = live_smpl_woRoot.vertices[0]
441
+ data_item['cano_smpl_v'] = self.cano_smpl['vertices']
442
+ data_item['cano_jnts'] = self.cano_smpl['joints']
443
+ inv_cano_jnt_mats = torch.linalg.inv(self.cano_smpl['A'])
444
+ data_item['cano2live_jnt_mats'] = torch.matmul(live_smpl.A[0], inv_cano_jnt_mats)
445
+ data_item['cano2live_jnt_mats_woRoot'] = torch.matmul(live_smpl_woRoot.A[0], inv_cano_jnt_mats)
446
+ data_item['cano_smpl_center'] = self.cano_smpl_center
447
+ data_item['cano_bounds'] = self.cano_bounds
448
+ data_item['smpl_faces'] = self.smpl_faces
449
+ min_xyz = live_smpl.vertices[0].min(0)[0] - 0.15
450
+ max_xyz = live_smpl.vertices[0].max(0)[0] + 0.15
451
+ live_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).cpu().numpy()
452
+ data_item['live_bounds'] = live_bounds
453
+
454
+ data_item['left_cano_mano_v'], data_item['left_cano_mano_n'], data_item['right_cano_mano_v'], data_item['right_cano_mano_n'] \
455
+ = self.left_cano_mano_v, self.left_cano_mano_n, self.right_cano_mano_v, self.right_cano_mano_n
456
+
457
+ """ synthesis config """
458
+ img_h = 512 if 'img_h' not in kwargs else kwargs['img_h']
459
+ img_w = 512 if 'img_w' not in kwargs else kwargs['img_w']
460
+ intr = np.array([[550, 0, 256], [0, 550, 256], [0, 0, 1]], np.float32) if 'intr' not in kwargs else kwargs['intr']
461
+ if 'extr' not in kwargs:
462
+ extr = visualize_util.calc_front_mv(live_bounds.mean(0), tar_pos = np.array([0, 0, 2.5]))
463
+ else:
464
+ extr = kwargs['extr']
465
+
466
+ data_item.update({
467
+ 'img_h': img_h,
468
+ 'img_w': img_w,
469
+ 'extr': extr,
470
+ 'intr': intr
471
+ })
472
+
473
+ self.last_data_idx = data_idx
474
+
475
+ return data_item
476
+
477
+ def getitem_a_pose(self, **kwargs):
478
+ hand_pose_type = 'fist'
479
+ if hand_pose_type == 'fist':
480
+ left_hand_pose = config.left_hand_pose.to(self.device)
481
+ right_hand_pose = config.right_hand_pose.to(self.device)
482
+ elif hand_pose_type == 'normal':
483
+ left_hand_pose = torch.tensor(
484
+ [0.10859203338623047, 0.10181399434804916, -0.2822268009185791, 0.10211331397294998, -0.09689036756753922, -0.4484838545322418, -0.11360692232847214, -0.023141659796237946, 0.10571160167455673, -0.08793719857931137, -0.026760095730423927, -0.41390693187713623, -0.0923849567770958, 0.10266668349504471, -0.36039748787879944, 0.02140655182301998, -0.07156527787446976, -0.04903153330087662, -0.22358819842338562, -0.3716682195663452, -0.2683027982711792, -0.1506909281015396,
485
+                 0.07079305499792099, -0.34404537081718445, -0.168443500995636, -0.014021224342286587, 0.09489774703979492, -0.050323735922575, -0.18992969393730164, -0.43895423412323, -0.1806418001651764, 0.0198075994849205, -0.25444355607032776, -0.10171788930892944, -0.10680688172578812, -0.09953738003969193, 0.8094075918197632, 0.5156061053276062, -0.07900168001651764, -0.45094889402389526, 0.24947893619537354, 0.23369410634040833, 0.45277315378189087, -0.17375235259532928,
+                 -0.3077943027019501], dtype = torch.float32, device = self.device)
+             right_hand_pose = torch.tensor(
+                 [0.06415501981973648, -0.06942438334226608, 0.282951682806015, 0.09073827415704727, 0.0775153785943985, 0.2961004376411438, -0.07659692317247391, 0.004730052314698696, -0.12084470689296722, 0.007974660955369473, 0.05222926288843155, 0.32775357365608215, -0.10166633129119873, -0.06862349808216095, 0.174485981464386, -0.0023323255591094494, 0.04998664930462837, -0.03490559384226799, 0.12949667870998383, 0.26883721351623535, 0.06881044059991837, -0.18259745836257935,
+                 -0.08183271437883377, 0.17669665813446045, -0.08099694550037384, 0.04115655645728111, -0.17928685247898102, 0.07734024524688721, 0.13419172167778015, 0.2600148022174835, -0.151871919631958, -0.01772170141339302, 0.1267814189195633, -0.08800505846738815, 0.09480107575654984, 0.0016392067773267627, 0.6149336695671082, -0.32634419202804565, 0.02278662845492363, -0.39148610830307007, -0.22757330536842346, -0.07884717732667923, 0.38199105858802795, 0.13064607977867126,
+                 0.20154500007629395], dtype = torch.float32, device = self.device)
+         elif self.hand_pose_type == 'zero':
+             left_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
+             right_hand_pose = torch.zeros(45, dtype = torch.float32, device = self.device)
+         else:
+             raise ValueError('Invalid hand_pose_type!')
+
+         body_pose = torch.zeros(21 * 3, dtype = torch.float32).to(self.device)
+         body_pose[15 * 3 + 2] += -0.8
+         body_pose[16 * 3 + 2] += 0.8
+
+         # SMPL
+         live_smpl = self.smpl_model.forward(betas = self.smpl_shape[None],
+                                             global_orient = None,
+                                             transl = None,
+                                             body_pose = body_pose[None],
+                                             left_hand_pose = left_hand_pose[None],
+                                             right_hand_pose = right_hand_pose[None]
+                                             )
+
+         live_smpl_woRoot = self.smpl_model.forward(betas = self.smpl_shape[None],
+                                                    # global_orient = self.body_poses[pose_idx, :3][None],
+                                                    # transl = self.transl[pose_idx][None],
+                                                    body_pose = body_pose[None],
+                                                    # left_hand_pose = config.left_hand_pose[None],
+                                                    # right_hand_pose = config.right_hand_pose[None]
+                                                    )
+
+         # cano_smpl = self.smpl_model.forward(betas = self.smpl_shape[None],
+         #                                     global_orient = config.cano_smpl_global_orient[None],
+         #                                     transl = config.cano_smpl_transl[None],
+         #                                     body_pose = config.cano_smpl_body_pose[None],
+         #                                     # left_hand_pose = left_hand_pose[None],
+         #                                     # right_hand_pose = right_hand_pose[None]
+         #                                     )
+
+         data_item = dict()
+         data_item['item_idx'] = 0
+         data_item['data_idx'] = 0
+         data_item['global_orient'] = torch.zeros(3, dtype = torch.float32)
+         data_item['joints'] = live_smpl.joints[0, :22]
+         data_item['kin_parent'] = self.smpl_model.parents[:22].to(torch.long)
+         data_item['live_smpl_v'] = live_smpl.vertices[0]
+         data_item['live_smpl_v_woRoot'] = live_smpl_woRoot.vertices[0]
+         data_item['cano_smpl_v'] = self.cano_smpl['vertices']
+         data_item['cano_jnts'] = self.cano_smpl['joints']
+         inv_cano_jnt_mats = torch.linalg.inv(self.cano_smpl['A'])
+         data_item['cano2live_jnt_mats'] = torch.matmul(live_smpl.A[0], inv_cano_jnt_mats)
+         data_item['cano2live_jnt_mats_woRoot'] = torch.matmul(live_smpl_woRoot.A[0], inv_cano_jnt_mats)
+         data_item['cano_smpl_center'] = self.cano_smpl_center
+         data_item['cano_bounds'] = self.cano_bounds
+         data_item['smpl_faces'] = self.smpl_faces
+         min_xyz = live_smpl.vertices[0].min(0)[0] - 0.15
+         max_xyz = live_smpl.vertices[0].max(0)[0] + 0.15
+         live_bounds = torch.stack([min_xyz, max_xyz], 0).to(torch.float32).cpu().numpy()
+         data_item['live_bounds'] = live_bounds
+
+         data_item['left_cano_mano_v'], data_item['left_cano_mano_n'], data_item['right_cano_mano_v'], data_item['right_cano_mano_n'] \
+             = self.left_cano_mano_v, self.left_cano_mano_n, self.right_cano_mano_v, self.right_cano_mano_n
+
+         """ synthesis config """
+         img_h = 512 if 'img_h' not in kwargs else kwargs['img_h']
+         img_w = 300 if 'img_w' not in kwargs else kwargs['img_w']
+         intr = np.array([[550, 0, 150], [0, 550, 256], [0, 0, 1]], np.float32) if 'intr' not in kwargs else kwargs['intr']
+         if 'extr' not in kwargs:
+             extr = visualize_util.calc_front_mv(live_bounds.mean(0), tar_pos = np.array([0, 0, 2.5]))
+         else:
+             extr = kwargs['extr']
+
+         data_item.update({
+             'img_h': img_h,
+             'img_w': img_w,
+             'extr': extr,
+             'intr': intr
+         })
+
+         return data_item
+
+     @staticmethod
+     def gen_uv(img_w, img_h):
+         x, y = np.meshgrid(np.linspace(0, img_w - 1, img_w, dtype = np.int32),
+                            np.linspace(0, img_h - 1, img_h, dtype = np.int32))
+         uv = np.stack([x, y], axis = -1)
+         return uv
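For reference, `gen_uv` only builds a per-pixel integer coordinate grid. A minimal standalone sketch (plain NumPy, mirroring the static method above) shows the output layout:

    import numpy as np

    def gen_uv(img_w, img_h):
        # x varies along image width, y along image height (meshgrid default 'xy' indexing)
        x, y = np.meshgrid(np.linspace(0, img_w - 1, img_w, dtype=np.int32),
                           np.linspace(0, img_h - 1, img_h, dtype=np.int32))
        return np.stack([x, y], axis=-1)

    uv = gen_uv(4, 3)
    print(uv.shape)            # (3, 4, 2): one (u, v) pair per pixel, row-major
    print(uv[0, 0], uv[2, 3])  # [0 0] and [3 2], i.e. uv[v, u] = (u, v)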
AnimatableGaussians/eval/comparison_body_only_avatars.py ADDED
@@ -0,0 +1,114 @@
+ # To compute FID, first install pytorch_fid:
+ #   pip install pytorch-fid
+
+ import os
+ import cv2 as cv
+ from tqdm import tqdm
+ import shutil
+
+ from eval.score import *
+
+ cam_id = 18
+ ours_dir = './test_results/subject00/styleunet_gaussians3/testing__cam_%03d/batch_750000/rgb_map' % cam_id
+ posevocab_dir = './test_results/subject00/posevocab/testing__cam_%03d/rgb_map' % cam_id
+ tava_dir = './test_results/subject00/tava/cam_%03d' % cam_id
+ arah_dir = './test_results/subject00/arah/cam_%03d' % cam_id
+ slrf_dir = './test_results/subject00/slrf/cam_%03d' % cam_id
+ gt_dir = 'Z:/MultiviewRGB/THuman4/subject00/images/cam%02d' % cam_id
+ mask_dir = 'Z:/MultiviewRGB/THuman4/subject00/masks/cam%02d' % cam_id
+
+ frame_list = list(range(2000, 2500, 1))
+
+
+ if __name__ == '__main__':
+     ours_metrics = Metrics()
+     posevocab_metrics = Metrics()
+     slrf_metrics = Metrics()
+     arah_metrics = Metrics()
+     tava_metrics = Metrics()
+
+     # start from a clean temporary directory for the cropped patches used by FID
+     shutil.rmtree('./tmp_quant', ignore_errors = True)
+     os.makedirs('./tmp_quant/ours', exist_ok = True)
+     os.makedirs('./tmp_quant/posevocab', exist_ok = True)
+     os.makedirs('./tmp_quant/slrf', exist_ok = True)
+     os.makedirs('./tmp_quant/arah', exist_ok = True)
+     os.makedirs('./tmp_quant/tava', exist_ok = True)
+     os.makedirs('./tmp_quant/gt', exist_ok = True)
+
+     for frame_id in tqdm(frame_list):
+         # all images are normalized to float32 in [0, 1]; the reads assume every frame exists on disk
+         ours_img = (cv.imread(ours_dir + '/%08d.jpg' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         posevocab_img = (cv.imread(posevocab_dir + '/%08d.jpg' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         slrf_img = (cv.imread(slrf_dir + '/%08d.png' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         tava_img = (cv.imread(tava_dir + '/%d.jpg' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         arah_img = (cv.imread(arah_dir + '/%d.jpg' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         gt_img = (cv.imread(gt_dir + '/%08d.jpg' % frame_id, cv.IMREAD_UNCHANGED) / 255.).astype(np.float32)
+         mask_img = cv.imread(mask_dir + '/%08d.jpg' % frame_id, cv.IMREAD_UNCHANGED) > 128
+         gt_img[~mask_img] = 1.
+
+         ours_img_cropped, posevocab_img_cropped, slrf_img_cropped, tava_img_cropped, arah_img_cropped, gt_img_cropped = \
+             crop_image(
+                 mask_img,
+                 512,
+                 ours_img,
+                 posevocab_img,
+                 slrf_img,
+                 tava_img,
+                 arah_img,
+                 gt_img
+             )
+
+         cv.imwrite('./tmp_quant/ours/%08d.png' % frame_id, (ours_img_cropped * 255).astype(np.uint8))
+         cv.imwrite('./tmp_quant/posevocab/%08d.png' % frame_id, (posevocab_img_cropped * 255).astype(np.uint8))
+         cv.imwrite('./tmp_quant/slrf/%08d.png' % frame_id, (slrf_img_cropped * 255).astype(np.uint8))
+         cv.imwrite('./tmp_quant/tava/%08d.png' % frame_id, (tava_img_cropped * 255).astype(np.uint8))
+         cv.imwrite('./tmp_quant/arah/%08d.png' % frame_id, (arah_img_cropped * 255).astype(np.uint8))
+         cv.imwrite('./tmp_quant/gt/%08d.png' % frame_id, (gt_img_cropped * 255).astype(np.uint8))
+
+         if ours_img is not None:
+             ours_metrics.psnr += compute_psnr(ours_img, gt_img)
+             ours_metrics.ssim += compute_ssim(ours_img, gt_img)
+             ours_metrics.lpips += compute_lpips(ours_img_cropped, gt_img_cropped)
+             ours_metrics.count += 1
+
+         if posevocab_img is not None:
+             posevocab_metrics.psnr += compute_psnr(posevocab_img, gt_img)
+             posevocab_metrics.ssim += compute_ssim(posevocab_img, gt_img)
+             posevocab_metrics.lpips += compute_lpips(posevocab_img_cropped, gt_img_cropped)
+             posevocab_metrics.count += 1
+
+         if slrf_img is not None:
+             slrf_metrics.psnr += compute_psnr(slrf_img, gt_img)
+             slrf_metrics.ssim += compute_ssim(slrf_img, gt_img)
+             slrf_metrics.lpips += compute_lpips(slrf_img_cropped, gt_img_cropped)
+             slrf_metrics.count += 1
+
+         if arah_img is not None:
+             arah_metrics.psnr += compute_psnr(arah_img, gt_img)
+             arah_metrics.ssim += compute_ssim(arah_img, gt_img)
+             arah_metrics.lpips += compute_lpips(arah_img_cropped, gt_img_cropped)
+             arah_metrics.count += 1
+
+         if tava_img is not None:
+             tava_metrics.psnr += compute_psnr(tava_img, gt_img)
+             tava_metrics.ssim += compute_ssim(tava_img, gt_img)
+             tava_metrics.lpips += compute_lpips(tava_img_cropped, gt_img_cropped)
+             tava_metrics.count += 1
+
+     print('Ours metrics: ', ours_metrics)
+     print('PoseVocab metrics: ', posevocab_metrics)
+     print('SLRF metrics: ', slrf_metrics)
+     print('ARAH metrics: ', arah_metrics)
+     print('TAVA metrics: ', tava_metrics)
+
+     print('--- Ours ---')
+     os.system('python -m pytorch_fid --device cuda {} {}'.format('./tmp_quant/ours', './tmp_quant/gt'))
+     print('--- PoseVocab ---')
+     os.system('python -m pytorch_fid --device cuda {} {}'.format('./tmp_quant/posevocab', './tmp_quant/gt'))
+     print('--- SLRF ---')
+     os.system('python -m pytorch_fid --device cuda {} {}'.format('./tmp_quant/slrf', './tmp_quant/gt'))
+     print('--- ARAH ---')
+     os.system('python -m pytorch_fid --device cuda {} {}'.format('./tmp_quant/arah', './tmp_quant/gt'))
+     print('--- TAVA ---')
+     os.system('python -m pytorch_fid --device cuda {} {}'.format('./tmp_quant/tava', './tmp_quant/gt'))
+
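The script above shells out to `python -m pytorch_fid` for the FID numbers. A rough in-process alternative is sketched below; it assumes the installed pytorch-fid package exposes `calculate_fid_given_paths(paths, batch_size, device, dims)` (verify the signature against your installed version), and `fid_against_gt` is a hypothetical helper mirroring the `os.system()` calls:

    import torch
    from pytorch_fid.fid_score import calculate_fid_given_paths

    # directory names follow the ./tmp_quant layout written by the comparison script
    def fid_against_gt(method_dir, gt_dir='./tmp_quant/gt', dims=2048, batch_size=50):
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        return calculate_fid_given_paths([method_dir, gt_dir], batch_size, device, dims)

    for name in ['ours', 'posevocab', 'slrf', 'arah', 'tava']:
        print(name, fid_against_gt('./tmp_quant/%s' % name))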
AnimatableGaussians/eval/score.py ADDED
@@ -0,0 +1,108 @@
+ import skimage.metrics
+ import numpy as np
+ import torch
+ import cv2 as cv
+
+ from network.lpips import LPIPS
+
+
+ class Metrics:
+     def __init__(self):
+         self.psnr = 0.
+         self.ssim = 0.
+         self.lpips = 0.
+         self.count = 0
+
+     def __repr__(self):
+         if self.count > 0:
+             return f'Count: {self.count}, PSNR: {self.psnr / self.count}, SSIM: {self.ssim / self.count}, LPIPS: {self.lpips / self.count}'
+         else:
+             return 'count is 0!'
+
+
+ def crop_image(gt_mask, patch_size, *args):
+     """
+     Crop every image to the padded bounding box of the foreground mask,
+     pad to a square with white background, and resize to patch_size.
+     :param gt_mask: (H, W)
+     :param patch_size: resize the cropped patch to the given patch_size
+     :param args: some images with shape of (H, W, C)
+     """
+     mask_uv = np.argwhere(gt_mask > 0.)
+     min_v, min_u = mask_uv.min(0)
+     max_v, max_u = mask_uv.max(0)
+     pad_size = 50
+     min_v = (min_v - pad_size).clip(0, gt_mask.shape[0])
+     min_u = (min_u - pad_size).clip(0, gt_mask.shape[1])
+     max_v = (max_v + pad_size).clip(0, gt_mask.shape[0])
+     max_u = (max_u + pad_size).clip(0, gt_mask.shape[1])
+     len_v = max_v - min_v
+     len_u = max_u - min_u
+     max_size = max(len_v, len_u)
+
+     cropped_images = []
+     for image in args:
+         if image is None:
+             cropped_images.append(None)
+         else:
+             cropped_image = np.ones((max_size, max_size, 3), dtype = image.dtype)
+             if len_v > len_u:
+                 start_u = (max_size - len_u) // 2
+                 cropped_image[:, start_u: start_u + len_u] = image[min_v: max_v, min_u: max_u]
+             else:
+                 start_v = (max_size - len_v) // 2
+                 cropped_image[start_v: start_v + len_v, :] = image[min_v: max_v, min_u: max_u]
+
+             cropped_image = cv.resize(cropped_image, (patch_size, patch_size), interpolation = cv.INTER_LINEAR)
+             cropped_images.append(cropped_image)
+
+     if len(cropped_images) > 1:
+         return cropped_images
+     else:
+         return cropped_images[0]
+
+
+ def to_tensor(array, device = 'cuda'):
+     if isinstance(array, np.ndarray):
+         array = torch.from_numpy(array).to(device)
+     elif isinstance(array, torch.Tensor):
+         array = array.to(device)
+     else:
+         raise TypeError('Invalid type of array.')
+     return array
+
+
+ def cut_rect(img):
+     # pad a rectangular image to a square (white background) without changing its content
+     h, w = img.shape[:2]
+     size = max(h, w)
+     img_ = torch.ones((size, size, img.shape[2])).to(img)
+     if h < w:
+         img_[:h] = img
+     else:
+         img_[:, :w] = img
+     return img_
+
+
+ # the LPIPS network is constructed lazily on first use
+ lpips_net = None
+
+
+ def compute_lpips(src, tar, device = 'cuda'):
+     src = to_tensor(src, device)
+     tar = to_tensor(tar, device)
+     global lpips_net
+     if lpips_net is None:
+         lpips_net = LPIPS(net = 'vgg').to(device)
+     if src.shape[0] != src.shape[1]:
+         src = cut_rect(src)
+         tar = cut_rect(tar)
+     with torch.no_grad():
+         lpips = lpips_net.forward(src.permute(2, 0, 1)[None], tar.permute(2, 0, 1)[None], normalize = True).mean()
+     return lpips.item()
+
+
+ def compute_psnr(src, tar):
+     psnr = skimage.metrics.peak_signal_noise_ratio(tar, src, data_range = 1)
+     return psnr
+
+
+ def compute_ssim(src, tar):
+     ssim = skimage.metrics.structural_similarity(src, tar, multichannel = True, data_range = 1)
+     return ssim
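A minimal usage sketch for the helpers in `score.py` (assuming the module is importable as `eval.score`; LPIPS is omitted here because it needs the repo's `network.lpips` weights and a GPU):

    import numpy as np
    from eval.score import Metrics, compute_psnr, compute_ssim

    # two dummy float images in [0, 1] with shape (H, W, 3)
    gt = np.random.rand(256, 256, 3).astype(np.float32)
    pred = np.clip(gt + 0.01 * np.random.randn(*gt.shape).astype(np.float32), 0., 1.)

    m = Metrics()
    m.psnr += compute_psnr(pred, gt)
    m.ssim += compute_ssim(pred, gt)  # note: 'multichannel' may need 'channel_axis' on newer scikit-image
    m.count += 1
    print(m)  # prints averaged PSNR/SSIM (LPIPS stays 0 in this sketch)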
AnimatableGaussians/gaussians/__pycache__/gaussian_model.cpython-310.pyc ADDED
Binary file (15.8 kB). View file