├── .gitignore ├── Q1 ├── data │ ├── chair.ply │ └── truck │ │ ├── imgs │ │ ├── frame000001.jpg │ │ ├── frame000002.jpg │ │ ├── frame000003.jpg │ │ ├── frame000004.jpg │ │ ├── frame000005.jpg │ │ ├── frame000006.jpg │ │ ├── frame000007.jpg │ │ ├── frame000008.jpg │ │ ├── frame000009.jpg │ │ ├── frame000010.jpg │ │ ├── frame000011.jpg │ │ ├── frame000012.jpg │ │ ├── frame000013.jpg │ │ ├── frame000014.jpg │ │ ├── frame000015.jpg │ │ ├── frame000016.jpg │ │ ├── frame000017.jpg │ │ ├── frame000018.jpg │ │ ├── frame000019.jpg │ │ ├── frame000020.jpg │ │ ├── frame000021.jpg │ │ ├── frame000022.jpg │ │ ├── frame000023.jpg │ │ ├── frame000024.jpg │ │ ├── frame000025.jpg │ │ ├── frame000026.jpg │ │ ├── frame000027.jpg │ │ ├── frame000028.jpg │ │ ├── frame000029.jpg │ │ ├── frame000030.jpg │ │ ├── frame000031.jpg │ │ ├── frame000032.jpg │ │ ├── frame000033.jpg │ │ ├── frame000034.jpg │ │ ├── frame000035.jpg │ │ ├── frame000036.jpg │ │ ├── frame000037.jpg │ │ ├── frame000038.jpg │ │ ├── frame000039.jpg │ │ ├── frame000040.jpg │ │ ├── frame000041.jpg │ │ ├── frame000042.jpg │ │ ├── frame000043.jpg │ │ ├── frame000044.jpg │ │ ├── frame000045.jpg │ │ ├── frame000046.jpg │ │ ├── frame000047.jpg │ │ ├── frame000048.jpg │ │ ├── frame000049.jpg │ │ ├── frame000050.jpg │ │ ├── frame000051.jpg │ │ ├── frame000052.jpg │ │ ├── frame000053.jpg │ │ ├── frame000054.jpg │ │ ├── frame000055.jpg │ │ ├── frame000056.jpg │ │ ├── frame000057.jpg │ │ ├── frame000058.jpg │ │ ├── frame000059.jpg │ │ ├── frame000060.jpg │ │ ├── frame000061.jpg │ │ ├── frame000062.jpg │ │ ├── frame000063.jpg │ │ ├── frame000064.jpg │ │ ├── frame000065.jpg │ │ ├── frame000066.jpg │ │ ├── frame000067.jpg │ │ ├── frame000068.jpg │ │ ├── frame000069.jpg │ │ ├── frame000070.jpg │ │ ├── frame000071.jpg │ │ ├── frame000072.jpg │ │ ├── frame000073.jpg │ │ ├── frame000074.jpg │ │ ├── frame000075.jpg │ │ ├── frame000076.jpg │ │ ├── frame000077.jpg │ │ ├── frame000078.jpg │ │ ├── frame000079.jpg │ │ ├── frame000080.jpg │ │ ├── 
frame000081.jpg │ │ ├── frame000082.jpg │ │ ├── frame000083.jpg │ │ ├── frame000084.jpg │ │ ├── frame000085.jpg │ │ ├── frame000086.jpg │ │ ├── frame000087.jpg │ │ ├── frame000088.jpg │ │ ├── frame000089.jpg │ │ ├── frame000090.jpg │ │ ├── frame000091.jpg │ │ ├── frame000092.jpg │ │ ├── frame000093.jpg │ │ ├── frame000094.jpg │ │ ├── frame000095.jpg │ │ ├── frame000096.jpg │ │ ├── frame000097.jpg │ │ ├── frame000098.jpg │ │ ├── frame000099.jpg │ │ ├── frame000100.jpg │ │ ├── frame000101.jpg │ │ ├── frame000102.jpg │ │ ├── frame000103.jpg │ │ ├── frame000104.jpg │ │ ├── frame000105.jpg │ │ ├── frame000106.jpg │ │ ├── frame000107.jpg │ │ ├── frame000108.jpg │ │ ├── frame000109.jpg │ │ ├── frame000110.jpg │ │ ├── frame000111.jpg │ │ ├── frame000112.jpg │ │ ├── frame000113.jpg │ │ ├── frame000114.jpg │ │ ├── frame000115.jpg │ │ ├── frame000116.jpg │ │ ├── frame000117.jpg │ │ ├── frame000118.jpg │ │ ├── frame000119.jpg │ │ ├── frame000120.jpg │ │ ├── frame000121.jpg │ │ ├── frame000122.jpg │ │ ├── frame000123.jpg │ │ ├── frame000124.jpg │ │ ├── frame000125.jpg │ │ ├── frame000126.jpg │ │ ├── frame000127.jpg │ │ ├── frame000128.jpg │ │ ├── frame000129.jpg │ │ ├── frame000130.jpg │ │ ├── frame000131.jpg │ │ ├── frame000132.jpg │ │ ├── frame000133.jpg │ │ ├── frame000134.jpg │ │ ├── frame000135.jpg │ │ ├── frame000136.jpg │ │ ├── frame000137.jpg │ │ ├── frame000138.jpg │ │ ├── frame000139.jpg │ │ ├── frame000140.jpg │ │ ├── frame000141.jpg │ │ ├── frame000142.jpg │ │ ├── frame000143.jpg │ │ ├── frame000144.jpg │ │ ├── frame000145.jpg │ │ ├── frame000146.jpg │ │ ├── frame000147.jpg │ │ ├── frame000148.jpg │ │ ├── frame000149.jpg │ │ ├── frame000150.jpg │ │ ├── frame000151.jpg │ │ ├── frame000152.jpg │ │ ├── frame000153.jpg │ │ ├── frame000154.jpg │ │ ├── frame000155.jpg │ │ ├── frame000156.jpg │ │ ├── frame000157.jpg │ │ ├── frame000158.jpg │ │ ├── frame000159.jpg │ │ ├── frame000160.jpg │ │ ├── frame000161.jpg │ │ ├── frame000162.jpg │ │ ├── frame000163.jpg │ │ ├── 
frame000164.jpg │ │ ├── frame000165.jpg │ │ ├── frame000166.jpg │ │ ├── frame000167.jpg │ │ ├── frame000168.jpg │ │ ├── frame000169.jpg │ │ ├── frame000170.jpg │ │ ├── frame000171.jpg │ │ ├── frame000172.jpg │ │ ├── frame000173.jpg │ │ ├── frame000174.jpg │ │ ├── frame000175.jpg │ │ ├── frame000176.jpg │ │ ├── frame000177.jpg │ │ ├── frame000178.jpg │ │ ├── frame000179.jpg │ │ ├── frame000180.jpg │ │ ├── frame000181.jpg │ │ ├── frame000182.jpg │ │ ├── frame000183.jpg │ │ ├── frame000184.jpg │ │ ├── frame000185.jpg │ │ ├── frame000186.jpg │ │ ├── frame000187.jpg │ │ ├── frame000188.jpg │ │ ├── frame000189.jpg │ │ ├── frame000190.jpg │ │ ├── frame000191.jpg │ │ ├── frame000192.jpg │ │ ├── frame000193.jpg │ │ ├── frame000194.jpg │ │ ├── frame000195.jpg │ │ ├── frame000196.jpg │ │ ├── frame000197.jpg │ │ ├── frame000198.jpg │ │ ├── frame000199.jpg │ │ ├── frame000200.jpg │ │ ├── frame000201.jpg │ │ └── frame000202.jpg │ │ ├── points │ │ └── points_10000.npy │ │ └── poses │ │ ├── frame000001.npy │ │ ├── frame000002.npy │ │ ├── frame000003.npy │ │ ├── frame000004.npy │ │ ├── frame000005.npy │ │ ├── frame000006.npy │ │ ├── frame000007.npy │ │ ├── frame000008.npy │ │ ├── frame000009.npy │ │ ├── frame000010.npy │ │ ├── frame000011.npy │ │ ├── frame000012.npy │ │ ├── frame000013.npy │ │ ├── frame000014.npy │ │ ├── frame000015.npy │ │ ├── frame000016.npy │ │ ├── frame000017.npy │ │ ├── frame000018.npy │ │ ├── frame000019.npy │ │ ├── frame000020.npy │ │ ├── frame000021.npy │ │ ├── frame000022.npy │ │ ├── frame000023.npy │ │ ├── frame000024.npy │ │ ├── frame000025.npy │ │ ├── frame000026.npy │ │ ├── frame000027.npy │ │ ├── frame000028.npy │ │ ├── frame000029.npy │ │ ├── frame000030.npy │ │ ├── frame000031.npy │ │ ├── frame000032.npy │ │ ├── frame000033.npy │ │ ├── frame000034.npy │ │ ├── frame000035.npy │ │ ├── frame000036.npy │ │ ├── frame000037.npy │ │ ├── frame000038.npy │ │ ├── frame000039.npy │ │ ├── frame000040.npy │ │ ├── frame000041.npy │ │ ├── frame000042.npy │ │ ├── 
frame000043.npy │ │ ├── frame000044.npy │ │ ├── frame000045.npy │ │ ├── frame000046.npy │ │ ├── frame000047.npy │ │ ├── frame000048.npy │ │ ├── frame000049.npy │ │ ├── frame000050.npy │ │ ├── frame000051.npy │ │ ├── frame000052.npy │ │ ├── frame000053.npy │ │ ├── frame000054.npy │ │ ├── frame000055.npy │ │ ├── frame000056.npy │ │ ├── frame000057.npy │ │ ├── frame000058.npy │ │ ├── frame000059.npy │ │ ├── frame000060.npy │ │ ├── frame000061.npy │ │ ├── frame000062.npy │ │ ├── frame000063.npy │ │ ├── frame000064.npy │ │ ├── frame000065.npy │ │ ├── frame000066.npy │ │ ├── frame000067.npy │ │ ├── frame000068.npy │ │ ├── frame000069.npy │ │ ├── frame000070.npy │ │ ├── frame000071.npy │ │ ├── frame000072.npy │ │ ├── frame000073.npy │ │ ├── frame000074.npy │ │ ├── frame000075.npy │ │ ├── frame000076.npy │ │ ├── frame000077.npy │ │ ├── frame000078.npy │ │ ├── frame000079.npy │ │ ├── frame000080.npy │ │ ├── frame000081.npy │ │ ├── frame000082.npy │ │ ├── frame000083.npy │ │ ├── frame000084.npy │ │ ├── frame000085.npy │ │ ├── frame000086.npy │ │ ├── frame000087.npy │ │ ├── frame000088.npy │ │ ├── frame000089.npy │ │ ├── frame000090.npy │ │ ├── frame000091.npy │ │ ├── frame000092.npy │ │ ├── frame000093.npy │ │ ├── frame000094.npy │ │ ├── frame000095.npy │ │ ├── frame000096.npy │ │ ├── frame000097.npy │ │ ├── frame000098.npy │ │ ├── frame000099.npy │ │ ├── frame000100.npy │ │ ├── frame000101.npy │ │ ├── frame000102.npy │ │ ├── frame000103.npy │ │ ├── frame000104.npy │ │ ├── frame000105.npy │ │ ├── frame000106.npy │ │ ├── frame000107.npy │ │ ├── frame000108.npy │ │ ├── frame000109.npy │ │ ├── frame000110.npy │ │ ├── frame000111.npy │ │ ├── frame000112.npy │ │ ├── frame000113.npy │ │ ├── frame000114.npy │ │ ├── frame000115.npy │ │ ├── frame000116.npy │ │ ├── frame000117.npy │ │ ├── frame000118.npy │ │ ├── frame000119.npy │ │ ├── frame000120.npy │ │ ├── frame000121.npy │ │ ├── frame000122.npy │ │ ├── frame000123.npy │ │ ├── frame000124.npy │ │ ├── frame000125.npy │ │ ├── 
frame000126.npy │ │ ├── frame000127.npy │ │ ├── frame000128.npy │ │ ├── frame000129.npy │ │ ├── frame000130.npy │ │ ├── frame000131.npy │ │ ├── frame000132.npy │ │ ├── frame000133.npy │ │ ├── frame000134.npy │ │ ├── frame000135.npy │ │ ├── frame000136.npy │ │ ├── frame000137.npy │ │ ├── frame000138.npy │ │ ├── frame000139.npy │ │ ├── frame000140.npy │ │ ├── frame000141.npy │ │ ├── frame000142.npy │ │ ├── frame000143.npy │ │ ├── frame000144.npy │ │ ├── frame000145.npy │ │ ├── frame000146.npy │ │ ├── frame000147.npy │ │ ├── frame000148.npy │ │ ├── frame000149.npy │ │ ├── frame000150.npy │ │ ├── frame000151.npy │ │ ├── frame000152.npy │ │ ├── frame000153.npy │ │ ├── frame000154.npy │ │ ├── frame000155.npy │ │ ├── frame000156.npy │ │ ├── frame000157.npy │ │ ├── frame000158.npy │ │ ├── frame000159.npy │ │ ├── frame000160.npy │ │ ├── frame000161.npy │ │ ├── frame000162.npy │ │ ├── frame000163.npy │ │ ├── frame000164.npy │ │ ├── frame000165.npy │ │ ├── frame000166.npy │ │ ├── frame000167.npy │ │ ├── frame000168.npy │ │ ├── frame000169.npy │ │ ├── frame000170.npy │ │ ├── frame000171.npy │ │ ├── frame000172.npy │ │ ├── frame000173.npy │ │ ├── frame000174.npy │ │ ├── frame000175.npy │ │ ├── frame000176.npy │ │ ├── frame000177.npy │ │ ├── frame000178.npy │ │ ├── frame000179.npy │ │ ├── frame000180.npy │ │ ├── frame000181.npy │ │ ├── frame000182.npy │ │ ├── frame000183.npy │ │ ├── frame000184.npy │ │ ├── frame000185.npy │ │ ├── frame000186.npy │ │ ├── frame000187.npy │ │ ├── frame000188.npy │ │ ├── frame000189.npy │ │ ├── frame000190.npy │ │ ├── frame000191.npy │ │ ├── frame000192.npy │ │ ├── frame000193.npy │ │ ├── frame000194.npy │ │ ├── frame000195.npy │ │ ├── frame000196.npy │ │ ├── frame000197.npy │ │ ├── frame000198.npy │ │ ├── frame000199.npy │ │ ├── frame000200.npy │ │ ├── frame000201.npy │ │ └── frame000202.npy ├── data_utils.py ├── data_utils_harder_scene.py ├── model.py ├── render.py ├── train.py ├── train_harder_scene.py └── unit_test_gaussians.py ├── Q2 ├── 
Q21_image_optimization.py ├── Q22_mesh_optimization.py ├── Q23_nerf_optimization.py ├── SDS.py ├── activation.py ├── data │ ├── cow.mtl │ ├── cow.obj │ └── cow_texture.png ├── freqencoder │ ├── __init__.py │ ├── backend.py │ ├── build │ │ ├── lib.linux-x86_64-cpython-310 │ │ │ └── _freqencoder.cpython-310-x86_64-linux-gnu.so │ │ └── temp.linux-x86_64-cpython-310 │ │ │ ├── .ninja_deps │ │ │ ├── .ninja_log │ │ │ ├── build.ninja │ │ │ └── data │ │ │ └── ruihan │ │ │ └── 16825 │ │ │ └── a4_internal │ │ │ └── Q2 │ │ │ └── stable-dreamfusion │ │ │ └── freqencoder │ │ │ └── src │ │ │ ├── bindings.o │ │ │ └── freqencoder.o │ ├── freq.py │ ├── freqencoder.egg-info │ │ ├── PKG-INFO │ │ ├── SOURCES.txt │ │ ├── dependency_links.txt │ │ └── top_level.txt │ ├── setup.py │ └── src │ │ ├── bindings.cpp │ │ ├── freqencoder.cu │ │ └── freqencoder.h ├── gridencoder │ ├── __init__.py │ ├── backend.py │ ├── build │ │ ├── lib.linux-x86_64-cpython-310 │ │ │ └── _gridencoder.cpython-310-x86_64-linux-gnu.so │ │ └── temp.linux-x86_64-cpython-310 │ │ │ ├── .ninja_deps │ │ │ ├── .ninja_log │ │ │ ├── build.ninja │ │ │ └── data │ │ │ └── ruihan │ │ │ └── 16825 │ │ │ └── a4_internal │ │ │ └── Q2 │ │ │ └── stable-dreamfusion │ │ │ └── gridencoder │ │ │ └── src │ │ │ ├── bindings.o │ │ │ └── gridencoder.o │ ├── grid.py │ ├── gridencoder.egg-info │ │ ├── PKG-INFO │ │ ├── SOURCES.txt │ │ ├── dependency_links.txt │ │ └── top_level.txt │ ├── setup.py │ └── src │ │ ├── bindings.cpp │ │ ├── gridencoder.cu │ │ └── gridencoder.h ├── implicit.py ├── meshutils.py ├── nerf │ ├── config_parser.py │ ├── network_grid.py │ ├── provider.py │ ├── renderer.py │ └── utils.py ├── optimizer.py ├── raymarching │ ├── __init__.py │ ├── backend.py │ ├── build │ │ ├── lib.linux-x86_64-cpython-310 │ │ │ └── _raymarching.cpython-310-x86_64-linux-gnu.so │ │ └── temp.linux-x86_64-cpython-310 │ │ │ ├── .ninja_deps │ │ │ ├── .ninja_log │ │ │ ├── build.ninja │ │ │ └── data │ │ │ └── ruihan │ │ │ └── 16825 │ │ │ └── a4_internal │ 
│ │ └── Q2 │ │ │ └── stable-dreamfusion │ │ │ └── raymarching │ │ │ └── src │ │ │ ├── bindings.o │ │ │ └── raymarching.o │ ├── raymarching.egg-info │ │ ├── PKG-INFO │ │ ├── SOURCES.txt │ │ ├── dependency_links.txt │ │ └── top_level.txt │ ├── raymarching.py │ ├── setup.py │ └── src │ │ ├── bindings.cpp │ │ ├── raymarching.cu │ │ └── raymarching.h └── utils.py ├── README.md └── ref_output ├── Q21_a_hamburger_no_guidance.png ├── Q21_a_hamburger_w_guidance.png ├── Q22_a_dotted_black_and_white_cow.png ├── Q22_an_orange_golden_bull.png ├── Q23_a_standing_corgi_dog.png ├── q1_render_example.png ├── q1_training_example_1.png ├── q1_training_example_2.png ├── q1_with_sh.png └── q1_without_sh.png /.gitignore: -------------------------------------------------------------------------------- 1 | **/output**/** 2 | **__pycache__/** 3 | **.pyc 4 | **.pth -------------------------------------------------------------------------------- /Q1/data/chair.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/chair.ply -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000001.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000001.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000002.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000002.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000003.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000003.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000004.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000004.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000005.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000005.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000006.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000006.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000007.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000007.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000008.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000008.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000009.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000009.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000010.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000010.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000011.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000011.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000012.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000012.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000013.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000013.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000014.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000014.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000015.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000015.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000016.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000016.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000017.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000017.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000018.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000018.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000019.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000019.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000020.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000020.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000021.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000021.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000022.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000022.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000023.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000023.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000024.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000024.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000025.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000025.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000026.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000026.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000027.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000027.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000028.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000028.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000029.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000029.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000030.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000030.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000031.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000031.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000032.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000032.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000033.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000033.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000034.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000034.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000035.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000035.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000036.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000036.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000037.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000037.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000038.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000038.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000039.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000039.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000040.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000040.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000041.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000041.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000042.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000042.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000043.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000043.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000044.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000044.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000045.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000045.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000046.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000046.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000047.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000047.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000048.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000048.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000049.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000049.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000050.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000050.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000051.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000051.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000052.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000052.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000053.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000053.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000054.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000054.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000055.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000055.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000056.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000056.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000057.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000057.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000058.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000058.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000059.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000059.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000060.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000060.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000061.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000061.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000062.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000062.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000063.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000063.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000064.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000064.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000065.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000065.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000066.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000066.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000067.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000067.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000068.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000068.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000069.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000069.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000070.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000070.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000071.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000071.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000072.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000072.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000073.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000073.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000074.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000074.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000075.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000075.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000076.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000076.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000077.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000077.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000078.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000078.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000079.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000079.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000080.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000080.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000081.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000081.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000082.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000082.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000083.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000083.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000084.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000084.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000085.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000085.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000086.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000086.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000087.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000087.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000088.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000088.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000089.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000089.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000090.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000090.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000091.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000091.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000092.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000092.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000093.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000093.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000094.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000094.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000095.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000095.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000096.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000096.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000097.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000097.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000098.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000098.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000099.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000099.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000100.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000100.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000101.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000101.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000102.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000102.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000103.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000103.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000104.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000104.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000105.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000105.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000106.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000106.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000107.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000107.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000108.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000108.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000109.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000109.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000110.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000110.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000111.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000111.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000112.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000112.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000113.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000113.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000114.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000114.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000115.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000115.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000116.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000116.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000117.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000117.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000118.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000118.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000119.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000119.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000120.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000120.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000121.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000121.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000122.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000122.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000123.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000123.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000124.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000124.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000125.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000125.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000126.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000126.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000127.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000127.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000128.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000128.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000129.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000129.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000130.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000130.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000131.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000131.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000132.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000132.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000133.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000133.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000134.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000134.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000135.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000135.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000136.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000136.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000137.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000137.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000138.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000138.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000139.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000139.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000140.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000140.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000141.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000141.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000142.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000142.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000143.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000143.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000144.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000144.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000145.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000145.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000146.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000146.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000147.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000147.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000148.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000148.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000149.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000149.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000150.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000150.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000151.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000151.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000152.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000152.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000153.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000153.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000154.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000154.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000155.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000155.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000156.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000156.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000157.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000157.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000158.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000158.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000159.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000159.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000160.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000160.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000161.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000161.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000162.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000162.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000163.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000163.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000164.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000164.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000165.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000165.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000166.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000166.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000167.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000167.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000168.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000168.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000169.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000169.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000170.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000170.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000171.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000171.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000172.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000172.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000173.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000173.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000174.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000174.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000175.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000175.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000176.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000176.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000177.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000177.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000178.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000178.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000179.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000179.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000180.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000180.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000181.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000181.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000182.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000182.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000183.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000183.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000184.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000184.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000185.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000185.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000186.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000186.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000187.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000187.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000188.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000188.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000189.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000189.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000190.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000190.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000191.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000191.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000192.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000192.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000193.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000193.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000194.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000194.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000195.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000195.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000196.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000196.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000197.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000197.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000198.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000198.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000199.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000199.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000200.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000200.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000201.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000201.jpg -------------------------------------------------------------------------------- /Q1/data/truck/imgs/frame000202.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/imgs/frame000202.jpg -------------------------------------------------------------------------------- /Q1/data/truck/points/points_10000.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/points/points_10000.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000001.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000001.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000002.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000002.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000003.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000003.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000004.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000004.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000005.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000005.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000006.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000006.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000007.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000007.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000008.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000008.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000009.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000009.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000010.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000010.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000011.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000011.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000012.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000012.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000013.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000013.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000014.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000014.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000015.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000015.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000016.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000016.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000017.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000017.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000018.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000018.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000019.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000019.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000020.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000020.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000021.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000021.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000022.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000022.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000023.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000023.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000024.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000024.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000025.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000025.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000026.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000026.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000027.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000027.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000028.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000028.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000029.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000029.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000030.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000030.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000031.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000031.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000032.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000032.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000033.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000033.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000034.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000034.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000035.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000035.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000036.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000036.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000037.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000037.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000038.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000038.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000039.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000039.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000040.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000040.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000041.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000041.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000042.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000042.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000043.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000043.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000044.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000044.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000045.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000045.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000046.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000046.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000047.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000047.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000048.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000048.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000049.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000049.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000050.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000050.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000051.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000051.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000052.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000052.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000053.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000053.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000054.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000054.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000055.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000055.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000056.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000056.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000057.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000057.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000058.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000058.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000059.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000059.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000060.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000060.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000061.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000061.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000062.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000062.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000063.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000063.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000064.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000064.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000065.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000065.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000066.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000066.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000067.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000067.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000068.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000068.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000069.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000069.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000070.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000070.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000071.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000071.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000072.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000072.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000073.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000073.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000074.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000074.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000075.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000075.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000076.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000076.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000077.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000077.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000078.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000078.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000079.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000079.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000080.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000080.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000081.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000081.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000082.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000082.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000083.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000083.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000084.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000084.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000085.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000085.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000086.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000086.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000087.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000087.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000088.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000088.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000089.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000089.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000090.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000090.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000091.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000091.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000092.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000092.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000093.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000093.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000094.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000094.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000095.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000095.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000096.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000096.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000097.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000097.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000098.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000098.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000099.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000099.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000100.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000100.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000101.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000101.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000102.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000102.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000103.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000103.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000104.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000104.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000105.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000105.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000106.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000106.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000107.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000107.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000108.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000108.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000109.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000109.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000110.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000110.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000111.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000111.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000112.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000112.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000113.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000113.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000114.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000114.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000115.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000115.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000116.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000116.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000117.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000117.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000118.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000118.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000119.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000119.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000120.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000120.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000121.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000121.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000122.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000122.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000123.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000123.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000124.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000124.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000125.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000125.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000126.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000126.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000127.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000127.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000128.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000128.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000129.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000129.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000130.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000130.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000131.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000131.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000132.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000132.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000133.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000133.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000134.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000134.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000135.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000135.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000136.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000136.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000137.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000137.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000138.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000138.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000139.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000139.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000140.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000140.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000141.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000141.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000142.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000142.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000143.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000143.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000144.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000144.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000145.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000145.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000146.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000146.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000147.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000147.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000148.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000148.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000149.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000149.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000150.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000150.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000151.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000151.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000152.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000152.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000153.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000153.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000154.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000154.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000155.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000155.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000156.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000156.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000157.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000157.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000158.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000158.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000159.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000159.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000160.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000160.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000161.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000161.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000162.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000162.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000163.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000163.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000164.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000164.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000165.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000165.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000166.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000166.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000167.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000167.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000168.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000168.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000169.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000169.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000170.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000170.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000171.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000171.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000172.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000172.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000173.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000173.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000174.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000174.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000175.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000175.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000176.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000176.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000177.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000177.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000178.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000178.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000179.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000179.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000180.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000180.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000181.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000181.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000182.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000182.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000183.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000183.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000184.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000184.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000185.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000185.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000186.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000186.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000187.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000187.npy -------------------------------------------------------------------------------- 
/Q1/data/truck/poses/frame000188.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000188.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000189.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000189.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000190.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000190.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000191.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000191.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000192.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000192.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000193.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000193.npy 
-------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000194.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000194.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000195.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000195.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000196.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000196.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000197.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000197.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000198.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000198.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000199.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000199.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000200.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000200.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000201.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000201.npy -------------------------------------------------------------------------------- /Q1/data/truck/poses/frame000202.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q1/data/truck/poses/frame000202.npy -------------------------------------------------------------------------------- /Q1/data_utils_harder_scene.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # All rights reserved. 3 | # 4 | # This source code is licensed under the BSD-style license found in the 5 | # LICENSE file in the root directory of this source tree. 
6 | 7 | import os 8 | from typing import List, Optional, Tuple 9 | 10 | import numpy as np 11 | import requests 12 | import torch 13 | from PIL import Image 14 | from pytorch3d.renderer import PerspectiveCameras 15 | from torch.utils.data import Dataset 16 | 17 | import matplotlib.pyplot as plt 18 | 19 | 20 | DEFAULT_DATA_ROOT = os.path.join( 21 | os.path.dirname(os.path.realpath(__file__)), "data" 22 | ) 23 | 24 | DEFAULT_URL_ROOT = "https://dl.fbaipublicfiles.com/pytorch3d_nerf_data" 25 | 26 | def trivial_collate(batch): 27 | """ 28 | A trivial collate function that merely returns the uncollated batch. 29 | """ 30 | return batch 31 | 32 | 33 | class ListDataset(Dataset): 34 | """ 35 | A simple dataset made of a list of entries. 36 | """ 37 | 38 | def __init__(self, entries: List) -> None: 39 | """ 40 | Args: 41 | entries: The list of dataset entries. 42 | """ 43 | self._entries = entries 44 | 45 | def __len__( 46 | self, 47 | ) -> int: 48 | return len(self._entries) 49 | 50 | def __getitem__(self, index): 51 | return self._entries[index] 52 | 53 | 54 | def get_nerf_datasets( 55 | dataset_name: str, # 'lego | fern' 56 | image_size: Tuple[int, int], 57 | data_root: str = DEFAULT_DATA_ROOT, 58 | autodownload: bool = True, 59 | ) -> Tuple[Dataset, Dataset, Dataset]: 60 | """ 61 | Obtains the training and validation dataset object for a dataset specified 62 | with the `dataset_name` argument. 63 | 64 | Args: 65 | dataset_name: The name of the dataset to load. 66 | image_size: A tuple (height, width) denoting the sizes of the loaded dataset images. 67 | data_root: The root folder at which the data is stored. 68 | autodownload: Auto-download the dataset files in case they are missing. 69 | 70 | Returns: 71 | train_dataset: The training dataset object. 72 | val_dataset: The validation dataset object. 73 | test_dataset: The testing dataset object. 
74 | """ 75 | 76 | # if dataset_name not in ALL_DATASETS: 77 | # raise ValueError(f"'{dataset_name}'' does not refer to a known dataset.") 78 | 79 | print(f"Loading dataset {dataset_name}, image size={str(image_size)} ...") 80 | 81 | cameras_path = os.path.join(data_root, dataset_name + ".pth") 82 | image_path = cameras_path.replace(".pth", ".png") 83 | 84 | train_data = torch.load(cameras_path) 85 | n_cameras = train_data["cameras"]["R"].shape[0] 86 | 87 | _image_max_image_pixels = Image.MAX_IMAGE_PIXELS 88 | Image.MAX_IMAGE_PIXELS = None # The dataset image is very large ... 89 | images = torch.FloatTensor(np.array(Image.open(image_path))) / 255.0 90 | images = torch.stack(torch.chunk(images, n_cameras, dim=0))[..., :3] 91 | Image.MAX_IMAGE_PIXELS = _image_max_image_pixels 92 | 93 | scale_factors = [s_new / s for s, s_new in zip(images.shape[1:3], image_size)] 94 | 95 | if abs(scale_factors[0] - scale_factors[1]) > 1e-3: 96 | raise ValueError( 97 | "Non-isotropic scaling is not allowed. Consider changing the 'image_size' argument." 
98 | ) 99 | scale_factor = sum(scale_factors) * 0.5 100 | 101 | if scale_factor != 1.0: 102 | print(f"Rescaling dataset (factor={scale_factor})") 103 | images = torch.nn.functional.interpolate( 104 | images.permute(0, 3, 1, 2), 105 | size=tuple(image_size), 106 | mode="bilinear", 107 | ).permute(0, 2, 3, 1) 108 | 109 | cameras = [ 110 | PerspectiveCameras( 111 | **{k: v[cami][None] for k, v in train_data["cameras"].items()} 112 | ).to("cpu") 113 | for cami in range(n_cameras) 114 | ] 115 | 116 | train_idx, val_idx, test_idx = train_data["split"] 117 | 118 | train_dataset, val_dataset, test_dataset = [ 119 | ListDataset( 120 | [ 121 | {"image": images[i], "camera": cameras[i], "camera_idx": int(i)} 122 | for i in idx 123 | ] 124 | ) 125 | for idx in [train_idx, val_idx, test_idx] 126 | ] 127 | 128 | return train_dataset, val_dataset, test_dataset 129 | 130 | 131 | def download_data( 132 | dataset_names: Optional[List[str]] = None, 133 | data_root: str = DEFAULT_DATA_ROOT, 134 | url_root: str = DEFAULT_URL_ROOT, 135 | ) -> None: 136 | """ 137 | Downloads the relevant dataset files. 
138 | """ 139 | 140 | if dataset_names is None: 141 | raise RuntimeError 142 | 143 | os.makedirs(data_root, exist_ok=True) 144 | 145 | for dataset_name in dataset_names: 146 | cameras_file = dataset_name + ".pth" 147 | images_file = cameras_file.replace(".pth", ".png") 148 | license_file = cameras_file.replace(".pth", "_license.txt") 149 | 150 | for fl in (cameras_file, images_file, license_file): 151 | local_fl = os.path.join(data_root, fl) 152 | remote_fl = os.path.join(url_root, fl) 153 | 154 | print(f"Downloading dataset {dataset_name} from {remote_fl} to {local_fl}.") 155 | 156 | r = requests.get(remote_fl) 157 | 158 | with open(local_fl, "wb") as f: 159 | f.write(r.content) 160 | 161 | 162 | -------------------------------------------------------------------------------- /Q1/render.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | import imageio 4 | import argparse 5 | import numpy as np 6 | 7 | from PIL import Image 8 | from tqdm import tqdm 9 | from model import Gaussians, Scene 10 | from data_utils import colour_depth_q1_render 11 | from pytorch3d.renderer.cameras import PerspectiveCameras, look_at_view_transform 12 | 13 | def create_renders(args): 14 | 15 | dim = args.img_dim 16 | img_size = (dim, dim) 17 | 18 | num_views = 32 19 | azims = np.linspace(-180, 180, num_views) 20 | elevs = np.linspace(-180, 180, num_views) 21 | 22 | debug_root = os.path.join(args.out_path, "q1_render") 23 | if not os.path.exists(debug_root): 24 | os.makedirs(debug_root, exist_ok=True) 25 | 26 | # Loading pre-trained gaussians 27 | gaussians = Gaussians( 28 | load_path=args.data_path, init_type="gaussians", 29 | device=args.device 30 | ) 31 | 32 | # Preprocessing for ease of rendering 33 | new_points = gaussians.means - gaussians.means.mean(dim=0, keepdims=True) 34 | gaussians.means = new_points 35 | 36 | # Creating the scene with the loaded gaussians 37 | scene = Scene(gaussians) 38 | 39 | imgs = [] 40 | 
for i in tqdm(range(num_views), desc="Rendering"): 41 | 42 | dist = 6.0 43 | R, T = look_at_view_transform(dist = dist, azim=azims[i], elev=elevs[i], up=((0, -1, 0),)) 44 | camera = PerspectiveCameras( 45 | focal_length=5.0 * dim/2.0, in_ndc=False, 46 | principal_point=((dim/2, dim/2),), 47 | R=R, T=T, image_size=(img_size,), 48 | ).to(args.device) 49 | 50 | with torch.no_grad(): 51 | # Rendering scene using gaussian splatting 52 | ### YOUR CODE HERE ### 53 | # HINT: Can any function from the Scene class help? 54 | # HINT: Set bg_colour to (1.0, 1.0, 1.0) 55 | # HINT: Get per_splat from args.gaussians_per_splat 56 | # HINT: img_size and camera are available above 57 | img, depth, mask = None 58 | 59 | debug_path = os.path.join(debug_root, f"{i:03d}.png") 60 | img = img.detach().cpu().numpy() 61 | mask = mask.repeat(1, 1, 3).detach().cpu().numpy() 62 | depth = depth.detach().cpu().numpy() 63 | 64 | img = (np.clip(img, 0.0, 1.0) * 255.0).astype(np.uint8) 65 | mask = np.where(mask > 0.5, 255.0, 0.0).astype(np.uint8) # (H, W, 3) 66 | 67 | # Colouring the depth map 68 | depth = depth[:, :, 0].astype(np.float32) # (H, W) 69 | coloured_depth = colour_depth_q1_render(depth) # (H, W, 3) 70 | 71 | concat = np.concatenate([img, coloured_depth, mask], axis = 1) 72 | resized = Image.fromarray(concat).resize((256*3, 256)) 73 | resized.save(debug_path) 74 | 75 | imgs.append(np.array(resized)) 76 | 77 | gif_path = os.path.join(args.out_path, "q1_render.gif") 78 | imageio.mimwrite(gif_path, imgs, duration=1000.0*(1/10.0), loop=0) 79 | 80 | def get_args(): 81 | 82 | parser = argparse.ArgumentParser() 83 | parser.add_argument( 84 | "--out_path", default="./output", type=str, 85 | help="Path to the directory where output should be saved to." 86 | ) 87 | parser.add_argument( 88 | "--data_path", default="./data/chair.ply", type=str, 89 | help="Path to the pre-trained gaussian data to be rendered." 
90 | ) 91 | parser.add_argument( 92 | "--img_dim", default=256, type=int, 93 | help=( 94 | "Spatial dimension of the rendered image. " 95 | "The rendered image will have img_dim as its height and width." 96 | ) 97 | ) 98 | parser.add_argument( 99 | "--gaussians_per_splat", default=2048, type=int, 100 | help=( 101 | "Number of gaussians to splat in one function call. If set to -1, " 102 | "then all gaussians in the scene are splat in a single function call. " 103 | "If set to any other positive interger, then it determines the number of " 104 | "gaussians to splat per function call (the last function call might splat " 105 | "lesser number of gaussians). In general, the algorithm can run faster " 106 | "if more gaussians are splat per function call, but at the cost of higher GPU " 107 | "memory consumption." 108 | ) 109 | ) 110 | parser.add_argument("--device", default="cuda", type=str, choices=["cuda", "cpu"]) 111 | args = parser.parse_args() 112 | return args 113 | 114 | if __name__ == "__main__": 115 | 116 | args = get_args() 117 | create_renders(args) 118 | -------------------------------------------------------------------------------- /Q2/Q21_image_optimization.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import os.path as osp 4 | import time 5 | 6 | import torch 7 | import torch.nn as nn 8 | from PIL import Image 9 | from SDS import SDS 10 | from tqdm import tqdm 11 | from utils import get_cosine_schedule_with_warmup, prepare_embeddings, seed_everything 12 | 13 | 14 | def optimize_an_image( 15 | sds, 16 | prompt, 17 | neg_prompt="", 18 | img=None, 19 | log_interval=100, 20 | args=None 21 | ): 22 | """ 23 | Optimize an image to match the prompt. 24 | """ 25 | # Step 1. 
Create text embeddings from prompt 26 | embeddings = prepare_embeddings(sds, prompt, neg_prompt, view_dependent=False) 27 | sds.text_encoder.to("cpu") # free up GPU memory 28 | torch.cuda.empty_cache() 29 | 30 | # Step 2. Initialize latents to optimize 31 | latents = nn.Parameter(torch.randn(1, 4, 64, 64, device=sds.device)) 32 | 33 | # Step 3. Create optimizer and loss function 34 | optimizer = torch.optim.AdamW([latents], lr=1e-1, weight_decay=0) 35 | total_iter = 2000 36 | scheduler = get_cosine_schedule_with_warmup(optimizer, 100, int(total_iter * 1.5)) 37 | 38 | # Step 4. Training loop to optimize the latents 39 | for i in tqdm(range(total_iter)): 40 | optimizer.zero_grad() 41 | # Forward pass to compute the loss 42 | 43 | ### YOUR CODE HERE ### 44 | if args.sds_guidance: 45 | loss = 46 | else: 47 | loss = 48 | 49 | # Backward pass 50 | loss.backward() 51 | optimizer.step() 52 | scheduler.step() 53 | 54 | # clamping the latents to avoid over saturation 55 | latents.data = latents.data.clip(-1, 1) 56 | 57 | if i % log_interval == 0 or i == total_iter - 1: 58 | # Decode the image to visualize the progress 59 | img = sds.decode_latents(latents.detach()) 60 | # Save the image 61 | output_im = Image.fromarray(img.astype("uint8")) 62 | output_path = os.path.join( 63 | sds.output_dir, 64 | f"output_{prompt[0].replace(' ', '_')}_iter_{i}.png", 65 | ) 66 | output_im.save(output_path) 67 | 68 | return img 69 | 70 | 71 | if __name__ == "__main__": 72 | parser = argparse.ArgumentParser() 73 | parser.add_argument("--prompt", type=str, default="a hamburger") 74 | parser.add_argument("--seed", type=int, default=42) 75 | parser.add_argument("--output_dir", type=str, default="output") 76 | parser.add_argument("--sds_guidance", type=int, default=0, choices=[0, 1], help="boolen option to add guidance to the SDS loss") 77 | parser.add_argument( 78 | "--postfix", 79 | type=str, 80 | default="", 81 | help="postfix for the output directory to differentiate multiple runs", 82 | ) 83 
| args = parser.parse_args() 84 | 85 | seed_everything(args.seed) 86 | 87 | # create output directory 88 | args.output_dir = osp.join(args.output_dir, "image") 89 | output_dir = os.path.join( 90 | args.output_dir, args.prompt.replace(" ", "_") + args.postfix 91 | ) 92 | os.makedirs(output_dir, exist_ok=True) 93 | 94 | # initialize SDS 95 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 96 | sds = SDS(sd_version="2.1", device=device, output_dir=output_dir) 97 | 98 | # optimize an image 99 | prompt = args.prompt 100 | start_time = time.time() 101 | img = optimize_an_image(sds, prompt=prompt, args=args) 102 | print(f"Optimization took {time.time() - start_time:.2f} seconds") 103 | 104 | # save the output image 105 | img = Image.fromarray(img.astype("uint8")) 106 | output_path = os.path.join(output_dir, f"output.png") 107 | print(f"Saving image to {output_path}") 108 | img.save(output_path) 109 | -------------------------------------------------------------------------------- /Q2/Q22_mesh_optimization.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import os.path as osp 4 | import time 5 | 6 | import numpy as np 7 | import pytorch3d 8 | import torch 9 | from implicit import ColorField 10 | from PIL import Image 11 | from pytorch3d.renderer import ( 12 | FoVPerspectiveCameras, 13 | TexturesVertex, 14 | look_at_view_transform, 15 | ) 16 | from SDS import SDS 17 | from tqdm import tqdm 18 | from utils import ( 19 | get_cosine_schedule_with_warmup, 20 | get_mesh_renderer_soft, 21 | init_mesh, 22 | prepare_embeddings, 23 | render_360_views, 24 | seed_everything, 25 | ) 26 | 27 | 28 | def optimize_mesh_texture( 29 | sds, 30 | mesh_path, 31 | prompt, 32 | neg_prompt="", 33 | device="cpu", 34 | log_interval=100, 35 | save_mesh=True, 36 | args=None, 37 | ): 38 | """ 39 | Optimize the texture map of a mesh to match the prompt. 40 | """ 41 | # Step 1. 
Create text embeddings from prompt 42 | embeddings = prepare_embeddings(sds, prompt, neg_prompt, view_dependent=False) 43 | sds.text_encoder.to("cpu") # free up GPU memory 44 | torch.cuda.empty_cache() 45 | 46 | # Step 2. Load the mesh 47 | mesh, vertices, faces, aux = init_mesh(mesh_path, device=device) 48 | vertices = vertices.unsqueeze(0).to(device) # (N_v, 3) -> (1, N_v, 3) 49 | faces = faces.unsqueeze(0).to(device) # (N_f, 3) -> (1, N_f, 3) 50 | 51 | # Step 2.1 Initialize a randome texture map (optimizable parameter) 52 | # create a texture field with implicit function 53 | color_field = ColorField().to(device) # input (1, N_v, xyz) -> output (1, N_v, rgb) 54 | mesh = pytorch3d.structures.Meshes( 55 | verts=vertices, 56 | faces=faces, 57 | textures=TexturesVertex(verts_features=color_field(vertices)), 58 | ) 59 | mesh = mesh.to(device) 60 | 61 | # Step 3.1 Initialize the renderer 62 | lights = pytorch3d.renderer.PointLights(location=[[0, 0, -3]], device=device) 63 | renderer = get_mesh_renderer_soft(image_size=512, device=device, lights=lights) 64 | 65 | # For logging purpose, render 360 views of the initial mesh 66 | if save_mesh: 67 | render_360_views( 68 | mesh.detach(), 69 | renderer, 70 | device=device, 71 | output_path=osp.join(sds.output_dir, "initial_mesh.gif"), 72 | ) 73 | 74 | # Step 3.2. Initialize the cameras 75 | # check the size of the mesh so that it is in the field of view 76 | print( 77 | f"check mesh range: {vertices.min()}, {vertices.max()}, center {vertices.mean(1)}" 78 | ) 79 | 80 | ### YOUR CODE HERE ### 81 | # create a list of query cameras as the training set 82 | # Note: to create the dataset, you can either pre-define a list of query cameras as below or randomly sample a camera pose on the fly in the training loop. 83 | query_cameras = [] # optional 84 | 85 | # Step 4. 
Create optimizer training parameters 86 | optimizer = torch.optim.AdamW(color_field.parameters(), lr=5e-4, weight_decay=0) 87 | total_iter = 2000 88 | scheduler = get_cosine_schedule_with_warmup(optimizer, 100, int(total_iter * 1.5)) 89 | 90 | # Step 5. Training loop to optimize the texture map 91 | loss_dict = {} 92 | for i in tqdm(range(total_iter)): 93 | # Initialize optimizer 94 | optimizer.zero_grad() 95 | 96 | # Update the textures 97 | mesh.textures = TexturesVertex(verts_features=color_field(vertices)) 98 | 99 | ### YOUR CODE HERE ### 100 | 101 | # Forward pass 102 | # Render a randomly sampled camera view to optimize in this iteration 103 | rend = 104 | # Encode the rendered image to latents 105 | latents = 106 | # Compute the loss 107 | loss = 108 | 109 | 110 | 111 | # Backward pass 112 | loss.backward() 113 | optimizer.step() 114 | scheduler.step() 115 | 116 | # clamping the latents to avoid over saturation 117 | latents.data = latents.data.clip(-1, 1) 118 | if i % log_interval == 0 or i == total_iter - 1: 119 | # save the loss 120 | loss_dict[i] = loss.item() 121 | 122 | # save the image 123 | img = sds.decode_latents(latents.detach()) 124 | output_im = Image.fromarray(img.astype("uint8")) 125 | output_path = os.path.join( 126 | sds.output_dir, 127 | f"output_{prompt[0].replace(' ', '_')}_iter_{i}.png", 128 | ) 129 | output_im.save(output_path) 130 | 131 | if save_mesh: 132 | render_360_views( 133 | mesh.detach(), 134 | renderer, 135 | device=device, 136 | output_path=osp.join(sds.output_dir, f"final_mesh.gif"), 137 | ) 138 | 139 | 140 | if __name__ == "__main__": 141 | parser = argparse.ArgumentParser() 142 | parser.add_argument("--prompt", type=str, default="a hamburger") 143 | parser.add_argument("--seed", type=int, default=42) 144 | parser.add_argument("--output_dir", type=str, default="output") 145 | parser.add_argument( 146 | "--postfix", 147 | type=str, 148 | default="", 149 | help="postfix for the output directory to differentiate multiple 
runs", 150 | ) 151 | 152 | parser.add_argument( 153 | "-m", 154 | "--mesh_path", 155 | type=str, 156 | default="data/cow.obj", 157 | help="Path to the input image", 158 | ) 159 | args = parser.parse_args() 160 | 161 | seed_everything(args.seed) 162 | 163 | # create output directory 164 | args.output_dir = osp.join(args.output_dir, "mesh") 165 | output_dir = os.path.join( 166 | args.output_dir, args.prompt.replace(" ", "_") + args.postfix 167 | ) 168 | os.makedirs(output_dir, exist_ok=True) 169 | 170 | # initialize SDS 171 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 172 | sds = SDS(sd_version="2.1", device=device, output_dir=output_dir) 173 | 174 | # optimize the texture map of a mesh 175 | start_time = time.time() 176 | assert ( 177 | args.mesh_path is not None 178 | ), "mesh_path should be provided for optimizing the texture map for a mesh" 179 | optimize_mesh_texture( 180 | sds, mesh_path=args.mesh_path, prompt=args.prompt, device=device, args=args 181 | ) 182 | print(f"Optimization took {time.time() - start_time:.2f} seconds") 183 | -------------------------------------------------------------------------------- /Q2/SDS.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | from diffusers import DDIMScheduler, StableDiffusionPipeline 4 | 5 | 6 | class SDS: 7 | """ 8 | Class to implement the SDS loss function. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | sd_version="2.1", 14 | device="cpu", 15 | t_range=[0.02, 0.98], 16 | output_dir="output", 17 | ): 18 | """ 19 | Load the Stable Diffusion model and set the parameters. 
20 | 21 | Args: 22 | sd_version (str): version for stable diffusion model 23 | device (_type_): _description_ 24 | """ 25 | 26 | # Set the stable diffusion model key based on the version 27 | if sd_version == "2.1": 28 | sd_model_key = "stabilityai/stable-diffusion-2-1-base" 29 | else: 30 | raise NotImplementedError( 31 | f"Stable diffusion version {sd_version} not supported" 32 | ) 33 | 34 | # Set parameters 35 | self.H = 512 # default height of Stable Diffusion 36 | self.W = 512 # default width of Stable Diffusion 37 | self.num_inference_steps = 50 38 | self.output_dir = output_dir 39 | self.device = device 40 | self.precision_t = torch.float32 41 | 42 | # Create model 43 | sd_pipe = StableDiffusionPipeline.from_pretrained( 44 | sd_model_key, torch_dtype=self.precision_t 45 | ).to(device) 46 | 47 | self.vae = sd_pipe.vae 48 | self.tokenizer = sd_pipe.tokenizer 49 | self.text_encoder = sd_pipe.text_encoder 50 | self.unet = sd_pipe.unet 51 | self.scheduler = DDIMScheduler.from_pretrained( 52 | sd_model_key, subfolder="scheduler", torch_dtype=self.precision_t 53 | ) 54 | del sd_pipe 55 | 56 | self.num_train_timesteps = self.scheduler.config.num_train_timesteps 57 | self.min_step = int(self.num_train_timesteps * t_range[0]) 58 | self.max_step = int(self.num_train_timesteps * t_range[1]) 59 | self.alphas = self.scheduler.alphas_cumprod.to( 60 | self.device 61 | ) # for convenient access 62 | 63 | print(f"[INFO] loaded stable diffusion!") 64 | 65 | @torch.no_grad() 66 | def get_text_embeddings(self, prompt): 67 | """ 68 | Get the text embeddings for the prompt. 69 | 70 | Args: 71 | prompt (list of string): text prompt to encode. 
72 | """ 73 | text_input = self.tokenizer( 74 | prompt, 75 | padding="max_length", 76 | max_length=self.tokenizer.model_max_length, 77 | return_tensors="pt", 78 | ) 79 | text_embeddings = self.text_encoder(text_input.input_ids.to(self.device))[0] 80 | return text_embeddings 81 | 82 | def encode_imgs(self, img): 83 | """ 84 | Encode the image to latent representation. 85 | 86 | Args: 87 | img (tensor): image to encode. shape (N, 3, H, W), range [0, 1] 88 | 89 | Returns: 90 | latents (tensor): latent representation. shape (1, 4, 64, 64) 91 | """ 92 | # check the shape of the image should be 512x512 93 | assert img.shape[-2:] == (512, 512), "Image shape should be 512x512" 94 | 95 | img = 2 * img - 1 # [0, 1] => [-1, 1] 96 | 97 | posterior = self.vae.encode(img).latent_dist 98 | latents = posterior.sample() * self.vae.config.scaling_factor 99 | 100 | return latents 101 | 102 | def decode_latents(self, latents): 103 | """ 104 | Decode the latent representation into RGB image. 105 | 106 | Args: 107 | latents (tensor): latent representation. shape (1, 4, 64, 64), range [-1, 1] 108 | 109 | Returns: 110 | imgs[0] (np.array): decoded image. shape (512, 512, 3), range [0, 255] 111 | """ 112 | latents = 1 / self.vae.config.scaling_factor * latents 113 | 114 | imgs = self.vae.decode(latents.type(self.precision_t)).sample 115 | imgs = (imgs / 2 + 0.5).clamp(0, 1) # [-1, 1] => [0, 1] 116 | imgs = imgs.detach().cpu().permute(0, 2, 3, 1).numpy() # torch to numpy 117 | imgs = (imgs * 255).round() # [0, 1] => [0, 255] 118 | return imgs[0] 119 | 120 | def sds_loss( 121 | self, 122 | latents, 123 | text_embeddings, 124 | text_embeddings_uncond=None, 125 | guidance_scale=100, 126 | grad_scale=1, 127 | ): 128 | """ 129 | Compute the SDS loss. 
130 | 131 | Args: 132 | latents (tensor): input latents, shape [1, 4, 64, 64] 133 | text_embeddings (tensor): conditional text embedding (for positive prompt), shape [1, 77, 1024] 134 | text_embeddings_uncond (tensor, optional): unconditional text embedding (for negative prompt), shape [1, 77, 1024]. Defaults to None. 135 | guidance_scale (int, optional): weight scaling for guidance. Defaults to 100. 136 | grad_scale (int, optional): gradient scaling. Defaults to 1. 137 | 138 | Returns: 139 | loss (tensor): SDS loss 140 | """ 141 | 142 | # sample a timestep ~ U(0.02, 0.98) to avoid very high/low noise level 143 | t = torch.randint( 144 | self.min_step, 145 | self.max_step + 1, 146 | (latents.shape[0],), 147 | dtype=torch.long, 148 | device=self.device, 149 | ) 150 | 151 | # predict the noise residual with unet, NO grad! 152 | with torch.no_grad(): 153 | ### YOUR CODE HERE ### 154 | 155 | 156 | if text_embeddings_uncond is not None and guidance_scale != 1: 157 | ### YOUR CODE HERE ### 158 | pass 159 | 160 | 161 | 162 | # Compute SDS loss 163 | w = 1 - self.alphas[t] 164 | ### YOUR CODE HERE ### 165 | 166 | 167 | loss = 168 | 169 | return loss 170 | -------------------------------------------------------------------------------- /Q2/activation.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Function 3 | from torch.cuda.amp import custom_bwd, custom_fwd 4 | 5 | class _trunc_exp(Function): 6 | @staticmethod 7 | @custom_fwd(cast_inputs=torch.float) 8 | def forward(ctx, x): 9 | ctx.save_for_backward(x) 10 | return torch.exp(x) 11 | 12 | @staticmethod 13 | @custom_bwd 14 | def backward(ctx, g): 15 | x = ctx.saved_tensors[0] 16 | return g * torch.exp(x.clamp(max=15)) 17 | 18 | trunc_exp = _trunc_exp.apply 19 | 20 | def biased_softplus(x, bias=0): 21 | return torch.nn.functional.softplus(x - bias) -------------------------------------------------------------------------------- 
import os
from torch.utils.cpp_extension import load

# Directory of this package; the CUDA/C++ sources are located relative to it.
_src_path = os.path.dirname(os.path.abspath(__file__))

# Flags passed to nvcc when JIT-compiling the frequency-encoder extension.
# The -U__CUDA_NO_HALF* flags re-enable half-precision operators that
# PyTorch's build disables by default.
nvcc_flags = [
    '-O3', '-std=c++14',
    '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__',
    '-use_fast_math'
]

# Host-compiler flags differ between POSIX (gcc/clang) and Windows (MSVC).
if os.name == "posix":
    c_flags = ['-O3', '-std=c++14']
elif os.name == "nt":
    c_flags = ['/O2', '/std:c++17']

# find cl.exe
def find_cl_path():
    # Search standard Visual Studio install locations for the MSVC host
    # toolchain directory; returns the newest match, or None (implicitly)
    # when no installation is found.
    import glob
    for program_files in [r"C:\\Program Files (x86)", r"C:\\Program Files"]:
        for edition in ["Enterprise", "Professional", "BuildTools", "Community"]:
            # reverse-sorted so the highest VS/MSVC version wins
            paths = sorted(glob.glob(r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (program_files, edition)), reverse=True)
            if paths:
                return paths[0]

# If cl.exe is not on path, try to find it.
27 | if os.system("where cl.exe >nul 2>nul") != 0: 28 | cl_path = find_cl_path() 29 | if cl_path is None: 30 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 31 | os.environ["PATH"] += ";" + cl_path 32 | 33 | _backend = load(name='_freqencoder', 34 | extra_cflags=c_flags, 35 | extra_cuda_cflags=nvcc_flags, 36 | sources=[os.path.join(_src_path, 'src', f) for f in [ 37 | 'freqencoder.cu', 38 | 'bindings.cpp', 39 | ]], 40 | ) 41 | 42 | __all__ = ['_backend'] -------------------------------------------------------------------------------- /Q2/freqencoder/build/lib.linux-x86_64-cpython-310/_freqencoder.cpython-310-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/freqencoder/build/lib.linux-x86_64-cpython-310/_freqencoder.cpython-310-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /Q2/freqencoder/build/temp.linux-x86_64-cpython-310/.ninja_deps: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/freqencoder/build/temp.linux-x86_64-cpython-310/.ninja_deps -------------------------------------------------------------------------------- /Q2/freqencoder/build/temp.linux-x86_64-cpython-310/.ninja_log: -------------------------------------------------------------------------------- 1 | # ninja log v5 2 | 0 9882 1707064551739982795 /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/bindings.o dd1839430168d104 3 | 0 30361 1707064572220307091 
/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/freqencoder.o 8fe69fd0981553de 4 | -------------------------------------------------------------------------------- /Q2/freqencoder/build/temp.linux-x86_64-cpython-310/build.ninja: -------------------------------------------------------------------------------- 1 | ninja_required_version = 1.3 2 | cxx = c++ 3 | nvcc = /usr/local/cuda/bin/nvcc 4 | 5 | cflags = -pthread -B /home/ruihan/anaconda3/envs/stable-dreamfusion/compiler_compat -Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 6 | post_cflags = -O3 -std=c++14 -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_freqencoder -D_GLIBCXX_USE_CXX11_ABI=0 7 | cuda_cflags = -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include 
-I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 8 | cuda_post_cflags = -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr --compiler-options ''"'"'-fPIC'"'"'' -O3 -std=c++14 -U__CUDA_NO_HALF_OPERATORS__ -U__CUDA_NO_HALF_CONVERSIONS__ -U__CUDA_NO_HALF2_OPERATORS__ -use_fast_math -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_freqencoder -D_GLIBCXX_USE_CXX11_ABI=0 -gencode=arch=compute_86,code=compute_86 -gencode=arch=compute_86,code=sm_86 9 | ldflags = 10 | 11 | rule compile 12 | command = $cxx -MMD -MF $out.d $cflags -c $in -o $out $post_cflags 13 | depfile = $out.d 14 | deps = gcc 15 | 16 | rule cuda_compile 17 | depfile = $out.d 18 | deps = gcc 19 | command = $nvcc $cuda_cflags -c $in -o $out $cuda_post_cflags 20 | 21 | 22 | 23 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/bindings.o: compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/bindings.cpp 24 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/freqencoder.o: cuda_compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/freqencoder.cu 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /Q2/freqencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/freqencoder/src/bindings.o: -------------------------------------------------------------------------------- 
import numpy as np

import torch
import torch.nn as nn
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.cuda.amp import custom_bwd, custom_fwd

# Prefer a pre-installed backend; otherwise JIT-compile it via backend.py.
try:
    import _freqencoder as _backend
except ImportError:
    from .backend import _backend


class _freq_encoder(Function):
    # Autograd wrapper around the CUDA frequency (positional) encoding kernels.

    @staticmethod
    @custom_fwd(cast_inputs=torch.float32)  # force float32 for better precision
    def forward(ctx, inputs, degree, output_dim):
        # inputs: [B, input_dim], float
        # RETURN: [B, F], float

        # The backend kernels are CUDA-only and require contiguous memory.
        if not inputs.is_cuda: inputs = inputs.cuda()
        inputs = inputs.contiguous()

        B, input_dim = inputs.shape # batch size, coord dim

        outputs = torch.empty(B, output_dim, dtype=inputs.dtype, device=inputs.device)

        _backend.freq_encode_forward(inputs, B, input_dim, degree, output_dim, outputs)

        # outputs are saved because backward reuses the sin/cos values.
        ctx.save_for_backward(inputs, outputs)
        ctx.dims = [B, input_dim, degree, output_dim]

        return outputs

    @staticmethod
    #@once_differentiable
    @custom_bwd
    def backward(ctx, grad):
        # grad: [B, C * C]

        grad = grad.contiguous()
        inputs, outputs = ctx.saved_tensors
        B, input_dim, degree, output_dim = ctx.dims

        grad_inputs = torch.zeros_like(inputs)
        _backend.freq_encode_backward(grad, outputs, B, input_dim, degree, output_dim, grad_inputs)

        # Only `inputs` receives a gradient; degree/output_dim are plain ints.
        return grad_inputs, None, None


freq_encode = _freq_encoder.apply


class FreqEncoder(nn.Module):
    # NeRF-style frequency (positional) encoding: identity plus sin/cos at
    # `degree` octaves per input dimension.

    def __init__(self, input_dim=3, degree=4):
        super().__init__()

        self.input_dim = input_dim
        self.degree = degree
        # identity (input_dim) + sin & cos (2) per octave (degree) per dim
        self.output_dim = input_dim + input_dim * 2 * degree

    def __repr__(self):
        return f"FreqEncoder: input_dim={self.input_dim} degree={self.degree} output_dim={self.output_dim}"

    def forward(self, inputs, **kwargs):
        # inputs: [..., input_dim]
        # return: [..., output_dim]

        # Flatten all leading dims so the CUDA kernel sees a 2-D batch,
        # then restore the original prefix shape afterwards.
        prefix_shape = list(inputs.shape[:-1])
        inputs = inputs.reshape(-1, self.input_dim)

        outputs = freq_encode(inputs, self.degree, self.output_dim)

        outputs = outputs.reshape(prefix_shape + [self.output_dim])

        return outputs
freqencoder.egg-info/dependency_links.txt 7 | freqencoder.egg-info/top_level.txt -------------------------------------------------------------------------------- /Q2/freqencoder/freqencoder.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Q2/freqencoder/freqencoder.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | _freqencoder 2 | -------------------------------------------------------------------------------- /Q2/freqencoder/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup 3 | from torch.utils.cpp_extension import BuildExtension, CUDAExtension 4 | 5 | _src_path = os.path.dirname(os.path.abspath(__file__)) 6 | 7 | nvcc_flags = [ 8 | '-O3', '-std=c++14', 9 | '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__', 10 | '-use_fast_math' 11 | ] 12 | 13 | if os.name == "posix": 14 | c_flags = ['-O3', '-std=c++14'] 15 | elif os.name == "nt": 16 | c_flags = ['/O2', '/std:c++17'] 17 | 18 | # find cl.exe 19 | def find_cl_path(): 20 | import glob 21 | for program_files in [r"C:\\Program Files (x86)", r"C:\\Program Files"]: 22 | for edition in ["Enterprise", "Professional", "BuildTools", "Community"]: 23 | paths = sorted(glob.glob(r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (program_files, edition)), reverse=True) 24 | if paths: 25 | return paths[0] 26 | 27 | # If cl.exe is not on path, try to find it. 
28 | if os.system("where cl.exe >nul 2>nul") != 0: 29 | cl_path = find_cl_path() 30 | if cl_path is None: 31 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 32 | os.environ["PATH"] += ";" + cl_path 33 | 34 | setup( 35 | name='freqencoder', # package name, import this to use python API 36 | ext_modules=[ 37 | CUDAExtension( 38 | name='_freqencoder', # extension name, import this to use CUDA API 39 | sources=[os.path.join(_src_path, 'src', f) for f in [ 40 | 'freqencoder.cu', 41 | 'bindings.cpp', 42 | ]], 43 | extra_compile_args={ 44 | 'cxx': c_flags, 45 | 'nvcc': nvcc_flags, 46 | } 47 | ), 48 | ], 49 | cmdclass={ 50 | 'build_ext': BuildExtension, 51 | } 52 | ) -------------------------------------------------------------------------------- /Q2/freqencoder/src/bindings.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "freqencoder.h" 4 | 5 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { 6 | m.def("freq_encode_forward", &freq_encode_forward, "freq encode forward (CUDA)"); 7 | m.def("freq_encode_backward", &freq_encode_backward, "freq encode backward (CUDA)"); 8 | } -------------------------------------------------------------------------------- /Q2/freqencoder/src/freqencoder.cu: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | 15 | 16 | #define CHECK_CUDA(x) TORCH_CHECK(x.device().is_cuda(), #x " must be a CUDA tensor") 17 | #define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be a contiguous tensor") 18 | #define CHECK_IS_INT(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Int, #x " must be an int tensor") 19 | #define CHECK_IS_FLOATING(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Float || x.scalar_type() == at::ScalarType::Half || x.scalar_type() == 
at::ScalarType::Double, #x " must be a floating tensor")

// pi as a device-usable constant expression
inline constexpr __device__ float PI() { return 3.141592653589793f; }

// Ceil division, used to size the launch grid.
// NOTE(review): the original template parameter list was lost in extraction;
// restored as <typename T> based on the usage below.
template <typename T>
__host__ __device__ T div_round_up(T val, T divisor) {
    return (val + divisor - 1) / divisor;
}

// Forward frequency encoding.
// inputs: [B, D]
// outputs: [B, C], C = D + D * deg * 2
__global__ void kernel_freq(
    const float * __restrict__ inputs,
    uint32_t B, uint32_t D, uint32_t deg, uint32_t C,
    float * outputs
) {
    // parallel on per-element: one thread per output scalar
    const uint32_t t = threadIdx.x + blockIdx.x * blockDim.x;
    if (t >= B * C) return;

    // get index
    const uint32_t b = t / C;
    const uint32_t c = t - b * C; // t % C;

    // locate this batch row / output element
    inputs += b * D;
    outputs += t;

    // first D channels: copy the input through unchanged
    if (c < D) {
        outputs[0] = inputs[c];
    // remaining channels: sin/cos at increasing octaves
    } else {
        const uint32_t col = c / D - 1;
        const uint32_t d = c % D;
        const uint32_t freq = col / 2;
        // even col -> sin, odd col -> cos (sin phase-shifted by pi/2)
        const float phase_shift = (col % 2) * (PI() / 2);
        // scalbnf(x, k) == x * 2^k: frequency doubles per octave
        outputs[0] = __sinf(scalbnf(inputs[d], freq) + phase_shift);
    }
}

// Backward pass: accumulate d(loss)/d(inputs) from the output gradients.
// grad: [B, C], C = D + D * deg * 2
// outputs: [B, C] (forward results, reused for the sin/cos derivative trick)
// grad_inputs: [B, D]
__global__ void kernel_freq_backward(
    const float * __restrict__ grad,
    const float * __restrict__ outputs,
    uint32_t B, uint32_t D, uint32_t deg, uint32_t C,
    float * grad_inputs
) {
    // parallel on per-element: one thread per input scalar
    const uint32_t t = threadIdx.x + blockIdx.x * blockDim.x;
    if (t >= B * D) return;

    const uint32_t b = t / D;
    const uint32_t d = t - b * D; // t % D;

    // locate this batch row
    grad += b * C;
    outputs += b * C;
    grad_inputs += t;

    // identity channel contributes its gradient directly
    float result = grad[d];
    grad += D;
    outputs += D;

    // d/dx sin(2^f x) = 2^f cos(2^f x); cos channel stored right after sin,
    // hence the cross terms below.
    for (uint32_t f = 0; f < deg; f++) {
        result += scalbnf(1.0f, f) * (grad[d] * outputs[D + d] - grad[D + d] * outputs[d]);
        grad += 2 * D;
        outputs += 2 * D;
    }

    // write accumulated gradient
    grad_inputs[0] = result;
}


// Host-side launcher for the forward kernel.
// NOTE(review): the launch configurations and data_ptr template arguments
// were stripped by extraction; restored from the kernel signatures.
void freq_encode_forward(at::Tensor inputs, const uint32_t B, const uint32_t D, const uint32_t deg, const uint32_t C, at::Tensor outputs) {
    CHECK_CUDA(inputs);
    CHECK_CUDA(outputs);

    CHECK_CONTIGUOUS(inputs);
    CHECK_CONTIGUOUS(outputs);

    CHECK_IS_FLOATING(inputs);
    CHECK_IS_FLOATING(outputs);

    static constexpr uint32_t N_THREADS = 128;

    // one thread per output element
    kernel_freq<<<div_round_up(B * C, N_THREADS), N_THREADS>>>(inputs.data_ptr<float>(), B, D, deg, C, outputs.data_ptr<float>());
}


// Host-side launcher for the backward kernel.
void freq_encode_backward(at::Tensor grad, at::Tensor outputs, const uint32_t B, const uint32_t D, const uint32_t deg, const uint32_t C, at::Tensor grad_inputs) {
    CHECK_CUDA(grad);
    CHECK_CUDA(outputs);
    CHECK_CUDA(grad_inputs);

    CHECK_CONTIGUOUS(grad);
    CHECK_CONTIGUOUS(outputs);
    CHECK_CONTIGUOUS(grad_inputs);

    CHECK_IS_FLOATING(grad);
    CHECK_IS_FLOATING(outputs);
    CHECK_IS_FLOATING(grad_inputs);

    static constexpr uint32_t N_THREADS = 128;

    // one thread per input element
    kernel_freq_backward<<<div_round_up(B * D, N_THREADS), N_THREADS>>>(grad.data_ptr<float>(), outputs.data_ptr<float>(), B, D, deg, C, grad_inputs.data_ptr<float>());
}

// ----- Q2/freqencoder/src/freqencoder.h -----
# pragma once

#include <stdint.h>
#include <torch/torch.h>

// _backend.freq_encode_forward(inputs, B, input_dim, degree, output_dim, outputs)
void freq_encode_forward(at::Tensor inputs, const uint32_t B, const uint32_t D, const uint32_t deg, const uint32_t C, at::Tensor outputs);

// _backend.freq_encode_backward(grad, outputs, B, input_dim, degree, output_dim, grad_inputs)
void freq_encode_backward(at::Tensor grad, at::Tensor outputs, const uint32_t B, const uint32_t D, const uint32_t deg, const uint32_t C, at::Tensor grad_inputs);
import os
from torch.utils.cpp_extension import load

# Folder containing this file; the extension sources live in ./src beside it.
_src_path = os.path.dirname(os.path.abspath(__file__))

# nvcc flags for JIT-building the grid-encoder extension; the -U flags
# re-enable half-precision operators disabled by PyTorch's defaults.
nvcc_flags = [
    '-O3', '-std=c++14',
    '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__',
]

# Host-compiler flags, chosen per platform (gcc/clang vs. MSVC).
if os.name == "posix":
    c_flags = ['-O3', '-std=c++14']
elif os.name == "nt":
    c_flags = ['/O2', '/std:c++17']

# find cl.exe
def find_cl_path():
    """Locate the newest MSVC host toolchain directory, or return None."""
    import glob
    roots = [r"C:\\Program Files (x86)", r"C:\\Program Files"]
    editions = ["Enterprise", "Professional", "BuildTools", "Community"]
    for root in roots:
        for edition in editions:
            pattern = r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (root, edition)
            # newest version first thanks to the reverse sort
            matches = sorted(glob.glob(pattern), reverse=True)
            if matches:
                return matches[0]
# If cl.exe is not on path, try to find it.
25 | if os.system("where cl.exe >nul 2>nul") != 0: 26 | cl_path = find_cl_path() 27 | if cl_path is None: 28 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 29 | os.environ["PATH"] += ";" + cl_path 30 | 31 | _backend = load(name='_grid_encoder', 32 | extra_cflags=c_flags, 33 | extra_cuda_cflags=nvcc_flags, 34 | sources=[os.path.join(_src_path, 'src', f) for f in [ 35 | 'gridencoder.cu', 36 | 'bindings.cpp', 37 | ]], 38 | ) 39 | 40 | __all__ = ['_backend'] -------------------------------------------------------------------------------- /Q2/gridencoder/build/lib.linux-x86_64-cpython-310/_gridencoder.cpython-310-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/gridencoder/build/lib.linux-x86_64-cpython-310/_gridencoder.cpython-310-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /Q2/gridencoder/build/temp.linux-x86_64-cpython-310/.ninja_deps: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/gridencoder/build/temp.linux-x86_64-cpython-310/.ninja_deps -------------------------------------------------------------------------------- /Q2/gridencoder/build/temp.linux-x86_64-cpython-310/.ninja_log: -------------------------------------------------------------------------------- 1 | # ninja log v5 2 | 0 10156 1707064585608519082 /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.o cad7ba30cd96aa9a 3 | 0 151401 1707064726842755306 
/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.o c9bbd31ba1b7d342 4 | -------------------------------------------------------------------------------- /Q2/gridencoder/build/temp.linux-x86_64-cpython-310/build.ninja: -------------------------------------------------------------------------------- 1 | ninja_required_version = 1.3 2 | cxx = c++ 3 | nvcc = /usr/local/cuda/bin/nvcc 4 | 5 | cflags = -pthread -B /home/ruihan/anaconda3/envs/stable-dreamfusion/compiler_compat -Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 6 | post_cflags = -O3 -std=c++14 -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_gridencoder -D_GLIBCXX_USE_CXX11_ABI=0 7 | cuda_cflags = -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include 
-I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 8 | cuda_post_cflags = -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr --compiler-options ''"'"'-fPIC'"'"'' -O3 -std=c++14 -U__CUDA_NO_HALF_OPERATORS__ -U__CUDA_NO_HALF_CONVERSIONS__ -U__CUDA_NO_HALF2_OPERATORS__ -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_gridencoder -D_GLIBCXX_USE_CXX11_ABI=0 -gencode=arch=compute_86,code=compute_86 -gencode=arch=compute_86,code=sm_86 9 | ldflags = 10 | 11 | rule compile 12 | command = $cxx -MMD -MF $out.d $cflags -c $in -o $out $post_cflags 13 | depfile = $out.d 14 | deps = gcc 15 | 16 | rule cuda_compile 17 | depfile = $out.d 18 | deps = gcc 19 | command = $nvcc $cuda_cflags -c $in -o $out $cuda_post_cflags 20 | 21 | 22 | 23 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.o: compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.cpp 24 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.o: cuda_compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.cu 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /Q2/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.o: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.o -------------------------------------------------------------------------------- /Q2/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/gridencoder/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.o -------------------------------------------------------------------------------- /Q2/gridencoder/gridencoder.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: gridencoder 3 | Version: 0.0.0 4 | -------------------------------------------------------------------------------- /Q2/gridencoder/gridencoder.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | setup.py 2 | /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/bindings.cpp 3 | /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/gridencoder/src/gridencoder.cu 4 | gridencoder.egg-info/PKG-INFO 5 | gridencoder.egg-info/SOURCES.txt 6 | gridencoder.egg-info/dependency_links.txt 7 | gridencoder.egg-info/top_level.txt -------------------------------------------------------------------------------- /Q2/gridencoder/gridencoder.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Q2/gridencoder/gridencoder.egg-info/top_level.txt: 
-------------------------------------------------------------------------------- 1 | _gridencoder 2 | -------------------------------------------------------------------------------- /Q2/gridencoder/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup 3 | from torch.utils.cpp_extension import BuildExtension, CUDAExtension 4 | 5 | _src_path = os.path.dirname(os.path.abspath(__file__)) 6 | 7 | nvcc_flags = [ 8 | '-O3', '-std=c++14', 9 | '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__', 10 | ] 11 | 12 | if os.name == "posix": 13 | c_flags = ['-O3', '-std=c++14'] 14 | elif os.name == "nt": 15 | c_flags = ['/O2', '/std:c++17'] 16 | 17 | # find cl.exe 18 | def find_cl_path(): 19 | import glob 20 | for program_files in [r"C:\\Program Files (x86)", r"C:\\Program Files"]: 21 | for edition in ["Enterprise", "Professional", "BuildTools", "Community"]: 22 | paths = sorted(glob.glob(r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (program_files, edition)), reverse=True) 23 | if paths: 24 | return paths[0] 25 | 26 | # If cl.exe is not on path, try to find it. 
27 | if os.system("where cl.exe >nul 2>nul") != 0: 28 | cl_path = find_cl_path() 29 | if cl_path is None: 30 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 31 | os.environ["PATH"] += ";" + cl_path 32 | 33 | setup( 34 | name='gridencoder', # package name, import this to use python API 35 | ext_modules=[ 36 | CUDAExtension( 37 | name='_gridencoder', # extension name, import this to use CUDA API 38 | sources=[os.path.join(_src_path, 'src', f) for f in [ 39 | 'gridencoder.cu', 40 | 'bindings.cpp', 41 | ]], 42 | extra_compile_args={ 43 | 'cxx': c_flags, 44 | 'nvcc': nvcc_flags, 45 | } 46 | ), 47 | ], 48 | cmdclass={ 49 | 'build_ext': BuildExtension, 50 | } 51 | ) -------------------------------------------------------------------------------- /Q2/gridencoder/src/bindings.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "gridencoder.h" 4 | 5 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { 6 | m.def("grid_encode_forward", &grid_encode_forward, "grid_encode_forward (CUDA)"); 7 | m.def("grid_encode_backward", &grid_encode_backward, "grid_encode_backward (CUDA)"); 8 | m.def("grad_total_variation", &grad_total_variation, "grad_total_variation (CUDA)"); 9 | m.def("grad_weight_decay", &grad_weight_decay, "grad_weight_decay (CUDA)"); 10 | } -------------------------------------------------------------------------------- /Q2/gridencoder/src/gridencoder.h: -------------------------------------------------------------------------------- 1 | #ifndef _HASH_ENCODE_H 2 | #define _HASH_ENCODE_H 3 | 4 | #include 5 | #include 6 | 7 | // inputs: [B, D], float, in [0, 1] 8 | // embeddings: [sO, C], float 9 | // offsets: [L + 1], uint32_t 10 | // outputs: [B, L * C], float 11 | // H: base resolution 12 | void grid_encode_forward(const at::Tensor inputs, const at::Tensor embeddings, const at::Tensor offsets, at::Tensor outputs, const uint32_t B, const uint32_t D, const uint32_t C, const 
uint32_t L, const uint32_t max_level, const float S, const uint32_t H, at::optional dy_dx, const uint32_t gridtype, const bool align_corners, const uint32_t interp); 13 | void grid_encode_backward(const at::Tensor grad, const at::Tensor inputs, const at::Tensor embeddings, const at::Tensor offsets, at::Tensor grad_embeddings, const uint32_t B, const uint32_t D, const uint32_t C, const uint32_t L, const uint32_t max_level, const float S, const uint32_t H, const at::optional dy_dx, at::optional grad_inputs, const uint32_t gridtype, const bool align_corners, const uint32_t interp); 14 | 15 | void grad_total_variation(const at::Tensor inputs, const at::Tensor embeddings, at::Tensor grad, const at::Tensor offsets, const float weight, const uint32_t B, const uint32_t D, const uint32_t C, const uint32_t L, const float S, const uint32_t H, const uint32_t gridtype, const bool align_corners); 16 | void grad_weight_decay(const at::Tensor embeddings, at::Tensor grad, const at::Tensor offsets, const float weight, const uint32_t B, const uint32_t C, const uint32_t L); 17 | 18 | #endif -------------------------------------------------------------------------------- /Q2/meshutils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pymeshlab as pml 3 | 4 | 5 | def poisson_mesh_reconstruction(points, normals=None): 6 | # points/normals: [N, 3] np.ndarray 7 | 8 | import open3d as o3d 9 | 10 | pcd = o3d.geometry.PointCloud() 11 | pcd.points = o3d.utility.Vector3dVector(points) 12 | 13 | # outlier removal 14 | pcd, ind = pcd.remove_statistical_outlier(nb_neighbors=20, std_ratio=10) 15 | 16 | # normals 17 | if normals is None: 18 | pcd.estimate_normals() 19 | else: 20 | pcd.normals = o3d.utility.Vector3dVector(normals[ind]) 21 | 22 | # visualize 23 | o3d.visualization.draw_geometries([pcd], point_show_normal=False) 24 | 25 | mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson( 26 | pcd, depth=9 27 
| ) 28 | vertices_to_remove = densities < np.quantile(densities, 0.1) 29 | mesh.remove_vertices_by_mask(vertices_to_remove) 30 | 31 | # visualize 32 | o3d.visualization.draw_geometries([mesh]) 33 | 34 | vertices = np.asarray(mesh.vertices) 35 | triangles = np.asarray(mesh.triangles) 36 | 37 | print( 38 | f"[INFO] poisson mesh reconstruction: {points.shape} --> {vertices.shape} / {triangles.shape}" 39 | ) 40 | 41 | return vertices, triangles 42 | 43 | 44 | def decimate_mesh( 45 | verts, faces, target, backend="pymeshlab", remesh=False, optimalplacement=True 46 | ): 47 | # optimalplacement: default is True, but for flat mesh must turn False to prevent spike artifect. 48 | 49 | _ori_vert_shape = verts.shape 50 | _ori_face_shape = faces.shape 51 | 52 | if backend == "pyfqmr": 53 | import pyfqmr 54 | 55 | solver = pyfqmr.Simplify() 56 | solver.setMesh(verts, faces) 57 | solver.simplify_mesh(target_count=target, preserve_border=False, verbose=False) 58 | verts, faces, normals = solver.getMesh() 59 | else: 60 | 61 | m = pml.Mesh(verts, faces) 62 | ms = pml.MeshSet() 63 | ms.add_mesh(m, "mesh") # will copy! 
64 | 65 | # filters 66 | # ms.meshing_decimation_clustering(threshold=pml.Percentage(1)) 67 | ms.meshing_decimation_quadric_edge_collapse( 68 | targetfacenum=int(target), optimalplacement=optimalplacement 69 | ) 70 | 71 | if remesh: 72 | # ms.apply_coord_taubin_smoothing() 73 | ms.meshing_isotropic_explicit_remeshing( 74 | iterations=3, targetlen=pml.Percentage(1) 75 | ) 76 | 77 | # extract mesh 78 | m = ms.current_mesh() 79 | verts = m.vertex_matrix() 80 | faces = m.face_matrix() 81 | 82 | print( 83 | f"[INFO] mesh decimation: {_ori_vert_shape} --> {verts.shape}, {_ori_face_shape} --> {faces.shape}" 84 | ) 85 | 86 | return verts, faces 87 | 88 | 89 | def clean_mesh( 90 | verts, faces, v_pct=1, min_f=8, min_d=5, repair=True, remesh=True, remesh_size=0.01 91 | ): 92 | # verts: [N, 3] 93 | # faces: [N, 3] 94 | 95 | _ori_vert_shape = verts.shape 96 | _ori_face_shape = faces.shape 97 | 98 | m = pml.Mesh(verts, faces) 99 | ms = pml.MeshSet() 100 | ms.add_mesh(m, "mesh") # will copy! 101 | 102 | # filters 103 | ms.meshing_remove_unreferenced_vertices() # verts not refed by any faces 104 | 105 | if v_pct > 0: 106 | ms.meshing_merge_close_vertices( 107 | threshold=pml.Percentage(v_pct) 108 | ) # 1/10000 of bounding box diagonal 109 | 110 | ms.meshing_remove_duplicate_faces() # faces defined by the same verts 111 | ms.meshing_remove_null_faces() # faces with area == 0 112 | 113 | if min_d > 0: 114 | ms.meshing_remove_connected_component_by_diameter( 115 | mincomponentdiag=pml.Percentage(min_d) 116 | ) 117 | 118 | if min_f > 0: 119 | ms.meshing_remove_connected_component_by_face_number(mincomponentsize=min_f) 120 | 121 | if repair: 122 | # ms.meshing_remove_t_vertices(method=0, threshold=40, repeat=True) 123 | ms.meshing_repair_non_manifold_edges(method=0) 124 | ms.meshing_repair_non_manifold_vertices(vertdispratio=0) 125 | 126 | if remesh: 127 | # ms.apply_coord_taubin_smoothing() 128 | ms.meshing_isotropic_explicit_remeshing( 129 | iterations=3, 
targetlen=pml.AbsoluteValue(remesh_size) 130 | ) 131 | 132 | # extract mesh 133 | m = ms.current_mesh() 134 | verts = m.vertex_matrix() 135 | faces = m.face_matrix() 136 | 137 | print( 138 | f"[INFO] mesh cleaning: {_ori_vert_shape} --> {verts.shape}, {_ori_face_shape} --> {faces.shape}" 139 | ) 140 | 141 | return verts, faces 142 | -------------------------------------------------------------------------------- /Q2/nerf/config_parser.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import argparse 3 | import pandas as pd 4 | import sys 5 | 6 | 7 | def add_config_arguments(parser=None): 8 | if parser is None: 9 | parser = argparse.ArgumentParser(description="Add configuration for NeRF optimization using SDS loss.") 10 | 11 | parser.add_argument('--h', type=int, default=64, help="render height for NeRF in training") 12 | parser.add_argument('--w', type=int, default=64, help="render width for NeRF in training") 13 | parser.add_argument('--dataset_size_train', type=int, default=100, help="Length of train dataset i.e. 
# of iterations per epoch") 14 | parser.add_argument('--dataset_size_valid', type=int, default=8, help="# of frames to render in the turntable video in validation") 15 | parser.add_argument('--dataset_size_test', type=int, default=100, help="# of frames to render in the turntable video at test time") 16 | parser.add_argument('--batch_size', type=int, default=1, help="images to render per batch using NeRF") 17 | parser.add_argument('--iters', type=int, default=10000, help="training iters") 18 | parser.add_argument('--exp_start_iter', type=int, default=0, help="start iter # for experiment, to calculate progressive_view and progressive_level") 19 | parser.add_argument('--exp_end_iter', type=int, default=10000, help="end iter # for experiment, to calculate progressive_view and progressive_level") 20 | parser.add_argument('--min_ambient_ratio', type=float, default=0.1, help="minimum ambient ratio to use in lambertian shading") 21 | parser.add_argument('--textureless_ratio', type=float, default=0.2, help="ratio of textureless shading") 22 | parser.add_argument('--use_ema', type=int, default=1, help="use exponential moving average for model parameters") 23 | 24 | 25 | ### dataset options 26 | parser.add_argument('--min_near', type=float, default=0.01, help="minimum near distance for camera") 27 | parser.add_argument('--radius_range', type=float, nargs='*', default=[3.0, 3.5], help="training camera radius range") 28 | parser.add_argument('--theta_range', type=float, nargs='*', default=[45, 105], help="training camera range along the polar angles (i.e. up and down). See advanced.md for details.") 29 | parser.add_argument('--phi_range', type=float, nargs='*', default=[-180, 180], help="training camera range along the azimuth angles (i.e. left and right). 
See advanced.md for details.") 30 | parser.add_argument('--fovy_range', type=float, nargs='*', default=[10, 30], help="training camera fovy range") 31 | parser.add_argument('--angle_overhead', type=float, default=30, help="[0, angle_overhead] is the overhead region") 32 | parser.add_argument('--angle_front', type=float, default=60, help="[0, angle_front] is the front region, [180, 180+angle_front] the back region, otherwise the side region.") 33 | parser.add_argument('--default_radius', type=float, default=3.2, help="radius for the default view") 34 | parser.add_argument('--default_polar', type=float, default=90, help="polar for the default view") 35 | parser.add_argument('--default_azimuth', type=float, default=0, help="azimuth for the default view") 36 | parser.add_argument('--default_fovy', type=float, default=20, help="fovy for the default view") 37 | 38 | ### training options 39 | parser.add_argument('--jitter_pose', action='store_true', help="add jitters to the randomly sampled camera poses") 40 | parser.add_argument('--jitter_center', type=float, default=0.2, help="amount of jitter to add to sampled camera pose's center (camera location)") 41 | parser.add_argument('--jitter_target', type=float, default=0.2, help="amount of jitter to add to sampled camera pose's target (i.e. 'look-at')") 42 | parser.add_argument('--jitter_up', type=float, default=0.02, help="amount of jitter to add to sampled camera pose's up-axis (i.e. 
'camera roll')") 43 | parser.add_argument('--uniform_sphere_rate', type=float, default=0, help="likelihood of sampling camera location uniformly on the sphere surface area") 44 | parser.add_argument('--max_steps', type=int, default=1024, help="max num steps sampled per ray (only valid when using --cuda_ray)") 45 | parser.add_argument('--num_steps', type=int, default=64, help="num steps sampled per ray (only valid when not using --cuda_ray)") 46 | parser.add_argument('--upsample_steps', type=int, default=32, help="num steps up-sampled per ray (only valid when not using --cuda_ray)") 47 | parser.add_argument('--update_extra_interval', type=int, default=16, help="iter interval to update extra status (only valid when using --cuda_ray)") 48 | parser.add_argument('--max_ray_batch', type=int, default=512, help="batch size of rays at inference to avoid OOM (only valid when not using --cuda_ray)") # 4096, tune it down if you have OOM 49 | 50 | # model options 51 | parser.add_argument('--bg_radius', type=float, default=1.4, help="if positive, use a background model at sphere(bg_radius)") 52 | parser.add_argument('--density_activation', type=str, default='exp', choices=['softplus', 'exp'], help="density activation function") 53 | parser.add_argument('--density_thresh', type=float, default=10, help="threshold for density grid to be occupied") 54 | parser.add_argument('--blob_density', type=float, default=5, help="max (center) density for the density blob") 55 | parser.add_argument('--blob_radius', type=float, default=0.2, help="control the radius for the density blob") 56 | return parser 57 | 58 | -------------------------------------------------------------------------------- /Q2/nerf/network_grid.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # Acknowledgements: 3 | # A large part of teh NeRF model and rendering code is adapted 
from the implementation of stable-dreamfusion. 4 | # @misc{stable-dreamfusion, 5 | # Author = {Jiaxiang Tang}, 6 | # Year = {2022}, 7 | # Note = {https://github.com/ashawkey/stable-dreamfusion}, 8 | # Title = {Stable-dreamfusion: Text-to-3D with Stable-diffusion} 9 | # } 10 | #################################################################################################### 11 | 12 | import torch 13 | import torch.nn as nn 14 | import torch.nn.functional as F 15 | from activation import trunc_exp 16 | from .renderer import NeRFRenderer 17 | 18 | from .utils import safe_normalize 19 | 20 | class MLP(nn.Module): 21 | def __init__(self, dim_in, dim_out, dim_hidden, num_layers, bias=True): 22 | super().__init__() 23 | self.dim_in = dim_in 24 | self.dim_out = dim_out 25 | self.dim_hidden = dim_hidden 26 | self.num_layers = num_layers 27 | 28 | net = [] 29 | for l in range(num_layers): 30 | net.append(nn.Linear(self.dim_in if l == 0 else self.dim_hidden, self.dim_out if l == num_layers - 1 else self.dim_hidden, bias=bias)) 31 | 32 | self.net = nn.ModuleList(net) 33 | 34 | def forward(self, x): 35 | for l in range(self.num_layers): 36 | x = self.net[l](x) 37 | if l != self.num_layers - 1: 38 | x = F.relu(x, inplace=True) 39 | return x 40 | 41 | 42 | class NeRFNetwork(NeRFRenderer): 43 | def __init__(self, 44 | opt, 45 | num_layers=3, 46 | hidden_dim=64, 47 | num_layers_bg=2, 48 | hidden_dim_bg=32, 49 | ): 50 | 51 | super().__init__(opt) 52 | 53 | from gridencoder import GridEncoder 54 | from freqencoder import FreqEncoder 55 | 56 | self.bg_radius = opt.bg_radius 57 | self.num_layers = num_layers 58 | self.hidden_dim = hidden_dim 59 | encoder = GridEncoder(input_dim=3, num_levels=16, level_dim=2, base_resolution=16, log2_hashmap_size=19, desired_resolution=2048 * self.bound, gridtype='hash', align_corners=False, interpolation='smoothstep') 60 | self.encoder, self.in_dim = encoder, encoder.output_dim 61 | 62 | self.sigma_net = MLP(self.in_dim, 4, hidden_dim, num_layers, 
bias=True) 63 | 64 | self.density_activation = trunc_exp 65 | # background network 66 | if self.bg_radius > 0: 67 | self.num_layers_bg = num_layers_bg 68 | self.hidden_dim_bg = hidden_dim_bg 69 | 70 | # use a very simple network to avoid it learning the prompt... 71 | encoder = FreqEncoder(input_dim=3, degree=6) 72 | self.encoder_bg, self.in_dim_bg = encoder, encoder.output_dim 73 | self.bg_net = MLP(self.in_dim_bg, 3, hidden_dim_bg, num_layers_bg, bias=True) 74 | 75 | else: 76 | self.bg_net = None 77 | 78 | def common_forward(self, x): 79 | 80 | # sigma 81 | enc = self.encoder(x, bound=self.bound, max_level=self.max_level) 82 | 83 | h = self.sigma_net(enc) 84 | 85 | sigma = self.density_activation(h[..., 0] + self.density_blob(x)) 86 | albedo = torch.sigmoid(h[..., 1:]) 87 | 88 | return sigma, albedo 89 | 90 | # ref: https://github.com/zhaofuq/Instant-NSR/blob/main/nerf/network_sdf.py#L192 91 | def finite_difference_normal(self, x, epsilon=1e-2): 92 | # x: [N, 3] 93 | dx_pos, _ = self.common_forward((x + torch.tensor([[epsilon, 0.00, 0.00]], device=x.device)).clamp(-self.bound, self.bound)) 94 | dx_neg, _ = self.common_forward((x + torch.tensor([[-epsilon, 0.00, 0.00]], device=x.device)).clamp(-self.bound, self.bound)) 95 | dy_pos, _ = self.common_forward((x + torch.tensor([[0.00, epsilon, 0.00]], device=x.device)).clamp(-self.bound, self.bound)) 96 | dy_neg, _ = self.common_forward((x + torch.tensor([[0.00, -epsilon, 0.00]], device=x.device)).clamp(-self.bound, self.bound)) 97 | dz_pos, _ = self.common_forward((x + torch.tensor([[0.00, 0.00, epsilon]], device=x.device)).clamp(-self.bound, self.bound)) 98 | dz_neg, _ = self.common_forward((x + torch.tensor([[0.00, 0.00, -epsilon]], device=x.device)).clamp(-self.bound, self.bound)) 99 | 100 | normal = torch.stack([ 101 | 0.5 * (dx_pos - dx_neg) / epsilon, 102 | 0.5 * (dy_pos - dy_neg) / epsilon, 103 | 0.5 * (dz_pos - dz_neg) / epsilon 104 | ], dim=-1) 105 | 106 | return -normal 107 | 108 | def normal(self, x): 109 
| normal = self.finite_difference_normal(x) 110 | normal = safe_normalize(normal) 111 | normal = torch.nan_to_num(normal) 112 | return normal 113 | 114 | 115 | def forward(self, x, d, l=None, ratio=1, shading='albedo'): 116 | # x: [N, 3], in [-bound, bound] 117 | # d: [N, 3], view direction, nomalized in [-1, 1] 118 | # l: [3], plane light direction, nomalized in [-1, 1] 119 | # ratio: scalar, ambient ratio, 1 == no shading (albedo only), 0 == only shading (textureless) 120 | 121 | sigma, albedo = self.common_forward(x) 122 | 123 | if shading == 'albedo': 124 | normal = None 125 | color = albedo 126 | 127 | else: # lambertian shading 128 | normal = self.normal(x) 129 | 130 | lambertian = ratio + (1 - ratio) * (normal * l).sum(-1).clamp(min=0) # [N,] 131 | 132 | if shading == 'textureless': 133 | color = lambertian.unsqueeze(-1).repeat(1, 3) 134 | elif shading == 'normal': 135 | color = (normal + 1) / 2 136 | else: # 'lambertian' 137 | color = albedo * lambertian.unsqueeze(-1) 138 | 139 | return sigma, color, normal 140 | 141 | 142 | def density(self, x): 143 | # x: [N, 3], in [-bound, bound] 144 | 145 | sigma, albedo = self.common_forward(x) 146 | 147 | return { 148 | 'sigma': sigma, 149 | 'albedo': albedo, 150 | } 151 | 152 | 153 | def background(self, d): 154 | h = self.encoder_bg(d) # [N, C] 155 | h = self.bg_net(h) 156 | 157 | # sigmoid activation for rgb 158 | rgbs = torch.sigmoid(h) 159 | 160 | return rgbs 161 | 162 | # optimizer utils 163 | def get_params(self, lr): 164 | 165 | params = [ 166 | {'params': self.encoder.parameters(), 'lr': lr * 10}, 167 | {'params': self.sigma_net.parameters(), 'lr': lr}, 168 | # {'params': self.normal_net.parameters(), 'lr': lr}, 169 | ] 170 | 171 | if self.bg_radius > 0: 172 | # params.append({'params': self.encoder_bg.parameters(), 'lr': lr * 10}) 173 | params.append({'params': self.bg_net.parameters(), 'lr': lr}) 174 | 175 | 176 | return params 
-------------------------------------------------------------------------------- /Q2/raymarching/__init__.py: -------------------------------------------------------------------------------- 1 | from .raymarching import * -------------------------------------------------------------------------------- /Q2/raymarching/backend.py: -------------------------------------------------------------------------------- 1 | import os 2 | from torch.utils.cpp_extension import load 3 | 4 | _src_path = os.path.dirname(os.path.abspath(__file__)) 5 | 6 | nvcc_flags = [ 7 | '-O3', '-std=c++14', 8 | '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__', 9 | ] 10 | 11 | if os.name == "posix": 12 | c_flags = ['-O3', '-std=c++14'] 13 | elif os.name == "nt": 14 | c_flags = ['/O2', '/std:c++17'] 15 | 16 | # find cl.exe 17 | def find_cl_path(): 18 | import glob 19 | for program_files in [r"C:\\Program Files (x86)", r"C:\\Program Files"]: 20 | for edition in ["Enterprise", "Professional", "BuildTools", "Community"]: 21 | paths = sorted(glob.glob(r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (program_files, edition)), reverse=True) 22 | if paths: 23 | return paths[0] 24 | 25 | # If cl.exe is not on path, try to find it. 
26 | if os.system("where cl.exe >nul 2>nul") != 0: 27 | cl_path = find_cl_path() 28 | if cl_path is None: 29 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 30 | os.environ["PATH"] += ";" + cl_path 31 | 32 | _backend = load(name='_raymarching', 33 | extra_cflags=c_flags, 34 | extra_cuda_cflags=nvcc_flags, 35 | sources=[os.path.join(_src_path, 'src', f) for f in [ 36 | 'raymarching.cu', 37 | 'bindings.cpp', 38 | ]], 39 | ) 40 | 41 | __all__ = ['_backend'] -------------------------------------------------------------------------------- /Q2/raymarching/build/lib.linux-x86_64-cpython-310/_raymarching.cpython-310-x86_64-linux-gnu.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/raymarching/build/lib.linux-x86_64-cpython-310/_raymarching.cpython-310-x86_64-linux-gnu.so -------------------------------------------------------------------------------- /Q2/raymarching/build/temp.linux-x86_64-cpython-310/.ninja_deps: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/raymarching/build/temp.linux-x86_64-cpython-310/.ninja_deps -------------------------------------------------------------------------------- /Q2/raymarching/build/temp.linux-x86_64-cpython-310/.ninja_log: -------------------------------------------------------------------------------- 1 | # ninja log v5 2 | 0 10929 1707064483662904790 /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.o e38ed6c0409a7118 3 | 0 31686 1707064504427233600 
/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.o dbcc3b8f28013471 4 | -------------------------------------------------------------------------------- /Q2/raymarching/build/temp.linux-x86_64-cpython-310/build.ninja: -------------------------------------------------------------------------------- 1 | ninja_required_version = 1.3 2 | cxx = c++ 3 | nvcc = /usr/local/cuda/bin/nvcc 4 | 5 | cflags = -pthread -B /home/ruihan/anaconda3/envs/stable-dreamfusion/compiler_compat -Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -O2 -isystem /home/ruihan/anaconda3/envs/stable-dreamfusion/include -fPIC -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 6 | post_cflags = -O3 -std=c++14 -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_raymarching -D_GLIBCXX_USE_CXX11_ABI=0 7 | cuda_cflags = -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/TH -I/home/ruihan/anaconda3/envs/stable-dreamfusion/lib/python3.10/site-packages/torch/include/THC -I/usr/local/cuda/include 
-I/home/ruihan/anaconda3/envs/stable-dreamfusion/include/python3.10 -c 8 | cuda_post_cflags = -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr --compiler-options ''"'"'-fPIC'"'"'' -O3 -std=c++14 -U__CUDA_NO_HALF_OPERATORS__ -U__CUDA_NO_HALF_CONVERSIONS__ -U__CUDA_NO_HALF2_OPERATORS__ -DTORCH_API_INCLUDE_EXTENSION_H '-DPYBIND11_COMPILER_TYPE="_gcc"' '-DPYBIND11_STDLIB="_libstdcpp"' '-DPYBIND11_BUILD_ABI="_cxxabi1011"' -DTORCH_EXTENSION_NAME=_raymarching -D_GLIBCXX_USE_CXX11_ABI=0 9 | ldflags = 10 | 11 | rule compile 12 | command = $cxx -MMD -MF $out.d $cflags -c $in -o $out $post_cflags 13 | depfile = $out.d 14 | deps = gcc 15 | 16 | rule cuda_compile 17 | depfile = $out.d 18 | deps = gcc 19 | command = $nvcc $cuda_cflags -c $in -o $out $cuda_post_cflags 20 | 21 | 22 | 23 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.o: compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.cpp 24 | build /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.o: cuda_compile /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.cu 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /Q2/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.o: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.o -------------------------------------------------------------------------------- /Q2/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/Q2/raymarching/build/temp.linux-x86_64-cpython-310/data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.o -------------------------------------------------------------------------------- /Q2/raymarching/raymarching.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 2.1 2 | Name: raymarching 3 | Version: 0.0.0 4 | -------------------------------------------------------------------------------- /Q2/raymarching/raymarching.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | setup.py 2 | /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/bindings.cpp 3 | /data/ruihan/16825/a4_internal/Q2/stable-dreamfusion/raymarching/src/raymarching.cu 4 | raymarching.egg-info/PKG-INFO 5 | raymarching.egg-info/SOURCES.txt 6 | raymarching.egg-info/dependency_links.txt 7 | raymarching.egg-info/top_level.txt -------------------------------------------------------------------------------- /Q2/raymarching/raymarching.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Q2/raymarching/raymarching.egg-info/top_level.txt: 
-------------------------------------------------------------------------------- 1 | _raymarching 2 | -------------------------------------------------------------------------------- /Q2/raymarching/setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from setuptools import setup 3 | from torch.utils.cpp_extension import BuildExtension, CUDAExtension 4 | 5 | _src_path = os.path.dirname(os.path.abspath(__file__)) 6 | 7 | nvcc_flags = [ 8 | '-O3', '-std=c++14', 9 | '-U__CUDA_NO_HALF_OPERATORS__', '-U__CUDA_NO_HALF_CONVERSIONS__', '-U__CUDA_NO_HALF2_OPERATORS__', 10 | ] 11 | 12 | if os.name == "posix": 13 | c_flags = ['-O3', '-std=c++14'] 14 | elif os.name == "nt": 15 | c_flags = ['/O2', '/std:c++17'] 16 | 17 | # find cl.exe 18 | def find_cl_path(): 19 | import glob 20 | for program_files in [r"C:\\Program Files (x86)", r"C:\\Program Files"]: 21 | for edition in ["Enterprise", "Professional", "BuildTools", "Community"]: 22 | paths = sorted(glob.glob(r"%s\\Microsoft Visual Studio\\*\\%s\\VC\\Tools\\MSVC\\*\\bin\\Hostx64\\x64" % (program_files, edition)), reverse=True) 23 | if paths: 24 | return paths[0] 25 | 26 | # If cl.exe is not on path, try to find it. 27 | if os.system("where cl.exe >nul 2>nul") != 0: 28 | cl_path = find_cl_path() 29 | if cl_path is None: 30 | raise RuntimeError("Could not locate a supported Microsoft Visual C++ installation") 31 | os.environ["PATH"] += ";" + cl_path 32 | 33 | ''' 34 | Usage: 35 | 36 | python setup.py build_ext --inplace # build extensions locally, do not install (only can be used from the parent directory) 37 | 38 | python setup.py install # build extensions and install (copy) to PATH. 39 | pip install . # ditto but better (e.g., dependency & metadata handling) 40 | 41 | python setup.py develop # build extensions and install (symbolic) to PATH. 42 | pip install -e . 
# ditto but better (e.g., dependency & metadata handling) 43 | 44 | ''' 45 | setup( 46 | name='raymarching', # package name, import this to use python API 47 | ext_modules=[ 48 | CUDAExtension( 49 | name='_raymarching', # extension name, import this to use CUDA API 50 | sources=[os.path.join(_src_path, 'src', f) for f in [ 51 | 'raymarching.cu', 52 | 'bindings.cpp', 53 | ]], 54 | extra_compile_args={ 55 | 'cxx': c_flags, 56 | 'nvcc': nvcc_flags, 57 | } 58 | ), 59 | ], 60 | cmdclass={ 61 | 'build_ext': BuildExtension, 62 | } 63 | ) -------------------------------------------------------------------------------- /Q2/raymarching/src/bindings.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "raymarching.h" 4 | 5 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { 6 | // utils 7 | m.def("flatten_rays", &flatten_rays, "flatten_rays (CUDA)"); 8 | m.def("packbits", &packbits, "packbits (CUDA)"); 9 | m.def("near_far_from_aabb", &near_far_from_aabb, "near_far_from_aabb (CUDA)"); 10 | m.def("sph_from_ray", &sph_from_ray, "sph_from_ray (CUDA)"); 11 | m.def("morton3D", &morton3D, "morton3D (CUDA)"); 12 | m.def("morton3D_invert", &morton3D_invert, "morton3D_invert (CUDA)"); 13 | // train 14 | m.def("march_rays_train", &march_rays_train, "march_rays_train (CUDA)"); 15 | m.def("composite_rays_train_forward", &composite_rays_train_forward, "composite_rays_train_forward (CUDA)"); 16 | m.def("composite_rays_train_backward", &composite_rays_train_backward, "composite_rays_train_backward (CUDA)"); 17 | // infer 18 | m.def("march_rays", &march_rays, "march rays (CUDA)"); 19 | m.def("composite_rays", &composite_rays, "composite rays (CUDA)"); 20 | } -------------------------------------------------------------------------------- /Q2/raymarching/src/raymarching.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | 6 | 7 | void 
near_far_from_aabb(const at::Tensor rays_o, const at::Tensor rays_d, const at::Tensor aabb, const uint32_t N, const float min_near, at::Tensor nears, at::Tensor fars); 8 | void sph_from_ray(const at::Tensor rays_o, const at::Tensor rays_d, const float radius, const uint32_t N, at::Tensor coords); 9 | void morton3D(const at::Tensor coords, const uint32_t N, at::Tensor indices); 10 | void morton3D_invert(const at::Tensor indices, const uint32_t N, at::Tensor coords); 11 | void packbits(const at::Tensor grid, const uint32_t N, const float density_thresh, at::Tensor bitfield); 12 | void flatten_rays(const at::Tensor rays, const uint32_t N, const uint32_t M, at::Tensor res); 13 | 14 | void march_rays_train(const at::Tensor rays_o, const at::Tensor rays_d, const at::Tensor grid, const float bound, const bool contract, const float dt_gamma, const uint32_t max_steps, const uint32_t N, const uint32_t C, const uint32_t H, const at::Tensor nears, const at::Tensor fars, at::optional xyzs, at::optional dirs, at::optional ts, at::Tensor rays, at::Tensor counter, at::Tensor noises); 15 | void composite_rays_train_forward(const at::Tensor sigmas, const at::Tensor rgbs, const at::Tensor ts, const at::Tensor rays, const uint32_t M, const uint32_t N, const float T_thresh, const bool binarize, at::Tensor weights, at::Tensor weights_sum, at::Tensor depth, at::Tensor image); 16 | void composite_rays_train_backward(const at::Tensor grad_weights, const at::Tensor grad_weights_sum, const at::Tensor grad_depth, const at::Tensor grad_image, const at::Tensor sigmas, const at::Tensor rgbs, const at::Tensor ts, const at::Tensor rays, const at::Tensor weights_sum, const at::Tensor depth, const at::Tensor image, const uint32_t M, const uint32_t N, const float T_thresh, const bool binarize, at::Tensor grad_sigmas, at::Tensor grad_rgbs); 17 | 18 | void march_rays(const uint32_t n_alive, const uint32_t n_step, const at::Tensor rays_alive, const at::Tensor rays_t, const at::Tensor rays_o, const 
at::Tensor rays_d, const float bound, const bool contract, const float dt_gamma, const uint32_t max_steps, const uint32_t C, const uint32_t H, const at::Tensor grid, const at::Tensor nears, const at::Tensor fars, at::Tensor xyzs, at::Tensor dirs, at::Tensor ts, at::Tensor noises); 19 | void composite_rays(const uint32_t n_alive, const uint32_t n_step, const float T_thresh, const bool binarize, at::Tensor rays_alive, at::Tensor rays_t, at::Tensor sigmas, at::Tensor rgbs, at::Tensor ts, at::Tensor weights_sum, at::Tensor depth, at::Tensor image); -------------------------------------------------------------------------------- /Q2/utils.py: -------------------------------------------------------------------------------- 1 | import math 2 | from typing import List 3 | 4 | import imageio 5 | import matplotlib.pyplot as plt 6 | import numpy as np 7 | import torch 8 | from pytorch3d.io import load_obj, load_objs_as_meshes 9 | from pytorch3d.renderer import ( 10 | FoVPerspectiveCameras, 11 | HardPhongShader, 12 | MeshRasterizer, 13 | MeshRenderer, 14 | PointLights, 15 | RasterizationSettings, 16 | look_at_view_transform, 17 | ) 18 | from skimage import img_as_ubyte 19 | from torch.optim.lr_scheduler import LambdaLR 20 | 21 | 22 | def seed_everything(seed): 23 | torch.manual_seed(seed) 24 | torch.cuda.manual_seed(seed) 25 | 26 | 27 | def get_cosine_schedule_with_warmup( 28 | optimizer, num_warmup_steps, num_training_steps, num_cycles: float = 0.5 29 | ): 30 | 31 | def lr_lambda(current_step): 32 | if current_step < num_warmup_steps: 33 | return float(current_step) / float(max(1, num_warmup_steps)) 34 | progress = float(current_step - num_warmup_steps) / float( 35 | max(1, num_training_steps - num_warmup_steps) 36 | ) 37 | return max( 38 | 0.0, 0.5 * (1.0 + math.cos(math.pi * float(num_cycles) * 2.0 * progress)) 39 | ) 40 | 41 | return LambdaLR(optimizer, lr_lambda, -1) 42 | 43 | 44 | def get_mesh_renderer(image_size=512, lights=None, device=None): 45 | """ 46 | Returns a 
Pytorch3D Mesh Renderer. 47 | 48 | Args: 49 | image_size (int): The rendered image size. 50 | lights: A default Pytorch3D lights object. 51 | device (torch.device): The torch device to use (CPU or GPU). If not specified, 52 | will automatically use GPU if available, otherwise CPU. 53 | """ 54 | if device is None: 55 | if torch.cuda.is_available(): 56 | device = torch.device("cuda:0") 57 | else: 58 | device = torch.device("cpu") 59 | raster_settings = RasterizationSettings( 60 | image_size=image_size, 61 | blur_radius=0.0, 62 | faces_per_pixel=1, 63 | ) 64 | renderer = MeshRenderer( 65 | rasterizer=MeshRasterizer(raster_settings=raster_settings), 66 | shader=HardPhongShader(device=device, lights=lights), 67 | ) 68 | return renderer 69 | 70 | 71 | def get_mesh_renderer_soft(image_size=512, lights=None, device=None, sigma=1e-4): 72 | """ 73 | Create a soft renderer for differentaible texture rendering. 74 | Ref: https://pytorch3d.org/tutorials/fit_textured_mesh#3.-Mesh-and-texture-prediction-via-textured-rendering 75 | 76 | Args: 77 | image_size (int): The rendered image size. 78 | lights: A default Pytorch3D lights object. 79 | device (torch.device): The torch device to use (CPU or GPU). If not specified, 80 | will automatically use GPU if available, otherwise CPU. 
81 | """ 82 | if device is None: 83 | if torch.cuda.is_available(): 84 | device = torch.device("cuda:0") 85 | else: 86 | device = torch.device("cpu") 87 | 88 | # Rasterization settings for differentiable rendering, where the blur_radius 89 | # initialization is based on Liu et al, 'Soft Rasterizer: A Differentiable 90 | # Renderer for Image-based 3D Reasoning', ICCV 2019 91 | raster_settings_soft = RasterizationSettings( 92 | image_size=image_size, 93 | blur_radius=np.log(1.0 / 1e-4 - 1.0) * sigma, 94 | faces_per_pixel=50, 95 | perspective_correct=False, 96 | ) 97 | 98 | # Differentiable soft renderer using per vertex RGB colors for texture 99 | renderer = MeshRenderer( 100 | rasterizer=MeshRasterizer(raster_settings=raster_settings_soft), 101 | shader=HardPhongShader(device=device, lights=lights), 102 | ) 103 | return renderer 104 | 105 | 106 | def render_360_views(mesh, renderer, device, dist=3, elev=0, output_path=None): 107 | images = [] 108 | for azim in range(0, 360, 10): 109 | R, T = look_at_view_transform(dist, elev, azim) 110 | cameras = FoVPerspectiveCameras(device=device, R=R, T=T) 111 | 112 | # Place a point light in front of the cow. 
113 | lights = PointLights(location=[[0, 0, -3]], device=device) 114 | 115 | rend = renderer(mesh, cameras=cameras, lights=lights) 116 | rend = rend.cpu().numpy()[0, ..., :3] # (B, H, W, 4) -> (H, W, 3) 117 | images.append(rend) 118 | 119 | # convert to uint8 to suppress "lossy conversion" warning 120 | images = [np.clip(img, -1, 1) for img in images] 121 | images = [img_as_ubyte(img) for img in images] 122 | 123 | # save a gif of the 360 rotation 124 | imageio.mimsave(output_path, images, fps=15) 125 | 126 | 127 | from pytorch3d.io import load_obj, load_objs_as_meshes 128 | 129 | def init_mesh( 130 | model_path, 131 | device="cpu", 132 | ): 133 | print("=> loading target mesh...") 134 | verts, faces, aux = load_obj( 135 | model_path, device=device, load_textures=True, create_texture_atlas=True 136 | ) 137 | mesh = load_objs_as_meshes([model_path], device=device) 138 | faces = faces.verts_idx 139 | return mesh, verts, faces, aux 140 | 141 | 142 | # calculate the text embs. 143 | @torch.no_grad() 144 | def prepare_embeddings(sds, prompt, neg_prompt="", view_dependent=False): 145 | # text embeddings (stable-diffusion) 146 | if isinstance(prompt, str): 147 | prompt = [prompt] 148 | if isinstance(neg_prompt, str): 149 | neg_prompt = [neg_prompt] 150 | embeddings = {} 151 | embeddings["default"] = sds.get_text_embeddings(prompt) # shape [1, 77, 1024] 152 | embeddings["uncond"] = sds.get_text_embeddings(neg_prompt) # shape [1, 77, 1024] 153 | if view_dependent: 154 | for d in ["front", "side", "back"]: 155 | embeddings[d] = sds.get_text_embeddings([f"{prompt}, {d} view"]) 156 | return embeddings 157 | -------------------------------------------------------------------------------- /ref_output/Q21_a_hamburger_no_guidance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/Q21_a_hamburger_no_guidance.png 
-------------------------------------------------------------------------------- /ref_output/Q21_a_hamburger_w_guidance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/Q21_a_hamburger_w_guidance.png -------------------------------------------------------------------------------- /ref_output/Q22_a_dotted_black_and_white_cow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/Q22_a_dotted_black_and_white_cow.png -------------------------------------------------------------------------------- /ref_output/Q22_an_orange_golden_bull.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/Q22_an_orange_golden_bull.png -------------------------------------------------------------------------------- /ref_output/Q23_a_standing_corgi_dog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/Q23_a_standing_corgi_dog.png -------------------------------------------------------------------------------- /ref_output/q1_render_example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/q1_render_example.png -------------------------------------------------------------------------------- /ref_output/q1_training_example_1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/q1_training_example_1.png -------------------------------------------------------------------------------- /ref_output/q1_training_example_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/q1_training_example_2.png -------------------------------------------------------------------------------- /ref_output/q1_with_sh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/q1_with_sh.png -------------------------------------------------------------------------------- /ref_output/q1_without_sh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/learning3d/assignment4/7edee531622a9f8bb133a28b60f7b7cc40e95a69/ref_output/q1_without_sh.png --------------------------------------------------------------------------------