├── .github ├── ISSUE_TEMPLATE │ └── genjax_access.md └── workflows │ ├── pre-commit.yaml │ └── release.yml ├── .gitignore ├── .pre-commit-config.yaml ├── DEVELOPING.md ├── LICENSE ├── README.md ├── assets ├── fonts │ └── IBMPlexSerif-Regular.ttf └── sample_objs │ ├── box_large.obj │ ├── box_small.obj │ ├── bunny.obj │ ├── cube.obj │ ├── diamond.obj │ ├── icosahedron.obj │ ├── occulder.obj │ ├── orange.obj │ ├── pyramid.obj │ ├── sphere.obj │ ├── table.obj │ └── toy_plane.ply ├── bayes3d ├── .DS_Store ├── __init__.py ├── _mkl │ ├── __init__.py │ ├── gaussian_particle_system_v0.py │ ├── gaussian_renderer.py │ ├── gaussian_sensor_model.py │ ├── generic.py │ ├── plotting.py │ ├── pose.py │ ├── simple_likelihood.py │ ├── table_scene_model.py │ ├── trimesh_to_gaussians.py │ ├── types.py │ └── utils.py ├── camera.py ├── colmap │ ├── __init__.py │ ├── colmap_loader.py │ ├── colmap_utils.py │ └── dataset_loader.py ├── distributions.py ├── documentation.md ├── genjax │ ├── __init__.py │ ├── genjax_distributions.py │ └── model.py ├── likelihood.py ├── neural │ ├── cosypose_baseline │ │ ├── INSTALL.md │ │ ├── __init__.py │ │ ├── cosypose_setup.sh │ │ └── cosypose_utils.py │ ├── dino.py │ ├── requirements_dino.txt │ └── segmentation.py ├── prototyping.py ├── renderer.py ├── rendering │ ├── __init__.py │ ├── nvdiffrast │ │ ├── __init__.py │ │ ├── common │ │ │ ├── __init__.py │ │ │ ├── common.cpp │ │ │ ├── common.h │ │ │ ├── framework.h │ │ │ ├── glutil.cpp │ │ │ ├── glutil.h │ │ │ ├── glutil_extlist.h │ │ │ ├── ops.py │ │ │ ├── rasterize_gl.cpp │ │ │ ├── rasterize_gl.h │ │ │ ├── torch_common.inl │ │ │ └── torch_types.h │ │ └── lib │ │ │ └── setgpu.lib │ ├── nvdiffrast_jax │ │ ├── LICENSE.txt │ │ ├── README.md │ │ ├── __init__.py │ │ ├── jax_renderer.py │ │ ├── nvdiffrast │ │ │ ├── common │ │ │ │ ├── __init__.py │ │ │ │ ├── common.cpp │ │ │ │ ├── common.h │ │ │ │ ├── framework.h │ │ │ │ ├── glutil.cpp │ │ │ │ ├── glutil.h │ │ │ │ ├── glutil_extlist.h │ │ │ │ ├── interpolate.cu │ 
│ │ │ ├── interpolate.h │ │ │ │ ├── ops.py │ │ │ │ ├── rasterize.cu │ │ │ │ ├── rasterize.h │ │ │ │ ├── rasterize_gl.cpp │ │ │ │ ├── rasterize_gl.h │ │ │ │ ├── torch_common.inl │ │ │ │ └── torch_types.h │ │ │ ├── jax │ │ │ │ ├── __init__.py │ │ │ │ ├── jax_binding_ops.h │ │ │ │ ├── jax_bindings.cpp │ │ │ │ ├── jax_interpolate.cpp │ │ │ │ ├── jax_interpolate.h │ │ │ │ ├── jax_rasterize_gl.cpp │ │ │ │ ├── jax_rasterize_gl.h │ │ │ │ ├── ops.py │ │ │ │ ├── torch_common.inl │ │ │ │ └── torch_types.h │ │ │ └── lib │ │ │ │ └── setgpu.lib │ │ ├── renderer_matching_pytorch.py │ │ ├── run_sample.sh │ │ ├── scripts │ │ │ └── test.sh │ │ └── setup.py │ └── photorealistic_renderers │ │ ├── __init__.py │ │ ├── _kubric_exec_parallel.py │ │ └── kubric_interface.py ├── rgbd.py ├── scene_graph.py ├── transforms_3d.py ├── utils │ ├── __init__.py │ ├── bbox.py │ ├── enumerations.py │ ├── gaussian_splatting.py │ ├── icp.py │ ├── mesh.py │ ├── occlusion.py │ ├── pybullet_sim.py │ ├── r3d_loader.py │ ├── utils.py │ └── ycb_loader.py └── viz │ ├── __init__.py │ ├── meshcatviz.py │ ├── open3dviz.py │ └── viz.py ├── demo.py ├── docker ├── Dockerfile ├── build.sh ├── requirements_docker.txt └── run.sh ├── docs ├── index.md ├── installation.md └── logo.png ├── download.sh ├── mkdocs.yml ├── pyproject.toml ├── requirements-dev.txt ├── scripts ├── _mkl │ └── notebooks │ │ ├── .gitignore │ │ ├── 00a - Types.ipynb │ │ ├── 00b - Utils.ipynb │ │ ├── 01 - Plotting.ipynb │ │ ├── 02 - Pose.ipynb │ │ ├── 05 - Trimesh to Gaussians.ipynb │ │ ├── 05b - Gaussian Approximation.ipynb │ │ ├── 06a - Gaussian Renderer.ipynb │ │ ├── 07 - Gaussian Sensor Model.ipynb │ │ ├── 08c - Gaussian particle system - Genjax Minimal.ipynb │ │ ├── 10 - Simple Likelihood.ipynb │ │ ├── 30 - Table Scene Model.ipynb │ │ ├── 31 - Generic Viewpoint.ipynb │ │ ├── 32 - Patch model.ipynb │ │ ├── 33 - LH debugging.ipynb │ │ ├── 33b - LH debugging step func.ipynb │ │ ├── 99 - Traceviz Test.ipynb │ │ ├── data │ │ └── likelihood_test │ │ 
│ ├── table_scene_1.npy │ │ │ ├── table_scene_2.npy │ │ │ └── table_scene_3.npy │ │ ├── kubric │ │ ├── .devcontainer │ │ │ └── devcontainer.json │ │ ├── .gitignore │ │ ├── 00 - Kubric Utils.ipynb │ │ ├── 01 - Create a Kubric Scene.ipynb │ │ └── kubric_helper.py │ │ ├── nbexporter.py │ │ └── viz │ │ ├── draw_utils.js │ │ └── main.js ├── experiments │ ├── collaborations │ │ ├── 09-15-2023-15:02:04.pkl │ │ ├── arijit_physics.py │ │ ├── aryan.ipynb │ │ ├── ben.ipynb │ │ ├── dining_room_table.obj │ │ ├── shoebox_fused.obj │ │ ├── single_object_model_mccoy.ipynb │ │ └── xuan.ipynb │ ├── colmap │ │ ├── colmap_loader.py │ │ ├── dataset_loader.py │ │ └── run.py │ ├── deeplearning │ │ ├── dino │ │ │ ├── dif_interactive.ipynb │ │ │ ├── dino_interactive.ipynb │ │ │ ├── f3dm_dino_extraction.ipynb │ │ │ ├── test_dift.ipynb │ │ │ └── test_dino.ipynb │ │ ├── duduo.ipynb │ │ ├── feature_detection │ │ │ └── feature_detector.ipynb │ │ ├── kubric_dataset_gen │ │ │ ├── breaking_cosypose.ipynb │ │ │ ├── densefusion_test.ipynb │ │ │ ├── get dense fusion to work.ipynb │ │ │ └── kubric_dataset_gen.py │ │ ├── sam │ │ │ ├── fastsam.ipynb │ │ │ ├── sam.ipynb │ │ │ └── sam.py │ │ └── tapir.ipynb │ ├── gaussian_splatting │ │ ├── 3dgs_from_rgb_video.ipynb │ │ ├── 3dgs_jax.ipynb │ │ ├── 3dgs_tracking.ipynb │ │ ├── 3dgs_validate_optimizing.ipynb │ │ ├── banana_tracking_with_spheres.ipynb │ │ ├── debug_renderer_jit.ipynb │ │ ├── fast_particles.ipynb │ │ ├── nvdiffrast_diff.ipynb │ │ ├── nvdiffrast_diff_original.ipynb │ │ ├── nvdiffrast_jax_optim.ipynb │ │ ├── nvdiffrast_optim.ipynb │ │ ├── optimization.py │ │ ├── splatting.ipynb │ │ ├── splatting_messing_with_it.ipynb │ │ ├── splatting_optim copy.ipynb │ │ ├── splatting_optim.ipynb │ │ ├── splatting_simple.ipynb │ │ └── viz_splat.ipynb │ ├── gradient_c2f │ │ └── pose.ipynb │ ├── icra │ │ ├── camera_pose_tracking │ │ │ ├── camera_pose_tracking.ipynb │ │ │ ├── diff_rendering.ipynb │ │ │ ├── object_tracking.ipynb │ │ │ ├── pose.ipynb │ │ │ └── util.py 
│ │ ├── experiment_server │ │ │ ├── plane_pc_video_capture.npy │ │ │ ├── real_airplane.ipynb │ │ │ ├── toy_final.ply │ │ │ ├── toy_plane.ply │ │ │ └── toy_plane_video_capture.ply │ │ ├── fork_knife │ │ │ ├── fork-knife-cnn.ipynb │ │ │ ├── fork-knife-datagen.ipynb │ │ │ ├── fork-knife.ipynb │ │ │ ├── m1.obj │ │ │ └── m2.obj │ │ ├── mug │ │ │ └── mug.ipynb │ │ ├── scene_parse │ │ │ ├── nice_top_figure.ipynb │ │ │ ├── real_airplane.ipynb │ │ │ ├── real_mug.ipynb │ │ │ ├── real_multiobject-Copy1.ipynb │ │ │ ├── real_multiobject.ipynb │ │ │ └── scene_parse.ipynb │ │ └── slam │ │ │ └── o3d_dense_slam.ipynb │ ├── learning.ipynb │ ├── likelihood_debug │ │ ├── likelihood_debug.ipynb │ │ ├── real_scene_parse.ipynb │ │ ├── scene_parse.ipynb │ │ └── scene_parse_genjax.ipynb │ ├── mcs │ │ ├── cognitive-battery │ │ │ ├── .gitignore │ │ │ ├── cog_utils.py │ │ │ ├── data │ │ │ │ ├── info.md │ │ │ │ ├── meshes │ │ │ │ │ ├── apple.mtl │ │ │ │ │ ├── apple.obj │ │ │ │ │ ├── door.mtl │ │ │ │ │ ├── door.obj │ │ │ │ │ ├── occluder.mtl │ │ │ │ │ ├── occluder.obj │ │ │ │ │ ├── table.mtl │ │ │ │ │ └── table.obj │ │ │ │ └── videos │ │ │ │ │ ├── depths │ │ │ │ │ ├── frame_0.npy │ │ │ │ │ ├── frame_1.npy │ │ │ │ │ ├── frame_10.npy │ │ │ │ │ ├── frame_100.npy │ │ │ │ │ ├── frame_101.npy │ │ │ │ │ ├── frame_102.npy │ │ │ │ │ ├── frame_103.npy │ │ │ │ │ ├── frame_11.npy │ │ │ │ │ ├── frame_12.npy │ │ │ │ │ ├── frame_13.npy │ │ │ │ │ ├── frame_14.npy │ │ │ │ │ ├── frame_15.npy │ │ │ │ │ ├── frame_16.npy │ │ │ │ │ ├── frame_17.npy │ │ │ │ │ ├── frame_18.npy │ │ │ │ │ ├── frame_19.npy │ │ │ │ │ ├── frame_2.npy │ │ │ │ │ ├── frame_20.npy │ │ │ │ │ ├── frame_21.npy │ │ │ │ │ ├── frame_22.npy │ │ │ │ │ ├── frame_23.npy │ │ │ │ │ ├── frame_24.npy │ │ │ │ │ ├── frame_25.npy │ │ │ │ │ ├── frame_26.npy │ │ │ │ │ ├── frame_27.npy │ │ │ │ │ ├── frame_28.npy │ │ │ │ │ ├── frame_29.npy │ │ │ │ │ ├── frame_3.npy │ │ │ │ │ ├── frame_30.npy │ │ │ │ │ ├── frame_31.npy │ │ │ │ │ ├── frame_32.npy │ │ │ │ │ ├── 
frame_33.npy │ │ │ │ │ ├── frame_34.npy │ │ │ │ │ ├── frame_35.npy │ │ │ │ │ ├── frame_36.npy │ │ │ │ │ ├── frame_37.npy │ │ │ │ │ ├── frame_38.npy │ │ │ │ │ ├── frame_39.npy │ │ │ │ │ ├── frame_4.npy │ │ │ │ │ ├── frame_40.npy │ │ │ │ │ ├── frame_41.npy │ │ │ │ │ ├── frame_42.npy │ │ │ │ │ ├── frame_43.npy │ │ │ │ │ ├── frame_44.npy │ │ │ │ │ ├── frame_45.npy │ │ │ │ │ ├── frame_46.npy │ │ │ │ │ ├── frame_47.npy │ │ │ │ │ ├── frame_48.npy │ │ │ │ │ ├── frame_49.npy │ │ │ │ │ ├── frame_5.npy │ │ │ │ │ ├── frame_50.npy │ │ │ │ │ ├── frame_51.npy │ │ │ │ │ ├── frame_52.npy │ │ │ │ │ ├── frame_53.npy │ │ │ │ │ ├── frame_54.npy │ │ │ │ │ ├── frame_55.npy │ │ │ │ │ ├── frame_56.npy │ │ │ │ │ ├── frame_57.npy │ │ │ │ │ ├── frame_58.npy │ │ │ │ │ ├── frame_59.npy │ │ │ │ │ ├── frame_6.npy │ │ │ │ │ ├── frame_60.npy │ │ │ │ │ ├── frame_61.npy │ │ │ │ │ ├── frame_62.npy │ │ │ │ │ ├── frame_63.npy │ │ │ │ │ ├── frame_64.npy │ │ │ │ │ ├── frame_65.npy │ │ │ │ │ ├── frame_66.npy │ │ │ │ │ ├── frame_67.npy │ │ │ │ │ ├── frame_68.npy │ │ │ │ │ ├── frame_69.npy │ │ │ │ │ ├── frame_7.npy │ │ │ │ │ ├── frame_70.npy │ │ │ │ │ ├── frame_71.npy │ │ │ │ │ ├── frame_72.npy │ │ │ │ │ ├── frame_73.npy │ │ │ │ │ ├── frame_74.npy │ │ │ │ │ ├── frame_75.npy │ │ │ │ │ ├── frame_76.npy │ │ │ │ │ ├── frame_77.npy │ │ │ │ │ ├── frame_78.npy │ │ │ │ │ ├── frame_79.npy │ │ │ │ │ ├── frame_8.npy │ │ │ │ │ ├── frame_80.npy │ │ │ │ │ ├── frame_81.npy │ │ │ │ │ ├── frame_82.npy │ │ │ │ │ ├── frame_83.npy │ │ │ │ │ ├── frame_84.npy │ │ │ │ │ ├── frame_85.npy │ │ │ │ │ ├── frame_86.npy │ │ │ │ │ ├── frame_87.npy │ │ │ │ │ ├── frame_88.npy │ │ │ │ │ ├── frame_89.npy │ │ │ │ │ ├── frame_9.npy │ │ │ │ │ ├── frame_90.npy │ │ │ │ │ ├── frame_91.npy │ │ │ │ │ ├── frame_92.npy │ │ │ │ │ ├── frame_93.npy │ │ │ │ │ ├── frame_94.npy │ │ │ │ │ ├── frame_95.npy │ │ │ │ │ ├── frame_96.npy │ │ │ │ │ ├── frame_97.npy │ │ │ │ │ ├── frame_98.npy │ │ │ │ │ └── frame_99.npy │ │ │ │ │ └── segmented │ │ │ │ │ ├── 
frame_0.npy │ │ │ │ │ ├── frame_1.npy │ │ │ │ │ ├── frame_10.npy │ │ │ │ │ ├── frame_100.npy │ │ │ │ │ ├── frame_101.npy │ │ │ │ │ ├── frame_102.npy │ │ │ │ │ ├── frame_103.npy │ │ │ │ │ ├── frame_11.npy │ │ │ │ │ ├── frame_12.npy │ │ │ │ │ ├── frame_13.npy │ │ │ │ │ ├── frame_14.npy │ │ │ │ │ ├── frame_15.npy │ │ │ │ │ ├── frame_16.npy │ │ │ │ │ ├── frame_17.npy │ │ │ │ │ ├── frame_18.npy │ │ │ │ │ ├── frame_19.npy │ │ │ │ │ ├── frame_2.npy │ │ │ │ │ ├── frame_20.npy │ │ │ │ │ ├── frame_21.npy │ │ │ │ │ ├── frame_22.npy │ │ │ │ │ ├── frame_23.npy │ │ │ │ │ ├── frame_24.npy │ │ │ │ │ ├── frame_25.npy │ │ │ │ │ ├── frame_26.npy │ │ │ │ │ ├── frame_27.npy │ │ │ │ │ ├── frame_28.npy │ │ │ │ │ ├── frame_29.npy │ │ │ │ │ ├── frame_3.npy │ │ │ │ │ ├── frame_30.npy │ │ │ │ │ ├── frame_31.npy │ │ │ │ │ ├── frame_32.npy │ │ │ │ │ ├── frame_33.npy │ │ │ │ │ ├── frame_34.npy │ │ │ │ │ ├── frame_35.npy │ │ │ │ │ ├── frame_36.npy │ │ │ │ │ ├── frame_37.npy │ │ │ │ │ ├── frame_38.npy │ │ │ │ │ ├── frame_39.npy │ │ │ │ │ ├── frame_4.npy │ │ │ │ │ ├── frame_40.npy │ │ │ │ │ ├── frame_41.npy │ │ │ │ │ ├── frame_42.npy │ │ │ │ │ ├── frame_43.npy │ │ │ │ │ ├── frame_44.npy │ │ │ │ │ ├── frame_45.npy │ │ │ │ │ ├── frame_46.npy │ │ │ │ │ ├── frame_47.npy │ │ │ │ │ ├── frame_48.npy │ │ │ │ │ ├── frame_49.npy │ │ │ │ │ ├── frame_5.npy │ │ │ │ │ ├── frame_50.npy │ │ │ │ │ ├── frame_51.npy │ │ │ │ │ ├── frame_52.npy │ │ │ │ │ ├── frame_53.npy │ │ │ │ │ ├── frame_54.npy │ │ │ │ │ ├── frame_55.npy │ │ │ │ │ ├── frame_56.npy │ │ │ │ │ ├── frame_57.npy │ │ │ │ │ ├── frame_58.npy │ │ │ │ │ ├── frame_59.npy │ │ │ │ │ ├── frame_6.npy │ │ │ │ │ ├── frame_60.npy │ │ │ │ │ ├── frame_61.npy │ │ │ │ │ ├── frame_62.npy │ │ │ │ │ ├── frame_63.npy │ │ │ │ │ ├── frame_64.npy │ │ │ │ │ ├── frame_65.npy │ │ │ │ │ ├── frame_66.npy │ │ │ │ │ ├── frame_67.npy │ │ │ │ │ ├── frame_68.npy │ │ │ │ │ ├── frame_69.npy │ │ │ │ │ ├── frame_7.npy │ │ │ │ │ ├── frame_70.npy │ │ │ │ │ ├── frame_71.npy │ │ │ │ │ ├── 
frame_72.npy │ │ │ │ │ ├── frame_73.npy │ │ │ │ │ ├── frame_74.npy │ │ │ │ │ ├── frame_75.npy │ │ │ │ │ ├── frame_76.npy │ │ │ │ │ ├── frame_77.npy │ │ │ │ │ ├── frame_78.npy │ │ │ │ │ ├── frame_79.npy │ │ │ │ │ ├── frame_8.npy │ │ │ │ │ ├── frame_80.npy │ │ │ │ │ ├── frame_81.npy │ │ │ │ │ ├── frame_82.npy │ │ │ │ │ ├── frame_83.npy │ │ │ │ │ ├── frame_84.npy │ │ │ │ │ ├── frame_85.npy │ │ │ │ │ ├── frame_86.npy │ │ │ │ │ ├── frame_87.npy │ │ │ │ │ ├── frame_88.npy │ │ │ │ │ ├── frame_89.npy │ │ │ │ │ ├── frame_9.npy │ │ │ │ │ ├── frame_90.npy │ │ │ │ │ ├── frame_91.npy │ │ │ │ │ ├── frame_92.npy │ │ │ │ │ ├── frame_93.npy │ │ │ │ │ ├── frame_94.npy │ │ │ │ │ ├── frame_95.npy │ │ │ │ │ ├── frame_96.npy │ │ │ │ │ ├── frame_97.npy │ │ │ │ │ ├── frame_98.npy │ │ │ │ │ └── frame_99.npy │ │ │ ├── gravity_model.ipynb │ │ │ ├── model.ipynb │ │ │ ├── model.py │ │ │ ├── scene_graph.py │ │ │ └── swap_model.ipynb │ │ ├── otp_gen │ │ │ └── otp_gen │ │ │ │ ├── physics_priors.py │ │ │ │ ├── pipeline.ipynb │ │ │ │ ├── pyb_sim.ipynb │ │ │ │ ├── tracking.ipynb │ │ │ │ └── tracking_collision.ipynb │ │ └── physics.ipynb │ ├── slam │ │ ├── InteriorTest.obj │ │ ├── slam.ipynb │ │ ├── slam_2d.ipynb │ │ └── slam_with_room_obj.ipynb │ └── tabletop │ │ ├── analysis.ipynb │ │ ├── data_gen.py │ │ ├── inference.py │ │ ├── mug.ipynb │ │ └── voxel_learning.ipynb ├── run_colmap.py └── ssh.py ├── setup.py └── test ├── test_bbox_intersect.py ├── test_colmap.ipynb ├── test_colmap.py ├── test_cosypose.py ├── test_differentiable_rendering.py ├── test_genjax_model.py ├── test_icp.py ├── test_jax_renderer.ipynb ├── test_kubric.py ├── test_likelihood.py ├── test_open3d.py ├── test_renderer.py ├── test_renderer_internals.py ├── test_renderer_memory.py ├── test_scene_graph.py ├── test_splatting.py ├── test_transforms_3d.py ├── test_viz.py └── test_ycb_loading.py /.github/ISSUE_TEMPLATE/genjax_access.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 
GenJAX Access 3 | about: request access to GenJAX 4 | title: "[GENJAX]" 5 | labels: genjax 6 | assignees: sritchie 7 | 8 | --- 9 | 10 | **Say hi!** 11 | 12 | Who are you, and why do you need access to GenJAX? 13 | 14 | **Google Account** 15 | 16 | Share an email address associated with a google account. 17 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yaml: -------------------------------------------------------------------------------- 1 | name: pre-commit hooks 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | 15 | - uses: actions/setup-python@v4 16 | with: 17 | python-version: 3.11.5 18 | 19 | - uses: pre-commit/action@v3.0.0 20 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release to Artifact Registry 2 | 3 | on: 4 | workflow_dispatch: 5 | release: 6 | types: [published] 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-20.04 11 | 12 | # Add "id-token" with the intended permissions. 13 | permissions: 14 | contents: 'read' 15 | id-token: 'write' 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | with: 20 | # This is here so that the versioning plugin will be able to see tags 21 | # and version using them. 22 | fetch-depth: 0 23 | 24 | - name: install packages 25 | run: sudo apt-get install -y libegl1-mesa-dev libglu1-mesa-dev 26 | 27 | # This is needed to build our wheel. 
See 28 | # https://github.com/Jimver/cuda-toolkit/blob/master/src/links/linux-links.ts 29 | # for available versions 30 | - uses: Jimver/cuda-toolkit@v0.2.14 31 | id: cuda-toolkit 32 | with: 33 | cuda: '11.8.0' 34 | linux-local-args: '["--toolkit"]' 35 | method: 'network' 36 | 37 | - uses: actions/setup-python@v4 38 | with: 39 | python-version: 3.11.5 40 | 41 | - id: auth 42 | uses: google-github-actions/auth@v2 43 | with: 44 | credentials_json: "${{ secrets.ARTIFACT_REGISTRY_KEY }}" 45 | 46 | - name: Set up Cloud SDK 47 | uses: google-github-actions/setup-gcloud@v2 48 | 49 | - name: Display gcloud info 50 | run: gcloud info 51 | 52 | - name: Install release dependencies 53 | run: | 54 | python -m pip install -U pip 55 | python -m pip install cibuildwheel==2.12.0 build==0.10.0 wheel twine 56 | python -m pip install keyrings.google-artifactregistry-auth 57 | 58 | - name: Build the wheel and archive 59 | run: python -m build 60 | 61 | - name: Upload to Artifact Registry 62 | run: python3 -m twine upload --repository-url https://us-west1-python.pkg.dev/probcomp-caliban/probcomp/ dist/* 63 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv/* 2 | *.joblib 3 | *.gif 4 | *.png 5 | *.lock 6 | */__pycache__/* 7 | *.code-workspace 8 | *.pyc 9 | *.npz 10 | *.egg-info/* 11 | *.json 12 | assets/bop/* 13 | assets/panda_dataset/* 14 | assets/ycb_video_models/* 15 | *.jpeg 16 | experiments/3dnel/threednel 17 | .ipynb_checkpoints 18 | */.ipynb_checkpoints/* 19 | assets/sam/* 20 | assets/tum/* 21 | .vscode/* 22 | *.lock 23 | *.lock 24 | *.MOV 25 | assets/* 26 | *.jpg 27 | *.ply 28 | *.zip 29 | *.npz 30 | *.pdf 31 | *.pkl 32 | .DS_Store 33 | 34 | # C++ extensions 35 | *.so 36 | 37 | # Distribution / packaging 38 | build/ 39 | dist/ 40 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: 
-------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.6.0 4 | hooks: 5 | - id: check-yaml 6 | args: [--unsafe] 7 | - id: end-of-file-fixer 8 | - id: trailing-whitespace 9 | 10 | - repo: https://github.com/astral-sh/ruff-pre-commit 11 | rev: v0.4.10 12 | hooks: 13 | - id: ruff 14 | # types_or: [ python, pyi, jupyter ] 15 | 16 | - id: ruff-format 17 | # types_or: [ python, pyi, jupyter ] 18 | -------------------------------------------------------------------------------- /DEVELOPING.md: -------------------------------------------------------------------------------- 1 | # Developer's Guide 2 | 3 | This guide describes how to complete various tasks you'll encounter when working 4 | on the Bayes3D codebase. 5 | 6 | ### Commit Hooks 7 | 8 | We use [pre-commit](https://pre-commit.com/) to manage a series of git 9 | pre-commit hooks for the project; for example, each time you commit code, the 10 | hooks will make sure that your python is formatted properly. If your code isn't, 11 | the hook will format it, so when you try to commit the second time you'll get 12 | past the hook. 13 | 14 | All hooks are defined in `.pre-commit-config.yaml`. To install these hooks, 15 | install `pre-commit` if you don't yet have it. I prefer using 16 | [pipx](https://github.com/pipxproject/pipx) so that `pre-commit` stays globally 17 | available. 18 | 19 | ```bash 20 | pipx install pre-commit 21 | ``` 22 | 23 | Then install the hooks with this command: 24 | 25 | ```bash 26 | pre-commit install 27 | ``` 28 | 29 | Now they'll run on every commit. 
If you want to run them manually, run the 30 | following command: 31 | 32 | ```bash 33 | pre-commit run --all-files 34 | ``` 35 | -------------------------------------------------------------------------------- /assets/fonts/IBMPlexSerif-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/assets/fonts/IBMPlexSerif-Regular.ttf -------------------------------------------------------------------------------- /assets/sample_objs/cube.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -0.50000000 -0.50000000 -0.50000000 3 | v -0.50000000 -0.50000000 -0.50000000 4 | v -0.50000000 -0.50000000 -0.50000000 5 | v -0.50000000 -0.50000000 0.50000000 6 | v -0.50000000 -0.50000000 0.50000000 7 | v -0.50000000 -0.50000000 0.50000000 8 | v -0.50000000 0.50000000 -0.50000000 9 | v -0.50000000 0.50000000 -0.50000000 10 | v -0.50000000 0.50000000 -0.50000000 11 | v -0.50000000 0.50000000 0.50000000 12 | v -0.50000000 0.50000000 0.50000000 13 | v -0.50000000 0.50000000 0.50000000 14 | v 0.50000000 -0.50000000 -0.50000000 15 | v 0.50000000 -0.50000000 -0.50000000 16 | v 0.50000000 -0.50000000 -0.50000000 17 | v 0.50000000 -0.50000000 0.50000000 18 | v 0.50000000 -0.50000000 0.50000000 19 | v 0.50000000 -0.50000000 0.50000000 20 | v 0.50000000 0.50000000 -0.50000000 21 | v 0.50000000 0.50000000 -0.50000000 22 | v 0.50000000 0.50000000 -0.50000000 23 | v 0.50000000 0.50000000 0.50000000 24 | v 0.50000000 0.50000000 0.50000000 25 | v 0.50000000 0.50000000 0.50000000 26 | vn -1.00000000 0.00000000 0.00000000 27 | vn 0.00000000 0.00000000 -1.00000000 28 | vn 0.00000000 -1.00000000 0.00000000 29 | vn 0.00000000 0.00000000 1.00000000 30 | vn -1.00000000 0.00000000 0.00000000 31 | vn 0.00000000 -1.00000000 0.00000000 32 | vn 0.00000000 0.00000000 -1.00000000 33 | vn -1.00000000 
0.00000000 0.00000000 34 | vn 0.00000000 1.00000000 0.00000000 35 | vn -1.00000000 0.00000000 0.00000000 36 | vn 0.00000000 0.00000000 1.00000000 37 | vn 0.00000000 1.00000000 0.00000000 38 | vn 0.00000000 0.00000000 -1.00000000 39 | vn 0.00000000 -1.00000000 0.00000000 40 | vn 1.00000000 0.00000000 0.00000000 41 | vn 0.00000000 -1.00000000 0.00000000 42 | vn 1.00000000 0.00000000 0.00000000 43 | vn 0.00000000 0.00000000 1.00000000 44 | vn 0.00000000 1.00000000 0.00000000 45 | vn 0.00000000 0.00000000 -1.00000000 46 | vn 1.00000000 0.00000000 0.00000000 47 | vn 0.00000000 0.00000000 1.00000000 48 | vn 1.00000000 0.00000000 0.00000000 49 | vn 0.00000000 1.00000000 0.00000000 50 | f 2//2 20//20 13//13 51 | f 2//2 7//7 20//20 52 | f 1//1 10//10 8//8 53 | f 1//1 5//5 10//10 54 | f 9//9 24//24 19//19 55 | f 9//9 12//12 24//24 56 | f 15//15 21//21 23//23 57 | f 15//15 23//23 17//17 58 | f 3//3 14//14 16//16 59 | f 3//3 16//16 6//6 60 | f 4//4 18//18 22//22 61 | f 4//4 22//22 11//11 62 | -------------------------------------------------------------------------------- /assets/sample_objs/diamond.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v 0.00000000 0.00000000 0.39000001 3 | v 0.22500000 0.22500000 0.00000000 4 | v 0.22500000 -0.22500000 0.00000000 5 | v -0.22500000 -0.22500000 0.00000000 6 | v -0.22500000 0.22500000 0.00000000 7 | v 0.00000000 0.00000000 -0.39000001 8 | f 1 2 3 9 | f 1 3 4 10 | f 1 4 5 11 | f 1 5 2 12 | f 6 5 4 13 | f 6 4 3 14 | f 6 3 2 15 | f 6 2 1 16 | f 6 1 5 17 | -------------------------------------------------------------------------------- /assets/sample_objs/icosahedron.obj: -------------------------------------------------------------------------------- 1 | v -0.525731 0.000000 -0.850651 2 | v 0.525731 0.000000 -0.850651 3 | v 0.525731 0.000000 0.850651 4 | v -0.525731 0.000000 0.850651 5 | v -0.850651 -0.525731 0.000000 6 | v -0.850651 0.525731 0.000000 7 | v 
0.850651 0.525731 0.000000 8 | v 0.850651 -0.525731 0.000000 9 | v 0.000000 -0.850651 0.525731 10 | v 0.000000 -0.850651 -0.525731 11 | v 0.000000 0.850651 -0.525731 12 | v 0.000000 0.850651 0.525731 13 | f 2 10 1 14 | f 11 2 1 15 | f 6 11 1 16 | f 5 6 1 17 | f 10 5 1 18 | f 9 3 4 19 | f 5 9 4 20 | f 6 5 4 21 | f 12 6 4 22 | f 3 12 4 23 | f 12 3 7 24 | f 11 12 7 25 | f 2 11 7 26 | f 8 2 7 27 | f 3 8 7 28 | f 12 11 6 29 | f 10 9 5 30 | f 8 3 9 31 | f 10 8 9 32 | f 2 8 10 33 | -------------------------------------------------------------------------------- /assets/sample_objs/occulder.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -2.50000000 -0.05000000 -1.50000000 3 | v -2.50000000 -0.05000000 1.50000000 4 | v -2.50000000 0.05000000 -1.50000000 5 | v -2.50000000 0.05000000 1.50000000 6 | v 2.50000000 -0.05000000 -1.50000000 7 | v 2.50000000 -0.05000000 1.50000000 8 | v 2.50000000 0.05000000 -1.50000000 9 | v 2.50000000 0.05000000 1.50000000 10 | vn -0.57735027 -0.57735027 -0.57735027 11 | vn -0.57735027 -0.57735027 0.57735027 12 | vn -0.57735027 0.57735027 -0.57735027 13 | vn -0.57735027 0.57735027 0.57735027 14 | vn 0.57735027 -0.57735027 -0.57735027 15 | vn 0.57735027 -0.57735027 0.57735027 16 | vn 0.57735027 0.57735027 -0.57735027 17 | vn 0.57735027 0.57735027 0.57735027 18 | f 2//2 4//4 1//1 19 | f 5//5 2//2 1//1 20 | f 1//1 4//4 3//3 21 | f 3//3 5//5 1//1 22 | f 2//2 8//8 4//4 23 | f 6//6 2//2 5//5 24 | f 6//6 8//8 2//2 25 | f 4//4 8//8 3//3 26 | f 7//7 5//5 3//3 27 | f 3//3 8//8 7//7 28 | f 7//7 6//6 5//5 29 | f 8//8 6//6 7//7 30 | -------------------------------------------------------------------------------- /assets/sample_objs/pyramid.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -0.50000001 -0.50000001 -0.79999998 3 | v 0.50000001 -0.50000001 -0.79999998 4 | v 0.50000001 0.50000001 
-0.79999998 5 | v -0.50000001 0.50000001 -0.79999998 6 | v 0.00000000 0.00000000 0.79999998 7 | f 4 1 2 8 | f 3 4 2 9 | f 5 2 1 10 | f 4 5 1 11 | f 3 5 4 12 | f 5 3 2 13 | -------------------------------------------------------------------------------- /assets/sample_objs/toy_plane.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/assets/sample_objs/toy_plane.ply -------------------------------------------------------------------------------- /bayes3d/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/.DS_Store -------------------------------------------------------------------------------- /bayes3d/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | .. include:: ./documentation.md 3 | """ 4 | 5 | from importlib import metadata 6 | 7 | from . import colmap, distributions, scene_graph, utils 8 | from .camera import * 9 | from .likelihood import * 10 | from .renderer import * 11 | from .rgbd import * 12 | from .transforms_3d import * 13 | from .viz import * 14 | 15 | RENDERER = None 16 | 17 | __version__ = metadata.version("bayes3d") 18 | 19 | __all__ = ["colmap", "distributions", "scene_graph", "utils"] 20 | -------------------------------------------------------------------------------- /bayes3d/_mkl/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/_mkl/__init__.py -------------------------------------------------------------------------------- /bayes3d/_mkl/gaussian_particle_system_v0.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! 
File to edit: ../../scripts/_mkl/notebooks/07a - Gaussian particle system v0.ipynb. 2 | 3 | # %% auto 0 4 | __all__ = [ 5 | "normal_cdf", 6 | "normal_pdf", 7 | "normal_logpdf", 8 | "inv", 9 | "key", 10 | "Array", 11 | "Shape", 12 | "Bool", 13 | "Float", 14 | "Int", 15 | "Pose", 16 | ] 17 | 18 | # %% ../../scripts/_mkl/notebooks/07a - Gaussian particle system v0.ipynb 2 19 | import jax 20 | import jax.numpy as jnp 21 | import numpy as np 22 | 23 | from bayes3d._mkl.utils import * 24 | 25 | normal_cdf = jax.scipy.stats.norm.cdf 26 | normal_pdf = jax.scipy.stats.norm.pdf 27 | normal_logpdf = jax.scipy.stats.norm.logpdf 28 | inv = jnp.linalg.inv 29 | 30 | key = jax.random.PRNGKey(0) 31 | 32 | # %% ../../scripts/_mkl/notebooks/07a - Gaussian particle system v0.ipynb 4 33 | from typing import NamedTuple 34 | 35 | import jax 36 | import numpy as np 37 | 38 | Array = np.ndarray | jax.Array 39 | Shape = int | tuple[int, ...] 40 | Bool = Array 41 | Float = Array 42 | Int = Array 43 | 44 | Pose = tuple[jnp.ndarray, jnp.ndarray] 45 | 46 | 47 | class Pose(NamedTuple): 48 | quat: jnp.ndarray 49 | position: jnp.ndarray 50 | -------------------------------------------------------------------------------- /bayes3d/_mkl/gaussian_sensor_model.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../../scripts/_mkl/notebooks/06b - Gaussian Sensor Model.ipynb. 
2 | 3 | # %% auto 0 4 | __all__ = ["normal_cdf", "normal_pdf", "normal_logpdf", "inv", "key"] 5 | 6 | # %% ../../scripts/_mkl/notebooks/06b - Gaussian Sensor Model.ipynb 3 7 | import jax 8 | import jax.numpy as jnp 9 | 10 | # %% ../../scripts/_mkl/notebooks/06b - Gaussian Sensor Model.ipynb 4 11 | from bayes3d._mkl.types import * 12 | from bayes3d._mkl.utils import * 13 | 14 | # %% ../../scripts/_mkl/notebooks/06b - Gaussian Sensor Model.ipynb 5 15 | normal_cdf = jax.scipy.stats.norm.cdf 16 | normal_pdf = jax.scipy.stats.norm.pdf 17 | normal_logpdf = jax.scipy.stats.norm.logpdf 18 | inv = jnp.linalg.inv 19 | 20 | key = jax.random.PRNGKey(0) 21 | -------------------------------------------------------------------------------- /bayes3d/_mkl/types.py: -------------------------------------------------------------------------------- 1 | # AUTOGENERATED! DO NOT EDIT! File to edit: ../../scripts/_mkl/notebooks/00a - Types.ipynb. 2 | 3 | # %% auto 0 4 | __all__ = [ 5 | "Array", 6 | "Shape", 7 | "Bool", 8 | "Float", 9 | "Int", 10 | "FaceIndex", 11 | "FaceIndices", 12 | "ArrayN", 13 | "Array3", 14 | "Array2", 15 | "ArrayNx2", 16 | "ArrayNx3", 17 | "Matrix", 18 | "PrecisionMatrix", 19 | "CovarianceMatrix", 20 | "CholeskyMatrix", 21 | "SquareMatrix", 22 | "Vector", 23 | "Direction", 24 | "BaseVector", 25 | ] 26 | 27 | # %% ../../scripts/_mkl/notebooks/00a - Types.ipynb 1 28 | import jax 29 | import jaxlib 30 | import numpy as np 31 | 32 | Array = np.ndarray | jax.Array 33 | Shape = int | tuple[int, ...] 
34 | Bool = Array 35 | Float = Array 36 | Int = Array 37 | FaceIndex = int 38 | FaceIndices = Array 39 | ArrayN = Array 40 | Array3 = Array 41 | Array2 = Array 42 | ArrayNx2 = Array 43 | ArrayNx3 = Array 44 | Matrix = jaxlib.xla_extension.ArrayImpl 45 | PrecisionMatrix = Matrix 46 | CovarianceMatrix = Matrix 47 | CholeskyMatrix = Matrix 48 | SquareMatrix = Matrix 49 | Vector = Array 50 | Direction = Vector 51 | BaseVector = Vector 52 | -------------------------------------------------------------------------------- /bayes3d/colmap/__init__.py: -------------------------------------------------------------------------------- 1 | from .dataset_loader import * 2 | -------------------------------------------------------------------------------- /bayes3d/distributions.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as jnp 3 | from jax.scipy.special import logsumexp 4 | from tensorflow_probability.substrates import jax as tfp 5 | 6 | from .transforms_3d import quaternion_to_rotation_matrix, rotation_matrix_to_quaternion 7 | 8 | 9 | def vmf(key, concentration): 10 | translation = jnp.zeros(3) 11 | quat = tfp.distributions.VonMisesFisher( 12 | jnp.array([1.0, 0.0, 0.0, 0.0]), concentration 13 | ).sample(seed=key) 14 | rot_matrix = quaternion_to_rotation_matrix(quat) 15 | return jnp.vstack( 16 | [ 17 | jnp.hstack([rot_matrix, translation.reshape(3, 1)]), 18 | jnp.array([0.0, 0.0, 0.0, 1.0]), 19 | ] 20 | ) 21 | 22 | 23 | vmf_jit = jax.jit(vmf) 24 | 25 | 26 | def gaussian_vmf_zero_mean(key, var, concentration): 27 | translation = tfp.distributions.MultivariateNormalDiag( 28 | jnp.zeros(3), jnp.ones(3) * var 29 | ).sample(seed=key) 30 | quat = tfp.distributions.VonMisesFisher( 31 | jnp.array([1.0, 0.0, 0.0, 0.0]), concentration 32 | ).sample(seed=key) 33 | rot_matrix = quaternion_to_rotation_matrix(quat) 34 | return jnp.vstack( 35 | [ 36 | jnp.hstack([rot_matrix, translation.reshape(3, 1)]), 37 | 
jnp.array([0.0, 0.0, 0.0, 1.0]), 38 | ] 39 | ) 40 | 41 | 42 | def gaussian_vmf(key, pose_mean, var, concentration): 43 | return pose_mean.dot(gaussian_vmf_zero_mean(key, var, concentration)) 44 | 45 | 46 | gaussian_vmf_jit = jax.jit(gaussian_vmf) 47 | 48 | 49 | def gaussian_vmf_logpdf(pose, pose_mean, var, concentration): 50 | translation_prob = tfp.distributions.MultivariateNormalDiag( 51 | pose_mean[:3, 3], jnp.ones(3) * var 52 | ).log_prob(pose[:3, 3]) 53 | quat_mean = rotation_matrix_to_quaternion(pose_mean[:3, :3]) 54 | quat = rotation_matrix_to_quaternion(pose[:3, :3]) 55 | quat_prob = tfp.distributions.VonMisesFisher(quat_mean, concentration).log_prob( 56 | quat 57 | ) 58 | return translation_prob + quat_prob 59 | 60 | 61 | gaussian_vmf_logpdf_jit = jax.jit(gaussian_vmf_logpdf) 62 | 63 | 64 | def gaussian_vmf_mixture_sample(key, pose_means, log_weights, var, concentration): 65 | idx = tfp.distributions.Categorical(logits=log_weights).sample(seed=key) 66 | return gaussian_vmf(key, pose_means[idx], var, concentration) 67 | 68 | 69 | def gaussian_vmf_mixture_logpdf(key, pose, pose_means, log_weights, var, concentration): 70 | log_probs = jax.vmap(gaussian_vmf_logpdf, in_axes=(None, 0, None, None))( 71 | pose, pose_means, var, concentration 72 | ) 73 | log_mixture_probabilites = log_probs + log_weights 74 | return logsumexp(log_mixture_probabilites) 75 | 76 | 77 | def gaussian_vmf_logpdf(pose, pose_mean, var, concentration): 78 | translation_prob = tfp.distributions.MultivariateNormalDiag( 79 | pose_mean[:3, 3], jnp.ones(3) * var 80 | ).log_prob(pose[:3, 3]) 81 | quat_mean = rotation_matrix_to_quaternion(pose_mean[:3, :3]) 82 | quat = rotation_matrix_to_quaternion(pose[:3, :3]) 83 | quat_prob = tfp.distributions.VonMisesFisher(quat_mean, concentration).log_prob( 84 | quat 85 | ) 86 | return translation_prob + quat_prob 87 | -------------------------------------------------------------------------------- /bayes3d/documentation.md: 
-------------------------------------------------------------------------------- 1 | `bayes3d` is a package for Bayesian 3D Inverse Graphics 2 | -------------------------------------------------------------------------------- /bayes3d/genjax/__init__.py: -------------------------------------------------------------------------------- 1 | from .genjax_distributions import * 2 | from .model import * 3 | -------------------------------------------------------------------------------- /bayes3d/genjax/genjax_distributions.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | import jax 4 | import jax.numpy as jnp 5 | from genjax.core.datatypes import JAXGenerativeFunction 6 | from genjax.generative_functions.distributions import ExactDensity 7 | 8 | import bayes3d as b 9 | 10 | 11 | @dataclass 12 | class GaussianVMFPose(ExactDensity, JAXGenerativeFunction): 13 | def sample(self, key, pose_mean, var, concentration, **kwargs): 14 | return b.distributions.gaussian_vmf(key, pose_mean, var, concentration) 15 | 16 | def logpdf(self, pose, pose_mean, var, concentration, **kwargs): 17 | return b.distributions.gaussian_vmf_logpdf(pose, pose_mean, var, concentration) 18 | 19 | 20 | @dataclass 21 | class UniformPose(ExactDensity, JAXGenerativeFunction): 22 | def sample(self, key, low, high, **kwargs): 23 | position = jax.random.uniform(key, shape=(3,)) * (high - low) + low 24 | orientation = b.quaternion_to_rotation_matrix( 25 | jax.random.normal(key, shape=(4,)) 26 | ) 27 | return b.transform_from_rot_and_pos(orientation, position) 28 | 29 | def logpdf(self, pose, low, high, **kwargs): 30 | position = pose[:3, 3] 31 | valid = (low <= position) & (position <= high) 32 | position_score = jnp.log( 33 | (valid * 1.0) * (jnp.ones_like(position) / (high - low)) 34 | ) 35 | return position_score.sum() + jnp.pi**2 36 | 37 | 38 | @dataclass 39 | class ImageLikelihood(ExactDensity, JAXGenerativeFunction): 40 | def 
sample(self, key, img, variance, outlier_prob): 41 | return img 42 | 43 | def logpdf(self, observed_image, latent_image, variance, outlier_prob): 44 | return b.threedp3_likelihood( 45 | observed_image, 46 | latent_image, 47 | variance, 48 | outlier_prob, 49 | ) 50 | 51 | 52 | @dataclass 53 | class ContactParamsUniform(ExactDensity, JAXGenerativeFunction): 54 | def sample(self, key, low, high): 55 | return jax.random.uniform(key, shape=(3,)) * (high - low) + low 56 | 57 | def logpdf(self, sampled_val, low, high, **kwargs): 58 | valid = (low <= sampled_val) & (sampled_val <= high) 59 | log_probs = jnp.log((valid * 1.0) * (jnp.ones_like(sampled_val) / (high - low))) 60 | return log_probs.sum() 61 | 62 | 63 | @dataclass 64 | class UniformDiscreteArray(ExactDensity, JAXGenerativeFunction): 65 | def sample(self, key, vals, arr): 66 | return jax.random.choice(key, vals, shape=arr.shape) 67 | 68 | def logpdf(self, sampled_val, vals, arr, **kwargs): 69 | return jnp.log(1.0 / (vals.shape[0])) * arr.shape[0] 70 | 71 | 72 | @dataclass 73 | class UniformDiscrete(ExactDensity, JAXGenerativeFunction): 74 | def sample(self, key, vals): 75 | return jax.random.choice(key, vals) 76 | 77 | def logpdf(self, sampled_val, vals, **kwargs): 78 | return jnp.log(1.0 / (vals.shape[0])) 79 | 80 | 81 | gaussian_vmf_pose = GaussianVMFPose() 82 | image_likelihood = ImageLikelihood() 83 | contact_params_uniform = ContactParamsUniform() 84 | uniform_discrete = UniformDiscrete() 85 | uniform_discrete_array = UniformDiscreteArray() 86 | uniform_pose = UniformPose() 87 | -------------------------------------------------------------------------------- /bayes3d/neural/cosypose_baseline/INSTALL.md: -------------------------------------------------------------------------------- 1 | # CosyPose Setup 2 | ``` 3 | git clone https://github.com/Simple-Robotics/cosypose.git 4 | cd jax3dp3/cosypose_baseline 5 | bash cosypose_setup.sh 6 | ``` 7 | To test setup, run `test/test_cosypose.py`. 
8 | -------------------------------------------------------------------------------- /bayes3d/neural/cosypose_baseline/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/neural/cosypose_baseline/__init__.py -------------------------------------------------------------------------------- /bayes3d/neural/cosypose_baseline/cosypose_setup.sh: -------------------------------------------------------------------------------- 1 | # #!/usr/bin/env bash -l 2 | 3 | __conda_setup="$('/opt/conda/bin/conda' 'shell.bash' 'hook' 2> /dev/null)" 4 | if [ $? -eq 0 ]; then 5 | eval "$__conda_setup" 6 | else 7 | if [ -f "/opt/conda/etc/profile.d/conda.sh" ]; then 8 | . "/opt/conda/etc/profile.d/conda.sh" 9 | else 10 | export PATH="/opt/conda/bin:$PATH" 11 | fi 12 | fi 13 | unset __conda_setup 14 | 15 | git clone --recurse-submodules https://github.com/Simple-Robotics/cosypose.git 16 | cd cosypose 17 | # make sure to change numpy to version 1.19.2 18 | sed -i 's/numpy=1.17.4/numpy=1.19.2/g' environment.yaml 19 | conda env create -n cosypose --file environment.yaml 20 | source ~/.bashrc 21 | conda activate cosypose 22 | # make sure to install git-lfs before running this 23 | git lfs pull 24 | python setup.py install 25 | mkdir local_data 26 | 27 | echo "Downloading data..." 28 | # it is required to download 'train_real', 'train_synt', but not 'train_all' 29 | python -m cosypose.scripts.download --bop_dataset=ycbv 30 | python -m cosypose.scripts.download --bop_extra_files=ycbv 31 | python -m cosypose.scripts.download --urdf_models=ycbv 32 | 33 | echo "Downloading models..." 
34 | python -m cosypose.scripts.download --model=detector-bop-ycbv-synt+real--292971 35 | python -m cosypose.scripts.download --model=coarse-bop-ycbv-synt+real--822463 36 | python -m cosypose.scripts.download --model=refiner-bop-ycbv-synt+real--631598 37 | -------------------------------------------------------------------------------- /bayes3d/neural/requirements_dino.txt: -------------------------------------------------------------------------------- 1 | omegaconf 2 | fvcore 3 | iopath 4 | xformers==0.0.18 5 | submitit 6 | -------------------------------------------------------------------------------- /bayes3d/neural/segmentation.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as jnp 2 | 3 | import bayes3d as b 4 | 5 | HIINTERFACE = None 6 | 7 | 8 | def carvekit_get_foreground_mask(image: b.RGBD): 9 | global HIINTERFACE 10 | if HIINTERFACE is None: 11 | import torch 12 | from carvekit.api.high import HiInterface 13 | 14 | HIINTERFACE = HiInterface( 15 | object_type="object", # Can be "object" or "hairs-like". 
16 | batch_size_seg=5, 17 | batch_size_matting=1, 18 | device="cuda" if torch.cuda.is_available() else "cpu", 19 | seg_mask_size=640, # Use 640 for Tracer B7 and 320 for U2Net 20 | matting_mask_size=2048, 21 | trimap_prob_threshold=220, # 231, 22 | trimap_dilation=15, 23 | trimap_erosion_iters=20, 24 | fp16=False, 25 | ) 26 | imgs = HIINTERFACE([b.get_rgb_image(image.rgb)]) 27 | mask = jnp.array(imgs[0])[..., -1] > 0.5 28 | return mask 29 | -------------------------------------------------------------------------------- /bayes3d/prototyping.py: -------------------------------------------------------------------------------- 1 | from ._mkl.utils import * 2 | -------------------------------------------------------------------------------- /bayes3d/rendering/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/rendering/__init__.py -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | __version__ = "0.3.0" 10 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/common/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | from .ops import RasterizeGLContext, _get_plugin 10 | 11 | __all__ = ["RasterizeGLContext", "_get_plugin"] 12 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/common/common.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #include 10 | 11 | //------------------------------------------------------------------------ 12 | // Block and grid size calculators for kernel launches. 13 | 14 | dim3 getLaunchBlockSize(int maxWidth, int maxHeight, int width, int height) 15 | { 16 | int maxThreads = maxWidth * maxHeight; 17 | if (maxThreads <= 1 || (width * height) <= 1) 18 | return dim3(1, 1, 1); // Degenerate. 19 | 20 | // Start from max size. 21 | int bw = maxWidth; 22 | int bh = maxHeight; 23 | 24 | // Optimizations for weirdly sized buffers. 25 | if (width < bw) 26 | { 27 | // Decrease block width to smallest power of two that covers the buffer width. 28 | while ((bw >> 1) >= width) 29 | bw >>= 1; 30 | 31 | // Maximize height. 
32 | bh = maxThreads / bw; 33 | if (bh > height) 34 | bh = height; 35 | } 36 | else if (height < bh) 37 | { 38 | // Halve height and double width until fits completely inside buffer vertically. 39 | while (bh > height) 40 | { 41 | bh >>= 1; 42 | if (bw < width) 43 | bw <<= 1; 44 | } 45 | } 46 | 47 | // Done. 48 | return dim3(bw, bh, 1); 49 | } 50 | 51 | dim3 getLaunchGridSize(dim3 blockSize, int width, int height, int depth) 52 | { 53 | dim3 gridSize; 54 | gridSize.x = (width - 1) / blockSize.x + 1; 55 | gridSize.y = (height - 1) / blockSize.y + 1; 56 | gridSize.z = (depth - 1) / blockSize.z + 1; 57 | return gridSize; 58 | } 59 | 60 | //------------------------------------------------------------------------ 61 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/common/framework.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | 11 | // Framework-specific macros to enable code sharing. 12 | 13 | //------------------------------------------------------------------------ 14 | // Tensorflow. 
15 | 16 | #ifdef NVDR_TENSORFLOW 17 | #define EIGEN_USE_GPU 18 | #include "tensorflow/core/framework/op.h" 19 | #include "tensorflow/core/framework/op_kernel.h" 20 | #include "tensorflow/core/framework/shape_inference.h" 21 | #include "tensorflow/core/platform/default/logging.h" 22 | using namespace tensorflow; 23 | using namespace tensorflow::shape_inference; 24 | #define NVDR_CTX_ARGS OpKernelContext* _nvdr_ctx 25 | #define NVDR_CTX_PARAMS _nvdr_ctx 26 | #define NVDR_CHECK(COND, ERR) OP_REQUIRES(_nvdr_ctx, COND, errors::Internal(ERR)) 27 | #define NVDR_CHECK_CUDA_ERROR(CUDA_CALL) OP_CHECK_CUDA_ERROR(_nvdr_ctx, CUDA_CALL) 28 | #define NVDR_CHECK_GL_ERROR(GL_CALL) OP_CHECK_GL_ERROR(_nvdr_ctx, GL_CALL) 29 | #endif 30 | 31 | //------------------------------------------------------------------------ 32 | // PyTorch. 33 | 34 | #ifdef NVDR_TORCH 35 | #ifndef __CUDACC__ 36 | #include 37 | #include 38 | #include 39 | #include 40 | #include 41 | #endif 42 | #define NVDR_CTX_ARGS int _nvdr_ctx_dummy 43 | #define NVDR_CTX_PARAMS 0 44 | #define NVDR_CHECK(COND, ERR) do { TORCH_CHECK(COND, ERR) } while(0) 45 | #define NVDR_CHECK_CUDA_ERROR(CUDA_CALL) do { cudaError_t err = CUDA_CALL; TORCH_CHECK(!err, "Cuda error: ", cudaGetLastError(), "[", #CUDA_CALL, ";]"); } while(0) 46 | #define NVDR_CHECK_GL_ERROR(GL_CALL) do { GL_CALL; GLenum err = glGetError(); TORCH_CHECK(err == GL_NO_ERROR, "OpenGL error: ", getGLErrorString(err), " ", err, "[", #GL_CALL, ";]"); } while(0) 47 | #endif 48 | 49 | //------------------------------------------------------------------------ 50 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/common/torch_common.inl: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | #include "../common/framework.h" 11 | 12 | //------------------------------------------------------------------------ 13 | // Input check helpers. 14 | //------------------------------------------------------------------------ 15 | 16 | #ifdef _MSC_VER 17 | #define __func__ __FUNCTION__ 18 | #endif 19 | 20 | #define NVDR_CHECK_DEVICE(...) do { TORCH_CHECK(at::cuda::check_device({__VA_ARGS__}), __func__, "(): Inputs " #__VA_ARGS__ " must reside on the same GPU device") } while(0) 21 | #define NVDR_CHECK_CPU(...) do { nvdr_check_cpu({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must reside on CPU"); } while(0) 22 | #define NVDR_CHECK_CONTIGUOUS(...) do { nvdr_check_contiguous({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be contiguous tensors"); } while(0) 23 | #define NVDR_CHECK_F32(...) do { nvdr_check_f32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be float32 tensors"); } while(0) 24 | #define NVDR_CHECK_I32(...) 
do { nvdr_check_i32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be int32 tensors"); } while(0) 25 | inline void nvdr_check_cpu(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.device().type() == c10::DeviceType::CPU, func, err_msg); } 26 | inline void nvdr_check_contiguous(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.is_contiguous(), func, err_msg); } 27 | inline void nvdr_check_f32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kFloat32, func, err_msg); } 28 | inline void nvdr_check_i32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kInt32, func, err_msg); } 29 | //------------------------------------------------------------------------ 30 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/common/torch_types.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #include "torch_common.inl" 10 | 11 | //------------------------------------------------------------------------ 12 | // Python GL state wrapper. 
13 | 14 | class RasterizeGLState; 15 | class RasterizeGLStateWrapper 16 | { 17 | public: 18 | RasterizeGLStateWrapper (bool enableDB, bool automatic, int cudaDeviceIdx); 19 | ~RasterizeGLStateWrapper (void); 20 | 21 | void setContext (void); 22 | void releaseContext (void); 23 | 24 | RasterizeGLState* pState; 25 | bool automatic; 26 | int cudaDeviceIdx; 27 | }; 28 | 29 | //------------------------------------------------------------------------ 30 | // Python CudaRaster state wrapper. 31 | 32 | namespace CR { class CudaRaster; } 33 | class RasterizeCRStateWrapper 34 | { 35 | public: 36 | RasterizeCRStateWrapper (int cudaDeviceIdx); 37 | ~RasterizeCRStateWrapper (void); 38 | 39 | CR::CudaRaster* cr; 40 | int cudaDeviceIdx; 41 | }; 42 | 43 | //------------------------------------------------------------------------ 44 | // Mipmap wrapper to prevent intrusion from Python side. 45 | 46 | class TextureMipWrapper 47 | { 48 | public: 49 | torch::Tensor mip; 50 | int max_mip_level; 51 | std::vector texture_size; // For error checking. 52 | bool cube_mode; // For error checking. 53 | }; 54 | 55 | 56 | //------------------------------------------------------------------------ 57 | // Antialias topology hash wrapper to prevent intrusion from Python side. 
58 | 59 | class TopologyHashWrapper 60 | { 61 | public: 62 | torch::Tensor ev_hash; 63 | }; 64 | 65 | //------------------------------------------------------------------------ 66 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast/lib/setgpu.lib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/rendering/nvdiffrast/lib/setgpu.lib -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/README.md: -------------------------------------------------------------------------------- 1 | This is a JAX library developed for Bayes3D that provides high-performance primitive operations for rasterization-based differentiable rendering. 2 | 3 | The CUDA/OpenGL(C++) primitive operations are adapted from [NvDiffrast](https://github.com/probcomp/bayes3d/tree/diff_renderer/bayes3d/rendering/nvdiffrast_jax). 4 | The JAX custom operations are engineered based on the principles outlined in the official [JAX documentation](https://jax.readthedocs.io/en/latest/Custom_Operation_for_GPUs.html). 5 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 
8 | 9 | __version__ = "0.3.0" 10 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | from .ops import RasterizeGLContext, _get_plugin, get_log_level, set_log_level 10 | 11 | __all__ = ["RasterizeGLContext", "get_log_level", "set_log_level", "_get_plugin"] 12 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/common.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #include 10 | 11 | //------------------------------------------------------------------------ 12 | // Block and grid size calculators for kernel launches. 13 | 14 | dim3 getLaunchBlockSize(int maxWidth, int maxHeight, int width, int height) 15 | { 16 | int maxThreads = maxWidth * maxHeight; 17 | if (maxThreads <= 1 || (width * height) <= 1) 18 | return dim3(1, 1, 1); // Degenerate. 
19 | 20 | // Start from max size. 21 | int bw = maxWidth; 22 | int bh = maxHeight; 23 | 24 | // Optimizations for weirdly sized buffers. 25 | if (width < bw) 26 | { 27 | // Decrease block width to smallest power of two that covers the buffer width. 28 | while ((bw >> 1) >= width) 29 | bw >>= 1; 30 | 31 | // Maximize height. 32 | bh = maxThreads / bw; 33 | if (bh > height) 34 | bh = height; 35 | } 36 | else if (height < bh) 37 | { 38 | // Halve height and double width until fits completely inside buffer vertically. 39 | while (bh > height) 40 | { 41 | bh >>= 1; 42 | if (bw < width) 43 | bw <<= 1; 44 | } 45 | } 46 | 47 | // Done. 48 | return dim3(bw, bh, 1); 49 | } 50 | 51 | dim3 getLaunchGridSize(dim3 blockSize, int width, int height, int depth) 52 | { 53 | dim3 gridSize; 54 | gridSize.x = (width - 1) / blockSize.x + 1; 55 | gridSize.y = (height - 1) / blockSize.y + 1; 56 | gridSize.z = (depth - 1) / blockSize.z + 1; 57 | return gridSize; 58 | } 59 | 60 | //------------------------------------------------------------------------ 61 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/framework.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | 11 | // Framework-specific macros to enable code sharing. 12 | 13 | //------------------------------------------------------------------------ 14 | // Tensorflow. 
15 | 16 | #ifdef NVDR_TENSORFLOW 17 | #define EIGEN_USE_GPU 18 | #include "tensorflow/core/framework/op.h" 19 | #include "tensorflow/core/framework/op_kernel.h" 20 | #include "tensorflow/core/framework/shape_inference.h" 21 | #include "tensorflow/core/platform/default/logging.h" 22 | using namespace tensorflow; 23 | using namespace tensorflow::shape_inference; 24 | #define NVDR_CTX_ARGS OpKernelContext* _nvdr_ctx 25 | #define NVDR_CTX_PARAMS _nvdr_ctx 26 | #define NVDR_CHECK(COND, ERR) OP_REQUIRES(_nvdr_ctx, COND, errors::Internal(ERR)) 27 | #define NVDR_CHECK_CUDA_ERROR(CUDA_CALL) OP_CHECK_CUDA_ERROR(_nvdr_ctx, CUDA_CALL) 28 | #define NVDR_CHECK_GL_ERROR(GL_CALL) OP_CHECK_GL_ERROR(_nvdr_ctx, GL_CALL) 29 | #endif 30 | 31 | //------------------------------------------------------------------------ 32 | // PyTorch. 33 | 34 | #ifdef NVDR_TORCH 35 | #ifndef __CUDACC__ 36 | #include 37 | #include 38 | #include 39 | #include 40 | #include 41 | #endif 42 | #define NVDR_CTX_ARGS int _nvdr_ctx_dummy 43 | #define NVDR_CTX_PARAMS 0 44 | #define NVDR_CHECK(COND, ERR) do { TORCH_CHECK(COND, ERR) } while(0) 45 | #define NVDR_CHECK_CUDA_ERROR(CUDA_CALL) do { cudaError_t err = CUDA_CALL; TORCH_CHECK(!err, "Cuda error: ", cudaGetLastError(), "[", #CUDA_CALL, ";]"); } while(0) 46 | #define NVDR_CHECK_GL_ERROR(GL_CALL) do { GL_CALL; GLenum err = glGetError(); TORCH_CHECK(err == GL_NO_ERROR, "OpenGL error: ", getGLErrorString(err), " ", err, "[", #GL_CALL, ";]"); } while(0) 47 | #endif 48 | 49 | //------------------------------------------------------------------------ 50 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/interpolate.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | 11 | //------------------------------------------------------------------------ 12 | // Constants and helpers. 13 | 14 | #define IP_FWD_MAX_KERNEL_BLOCK_WIDTH 8 15 | #define IP_FWD_MAX_KERNEL_BLOCK_HEIGHT 8 16 | #define IP_GRAD_MAX_KERNEL_BLOCK_WIDTH 8 17 | #define IP_GRAD_MAX_KERNEL_BLOCK_HEIGHT 8 18 | #define IP_MAX_DIFF_ATTRS 32 19 | 20 | //------------------------------------------------------------------------ 21 | // CUDA kernel params. 22 | 23 | struct InterpolateKernelParams 24 | { 25 | const int* tri; // Incoming triangle buffer. 26 | const float* attr; // Incoming attribute buffer. 27 | const float* rast; // Incoming rasterizer output buffer. 28 | const float* rastDB; // Incoming rasterizer output buffer for bary derivatives. 29 | const float* dy; // Incoming attribute gradients. 30 | const float* dda; // Incoming attr diff gradients. 31 | float* out; // Outgoing interpolated attributes. 32 | float* outDA; // Outgoing texcoord major axis lengths. 33 | float* gradAttr; // Outgoing attribute gradients. 34 | float* gradRaster; // Outgoing rasterizer gradients. 35 | float* gradRasterDB; // Outgoing rasterizer bary diff gradients. 36 | int numTriangles; // Number of triangles. 37 | int numVertices; // Number of vertices. 38 | int numAttr; // Number of total vertex attributes. 39 | int numDiffAttr; // Number of attributes to differentiate. 40 | int width; // Image width. 41 | int height; // Image height. 42 | int depth; // Minibatch size. 43 | int attrBC; // 0=normal, 1=attr is broadcast. 44 | int instance_mode; // 0=normal, 1=instance mode. 
45 | int diff_attrs_all; // 0=normal, 1=produce pixel differentials for all attributes. 46 | int diffAttrs[IP_MAX_DIFF_ATTRS]; // List of attributes to differentiate. 47 | }; 48 | 49 | //------------------------------------------------------------------------ 50 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/rasterize.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | 11 | //------------------------------------------------------------------------ 12 | // Constants and helpers. 13 | 14 | #define RAST_CUDA_FWD_SHADER_KERNEL_BLOCK_WIDTH 8 15 | #define RAST_CUDA_FWD_SHADER_KERNEL_BLOCK_HEIGHT 8 16 | #define RAST_GRAD_MAX_KERNEL_BLOCK_WIDTH 8 17 | #define RAST_GRAD_MAX_KERNEL_BLOCK_HEIGHT 8 18 | 19 | //------------------------------------------------------------------------ 20 | // CUDA forward rasterizer shader kernel params. 21 | 22 | struct RasterizeCudaFwdShaderParams 23 | { 24 | const float* pos; // Vertex positions. 25 | const int* tri; // Triangle indices. 26 | const int* in_idx; // Triangle idx buffer from rasterizer. 27 | float* out; // Main output buffer. 28 | float* out_db; // Bary pixel gradient output buffer. 29 | int numTriangles; // Number of triangles. 30 | int numVertices; // Number of vertices. 31 | int width; // Image width. 32 | int height; // Image height. 33 | int depth; // Size of minibatch. 34 | int instance_mode; // 1 if in instance rendering mode. 
35 | float xs, xo, ys, yo; // Pixel position to clip-space x, y transform. 36 | }; 37 | 38 | //------------------------------------------------------------------------ 39 | // Gradient CUDA kernel params. 40 | 41 | struct RasterizeGradParams 42 | { 43 | const float* pos; // Incoming position buffer. 44 | const int* tri; // Incoming triangle buffer. 45 | const float* out; // Rasterizer output buffer. 46 | const float* dy; // Incoming gradients of rasterizer output buffer. 47 | const float* ddb; // Incoming gradients of bary diff output buffer. 48 | float* grad; // Outgoing position gradients. 49 | int numTriangles; // Number of triangles. 50 | int numVertices; // Number of vertices. 51 | int width; // Image width. 52 | int height; // Image height. 53 | int depth; // Size of minibatch. 54 | int instance_mode; // 1 if in instance rendering mode. 55 | float xs, xo, ys, yo; // Pixel position to clip-space x, y transform. 56 | }; 57 | 58 | //------------------------------------------------------------------------ 59 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/torch_common.inl: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | #include "../common/framework.h" 11 | 12 | //------------------------------------------------------------------------ 13 | // Input check helpers. 
14 | //------------------------------------------------------------------------ 15 | 16 | #ifdef _MSC_VER 17 | #define __func__ __FUNCTION__ 18 | #endif 19 | 20 | #define NVDR_CHECK_DEVICE(...) do { TORCH_CHECK(at::cuda::check_device({__VA_ARGS__}), __func__, "(): Inputs " #__VA_ARGS__ " must reside on the same GPU device") } while(0) 21 | #define NVDR_CHECK_CPU(...) do { nvdr_check_cpu({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must reside on CPU"); } while(0) 22 | #define NVDR_CHECK_CONTIGUOUS(...) do { nvdr_check_contiguous({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be contiguous tensors"); } while(0) 23 | #define NVDR_CHECK_F32(...) do { nvdr_check_f32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be float32 tensors"); } while(0) 24 | #define NVDR_CHECK_I32(...) do { nvdr_check_i32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be int32 tensors"); } while(0) 25 | inline void nvdr_check_cpu(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.device().type() == c10::DeviceType::CPU, func, err_msg); } 26 | inline void nvdr_check_contiguous(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.is_contiguous(), func, err_msg); } 27 | inline void nvdr_check_f32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kFloat32, func, err_msg); } 28 | inline void nvdr_check_i32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kInt32, func, err_msg); } 29 | //------------------------------------------------------------------------ 30 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/common/torch_types.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, 
NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #include "torch_common.inl" 10 | 11 | //------------------------------------------------------------------------ 12 | // Python GL state wrapper. 13 | 14 | class RasterizeGLState; 15 | class RasterizeGLStateWrapper 16 | { 17 | public: 18 | RasterizeGLStateWrapper (bool enableDB, bool automatic, int cudaDeviceIdx); 19 | ~RasterizeGLStateWrapper (void); 20 | 21 | void setContext (void); 22 | void releaseContext (void); 23 | 24 | RasterizeGLState* pState; 25 | bool automatic; 26 | int cudaDeviceIdx; 27 | }; 28 | 29 | //------------------------------------------------------------------------ 30 | // Python CudaRaster state wrapper. 31 | 32 | namespace CR { class CudaRaster; } 33 | class RasterizeCRStateWrapper 34 | { 35 | public: 36 | RasterizeCRStateWrapper (int cudaDeviceIdx); 37 | ~RasterizeCRStateWrapper (void); 38 | 39 | CR::CudaRaster* cr; 40 | int cudaDeviceIdx; 41 | }; 42 | 43 | //------------------------------------------------------------------------ 44 | // Mipmap wrapper to prevent intrusion from Python side. 45 | 46 | class TextureMipWrapper 47 | { 48 | public: 49 | torch::Tensor mip; 50 | int max_mip_level; 51 | std::vector texture_size; // For error checking. 52 | bool cube_mode; // For error checking. 53 | }; 54 | 55 | 56 | //------------------------------------------------------------------------ 57 | // Antialias topology hash wrapper to prevent intrusion from Python side. 
58 | 59 | class TopologyHashWrapper 60 | { 61 | public: 62 | torch::Tensor ev_hash; 63 | }; 64 | 65 | //------------------------------------------------------------------------ 66 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/jax/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | from .ops import RasterizeGLContext, _get_plugin, get_log_level, set_log_level 10 | 11 | __all__ = ["RasterizeGLContext", "get_log_level", "set_log_level", "_get_plugin"] 12 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/jax/jax_binding_ops.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | // https://en.cppreference.com/w/cpp/numeric/bit_cast 5 | template 6 | typename std::enable_if::value && 7 | std::is_trivially_copyable::value, 8 | To>::type 9 | bit_cast(const From& src) noexcept { 10 | static_assert( 11 | std::is_trivially_constructible::value, 12 | "This implementation additionally requires destination type to be trivially constructible"); 13 | 14 | To dst; 15 | memcpy(&dst, &src, sizeof(To)); 16 | return dst; 17 | } 18 | 19 | // Note that bit_cast is only available in recent C++ standards so you might need 20 | // to provide a shim like the one in lib/kernel_helpers.h 21 | template 22 | std::string PackDescriptorAsString(const T& descriptor) { 23 | return 
std::string(bit_cast(&descriptor), sizeof(T)); 24 | } 25 | 26 | template 27 | pybind11::bytes PackDescriptor(const T& descriptor) { 28 | return pybind11::bytes(PackDescriptorAsString(descriptor)); 29 | } 30 | 31 | template 32 | const T* UnpackDescriptor(const char* opaque, std::size_t opaque_len) { 33 | if (opaque_len != sizeof(T)) { 34 | throw std::runtime_error("Invalid opaque object size"); 35 | } 36 | return bit_cast(opaque); 37 | } 38 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/jax/jax_interpolate.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | 11 | #if !(defined(NVDR_TORCH) && defined(__CUDACC__)) 12 | #include "../common/framework.h" 13 | #include "../common/glutil.h" 14 | 15 | struct DiffInterpolateCustomCallDescriptor { 16 | int num_images; // attr[0] 17 | int num_vertices; // attr[1] 18 | int num_attributes; // attr[2] 19 | int rast_height; // rast[1] 20 | int rast_width; // rast[2] 21 | int rast_depth; // rast[0] 22 | int num_triangles; // tri[0] 23 | int num_diff_attributes; // diff_attr 24 | }; 25 | 26 | #endif // !(defined(NVDR_TORCH) && defined(__CUDACC__)) 27 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/jax/torch_common.inl: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #pragma once 10 | #include "../common/framework.h" 11 | 12 | //------------------------------------------------------------------------ 13 | // Input check helpers. 14 | //------------------------------------------------------------------------ 15 | 16 | #ifdef _MSC_VER 17 | #define __func__ __FUNCTION__ 18 | #endif 19 | 20 | #define NVDR_CHECK_DEVICE(...) do { TORCH_CHECK(at::cuda::check_device({__VA_ARGS__}), __func__, "(): Inputs " #__VA_ARGS__ " must reside on the same GPU device") } while(0) 21 | #define NVDR_CHECK_CPU(...) do { nvdr_check_cpu({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must reside on CPU"); } while(0) 22 | #define NVDR_CHECK_CONTIGUOUS(...) do { nvdr_check_contiguous({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be contiguous tensors"); } while(0) 23 | #define NVDR_CHECK_F32(...) do { nvdr_check_f32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be float32 tensors"); } while(0) 24 | #define NVDR_CHECK_I32(...) 
do { nvdr_check_i32({__VA_ARGS__}, __func__, "(): Inputs " #__VA_ARGS__ " must be int32 tensors"); } while(0) 25 | inline void nvdr_check_cpu(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.device().type() == c10::DeviceType::CPU, func, err_msg); } 26 | inline void nvdr_check_contiguous(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.is_contiguous(), func, err_msg); } 27 | inline void nvdr_check_f32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kFloat32, func, err_msg); } 28 | inline void nvdr_check_i32(at::ArrayRef ts, const char* func, const char* err_msg) { for (const at::Tensor& t : ts) TORCH_CHECK(t.dtype() == torch::kInt32, func, err_msg); } 29 | //------------------------------------------------------------------------ 30 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/jax/torch_types.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | // 3 | // NVIDIA CORPORATION and its licensors retain all intellectual property 4 | // and proprietary rights in and to this software, related documentation 5 | // and any modifications thereto. Any use, reproduction, disclosure or 6 | // distribution of this software and related documentation without an express 7 | // license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | #include "torch_common.inl" 10 | 11 | //------------------------------------------------------------------------ 12 | // Python GL state wrapper. 
13 | 14 | class RasterizeGLState; 15 | class RasterizeGLStateWrapper 16 | { 17 | public: 18 | RasterizeGLStateWrapper (bool enableDB, bool automatic, int cudaDeviceIdx); 19 | ~RasterizeGLStateWrapper (void); 20 | 21 | void setContext (void); 22 | void releaseContext (void); 23 | 24 | RasterizeGLState* pState; 25 | bool automatic; 26 | int cudaDeviceIdx; 27 | }; 28 | 29 | //------------------------------------------------------------------------ 30 | // Python CudaRaster state wrapper. 31 | 32 | namespace CR { class CudaRaster; } 33 | class RasterizeCRStateWrapper 34 | { 35 | public: 36 | RasterizeCRStateWrapper (int cudaDeviceIdx); 37 | ~RasterizeCRStateWrapper (void); 38 | 39 | CR::CudaRaster* cr; 40 | int cudaDeviceIdx; 41 | }; 42 | 43 | //------------------------------------------------------------------------ 44 | // Mipmap wrapper to prevent intrusion from Python side. 45 | 46 | class TextureMipWrapper 47 | { 48 | public: 49 | torch::Tensor mip; 50 | int max_mip_level; 51 | std::vector texture_size; // For error checking. 52 | bool cube_mode; // For error checking. 53 | }; 54 | 55 | 56 | //------------------------------------------------------------------------ 57 | // Antialias topology hash wrapper to prevent intrusion from Python side. 
58 | 59 | class TopologyHashWrapper 60 | { 61 | public: 62 | torch::Tensor ev_hash; 63 | }; 64 | 65 | //------------------------------------------------------------------------ 66 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/nvdiffrast/lib/setgpu.lib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/rendering/nvdiffrast_jax/nvdiffrast/lib/setgpu.lib -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/run_sample.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 4 | # 5 | # NVIDIA CORPORATION and its licensors retain all intellectual property 6 | # and proprietary rights in and to this software, related documentation 7 | # and any modifications thereto. Any use, reproduction, disclosure or 8 | # distribution of this software and related documentation without an express 9 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 10 | 11 | function print_help { 12 | echo "Usage: `basename $0` [--build-container] " 13 | echo "" 14 | echo "Option --build-container will build the Docker container based on" 15 | echo "docker/Dockerfile and tag the image with gltorch:latest." 
16 | echo "" 17 | echo "Example: `basename $0` samples/torch/envphong.py" 18 | } 19 | 20 | build_container=0 21 | sample="" 22 | while [[ "$#" -gt 0 ]]; do 23 | case $1 in 24 | --build-container) build_container=1;; 25 | -h|--help) print_help; exit 0 ;; 26 | --*) echo "Unknown parameter passed: $1"; exit 1 ;; 27 | *) sample="$1"; shift; break; 28 | esac 29 | shift 30 | done 31 | 32 | rest=$@ 33 | 34 | # Build the docker container 35 | if [ "$build_container" = "1" ]; then 36 | docker build --tag gltorch:latest -f docker/Dockerfile . 37 | fi 38 | 39 | if [ ! -f "$sample" ]; then 40 | echo 41 | echo "No python sample given or file '$sample' not found. Exiting." 42 | exit 1 43 | fi 44 | 45 | image="gltorch:latest" 46 | 47 | echo "Using container image: $image" 48 | echo "Running command: $sample $rest" 49 | 50 | # Run a sample with docker 51 | docker run --rm -it --gpus all --user $(id -u):$(id -g) \ 52 | -v `pwd`:/app --workdir /app -e TORCH_EXTENSIONS_DIR=/app/tmp $image python3 $sample $rest 53 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/scripts/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | python renderer_matching_pytorch.py jax # get jax renderer outputs 3 | python renderer_matching_pytorch.py torch # get torch renderer outputs 4 | -------------------------------------------------------------------------------- /bayes3d/rendering/nvdiffrast_jax/setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # NVIDIA CORPORATION and its licensors retain all intellectual property 4 | # and proprietary rights in and to this software, related documentation 5 | # and any modifications thereto. 
Any use, reproduction, disclosure or 6 | # distribution of this software and related documentation without an express 7 | # license agreement from NVIDIA CORPORATION is strictly prohibited. 8 | 9 | import os 10 | 11 | import nvdiffrast 12 | import setuptools 13 | 14 | with open("README.md", "r") as fh: 15 | long_description = fh.read() 16 | 17 | setuptools.setup( 18 | name="nvdiffrast", 19 | version=nvdiffrast.__version__, 20 | author="Samuli Laine", 21 | author_email="slaine@nvidia.com", 22 | description="nvdiffrast - modular primitives for high-performance differentiable rendering", 23 | long_description=long_description, 24 | long_description_content_type="text/markdown", 25 | url="https://github.com/NVlabs/nvdiffrast", 26 | packages=setuptools.find_packages(), 27 | package_data={ 28 | "nvdiffrast": [ 29 | "common/*.h", 30 | "common/*.inl", 31 | "common/*.cu", 32 | "common/*.cpp", 33 | "common/cudaraster/*.hpp", 34 | "common/cudaraster/impl/*.cpp", 35 | "common/cudaraster/impl/*.hpp", 36 | "common/cudaraster/impl/*.inl", 37 | "common/cudaraster/impl/*.cu", 38 | "lib/*.h", 39 | "torch/*.h", 40 | "torch/*.inl", 41 | "torch/*.cpp", 42 | "tensorflow/*.cu", 43 | "jax/*.h", 44 | "jax/*.inl", 45 | "jax/*.cpp", 46 | ] 47 | + (["lib/*.lib"] if os.name == "nt" else []) 48 | }, 49 | include_package_data=True, 50 | install_requires=[ 51 | "numpy" 52 | ], # note: can't require torch here as it will install torch even for a TensorFlow container 53 | classifiers=[ 54 | "Programming Language :: Python :: 3", 55 | "Operating System :: OS Independent", 56 | ], 57 | python_requires=">=3.6", 58 | ) 59 | -------------------------------------------------------------------------------- /bayes3d/rendering/photorealistic_renderers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/bayes3d/rendering/photorealistic_renderers/__init__.py 
-------------------------------------------------------------------------------- /bayes3d/rendering/photorealistic_renderers/_kubric_exec_parallel.py: -------------------------------------------------------------------------------- 1 | # Copyright 2022 The Kubric Authors 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # https://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import logging 16 | 17 | import kubric as kb 18 | import numpy as np 19 | from kubric.renderer.blender import Blender as KubricRenderer 20 | 21 | # import kubric.core.color as color 22 | 23 | # unpacking the data from the npz file 24 | data_file = "/tmp/blenderproc_kubric.npz" 25 | data = np.load(data_file, allow_pickle=True) 26 | mesh_paths = data["mesh_paths"] 27 | mesh_scales = data["mesh_scales"] 28 | mesh_colors = data["mesh_colors"] 29 | poses = data["poses"] 30 | camera_pose = data["camera_pose"] 31 | K = data["K"] 32 | height = data["height"] 33 | width = data["width"] 34 | scaling_factor = data["scaling_factor"] 35 | fx = data["fx"] 36 | fy = data["fy"] 37 | cx = data["cx"] 38 | cy = data["cy"] 39 | near = data["near"] 40 | far = data["far"] 41 | intensity = float(data["intensity"]) 42 | background_color = data["background"] 43 | 44 | logging.basicConfig(level="INFO") 45 | 46 | # convert intrinsics to focal_length, sensor_width 47 | focal_length = float(fx) 48 | sensor_width = float(width) 49 | 50 | for scene_number in range(len(poses)): 51 | # --- create scene and attach a renderer to it 52 | 
scene = kb.Scene(resolution=(width.item(), height.item())) 53 | scene.background = kb.Color(*background_color) 54 | renderer = KubricRenderer(scene) 55 | # --- create perspective camera 56 | scene += kb.PerspectiveCamera( 57 | name="camera", 58 | position=camera_pose[0], 59 | quaternion=camera_pose[1], 60 | focal_length=focal_length, 61 | sensor_width=sensor_width, 62 | ) 63 | scene += kb.PointLight(name="sun", position=camera_pose[0], intensity=intensity) 64 | 65 | for obj_number in range(len(poses[scene_number])): 66 | mesh_scales = [e * scaling_factor for e in mesh_scales] 67 | rng = np.random.default_rng() 68 | obj_mat = kb.FlatMaterial(color=kb.Color(*mesh_colors[obj_number])) 69 | obj = kb.FileBasedObject( 70 | asset_id="1", 71 | render_filename=mesh_paths[obj_number], 72 | material=obj_mat, 73 | simulation_filename=None, 74 | scale=mesh_scales[obj_number], 75 | position=poses[scene_number][obj_number][0], 76 | quaternion=poses[scene_number][obj_number][1], 77 | ) 78 | print(obj.material) 79 | scene += obj 80 | 81 | frame = renderer.render_still() 82 | np.savez( 83 | f"/tmp/{scene_number}.npz", 84 | rgba=frame["rgba"], 85 | segmentation=frame["segmentation"], 86 | depth=frame["depth"], 87 | ) 88 | -------------------------------------------------------------------------------- /bayes3d/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .bbox import * 2 | from .enumerations import * 3 | from .icp import * 4 | from .mesh import * 5 | from .occlusion import * 6 | from .r3d_loader import * 7 | from .utils import * 8 | from .ycb_loader import * 9 | -------------------------------------------------------------------------------- /bayes3d/utils/bbox.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as jnp 3 | 4 | 5 | def separating_axis_test(axis, box1, box2): 6 | """ 7 | Projects both boxes onto the given axis and checks for overlap. 
8 | """ 9 | min1, max1 = project_box(axis, box1) 10 | min2, max2 = project_box(axis, box2) 11 | 12 | return jax.lax.cond( 13 | jnp.logical_or(max1 < min2, max2 < min1), lambda: False, lambda: True 14 | ) 15 | 16 | # if max1 < min2 or max2 < min1: 17 | # return False 18 | # return True 19 | 20 | 21 | def project_box(axis, box): 22 | """ 23 | Projects a box onto an axis and returns the min and max projection values. 24 | """ 25 | corners = get_transformed_box_corners(box) 26 | projections = jnp.array([jnp.dot(corner, axis) for corner in corners]) 27 | return jnp.min(projections), jnp.max(projections) 28 | 29 | 30 | def get_transformed_box_corners(box): 31 | """ 32 | Returns the 8 corners of the box based on its dimensions and pose. 33 | """ 34 | dim, pose = box 35 | corners = [] 36 | for dx in [-dim[0] / 2, dim[0] / 2]: 37 | for dy in [-dim[1] / 2, dim[1] / 2]: 38 | for dz in [-dim[2] / 2, dim[2] / 2]: 39 | corner = jnp.array([dx, dy, dz, 1]) 40 | transformed_corner = pose @ corner 41 | corners.append(transformed_corner[:3]) 42 | return corners 43 | 44 | 45 | def are_bboxes_intersecting(dim1, dim2, pose1, pose2): 46 | """ 47 | Checks if two oriented bounding boxes (OBBs), which are AABBs with poses, are intersecting using the Separating 48 | Axis Theorem (SAT). 49 | 50 | Args: 51 | dim1 (jnp.ndarray): Bounding box dimensions of first object. Shape (3,) 52 | dim2 (jnp.ndarray): Bounding box dimensions of second object. Shape (3,) 53 | pose1 (jnp.ndarray): Pose of first object. Shape (4,4) 54 | pose2 (jnp.ndarray): Pose of second object. 
Shape (4,4) 55 | Output: 56 | Bool: Returns true if bboxes intersect 57 | """ 58 | box1 = (dim1, pose1) 59 | box2 = (dim2, pose2) 60 | 61 | # Axes to test - the face normals of each box 62 | axes_to_test = [] 63 | for i in range(3): # Add the face normals of box1 64 | axes_to_test.append(pose1[:3, i]) 65 | for i in range(3): # Add the face normals of box2 66 | axes_to_test.append(pose2[:3, i]) 67 | 68 | # Perform SAT on each axis 69 | count_ = 0 70 | for axis in axes_to_test: 71 | count_ += jax.lax.cond( 72 | separating_axis_test(axis, box1, box2), lambda: 0, lambda: -1 73 | ) 74 | 75 | return jax.lax.cond(count_ < 0, lambda: False, lambda: True) 76 | 77 | 78 | # For one reference pose (object 1) and many possible poses for the second object 79 | are_bboxes_intersecting_many = jax.vmap( 80 | are_bboxes_intersecting, in_axes=(None, None, None, 0) 81 | ) 82 | -------------------------------------------------------------------------------- /bayes3d/utils/occlusion.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as jnp 3 | 4 | import bayes3d as b 5 | 6 | 7 | def voxel_occupied_occluded_free(camera_pose, depth_image, grid, intrinsics, tolerance): 8 | grid_in_cam_frame = b.apply_transform(grid, b.t3d.inverse_pose(camera_pose)) 9 | pixels = b.project_cloud_to_pixels(grid_in_cam_frame, intrinsics).astype(jnp.int32) 10 | valid_pixels = ( 11 | (0 <= pixels[:, 0]) 12 | * (0 <= pixels[:, 1]) 13 | * (pixels[:, 0] < intrinsics.width) 14 | * (pixels[:, 1] < intrinsics.height) 15 | ) 16 | real_depth_vals = depth_image[pixels[:, 1], pixels[:, 0]] * valid_pixels + ( 17 | 1 - valid_pixels 18 | ) * (intrinsics.far + 1.0) 19 | 20 | projected_depth_vals = grid_in_cam_frame[:, 2] 21 | occupied = jnp.abs(real_depth_vals - projected_depth_vals) < tolerance 22 | occluded = real_depth_vals < projected_depth_vals 23 | occluded = occluded * (1.0 - occupied) 24 | _free = (1.0 - occluded) * (1.0 - occupied) 25 | return 1.0 
* occupied + 0.5 * occluded 26 | 27 | 28 | voxel_occupied_occluded_free_jit = jax.jit(voxel_occupied_occluded_free) 29 | voxel_occupied_occluded_free_parallel_camera = jax.jit( 30 | jax.vmap(voxel_occupied_occluded_free, in_axes=(0, None, None, None, None)) 31 | ) 32 | voxel_occupied_occluded_free_parallel_camera_depth = jax.jit( 33 | jax.vmap(voxel_occupied_occluded_free, in_axes=(0, 0, None, None, None)) 34 | ) 35 | -------------------------------------------------------------------------------- /bayes3d/viz/__init__.py: -------------------------------------------------------------------------------- 1 | from .meshcatviz import * 2 | from .viz import * 3 | -------------------------------------------------------------------------------- /bayes3d/viz/meshcatviz.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as jnp 2 | import meshcat 3 | import meshcat.geometry as g 4 | import numpy as np 5 | from matplotlib.colors import rgb2hex 6 | 7 | import bayes3d.transforms_3d as t3d 8 | 9 | RED = np.array([1.0, 0.0, 0.0]) 10 | GREEN = np.array([0.0, 1.0, 0.0]) 11 | BLUE = np.array([0.0, 0.0, 1.0]) 12 | 13 | VISUALIZER = None 14 | 15 | 16 | def setup_visualizer(): 17 | global VISUALIZER 18 | VISUALIZER = meshcat.Visualizer() 19 | set_background_color([1, 1, 1]) 20 | 21 | 22 | def get_visualizer(): 23 | global VISUALIZER 24 | return VISUALIZER 25 | 26 | 27 | def set_background_color(color): 28 | VISUALIZER["/Background"].set_property("top_color", color) 29 | VISUALIZER["/Background"].set_property("bottom_color", color) 30 | 31 | 32 | def clear_visualizer(): 33 | global VISUALIZER 34 | VISUALIZER.delete() 35 | 36 | 37 | def set_pose(channel, pose): 38 | VISUALIZER[channel].set_transform(np.array(pose, dtype=np.float64)) 39 | 40 | 41 | def show_cloud(channel, point_cloud, color=None, size=0.01): 42 | global VISUALIZER 43 | if len(point_cloud.shape) == 3: 44 | point_cloud = t3d.point_cloud_image_to_points(point_cloud) 45 | 
point_cloud = np.transpose(np.array(point_cloud)) 46 | if color is None: 47 | color = np.zeros_like(point_cloud) 48 | elif len(color.shape) == 1: 49 | color = np.tile(color.reshape(-1, 1), (1, point_cloud.shape[1])) 50 | color = np.array(color) 51 | obj = g.PointCloud(point_cloud, color, size=size) 52 | VISUALIZER[channel].set_object(obj) 53 | 54 | 55 | def show_trimesh(channel, mesh, color=None, wireframe=False, opacity=1.0): 56 | global VISUALIZER 57 | if color is None: 58 | color = [1, 0, 0] 59 | material = g.MeshLambertMaterial( 60 | color=int(rgb2hex(color)[1:], 16), wireframe=wireframe, opacity=opacity 61 | ) 62 | obj = g.TriangularMeshGeometry(mesh.vertices, mesh.faces) 63 | VISUALIZER[channel].set_object(obj, material) 64 | 65 | 66 | def show_pose(channel, pose, size=0.1): 67 | global VISUALIZER 68 | pose_x = t3d.transform_from_pos(jnp.array([size / 2.0, 0.0, 0.0])) 69 | objx = g.Box(np.array([size, size / 10.0, size / 10.0])) 70 | matx = g.MeshLambertMaterial(color=0xF41515, reflectivity=0.8) 71 | 72 | pose_y = t3d.transform_from_pos(jnp.array([0.0, size / 2.0, 0.0])) 73 | objy = g.Box(np.array([size / 10.0, size, size / 10.0])) 74 | maty = g.MeshLambertMaterial(color=0x40EC00, reflectivity=0.8) 75 | 76 | pose_z = t3d.transform_from_pos(jnp.array([0.0, 0.0, size / 2.0])) 77 | objz = g.Box(np.array([size / 10.0, size / 10.0, size])) 78 | matz = g.MeshLambertMaterial(color=0x0B5CFC, reflectivity=0.8) 79 | 80 | VISUALIZER[channel]["x"].set_object(objx, matx) 81 | VISUALIZER[channel]["x"].set_transform(np.array(pose @ pose_x, dtype=np.float64)) 82 | VISUALIZER[channel]["y"].set_object(objy, maty) 83 | VISUALIZER[channel]["y"].set_transform(np.array(pose @ pose_y, dtype=np.float64)) 84 | VISUALIZER[channel]["z"].set_object(objz, matz) 85 | VISUALIZER[channel]["z"].set_transform(np.array(pose @ pose_z, dtype=np.float64)) 86 | -------------------------------------------------------------------------------- /docker/Dockerfile: 
-------------------------------------------------------------------------------- 1 | # Base container: CUDA 12.2.1, cuDNN 8.9.4, Python 3.10, PyTorch 2.1.0 2 | ARG BASE_IMG=nvcr.io/nvidia/pytorch:23.08-py3 3 | FROM ${BASE_IMG} 4 | 5 | WORKDIR /workspace 6 | 7 | # Install Bayes3d dependencies, including a local genjax install 8 | COPY ./docker/requirements_docker.txt /workspace/requirements.txt 9 | COPY ./genjax /workspace/genjax 10 | RUN pip install -r /workspace/requirements.txt 11 | RUN pip install -r /workspace/genjax/requirements.txt 12 | RUN pip install /workspace/genjax 13 | 14 | # Install JAX (0.4.16) and OpenGL dependencies 15 | ARG DEBIAN_FRONTEND=noninteractive 16 | RUN apt-get update 17 | RUN pip install --upgrade "jax[cuda12_pip]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html 18 | RUN apt-get install -y mesa-common-dev libegl1-mesa-dev libglfw3-dev libgl1-mesa-dev libglu1-mesa-dev 19 | 20 | # Cleanup and prepare env variables for graphics 21 | RUN rm -rf /workspace/requirements.txt 22 | RUN rm -rf /workspace/genjax 23 | ENV NVIDIA_VISIBLE_DEVICES all 24 | ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,display 25 | -------------------------------------------------------------------------------- /docker/build.sh: -------------------------------------------------------------------------------- 1 | ############ 2 | # Build a Docker container image for Bayes3D 3 | ############ 4 | 5 | SCRIPT=$(realpath "$0") 6 | DOCKERPATH=$(dirname "$SCRIPT") 7 | BAYES3DPATH=$(dirname "$DOCKERPATH") 8 | 9 | cd ${BAYES3DPATH} 10 | git clone ssh://git@github.com/probcomp/genjax.git # temp clone for dependency resolution ("git+ssh://" is a pip URL scheme, not a git transport) 11 | docker build -t bayes3d:latest ${BAYES3DPATH} # append --no-cache after changes to dockerfile 12 | rm -rf genjax 13 | -------------------------------------------------------------------------------- /docker/requirements_docker.txt: -------------------------------------------------------------------------------- 1 | scikit-learn
2 | wheel 3 | ipython 4 | jupyter 5 | numpy 6 | matplotlib 7 | pillow 8 | opencv-python-headless<4.3 9 | open3d 10 | graphviz 11 | distinctipy 12 | trimesh 13 | ninja 14 | pyransac3d 15 | meshcat 16 | h5py 17 | gdown 18 | pytest 19 | zmq 20 | tqdm 21 | jupyterlab 22 | imageio 23 | timm 24 | joblib 25 | pdoc3 26 | addict 27 | flax 28 | -------------------------------------------------------------------------------- /docker/run.sh: -------------------------------------------------------------------------------- 1 | ############ 2 | # Open an interactive Docker container shell for Bayes3D 3 | ############ 4 | 5 | SCRIPT=$(realpath "$0") 6 | DOCKERPATH=$(dirname "$SCRIPT") 7 | BAYES3DPATH=$(dirname "$DOCKERPATH") 8 | echo "Mounting $BAYES3DPATH into /workspace/bayes3d" 9 | docker run --runtime=nvidia -it -p 8888:8888 --gpus all --rm --ipc=host -v $(dirname "$BAYES3DPATH"):/workspace/ bayes3d:latest # mount the directory that contains Bayes3D into container 10 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ![Screenshot](logo.png) 2 | 3 | Bayes3D is a 3D scene perception system based on probabilistic inverse graphics. 4 | 5 | - **[Installation](installation.md)** 6 | - **[GCP Setup](gcp-setup.md)** 7 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installing Bayes3D 2 | 3 | ### Fetch repository and setup Python environment. 4 | ``` 5 | git clone https://github.com/probcomp/bayes3d.git 6 | cd bayes3d 7 | conda create -n bayes3d python=3.9 8 | conda activate bayes3d 9 | pip install -r requirements.txt 10 | pip install -e . 
11 | ``` 12 | 13 | ### Install GenJAX (optional) 14 | ``` 15 | pip install git+https://github.com/probcomp/genjax.git 16 | ``` 17 | 18 | ### Install JAX 19 | ``` 20 | pip install --upgrade "jax[cuda11_pip]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html 21 | ``` 22 | 23 | ### Install Torch 24 | ``` 25 | pip install torch torchvision torchaudio --upgrade --index-url https://download.pytorch.org/whl/cu118 26 | ``` 27 | -------------------------------------------------------------------------------- /docs/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/docs/logo.png -------------------------------------------------------------------------------- /download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Fetch one additional YCB object mesh ("google_16k" variant) and unpack it
# into assets/ycb_video_models/models/$1. 4 | function download_additional_ycb { 5 | filename="$1_google_16k.tgz" 6 | echo "Downloading additional ycb models: $1" 7 | wget "http://ycb-benchmarks.s3-website-us-east-1.amazonaws.com/data/google/$filename" 8 | tar -vxzf $filename -C assets/ycb_video_models/models 9 | rm $filename 10 | mv "assets/ycb_video_models/models/$1/google_16k"/* "assets/ycb_video_models/models/$1" 11 | rm -r "assets/ycb_video_models/models/$1/google_16k/" 12 | } 13 | 14 | 15 | mkdir -p assets/tum 16 | wget http://www.doc.ic.ac.uk/~ahanda/living_room_traj1_frei_png.tar.gz -P assets/tum 17 | tar -xf assets/tum/living_room_traj1_frei_png.tar.gz -C assets/tum 18 | 19 | export BOP_SITE=https://bop.felk.cvut.cz/media/data/bop_datasets 20 | mkdir -p assets/bop 21 | mkdir -p assets/ycb_video_models 22 | 23 | echo "Downloading ycb bop" 24 | wget $BOP_SITE/ycbv_base.zip -P assets/bop 25 | wget $BOP_SITE/ycbv_models.zip -P assets/bop 26 | wget $BOP_SITE/ycbv_test_bop19.zip -P assets/bop 27 | 28 | echo "Unpacking ycb bop" 29 | unzip assets/bop/ycbv_base.zip -d assets/bop 30 | unzip 
assets/bop/ycbv_models.zip -d assets/bop/ycbv 30 | unzip assets/bop/ycbv_test_bop19.zip -d assets/bop/ycbv 31 | 32 | echo "Removing zip files" 33 | rm assets/bop/ycbv_base.zip 34 | rm assets/bop/ycbv_models.zip 35 | rm assets/bop/ycbv_test_bop19.zip 36 | 37 | echo "Downloading ycb video models" 38 | file_id="1gmcDD-5bkJfcMKLZb3zGgH_HUFbulQWu" 39 | file_name="ycb_video_models.zip" 40 | gdown --id "${file_id}" -O "${file_name}" 41 | unzip "${file_name}" -d assets/ycb_video_models 42 | rm "${file_name}" 43 | 44 | download_additional_ycb 030_fork 45 | 46 | download_additional_ycb 032_knife 47 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Bayes3D 2 | 3 | plugins: 4 | - search 5 | - markdown-exec 6 | - mkdocstrings: 7 | enabled: !ENV [ENABLE_MKDOCSTRINGS, true] 8 | custom_templates: templates 9 | default_handler: python 10 | handlers: 11 | python: 12 | options: 13 | show_source: true 14 | show_root_heading: true 15 | show_root_full_path: false 16 | show_symbol_type_heading: true 17 | show_symbol_type_toc: true 18 | 19 | 20 | theme: 21 | name: material 22 | logo: logo.png 23 | icon: 24 | logo: material/cloud 25 | font: 26 | text: Ubuntu 27 | code: Ubuntu Mono 28 | static_templates: 29 | - 404.html 30 | features: 31 | - search.highlight 32 | - search.share 33 | - search.suggest 34 | palette: 35 | # Palette toggle for light mode 36 | - scheme: default 37 | primary: white 38 | toggle: 39 | icon: material/brightness-7 40 | name: Switch to dark mode 41 | 42 | # Palette toggle for dark mode 43 | - scheme: slate 44 | primary: black 45 | toggle: 46 | icon: material/brightness-4 47 | name: Switch to light mode 48 | 49 | nav: 50 | - 'index.md' 51 | - Installation: 'installation.md' 52 | 53 | markdown_extensions: 54 | - attr_list 55 | - md_in_html 56 | -------------------------------------------------------------------------------- 
/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=64", "setuptools_scm>=8", "torch>=2.0.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "bayes3d" 7 | authors = [ 8 | {name = "Nishad Gothoskar", email = "nishadg@mit.edu"}, 9 | ] 10 | description = "Probabilistic inference in 3D." 11 | readme = "README.md" 12 | requires-python = ">=3.9" 13 | keywords = [ 14 | "artificial-intelligence", 15 | "probabilistic-programming", 16 | "bayesian-inference", 17 | "differentiable-programming" 18 | ] 19 | license = {text = "Apache 2.0"} 20 | classifiers = [ 21 | "Development Status :: 4 - Beta", 22 | "Programming Language :: Python :: 3.9", 23 | "Programming Language :: Python :: 3.10", 24 | "Programming Language :: Python :: 3.11", 25 | "Programming Language :: Python :: 3.12" 26 | ] 27 | dependencies = [ 28 | "distinctipy", 29 | "genjax==0.1.1", 30 | "graphviz", 31 | "imageio", 32 | "matplotlib", 33 | "meshcat", 34 | "natsort", 35 | "numpy", 36 | "open3d", 37 | "opencv-python", 38 | "plyfile", 39 | "pyliblzfse", 40 | "pyransac3d", 41 | "tensorflow-probability", 42 | "timm", 43 | "trimesh", 44 | ] 45 | dynamic = ["version", "optional-dependencies"] 46 | 47 | 48 | [tool.ruff] 49 | exclude = [ 50 | ".bzr", 51 | ".direnv", 52 | ".eggs", 53 | ".git", 54 | ".git-rewrite", 55 | ".hg", 56 | ".mypy_cache", 57 | ".nox", 58 | ".pants.d", 59 | ".pytype", 60 | ".ruff_cache", 61 | ".svn", 62 | ".tox", 63 | ".venv", 64 | "__pypackages__", 65 | "_build", 66 | "buck-out", 67 | "build", 68 | "dist", 69 | "node_modules", 70 | "venv" 71 | ] 72 | # extend-include = ["*.ipynb"] 73 | line-length = 88 74 | indent-width = 4 75 | 76 | [tool.ruff.lint] 77 | exclude = ["bayes3d/_mkl/*.py"] 78 | extend-select = ["I"] 79 | select = ["E4", "E7", "E9", "F"] 80 | 81 | # F403 disables errors from `*` imports, which we currently use heavily. 
82 | ignore = ["F403"] 83 | fixable = ["ALL"] 84 | unfixable = [] 85 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 86 | 87 | [tool.ruff.format] 88 | quote-style = "double" 89 | indent-style = "space" 90 | skip-magic-trailing-comma = false 91 | line-ending = "auto" 92 | 93 | 94 | [tool.setuptools_scm] 95 | 96 | 97 | [tool.setuptools.packages.find] 98 | include = ["bayes3d"] 99 | namespaces = false 100 | 101 | [tool.setuptools.dynamic] 102 | optional-dependencies = {dev = { file = ["requirements-dev.txt"] }} 103 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | ipython 2 | jupyter 3 | jupyterlab 4 | joblib 5 | flax 6 | mkdocs 7 | mkdocs-material 8 | pyliblzfse 9 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/.gitignore: -------------------------------------------------------------------------------- 1 | #------------- 2 | # my ignores 3 | # ------------- 4 | _unstaged/ 5 | unstaged/ 6 | data/ 7 | outputs/ 8 | 9 | # _files are private, helps keeping the repo clean 10 | # while prototyping 11 | **/_*/ 12 | **/_* 13 | **/_*.* 14 | !**/__*__.py 15 | **/.DS_Store 16 | 17 | 18 | # github doesn't like big files 19 | # exclude where necessary 20 | shared/ 21 | shared 22 | local/ 23 | local 24 | # **/data/ 25 | 26 | # tensorboard runs folder and 27 | # pytorch lightning stuff 28 | **/runs/ 29 | **/lightning_logs/ 30 | **/logs/ 31 | 32 | # ------------- 33 | # nbdev ignores 34 | # ------------- 35 | _docs/ 36 | _proc/ 37 | 38 | *.bak 39 | .gitattributes 40 | .last_checked 41 | .gitconfig 42 | *.bak 43 | *.log 44 | *~ 45 | ~* 46 | _tmp* 47 | tmp* 48 | tags 49 | *.pkg 50 | 51 | # Byte-compiled / optimized / DLL files 52 | __pycache__/ 53 | *.py[cod] 54 | *$py.class 55 | 56 | # C extensions 57 | *.so 58 | 59 | # Distribution / packaging 60 | .Python 61 | env/ 62 | 
build/ 63 | develop-eggs/ 64 | dist/ 65 | downloads/ 66 | eggs/ 67 | .eggs/ 68 | lib/ 69 | lib64/ 70 | parts/ 71 | sdist/ 72 | var/ 73 | wheels/ 74 | *.egg-info/ 75 | .installed.cfg 76 | *.egg 77 | 78 | # PyInstaller 79 | # Usually these files are written by a python script from a template 80 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 81 | *.manifest 82 | *.spec 83 | 84 | # Installer logs 85 | pip-log.txt 86 | pip-delete-this-directory.txt 87 | 88 | # Unit test / coverage reports 89 | htmlcov/ 90 | .tox/ 91 | .coverage 92 | .coverage.* 93 | .cache 94 | nosetests.xml 95 | coverage.xml 96 | *.cover 97 | .hypothesis/ 98 | 99 | # Translations 100 | *.mo 101 | *.pot 102 | 103 | # Django stuff: 104 | *.log 105 | local_settings.py 106 | 107 | # Flask stuff: 108 | instance/ 109 | .webassets-cache 110 | 111 | # Scrapy stuff: 112 | .scrapy 113 | 114 | # Sphinx documentation 115 | docs/_build/ 116 | 117 | # PyBuilder 118 | target/ 119 | 120 | # Jupyter Notebook 121 | .ipynb_checkpoints 122 | 123 | # pyenv 124 | .python-version 125 | 126 | # celery beat schedule file 127 | celerybeat-schedule 128 | 129 | # SageMath parsed files 130 | *.sage.py 131 | 132 | # dotenv 133 | .env 134 | 135 | # virtualenv 136 | .venv 137 | venv/ 138 | ENV/ 139 | 140 | # Spyder project settings 141 | .spyderproject 142 | .spyproject 143 | 144 | # Rope project settings 145 | .ropeproject 146 | 147 | # mkdocs documentation 148 | /site 149 | 150 | # mypy 151 | .mypy_cache/ 152 | 153 | .vscode 154 | *.swp 155 | 156 | # osx generated files 157 | .DS_Store 158 | .DS_Store? 
159 | .Trashes 160 | ehthumbs.db 161 | Thumbs.db 162 | .idea 163 | 164 | # pytest 165 | .pytest_cache 166 | 167 | # tools/trust-doc-nbs 168 | docs_src/.last_checked 169 | 170 | # symlinks to fastai 171 | docs_src/fastai 172 | tools/fastai 173 | 174 | # link checker 175 | checklink/cookies.txt 176 | 177 | # .gitconfig is now autogenerated 178 | .gitconfig 179 | 180 | # Quarto installer 181 | .deb 182 | .pkg 183 | 184 | # Quarto 185 | .quarto 186 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/00a - Types.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#|default_exp types" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 5, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "#|export\n", 19 | "from typing import Any, NamedTuple\n", 20 | "import numpy as np\n", 21 | "import jax\n", 22 | "import jaxlib\n", 23 | "\n", 24 | "\n", 25 | "Array = np.ndarray | jax.Array\n", 26 | "Shape = int | tuple[int, ...]\n", 27 | "Bool = Array\n", 28 | "Float = Array\n", 29 | "Int = Array\n", 30 | "FaceIndex = int\n", 31 | "FaceIndices = Array\n", 32 | "ArrayN = Array\n", 33 | "Array3 = Array\n", 34 | "Array2 = Array\n", 35 | "ArrayNx2 = Array\n", 36 | "ArrayNx3 = Array\n", 37 | "Matrix = jaxlib.xla_extension.ArrayImpl\n", 38 | "PrecisionMatrix = Matrix\n", 39 | "CovarianceMatrix = Matrix\n", 40 | "CholeskyMatrix = Matrix\n", 41 | "SquareMatrix = Matrix\n", 42 | "Vector = Array\n", 43 | "Direction = Vector\n", 44 | "BaseVector = Vector" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [] 53 | } 54 | ], 55 | "metadata": { 56 | "kernelspec": { 57 | "display_name": "dcolmap", 58 | "language": "python", 59 | "name": "python3" 60 | }, 61 | 
"language_info": { 62 | "codemirror_mode": { 63 | "name": "ipython", 64 | "version": 3 65 | }, 66 | "file_extension": ".py", 67 | "mimetype": "text/x-python", 68 | "name": "python", 69 | "nbconvert_exporter": "python", 70 | "pygments_lexer": "ipython3", 71 | "version": "3.11.6" 72 | } 73 | }, 74 | "nbformat": 4, 75 | "nbformat_minor": 2 76 | } 77 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/data/likelihood_test/table_scene_1.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/_mkl/notebooks/data/likelihood_test/table_scene_1.npy -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/data/likelihood_test/table_scene_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/_mkl/notebooks/data/likelihood_test/table_scene_2.npy -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/data/likelihood_test/table_scene_3.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/_mkl/notebooks/data/likelihood_test/table_scene_3.npy -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/kubric/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "image": "kubricdockerhub/kubruntu" 3 | } 4 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/kubric/.gitignore: -------------------------------------------------------------------------------- 1 | /output/ 2 | 
/kubric/ 3 | /walking-astronaut/ 4 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/kubric/kubric_helper.py: -------------------------------------------------------------------------------- 1 | import kubric as kb 2 | import numpy as np 3 | 4 | rng = np.random.default_rng(2021) 5 | 6 | 7 | def get_linear_camera_motion_start_end( 8 | movement_speed: float, 9 | inner_radius: float = 8.0, 10 | outer_radius: float = 12.0, 11 | z_offset: float = 0.1, 12 | ): 13 | """Sample a linear path which starts and ends within a half-sphere shell.""" 14 | while True: 15 | camera_start = np.array( 16 | kb.sample_point_in_half_sphere_shell(inner_radius, outer_radius, z_offset) 17 | ) 18 | direction = rng.rand(3) - 0.5 19 | movement = direction / np.linalg.norm(direction) * movement_speed 20 | camera_end = camera_start + movement 21 | if ( 22 | inner_radius <= np.linalg.norm(camera_end) <= outer_radius 23 | and camera_end[2] > z_offset 24 | ): 25 | return camera_start, camera_end 26 | -------------------------------------------------------------------------------- /scripts/_mkl/notebooks/nbexporter.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Exports notebooks to `.py` files using `nbdev.nb_export`. 4 | 5 | """ 6 | 7 | import glob 8 | import os 9 | from pathlib import Path 10 | 11 | from nbdev.export import nb_export 12 | 13 | NBS = "." 
14 | LIB = "../../../bayes3d/_mkl/" 15 | 16 | 17 | class bcolors: 18 | BLUE = "\033[94m" 19 | CYAN = "\033[96m" 20 | GREEN = "\033[92m" 21 | PURPLE = "\033[95m" 22 | ENDC = "\033[0m" 23 | BOLD = "\033[1m" 24 | UNDERLINE = "\033[4m" 25 | 26 | 27 | def main(): 28 | lib_path = Path(__file__).parents[0] / LIB 29 | rel_lib_path = os.path.relpath(lib_path) 30 | 31 | rel_nbs_path = os.path.relpath(Path(__file__).parents[0] / NBS) 32 | file_pattern = f"{rel_nbs_path}/**/[a-zA-Z0-9]*.ipynb" 33 | 34 | print(f"{bcolors.BLUE}Trying to export the following files") 35 | 36 | for fname in glob.glob(file_pattern, recursive=True): 37 | print(f"\t{bcolors.PURPLE}{fname}{bcolors.ENDC}") 38 | nb_export(fname, lib_path=rel_lib_path) 39 | 40 | print(f"{bcolors.BLUE}to{bcolors.ENDC}") 41 | print(f"\t{bcolors.PURPLE}{bcolors.BOLD}{rel_lib_path}{bcolors.ENDC}") 42 | 43 | 44 | if __name__ == "__main__": 45 | main() 46 | -------------------------------------------------------------------------------- /scripts/experiments/collaborations/09-15-2023-15:02:04.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/collaborations/09-15-2023-15:02:04.pkl -------------------------------------------------------------------------------- /scripts/experiments/collaborations/arijit_physics.py: -------------------------------------------------------------------------------- 1 | import genjax 2 | import jax 3 | import jax.numpy as jnp 4 | 5 | import bayes3d as b 6 | import bayes3d.genjax 7 | 8 | b.setup_visualizer() 9 | 10 | 11 | @genjax.gen 12 | def body_fun(prev): 13 | (t, pose, velocity) = prev 14 | velocity = b.gaussian_vmf_pose(velocity, 0.01, 10000.0) @ "velocity" 15 | pose = b.gaussian_vmf_pose(pose @ velocity, 0.01, 10000.0) @ "pose" 16 | # Render 17 | return (t + 1, pose, velocity) 18 | 19 | 20 | # Creating a `SwitchCombinator` via the preferred `new` class 
method. 21 | 22 | 23 | @genjax.gen 24 | def model(T): 25 | pose = b.uniform_pose(jnp.ones(3) * -1.0, jnp.ones(3) * 1.0) @ "init_pose" 26 | velocity = b.gaussian_vmf_pose(jnp.eye(4), 0.01, 10000.0) @ "init_velocity" 27 | _evolve = ( 28 | genjax.UnfoldCombinator.new(body_fun, 100)(50, (0, pose, velocity)) @ "dynamics" 29 | ) 30 | return 1.0 31 | 32 | 33 | key = jax.random.PRNGKey(314159) 34 | tr = model.simulate(key, (10,)) 35 | poses = tr["dynamics"]["pose"] 36 | for i in range(poses.shape[0]): 37 | b.show_pose(f"{i}", poses[i]) 38 | 39 | # TODO: 40 | # 1. Add rendering and images likelihood 41 | # Do simple SMC tracking of one object 42 | # 2. Make this multiobject 43 | -------------------------------------------------------------------------------- /scripts/experiments/deeplearning/kubric_dataset_gen/kubric_dataset_gen.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax 4 | import jax.numpy as jnp 5 | import numpy as np 6 | import trimesh 7 | from IPython import embed 8 | 9 | import bayes3d as j 10 | 11 | # --- creating the model dir from the working directory 12 | model_dir = os.path.join(j.utils.get_assets_dir(), "ycb_video_models/models") 13 | print(f"{model_dir} exists: {os.path.exists(model_dir)}") 14 | model_names = j.ycb_loader.MODEL_NAMES 15 | IDX = 13 16 | name = model_names[IDX] 17 | print(name) 18 | 19 | camera_pose = j.t3d.transform_from_pos_target_up( 20 | jnp.array([0.0, 0.5, 0.0]), 21 | jnp.array([0.0, 0.0, 0.0]), 22 | jnp.array([0.0, 0.0, 1.0]), 23 | ) 24 | 25 | 26 | bop_ycb_dir = os.path.join(j.utils.get_assets_dir(), "bop/ycbv") 27 | rgbd, gt_ids, gt_poses, masks = j.ycb_loader.get_test_img("52", "1", bop_ycb_dir) 28 | intrinsics = j.Intrinsics( 29 | height=rgbd.intrinsics.height, 30 | width=rgbd.intrinsics.width, 31 | fx=rgbd.intrinsics.fx, 32 | fy=rgbd.intrinsics.fx, 33 | cx=rgbd.intrinsics.width / 2.0, 34 | cy=rgbd.intrinsics.height / 2.0, 35 | near=0.001, 36 | far=3.0, 37 | ) 
38 | 39 | 40 | NUM_IMAGES_PER_ITER = 10 41 | NUM_ITER = 100 42 | 43 | for iter in range(NUM_ITER): 44 | print("Iteration: ", iter) 45 | key = jax.random.PRNGKey(iter) 46 | object_poses = jax.vmap( 47 | lambda key: j.distributions.gaussian_vmf(key, 0.00001, 0.001) 48 | )(jax.random.split(key, NUM_IMAGES_PER_ITER)) 49 | object_poses = jnp.einsum("ij,ajk", j.t3d.inverse_pose(camera_pose), object_poses) 50 | 51 | mesh_paths = [] 52 | mesh_path = os.path.join(model_dir, name, "textured.obj") 53 | for _ in range(NUM_IMAGES_PER_ITER): 54 | mesh_paths.append(mesh_path) 55 | _, offset_pose = j.mesh.center_mesh(trimesh.load(mesh_path), return_pose=True) 56 | 57 | all_data = j.kubric_interface.render_multiobject_parallel( 58 | mesh_paths, 59 | object_poses[None, :, ...], 60 | intrinsics, 61 | scaling_factor=1.0, 62 | lighting=3.0, 63 | ) # multi img singleobj 64 | gt_poses = object_poses @ offset_pose 65 | 66 | DATASET_FILENAME = f"dataset_{iter}.npz" # npz file 67 | DATASET_FILE = os.path.join( 68 | j.utils.get_assets_dir(), f"datasets/{DATASET_FILENAME}" 69 | ) 70 | np.savez( 71 | DATASET_FILE, 72 | rgbds=all_data, 73 | poses=gt_poses, 74 | id=IDX, 75 | name=model_names[IDX], 76 | intrinsics=intrinsics, 77 | mesh_path=mesh_path, 78 | ) 79 | 80 | rgb_images = j.hstack_images([j.get_rgb_image(r.rgb) for r in all_data]).save( 81 | f"dataset_{iter}.png" 82 | ) 83 | 84 | embed() 85 | -------------------------------------------------------------------------------- /scripts/experiments/deeplearning/sam/sam.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | import sys 4 | import warnings 5 | 6 | import jax.numpy as jnp 7 | import numpy as np 8 | from segment_anything import SamAutomaticMaskGenerator, build_sam 9 | 10 | import bayes3d as j 11 | 12 | sys.path.extend(["/home/nishadgothoskar/ptamp/pybullet_planning"]) 13 | sys.path.extend(["/home/nishadgothoskar/ptamp"]) 14 | warnings.filterwarnings("ignore") 15 | 16 
| 17 | bop_ycb_dir = os.path.join(j.utils.get_assets_dir(), "bop/ycbv") 18 | rgbd, gt_ids, gt_poses, masks = j.ycb_loader.get_test_img("52", "1", bop_ycb_dir) 19 | 20 | test_pkl_file = os.path.join( 21 | j.utils.get_assets_dir(), "sample_imgs/strawberry_error.pkl" 22 | ) 23 | test_pkl_file = os.path.join( 24 | j.utils.get_assets_dir(), "sample_imgs/knife_spoon_box_real.pkl" 25 | ) 26 | test_pkl_file = os.path.join(j.utils.get_assets_dir(), "sample_imgs/red_lego_multi.pkl") 27 | test_pkl_file = os.path.join(j.utils.get_assets_dir(), "sample_imgs/demo2_nolight.pkl") 28 | 29 | file = open(test_pkl_file, "rb") 30 | camera_images = pickle.load(file)["camera_images"] 31 | images = [j.RGBD.construct_from_camera_image(c) for c in camera_images] 32 | rgbd = images[0] 33 | 34 | j.get_rgb_image(rgbd.rgb).save("rgb.png") 35 | 36 | sam = build_sam( 37 | checkpoint="/home/nishadgothoskar/jax3dp3/assets/sam/sam_vit_h_4b8939.pth" 38 | ) 39 | sam.to(device="cuda") 40 | 41 | mask_generator = SamAutomaticMaskGenerator(sam) 42 | boxes = mask_generator.generate(np.array(rgbd.rgb)) 43 | 44 | full_segmentation = jnp.ones(rgbd.rgb.shape[:2]) * -1.0 45 | num_objects_so_far = 0 46 | for i in range(len(boxes)): 47 | seg_mask = jnp.array(boxes[i]["segmentation"]) 48 | 49 | matched = False 50 | for jj in range(num_objects_so_far): 51 | seg_mask_existing_object = full_segmentation == jj 52 | 53 | intersection = seg_mask * seg_mask_existing_object 54 | if intersection[seg_mask].mean() > 0.9: 55 | matched = True 56 | 57 | if not matched: 58 | full_segmentation = full_segmentation.at[seg_mask].set(num_objects_so_far) 59 | num_objects_so_far += 1 60 | 61 | segmentation_image = j.get_depth_image( 62 | full_segmentation + 1, max=full_segmentation.max() + 2 63 | ) 64 | seg_viz = j.get_depth_image(seg_mask) 65 | j.hstack_images([segmentation_image, seg_viz]).save(f"{i}.png") 66 | 67 | full_segmentation = full_segmentation.at[seg_mask].set(i + 1) 68 | 69 | 70 | # sam = build_sam() 71 | # 
sam.to(device="cuda") 72 | # mask_generator = SamAutomaticMaskGenerator(sam) 73 | 74 | # j.get_rgb_image(rgbd.rgb).save("rgb.png") 75 | # mask_generator.generate(np.array(rgbd.rgb)) 76 | 77 | # mask_generator.generate(np.array(img)) 78 | 79 | # sam = sam_model_registry["default"](checkpoint=args.checkpoint) 80 | # _ = sam.to(device=args.device) 81 | # output_mode = "coco_rle" if args.convert_to_rle else "binary_mask" 82 | # amg_kwargs = get_amg_kwargs(args) 83 | # generator = SamAutomaticMaskGenerator(sam, output_mode=output_mode, **amg_kwargs) 84 | 85 | 86 | # mask_generator = SamAutomaticMaskGenerator["default"](build_sam()) 87 | -------------------------------------------------------------------------------- /scripts/experiments/deeplearning/tapir.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "925a7e99-0fc6-4ed0-9a05-9c9932840c9f", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [] 10 | } 11 | ], 12 | "metadata": { 13 | "kernelspec": { 14 | "display_name": "Python 3 (ipykernel)", 15 | "language": "python", 16 | "name": "python3" 17 | }, 18 | "language_info": { 19 | "codemirror_mode": { 20 | "name": "ipython", 21 | "version": 3 22 | }, 23 | "file_extension": ".py", 24 | "mimetype": "text/x-python", 25 | "name": "python", 26 | "nbconvert_exporter": "python", 27 | "pygments_lexer": "ipython3", 28 | "version": "3.9.16" 29 | } 30 | }, 31 | "nbformat": 4, 32 | "nbformat_minor": 5 33 | } 34 | -------------------------------------------------------------------------------- /scripts/experiments/gaussian_splatting/optimization.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax.numpy as jnp 4 | import numpy as np 5 | import torch 6 | 7 | import bayes3d as b 8 | 9 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 10 | 11 | model_dir = 
os.path.join(b.utils.get_assets_dir(), "bop/ycbv/models") 12 | mesh_path = os.path.join(model_dir, "obj_" + "{}".format(3).rjust(6, "0") + ".ply") 13 | mesh = b.utils.load_mesh(mesh_path) 14 | vertices = torch.tensor(np.array(jnp.array(mesh.vertices) / 1000.0), device=device) 15 | -------------------------------------------------------------------------------- /scripts/experiments/icra/experiment_server/plane_pc_video_capture.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/icra/experiment_server/plane_pc_video_capture.npy -------------------------------------------------------------------------------- /scripts/experiments/icra/experiment_server/toy_final.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/icra/experiment_server/toy_final.ply -------------------------------------------------------------------------------- /scripts/experiments/icra/experiment_server/toy_plane.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/icra/experiment_server/toy_plane.ply -------------------------------------------------------------------------------- /scripts/experiments/icra/experiment_server/toy_plane_video_capture.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/icra/experiment_server/toy_plane_video_capture.ply -------------------------------------------------------------------------------- /scripts/experiments/icra/fork_knife/m1.obj: 
-------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -0.07500000 -0.01000000 -0.01000000 3 | v -0.07500000 -0.01000000 0.01000000 4 | v -0.07500000 0.01000000 -0.01000000 5 | v -0.07500000 0.01000000 0.01000000 6 | v 0.07500000 -0.01000000 -0.01000000 7 | v 0.07500000 -0.01000000 0.01000000 8 | v 0.07500000 0.01000000 -0.01000000 9 | v 0.07500000 0.01000000 0.01000000 10 | v 0.02500000 -0.02500000 -0.02500000 11 | v 0.02500000 -0.02500000 0.02500000 12 | v 0.02500000 0.02500000 -0.02500000 13 | v 0.02500000 0.02500000 0.02500000 14 | v 0.07500000 -0.02500000 -0.02500000 15 | v 0.07500000 -0.02500000 0.02500000 16 | v 0.07500000 0.02500000 -0.02500000 17 | v 0.07500000 0.02500000 0.02500000 18 | vn -0.57735027 -0.57735027 -0.57735027 19 | vn -0.57735027 -0.57735027 0.57735027 20 | vn -0.57735027 0.57735027 -0.57735027 21 | vn -0.57735027 0.57735027 0.57735027 22 | vn 0.57735027 -0.57735027 -0.57735027 23 | vn 0.57735027 -0.57735027 0.57735027 24 | vn 0.57735027 0.57735027 -0.57735027 25 | vn 0.57735027 0.57735027 0.57735027 26 | vn -0.57735027 -0.57735027 -0.57735027 27 | vn -0.57735027 -0.57735027 0.57735027 28 | vn -0.57735027 0.57735027 -0.57735027 29 | vn -0.57735027 0.57735027 0.57735027 30 | vn 0.57735027 -0.57735027 -0.57735027 31 | vn 0.57735027 -0.57735027 0.57735027 32 | vn 0.57735027 0.57735027 -0.57735027 33 | vn 0.57735027 0.57735027 0.57735027 34 | f 2//2 4//4 1//1 35 | f 5//5 2//2 1//1 36 | f 1//1 4//4 3//3 37 | f 3//3 5//5 1//1 38 | f 2//2 8//8 4//4 39 | f 6//6 2//2 5//5 40 | f 6//6 8//8 2//2 41 | f 4//4 8//8 3//3 42 | f 7//7 5//5 3//3 43 | f 3//3 8//8 7//7 44 | f 7//7 6//6 5//5 45 | f 8//8 6//6 7//7 46 | f 10//10 12//12 9//9 47 | f 13//13 10//10 9//9 48 | f 9//9 12//12 11//11 49 | f 11//11 13//13 9//9 50 | f 10//10 16//16 12//12 51 | f 14//14 10//10 13//13 52 | f 14//14 16//16 10//10 53 | f 12//12 16//16 11//11 54 | f 15//15 13//13 11//11 55 | f 11//11 16//16 15//15 56 | f 
15//15 14//14 13//13 57 | f 16//16 14//14 15//15 58 | -------------------------------------------------------------------------------- /scripts/experiments/icra/fork_knife/m2.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -0.07500000 -0.01000000 -0.01000000 3 | v -0.07500000 -0.01000000 0.01000000 4 | v -0.07500000 0.01000000 -0.01000000 5 | v -0.07500000 0.01000000 0.01000000 6 | v 0.07500000 -0.01000000 -0.01000000 7 | v 0.07500000 -0.01000000 0.01000000 8 | v 0.07500000 0.01000000 -0.01000000 9 | v 0.07500000 0.01000000 0.01000000 10 | v 0.02500000 -0.02500000 -0.02500000 11 | v 0.02500000 -0.02500000 0.02500000 12 | v 0.02500000 0.02500000 -0.02500000 13 | v 0.02500000 0.02500000 0.02500000 14 | v 0.07500000 -0.02500000 -0.02500000 15 | v 0.07500000 -0.02500000 0.02500000 16 | v 0.07500000 0.02500000 -0.02500000 17 | v 0.07500000 0.02500000 0.02500000 18 | v -0.07500000 -0.02500000 -0.02500000 19 | v -0.07500000 -0.02500000 0.02500000 20 | v -0.07500000 0.02500000 -0.02500000 21 | v -0.07500000 0.02500000 0.02500000 22 | v -0.02500000 -0.02500000 -0.02500000 23 | v -0.02500000 -0.02500000 0.02500000 24 | v -0.02500000 0.02500000 -0.02500000 25 | v -0.02500000 0.02500000 0.02500000 26 | vn -0.57735027 -0.57735027 -0.57735027 27 | vn -0.57735027 -0.57735027 0.57735027 28 | vn -0.57735027 0.57735027 -0.57735027 29 | vn -0.57735027 0.57735027 0.57735027 30 | vn 0.57735027 -0.57735027 -0.57735027 31 | vn 0.57735027 -0.57735027 0.57735027 32 | vn 0.57735027 0.57735027 -0.57735027 33 | vn 0.57735027 0.57735027 0.57735027 34 | vn -0.57735027 -0.57735027 -0.57735027 35 | vn -0.57735027 -0.57735027 0.57735027 36 | vn -0.57735027 0.57735027 -0.57735027 37 | vn -0.57735027 0.57735027 0.57735027 38 | vn 0.57735027 -0.57735027 -0.57735027 39 | vn 0.57735027 -0.57735027 0.57735027 40 | vn 0.57735027 0.57735027 -0.57735027 41 | vn 0.57735027 0.57735027 0.57735027 42 | vn -0.57735027 
-0.57735027 -0.57735027 43 | vn -0.57735027 -0.57735027 0.57735027 44 | vn -0.57735027 0.57735027 -0.57735027 45 | vn -0.57735027 0.57735027 0.57735027 46 | vn 0.57735027 -0.57735027 -0.57735027 47 | vn 0.57735027 -0.57735027 0.57735027 48 | vn 0.57735027 0.57735027 -0.57735027 49 | vn 0.57735027 0.57735027 0.57735027 50 | f 2//2 4//4 1//1 51 | f 5//5 2//2 1//1 52 | f 1//1 4//4 3//3 53 | f 3//3 5//5 1//1 54 | f 2//2 8//8 4//4 55 | f 6//6 2//2 5//5 56 | f 6//6 8//8 2//2 57 | f 4//4 8//8 3//3 58 | f 7//7 5//5 3//3 59 | f 3//3 8//8 7//7 60 | f 7//7 6//6 5//5 61 | f 8//8 6//6 7//7 62 | f 10//10 12//12 9//9 63 | f 13//13 10//10 9//9 64 | f 9//9 12//12 11//11 65 | f 11//11 13//13 9//9 66 | f 10//10 16//16 12//12 67 | f 14//14 10//10 13//13 68 | f 14//14 16//16 10//10 69 | f 12//12 16//16 11//11 70 | f 15//15 13//13 11//11 71 | f 11//11 16//16 15//15 72 | f 15//15 14//14 13//13 73 | f 16//16 14//14 15//15 74 | f 18//18 20//20 17//17 75 | f 21//21 18//18 17//17 76 | f 17//17 20//20 19//19 77 | f 19//19 21//21 17//17 78 | f 18//18 24//24 20//20 79 | f 22//22 18//18 21//21 80 | f 22//22 24//24 18//18 81 | f 20//20 24//24 19//19 82 | f 23//23 21//21 19//19 83 | f 19//19 24//24 23//23 84 | f 23//23 22//22 21//21 85 | f 24//24 22//22 23//23 86 | -------------------------------------------------------------------------------- /scripts/experiments/icra/slam/o3d_dense_slam.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "f4ef9863-8ab0-4c83-abb3-3ab4f7733c4c", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import jax.numpy as jnp" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "id": "8e04d8fc-c3b2-49c2-9d5f-212997d291bd", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": null, 24 | "id": "6b10d798-1c24-42ff-9762-5d6b0e4e28a6", 25 | 
"metadata": {}, 26 | "outputs": [], 27 | "source": [] 28 | } 29 | ], 30 | "metadata": { 31 | "kernelspec": { 32 | "display_name": "Python 3 (ipykernel)", 33 | "language": "python", 34 | "name": "python3" 35 | }, 36 | "language_info": { 37 | "codemirror_mode": { 38 | "name": "ipython", 39 | "version": 3 40 | }, 41 | "file_extension": ".py", 42 | "mimetype": "text/x-python", 43 | "name": "python", 44 | "nbconvert_exporter": "python", 45 | "pygments_lexer": "ipython3", 46 | "version": "3.9.18" 47 | } 48 | }, 49 | "nbformat": 4, 50 | "nbformat_minor": 5 51 | } 52 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/.gitignore: -------------------------------------------------------------------------------- 1 | _*.ipynb 2 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/info.md: -------------------------------------------------------------------------------- 1 | * `meshes` contains the four meshes visible (table, apple, occluders, door) in the the rotaiton experiment generated using Blender and their corresponding material (for 3DMax, if needed). 2 | * `videos` contains 50 videos with resolution `640 x 480` and fov `90`. Each subdirectory `videos/i` contains: 3 | * `experiment_video.mp4`: the generated video, with a framerate `30 fps`. 4 | * `experiment_stats.yaml`: contains information about the experiment and camera intrinsics (e.g. width, height, fov). 5 | * `frames/`: contains each RGB frame JPEG images. 6 | * `depths/`: contains each depth frame as loadable numpy arrays. 
7 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/meshes/apple.mtl: -------------------------------------------------------------------------------- 1 | # Blender MTL File: 'None' 2 | # Material Count: 1 3 | 4 | newmtl ID1_mat 5 | Ns 179.999996 6 | Ka 1.000000 1.000000 1.000000 7 | Kd 1.000000 0.000000 0.000000 8 | Ks 0.500000 0.500000 0.500000 9 | Ke 0.000000 0.000000 0.000000 10 | Ni 1.450000 11 | d 1.000000 12 | illum 2 13 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/meshes/door.mtl: -------------------------------------------------------------------------------- 1 | # Blender MTL File: 'None' 2 | # Material Count: 1 3 | 4 | newmtl DoorMat 5 | Ns 180.000044 6 | Ka 1.000000 1.000000 1.000000 7 | Kd 0.051624 0.600000 0.025396 8 | Ks 1.000000 1.000000 1.000000 9 | Ke 0.000000 0.000000 0.000000 10 | Ni 1.450000 11 | d 1.000000 12 | illum 3 13 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/meshes/occluder.mtl: -------------------------------------------------------------------------------- 1 | # Blender MTL File: 'None' 2 | # Material Count: 2 3 | 4 | newmtl cup_decals_mat 5 | Ns 179.999996 6 | Ka 1.000000 1.000000 1.000000 7 | Kd 0.000000 1.000000 1.000000 8 | Ks 0.500000 0.500000 0.500000 9 | Ke 0.000000 0.000000 0.000000 10 | Ni 1.450000 11 | d 1.000000 12 | illum 2 13 | 14 | newmtl cup_primary_mat 15 | Ns 179.999996 16 | Ka 1.000000 1.000000 1.000000 17 | Kd 1.000000 0.000000 0.000000 18 | Ks 0.500000 0.500000 0.500000 19 | Ke 0.000000 0.000000 0.000000 20 | Ni 1.450000 21 | d 1.000000 22 | illum 2 23 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/meshes/table.mtl: 
-------------------------------------------------------------------------------- 1 | # Blender MTL File: 'None' 2 | # Material Count: 3 3 | 4 | newmtl BaseboardMat 5 | Ns 180.000044 6 | Ka 1.000000 1.000000 1.000000 7 | Kd 0.263877 0.600000 0.358452 8 | Ks 1.000000 1.000000 1.000000 9 | Ke 0.000000 0.000000 0.000000 10 | Ni 1.450000 11 | d 1.000000 12 | illum 3 13 | 14 | newmtl CountertopMat 15 | Ns 180.000044 16 | Ka 1.000000 1.000000 1.000000 17 | Kd 0.263877 0.600000 0.358452 18 | Ks 1.000000 1.000000 1.000000 19 | Ke 0.000000 0.000000 0.000000 20 | Ni 1.450000 21 | d 1.000000 22 | illum 3 23 | 24 | newmtl IslandMat 25 | Ns 180.000044 26 | Ka 1.000000 1.000000 1.000000 27 | Kd 0.600000 0.277371 0.096340 28 | Ks 1.000000 1.000000 1.000000 29 | Ke 0.000000 0.000000 0.000000 30 | Ni 1.450000 31 | d 1.000000 32 | illum 3 33 | -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_0.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_0.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_1.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_1.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_10.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_10.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_100.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_100.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_101.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_101.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_102.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_102.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_103.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_103.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_11.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_11.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_12.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_12.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_13.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_13.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_14.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_14.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_15.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_15.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_16.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_16.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_17.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_17.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_18.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_18.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_19.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_19.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_2.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_20.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_20.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_21.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_21.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_22.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_22.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_23.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_23.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_24.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_24.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_25.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_25.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_26.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_26.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_27.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_27.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_28.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_28.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_29.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_29.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_3.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_3.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_30.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_30.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_31.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_31.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_32.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_32.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_33.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_33.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_34.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_34.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_35.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_35.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_36.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_36.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_37.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_37.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_38.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_38.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_39.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_39.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_4.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_4.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_40.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_40.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_41.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_41.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_42.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_42.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_43.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_43.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_44.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_44.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_45.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_45.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_46.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_46.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_47.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_47.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_48.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_48.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_49.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_49.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_5.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_5.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_50.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_50.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_51.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_51.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_52.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_52.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_53.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_53.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_54.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_54.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_55.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_55.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_56.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_56.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_57.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_57.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_58.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_58.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_59.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_59.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_6.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_6.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_60.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_60.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_61.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_61.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_62.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_62.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_63.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_63.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_64.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_64.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_65.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_65.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_66.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_66.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_67.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_67.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_68.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_68.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_69.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_69.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_7.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_7.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_70.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_70.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_71.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_71.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_72.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_72.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_73.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_73.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_74.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_74.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_75.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_75.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_76.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_76.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_77.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_77.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_78.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_78.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_79.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_79.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_8.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_8.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_80.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_80.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_81.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_81.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_82.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_82.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_83.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_83.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_84.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_84.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_85.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_85.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_86.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_86.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_87.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_87.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_88.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_88.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_89.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_89.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_9.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_9.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_90.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_90.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_91.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_91.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_92.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_92.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_93.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_93.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_94.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_94.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_95.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_95.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_96.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_96.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_97.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_97.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_98.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_98.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_99.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/depths/frame_99.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_0.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_0.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_1.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_1.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_10.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_10.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_100.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_100.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_101.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_101.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_102.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_102.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_103.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_103.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_11.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_11.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_12.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_12.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_13.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_13.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_14.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_14.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_15.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_15.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_16.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_16.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_17.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_17.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_18.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_18.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_19.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_19.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_2.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_2.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_20.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_20.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_21.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_21.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_22.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_22.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_23.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_23.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_24.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_24.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_25.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_25.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_26.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_26.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_27.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_27.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_28.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_28.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_29.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_29.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_3.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_3.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_30.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_30.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_31.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_31.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_32.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_32.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_33.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_33.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_34.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_34.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_35.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_35.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_36.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_36.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_37.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_37.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_38.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_38.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_39.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_39.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_4.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_4.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_40.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_40.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_41.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_41.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_42.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_42.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_43.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_43.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_44.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_44.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_45.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_45.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_46.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_46.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_47.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_47.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_48.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_48.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_49.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_49.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_5.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_5.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_50.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_50.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_51.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_51.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_52.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_52.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_53.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_53.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_54.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_54.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_55.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_55.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_56.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_56.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_57.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_57.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_58.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_58.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_59.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_59.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_6.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_6.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_60.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_60.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_61.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_61.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_62.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_62.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_63.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_63.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_64.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_64.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_65.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_65.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_66.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_66.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_67.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_67.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_68.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_68.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_69.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_69.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_7.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_7.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_70.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_70.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_71.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_71.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_72.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_72.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_73.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_73.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_74.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_74.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_75.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_75.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_76.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_76.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_77.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_77.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_78.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_78.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_79.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_79.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_8.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_8.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_80.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_80.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_81.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_81.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_82.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_82.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_83.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_83.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_84.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_84.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_85.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_85.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_86.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_86.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_87.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_87.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_88.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_88.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_89.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_89.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_9.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_9.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_90.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_90.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_91.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_91.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_92.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_92.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_93.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_93.npy -------------------------------------------------------------------------------- 
/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_94.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_94.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_95.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_95.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_96.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_96.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_97.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_97.npy -------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_98.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_98.npy 
-------------------------------------------------------------------------------- /scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_99.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/probcomp/bayes3d/4e2919dd82c4596b7baca570a15bb7f3a89566a4/scripts/experiments/mcs/cognitive-battery/data/videos/segmented/frame_99.npy -------------------------------------------------------------------------------- /scripts/experiments/tabletop/data_gen.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import genjax 4 | import jax 5 | import jax.numpy as jnp 6 | import joblib 7 | 8 | import bayes3d as b 9 | import bayes3d.genjax 10 | 11 | console = genjax.pretty(show_locals=False) 12 | 13 | intrinsics = b.Intrinsics( 14 | height=100, width=100, fx=500.0, fy=500.0, cx=50.0, cy=50.0, near=0.01, far=20.0 15 | ) 16 | 17 | b.setup_renderer(intrinsics) 18 | model_dir = os.path.join(b.utils.get_assets_dir(), "bop/ycbv/models") 19 | meshes = [] 20 | for idx in range(1, 22): 21 | mesh_path = os.path.join( 22 | model_dir, "obj_" + "{}".format(idx).rjust(6, "0") + ".ply" 23 | ) 24 | b.RENDERER.add_mesh_from_file(mesh_path, scaling_factor=1.0 / 1000.0) 25 | 26 | b.RENDERER.add_mesh_from_file( 27 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj"), 28 | scaling_factor=1.0 / 1000000000.0, 29 | ) 30 | 31 | table_pose = b.t3d.inverse_pose( 32 | b.t3d.transform_from_pos_target_up( 33 | jnp.array([0.0, 2.0, 1.20]), 34 | jnp.array([0.0, 0.0, 0.0]), 35 | jnp.array([0.0, 0.0, 1.0]), 36 | ) 37 | ) 38 | 39 | OUTLIER_VOLUME = 100.0 40 | key = jax.random.PRNGKey(500) 41 | 42 | importance_jit = jax.jit(b.genjax.model.importance) 43 | 44 | scene_id = 0 45 | while True: 46 | if scene_id >= 100: 47 | break 48 | key, (_, trace) = importance_jit( 49 | key, 50 | genjax.choice_map( 51 | { 52 | "parent_0": -1, 53 | "parent_1": 0, 54 | "parent_2": 0, 55 | 
"parent_3": 0, 56 | "id_0": jnp.int32(21), 57 | "camera_pose": jnp.eye(4), 58 | "root_pose_0": table_pose, 59 | "face_parent_1": 2, 60 | "face_parent_2": 2, 61 | "face_parent_3": 2, 62 | "face_child_1": 3, 63 | "face_child_2": 3, 64 | "face_child_3": 3, 65 | } 66 | ), 67 | ( 68 | jnp.arange(4), 69 | jnp.arange(22), 70 | jnp.array([-jnp.ones(3) * 100.0, jnp.ones(3) * 100.0]), 71 | jnp.array( 72 | [ 73 | jnp.array([-0.2, -0.2, -2 * jnp.pi]), 74 | jnp.array([0.2, 0.2, 2 * jnp.pi]), 75 | ] 76 | ), 77 | b.RENDERER.model_box_dims, 78 | OUTLIER_VOLUME, 79 | ), 80 | ) 81 | if (b.genjax.get_indices(trace) == 21).sum() > 1: 82 | continue 83 | 84 | joblib.dump( 85 | (trace.get_choices(), trace.get_args()), f"data/trace_{scene_id}.joblib" 86 | ) 87 | scene_id += 1 88 | -------------------------------------------------------------------------------- /scripts/ssh.py: -------------------------------------------------------------------------------- 1 | import paramiko 2 | from scp import SCPClient 3 | 4 | 5 | class SSHSender: 6 | def __init__(self, hostname, username, ssh_key_path, result_directory): 7 | self.ssh = paramiko.SSHClient() 8 | self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) 9 | self.ssh.connect(hostname, username=username, key_filename=ssh_key_path) 10 | self.scp = SCPClient(self.ssh.get_transport()) 11 | 12 | self.result_directory = result_directory 13 | 14 | 15 | sender = SSHSender( 16 | "34.123.143.56", 17 | "nishadgothoskar", 18 | "/Users/nishadgothoskar/.ssh/id_ed25519.pub", 19 | ".", 20 | ) 21 | sender = SSHSender( 22 | "34.123.143.56", 23 | "nishadgothoskar", 24 | "/Users/nishadgothoskar/.ssh/id_ed25519.pub", 25 | ".", 26 | ) 27 | -------------------------------------------------------------------------------- /test/test_bbox_intersect.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax 4 | import jax.numpy as jnp 5 | 6 | import bayes3d as b 7 | 8 | are_bboxes_intersecting_jit = 
jax.jit(b.utils.are_bboxes_intersecting) 9 | 10 | # set up renderer 11 | intrinsics = b.Intrinsics( 12 | height=100, width=100, fx=250, fy=250, cx=100 / 2.0, cy=100 / 2.0, near=0.1, far=20 13 | ) 14 | 15 | b.setup_renderer(intrinsics) 16 | 17 | b.RENDERER.add_mesh_from_file( 18 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj"), 19 | scaling_factor=0.1, 20 | mesh_name="cube_1", 21 | ) 22 | b.RENDERER.add_mesh_from_file( 23 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj"), 24 | scaling_factor=0.1, 25 | mesh_name="cube_2", 26 | ) 27 | 28 | # make poses intersect/collide/penetrate 29 | pose_1 = jnp.eye(4).at[:3, 3].set([-0.1, 0, 1.5]) 30 | pose_1 = pose_1 @ b.transform_from_axis_angle(jnp.array([1, 0, 0]), jnp.pi / 4) 31 | pose_2 = jnp.eye(4).at[:3, 3].set([-0.05, 0, 1.5]) 32 | pose_2 = pose_2 @ b.transform_from_axis_angle(jnp.array([1, 1, 1]), jnp.pi / 4) 33 | 34 | # make sure the output confirms the intersection 35 | b.scale_image( 36 | b.get_depth_image( 37 | b.RENDERER.render(jnp.stack([pose_1, pose_2]), jnp.array([0, 1]))[:, :, 2] 38 | ), 39 | 4, 40 | ).save("intersecting.png") 41 | is_intersecting = are_bboxes_intersecting_jit( 42 | b.RENDERER.model_box_dims[0], b.RENDERER.model_box_dims[1], pose_1, pose_2 43 | ) 44 | assert is_intersecting is True 45 | 46 | # make poses NOT intersect/collided/penetrate 47 | pose_2 = jnp.eye(4).at[:3, 3].set([0.04, 0, 1.5]) 48 | pose_2 = pose_2 @ b.transform_from_axis_angle(jnp.array([1, 1, 1]), jnp.pi / 4) 49 | 50 | # make sure the output confirms NO intersection 51 | b.scale_image( 52 | b.get_depth_image( 53 | b.RENDERER.render(jnp.stack([pose_1, pose_2]), jnp.array([0, 1]))[:, :, 2] 54 | ), 55 | 4, 56 | ).save("no_intersecting.png") 57 | is_intersecting = are_bboxes_intersecting_jit( 58 | b.RENDERER.model_box_dims[0], b.RENDERER.model_box_dims[1], pose_1, pose_2 59 | ) 60 | assert is_intersecting is False 61 | -------------------------------------------------------------------------------- 
/test/test_colmap.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import glob 3 | import subprocess 4 | from pathlib import Path 5 | 6 | import bayes3d as b 7 | import bayes3d.colmap 8 | 9 | parser = argparse.ArgumentParser() 10 | parser.add_argument("movie_path", help="Path to movie file", type=str) 11 | args = parser.parse_args() 12 | 13 | b.setup_visualizer() 14 | 15 | movie_file_path = Path(args.movie_path) 16 | 17 | dataset_path = Path(b.utils.get_assets_dir()) / Path( 18 | movie_file_path.name + "_colmap_dataset" 19 | ) 20 | input_path = dataset_path / Path("input") 21 | input_path.mkdir(parents=True, exist_ok=True) 22 | b.utils.video_to_images(movie_file_path, input_path) 23 | 24 | 25 | assets_dir = Path(b.utils.get_assets_dir()) 26 | script_path = assets_dir.parent / Path("scripts/run_colmap.py") 27 | 28 | subprocess.run([f"python {str(script_path)} -s {str(dataset_path)}"], shell=True) 29 | 30 | 31 | image_paths = sorted(glob.glob(str(input_path / Path("*.jpg")))) 32 | print(len(image_paths)) 33 | images = [b.viz.load_image_from_file(f) for f in image_paths] 34 | # b.make_gif_from_pil_images(images, "input.gif") 35 | (positions, colors, normals), train_cam_infos = b.colmap.readColmapSceneInfo( 36 | dataset_path, "images", False 37 | ) 38 | 39 | train_cam_infos[0].FovY 40 | 41 | b.clear() 42 | scaling_factor = 0.1 43 | poses = [ 44 | b.transform_from_rot_and_pos(i.R, i.T * scaling_factor) for i in train_cam_infos 45 | ] 46 | 47 | b.show_cloud("cloud", positions * scaling_factor) 48 | for i, p in enumerate(poses): 49 | b.show_pose(f"{i}", p) 50 | -------------------------------------------------------------------------------- /test/test_cosypose.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | 5 | import bayes3d as b 6 | from bayes3d.neural.cosypose_baseline import cosypose_utils 7 | 8 | bop_ycb_dir = 
os.path.join(b.utils.get_assets_dir(), "bop/ycbv") 9 | rgbd, gt_ids, gt_poses, masks = b.utils.ycb_loader.get_test_img( 10 | "55", "1592", bop_ycb_dir 11 | ) 12 | 13 | pred = cosypose_utils.cosypose_interface( 14 | np.array(rgbd.rgb), b.K_from_intrinsics(rgbd.intrinsics) 15 | ) 16 | -------------------------------------------------------------------------------- /test/test_genjax_model.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import genjax 4 | import jax 5 | import jax.numpy as jnp 6 | 7 | import bayes3d as b 8 | import bayes3d.genjax 9 | 10 | key = jax.random.PRNGKey(1) 11 | 12 | intrinsics = b.Intrinsics( 13 | height=100, width=100, fx=300.0, fy=300.0, cx=50.0, cy=50.0, near=0.01, far=20.0 14 | ) 15 | 16 | b.setup_renderer(intrinsics) 17 | b.RENDERER.add_mesh_from_file( 18 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj") 19 | ) 20 | b.RENDERER.add_mesh_from_file( 21 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj") 22 | ) 23 | 24 | importance_jit = jax.jit(b.model.importance)  # compile once at import time 25 | 26 | table_pose = b.t3d.inverse_pose( 27 | b.t3d.transform_from_pos_target_up( 28 | jnp.array([0.0, 4.8, 4.15]), 29 | jnp.array([0.0, 0.0, 0.0]), 30 | jnp.array([0.0, 0.0, 1.0]), 31 | ) 32 | ) 33 | 34 | enumerators = b.make_enumerator(["contact_params_1"]) 35 | 36 | 37 | def test_genjax_trace_contains_right_info(): 38 | key = jax.random.PRNGKey(1)  # local re-seed; shadows the module-level key 39 | low, high = jnp.array([-0.2, -0.2, -jnp.pi]), jnp.array([0.2, 0.2, jnp.pi]) 40 | weight, trace = importance_jit( 41 | key, 42 | genjax.choice_map( 43 | { 44 | "parent_0": -1, 45 | "parent_1": 0, 46 | "id_0": jnp.int32(1), 47 | "id_1": jnp.int32(0), 48 | "root_pose_0": table_pose, 49 | "camera_pose": jnp.eye(4), 50 | "face_parent_1": 3, 51 | "face_child_1": 2, 52 | "variance": 0.0001, 53 | "outlier_prob": 0.0001, 54 | "contact_params_1": jax.random.uniform( 55 | key, shape=(3,), minval=low, maxval=high 56 | ), 57 | } 58 | ), 59 | ( 60 |
jnp.arange(2), 61 | jnp.arange(22), 62 | jnp.array([-jnp.ones(3) * 100.0, jnp.ones(3) * 100.0]), 63 | jnp.array( 64 | [ 65 | jnp.array([-0.5, -0.5, -2 * jnp.pi]), 66 | jnp.array([0.5, 0.5, 2 * jnp.pi]), 67 | ] 68 | ), 69 | b.RENDERER.model_box_dims, 70 | 1.0, 71 | intrinsics.fx, 72 | ), 73 | ) 74 | 75 | _scores = enumerators.enumerate_choices_get_scores(trace, key, jnp.zeros((100, 3))) 76 | 77 | assert trace["parent_0"] == -1 78 | assert (trace["camera_pose"] == jnp.eye(4)).all() 79 | assert trace["id_0"] == 0  # NOTE(review): the choice map constrains "id_0" to jnp.int32(1) — confirm 0 is the intended expectation 80 | -------------------------------------------------------------------------------- /test/test_icp.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax 4 | import jax.numpy as jnp 5 | 6 | import bayes3d as b 7 | 8 | b.setup_visualizer() 9 | 10 | 11 | N = 100 12 | cloud = jax.random.uniform(jax.random.PRNGKey(10), shape=(N, 3)) * 0.1 13 | b.show_cloud("c", cloud) 14 | 15 | 16 | pose = b.distributions.gaussian_vmf_zero_mean(jax.random.PRNGKey(5), 0.1, 10.0) 17 | 18 | cloud_transformed = b.apply_transform(cloud, pose) 19 | b.show_cloud("d", cloud_transformed, color=b.RED) 20 | 21 | transform = b.utils.find_least_squares_transform_between_clouds( 22 | cloud, cloud_transformed 23 | ) 24 | 25 | print(jnp.abs(cloud - cloud_transformed).sum()) 26 | print(jnp.abs(cloud_transformed - b.apply_transform(cloud, transform)).sum()) 27 | 28 | 29 | intrinsics = b.Intrinsics( 30 | height=50, width=50, fx=50.0, fy=50.0, cx=25.0, cy=25.0, near=0.01, far=1.0 31 | ) 32 | 33 | b.setup_renderer(intrinsics) 34 | model_dir = os.path.join(b.utils.get_assets_dir(), "bop/ycbv/models") 35 | meshes = []  # NOTE(review): appears unused in the visible code 36 | for idx in range(1, 22): 37 | mesh_path = os.path.join( 38 | model_dir, "obj_" + "{}".format(idx).rjust(6, "0") + ".ply" 39 | ) 40 | b.RENDERER.add_mesh_from_file(mesh_path, scaling_factor=1.0 / 1000.0) 41 | 42 | b.RENDERER.add_mesh_from_file( 43 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj"), 44 |
scaling_factor=1.0 / 1000000000.0,  # NOTE(review): shrinks the cube to negligible size — presumably a dummy entry; confirm intent 45 | ) 46 | 47 | 48 | pose = b.t3d.transform_from_pos(jnp.array([-1.0, -1.0, 4.0])) 49 | pose2 = pose @ b.distributions.gaussian_vmf_zero_mean( 50 | jax.random.PRNGKey(5), 0.05, 1000.0 51 | ) 52 | 53 | 54 | b.show_pose("1", pose) 55 | b.show_pose("2", pose2) 56 | 57 | img1 = b.RENDERER.render(pose.reshape(-1, 4, 4), jnp.array([0]))[..., :3] 58 | img2 = b.RENDERER.render(pose2.reshape(-1, 4, 4), jnp.array([0]))[..., :3] 59 | 60 | b.clear() 61 | b.show_cloud("c", img1.reshape(-1, 3)) 62 | b.show_cloud("d", img2.reshape(-1, 3), color=b.RED) 63 | 64 | mask = (img1[:, :, 2] < intrinsics.far) * (img2[:, :, 2] < intrinsics.far)  # keep pixels where both depth renders are below the far plane 65 | 66 | transform = b.utils.find_least_squares_transform_between_clouds( 67 | img1[mask, :], img2[mask, :] 68 | ) 69 | 70 | print(jnp.abs(img2[mask, :] - img1[mask, :]).sum()) 71 | print(jnp.abs(img2[mask, :] - b.apply_transform(img1[mask, :], transform)).sum()) 72 | print(jnp.abs(cloud_transformed - b.apply_transform(cloud, transform)).sum())  # NOTE(review): 'transform' is now the image-based estimate, not the earlier cloud-based one 73 | -------------------------------------------------------------------------------- /test/test_kubric.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax.numpy as jnp 4 | import trimesh 5 | from IPython import embed 6 | from tqdm import tqdm 7 | 8 | import bayes3d as b 9 | from bayes3d.rendering.photorealistic_renderers.kubric_interface import render_many 10 | 11 | # --- creating the ycb dir from the working directory 12 | bop_ycb_dir = os.path.join(b.utils.get_assets_dir(), "bop/ycbv") 13 | rgbd, gt_ids, gt_poses, masks = b.utils.ycb_loader.get_test_img("52", "1", bop_ycb_dir) 14 | 15 | 16 | mesh_paths = [] 17 | offset_poses = [] 18 | model_dir = os.path.join(b.utils.get_assets_dir(), "ycb_video_models/models") 19 | for i in tqdm(gt_ids): 20 | mesh_path = os.path.join( 21 | model_dir, b.utils.ycb_loader.MODEL_NAMES[i], "textured.obj" 22 | ) 23 | _, pose = b.utils.mesh.center_mesh(trimesh.load(mesh_path),
return_pose=True) 23 | offset_poses.append(pose) 24 | mesh_paths.append(mesh_path) 25 | 26 | intrinsics = b.Intrinsics( 27 | rgbd.intrinsics.height, 28 | rgbd.intrinsics.width, 29 | 200.0,  # NOTE(review): hard-coded focal lengths replace rgbd.intrinsics.fx/fy 30 | 200.0, 31 | rgbd.intrinsics.width / 2, 32 | rgbd.intrinsics.height / 2, 33 | rgbd.intrinsics.near, 34 | rgbd.intrinsics.far, 35 | ) 36 | print(intrinsics) 37 | 38 | poses = [] 39 | for i in range(len(gt_ids)): 40 | poses.append(gt_poses[i] @ b.t3d.inverse_pose(offset_poses[i])) 41 | poses = jnp.array(poses) 42 | 43 | rgbds = render_many( 44 | mesh_paths, poses[None, ...], intrinsics, scaling_factor=1.0, lighting=5.0 45 | ) 46 | 47 | 48 | b.setup_renderer(intrinsics) 49 | for path in mesh_paths: 50 | b.RENDERER.add_mesh_from_file(path) 51 | 52 | img = b.RENDERER.render(gt_poses, jnp.arange(gt_poses.shape[0])) 53 | 54 | kubri_rgb = b.get_rgb_image(rgbds[0].rgb)  # NOTE(review): "kubri" looks like a typo for "kubric" 55 | kubric_depth = b.get_depth_image(rgbds[0].depth) 56 | rerendered_depth = b.get_depth_image(img[:, :, 2]) 57 | overlay = b.overlay_image(kubric_depth, rerendered_depth, alpha=0.5) 58 | b.multi_panel( 59 | [kubri_rgb, kubric_depth, rerendered_depth, overlay], 60 | labels=["kubric_rgb", "kubric_depth", "rerendered_depth", "overlay"], 61 | ).save("test_kubric.png") 62 | 63 | embed()  # NOTE(review): leftover interactive shell; blocks non-interactive runs 64 | 65 | -------------------------------------------------------------------------------- /test/test_likelihood.py: -------------------------------------------------------------------------------- 1 | import jax.numpy as jnp 2 | 3 | import bayes3d as b 4 | 5 | H = 100 6 | W = 200 7 | observed_xyz, rendered_xyz = jnp.ones((H, W, 3)), jnp.ones((H, W, 3)) 8 | b.threedp3_likelihood(observed_xyz, rendered_xyz, 0.007, 0.1, 0.1, 1.0, 3) 9 | -------------------------------------------------------------------------------- /test/test_open3d.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax.numpy as jnp 4 | import trimesh 5 | from IPython import embed 6 | from tqdm import tqdm 7 | 8 |
import bayes3d as b 9 | from bayes3d.viz.open3dviz import Open3DVisualizer 10 | 11 | # --- creating the ycb dir from the working directory 12 | bop_ycb_dir = os.path.join(b.utils.get_assets_dir(), "bop/ycbv") 13 | rgbd, gt_ids, gt_poses, masks = b.utils.ycb_loader.get_test_img("52", "1", bop_ycb_dir) 14 | 15 | 16 | mesh_paths = [] 17 | offset_poses = [] 18 | model_dir = os.path.join(b.utils.get_assets_dir(), "ycb_video_models/models") 19 | for i in tqdm(gt_ids): 20 | mesh_path = os.path.join( 21 | model_dir, b.utils.ycb_loader.MODEL_NAMES[i], "textured.obj" 22 | ) 23 | _, pose = b.utils.mesh.center_mesh(trimesh.load(mesh_path), return_pose=True) 24 | offset_poses.append(pose) 25 | mesh_paths.append(mesh_path) 26 | 27 | intrinsics = b.Intrinsics( 28 | rgbd.intrinsics.height, 29 | rgbd.intrinsics.width, 30 | rgbd.intrinsics.fx, 31 | rgbd.intrinsics.fx,  # NOTE(review): fx reused here — confirm this is not meant to be intrinsics.fy 32 | rgbd.intrinsics.width / 2, 33 | rgbd.intrinsics.height / 2, 34 | rgbd.intrinsics.near, 35 | rgbd.intrinsics.far, 36 | ) 37 | 38 | poses = [] 39 | for i in range(len(gt_ids)): 40 | poses.append(gt_poses[i] @ b.t3d.inverse_pose(offset_poses[i])) 41 | poses = jnp.array(poses) 42 | 43 | 44 | visualizer = Open3DVisualizer(intrinsics) 45 | 46 | visualizer.clear() 47 | for pose, path in zip(poses, mesh_paths): 48 | visualizer.make_mesh_from_file(path, pose) 49 | rgbd_textured_reconstruction = visualizer.capture_image(intrinsics, jnp.eye(4)) 50 | 51 | visualizer.clear() 52 | colors = b.viz.distinct_colors(len(gt_ids)) 53 | for i, (pose, path) in enumerate(zip(poses, mesh_paths)): 54 | mesh = b.utils.load_mesh(path) 55 | visualizer.make_trimesh(mesh, pose, (*tuple(colors[i]), 1.0))  # RGBA: distinct RGB per object, alpha 1.0 56 | 57 | rgbd_color_mesh_reconstruction = visualizer.capture_image(intrinsics, jnp.eye(4)) 58 | 59 | panel = b.viz.multi_panel( 60 | [ 61 | b.get_rgb_image(rgbd.rgb), 62 | b.get_rgb_image(rgbd_textured_reconstruction.rgb), 63 | b.get_rgb_image(rgbd_color_mesh_reconstruction.rgb), 64 | b.overlay_image( 65 | b.get_rgb_image(rgbd.rgb), 66 |
b.get_rgb_image(rgbd_color_mesh_reconstruction.rgb), 67 | ), 68 | ] 69 | ) 70 | 71 | panel.save("test.png") 72 | embed()  # NOTE(review): leftover interactive shell; blocks non-interactive runs 73 | -------------------------------------------------------------------------------- /test/test_renderer_internals.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax 4 | import jax.numpy as jnp 5 | import torch 6 | 7 | import bayes3d as b 8 | import bayes3d._rendering.nvdiffrast.common as dr 9 | 10 | intrinsics = b.Intrinsics(300, 300, 200.0, 200.0, 150.0, 150.0, 0.001, 50.0) 11 | b.setup_renderer(intrinsics) 12 | renderer = b.RENDERER 13 | 14 | r = 0.1 15 | outlier_prob = 0.01 16 | max_depth = 15.0  # NOTE(review): r/outlier_prob/max_depth appear unused in this script 17 | 18 | renderer.add_mesh_from_file( 19 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj") 20 | ) 21 | renderer.add_mesh_from_file( 22 | os.path.join(b.utils.get_assets_dir(), "sample_objs/sphere.obj") 23 | ) 24 | 25 | poses = jnp.array( 26 | [ 27 | [1.0, 0.0, 0.0, 0.0], 28 | [0.0, 1.0, 0.0, -1.0], 29 | [0.0, 0.0, 1.0, 8.0], 30 | [0.0, 0.0, 0.0, 1.0], 31 | ] 32 | )[None, None, ...]  # add two leading axes -> shape (1, 1, 4, 4)
33 | indices = jnp.array([0])  # NOTE(review): appears unused below 34 | 35 | 36 | img = jax.dlpack.from_dlpack(  # round-trip JAX <-> torch tensors via DLPack to call the raw GL rasterizer plugin 37 | torch.utils.dlpack.to_dlpack( 38 | dr._get_plugin(gl=True).rasterize_fwd_gl( 39 | b.RENDERER.renderer_env.cpp_wrapper, 40 | torch.utils.dlpack.from_dlpack( 41 | jax.dlpack.to_dlpack(jnp.tile(poses, (1, 2, 1, 1))) 42 | ), 43 | b.RENDERER.proj_list, 44 | [0], 45 | ) 46 | ) 47 | ) 48 | b.get_depth_image(img[0, :, :, 2]).save("1.png") 49 | assert not jnp.all(img[0, :, :, 2] == 0.0) 50 | 51 | multiobject_scene_img = renderer._render_many( 52 | jnp.tile(poses, (2, 1, 1, 1)), jnp.array([1]) 53 | )[0] 54 | b.get_depth_image(multiobject_scene_img[:, :, 2]).save("0.png") 55 | assert not jnp.all(multiobject_scene_img[:, :, 2] == 0.0) 56 | -------------------------------------------------------------------------------- /test/test_renderer_memory.py: -------------------------------------------------------------------------------- 1 | import gc 2 | import os 3 | 4 | import jax.numpy as jnp 5 | 6 | import bayes3d as b 7 | 8 | # setup renderer 9 | intrinsics = b.Intrinsics(50, 50, 200.0, 200.0, 25.0, 25.0, 0.001, 10.0) 10 | # Note: removing the b.RENDERER object does the same operation in C++ as clear_meshmem() 11 | b.setup_renderer(intrinsics, num_layers=1) 12 | renderer = b.RENDERER 13 | 14 | pre_test_clearmesh = b.utils.get_gpu_memory()[0] 15 | 16 | for i in range(5): 17 | b.setup_renderer(intrinsics, num_layers=1) 18 | renderer = b.RENDERER 19 | 20 | pre_add_mesh = b.utils.get_gpu_memory()[0] 21 | for x in range(1): 22 | renderer.add_mesh_from_file( 23 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj"), 24 | mesh_name=f"cube_{i+1}", 25 | ) 26 | 27 | post_add_mesh = b.utils.get_gpu_memory()[0] 28 | 29 | pose = jnp.array( 30 | [ 31 | [1.0, 0.0, 0.0, 0.5], 32 | [0.0, 1.0, 0.0, 0.0], 33 | [0.0, 0.0, 1.0, 10.0], 34 | [0.0, 0.0, 0.0, 1.0], 35 | ] 36 | ) 37 | depth = renderer.render(pose[None, ...], jnp.array([0]))[..., 2] 38 | 39 | post_render = b.utils.get_gpu_memory()[0] 40 | 41 |
renderer.clear_gpu_meshmem()  # release GPU mesh buffers allocated by add_mesh_from_file 42 | 43 | post_clear_meshmem = b.utils.get_gpu_memory()[0] 44 | 45 | # ensure the mesh memory is fully cleared 46 | assert pre_add_mesh - post_add_mesh == post_clear_meshmem - post_render 47 | 48 | gc.collect() 49 | 50 | print(f"{i}: ", b.utils.get_gpu_memory()[0]) 51 | 52 | post_test_clearmesh = b.utils.get_gpu_memory()[0] 53 | 54 | # Expected result should be around 2MiB for the given camera intrinsics 55 | print( 56 | "GPU memory lost with clear_meshmem() --> ", 57 | pre_test_clearmesh - post_test_clearmesh, 58 | " MiB", 59 | ) 60 | print( 61 | "The memory lost is from the JAX memory in GPU and not accumulations in the GPU" 62 | ) 63 | -------------------------------------------------------------------------------- /test/test_scene_graph.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as jnp 3 | 4 | import bayes3d as b 5 | 6 | N = 4 7 | scene_graph = b.scene_graph.SceneGraph( 8 | root_poses=jnp.tile(jnp.eye(4)[None, ...], (N, 1, 1)), 9 | box_dimensions=jnp.ones((N, 3)), 10 | parents=jnp.array([-1, 0, 0, 2]), 11 | contact_params=jax.random.uniform( 12 | jax.random.PRNGKey(10), (N, 3), minval=-1.0, maxval=1.0 13 | ), 14 | face_parent=jnp.array([0, 1, 1, 2]), 15 | face_child=jnp.array([2, 3, 4, 5]), 16 | ) 17 | scene_graph.visualize("graph.png", node_names=["table", "apple", "can", "banana"]) 18 | 19 | floating_scene_graph = b.scene_graph.create_floating_scene_graph(scene_graph) 20 | assert jnp.isclose(floating_scene_graph.get_poses(), scene_graph.get_poses()).all() 21 | 22 | 23 | parent_object_index = 0 24 | child_object_index = 1 25 | parent_pose = scene_graph.get_poses()[parent_object_index] 26 | child_pose = scene_graph.get_poses()[child_object_index] 27 | face_parent = scene_graph.face_parent[child_object_index] 28 | face_child = scene_graph.face_child[child_object_index] 29 | dims_parent = scene_graph.box_dimensions[parent_object_index] 30 | dims_child =
scene_graph.box_dimensions[child_object_index] 31 | 32 | parent_contact_plane = ( 33 | parent_pose @ b.scene_graph.get_contact_planes(dims_parent)[face_parent] 34 | ) 35 | child_contact_plane = ( 36 | child_pose @ b.scene_graph.get_contact_planes(dims_child)[face_child] 37 | ) 38 | 39 | contact_params, slack = b.scene_graph.closest_approximate_contact_params( 40 | parent_contact_plane, child_contact_plane 41 | )  # for an exact contact the slack transform should be (near-)identity, checked below 42 | assert jnp.isclose(slack[:3, 3], 0.0, atol=1e-7).all() 43 | assert jnp.isclose(slack[:3, :3], jnp.eye(3), atol=1e-7).all() 44 | 45 | assert jnp.isclose(contact_params, scene_graph.contact_params[child_object_index]).all() 46 | -------------------------------------------------------------------------------- /test/test_transforms_3d.py: -------------------------------------------------------------------------------- 1 | import jax 2 | import jax.numpy as jnp 3 | 4 | import bayes3d as b 5 | 6 | 7 | def test_estimate_transform_between_clouds():  # round-trips a random rigid transform through point correspondences 8 | key = jax.random.PRNGKey(500) 9 | c1 = jax.random.uniform(jax.random.PRNGKey(0), (10, 3)) * 5.0 10 | random_pose = b.distributions.gaussian_vmf_zero_mean(key, 0.1, 1.0) 11 | c2 = b.t3d.apply_transform(c1, random_pose) 12 | 13 | estimated = b.estimate_transform_between_clouds(c1, c2) 14 | assert jnp.isclose(b.apply_transform(c1, estimated), c2, atol=1e-5).all() 15 | -------------------------------------------------------------------------------- /test/test_viz.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import jax.numpy as jnp 4 | import matplotlib.pyplot as plt 5 | 6 | import bayes3d as b 7 | 8 | bop_ycb_dir = os.path.join(b.utils.get_assets_dir(), "bop/ycbv") 9 | rgbd, gt_ids, gt_poses, masks = b.utils.ycb_loader.get_test_img("52", "1", bop_ycb_dir) 10 | fig = b.viz_depth_image(rgbd.depth) 11 | fig.savefig("depth.png", **b.saveargs) 12 | fig = b.viz_rgb_image(rgbd.rgb) 13 | fig.savefig("rgb.png", **b.saveargs) 14 | 15 | fig = plt.figure() 16 | ax =
fig.add_subplot(1, 2, 1) 17 | b.add_rgb_image(ax, rgbd.rgb) 18 | ax.set_title("RGB") 19 | ax = fig.add_subplot(1, 2, 2) 20 | b.add_depth_image(ax, rgbd.depth) 21 | ax.set_title("DEPTH") 22 | fig.savefig("fig.png", **b.saveargs) 23 | 24 | 25 | ################################################################################## 26 | # Testing 2 edge cases in getting color-mapped depth image from rendered output # 27 | ################################################################################## 28 | 29 | # set up renderer 30 | intrinsics = b.Intrinsics(50, 50, 200.0, 200.0, 25.0, 25.0, 0.001, 20.0) 31 | b.setup_renderer(intrinsics) 32 | renderer = b.RENDERER 33 | renderer.add_mesh_from_file( 34 | os.path.join(b.utils.get_assets_dir(), "sample_objs/cube.obj") 35 | ) 36 | 37 | 38 | # Test 1: check if b.get_depth_image returns a valid image if there is no object in the scene 39 | no_object_in_scene_pose = jnp.array( 40 | [ 41 | [1.0, 0.0, 0.0, -100.0], 42 | [0.0, 1.0, 0.0, 0.0], 43 | [0.0, 0.0, 1.0, 10.0], 44 | [0.0, 0.0, 0.0, 1.0], 45 | ] 46 | ) 47 | depth = renderer.render(no_object_in_scene_pose[None, ...], jnp.array([0]))[..., 2] 48 | depth_image = b.scale_image(b.get_depth_image(depth), 8)  # upscale 8x for easier inspection 49 | depth_image.save("viz_test_no_object_in_scene.png") 50 | 51 | # Test 2: check if b.get_depth_image returns a valid image if object has only one unique depth value 52 | object_unique_depth_pose = jnp.array( 53 | [ 54 | [1.0, 0.0, 0.0, 0.0], 55 | [0.0, 1.0, 0.0, 0.0], 56 | [0.0, 0.0, 1.0, 10.0], 57 | [0.0, 0.0, 0.0, 1.0], 58 | ] 59 | ) 60 | depth = renderer.render(object_unique_depth_pose[None, ...], jnp.array([0]))[..., 2] 61 | assert jnp.unique(depth).size == 2  # far and object's depth 62 | depth_image = b.scale_image(b.get_depth_image(depth), 8) 63 | depth_image.save("viz_test_object_unique_depth.png") 64 | --------------------------------------------------------------------------------