├── .gitattributes
├── .gitignore
├── README.md
├── __init__.py
├── environment.yml
├── img_src
│   ├── model_diagram.jpg
│   ├── orientation_diagram.jpg
│   └── poster.jpg
├── model
│   ├── __init__.py
│   ├── add_output_layers.py
│   ├── backbone_xception.py
│   ├── build_model.py
│   ├── check_pickle.py
│   ├── data_generator.py
│   ├── data_processing.py
│   ├── loss_function.py
│   ├── metrics.py
│   ├── orientation_converters.py
│   ├── positional_encoder.py
│   ├── tmp_data.pkl
│   ├── training.py
│   ├── training.sh
│   └── training_dataset.py
├── pyproject.toml
└── utils
    ├── Exp-A_gt_0.png
    ├── __init__.py
    ├── convert_to_csv.py
    ├── csv_output
    │   ├── angular_loss_cleaned_training_accuracy.csv
    │   ├── angular_loss_cleaned_training_loss.csv
    │   ├── angular_loss_cleaned_validation_accuracy.csv
    │   ├── angular_loss_cleaned_validation_loss.csv
    │   ├── angular_new_cleaned_training_accuracy.csv
    │   ├── angular_new_cleaned_training_loss.csv
    │   ├── angular_new_cleaned_validation_accuracy.csv
    │   ├── angular_new_cleaned_validation_loss.csv
    │   ├── cleaned_training_accuracy.csv
    │   ├── cleaned_training_loss.csv
    │   ├── cleaned_validation_accuracy.csv
    │   ├── cleaned_validation_loss.csv
    │   ├── depth+pos_enc_cleaned_training_accuracy.csv
    │   ├── depth+pos_enc_cleaned_training_loss.csv
    │   ├── depth+pos_enc_cleaned_validation_accuracy.csv
    │   ├── depth+pos_enc_cleaned_validation_loss.csv
    │   ├── depth_cleaned_training_accuracy.csv
    │   ├── depth_cleaned_training_loss.csv
    │   ├── depth_cleaned_validation_accuracy.csv
    │   ├── depth_cleaned_validation_loss.csv
    │   ├── exp-A_cleaned_training_accuracy.csv
    │   ├── exp-A_cleaned_training_loss.csv
    │   ├── exp-A_cleaned_validation_accuracy.csv
    │   ├── exp-A_cleaned_validation_loss.csv
    │   ├── exp-B_cleaned_training_accuracy.csv
    │   ├── exp-B_cleaned_training_loss.csv
    │   ├── exp-B_cleaned_validation_accuracy.csv
    │   ├── exp-B_cleaned_validation_loss.csv
    │   ├── exp-C_cleaned_training_accuracy.csv
    │   ├── exp-C_cleaned_training_loss.csv
    │   ├── exp-C_cleaned_validation_accuracy.csv
    │   ├── exp-C_cleaned_validation_loss.csv
    │   ├── multi_2_bin_cleaned_training_accuracy.csv
    │   ├── multi_2_bin_cleaned_training_loss.csv
    │   ├── multi_2_bin_cleaned_validation_accuracy.csv
    │   ├── multi_2_bin_cleaned_validation_loss.csv
    │   ├── multi_4_bin_cleaned_training_accuracy.csv
    │   ├── multi_4_bin_cleaned_training_loss.csv
    │   ├── multi_4_bin_cleaned_validation_accuracy.csv
    │   ├── multi_4_bin_cleaned_validation_loss.csv
    │   ├── multi_affinity_cleaned_training_accuracy.csv
    │   ├── multi_affinity_cleaned_training_loss.csv
    │   ├── multi_affinity_cleaned_validation_accuracy.csv
    │   ├── multi_affinity_cleaned_validation_loss.csv
    │   ├── pos_enc_cleaned_training_accuracy.csv
    │   ├── pos_enc_cleaned_training_loss.csv
    │   ├── pos_enc_cleaned_validation_accuracy.csv
    │   ├── pos_enc_cleaned_validation_loss.csv
    │   ├── pos_enc_second_cleaned_training_accuracy.csv
    │   ├── pos_enc_second_cleaned_training_loss.csv
    │   ├── pos_enc_second_cleaned_validation_accuracy.csv
    │   ├── pos_enc_second_cleaned_validation_loss.csv
    │   ├── pos_enc_third_cleaned_training_accuracy.csv
    │   ├── pos_enc_third_cleaned_training_loss.csv
    │   ├── pos_enc_third_cleaned_validation_accuracy.csv
    │   └── pos_enc_third_cleaned_validation_loss.csv
    ├── data_checker.py
    ├── train_utils.py
    ├── view_tensorboard.sh
    └── visualize_loss_function.py
/.gitattributes: -------------------------------------------------------------------------------- 1 | *.js linguist-language=python 2 | *.css linguist-language=python 3 | *.html linguist-language=python 4 | *.cpp linguist-language=python 5 | demo_model.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text 6 | demo_model/demo_model.data-00000-of-00001 filter=lfs diff=lfs merge=lfs -text 7 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | loss_function_graph 2 | multibin 3 | dataset 4 | weights 5 | depth_single_bin/ 6 | preds 7 | .ipynb_checkpoints 8 | records/ 9 | __pycache__ 10 | *.py[cod] 11 | *$py.class 12 | *.swp 13 | *.log 14 | .idea/* 15 | .vscode 16 | *.tfrec 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SoK: Vehicle Orientation Representations for Deep Rotation Estimation 2 | ## Raymond H. Tu, Siyuan Peng, Valdimir Leung, Richard Gao, Jerry Lan 3 | This is the official implementation of the paper [SoK: Vehicle Orientation Representations for Deep Rotation Estimation](https://arxiv.org/abs/2112.04421). 4 | 5 | 6 | ![Model Diagram](./img_src/poster.jpg) 7 | 8 | ## Table of Contents 9 | - [Environment Setup](#environment-setup) 10 | - [Training](#training) 11 | - [Training Result](#training-result) 12 | 13 | ## Environment Setup 14 | Install the required packages via conda: 15 | ``` bash 16 | # create conda environment based on yml file 17 | conda env update --file environment.yml 18 | # activate conda environment 19 | conda activate KITTI-Orientation 20 | ``` 21 | Clone the git repo: 22 | ``` bash 23 | git clone git@github.com:umd-fire-coml/KITTI-orientation-learning.git 24 | ``` 25 | ## Training 26 | Check training.sh for an example training script. 27 | 28 | ### Training Parameter Setup: 29 | Training parameters can be configured via command-line arguments: 30 | - --predict: Specify the prediction target. Options are rot-y, alpha 31 | - --converter: Specify the prediction method. Options are alpha, rot-y, tricosine, multibin, voting-bin, single-bin, exp-A 32 | - --kitti_dir: Path to the KITTI dataset directory. It should contain training/ and testing/ subdirectories. Default path is dataset/ 33 | - --training_record: Root directory of all training records, the parent of the weights and logs directories. 
Default path is training_record 34 | - --resume: Resume from previous training under the training_record directory 35 | - --add_pos_enc: Add positional encoding to the input 36 | - --add_depth_map: Add depth map information to the input 37 | 38 | For the full list of training parameters, use: 39 | ``` 40 | python3 model/training.py -h 41 | ``` 42 | 43 | ## Training Result 44 | | Exp ID | Target | Loss Functions | Additional Inputs | Accuracy (%) | 45 | |--------|-------------------------|----------------|-------------------|----------------| 46 | | E1 | rot-y | L2 Loss | - | 90.490 | 47 | | E2 | rot-y | Angle Loss | - | 89.052 | 48 | | E3 | alpha | L2 Loss | - | 90.132 | 49 | | E4 | Single Bin | L2 Loss | - | 94.815 | 50 | | E5 | Single Bin | L2 Loss | Pos Enc | 94.277 | 51 | | E6 | Single Bin | L2 Loss | Dep Map | 93.952 | 52 | | E7 | Voting Bins (4-Bin) | L2 Loss | - | 93.609 | 53 | | E8 | Tricosine | L2 Loss | - | 94.249 | 54 | | E9 | Tricosine | L2 Loss | Pos Enc | 94.351 | 55 | | E10 | Tricosine | L2 Loss | Dep Map | 94.384 | 56 | | E11 | 2 Conf Bins | L2(Bins,Confs) | - | 83.304 | 57 | | E12 | 4 Conf Bins | L2(Bins,Confs) | - | 88.071 | 58 | 59 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/__init__.py -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: KITTI-Orientation 2 | channels: 3 | - defaults 4 | dependencies: 5 | - python=3.8 6 | - tensorflow 7 | - pathlib2 8 | - tqdm 9 | - scikit-image 10 | -------------------------------------------------------------------------------- /img_src/model_diagram.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/img_src/model_diagram.jpg -------------------------------------------------------------------------------- /img_src/orientation_diagram.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/img_src/orientation_diagram.jpg -------------------------------------------------------------------------------- /img_src/poster.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/img_src/poster.jpg -------------------------------------------------------------------------------- /model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/model/__init__.py -------------------------------------------------------------------------------- /model/add_output_layers.py: -------------------------------------------------------------------------------- 1 | from tensorflow.keras import layers 2 | from tensorflow import math as K 3 | from functools import reduce 4 | from orientation_converters import ( 5 | SHAPE_MULTIBIN, 6 | SHAPE_SINGLE_BIN, 7 | 
SHAPE_TRICOSINE, 8 | SHAPE_ALPHA_ROT_Y, 9 | SHAPE_VOTING_BIN, 10 | SHAPE_MULTI_AFFINITY_BIN, 11 | SHAPE_EXP_A 12 | ) 13 | 14 | LAYER_OUTPUT_NAME_TRICOSINE = 'tricosine_layer_output' 15 | LAYER_OUTPUT_NAME_ALPHA_ROT_Y = 'alpha_rot_y_layer_output' 16 | LAYER_OUTPUT_NAME_MULTIBIN = 'multibin_layer_output' 17 | LAYER_OUTPUT_NAME_VOTING_BIN = 'voting_bin_layer_output' 18 | LAYER_OUTPUT_NAME_SINGLE_BIN = 'single_bin_layer_output' 19 | LAYER_OUTPUT_NAME_EXP_A = 'exp_A_layer_output' 20 | 21 | 22 | def add_dense_layers(backbone_layer, output_shape, out_layer_name=''): 23 | y = layers.Dense(256)(backbone_layer) 24 | y = layers.LeakyReLU(0.1)(y) 25 | y = layers.Dropout(0.5)(y) 26 | # prepare number of outputs 27 | y = layers.Dense(reduce(lambda x, y: x * y, output_shape))(y) 28 | y = layers.Reshape(output_shape, name=out_layer_name)(y) 29 | return y 30 | 31 | 32 | def add_output_layers(orientation_type, backbone_layer): 33 | backbone_layer = layers.Flatten()(backbone_layer) 34 | if orientation_type == 'multibin': 35 | return add_dense_layers( 36 | backbone_layer, SHAPE_MULTI_AFFINITY_BIN, out_layer_name=LAYER_OUTPUT_NAME_MULTIBIN 37 | ) 38 | elif orientation_type == 'tricosine': 39 | return add_dense_layers( 40 | backbone_layer, SHAPE_TRICOSINE, out_layer_name=LAYER_OUTPUT_NAME_TRICOSINE 41 | ) 42 | elif orientation_type == 'alpha' or orientation_type == 'rot-y': 43 | return add_dense_layers( 44 | backbone_layer, SHAPE_ALPHA_ROT_Y, out_layer_name=LAYER_OUTPUT_NAME_ALPHA_ROT_Y 45 | ) 46 | elif orientation_type == 'voting-bin': 47 | return add_dense_layers( 48 | backbone_layer, SHAPE_VOTING_BIN, out_layer_name=LAYER_OUTPUT_NAME_VOTING_BIN 49 | ) 50 | elif orientation_type == 'single-bin': 51 | return add_dense_layers( 52 | backbone_layer, SHAPE_SINGLE_BIN, out_layer_name=LAYER_OUTPUT_NAME_SINGLE_BIN 53 | ) 54 | elif orientation_type == 'exp-A': 55 | return add_dense_layers( 56 | backbone_layer, SHAPE_EXP_A, out_layer_name=LAYER_OUTPUT_NAME_EXP_A 57 | ) 58 | else: 59 | raise NameError("Invalid orientation_output_type") 60 | -------------------------------------------------------------------------------- /model/backbone_xception.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | from tensorflow.keras import layers 3 | import tensorflow.keras.backend as backend 4 | 5 | # Reference 6 | """ 7 | This is a modified version of 8 | - [Xception: Deep Learning with Depthwise Separable Convolutions]( 9 | https://arxiv.org/abs/1610.02357) (CVPR 2017) 10 | """ 11 | 12 | 13 | # Pre-trained model weights 14 | TF_WEIGHTS_PATH = ( 15 | 'https://github.com/fchollet/deep-learning-models/' 16 | 'releases/download/v0.4/' 17 | 'xception_weights_tf_dim_ordering_tf_kernels.h5' 18 | ) 19 | TF_WEIGHTS_PATH_NO_TOP = ( 20 | 'https://github.com/fchollet/deep-learning-models/' 21 | 'releases/download/v0.4/' 22 | 'xception_weights_tf_dim_ordering_tf_kernels_notop.h5' 23 | ) 24 | 25 | 26 | """ 27 | input_shape: optional shape tuple, only to be specified 28 | if `include_top` is False (otherwise the input shape 29 | has to be `(299, 299, 3)`). 30 | It should have exactly 3 input channels, 31 | and width and height should be no smaller than 71. 32 | E.g. `(150, 150, 3)` would be one valid value. 33 | include_top: whether to include the fully-connected 34 | layer at the top of the network. 35 | pooling: Optional pooling mode for feature extraction 36 | when `include_top` is `False`. 
37 | - `None` means that the output of the model will be 38 | the 4D tensor output of the 39 | last convolutional block. 40 | - `avg` means that global average pooling 41 | will be applied to the output of the 42 | last convolutional block, and thus 43 | the output of the model will be a 2D tensor. 44 | - `max` means that global max pooling will 45 | be applied. 46 | classes: optional number of classes to classify images 47 | into, only to be specified if `include_top` is True, 48 | and if no `weights` argument is specified 49 | weights: one of `None` (random initialization), 50 | 'imagenet' (pre-training on ImageNet), 51 | or the path to the weights file to be loaded. 52 | """ 53 | 54 | 55 | def Xception_model(img_input, pooling=None): 56 | 57 | channel_axis = 1 if backend.image_data_format() == 'channels_first' else -1 # BatchNorm axis depends on the image data format 58 | 59 | x = layers.Conv2D(32, (3, 3), strides=(2, 2), use_bias=False, name='block1_conv1')( 60 | img_input 61 | ) 62 | x = layers.BatchNormalization(axis=channel_axis, name='block1_conv1_bn')(x) 63 | x = layers.Activation('relu', name='block1_conv1_act')(x) 64 | x = layers.Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x) 65 | x = layers.BatchNormalization(axis=channel_axis, name='block1_conv2_bn')(x) 66 | x = layers.Activation('relu', name='block1_conv2_act')(x) 67 | 68 | residual = layers.Conv2D(128, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) 69 | residual = layers.BatchNormalization(axis=channel_axis)(residual) 70 | 71 | x = layers.SeparableConv2D( 72 | 128, (3, 3), padding='same', use_bias=False, name='block2_sepconv1' 73 | )(x) 74 | x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv1_bn')(x) 75 | x = layers.Activation('relu', name='block2_sepconv2_act')(x) 76 | x = layers.SeparableConv2D( 77 | 128, (3, 3), padding='same', use_bias=False, name='block2_sepconv2' 78 | )(x) 79 | x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv2_bn')(x) 80 | 81 | x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block2_pool')(x) 82 | x = layers.add([x, residual]) 83 | 84 | residual = layers.Conv2D(256, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) 85 | residual = layers.BatchNormalization(axis=channel_axis)(residual) 86 | 87 | x = layers.Activation('relu', name='block3_sepconv1_act')(x) 88 | x = layers.SeparableConv2D( 89 | 256, (3, 3), padding='same', use_bias=False, name='block3_sepconv1' 90 | )(x) 91 | x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv1_bn')(x) 92 | x = layers.Activation('relu', name='block3_sepconv2_act')(x) 93 | x = layers.SeparableConv2D( 94 | 256, (3, 3), padding='same', use_bias=False, name='block3_sepconv2' 95 | )(x) 96 | x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv2_bn')(x) 97 | 98 | x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block3_pool')(x) 99 | x = layers.add([x, residual]) 100 | 101 | residual = layers.Conv2D(728, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) 102 | residual = layers.BatchNormalization(axis=channel_axis)(residual) 103 | 104 | x = layers.Activation('relu', name='block4_sepconv1_act')(x) 105 | x = layers.SeparableConv2D( 106 | 728, (3, 3), padding='same', use_bias=False, name='block4_sepconv1' 107 | )(x) 108 | x = layers.BatchNormalization(axis=channel_axis, name='block4_sepconv1_bn')(x) 109 | x = layers.Activation('relu', name='block4_sepconv2_act')(x) 110 | x = layers.SeparableConv2D( 111 | 728, (3, 3), padding='same', use_bias=False, name='block4_sepconv2' 
112 | )(x) 113 | x = layers.BatchNormalization(axis=channel_axis, name='block4_sepconv2_bn')(x) 114 | 115 | x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block4_pool')(x) 116 | x = layers.add([x, residual]) 117 | 118 | for i in range(8): 119 | residual = x 120 | prefix = 'block' + str(i + 5) 121 | 122 | x = layers.Activation('relu', name=prefix + '_sepconv1_act')(x) 123 | x = layers.SeparableConv2D( 124 | 728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv1' 125 | )(x) 126 | x = layers.BatchNormalization(axis=channel_axis, name=prefix + '_sepconv1_bn')(x) 127 | x = layers.Activation('relu', name=prefix + '_sepconv2_act')(x) 128 | x = layers.SeparableConv2D( 129 | 728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv2' 130 | )(x) 131 | x = layers.BatchNormalization(axis=channel_axis, name=prefix + '_sepconv2_bn')(x) 132 | x = layers.Activation('relu', name=prefix + '_sepconv3_act')(x) 133 | x = layers.SeparableConv2D( 134 | 728, (3, 3), padding='same', use_bias=False, name=prefix + '_sepconv3' 135 | )(x) 136 | x = layers.BatchNormalization(axis=channel_axis, name=prefix + '_sepconv3_bn')(x) 137 | 138 | x = layers.add([x, residual]) 139 | 140 | residual = layers.Conv2D(1024, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) 141 | residual = layers.BatchNormalization(axis=channel_axis)(residual) 142 | 143 | x = layers.Activation('relu', name='block13_sepconv1_act')(x) 144 | x = layers.SeparableConv2D( 145 | 728, (3, 3), padding='same', use_bias=False, name='block13_sepconv1' 146 | )(x) 147 | x = layers.BatchNormalization(axis=channel_axis, name='block13_sepconv1_bn')(x) 148 | x = layers.Activation('relu', name='block13_sepconv2_act')(x) 149 | x = layers.SeparableConv2D( 150 | 1024, (3, 3), padding='same', use_bias=False, name='block13_sepconv2' 151 | )(x) 152 | x = layers.BatchNormalization(axis=channel_axis, name='block13_sepconv2_bn')(x) 153 | 154 | x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block13_pool')(x) 155 | x = layers.add([x, residual]) 156 | 157 | x = layers.SeparableConv2D( 158 | 1536, (3, 3), padding='same', use_bias=False, name='block14_sepconv1' 159 | )(x) 160 | x = layers.BatchNormalization(axis=channel_axis, name='block14_sepconv1_bn')(x) 161 | x = layers.Activation('relu', name='block14_sepconv1_act')(x) 162 | 163 | x = layers.SeparableConv2D( 164 | 2048, (3, 3), padding='same', use_bias=False, name='block14_sepconv2' 165 | )(x) 166 | x = layers.BatchNormalization(axis=channel_axis, name='block14_sepconv2_bn')(x) 167 | x = layers.Activation('relu', name='block14_sepconv2_act')(x) 168 | 169 | if pooling == 'avg': 170 | x = layers.GlobalAveragePooling2D()(x) 171 | elif pooling == 'max': 172 | x = layers.GlobalMaxPooling2D()(x) 173 | 174 | return x 175 | -------------------------------------------------------------------------------- /model/build_model.py: -------------------------------------------------------------------------------- 1 | from tensorflow.keras import Input, Model 2 | from backbone_xception import Xception_model 3 | from add_output_layers import add_output_layers 4 | from data_processing import CROP_RESIZE_H, CROP_RESIZE_W 5 | 6 | 7 | def build_model(orientation, img_h=CROP_RESIZE_H, img_w=CROP_RESIZE_W, n_channel=3): 8 | inputs = Input(shape=(img_h, img_w, n_channel)) 9 | x = Xception_model(inputs, pooling='avg') 10 | x = add_output_layers(orientation, x) 11 | model = Model(inputs=inputs, outputs=x) 12 | return model 13 | 
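# ---------------------------------------------------------------------------
# Added usage sketch (illustrative only, not part of the original file): how
# build_model is typically called. The orientation string must be one of the
# types handled by add_output_layers, e.g. 'single-bin' below; training.py
# passes n_channel=4/6/7 when depth-map and/or positional-encoding channels
# are stacked onto the RGB crop.
if __name__ == '__main__':
    # build a single-bin model on plain RGB crops and inspect its layers
    demo_model = build_model('single-bin', n_channel=3)
    demo_model.summary()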
-------------------------------------------------------------------------------- /model/check_pickle.py: -------------------------------------------------------------------------------- 1 | import pickle as pkl 2 | train_pkl = "train_tmp_data.pkl" 3 | val_pkl = "val_tmp_data.pkl" 4 | 5 | with open(train_pkl, "rb") as file: 6 | train_data = pkl.load(file) 7 | with open(val_pkl, "rb") as file: 8 | val_data = pkl.load(file) 9 | 10 | print(f'length of train data: {len(train_data)}') 11 | print(f'length of validation data:{len(val_data)}') -------------------------------------------------------------------------------- /model/loss_function.py: -------------------------------------------------------------------------------- 1 | import sys, pathlib 2 | 3 | sys.path.append(str(pathlib.Path(__file__).resolve().parent)) 4 | 5 | import tensorflow as tf 6 | from orientation_converters import multibin_to_radians, angle_normed_to_radians 7 | from add_output_layers import ( 8 | LAYER_OUTPUT_NAME_SINGLE_BIN, 9 | LAYER_OUTPUT_NAME_TRICOSINE, 10 | LAYER_OUTPUT_NAME_ALPHA_ROT_Y, 11 | LAYER_OUTPUT_NAME_MULTIBIN, 12 | LAYER_OUTPUT_NAME_VOTING_BIN, 13 | LAYER_OUTPUT_NAME_EXP_A 14 | ) 15 | from tensorflow.keras.losses import mean_squared_error as l2_loss 16 | import numpy as np 17 | 18 | 19 | def loss_tricosine_(y_true, y_pred): 20 | return l2_loss(y_true, y_pred) 21 | 22 | 23 | loss_tricosine = {LAYER_OUTPUT_NAME_TRICOSINE: loss_tricosine_} 24 | loss_tricosine_weights = {LAYER_OUTPUT_NAME_TRICOSINE: 1.0} 25 | 26 | 27 | def loss_alpha_rot_y_l2_(y_true, y_pred): 28 | return l2_loss(y_true, y_pred) 29 | 30 | 31 | loss_alpha_rot_y = {LAYER_OUTPUT_NAME_ALPHA_ROT_Y: loss_alpha_rot_y_l2_} 32 | loss_alpha_rot_y_weights = {LAYER_OUTPUT_NAME_ALPHA_ROT_Y: 1.0} 33 | 34 | 35 | def loss_alpha_rot_y_angular_normed_(y_true, y_pred): 36 | y_true_rad = angle_normed_to_radians(y_true) 37 | y_pred_rad = angle_normed_to_radians(y_pred) 38 | y_true_vector = tf.transpose(tf.stack([tf.cos(y_true_rad), tf.sin(y_true_rad)])) 39 | y_pred_vector = tf.transpose(tf.stack([tf.cos(y_pred_rad), tf.sin(y_pred_rad)])) 40 | # perform dot product 41 | dot_producted = tf.reduce_sum(tf.multiply(y_true_vector, y_pred_vector), 1) 42 | loss = dot_producted / (tf.norm(y_true_vector, axis=1) * tf.norm(y_pred_vector, axis=1)) 43 | return 1 - loss 44 | 45 | 46 | def loss_alpha_rot_y_angular_(y_true, y_pred): 47 | y_true_rad = y_true 48 | y_pred_rad = y_pred 49 | y_true_vector = tf.transpose(tf.stack([tf.cos(y_true_rad), tf.sin(y_true_rad)])) 50 | y_pred_vector = tf.transpose(tf.stack([tf.cos(y_pred_rad), tf.sin(y_pred_rad)])) 51 | # perform dot product 52 | dot_producted = tf.reduce_sum(tf.multiply(y_true_vector, y_pred_vector), 1) 53 | loss = dot_producted / (tf.norm(y_true_vector, axis=1) * tf.norm(y_pred_vector, axis=1)) 54 | return 1 - loss 55 | 56 | 57 | loss_alpha_rot_y_angular = {LAYER_OUTPUT_NAME_ALPHA_ROT_Y: loss_alpha_rot_y_angular_normed_} 58 | loss_alpha_rot_y_angular_weights = {LAYER_OUTPUT_NAME_ALPHA_ROT_Y: 1.0} 59 | 60 | # Current multi affinity loss 61 | def loss_multi_affinity__(y_true, y_pred): 62 | loss_conf = l2_loss(y_true[..., 2], y_pred[..., 2]) 63 | loss_orientation = l2_loss(y_true[..., 0], y_pred[..., 0]) + l2_loss( 64 | y_true[..., 1], y_pred[..., 1] 65 | ) 66 | return loss_conf + loss_orientation 67 | # return l2_loss(y_true, y_pred) 68 | # def loss_multi_affinity__(y_true, y_pred): 69 | 70 | # loss_conf = tf.reduce_sum(l2_loss(y_true[..., 2:], y_pred[..., 2:]), 1) 71 | 72 | # loss_orientation = l2_loss(y_true[..., 0], 
y_pred[..., 0]) + l2_loss(y_true[..., 1], y_pred[..., 1]) 73 | 74 | # # print(f'shape of loss_conf:{loss_conf.shape}\nshape of loss_orientation: {loss_orientation.shape}') 75 | 76 | # return loss_conf + loss_orientation 77 | 78 | 79 | loss_multibin = {LAYER_OUTPUT_NAME_MULTIBIN: loss_multi_affinity__} 80 | loss_multibin_weights = {LAYER_OUTPUT_NAME_MULTIBIN: 1.0} 81 | 82 | 83 | def loss_single_bin_l2_(y_true, y_pred): 84 | return l2_loss(y_true, y_pred) 85 | 86 | 87 | def loss_single_bin_angular_(y_true, y_pred): 88 | pass  # angular variant not implemented; single-bin training uses loss_single_bin_l2_ 89 | 90 | 91 | loss_single_bin = {LAYER_OUTPUT_NAME_SINGLE_BIN: loss_single_bin_l2_} 92 | loss_single_bin_weights = {LAYER_OUTPUT_NAME_SINGLE_BIN: 1.0} 93 | 94 | 95 | def loss_voting_bin_(y_true, y_pred): 96 | return l2_loss(y_true, y_pred) 97 | 98 | 99 | loss_voting_bin = {LAYER_OUTPUT_NAME_VOTING_BIN: loss_voting_bin_} 100 | loss_voting_bin_weights = {LAYER_OUTPUT_NAME_VOTING_BIN: 1.0} 101 | 102 | def loss_exp_A_(y_true, y_pred): 103 | return l2_loss(y_true, y_pred) 104 | 105 | loss_exp_A = {LAYER_OUTPUT_NAME_EXP_A: loss_exp_A_} 106 | loss_exp_A_weights = {LAYER_OUTPUT_NAME_EXP_A: 1.0} 107 | 108 | 109 | def get_loss_params(orientation, use_angular_loss): 110 | if orientation == 'tricosine': 111 | return loss_tricosine, loss_tricosine_weights 112 | elif orientation == 'alpha' or orientation == 'rot-y': 113 | # honor the angular-loss flag for the plain angle targets 114 | if use_angular_loss: 115 | return loss_alpha_rot_y_angular, loss_alpha_rot_y_angular_weights 116 | return loss_alpha_rot_y, loss_alpha_rot_y_weights 117 | elif orientation == 'multibin': 118 | return loss_multibin, loss_multibin_weights 119 | elif orientation == 'voting-bin': 120 | return loss_voting_bin, loss_voting_bin_weights 121 | elif orientation == 'single-bin': 122 | return loss_single_bin, loss_single_bin_weights 123 | elif orientation == 'exp-A': 124 | return loss_exp_A, loss_exp_A_weights 125 | else: 126 | raise Exception('Incorrect orientation type for loss function') 127 | -------------------------------------------------------------------------------- /model/metrics.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | from orientation_converters import * 3 | from orientation_converters import SHAPE_TRICOSINE, SHAPE_SINGLE_BIN, SHAPE_VOTING_BIN 4 | 5 | 6 | TF_TYPE = tf.dtypes.float32 7 | 8 | # Stateful metric over the entire dataset. 9 | # Because metrics are evaluated for each batch during training and evaluation, 10 | # this metric will keep track of average accuracy over the entire dataset, 11 | # not the average accuracy of each batch. 
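# Concretely, each (y_true, y_pred) pair is converted back to an alpha angle
# in radians and scored as 0.5 * (cos(alpha_true - alpha_pred) + 1): 1.0 for
# a perfect prediction, 0.0 when the prediction is off by pi. result() then
# averages this score over every pair seen so far, not just the last batch.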
12 | class OrientationAccuracy(tf.keras.metrics.Metric): 13 | 14 | # Create the state variables in __init__ 15 | def __init__(self, orientation_type, name='orientation_accuracy', **kwargs): 16 | super(OrientationAccuracy, self).__init__(name=name, **kwargs) 17 | # internal state variables 18 | self.orientation_type = orientation_type 19 | self.reset_state() 20 | 21 | def sum_angle_accuracy(self, alpha_true, alpha_pred): 22 | alpha_delta = alpha_true - alpha_pred 23 | orientation_accuracies = 0.5 * (tf.math.cos(alpha_delta) + 1.0) 24 | return tf.math.reduce_sum(orientation_accuracies) 25 | 26 | def convert_to_radians(self, tensor): 27 | # if orientation type is already 'alpha' or 'rot_y', no need to change 28 | if self.orientation_type in ['rot-y', 'alpha']: 29 | return angle_normed_to_radians(tensor) 30 | elif self.orientation_type == 'multibin': 31 | # return batch_multibin_to_batch_radians(tensor) 32 | return batch_multi_affinity_to_radians(tensor) 33 | else: 34 | return self.recursively_convert_to_radians(tensor) 35 | 36 | @tf.autograph.experimental.do_not_convert 37 | def recursively_convert_to_radians(self, tensor): 38 | # recursively unpacks tensor until the tensor dimension is same shape as orientation_converters 39 | tensor_shape = tensor.get_shape() 40 | arr = tensor.numpy() 41 | output_shape = get_output_shape_dict()[str(self.orientation_type)] 42 | if self.orientation_type == 'tricosine': 43 | if tensor_shape == SHAPE_TRICOSINE: 44 | alpha = tricosine_to_radians(arr) 45 | return tf.constant(alpha, dtype=TF_TYPE) 46 | elif len(tensor_shape) > len(SHAPE_TRICOSINE): 47 | return tf.stack( 48 | [ 49 | self.recursively_convert_to_radians(un_packed_tensor) 50 | for un_packed_tensor in tf.unstack(tensor) 51 | ] 52 | ) 53 | # elif self.orientation_type == 'multibin': 54 | # if tensor_shape == SHAPE_MULTIBIN: 55 | # radians = multibin_orientation_confidence_to_radians(tensor[..., :2], tensor[..., 2:]) 56 | # return tf.constant(radians, dtype=TF_TYPE) 57 | # elif len(tensor_shape) > len(SHAPE_MULTIBIN): 58 | # return tf.stack([self.recursively_convert_to_radians(un_packed_tensor) 59 | # for un_packed_tensor in tf.unstack(tensor)]) 60 | elif self.orientation_type == 'voting-bin': 61 | if tensor_shape == SHAPE_VOTING_BIN: 62 | alpha = voting_bin_to_radians(arr) 63 | return tf.constant(alpha, dtype=TF_TYPE) 64 | elif len(tensor_shape) > len(SHAPE_VOTING_BIN): 65 | return tf.stack( 66 | [ 67 | self.recursively_convert_to_radians(un_packed_tensor) 68 | for un_packed_tensor in tf.unstack(tensor) 69 | ] 70 | ) 71 | elif self.orientation_type == 'single-bin': 72 | if tensor_shape == SHAPE_SINGLE_BIN: 73 | alpha = single_bin_to_radians(arr) 74 | return tf.constant(alpha, dtype=TF_TYPE) 75 | elif len(tensor_shape) > len(SHAPE_SINGLE_BIN): 76 | return tf.stack( 77 | [ 78 | self.recursively_convert_to_radians(un_packed_tensor) 79 | for un_packed_tensor in tf.unstack(tensor) 80 | ] 81 | ) 82 | elif self.orientation_type == 'exp-A': 83 | if tensor_shape == output_shape: 84 | alpha = expA_to_radians(arr) 85 | return tf.constant(alpha, dtype=TF_TYPE) 86 | elif len(tensor_shape) > len(output_shape): 87 | return tf.stack( 88 | [ 89 | self.recursively_convert_to_radians(un_packed_tensor) 90 | for un_packed_tensor in tf.unstack(tensor) 91 | ] 92 | ) 93 | else: 94 | raise Exception("Invalid self.orientation_type: " + self.orientation_type) 95 | 96 | def update_state(self, y_true, y_pred, sample_weight=None): 97 | # Update the variables given y_true and y_pred in update_state() 98 | # convert to alphas using 
orientation_converters and calculate the batch_accuracies 99 | alpha_true = self.convert_to_radians(y_true) 100 | alpha_pred = self.convert_to_radians(y_pred) 101 | batch_sum_accuracy = tf.cast( 102 | self.sum_angle_accuracy(alpha_true, alpha_pred), tf.float32 103 | ) 104 | 105 | # update the cur_accuracy 106 | self.sum_accuracy.assign_add(batch_sum_accuracy) 107 | self.num_pairs.assign_add(y_pred.get_shape()[0]) 108 | 109 | # Return the metric result in result() 110 | def result(self): 111 | return tf.math.divide(self.sum_accuracy, tf.cast(self.num_pairs, dtype=TF_TYPE)) 112 | 113 | # Reset state 114 | def reset_state(self): 115 | self.num_pairs = tf.Variable( 116 | 0, dtype=tf.dtypes.int32 117 | ) # num of pairs of y_true, y_pred 118 | # sum of accuracies for each pair of y_true, y_pred 119 | self.sum_accuracy = tf.Variable(0.0, dtype=TF_TYPE) 120 | -------------------------------------------------------------------------------- /model/positional_encoder.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | 4 | # generates and returns a position encoding matrix in numpy 5 | def get_2d_pos_enc(height, width, n_channels): 6 | """ 7 | :param n_channels: number of pos_enc channels 8 | :param height: height of the image 9 | :param width: width of the image 10 | :return: (height, width, n_channels) position encoding matrix 11 | """ 12 | pe = np.empty(shape=(n_channels, height, width)) 13 | 14 | d_model = int(n_channels / 2) 15 | div_term = np.exp( 16 | np.arange(0.0, d_model, 2) * -(math.log(10000.0) / d_model) 17 | ) # (n_channels/2) 18 | pos_w = np.expand_dims(np.arange(0.0, width), axis=1) 19 | pos_h = np.expand_dims(np.arange(0.0, height), axis=1) 20 | 21 | pe[0:d_model:2, :, :] = np.expand_dims( 22 | np.repeat(np.sin(pos_w * div_term).T, height, axis=0), axis=0 23 | ) 24 | pe[1:d_model:2, :, :] = np.expand_dims( 25 | np.repeat(np.cos(pos_w * div_term).T, height, axis=0), axis=0 26 | ) 27 | pe[d_model::2, :, :] = np.expand_dims( 28 | np.repeat(np.sin(pos_h * div_term), width, axis=1), axis=0 29 | ) 30 | pe[d_model + 1 :: 2, :, :] = np.expand_dims( 31 | np.repeat(np.cos(pos_h * div_term), width, axis=1), axis=0 32 | ) 33 | pe = np.moveaxis(pe, 0, -1) 34 | return pe 35 | -------------------------------------------------------------------------------- /model/tmp_data.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/model/tmp_data.pkl -------------------------------------------------------------------------------- /model/training.py: -------------------------------------------------------------------------------- 1 | from logging import log 2 | from numpy import dtype 3 | from numpy.core.fromnumeric import sort 4 | import tensorflow as tf 5 | from build_model import build_model 6 | from loss_function import get_loss_params 7 | from metrics import OrientationAccuracy 8 | import data_processing as dp 9 | import os, re, argparse, time, sys 10 | from datetime import datetime 11 | import pathlib 12 | import tensorflow as tf 13 | import orientation_converters 14 | from orientation_converters import get_output_shape_dict as output_shape 15 | sys.path.append('../') 16 | import utils.train_utils as train_utils 17 | 18 | 19 | # setup and config gpu 20 | train_utils.setup_gpu() 21 | # Processing argument 22 | parser = argparse.ArgumentParser(description='Training Model') 23 
| args = train_utils.setup_cmd_arg(parser) 24 | BATCH_SIZE = args.batch_size 25 | NUM_EPOCH = args.num_epoch 26 | ORIENTATION = args.orientation 27 | KITTI_DIR = args.kitti_dir 28 | WEIGHT_DIR_ROOT = args.weight_dir 29 | LOG_DIR_ROOT = args.log_dir 30 | VAL_SPLIT = args.val_split 31 | PREDICTION_TARGET = args.predict 32 | RESUME = args.resume 33 | ADD_POS_ENC = args.add_pos_enc 34 | TRAINING_RECORD = pathlib.Path(args.training_record) 35 | ANGULAR_LOSS = args.use_angular_loss 36 | ADD_DEPTH_MAP = args.add_depth_map 37 | DEPTH_PATH_DIR = os.path.join(KITTI_DIR, "training/predict_depth") 38 | LABEL_DIR = os.path.join(KITTI_DIR, 'training/label_2/') 39 | IMG_DIR = os.path.join(KITTI_DIR, 'training/image_2/') 40 | 41 | 42 | if __name__ == "__main__": 43 | # checking if receiving valid arguments 44 | train_utils.check_args(args, DEPTH_PATH_DIR, LABEL_DIR, IMG_DIR) 45 | # get training starting time and construct stamps 46 | start_time = time.time() 47 | timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + '_' + str(int(start_time)) 48 | training_stamp = f'{PREDICTION_TARGET}_{ORIENTATION}' 49 | if ADD_POS_ENC: 50 | training_stamp += "_with_pos_enc" 51 | if ADD_DEPTH_MAP: 52 | training_stamp += "_with_depth_map" 53 | training_stamp += f"_{timestamp}" 54 | print(f'training stamp with timestamp: {training_stamp}') 55 | # format for .h5 weight file 56 | # old weight_format = 'epoch-{epoch:02d}-loss-{loss:.4f}-val_loss-{val_loss:.4f}.h5' 57 | weight_format = 'epoch-{epoch:02d}-val_acc-{val_orientation_accuracy:.4f}-train_acc-{orientation_accuracy:.4f}-val_loss-{val_loss:.4f}-train_loss-{loss:.4f}.h5' 58 | weights_directory = TRAINING_RECORD / 'weights' if not WEIGHT_DIR_ROOT else WEIGHT_DIR_ROOT 59 | logs_directory = TRAINING_RECORD / 'logs' if not LOG_DIR_ROOT else LOG_DIR_ROOT 60 | weights_directory.mkdir(parents=True, exist_ok=True) 61 | logs_directory.mkdir(parents=True, exist_ok=True) 62 | init_epoch = 0 63 | 64 | if not RESUME: 65 | log_dir = logs_directory / training_stamp 66 | log_dir.mkdir(parents=True, exist_ok=True) 67 | checkpoint_dir = weights_directory / training_stamp 68 | checkpoint_dir.mkdir(parents=True, exist_ok=True) 69 | 70 | # model callback config 71 | checkpoint_file_name = checkpoint_dir / weight_format 72 | cp_callback = tf.keras.callbacks.ModelCheckpoint( 73 | filepath=checkpoint_file_name, save_weights_only=True, verbose=1 74 | ) 75 | # tensorboard logs path 76 | tb_log_dir = log_dir / "logs/scalars/" 77 | tb_callback = tf.keras.callbacks.TensorBoard(log_dir=tb_log_dir, histogram_freq=1) 78 | 79 | # Generator config 80 | train_gen = dp.KittiGenerator( 81 | label_dir=LABEL_DIR, 82 | image_dir=IMG_DIR, 83 | batch_size=BATCH_SIZE, 84 | orientation_type=ORIENTATION, 85 | mode='train', 86 | val_split=VAL_SPLIT, 87 | prediction_target=PREDICTION_TARGET, 88 | add_pos_enc=ADD_POS_ENC, 89 | add_depth_map=ADD_DEPTH_MAP, 90 | ) 91 | val_gen = dp.KittiGenerator( 92 | label_dir=LABEL_DIR, 93 | image_dir=IMG_DIR, 94 | batch_size=BATCH_SIZE, 95 | orientation_type=ORIENTATION, 96 | mode='val', 97 | val_split=VAL_SPLIT, 98 | all_objs=train_gen.all_objs, 99 | prediction_target=PREDICTION_TARGET, 100 | add_pos_enc=ADD_POS_ENC, 101 | add_depth_map=ADD_DEPTH_MAP, 102 | ) 103 | # Building Model 104 | n_channel = 3 105 | if ADD_DEPTH_MAP and ADD_POS_ENC: 106 | n_channel = 7 107 | elif ADD_POS_ENC: 108 | n_channel = 6 109 | elif ADD_DEPTH_MAP: 110 | n_channel = 4 111 | height = dp.CROP_RESIZE_H 112 | width = dp.CROP_RESIZE_W 113 | model = build_model(ORIENTATION, height, width, n_channel) 
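    # (Added note, an assumption based on the channel counts above: the data
    #  generator is expected to stack RGB (3 channels), the positional
    #  encoding from positional_encoder.get_2d_pos_enc (3 channels), and the
    #  predicted depth map (1 channel), giving 3, 4, 6, or 7 input channels.)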
114 | 115 | loss_func, loss_weights = get_loss_params(ORIENTATION, ANGULAR_LOSS) 116 | 117 | model.compile( 118 | loss=loss_func, 119 | loss_weights=loss_weights, 120 | optimizer='adam', 121 | metrics=OrientationAccuracy(ORIENTATION), 122 | run_eagerly=True 123 | ) 124 | 125 | # early stop callback and accuracy callback 126 | # early_stop_callback = tf.keras.callbacks.EarlyStopping( 127 | # monitor='val_loss', patience=20) 128 | if RESUME: 129 | latest_training_dir, latest_epoch, latest_weight = train_utils.find_latest_epoch_and_weights(weights_directory, verbose = True) 130 | init_epoch = int(latest_epoch) 131 | if not init_epoch: 132 | raise Exception("Fail to match epoch number") 133 | if init_epoch == 1: 134 | raise Exception("No existing record found!") 135 | if not os.path.isfile(latest_weight): 136 | raise FileNotFoundError( 137 | f'stored weights directory "{latest_weight}" is not a valid file' 138 | ) 139 | model.load_weights(latest_weight) 140 | # overwrite tensorboard callback 141 | print(f'current log directory: {logs_directory}') 142 | tb_log_dir = logs_directory / latest_training_dir.name / "logs" / "scalars" 143 | if not tb_log_dir.is_dir(): 144 | raise FileNotFoundError( 145 | f'tensorboard log directory "{tb_log_dir}" is not a valid directory' 146 | ) 147 | tb_callback = tf.keras.callbacks.TensorBoard(log_dir=tb_log_dir, histogram_freq=1) 148 | # overwrite call back directory 149 | cp_callback_file = weights_directory / latest_training_dir / weight_format 150 | cp_callback = tf.keras.callbacks.ModelCheckpoint( 151 | filepath=cp_callback_file, save_weights_only=True, verbose=1 152 | ) 153 | 154 | train_history = model.fit( 155 | x=train_gen, 156 | epochs=NUM_EPOCH, 157 | verbose=1, 158 | validation_data=val_gen, 159 | callbacks=[tb_callback, cp_callback], 160 | initial_epoch=init_epoch, 161 | use_multiprocessing = True, 162 | workers = 8 163 | ) 164 | 165 | print('Training Finished. 
Weights and history are saved under directory:', WEIGHT_DIR_ROOT) 166 | print('Total training time is', train_utils.timer(start_time, time.time())) 167 | -------------------------------------------------------------------------------- /model/training.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | python3 training.py \ 3 | --predict rot-y \ 4 | --converter exp-A \ 5 | --epoch 100 \ 6 | --batch_size 25 \ 7 | --kitti_dir "/home/siyuan/dataset/kitti" \ 8 | --training_record "/home/siyuan/fire/kitti_orientation/weights/experimentC" \ 9 | --resume True 10 | 11 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # ===== INSTALL ===== 2 | # `npm i -g pyright` 3 | # `pip install -U black pylint` 4 | # ===== HOW TO USE ===== 5 | # `black *.py` 6 | # `pyright *.py` 7 | # `pylint *.py` 8 | 9 | 10 | 11 | # code formatter 12 | [tool.black] 13 | experimental-string-processing = true 14 | skip-string-normalization = true 15 | 16 | line-length = 95 # Also remember to change this in [pylint] 17 | 18 | workers = 4 19 | fast = true 20 | -------------------------------------------------------------------------------- /utils/Exp-A_gt_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/utils/Exp-A_gt_0.png -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/umd-fire-coml/survey-orientation-representations/01db1eee2f0f65ad86ef018b4f44844f5ca03f28/utils/__init__.py -------------------------------------------------------------------------------- /utils/convert_to_csv.py: -------------------------------------------------------------------------------- 1 | # This Python script converts TensorBoard data to CSV files 2 | 3 | # access to tensorboard https://tensorboard.dev/experiment/JfMZJUwdReC8FPp83s3RDg/ 4 | 5 | import pandas as pd 6 | import os 7 | import tensorboard as tb 8 | import pathlib 9 | 10 | # TensorBoard experiment IDs from earlier runs, kept for reference: multi 2 bins 'Goh3jdHwQpedTjWqAxsgoQ'; multi affinity / depth + pos_enc '5Jh1z944SBCGobZLLmQdwg'; depth 'K1NKDNUYTqK9GaGVtWvnCA', 'w5mbDYCmS5Oz0gYqbw1qLA'; 11 | # angular loss 'n05xObObQU24MjsM7KnRRQ', 'w5mbDYCmS5Oz0gYqbw1qLA', 'zgVmzRRNSa2q2smYZClHjQ'; multi 4 bins 'iJvlXxzgRFKSZwQT5JefxA'; exp-A '1mnIdo5MToy3bLZxfDunDw' 12 | experiment_id = 'RTaW72EnQpy00g8LpsXFvA'  # exp-C 13 | csv_file_dir = 'csv_output' 14 | csv_file_prefix = 'exp-C' 15 | 16 | 17 | def clean_data(input_df, csv_name): 18 | # remove accuracy tag and wall time columns 19 | input_df = input_df.drop(columns=['tag', 'wall_time']) 20 | # remove data in "run" columns 21 | input_df['run'] = input_df['run'].str.extract(r'(\D*)_\d\d\d\d') 22 | cleaned_df = pd.pivot_table(input_df, values='value', index='step', columns='run') 23 | # export to csv 24 | output_dir = pathlib.Path(csv_file_dir) 25 | cleaned_df.to_csv(output_dir / f'{csv_file_prefix}_{csv_name}', index=True) 26 | print("Finish Exporting ", csv_name) 27 | 28 | 29 | if __name__ == '__main__': 30 | # retrieve tensorboard data by experiment_id 31 | experiment = tb.data.experimental.ExperimentFromDev(experiment_id) 32 | df = experiment.get_scalars(include_wall_time=True) 33 | 
print(df["tag"].unique()) 34 | # split into loss and accuracy datafram 35 | df_acc = df[df['tag'] == 'epoch_orientation_accuracy'] 36 | df_loss = df[df['tag'] == 'epoch_loss'] 37 | # further split into validation accuracy, training accuracy, validation loss and training loss 38 | ''' 39 | val_acc = df_acc[df.run.str.endswith("\\validation")] 40 | train_acc = df_acc[df.run.str.endswith("\\train")] 41 | val_loss = df_loss[df.run.str.endswith("\\validation")] 42 | train_loss = df_loss[df.run.str.endswith("\\train")] 43 | ''' 44 | val_acc = df_acc[df['run'].str.endswith("validation")] 45 | train_acc = df_acc[df['run'].str.endswith("train")] 46 | val_loss = df_loss[df['run'].str.endswith("validation")] 47 | train_loss = df_loss[df['run'].str.endswith("train")] 48 | 49 | process_queue = [ 50 | [val_acc, "cleaned_validation_accuracy.csv"], 51 | [train_acc, 'cleaned_training_accuracy.csv'], 52 | [val_loss, "cleaned_validation_loss.csv"], 53 | [train_loss, "cleaned_training_loss.csv"], 54 | ] 55 | for (dataframe, export_file_name) in process_queue: 56 | clean_data(dataframe, export_file_name) 57 | print("Finish Processing Queue") 58 | -------------------------------------------------------------------------------- /utils/csv_output/angular_loss_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y_ 2 | 0,0.504248321056366 3 | 1,0.5025882124900818 4 | 2,0.4912187457084656 5 | 3,0.4860266447067261 6 | 4,0.5007277131080627 7 | 5,0.5241675972938538 8 | 6,0.5454705953598022 9 | 7,0.5652396082878113 10 | 8,0.5907096266746521 11 | 9,0.6133077144622803 12 | 10,0.6335776448249817 13 | 11,0.6529375910758972 14 | 12,0.6752609610557556 15 | 13,0.6959012746810913 16 | 14,0.7156404852867126 17 | 15,0.7280368804931641 18 | 16,0.7450242638587952 19 | 17,0.7609291672706604 20 | 18,0.7749764323234558 21 | 19,0.7813233137130737 22 | 20,0.7938706278800964 23 | 21,0.8019059896469116 24 | 22,0.8166964650154114 25 | 23,0.7913317680358887 26 | 24,0.785425066947937 27 | 25,0.8400245904922485 28 | 26,0.854489803314209 29 | 27,0.8458760380744934 30 | 28,0.8445354104042053 31 | 29,0.8381197452545166 32 | 30,0.8490833640098572 33 | 31,0.8541801571846008 34 | 32,0.8558889031410217 35 | 33,0.8558452129364014 36 | 34,0.8821500539779663 37 | 35,0.8610474467277527 38 | 36,0.8666417002677917 39 | 37,0.886346161365509 40 | 38,0.8917025327682495 41 | 39,0.8633013963699341 42 | 40,0.8587505221366882 43 | 41,0.8966394662857056 44 | 42,0.9188207983970642 45 | 43,0.9146217107772827 46 | 44,0.8724303841590881 47 | 45,0.895306408405304 48 | 46,0.9066863059997559 49 | 47,0.9028963446617126 50 | 48,0.8944008946418762 51 | 49,0.9084211587905884 52 | 50,0.8991260528564453 53 | 51,0.907235324382782 54 | 52,0.922489583492279 55 | 53,0.8908874988555908 56 | 54,0.9267019629478455 57 | 55,0.9251587986946106 58 | 56,0.9054123759269714 59 | 57,0.8868712782859802 60 | 58,0.9242310523986816 61 | 59,0.9374754428863525 62 | 60,0.9387658834457397 63 | 61,0.8430939316749573 64 | 62,0.9239190220832825 65 | 63,0.9399462342262268 66 | 64,0.9432892203330994 67 | 65,0.9412984251976013 68 | 66,0.9196323156356812 69 | 67,0.9243893027305603 70 | 68,0.9335126280784607 71 | 69,0.9351579546928406 72 | 70,0.9429674744606018 73 | 71,0.9290556311607361 74 | 72,0.9073454737663269 75 | 73,0.9422027468681335 76 | 74,0.9328165650367737 77 | 75,0.9264193773269653 78 | 76,0.9439099431037903 79 | 77,0.9313366413116455 80 | 78,0.9356454014778137 81 | 79,0.9321760535240173 82 | 
80,0.9306778311729431 83 | 81,0.9466137290000916 84 | 82,0.943120539188385 85 | 83,0.9291098713874817 86 | 84,0.939127504825592 87 | 85,0.9363754391670227 88 | 86,0.9462277293205261 89 | 87,0.9312967658042908 90 | 88,0.9316602945327759 91 | 89,0.9516134262084961 92 | 90,0.9607097506523132 93 | 91,0.9389271140098572 94 | 92,0.935572624206543 95 | 93,0.9489071369171143 96 | 94,0.9575835466384888 97 | 95,0.9524654746055603 98 | 96,0.9303984045982361 99 | 97,0.9462411403656006 100 | 98,0.9540814161300659 101 | 99,0.9623716473579407 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_loss_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y_ 2 | 0,1.4529091119766235 3 | 1,1.3520869016647339 4 | 2,1.2528306245803833 5 | 3,1.1461665630340576 6 | 4,1.0414702892303467 7 | 5,0.9217432141304016 8 | 6,0.814099133014679 9 | 7,0.7200740575790405 10 | 8,0.6281593441963196 11 | 9,0.5451237559318542 12 | 10,0.4815070927143097 13 | 11,0.4297008216381073 14 | 12,0.3660883903503418 15 | 13,0.32115209102630615 16 | 14,0.280605673789978 17 | 15,0.2536276578903198 18 | 16,0.2241261899471283 19 | 17,0.19503670930862427 20 | 18,0.1759820133447647 21 | 19,0.17022913694381714 22 | 20,0.15561100840568542 23 | 21,0.14322879910469055 24 | 22,0.1251765638589859 25 | 23,0.18313977122306824 26 | 24,0.17024098336696625 27 | 25,0.09787257760763168 28 | 26,0.08585923910140991 29 | 27,0.09382926672697067 30 | 28,0.10059145838022232 31 | 29,0.11118154972791672 32 | 30,0.09849944710731506 33 | 31,0.09629210829734802 34 | 32,0.09320804476737976 35 | 33,0.09477507323026657 36 | 34,0.06571220606565475 37 | 35,0.08748972415924072 38 | 36,0.08508611470460892 39 | 37,0.06466600298881531 40 | 38,0.061030931770801544 41 | 39,0.09975255280733109 42 | 40,0.0964052677154541 43 | 41,0.05668199807405472 44 | 42,0.040246281772851944 45 | 43,0.044178519397974014 46 | 44,0.0883784294128418 47 | 45,0.06104882061481476 48 | 46,0.04968603327870369 49 | 47,0.05678446963429451 50 | 48,0.06665606796741486 51 | 49,0.05123705789446831 52 | 50,0.06105582043528557 53 | 51,0.05404148995876312 54 | 52,0.04004952684044838 55 | 53,0.07080269604921341 56 | 54,0.03724351525306702 57 | 55,0.03896912559866905 58 | 56,0.05875426530838013 59 | 57,0.08044879138469696 60 | 58,0.03892214596271515 61 | 59,0.0300257820636034 62 | 60,0.029751591384410858 63 | 61,0.1624239832162857 64 | 62,0.03983994200825691 65 | 63,0.028571389615535736 66 | 64,0.02698703296482563 67 | 65,0.02845434844493866 68 | 66,0.04773000627756119 69 | 67,0.044043708592653275 70 | 68,0.034815914928913116 71 | 69,0.033574141561985016 72 | 70,0.027686534449458122 73 | 71,0.04217524453997612 74 | 72,0.06284783035516739 75 | 73,0.028134748339653015 76 | 74,0.03907924145460129 77 | 75,0.04213878512382507 78 | 76,0.02766307257115841 79 | 77,0.039759404957294464 80 | 78,0.03625928983092308 81 | 79,0.039729245007038116 82 | 80,0.04033144935965538 83 | 81,0.026651259511709213 84 | 82,0.032626084983348846 85 | 83,0.04274502024054527 86 | 84,0.032374411821365356 87 | 85,0.037577152252197266 88 | 86,0.027025675401091576 89 | 87,0.044170886278152466 90 | 88,0.04175424203276634 91 | 89,0.02386896312236786 92 | 90,0.017488334327936172 93 | 91,0.03618200868368149 94 | 92,0.04046117514371872 95 | 93,0.02634427510201931 96 | 94,0.01946616731584072 97 | 95,0.025607969611883163 98 | 96,0.042608000338077545 99 | 97,0.029677608981728554 100 | 98,0.023075420409440994 101 | 
99,0.017000863328576088 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_loss_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y_ 2 | 0,0.4955681264400482 3 | 1,0.4950912594795227 4 | 2,0.5195824503898621 5 | 3,0.49639689922332764 6 | 4,0.5057838559150696 7 | 5,0.4862247407436371 8 | 6,0.5418111681938171 9 | 7,0.5562462210655212 10 | 8,0.6036317944526672 11 | 9,0.6208523511886597 12 | 10,0.6435024738311768 13 | 11,0.6444815397262573 14 | 12,0.6506688594818115 15 | 13,0.6921549439430237 16 | 14,0.6658746600151062 17 | 15,0.7313348054885864 18 | 16,0.7319267988204956 19 | 17,0.7003620266914368 20 | 18,0.7428123354911804 21 | 19,0.7068580985069275 22 | 20,0.7385614514350891 23 | 21,0.7542026042938232 24 | 22,0.7668461203575134 25 | 23,0.6855390071868896 26 | 24,0.7873578071594238 27 | 25,0.800115168094635 28 | 26,0.7885480523109436 29 | 27,0.7752568125724792 30 | 28,0.7872125506401062 31 | 29,0.7920073866844177 32 | 30,0.7576708793640137 33 | 31,0.8144089579582214 34 | 32,0.796413779258728 35 | 33,0.8144843578338623 36 | 34,0.8208973407745361 37 | 35,0.7951667904853821 38 | 36,0.8150101900100708 39 | 37,0.8324528336524963 40 | 38,0.799235463142395 41 | 39,0.7069006562232971 42 | 40,0.824252188205719 43 | 41,0.8424709439277649 44 | 42,0.8365665078163147 45 | 43,0.8197453022003174 46 | 44,0.8046183586120605 47 | 45,0.8245127201080322 48 | 46,0.8415224552154541 49 | 47,0.8209959268569946 50 | 48,0.8277053833007812 51 | 49,0.835818886756897 52 | 50,0.8240928649902344 53 | 51,0.845435380935669 54 | 52,0.7479183673858643 55 | 53,0.8466465473175049 56 | 54,0.8531203866004944 57 | 55,0.8323982357978821 58 | 56,0.608557403087616 59 | 57,0.8438522219657898 60 | 58,0.8550332188606262 61 | 59,0.8655673861503601 62 | 60,0.8160346150398254 63 | 61,0.8389930725097656 64 | 62,0.863530695438385 65 | 63,0.8637052178382874 66 | 64,0.863852858543396 67 | 65,0.8736580014228821 68 | 66,0.8305009007453918 69 | 67,0.8298125863075256 70 | 68,0.8380216956138611 71 | 69,0.8564708232879639 72 | 70,0.8681747913360596 73 | 71,0.7848761677742004 74 | 72,0.8661102056503296 75 | 73,0.8653983473777771 76 | 74,0.81748366355896 77 | 75,0.8637294769287109 78 | 76,0.8667978048324585 79 | 77,0.8650375008583069 80 | 78,0.8576409220695496 81 | 79,0.8466237187385559 82 | 80,0.8547138571739197 83 | 81,0.8701891303062439 84 | 82,0.8297528028488159 85 | 83,0.8414587378501892 86 | 84,0.8600905537605286 87 | 85,0.8390781283378601 88 | 86,0.8690242767333984 89 | 87,0.8405621647834778 90 | 88,0.8684183359146118 91 | 89,0.8660440444946289 92 | 90,0.8738062381744385 93 | 91,0.860980749130249 94 | 92,0.8653450012207031 95 | 93,0.8790399432182312 96 | 94,0.8875169157981873 97 | 95,0.8274993896484375 98 | 96,0.8514635562896729 99 | 97,0.8772455453872681 100 | 98,0.8707215785980225 101 | 99,0.8905285000801086 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_loss_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y_ 2 | 0,1.4984192848205566 3 | 1,1.5034719705581665 4 | 2,1.5977965593338013 5 | 3,1.6338386535644531 6 | 4,1.1271358728408813 7 | 5,1.0203967094421387 8 | 6,0.9468104839324951 9 | 7,0.814285159111023 10 | 8,0.7300958633422852 11 | 9,0.6531491875648499 12 | 10,0.6233357191085815 13 | 11,0.5637027025222778 14 | 12,0.6581941843032837 15 | 
13,0.5143630504608154 16 | 14,0.534142017364502 17 | 15,0.46582698822021484 18 | 16,0.460336834192276 19 | 17,0.5368232131004333 20 | 18,0.4722960889339447 21 | 19,0.46087267994880676 22 | 20,0.40397465229034424 23 | 21,0.5013206601142883 24 | 22,0.4313608705997467 25 | 23,0.661666989326477 26 | 24,0.39699694514274597 27 | 25,0.3629196584224701 28 | 26,0.37686413526535034 29 | 27,0.40377557277679443 30 | 28,0.3992716073989868 31 | 29,0.3952227532863617 32 | 30,0.8439855575561523 33 | 31,0.3525540232658386 34 | 32,0.3958984315395355 35 | 33,0.3788067400455475 36 | 34,0.3521164655685425 37 | 35,0.36416521668434143 38 | 36,0.35921910405158997 39 | 37,0.32152602076530457 40 | 38,0.4378245770931244 41 | 39,0.5776848196983337 42 | 40,0.37261489033699036 43 | 41,0.30711522698402405 44 | 42,0.3398797810077667 45 | 43,0.35604381561279297 46 | 44,0.39364102482795715 47 | 45,0.3335343599319458 48 | 46,0.3395850360393524 49 | 47,0.38823357224464417 50 | 48,0.35938891768455505 51 | 49,0.3742044270038605 52 | 50,0.35230767726898193 53 | 51,0.33136388659477234 54 | 52,0.5724770426750183 55 | 53,0.31578528881073 56 | 54,0.3145599067211151 57 | 55,0.34739434719085693 58 | 56,1.0426265001296997 59 | 57,0.34581851959228516 60 | 58,0.3072831630706787 61 | 59,0.29726436734199524 62 | 60,0.39316555857658386 63 | 61,0.3155578076839447 64 | 62,0.29752975702285767 65 | 63,0.299716979265213 66 | 64,0.2899693250656128 67 | 65,0.2823318541049957 68 | 66,0.3852212131023407 69 | 67,0.36249926686286926 70 | 68,0.3436923325061798 71 | 69,0.3329448699951172 72 | 70,0.3172379434108734 73 | 71,0.49822548031806946 74 | 72,0.3117760121822357 75 | 73,0.27858448028564453 76 | 74,0.4949460029602051 77 | 75,0.292584627866745 78 | 76,0.3238256275653839 79 | 77,0.333413302898407 80 | 78,0.31140604615211487 81 | 79,0.3216639757156372 82 | 80,0.33099961280822754 83 | 81,0.32200542092323303 84 | 82,0.3949887752532959 85 | 83,0.3647134006023407 86 | 84,0.3522370755672455 87 | 85,0.4076969027519226 88 | 86,0.3224846124649048 89 | 87,0.34089744091033936 90 | 88,0.35761621594429016 91 | 89,1.046319842338562 92 | 90,4.794926643371582 93 | 91,0.3608977794647217 94 | 92,0.3528764545917511 95 | 93,0.3008458614349365 96 | 94,0.325351744890213 97 | 95,0.43323978781700134 98 | 96,0.39751940965652466 99 | 97,0.30406564474105835 100 | 98,0.29590100049972534 101 | 99,0.2742185592651367 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_new_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y 2 | 0,0.5013235211372375 3 | 1,0.4968540668487549 4 | 2,0.48472845554351807 5 | 3,0.4826815724372864 6 | 4,0.5012570023536682 7 | 5,0.5203861594200134 8 | 6,0.5425650477409363 9 | 7,0.564710795879364 10 | 8,0.592724621295929 11 | 9,0.619809091091156 12 | 10,0.6426357626914978 13 | 11,0.6625778079032898 14 | 12,0.6872155070304871 15 | 13,0.7011244297027588 16 | 14,0.7242547869682312 17 | 15,0.7382166981697083 18 | 16,0.7528338432312012 19 | 17,0.7647994160652161 20 | 18,0.7763652205467224 21 | 19,0.7871938347816467 22 | 20,0.7939792275428772 23 | 21,0.8094677925109863 24 | 22,0.8107476234436035 25 | 23,0.8162111043930054 26 | 24,0.8344674706459045 27 | 25,0.8310880661010742 28 | 26,0.8416420817375183 29 | 27,0.8472343683242798 30 | 28,0.852292001247406 31 | 29,0.8417804837226868 32 | 30,0.8638180494308472 33 | 31,0.8557329177856445 34 | 32,0.8622496724128723 35 | 33,0.8819885849952698 36 | 34,0.8882120847702026 37 | 
35,0.8773617744445801 38 | 36,0.8549951314926147 39 | 37,0.8917603492736816 40 | 38,0.8937589526176453 41 | 39,0.8761247396469116 42 | 40,0.8888033628463745 43 | 41,0.9081621766090393 44 | 42,0.8933471441268921 45 | 43,0.8884556293487549 46 | 44,0.9049285650253296 47 | 45,0.9078072309494019 48 | 46,0.8941872119903564 49 | 47,0.9089944362640381 50 | 48,0.9067202806472778 51 | 49,0.9066126942634583 52 | 50,0.9235138893127441 53 | 51,0.9039634466171265 54 | 52,0.9095069766044617 55 | 53,0.9155465960502625 56 | 54,0.9166596531867981 57 | 55,0.9038047194480896 58 | 56,0.9335416555404663 59 | 57,0.9354127645492554 60 | 58,0.8984792828559875 61 | 59,0.920810341835022 62 | 60,0.9250268936157227 63 | 61,0.9347450733184814 64 | 62,0.9461701512336731 65 | 63,0.9094690084457397 66 | 64,0.9229007959365845 67 | 65,0.9391629099845886 68 | 66,0.9430568218231201 69 | 67,0.921960711479187 70 | 68,0.9234327077865601 71 | 69,0.9406862854957581 72 | 70,0.9411168694496155 73 | 71,0.9358199238777161 74 | 72,0.9397832751274109 75 | 73,0.9245489239692688 76 | 74,0.925345778465271 77 | 75,0.9343218207359314 78 | 76,0.9444677829742432 79 | 77,0.9529827237129211 80 | 78,0.9240158200263977 81 | 79,0.9411101341247559 82 | 80,0.9380730390548706 83 | 81,0.9350529909133911 84 | 82,0.9540112018585205 85 | 83,0.9495143890380859 86 | 84,0.9325588345527649 87 | 85,0.9485390186309814 88 | 86,0.9456525444984436 89 | 87,0.9321964979171753 90 | 88,0.9420156478881836 91 | 89,0.935049831867218 92 | 90,0.9509822726249695 93 | 91,0.9474825263023376 94 | 92,0.943229615688324 95 | 93,0.9413719177246094 96 | 94,0.9529052972793579 97 | 95,0.953166663646698 98 | 96,0.9422719478607178 99 | 97,0.9326786994934082 100 | 98,0.9539793133735657 101 | 99,0.9600315690040588 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_new_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y 2 | 0,1.4695310592651367 3 | 1,1.3231145143508911 4 | 2,1.2198508977890015 5 | 3,1.106249451637268 6 | 4,1.0031206607818604 7 | 5,0.8922222256660461 8 | 6,0.7752459645271301 9 | 7,0.679674506187439 10 | 8,0.5795271992683411 11 | 9,0.5009099841117859 12 | 10,0.43354594707489014 13 | 11,0.38106393814086914 14 | 12,0.3336460292339325 15 | 13,0.29105687141418457 16 | 14,0.24860478937625885 17 | 15,0.22593775391578674 18 | 16,0.1959194839000702 19 | 17,0.1837722212076187 20 | 18,0.16725270450115204 21 | 19,0.15202677249908447 22 | 20,0.14818407595157623 23 | 21,0.1286437064409256 24 | 22,0.1274559050798416 25 | 23,0.12321224808692932 26 | 24,0.10395948588848114 27 | 25,0.10781269520521164 28 | 26,0.0987592414021492 29 | 27,0.09291936457157135 30 | 28,0.09063117951154709 31 | 29,0.10147914290428162 32 | 30,0.08048718422651291 33 | 31,0.08898002654314041 34 | 32,0.08326136320829391 35 | 33,0.0635242909193039 36 | 34,0.05993073433637619 37 | 35,0.07529586553573608 38 | 36,0.09377641975879669 39 | 37,0.058188602328300476 40 | 38,0.05762188509106636 41 | 39,0.07616406679153442 42 | 40,0.06347828358411789 43 | 41,0.04640449583530426 44 | 42,0.059906892478466034 45 | 43,0.06556576490402222 46 | 44,0.05014214664697647 47 | 45,0.050576210021972656 48 | 46,0.06170646473765373 49 | 47,0.049956582486629486 50 | 48,0.051225289702415466 51 | 49,0.05260779708623886 52 | 50,0.037954822182655334 53 | 51,0.05750284343957901 54 | 52,0.04999784007668495 55 | 53,0.04570631682872772 56 | 54,0.04487986117601395 57 | 55,0.05684419721364975 58 | 
56,0.032012708485126495 59 | 57,0.03131667524576187 60 | 58,0.07106490433216095 61 | 59,0.04115366190671921 62 | 60,0.040070176124572754 63 | 61,0.03158682584762573 64 | 62,0.02434728294610977 65 | 63,0.05971360206604004 66 | 64,0.04161347821354866 67 | 65,0.02936457097530365 68 | 66,0.027028916403651237 69 | 67,0.04448544606566429 70 | 68,0.0435069277882576 71 | 69,0.028602082282304764 72 | 70,0.02854541689157486 73 | 71,0.03405936807394028 74 | 72,0.029885701835155487 75 | 73,0.0446954183280468 76 | 74,0.044087767601013184 77 | 75,0.03601270541548729 78 | 76,0.02725715935230255 79 | 77,0.02135862037539482 80 | 78,0.04694090783596039 81 | 79,0.029603352770209312 82 | 80,0.031964533030986786 83 | 81,0.03682877868413925 84 | 82,0.021047089248895645 85 | 83,0.025877147912979126 86 | 84,0.03805797919631004 87 | 85,0.025122033432126045 88 | 86,0.029070308431982994 89 | 87,0.03974553197622299 90 | 88,0.03304542228579521 91 | 89,0.03692466393113136 92 | 90,0.023813161998987198 93 | 91,0.027097895741462708 94 | 92,0.03069598227739334 95 | 93,0.03228464350104332 96 | 94,0.02357598952949047 97 | 95,0.023467950522899628 98 | 96,0.03229478746652603 99 | 97,0.04130434989929199 100 | 98,0.021619422361254692 101 | 99,0.018208788707852364 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_new_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y 2 | 0,0.502623438835144 3 | 1,0.5074866414070129 4 | 2,0.5059090256690979 5 | 3,0.49718064069747925 6 | 4,0.522676944732666 7 | 5,0.49297505617141724 8 | 6,0.5535367131233215 9 | 7,0.5641736388206482 10 | 8,0.625169575214386 11 | 9,0.5985233783721924 12 | 10,0.6354146003723145 13 | 11,0.6654178500175476 14 | 12,0.6720720529556274 15 | 13,0.7052438855171204 16 | 14,0.7219710946083069 17 | 15,0.7199854850769043 18 | 16,0.7131543159484863 19 | 17,0.6749459505081177 20 | 18,0.7342464327812195 21 | 19,0.7533978819847107 22 | 20,0.7078733444213867 23 | 21,0.7601085901260376 24 | 22,0.7820562124252319 25 | 23,0.774742841720581 26 | 24,0.7643229365348816 27 | 25,0.7835316061973572 28 | 26,0.7837396264076233 29 | 27,0.7972762584686279 30 | 28,0.7852574586868286 31 | 29,0.787767231464386 32 | 30,0.8010711073875427 33 | 31,0.760795533657074 34 | 32,0.804857075214386 35 | 33,0.8121854066848755 36 | 34,0.8280028104782104 37 | 35,0.7618710398674011 38 | 36,0.8118011355400085 39 | 37,0.8201785683631897 40 | 38,0.808265745639801 41 | 39,0.8235337734222412 42 | 40,0.8209035396575928 43 | 41,0.8406409621238708 44 | 42,0.8452603816986084 45 | 43,0.8269098997116089 46 | 44,0.8526175618171692 47 | 45,0.8331819772720337 48 | 46,0.8433047533035278 49 | 47,0.8357546329498291 50 | 48,0.8388782739639282 51 | 49,0.8380843997001648 52 | 50,0.8155855536460876 53 | 51,0.8133977055549622 54 | 52,0.8534029126167297 55 | 53,0.845703661441803 56 | 54,0.8471303582191467 57 | 55,0.854104220867157 58 | 56,0.8634374141693115 59 | 57,0.835771918296814 60 | 58,0.8476243615150452 61 | 59,0.8588671684265137 62 | 60,0.8596782088279724 63 | 61,0.872244656085968 64 | 62,0.8726338148117065 65 | 63,0.8253715634346008 66 | 64,0.8548355102539062 67 | 65,0.8824058175086975 68 | 66,0.8768911957740784 69 | 67,0.8301836848258972 70 | 68,0.8684590458869934 71 | 69,0.8773350119590759 72 | 70,0.8665638566017151 73 | 71,0.8681973218917847 74 | 72,0.867901086807251 75 | 73,0.8606418967247009 76 | 74,0.8465065360069275 77 | 75,0.8749076128005981 78 | 76,0.87406325340271 79 | 
77,0.8924373388290405 80 | 78,0.8596535325050354 81 | 79,0.8658937215805054 82 | 80,0.8547795414924622 83 | 81,0.8842577338218689 84 | 82,0.8811867833137512 85 | 83,0.822113037109375 86 | 84,0.8725407719612122 87 | 85,0.8766664862632751 88 | 86,0.8389716744422913 89 | 87,0.8819721937179565 90 | 88,0.8436711430549622 91 | 89,0.8737813234329224 92 | 90,0.8815782070159912 93 | 91,0.8606937527656555 94 | 92,0.8550257086753845 95 | 93,0.8763505816459656 96 | 94,0.8822418451309204 97 | 95,0.8691402673721313 98 | 96,0.84376060962677 99 | 97,0.8783707022666931 100 | 98,0.8968255519866943 101 | 99,0.8866305351257324 102 | -------------------------------------------------------------------------------- /utils/csv_output/angular_new_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_rot-y 2 | 0,1.5430747270584106 3 | 1,1.568215250968933 4 | 2,1.3089386224746704 5 | 3,1.121673583984375 6 | 4,1.0373845100402832 7 | 5,0.9758307337760925 8 | 6,0.814741849899292 9 | 7,0.738810658454895 10 | 8,0.6186637282371521 11 | 9,0.8252032399177551 12 | 10,0.5601669549942017 13 | 11,0.5494053363800049 14 | 12,0.4997766315937042 15 | 13,0.4638175070285797 16 | 14,0.4256911277770996 17 | 15,0.45712926983833313 18 | 16,0.4817471504211426 19 | 17,0.7049186825752258 20 | 18,0.42977842688560486 21 | 19,0.3872026801109314 22 | 20,0.49167704582214355 23 | 21,0.382004052400589 24 | 22,0.38108572363853455 25 | 23,0.3842446506023407 26 | 24,0.405153751373291 27 | 25,0.3525576889514923 28 | 26,0.39733797311782837 29 | 27,0.3305884003639221 30 | 28,0.37644827365875244 31 | 29,0.36801350116729736 32 | 30,0.3575533330440521 33 | 31,0.37996551394462585 34 | 32,0.3465023338794708 35 | 33,0.3486807346343994 36 | 34,0.32685285806655884 37 | 35,0.44080597162246704 38 | 36,0.3359292447566986 39 | 37,0.3272055387496948 40 | 38,0.3247890770435333 41 | 39,0.36514806747436523 42 | 40,0.3283107876777649 43 | 41,0.32048240303993225 44 | 42,0.3056206703186035 45 | 43,0.32240158319473267 46 | 44,0.2898021340370178 47 | 45,0.3328641355037689 48 | 46,0.3239363729953766 49 | 47,0.3190620541572571 50 | 48,0.3028276264667511 51 | 49,0.3185049593448639 52 | 50,0.29506078362464905 53 | 51,0.39249780774116516 54 | 52,0.32193681597709656 55 | 53,0.2900969386100769 56 | 54,0.30922579765319824 57 | 55,0.29126161336898804 58 | 56,0.2680826485157013 59 | 57,0.29507145285606384 60 | 58,0.3430424928665161 61 | 59,0.3321876525878906 62 | 60,0.30248937010765076 63 | 61,0.2793501317501068 64 | 62,0.2784467041492462 65 | 63,0.3506787121295929 66 | 64,0.31075695157051086 67 | 65,0.27156519889831543 68 | 66,0.2715034782886505 69 | 67,0.4278056025505066 70 | 68,0.27981889247894287 71 | 69,0.2707074284553528 72 | 70,0.2673831880092621 73 | 71,0.28956666588783264 74 | 72,0.30886855721473694 75 | 73,0.32487472891807556 76 | 74,0.3531299829483032 77 | 75,0.27572453022003174 78 | 76,0.2619282007217407 79 | 77,0.2552402913570404 80 | 78,0.3132539987564087 81 | 79,0.3161362409591675 82 | 80,0.2946551442146301 83 | 81,0.25963908433914185 84 | 82,0.26536333560943604 85 | 83,0.39776894450187683 86 | 84,0.3500286638736725 87 | 85,0.29846835136413574 88 | 86,0.3599076271057129 89 | 87,0.2839187979698181 90 | 88,0.3507434129714966 91 | 89,0.3210849463939667 92 | 90,0.26633989810943604 93 | 91,0.301442950963974 94 | 92,0.3248384892940521 95 | 93,0.29561907052993774 96 | 94,0.27176135778427124 97 | 95,0.2930724620819092 98 | 96,0.39013466238975525 99 | 97,0.27610212564468384 100 | 98,0.24536766111850739 
101 | 99,0.2769909203052521 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth+pos_enc_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_pos_enc_with_depth_map 2 | 0,0.5019286274909973 3 | 1,0.5957905054092407 4 | 2,0.6706607341766357 5 | 3,0.7045989632606506 6 | 4,0.7247263789176941 7 | 5,0.7437635064125061 8 | 6,0.7657357454299927 9 | 7,0.7837409377098083 10 | 8,0.8081523776054382 11 | 9,0.8331688642501831 12 | 10,0.8588377237319946 13 | 11,0.8784855604171753 14 | 12,0.9004148244857788 15 | 13,0.9139468669891357 16 | 14,0.9258914589881897 17 | 15,0.9360237121582031 18 | 16,0.9397944211959839 19 | 17,0.9493911266326904 20 | 18,0.955885648727417 21 | 19,0.9618895649909973 22 | 20,0.955045759677887 23 | 21,0.9658722281455994 24 | 22,0.9706549048423767 25 | 23,0.9645416736602783 26 | 24,0.9714709520339966 27 | 25,0.9749064445495605 28 | 26,0.9723249673843384 29 | 27,0.9772944450378418 30 | 28,0.9724729061126709 31 | 29,0.9742136597633362 32 | 30,0.9844686388969421 33 | 31,0.9858437776565552 34 | 32,0.9843611121177673 35 | 33,0.9846099019050598 36 | 34,0.9684745669364929 37 | 35,0.9835737943649292 38 | 36,0.985645055770874 39 | 37,0.9776303172111511 40 | 38,0.9839625358581543 41 | 39,0.9805203080177307 42 | 40,0.9883551001548767 43 | 41,0.9914281964302063 44 | 42,0.9901358485221863 45 | 43,0.9907274842262268 46 | 44,0.9903032779693604 47 | 45,0.9889842867851257 48 | 46,0.9908870458602905 49 | 47,0.98712557554245 50 | 48,0.9896885752677917 51 | 49,0.9879147410392761 52 | 50,0.9876586198806763 53 | 51,0.9939265251159668 54 | 52,0.9933578372001648 55 | 53,0.991568922996521 56 | 54,0.9902616739273071 57 | 55,0.9940009713172913 58 | 56,0.9909957647323608 59 | 57,0.9892624616622925 60 | 58,0.9937770366668701 61 | 59,0.9924342036247253 62 | 60,0.9931078553199768 63 | 61,0.99247807264328 64 | 62,0.9941808581352234 65 | 63,0.9880872964859009 66 | 64,0.9938260316848755 67 | 65,0.9942887425422668 68 | 66,0.9935979843139648 69 | 67,0.992397129535675 70 | 68,0.992669939994812 71 | 69,0.9948652982711792 72 | 70,0.9926344156265259 73 | 71,0.987032413482666 74 | 72,0.9940281510353088 75 | 73,0.9958663582801819 76 | 74,0.995036780834198 77 | 75,0.9947634339332581 78 | 76,0.9916528463363647 79 | 77,0.992535412311554 80 | 78,0.9954563975334167 81 | 79,0.9962247014045715 82 | 80,0.9938072562217712 83 | 81,0.9939221143722534 84 | 82,0.9943260550498962 85 | 83,0.9952303767204285 86 | 84,0.993381142616272 87 | 85,0.99480140209198 88 | 86,0.9947032928466797 89 | 87,0.9957528710365295 90 | 88,0.9942193627357483 91 | 89,0.9949866533279419 92 | 90,0.9944876432418823 93 | 91,0.9950389862060547 94 | 92,0.99500972032547 95 | 93,0.9940201640129089 96 | 94,0.9965003132820129 97 | 95,0.9954636096954346 98 | 96,0.9952881932258606 99 | 97,0.9940229058265686 100 | 98,0.9956180453300476 101 | 99,0.9961059093475342 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth+pos_enc_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_pos_enc_with_depth_map 2 | 0,0.5174271464347839 3 | 1,0.47005265951156616 4 | 2,0.4147830307483673 5 | 3,0.3814946115016937 6 | 4,0.3575378954410553 7 | 5,0.33472397923469543 8 | 6,0.3092934191226959 9 | 7,0.29004162549972534 10 | 8,0.26865649223327637 11 | 9,0.24176672101020813 12 | 10,0.21309509873390198 
13 | 11,0.18740630149841309 14 | 12,0.1606513112783432 15 | 13,0.14326722919940948 16 | 14,0.1279168725013733 17 | 15,0.11373846977949142 18 | 16,0.10787860304117203 19 | 17,0.09377405792474747 20 | 18,0.08401365578174591 21 | 19,0.07536947727203369 22 | 20,0.08579828590154648 23 | 21,0.06949639320373535 24 | 22,0.060988541692495346 25 | 23,0.06967976689338684 26 | 24,0.05840238183736801 27 | 25,0.052420612424612045 28 | 26,0.05727927014231682 29 | 27,0.04808562621474266 30 | 28,0.05551562458276749 31 | 29,0.055178217589855194 32 | 30,0.03590232878923416 33 | 31,0.03384724259376526 34 | 32,0.035649847239255905 35 | 33,0.035166095942258835 36 | 34,0.06162736564874649 37 | 35,0.03744710981845856 38 | 36,0.034199588000774384 39 | 37,0.04573509469628334 40 | 38,0.03608841821551323 41 | 39,0.04164450988173485 42 | 40,0.02889893390238285 43 | 41,0.02192334271967411 44 | 42,0.024564016610383987 45 | 43,0.02327512949705124 46 | 44,0.02297436259686947 47 | 45,0.02604757808148861 48 | 46,0.022860947996377945 49 | 47,0.029050417244434357 50 | 48,0.02530703693628311 51 | 49,0.02735532820224762 52 | 50,0.02782943658530712 53 | 51,0.0166594460606575 54 | 52,0.017724426463246346 55 | 53,0.021307935938239098 56 | 54,0.023158542811870575 57 | 55,0.0163764376193285 58 | 56,0.02149912901222706 59 | 57,0.024429241195321083 60 | 58,0.01614353060722351 61 | 59,0.018650105223059654 62 | 60,0.017478864639997482 63 | 61,0.019185461103916168 64 | 62,0.01540991012006998 65 | 63,0.027249271050095558 66 | 64,0.015736481174826622 67 | 65,0.014849355444312096 68 | 66,0.01608949527144432 69 | 67,0.01859157718718052 70 | 68,0.01778753288090229 71 | 69,0.014068160206079483 72 | 70,0.0174592025578022 73 | 71,0.02892151102423668 74 | 72,0.015090355649590492 75 | 73,0.011485004797577858 76 | 74,0.012966752052307129 77 | 75,0.014173779636621475 78 | 76,0.01959245651960373 79 | 77,0.01742774248123169 80 | 78,0.01232021115720272 81 | 79,0.010578243993222713 82 | 80,0.01595437526702881 83 | 81,0.015133856795728207 84 | 82,0.01444995030760765 85 | 83,0.012522024102509022 86 | 84,0.015470749698579311 87 | 85,0.013650036416947842 88 | 86,0.013332131318747997 89 | 87,0.01170755922794342 90 | 88,0.014295539818704128 91 | 89,0.013092601671814919 92 | 90,0.014245599508285522 93 | 91,0.012834210880100727 94 | 92,0.013021040707826614 95 | 93,0.014317614026367664 96 | 94,0.009989897720515728 97 | 95,0.011934038251638412 98 | 96,0.012677143327891827 99 | 97,0.014147324487566948 100 | 98,0.011732076294720173 101 | 99,0.010483622550964355 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth+pos_enc_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_pos_enc_with_depth_map 2 | 0,0.5108888149261475 3 | 1,0.5644392967224121 4 | 2,0.6649625897407532 5 | 3,0.6732428073883057 6 | 4,0.7036517262458801 7 | 5,0.7223178744316101 8 | 6,0.7614893317222595 9 | 7,0.7601402997970581 10 | 8,0.7749253511428833 11 | 9,0.8210870027542114 12 | 10,0.7659739255905151 13 | 11,0.8454189896583557 14 | 12,0.8442309498786926 15 | 13,0.8441036343574524 16 | 14,0.8627526164054871 17 | 15,0.8859436511993408 18 | 16,0.8770534992218018 19 | 17,0.8835749626159668 20 | 18,0.8840240836143494 21 | 19,0.8820938467979431 22 | 20,0.8931870460510254 23 | 21,0.8971572518348694 24 | 22,0.879705011844635 25 | 23,0.8851736187934875 26 | 24,0.9015034437179565 27 | 25,0.8322393894195557 28 | 26,0.907495379447937 29 | 
27,0.8936173915863037 30 | 28,0.8059982061386108 31 | 29,0.9061697721481323 32 | 30,0.9147979617118835 33 | 31,0.9179858565330505 34 | 32,0.9151570796966553 35 | 33,0.9126399159431458 36 | 34,0.9027708768844604 37 | 35,0.9197037816047668 38 | 36,0.8947307467460632 39 | 37,0.9174321293830872 40 | 38,0.9200713038444519 41 | 39,0.9112929701805115 42 | 40,0.9183001518249512 43 | 41,0.9217701554298401 44 | 42,0.9195594787597656 45 | 43,0.9205060005187988 46 | 44,0.9153017997741699 47 | 45,0.9111166596412659 48 | 46,0.919915497303009 49 | 47,0.9162286520004272 50 | 48,0.9010263681411743 51 | 49,0.9209998250007629 52 | 50,0.9284493327140808 53 | 51,0.9283323884010315 54 | 52,0.9234710931777954 55 | 53,0.9088467359542847 56 | 54,0.9229422211647034 57 | 55,0.9312492609024048 58 | 56,0.9207566976547241 59 | 57,0.9255474209785461 60 | 58,0.9322311282157898 61 | 59,0.928053081035614 62 | 60,0.918908953666687 63 | 61,0.9216903448104858 64 | 62,0.9269757270812988 65 | 63,0.9336514472961426 66 | 64,0.9326738715171814 67 | 65,0.9328638315200806 68 | 66,0.9242722392082214 69 | 67,0.9179852604866028 70 | 68,0.9297149181365967 71 | 69,0.929895281791687 72 | 70,0.9074047207832336 73 | 71,0.9292541742324829 74 | 72,0.9343549013137817 75 | 73,0.9345154166221619 76 | 74,0.9257559776306152 77 | 75,0.936337947845459 78 | 76,0.933239758014679 79 | 77,0.9370200037956238 80 | 78,0.933994472026825 81 | 79,0.9334705471992493 82 | 80,0.9331084489822388 83 | 81,0.9343867897987366 84 | 82,0.928580105304718 85 | 83,0.9343747496604919 86 | 84,0.9224532246589661 87 | 85,0.9315452575683594 88 | 86,0.9256129860877991 89 | 87,0.9343476295471191 90 | 88,0.9329884648323059 91 | 89,0.917238712310791 92 | 90,0.9314133524894714 93 | 91,0.9340813159942627 94 | 92,0.9317708015441895 95 | 93,0.9364771246910095 96 | 94,0.9396494626998901 97 | 95,0.9265064597129822 98 | 96,0.9283493161201477 99 | 97,0.9246208071708679 100 | 98,0.9259594678878784 101 | 99,0.9338473081588745 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth+pos_enc_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_pos_enc_with_depth_map 2 | 0,0.5017634630203247 3 | 1,0.4870527684688568 4 | 2,0.45264434814453125 5 | 3,0.3977341949939728 6 | 4,0.36989885568618774 7 | 5,0.3663284480571747 8 | 6,0.3146667778491974 9 | 7,0.319842129945755 10 | 8,0.305232435464859 11 | 9,0.25490930676460266 12 | 10,0.34706437587738037 13 | 11,0.22928102314472198 14 | 12,0.21789811551570892 15 | 13,0.22503115236759186 16 | 14,0.20300422608852386 17 | 15,0.17256313562393188 18 | 16,0.1828896850347519 19 | 17,0.18194448947906494 20 | 18,0.18040964007377625 21 | 19,0.18471631407737732 22 | 20,0.16806544363498688 23 | 21,0.1714111715555191 24 | 22,0.1847836971282959 25 | 23,0.3413585126399994 26 | 24,0.1785680651664734 27 | 25,0.5995873212814331 28 | 26,0.14505961537361145 29 | 27,0.1681976467370987 30 | 28,0.31506192684173584 31 | 29,0.15089082717895508 32 | 30,0.1346847116947174 33 | 31,0.13117453455924988 34 | 32,0.13480983674526215 35 | 33,0.14421819150447845 36 | 34,0.15049996972084045 37 | 35,0.12874813377857208 38 | 36,0.1651623547077179 39 | 37,0.1302511990070343 40 | 38,0.12761856615543365 41 | 39,0.1426088958978653 42 | 40,0.12861259281635284 43 | 41,0.12456071376800537 44 | 42,0.12733346223831177 45 | 43,0.1283855438232422 46 | 44,0.13287648558616638 47 | 45,0.14002810418605804 48 | 46,0.12783955037593842 49 | 47,0.13100577890872955 50 
| 48,0.18147483468055725 51 | 49,0.12943841516971588 52 | 50,0.11449240148067474 53 | 51,0.11586685478687286 54 | 52,0.1208285465836525 55 | 53,0.14468027651309967 56 | 54,0.12071529775857925 57 | 55,0.10820642858743668 58 | 56,0.1269441694021225 59 | 57,0.1185133308172226 60 | 58,0.10889267921447754 61 | 59,0.11288049817085266 62 | 60,0.13071565330028534 63 | 61,0.12384387850761414 64 | 62,0.11860866099596024 65 | 63,0.1080113872885704 66 | 64,0.10787419229745865 67 | 65,0.10759659856557846 68 | 66,0.12075942754745483 69 | 67,0.12774620950222015 70 | 68,0.11339045315980911 71 | 69,0.1121147945523262 72 | 70,0.14671696722507477 73 | 71,0.11299770325422287 74 | 72,0.10787578672170639 75 | 73,0.10603892803192139 76 | 74,0.11869166791439056 77 | 75,0.10569094866514206 78 | 76,0.10722392797470093 79 | 77,0.0996122732758522 80 | 78,0.1070176213979721 81 | 79,0.10617350041866302 82 | 80,0.10653447359800339 83 | 81,0.10538818687200546 84 | 82,0.11637213826179504 85 | 83,0.1027020663022995 86 | 84,0.12368610501289368 87 | 85,0.10736051201820374 88 | 86,0.11864783614873886 89 | 87,0.1046474277973175 90 | 88,0.1077992394566536 91 | 89,0.1285136640071869 92 | 90,0.11014148592948914 93 | 91,0.1046181172132492 94 | 92,0.1086650937795639 95 | 93,0.1044251099228859 96 | 94,0.09519178420305252 97 | 95,0.11837788671255112 98 | 96,0.11379481852054596 99 | 97,12.736141204833984 100 | 98,0.2164253294467926 101 | 99,0.36185723543167114 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_depth_map,logs/rot-y_single-bin_with_depth_map 2 | 0,0.610208511,0.59799999 3 | 1,0.699988246,0.691075146 4 | 2,0.735390782,0.724880755 5 | 3,0.759384692,0.752826571 6 | 4,0.778771818,0.775126517 7 | 5,0.79977715,0.79718715 8 | 6,0.824632227,0.82282418 9 | 7,0.855092466,0.849672794 10 | 8,0.881225467,0.876689911 11 | 9,0.905218244,0.898693621 12 | 10,0.922062993,0.915166497 13 | 11,0.934307575,0.929464936 14 | 12,0.941685915,0.938831568 15 | 13,0.951173484,0.940757155 16 | 14,0.958399355,0.95658958 17 | 15,0.963107407,0.960012078 18 | 16,0.965692043,0.96905762 19 | 17,0.97117281,0.96406585 20 | 18,0.972491682,0.972022474 21 | 19,0.97586298,0.975752592 22 | 20,0.974893689,0.97424674 23 | 21,0.980752945,0.981550515 24 | 22,0.978730142,0.977288902 25 | 23,0.981268823,0.972343564 26 | 24,0.981686294,0.980961263 27 | 25,0.981598914,0.986023784 28 | 26,0.986168325,0.975638747 29 | 27,0.985016704,0.989095688 30 | 28,0.983596265,0.9866997 31 | 29,0.983947575,0.987739205 32 | 30,0.986483932,0.983419001 33 | 31,0.988218546,0.987730324 34 | 32,0.987547338,0.990035772 35 | 33,0.985975742,0.987693787 36 | 34,0.987820208,0.988915503 37 | 35,0.987482965,0.987937033 38 | 36,0.989933491,0.987992108 39 | 37,0.988451481,0.990054488 40 | 38,0.988244474,0.991760552 41 | 39,0.988225937,0.988017678 42 | 40,0.990626156,0.989088237 43 | 41,0.990100682,0.992266059 44 | 42,0.990115166,0.991839468 45 | 43,0.991004229,0.990129232 46 | 44,0.98970753,0.991141617 47 | 45,0.991328597,0.989727855 48 | 46,0.990411103,0.992549956 49 | 47,0.990727067,0.994389951 50 | 48,0.990383089,0.989602745 51 | 49,0.992604613,0.991878629 52 | 50,0.99390763,0.994144797 53 | 51,0.992083549,0.992326617 54 | 52,0.990399122,0.992854297 55 | 53,0.993846059,0.991734862 56 | 54,0.99215579,0.993388832 57 | 55,0.990707397,0.992494106 58 | 56,0.992483139,0.992225111 59 | 
57,0.991666973,0.992885709 60 | 58,0.991720736,0.995117545 61 | 59,0.995017648,0.994860768 62 | 60,0.994175673,0.993461728 63 | 61,0.993332267,0.992841601 64 | 62,0.990953624,0.995255411 65 | 63,0.99370712,0.995122433 66 | 64,0.992447257,0.992684722 67 | 65,0.993704796,0.993804753 68 | 66,0.992310762,0.99431318 69 | 67,0.993100047,0.993340135 70 | 68,0.994466066,0.992836952 71 | 69,0.993189812,0.995063245 72 | 70,0.993593633,0.995231271 73 | 71,0.994385898,0.994765699 74 | 72,0.995419621,0.993513823 75 | 73,0.991907418,0.994346023 76 | 74,0.994598329,0.994291425 77 | 75,0.996650338,0.996136546 78 | 76,0.996421218,0.994149864 79 | 77,0.99076438,0.994753003 80 | 78,0.994440258,0.994188309 81 | 79,0.994856596,0.983153164 82 | 80,0.993789315,0.994847894 83 | 81,0.994407475,0.995730579 84 | 82,0.995998383,0.996606469 85 | 83,0.996636748,0.997091949 86 | 84,0.994364202,0.990490437 87 | 85,0.992696941,0.99540025 88 | 86,0.99559474,0.995775521 89 | 87,0.995367408,0.99547255 90 | 88,0.997069657,0.99611634 91 | 89,0.994378328,0.994059026 92 | 90,0.995596647,0.995638847 93 | 91,0.994137287,0.996308804 94 | 92,0.996112347,0.997073114 95 | 93,0.994637668,0.99434489 96 | 94,0.994771779,0.996857226 97 | 95,0.996326208,0.992858827 98 | 96,0.994627595,0.996615887 99 | 97,0.994039178,0.995500267 100 | 98,0.996899843,0.993793666 101 | 99,0.994873285,0.996344268 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_depth_map,logs/rot-y_single-bin_with_depth_map 2 | 0,0.466572016,0.47909385 3 | 1,0.387423724,0.396086723 4 | 2,0.347456753,0.361833751 5 | 3,0.320773214,0.32763803 6 | 4,0.297808796,0.305680364 7 | 5,0.271404445,0.279539824 8 | 6,0.245826274,0.254345268 9 | 7,0.213995665,0.225435868 10 | 8,0.182109654,0.193269983 11 | 9,0.152377203,0.164301217 12 | 10,0.130059943,0.14205803 13 | 11,0.113197967,0.123458721 14 | 12,0.102059379,0.109797798 15 | 13,0.087127328,0.106432468 16 | 14,0.076275997,0.084445044 17 | 15,0.069585331,0.077790178 18 | 16,0.063993134,0.064621538 19 | 17,0.056366786,0.071579255 20 | 18,0.05348476,0.058282897 21 | 19,0.048230644,0.053054493 22 | 20,0.049139731,0.054960322 23 | 21,0.039362408,0.04284028 24 | 22,0.042300399,0.049483296 25 | 23,0.038415618,0.056210808 26 | 24,0.037510328,0.042028476 27 | 25,0.036920622,0.033643417 28 | 26,0.029900823,0.050517879 29 | 27,0.031603053,0.027660826 30 | 28,0.033216339,0.031573262 31 | 29,0.032668032,0.029916799 32 | 30,0.028558539,0.0369142 33 | 31,0.025962014,0.029857175 34 | 32,0.026550882,0.025798246 35 | 33,0.029442804,0.029632237 36 | 34,0.025656512,0.027429227 37 | 35,0.026237294,0.029461695 38 | 36,0.02206495,0.027859464 39 | 37,0.024293805,0.024797145 40 | 38,0.025060548,0.02142749 41 | 39,0.025008805,0.028027553 42 | 40,0.020453803,0.026068117 43 | 41,0.021265894,0.020169996 44 | 42,0.021448171,0.020456752 45 | 43,0.020187039,0.023609353 46 | 44,0.022347135,0.021647647 47 | 45,0.019370466,0.024587892 48 | 46,0.02092216,0.019430025 49 | 47,0.020197861,0.015920639 50 | 48,0.020352669,0.023732655 51 | 49,0.016903045,0.020527838 52 | 50,0.014767417,0.016226292 53 | 51,0.017535524,0.019250171 54 | 52,0.020975653,0.018421132 55 | 53,0.014575474,0.020609878 56 | 54,0.017663175,0.017856592 57 | 55,0.019809207,0.018125866 58 | 56,0.016660566,0.019712809 59 | 57,0.01848248,0.018115068 60 | 58,0.018387288,0.01377437 61 | 
59,0.012502656,0.014790606 62 | 60,0.013891815,0.017339924 63 | 61,0.015705148,0.018182317 64 | 62,0.019016188,0.013460787 65 | 63,0.014659775,0.014023192 66 | 64,0.016368194,0.018091362 67 | 65,0.014715145,0.015996005 68 | 66,0.016883589,0.015430159 69 | 67,0.015269401,0.016864758 70 | 68,0.0133893,0.018241286 71 | 69,0.01540432,0.013915787 72 | 70,0.014902215,0.013213111 73 | 71,0.013256258,0.014499784 74 | 72,0.011619344,0.016617421 75 | 73,0.017079603,0.014645873 76 | 74,0.012709148,0.01517666 77 | 75,0.009230866,0.011734448 78 | 76,0.009512793,0.015018631 79 | 77,0.019021161,0.014280792 80 | 78,0.013025301,0.015394855 81 | 79,0.012281613,0.034606881 82 | 80,0.014216582,0.014126437 83 | 81,0.013189287,0.012079028 84 | 82,0.010288389,0.010637072 85 | 83,0.009212004,0.00964604 86 | 84,0.01325048,0.022064153 87 | 85,0.016106484,0.012775676 88 | 86,0.010893479,0.012056427 89 | 87,0.011552322,0.013025215 90 | 88,0.008406099,0.011550965 91 | 89,0.013076741,0.015128803 92 | 90,0.01084944,0.012289059 93 | 91,0.01351392,0.011072609 94 | 92,0.010072167,0.009799289 95 | 93,0.012285202,0.015103426 96 | 94,0.012630067,0.010149102 97 | 95,0.009772372,0.017155372 98 | 96,0.012519792,0.010544226 99 | 97,0.013167736,0.012450658 100 | 98,0.008484775,0.015667997 101 | 99,0.01172231,0.010574467 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_depth_map,logs/rot-y_single-bin_with_depth_map 2 | 0,0.54643929,0.548865557 3 | 1,0.620578766,0.657918036 4 | 2,0.654596686,0.674940228 5 | 3,0.718094051,0.731747746 6 | 4,0.739952266,0.718460143 7 | 5,0.767778277,0.735369682 8 | 6,0.771947145,0.6963889 9 | 7,0.809372723,0.739769518 10 | 8,0.784261286,0.816486359 11 | 9,0.85427016,0.825319588 12 | 10,0.867881715,0.868079424 13 | 11,0.875404716,0.862359762 14 | 12,0.873132706,0.696505666 15 | 13,0.865922391,0.86190033 16 | 14,0.858841956,0.886596084 17 | 15,0.888452768,0.887663543 18 | 16,0.879773736,0.903046072 19 | 17,0.887817562,0.89123702 20 | 18,0.893685758,0.897571683 21 | 19,0.910546124,0.86908114 22 | 20,0.899112344,0.892768085 23 | 21,0.900698602,0.884035945 24 | 22,0.901615918,0.901805162 25 | 23,0.880401611,0.904365003 26 | 24,0.88322264,0.907449484 27 | 25,0.890514731,0.684984028 28 | 26,0.891622543,0.914999843 29 | 27,0.89598012,0.918100357 30 | 28,0.908813238,0.918097734 31 | 29,0.91109091,0.915112436 32 | 30,0.918695569,0.902444243 33 | 31,0.915975034,0.916658521 34 | 32,0.861748993,0.90732193 35 | 33,0.920745075,0.916354358 36 | 34,0.921323597,0.902706861 37 | 35,0.916089475,0.925192297 38 | 36,0.913487196,0.899918616 39 | 37,0.927558661,0.932615459 40 | 38,0.90042007,0.893589199 41 | 39,0.926856935,0.917570591 42 | 40,0.913441598,0.927783549 43 | 41,0.930156052,0.868521094 44 | 42,0.927700877,0.930388093 45 | 43,0.924526513,0.924009562 46 | 44,0.931896091,0.928015709 47 | 45,0.922265887,0.885914683 48 | 46,0.891801715,0.92875874 49 | 47,0.925132096,0.928519547 50 | 48,0.926121891,0.926627338 51 | 49,0.927431703,0.9114604 52 | 50,0.916165173,0.918297768 53 | 51,0.910186946,0.885382533 54 | 52,0.925921798,0.930381835 55 | 53,0.925597191,0.920012474 56 | 54,0.92580688,0.935288727 57 | 55,0.926511884,0.914601207 58 | 56,0.917783976,0.933025956 59 | 57,0.937707186,0.926636815 60 | 58,0.925193846,0.936169684 61 | 59,0.937744439,0.931107819 62 | 60,0.926609635,0.928241014 63 | 
61,0.923558235,0.929934204 64 | 62,0.926781893,0.937771738 65 | 63,0.934441745,0.923213005 66 | 64,0.920474827,0.916214824 67 | 65,0.929404914,0.861976743 68 | 66,0.925505459,0.903597772 69 | 67,0.914159536,0.929993093 70 | 68,0.904857099,0.925416052 71 | 69,0.922586024,0.931758463 72 | 70,0.93050915,0.927347541 73 | 71,0.938043177,0.93044287 74 | 72,0.871982396,0.93872422 75 | 73,0.931748807,0.873416424 76 | 74,0.93407023,0.935416758 77 | 75,0.94384259,0.932180762 78 | 76,0.936705112,0.925825953 79 | 77,0.920051157,0.932569206 80 | 78,0.927155912,0.89698422 81 | 79,0.933141053,0.930691659 82 | 80,0.934423864,0.930574775 83 | 81,0.939633369,0.933642805 84 | 82,0.934873343,0.938597202 85 | 83,0.927213669,0.9387936 86 | 84,0.933478117,0.909081042 87 | 85,0.9407987,0.932150364 88 | 86,0.940228283,0.924808025 89 | 87,0.942987084,0.937175393 90 | 88,0.936476588,0.920712233 91 | 89,0.941450477,0.92187047 92 | 90,0.939112186,0.919147074 93 | 91,0.89417094,0.939526141 94 | 92,0.933507204,0.921096206 95 | 93,0.928613842,0.934709966 96 | 94,0.931700647,0.928403258 97 | 95,0.934649825,0.933005631 98 | 96,0.93239814,0.935102224 99 | 97,0.936147869,0.931432307 100 | 98,0.926938653,0.934471905 101 | 99,0.933173776,0.932559848 102 | -------------------------------------------------------------------------------- /utils/csv_output/depth_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_tricosine_with_depth_map,logs/rot-y_single-bin_with_depth_map 2 | 0,0.512728274,0.63533777 3 | 1,0.437614232,0.42163527 4 | 2,0.437648296,0.443371654 5 | 3,0.380609602,0.351300478 6 | 4,0.339860946,0.363651723 7 | 5,0.313085556,0.350584567 8 | 6,0.314915031,0.404850572 9 | 7,0.269313157,0.341298252 10 | 8,0.387680739,0.258589149 11 | 9,0.211884543,0.246980429 12 | 10,0.194564044,0.198937997 13 | 11,0.195827559,0.208125234 14 | 12,0.189781696,0.440118045 15 | 13,0.201581642,0.20483315 16 | 14,0.218235075,0.173183456 17 | 15,0.171686471,0.1766285 18 | 16,0.189403325,0.1550951 19 | 17,0.176347777,0.163488016 20 | 18,0.170895979,0.158957526 21 | 19,0.142443627,0.192149758 22 | 20,0.160243601,0.167000175 23 | 21,0.154596418,0.181961864 24 | 22,0.158361033,0.152811661 25 | 23,0.18898955,0.148711324 26 | 24,0.187025264,0.150027916 27 | 25,0.172318876,0.562767565 28 | 26,0.167986751,0.140138566 29 | 27,0.169151351,0.132568702 30 | 28,0.141750291,0.132604167 31 | 29,0.141058505,0.137747273 32 | 30,0.127878085,0.156860277 33 | 31,0.131810665,0.134581074 34 | 32,0.216435939,0.14933756 35 | 33,0.127021611,0.13515529 36 | 34,0.128851324,0.158829078 37 | 35,0.136636913,0.120464765 38 | 36,0.140317082,0.165469319 39 | 37,0.114515588,0.110511377 40 | 38,0.158006564,0.175394967 41 | 39,0.120207988,0.130452961 42 | 40,0.139710635,0.117653579 43 | 41,0.117548205,0.217502952 44 | 42,0.117918864,0.113025405 45 | 43,0.123937219,0.126133382 46 | 44,0.110031135,0.119366959 47 | 45,0.127123132,0.180115014 48 | 46,0.171571687,0.115706682 49 | 47,0.123979166,0.119039401 50 | 48,0.119406305,0.116509482 51 | 49,0.119341545,0.14137654 52 | 50,0.138109982,0.129406452 53 | 51,0.146198735,0.183441922 54 | 52,0.122489348,0.112080961 55 | 53,0.12077444,0.131685376 56 | 54,0.122107707,0.108142659 57 | 55,0.121035077,0.139858276 58 | 56,0.133544594,0.111189693 59 | 57,0.100984238,0.120217115 60 | 58,0.122626863,0.104026332 61 | 59,0.10629493,0.114925079 62 | 60,0.125933886,0.11579816 63 | 61,0.123512045,0.114126682 64 | 62,0.121295683,0.105163656 65 | 63,0.110754527,0.125428811 66 
| 64,0.128140777,0.138424471 67 | 65,0.120094009,0.221186727 68 | 66,0.125069454,0.14810209 69 | 67,0.140045092,0.113951527 70 | 68,0.15873991,0.123523958 71 | 69,0.12848568,0.112939999 72 | 70,0.116580091,0.119265147 73 | 71,0.103190102,0.11330419 74 | 72,0.208105236,0.10011252 75 | 73,0.115174524,0.195753068 76 | 74,0.108716652,0.105474055 77 | 75,0.092434794,0.111440271 78 | 76,0.105742469,0.117936082 79 | 77,0.128865197,0.111828715 80 | 78,0.121856764,0.191707104 81 | 79,0.112284079,0.113213904 82 | 80,0.10829024,0.113364488 83 | 81,0.098842613,0.110057853 84 | 82,0.107038416,0.134478316 85 | 83,0.117338836,0.101200752 86 | 84,0.111069813,0.155157372 87 | 85,0.100177363,0.111378007 88 | 86,0.100749627,0.120347217 89 | 87,0.095306642,0.103505976 90 | 88,0.107050247,0.130206302 91 | 89,0.100485861,0.125661045 92 | 90,0.102544397,0.126425102 93 | 91,0.175744817,0.097522259 94 | 92,0.11172419,0.132488683 95 | 93,0.11920099,0.108294047 96 | 94,0.113758415,0.118070506 97 | 95,0.10874784,0.110992059 98 | 96,0.110688701,0.112759866 99 | 97,0.106909633,0.114633337 100 | 98,0.121196948,0.107653551 101 | 99,0.112452686,0.112983227 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-A_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.6595706343650818 3 | 1,0.7475813627243042 4 | 2,0.7797335386276245 5 | 3,0.7974050045013428 6 | 4,0.8156607151031494 7 | 5,0.8312563002109528 8 | 6,0.8487735092639923 9 | 7,0.8659686148166656 10 | 8,0.8826016187667847 11 | 9,0.897568017244339 12 | 10,0.9142777919769287 13 | 11,0.9295943081378937 14 | 12,0.941422700881958 15 | 13,0.9509012401103973 16 | 14,0.959236204624176 17 | 15,0.9641611576080322 18 | 16,0.9661401808261871 19 | 17,0.9733545780181885 20 | 18,0.9730362296104431 21 | 19,0.9772874116897583 22 | 20,0.9810784161090851 23 | 21,0.9789856374263763 24 | 22,0.9803755879402161 25 | 23,0.9799835085868835 26 | 24,0.9814310669898987 27 | 25,0.9857872426509857 28 | 26,0.9865390658378601 29 | 27,0.9815713763237 30 | 28,0.9846218824386597 31 | 29,0.9862165749073029 32 | 30,0.9829156398773193 33 | 31,0.9904298782348633 34 | 32,0.9897958636283875 35 | 33,0.985727459192276 36 | 34,0.9864600002765656 37 | 35,0.9915676414966583 38 | 36,0.9898914396762848 39 | 37,0.9915640354156494 40 | 38,0.9824677407741547 41 | 39,0.99171382188797 42 | 40,0.9913645088672638 43 | 41,0.9920434951782227 44 | 42,0.9902288317680359 45 | 43,0.9924062490463257 46 | 44,0.991104006767273 47 | 45,0.9891282618045807 48 | 46,0.9919527173042297 49 | 47,0.9935021102428436 50 | 48,0.991452157497406 51 | 49,0.9878502190113068 52 | 50,0.9926177859306335 53 | 51,0.9928233623504639 54 | 52,0.9948160350322723 55 | 53,0.9922419488430023 56 | 54,0.9915041327476501 57 | 55,0.9926904439926147 58 | 56,0.9957354664802551 59 | 57,0.9927516877651215 60 | 58,0.9947721064090729 61 | 59,0.995674341917038 62 | 60,0.9947401881217957 63 | 61,0.9913298487663269 64 | 62,0.9950888454914093 65 | 63,0.9947198927402496 66 | 64,0.9935267567634583 67 | 65,0.9936252236366272 68 | 66,0.9945374727249146 69 | 67,0.9822322428226471 70 | 68,0.99281445145607 71 | 69,0.9937737882137299 72 | 70,0.9951539933681488 73 | 71,0.9926390647888184 74 | 72,0.9941605627536774 75 | 73,0.9950456321239471 76 | 74,0.9962667226791382 77 | 75,0.9965062141418457 78 | 76,0.9946044683456421 79 | 77,0.9949730038642883 80 | 78,0.9961954057216644 81 | 79,0.9968099594116211 82 | 80,0.9948718547821045 83 | 
81,0.9947298765182495 84 | 82,0.995943009853363 85 | 83,0.9951670467853546 86 | 84,0.9952804148197174 87 | 85,0.9973131418228149 88 | 86,0.9960379600524902 89 | 87,0.9941695630550385 90 | 88,0.9966471493244171 91 | 89,0.9941523671150208 92 | 90,0.9886930584907532 93 | 91,0.996075838804245 94 | 92,0.9963895678520203 95 | 93,0.9972283840179443 96 | 94,0.9959542155265808 97 | 95,0.9968517124652863 98 | 96,0.9969511926174164 99 | 97,0.9967174828052521 100 | 98,0.9970735609531403 101 | 99,0.9946383833885193 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-A_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.41626201570034027 3 | 1,0.33554188907146454 4 | 2,0.2991364151239395 5 | 3,0.2764538750052452 6 | 4,0.25536663830280304 7 | 5,0.23735927045345306 8 | 6,0.2148924171924591 9 | 7,0.1949322298169136 10 | 8,0.17604432255029678 11 | 9,0.15757304430007935 12 | 10,0.13834645971655846 13 | 11,0.11887500062584877 14 | 12,0.1023993045091629 15 | 13,0.08982285670936108 16 | 14,0.07729617692530155 17 | 15,0.06966112926602364 18 | 16,0.06659447029232979 19 | 17,0.05545783042907715 20 | 18,0.05518226884305477 21 | 19,0.04825213365256786 22 | 20,0.041705249808728695 23 | 21,0.04548481106758118 24 | 22,0.04260922409594059 25 | 23,0.043183472007513046 26 | 24,0.040353696793317795 27 | 25,0.03296373877674341 28 | 26,0.03139131888747215 29 | 27,0.04051482491195202 30 | 28,0.03458424936980009 31 | 29,0.031622893176972866 32 | 30,0.03983312100172043 33 | 31,0.022799360565841198 34 | 32,0.025086416862905025 35 | 33,0.032069859094917774 36 | 34,0.031082610599696636 37 | 35,0.02188058290630579 38 | 36,0.024277744349092245 39 | 37,0.021473384462296963 40 | 38,0.03601892292499542 41 | 39,0.021308090537786484 42 | 40,0.0218513123691082 43 | 41,0.020361371338367462 44 | 42,0.023404834792017937 45 | 43,0.01948881335556507 46 | 44,0.02167285606265068 47 | 45,0.02532968297600746 48 | 46,0.020396994426846504 49 | 47,0.017576306127011776 50 | 48,0.020978411193937063 51 | 49,0.027666842564940453 52 | 50,0.018772754818201065 53 | 51,0.01867496967315674 54 | 52,0.014656889252364635 55 | 53,0.019529782701283693 56 | 54,0.02075393870472908 57 | 55,0.01865231478586793 58 | 56,0.013068117201328278 59 | 57,0.018042877782136202 60 | 58,0.015137740410864353 61 | 59,0.01284729316830635 62 | 60,0.014470302034169436 63 | 61,0.020637556444853544 64 | 62,0.014155286364257336 65 | 63,0.014328296761959791 66 | 64,0.016940167173743248 67 | 65,0.016736043617129326 68 | 66,0.014801089651882648 69 | 67,0.035792393144220114 70 | 68,0.018178721889853477 71 | 69,0.01642866339534521 72 | 70,0.013959744945168495 73 | 71,0.018234978429973125 74 | 72,0.01570427231490612 75 | 73,0.013702026568353176 76 | 74,0.01136207627132535 77 | 75,0.011005059815943241 78 | 76,0.014557552523911 79 | 77,0.014117905404418707 80 | 78,0.011634496040642262 81 | 79,0.010444363113492727 82 | 80,0.014219993725419044 83 | 81,0.013945120386779308 84 | 82,0.011952549684792757 85 | 83,0.013410559855401516 86 | 84,0.013103564269840717 87 | 85,0.009338718838989735 88 | 86,0.011618217919021845 89 | 87,0.015327705536037683 90 | 88,0.01056993706151843 91 | 89,0.015184779185801744 92 | 90,0.024951638653874397 93 | 91,0.012088721618056297 94 | 92,0.01124580716714263 95 | 93,0.009402896743267775 96 | 94,0.011698149610310793 97 | 95,0.00992073817178607 98 | 96,0.00973555352538824 99 | 97,0.010637324769049883 100 | 98,0.009674232918769121 
101 | 99,0.014108369592577219 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-A_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.6539453268051147 3 | 1,0.742766797542572 4 | 2,0.7518016993999481 5 | 3,0.7840099334716797 6 | 4,0.7461138069629669 7 | 5,0.809810072183609 8 | 6,0.8246830403804779 9 | 7,0.8465828895568848 10 | 8,0.8672221601009369 11 | 9,0.8644307255744934 12 | 10,0.8743984699249268 13 | 11,0.884278804063797 14 | 12,0.9019626379013062 15 | 13,0.9072136878967285 16 | 14,0.9091704189777374 17 | 15,0.9132416546344757 18 | 16,0.9157736003398895 19 | 17,0.9173181653022766 20 | 18,0.9186671376228333 21 | 19,0.9302138388156891 22 | 20,0.9150339365005493 23 | 21,0.8959232866764069 24 | 22,0.9336421489715576 25 | 23,0.9324018359184265 26 | 24,0.9347175359725952 27 | 25,0.9384431838989258 28 | 26,0.8883638978004456 29 | 27,0.8993721604347229 30 | 28,0.9231621325016022 31 | 29,0.9428919553756714 32 | 30,0.9311817586421967 33 | 31,0.9433639645576477 34 | 32,0.9233942031860352 35 | 33,0.8879663050174713 36 | 34,0.9260681867599487 37 | 35,0.941923201084137 38 | 36,0.9484826028347015 39 | 37,0.9427487254142761 40 | 38,0.9389656782150269 41 | 39,0.9383200705051422 42 | 40,0.9444885551929474 43 | 41,0.9312306940555573 44 | 42,0.9418866336345673 45 | 43,0.9416375458240509 46 | 44,0.9179112911224365 47 | 45,0.9395870268344879 48 | 46,0.9431597590446472 49 | 47,0.9419587850570679 50 | 48,0.9190024137496948 51 | 49,0.9473672807216644 52 | 50,0.936659187078476 53 | 51,0.9458725452423096 54 | 52,0.9437598288059235 55 | 53,0.9413842856884003 56 | 54,0.9086226224899292 57 | 55,0.9493059813976288 58 | 56,0.9503820836544037 59 | 57,0.9475049078464508 60 | 58,0.9425947070121765 61 | 59,0.9484144747257233 62 | 60,0.9458116888999939 63 | 61,0.9344966113567352 64 | 62,0.9473488926887512 65 | 63,0.9251012802124023 66 | 64,0.9398766756057739 67 | 65,0.9341376721858978 68 | 66,0.9430765807628632 69 | 67,0.945593386888504 70 | 68,0.9409202337265015 71 | 69,0.938223123550415 72 | 70,0.945827305316925 73 | 71,0.9184952974319458 74 | 72,0.9355522096157074 75 | 73,0.9306965172290802 76 | 74,0.9469799399375916 77 | 75,0.9400181174278259 78 | 76,0.9392223954200745 79 | 77,0.944983720779419 80 | 78,0.9384738206863403 81 | 79,0.9385436773300171 82 | 80,0.9467271566390991 83 | 81,0.9515658318996429 84 | 82,0.9514214992523193 85 | 83,0.9392683804035187 86 | 84,0.9537573456764221 87 | 85,0.9489558041095734 88 | 86,0.9328611493110657 89 | 87,0.9518439173698425 90 | 88,0.9498897194862366 91 | 89,0.9438364803791046 92 | 90,0.9454463124275208 93 | 91,0.9445838630199432 94 | 92,0.9520668089389801 95 | 93,0.9498812258243561 96 | 94,0.9496680498123169 97 | 95,0.9385409653186798 98 | 96,0.9461212456226349 99 | 97,0.9490524530410767 100 | 98,0.9457087218761444 101 | 99,0.9427993893623352 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-A_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,1.6696708649396896 3 | 1,0.35152609646320343 4 | 2,0.32228872179985046 5 | 3,0.3076109290122986 6 | 4,1.1248935163021088 7 | 5,0.2622956484556198 8 | 6,0.30043836683034897 9 | 7,0.2145414724946022 10 | 8,0.191425122320652 11 | 9,0.19337152689695358 12 | 10,0.1878165379166603 13 | 11,0.17548814415931702 14 | 12,0.15045307949185371 15 | 
13,0.14331528916954994 16 | 14,0.14551904797554016 17 | 15,0.13643063232302666 18 | 16,0.1343585103750229 19 | 17,0.282934982329607 20 | 18,0.12827210873365402 21 | 19,0.12465986981987953 22 | 20,0.14107241109013557 23 | 21,0.16108069568872452 24 | 22,0.10673518106341362 25 | 23,0.1085176169872284 26 | 24,0.10562336817383766 27 | 25,0.09877025336027145 28 | 26,0.17687615007162094 29 | 27,0.249521866440773 30 | 28,2.1805965192615986 31 | 29,0.18189551681280136 32 | 30,0.10837537422776222 33 | 31,0.12521642819046974 34 | 32,1.9019804671406746 35 | 33,153.80924072116613 36 | 34,318.09401055797935 37 | 35,7.604392224922776 38 | 36,0.12971819564700127 39 | 37,0.17099640145897865 40 | 38,0.16413310170173645 41 | 39,3.9180420227348804 42 | 40,0.09252845868468285 43 | 41,32.57292525097728 44 | 42,1.0010876879096031 45 | 43,0.1223372258245945 46 | 44,0.12864824011921883 47 | 45,0.648179043084383 48 | 46,0.17762398719787598 49 | 47,0.5248917602002621 50 | 48,0.131934504956007 51 | 49,0.47408427111804485 52 | 50,0.10702351480722427 53 | 51,0.3442074544727802 54 | 52,4.64367913454771 55 | 53,0.21701787412166595 56 | 54,1010.1531843859702 57 | 55,0.11703539080917835 58 | 56,0.909787654876709 59 | 57,0.08563509956002235 60 | 58,0.5114591009914875 61 | 59,1.1515621580183506 62 | 60,0.4018517732620239 63 | 61,0.6022852137684822 64 | 62,0.6723063290119171 65 | 63,0.6237734854221344 66 | 64,0.7516481466591358 67 | 65,26.550629049539566 68 | 66,0.16266628727316856 69 | 67,0.5127294808626175 70 | 68,4.995724428445101 71 | 69,1.5431141145527363 72 | 70,3.2249329686164856 73 | 71,0.1809476688504219 74 | 72,0.28853416442871094 75 | 73,1.0946528315544128 76 | 74,0.7537890411913395 77 | 75,0.29799632728099823 78 | 76,0.14474334940314293 79 | 77,0.25393715873360634 80 | 78,2.870554883033037 81 | 79,1.3648642972111702 82 | 80,0.20048223435878754 83 | 81,1.027121415361762 84 | 82,1.8045132365077734 85 | 83,1.2532427944242954 86 | 84,0.33841184340417385 87 | 85,1.447501266375184 88 | 86,4.393560983240604 89 | 87,4.466461207717657 90 | 88,0.2814765051007271 91 | 89,2.8748888671398163 92 | 90,0.5846006199717522 93 | 91,6.728190042078495 94 | 92,4.183903608471155 95 | 93,3.2976580522954464 96 | 94,1.153687585145235 97 | 95,9.246574521064758 98 | 96,6.028553701937199 99 | 97,24.565971955657005 100 | 98,47.16810439527035 101 | 99,19.0687445551157 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-B_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A,logs/rot-y_exp-B 2 | 0,0.6595706343650818,0.6000480651855469 3 | 1,0.7475813627243042,0.6750563383102417 4 | 2,0.7797335386276245,0.7010399699211121 5 | 3,0.7974050045013428,0.7251718640327454 6 | 4,0.8156607151031494,0.7511904835700989 7 | 5,0.8312563002109528,0.7693915963172913 8 | 6,0.8487735092639923,0.7828317284584045 9 | 7,0.8659686148166656,0.7977737188339233 10 | 8,0.8826016187667847,0.8141303658485413 11 | 9,0.897568017244339,0.8354677557945251 12 | 10,0.9142777919769287,0.8539106249809265 13 | 11,0.9295943081378937,0.8794511556625366 14 | 12,0.941422700881958,0.8982357382774353 15 | 13,0.9509012401103973,0.9095901846885681 16 | 14,0.959236204624176,0.9284951686859131 17 | 15,0.9641611576080322,0.9379051327705383 18 | 16,0.9661401808261871,0.9450802803039551 19 | 17,0.9733545780181885,0.9501482844352722 20 | 18,0.9730362296104431,0.9583286643028259 21 | 19,0.9772874116897583,0.9608941078186035 22 | 
20,0.9810784161090851,0.9680317640304565 23 | 21,0.9789856374263763,0.9711657166481018 24 | 22,0.9803755879402161,0.9693057537078857 25 | 23,0.9799835085868835,0.9748086929321289 26 | 24,0.9814310669898987,0.9753508567810059 27 | 25,0.9857872426509857,0.9766794443130493 28 | 26,0.9865390658378601,0.9708195924758911 29 | 27,0.9815713763237,0.9782347679138184 30 | 28,0.9846218824386597,0.980494499206543 31 | 29,0.9862165749073029,0.9818149209022522 32 | 30,0.9812324047088623,0.9836184978485107 33 | 31,0.9904298782348633,0.9840064644813538 34 | 32,0.9897958636283875,0.9836942553520203 35 | 33,0.985727459192276,0.9796777367591858 36 | 34,0.9864600002765656,0.9839792847633362 37 | 35,0.9915676414966583,0.9807703495025635 38 | 36,0.9898914396762848,0.9875426888465881 39 | 37,0.9915640354156494,0.9846752882003784 40 | 38,0.9824677407741547,0.980376124382019 41 | 39,0.99171382188797,0.9816982746124268 42 | 40,0.9913645088672638,0.985661506652832 43 | 41,0.9920434951782227,0.9881018996238708 44 | 42,0.9902288317680359,0.9870431423187256 45 | 43,0.9924062490463257,0.9807652831077576 46 | 44,0.991104006767273,0.989000678062439 47 | 45,0.9891282618045807,0.9907253384590149 48 | 46,0.9919527173042297,0.989014208316803 49 | 47,0.9935021102428436,0.9868354797363281 50 | 48,0.991452157497406,0.9881940484046936 51 | 49,0.9878502190113068,0.990738570690155 52 | 50,0.9926177859306335,0.987257182598114 53 | 51,0.9928233623504639,0.9890639781951904 54 | 52,0.9948160350322723,0.9929422736167908 55 | 53,0.9922419488430023,0.9849871397018433 56 | 54,0.9915041327476501,0.9897902011871338 57 | 55,0.9926904439926147,0.9935063719749451 58 | 56,0.9957354664802551,0.9878507256507874 59 | 57,0.9927516877651215,0.9914484024047852 60 | 58,0.9947721064090729,0.9916388988494873 61 | 59,0.995674341917038,0.9905507564544678 62 | 60,0.9947401881217957,0.991159200668335 63 | 61,0.9913298487663269,0.9892823696136475 64 | 62,0.9950888454914093,0.9949520826339722 65 | 63,0.9947198927402496,0.9954166412353516 66 | 64,0.9935267567634583,0.9914990663528442 67 | 65,0.9936252236366272,0.9875115752220154 68 | 66,0.9945374727249146,0.9915017485618591 69 | 67,0.9822322428226471,0.9949578642845154 70 | 68,0.99281445145607,0.9942835569381714 71 | 69,0.9937737882137299,0.9934943914413452 72 | 70,0.9951539933681488,0.9939014315605164 73 | 71,0.9926390647888184,0.9958945512771606 74 | 72,0.9941605627536774,0.9895168542861938 75 | 73,0.9950456321239471,0.9902284145355225 76 | 74,0.9962667226791382,0.9937497973442078 77 | 75,0.9965062141418457,0.9953891634941101 78 | 76,0.9946044683456421,0.9895498156547546 79 | 77,0.9949730038642883,0.9929909706115723 80 | 78,0.9961954057216644,0.991693377494812 81 | 79,0.9968099594116211,0.9947179555892944 82 | 80,0.9948718547821045,0.9938977956771851 83 | 81,0.9947298765182495,0.9928811192512512 84 | 82,0.995943009853363,0.9945154786109924 85 | 83,0.9951670467853546,0.9944049715995789 86 | 84,0.9952804148197174,0.9939019680023193 87 | 85,0.9973131418228149,0.9944862723350525 88 | 86,0.9960379600524902,0.9950379729270935 89 | 87,0.9941695630550385,0.9921040534973145 90 | 88,0.9966471493244171,0.990852952003479 91 | 89,0.9941523671150208,0.9938217997550964 92 | 90,0.9886930584907532,0.9959806203842163 93 | 91,0.996075838804245,0.995283305644989 94 | 92,0.9963895678520203,0.9932147264480591 95 | 93,0.9972283840179443,0.9891338348388672 96 | 94,0.9959542155265808,0.9937722086906433 97 | 95,0.9968517124652863,0.9949572086334229 98 | 96,0.9969511926174164,0.9924212694168091 99 | 
97,0.9967174828052521,0.9964489340782166 100 | 98,0.9970735609531403,0.9941613674163818 101 | 99,0.9946383833885193,0.9961534738540649 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-B_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A,logs/rot-y_exp-B 2 | 0,0.41626201570034027,0.4807826578617096 3 | 1,0.33554188907146454,0.42082956433296204 4 | 2,0.2991364151239395,0.38770556449890137 5 | 3,0.2764538750052452,0.35565778613090515 6 | 4,0.25536663830280304,0.32773736119270325 7 | 5,0.23735927045345306,0.3069697916507721 8 | 6,0.2148924171924591,0.28913670778274536 9 | 7,0.1949322298169136,0.2694324553012848 10 | 8,0.17604432255029678,0.25571197271347046 11 | 9,0.15757304430007935,0.23346556723117828 12 | 10,0.13834645971655846,0.21423737704753876 13 | 11,0.11887500062584877,0.18643134832382202 14 | 12,0.1023993045091629,0.16307641565799713 15 | 13,0.08982285670936108,0.1471264809370041 16 | 14,0.07729617692530155,0.12371668964624405 17 | 15,0.06966112926602364,0.10984835773706436 18 | 16,0.06659447029232979,0.10018150508403778 19 | 17,0.05545783042907715,0.09130458533763885 20 | 18,0.05518226884305477,0.08036915957927704 21 | 19,0.04825213365256786,0.07538694143295288 22 | 20,0.041705249808728695,0.06452440470457077 23 | 21,0.04548481106758118,0.05925256013870239 24 | 22,0.04260922409594059,0.06270162016153336 25 | 23,0.043183472007513046,0.053594157099723816 26 | 24,0.040353696793317795,0.05272907763719559 27 | 25,0.03296373877674341,0.04966975748538971 28 | 26,0.03139131888747215,0.058963920921087265 29 | 27,0.04051482491195202,0.048500530421733856 30 | 28,0.03458424936980009,0.04293356090784073 31 | 29,0.031622893176972866,0.03984483331441879 32 | 30,0.03952560015022755,0.03715547174215317 33 | 31,0.024301204830408096,0.036498330533504486 34 | 32,0.025086416862905025,0.036636997014284134 35 | 33,0.032069859094917774,0.04259709641337395 36 | 34,0.031082610599696636,0.036042504012584686 37 | 35,0.02188058290630579,0.04149444028735161 38 | 36,0.024277744349092245,0.030065305531024933 39 | 37,0.021473384462296963,0.034187112003564835 40 | 38,0.03601892292499542,0.04314471036195755 41 | 39,0.021308090537786484,0.03811722248792648 42 | 40,0.0218513123691082,0.032638002187013626 43 | 41,0.020361371338367462,0.028394605964422226 44 | 42,0.023404834792017937,0.030224105343222618 45 | 43,0.01948881335556507,0.03978181257843971 46 | 44,0.02167285606265068,0.02597101964056492 47 | 45,0.02532968297600746,0.023067528381943703 48 | 46,0.020396994426846504,0.026554320007562637 49 | 47,0.017576306127011776,0.030173180624842644 50 | 48,0.020978411193937063,0.02763935923576355 51 | 49,0.027666842564940453,0.02342829294502735 52 | 50,0.018772754818201065,0.02966843917965889 53 | 51,0.01867496967315674,0.02652578055858612 54 | 52,0.014656889252364635,0.019243745133280754 55 | 53,0.019529782701283693,0.032683003693819046 56 | 54,0.02075393870472908,0.024498319253325462 57 | 55,0.01865231478586793,0.01798280142247677 58 | 56,0.013068117201328278,0.02742827497422695 59 | 57,0.018042877782136202,0.021009672433137894 60 | 58,0.015137740410864353,0.021692270413041115 61 | 59,0.01284729316830635,0.023505544289946556 62 | 60,0.014470302034169436,0.02128373645246029 63 | 61,0.020637556444853544,0.025149110704660416 64 | 62,0.014155286364257336,0.015063822269439697 65 | 63,0.014328296761959791,0.014020020142197609 66 | 64,0.016940167173743248,0.02108609490096569 67 | 
65,0.016736043617129326,0.02771022729575634 68 | 66,0.014801089651882648,0.02082957699894905 69 | 67,0.035792393144220114,0.01488350797444582 70 | 68,0.018178721889853477,0.015460869297385216 71 | 69,0.01642866339534521,0.017029277980327606 72 | 70,0.013959744945168495,0.016485100612044334 73 | 71,0.018234978429973125,0.012945772148668766 74 | 72,0.01570427231490612,0.023841308429837227 75 | 73,0.013702026568353176,0.022859470918774605 76 | 74,0.01136207627132535,0.016359621658921242 77 | 75,0.011005059815943241,0.013789303600788116 78 | 76,0.014557552523911,0.023719649761915207 79 | 77,0.014117905404418707,0.01765803061425686 80 | 78,0.011634496040642262,0.02027723751962185 81 | 79,0.010444363113492727,0.014560981653630733 82 | 80,0.014219993725419044,0.016346609219908714 83 | 81,0.013945120386779308,0.018119344487786293 84 | 82,0.011952549684792757,0.01483562309294939 85 | 83,0.013410559855401516,0.015300240367650986 86 | 84,0.013103564269840717,0.016466008499264717 87 | 85,0.009338718838989735,0.015065615996718407 88 | 86,0.011618217919021845,0.014000136405229568 89 | 87,0.015327705536037683,0.019280195236206055 90 | 88,0.01056993706151843,0.02128037065267563 91 | 89,0.015184779185801744,0.016245342791080475 92 | 90,0.024951638653874397,0.012156042270362377 93 | 91,0.012088721618056297,0.013503234833478928 94 | 92,0.01124580716714263,0.016826719045639038 95 | 93,0.009402896743267775,0.02452748827636242 96 | 94,0.011698149610310793,0.01638117991387844 97 | 95,0.00992073817178607,0.013964982703328133 98 | 96,0.00973555352538824,0.01885552704334259 99 | 97,0.010637324769049883,0.01171132829040289 100 | 98,0.009674232918769121,0.015501925721764565 101 | 99,0.014108369592577219,0.012304299511015415 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-B_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A,logs/rot-y_exp-B 2 | 0,0.6539453268051147,0.5409694314002991 3 | 1,0.742766797542572,0.6718221306800842 4 | 2,0.7518016993999481,0.5903939604759216 5 | 3,0.7840099334716797,0.6847327947616577 6 | 4,0.7461138069629669,0.7209804058074951 7 | 5,0.809810072183609,0.744877815246582 8 | 6,0.8246830403804779,0.7585359811782837 9 | 7,0.8465828895568848,0.7752809524536133 10 | 8,0.8672221601009369,0.7979952692985535 11 | 9,0.8644307255744934,0.7815231680870056 12 | 10,0.8743984699249268,0.8033223748207092 13 | 11,0.884278804063797,0.83034747838974 14 | 12,0.9019626379013062,0.8294161558151245 15 | 13,0.9072136878967285,0.851375162601471 16 | 14,0.9091704189777374,0.8609939217567444 17 | 15,0.9132416546344757,0.8692182898521423 18 | 16,0.9157736003398895,0.8864220380783081 19 | 17,0.9173181653022766,0.8917028307914734 20 | 18,0.9186671376228333,0.8837647438049316 21 | 19,0.9302138388156891,0.9058263897895813 22 | 20,0.9150339365005493,0.9030463099479675 23 | 21,0.8959232866764069,0.8931152820587158 24 | 22,0.9336421489715576,0.8833728432655334 25 | 23,0.9324018359184265,0.9053963422775269 26 | 24,0.9347175359725952,0.9125784635543823 27 | 25,0.9384431838989258,0.8992658257484436 28 | 26,0.8883638978004456,0.8276993036270142 29 | 27,0.8993721604347229,0.9104119539260864 30 | 28,0.9231621325016022,0.9158927202224731 31 | 29,0.9428919553756714,0.9136813879013062 32 | 30,0.9403899312019348,0.921135425567627 33 | 31,0.9397079348564148,0.9137953519821167 34 | 32,0.9233942031860352,0.9188860654830933 35 | 33,0.8879663050174713,0.9154623746871948 36 | 
34,0.9260681867599487,0.9149370193481445 37 | 35,0.941923201084137,0.9190364480018616 38 | 36,0.9484826028347015,0.9209368824958801 39 | 37,0.9427487254142761,0.9150199890136719 40 | 38,0.9389656782150269,0.9221823215484619 41 | 39,0.9383200705051422,0.9110367298126221 42 | 40,0.9444885551929474,0.9290054440498352 43 | 41,0.9312306940555573,0.9241628646850586 44 | 42,0.9418866336345673,0.9143006205558777 45 | 43,0.9416375458240509,0.9233746528625488 46 | 44,0.9179112911224365,0.9164096117019653 47 | 45,0.9395870268344879,0.921299934387207 48 | 46,0.9431597590446472,0.9236639738082886 49 | 47,0.9419587850570679,0.9278364181518555 50 | 48,0.9190024137496948,0.9060739278793335 51 | 49,0.9473672807216644,0.8618124127388 52 | 50,0.936659187078476,0.9087358117103577 53 | 51,0.9458725452423096,0.9244033694267273 54 | 52,0.9437598288059235,0.9326962828636169 55 | 53,0.9413842856884003,0.9083617329597473 56 | 54,0.9086226224899292,0.9367667436599731 57 | 55,0.9493059813976288,0.9329184293746948 58 | 56,0.9503820836544037,0.9241851568222046 59 | 57,0.9475049078464508,0.9255092740058899 60 | 58,0.9425947070121765,0.9168649315834045 61 | 59,0.9484144747257233,0.9320240616798401 62 | 60,0.9458116888999939,0.9030877351760864 63 | 61,0.9344966113567352,0.9334945678710938 64 | 62,0.9473488926887512,0.9413015842437744 65 | 63,0.9251012802124023,0.9334835410118103 66 | 64,0.9398766756057739,0.9329627752304077 67 | 65,0.9341376721858978,0.9207750558853149 68 | 66,0.9430765807628632,0.9352291226387024 69 | 67,0.945593386888504,0.9403650164604187 70 | 68,0.9409202337265015,0.929381787776947 71 | 69,0.938223123550415,0.937008798122406 72 | 70,0.945827305316925,0.9285995364189148 73 | 71,0.9184952974319458,0.9208122491836548 74 | 72,0.9355522096157074,0.9172050952911377 75 | 73,0.9306965172290802,0.9324001669883728 76 | 74,0.9469799399375916,0.9342867136001587 77 | 75,0.9400181174278259,0.9280081987380981 78 | 76,0.9392223954200745,0.9289060831069946 79 | 77,0.944983720779419,0.933513343334198 80 | 78,0.9384738206863403,0.9336594343185425 81 | 79,0.9385436773300171,0.9339166879653931 82 | 80,0.9467271566390991,0.9293647408485413 83 | 81,0.9515658318996429,0.9281243681907654 84 | 82,0.9514214992523193,0.928607165813446 85 | 83,0.9392683804035187,0.9199115037918091 86 | 84,0.9537573456764221,0.9294354319572449 87 | 85,0.9489558041095734,0.9320634007453918 88 | 86,0.9328611493110657,0.9393773674964905 89 | 87,0.9518439173698425,0.9196452498435974 90 | 88,0.9498897194862366,0.9343097805976868 91 | 89,0.9438364803791046,0.9308452010154724 92 | 90,0.9454463124275208,0.9419030547142029 93 | 91,0.9445838630199432,0.9310951232910156 94 | 92,0.9520668089389801,0.9279868006706238 95 | 93,0.9498812258243561,0.9115938544273376 96 | 94,0.9496680498123169,0.9398550391197205 97 | 95,0.9385409653186798,0.9158100485801697 98 | 96,0.9461212456226349,0.9302794337272644 99 | 97,0.9490524530410767,0.9297571778297424 100 | 98,0.9457087218761444,0.9337142109870911 101 | 99,0.9427993893623352,0.9310497641563416 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-B_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A,logs/rot-y_exp-B 2 | 0,1.6696708649396896,0.7698100209236145 3 | 1,0.35152609646320343,0.4991225600242615 4 | 2,0.32228872179985046,0.6096028089523315 5 | 3,0.3076109290122986,0.407632052898407 6 | 4,1.1248935163021088,0.35194912552833557 7 | 5,0.2622956484556198,0.3287530839443207 8 | 
6,0.30043836683034897,0.3055974543094635 9 | 7,0.2145414724946022,0.2900542914867401 10 | 8,0.191425122320652,0.2677110731601715 11 | 9,0.19337152689695358,0.305938720703125 12 | 10,0.1878165379166603,0.26461899280548096 13 | 11,0.17548814415931702,0.23963788151741028 14 | 12,0.15045307949185371,0.2484489530324936 15 | 13,0.14331528916954994,0.22124163806438446 16 | 14,0.14551904797554016,0.21334952116012573 17 | 15,0.13643063232302666,0.19927118718624115 18 | 16,0.1343585103750229,0.1731223613023758 19 | 17,0.282934982329607,0.16793060302734375 20 | 18,0.12827210873365402,0.18004678189754486 21 | 19,0.12465986981987953,0.1487322449684143 22 | 20,0.14107241109013557,0.14958439767360687 23 | 21,0.16108069568872452,0.1703510284423828 24 | 22,0.10673518106341362,0.18875859677791595 25 | 23,0.1085176169872284,0.149639293551445 26 | 24,0.10562336817383766,0.13747431337833405 27 | 25,0.09877025336027145,0.15761099755764008 28 | 26,0.17687615007162094,0.35740140080451965 29 | 27,0.249521866440773,0.14456738531589508 30 | 28,2.1805965192615986,0.1328963190317154 31 | 29,0.18189551681280136,0.13884489238262177 32 | 30,1.3662018813192844,0.12166834622621536 33 | 31,0.12521642819046974,0.18255198001861572 34 | 32,1.9019804671406746,0.12926419079303741 35 | 33,153.80924072116613,0.25217095017433167 36 | 34,318.09401055797935,0.19822517037391663 37 | 35,7.604392224922776,0.12934193015098572 38 | 36,0.12971819564700127,0.20767870545387268 39 | 37,0.17099640145897865,0.2563929855823517 40 | 38,0.16413310170173645,0.12775656580924988 41 | 39,3.9180420227348804,0.7840139269828796 42 | 40,0.09252845868468285,0.11711665242910385 43 | 41,32.57292525097728,0.13903208076953888 44 | 42,1.0010876879096031,0.22714863717556 45 | 43,0.1223372258245945,0.11842510104179382 46 | 44,0.12864824011921883,0.4282025694847107 47 | 45,0.648179043084383,0.12328885495662689 48 | 46,0.17762398719787598,0.13065510988235474 49 | 47,0.5248917602002621,0.11718611419200897 50 | 48,0.131934504956007,0.2658053934574127 51 | 49,0.47408427111804485,0.4021584391593933 52 | 50,0.10702351480722427,0.1445142775774002 53 | 51,0.3442074544727802,0.12219571322202682 54 | 52,4.64367913454771,0.1091252863407135 55 | 53,0.21701787412166595,0.6084853410720825 56 | 54,1010.1531843859702,0.10200157761573792 57 | 55,0.11703539080917835,0.10847179591655731 58 | 56,0.909787654876709,0.8602520823478699 59 | 57,0.08563509956002235,0.2637905180454254 60 | 58,0.5114591009914875,0.13923703134059906 61 | 59,1.1515621580183506,0.10847076028585434 62 | 60,0.4018517732620239,9.648314476013184 63 | 61,0.6022852137684822,0.107353575527668 64 | 62,0.6723063290119171,0.09631999582052231 65 | 63,0.6237734854221344,0.1274210512638092 66 | 64,0.7516481466591358,0.12279421836137772 67 | 65,26.550629049539566,0.1628768891096115 68 | 66,0.16266628727316856,0.1022782176733017 69 | 67,0.5127294808626175,0.09578671306371689 70 | 68,4.995724428445101,0.11694175750017166 71 | 69,1.5431141145527363,0.10104011744260788 72 | 70,3.2249329686164856,0.22936823964118958 73 | 71,0.1809476688504219,2.192290782928467 74 | 72,0.28853416442871094,0.5438995957374573 75 | 73,1.0946528315544128,0.10952230542898178 76 | 74,0.7537890411913395,0.11956431716680527 77 | 75,0.29799632728099823,0.6835770606994629 78 | 76,0.14474334940314293,0.1410527229309082 79 | 77,0.25393715873360634,0.1248369812965393 80 | 78,2.870554883033037,0.7277588844299316 81 | 79,1.3648642972111702,0.4312831163406372 82 | 80,0.20048223435878754,0.24836203455924988 83 | 81,1.027121415361762,3.085212469100952 84 | 
82,1.8045132365077734,0.2828049957752228 85 | 83,1.2532427944242954,5.187252044677734 86 | 84,0.33841184340417385,3.763049364089966 87 | 85,1.447501266375184,1.3007954359054565 88 | 86,4.393560983240604,0.10025002062320709 89 | 87,4.466461207717657,0.7940567135810852 90 | 88,0.2814765051007271,0.8610344529151917 91 | 89,2.8748888671398163,0.4213177263736725 92 | 90,0.5846006199717522,0.589939296245575 93 | 91,6.728190042078495,0.7431172132492065 94 | 92,4.183903608471155,0.20772874355316162 95 | 93,3.2976580522954464,7.324916362762451 96 | 94,1.153687585145235,5.8619465827941895 97 | 95,9.246574521064758,6.778613567352295 98 | 96,6.028553701937199,0.2298986166715622 99 | 97,24.565971955657005,0.49012354016304016 100 | 98,47.16810439527035,2.4163568019866943 101 | 99,19.0687445551157,0.7023542523384094 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-C_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.6042942404747009 3 | 1,0.6944171190261841 4 | 2,0.732487142086029 5 | 3,0.7549425363540649 6 | 4,0.7781610488891602 7 | 5,0.7991511225700378 8 | 6,0.8156494498252869 9 | 7,0.83650803565979 10 | 8,0.8548426628112793 11 | 9,0.8776279091835022 12 | 10,0.8986260294914246 13 | 11,0.913242757320404 14 | 12,0.9287645220756531 15 | 13,0.93578040599823 16 | 14,0.9470317959785461 17 | 15,0.9501326084136963 18 | 16,0.9475334286689758 19 | 17,0.9592606425285339 20 | 18,0.9649060368537903 21 | 19,0.9705646634101868 22 | 20,0.952520489692688 23 | 21,0.9703450202941895 24 | 22,0.9780378341674805 25 | 23,0.9800664186477661 26 | 24,0.9806569218635559 27 | 25,0.9814679622650146 28 | 26,0.9782193303108215 29 | 27,0.9829920530319214 30 | 28,0.9690074920654297 31 | 29,0.9834131002426147 32 | 30,0.9862252473831177 33 | 31,0.9748228192329407 34 | 32,0.9844889044761658 35 | 33,0.9889129400253296 36 | 34,0.9797295331954956 37 | 35,0.9820728302001953 38 | 36,0.9907276630401611 39 | 37,0.9917605519294739 40 | 38,0.9862061142921448 41 | 39,0.9800124168395996 42 | 40,0.9800037145614624 43 | 41,0.9788504242897034 44 | 42,0.9881665706634521 45 | 43,0.9929872155189514 46 | 44,0.9897159337997437 47 | 45,0.9923257827758789 48 | 46,0.9901899695396423 49 | 47,0.9923001527786255 50 | 48,0.9939554929733276 51 | 49,0.9890834093093872 52 | 50,0.9948206543922424 53 | 51,0.9750484824180603 54 | 52,0.9898048639297485 55 | 53,0.9946635961532593 56 | 54,0.992899477481842 57 | 55,0.9953669905662537 58 | 56,0.9881606101989746 59 | 57,0.9945845603942871 60 | 58,0.9915138483047485 61 | 59,0.9939668774604797 62 | 60,0.9926843643188477 63 | 61,0.9909040331840515 64 | 62,0.9938371181488037 65 | 63,0.994929850101471 66 | 64,0.9939595460891724 67 | 65,0.993584394454956 68 | 66,0.9908847808837891 69 | 67,0.9927439093589783 70 | 68,0.9954361319541931 71 | 69,0.9940133690834045 72 | 70,0.9957529902458191 73 | 71,0.9943860769271851 74 | 72,0.9962555170059204 75 | 73,0.992772102355957 76 | 74,0.9874093532562256 77 | 75,0.9941486716270447 78 | 76,0.9945607781410217 79 | 77,0.9944590926170349 80 | 78,0.9961633682250977 81 | 79,0.9940906763076782 82 | 80,0.9928048849105835 83 | 81,0.9937409162521362 84 | 82,0.9917569756507874 85 | 83,0.9932020306587219 86 | 84,0.995654821395874 87 | 85,0.9943497776985168 88 | 86,0.9967876076698303 89 | 87,0.9955793023109436 90 | 88,0.9848641157150269 91 | 89,0.9952899217605591 92 | 90,0.9938852190971375 93 | 91,0.9964104890823364 94 | 92,0.9962943196296692 95 | 
93,0.9937224388122559 96 | 94,0.9960552453994751 97 | 95,0.9954787492752075 98 | 96,0.996870219707489 99 | 97,0.9970564246177673 100 | 98,0.9945619106292725 101 | 99,0.9967410564422607 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-C_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.4680740535259247 3 | 1,0.39593029022216797 4 | 2,0.35396236181259155 5 | 3,0.32393014430999756 6 | 4,0.2966093420982361 7 | 5,0.2756187319755554 8 | 6,0.256979376077652 9 | 7,0.23626019060611725 10 | 8,0.2155771255493164 11 | 9,0.1904521882534027 12 | 10,0.16400375962257385 13 | 11,0.14445175230503082 14 | 12,0.12417299300432205 15 | 13,0.11431942135095596 16 | 14,0.09884272515773773 17 | 15,0.09297215193510056 18 | 16,0.0972762480378151 19 | 17,0.08204986900091171 20 | 18,0.07099898159503937 21 | 19,0.06254486739635468 22 | 20,0.08847849816083908 23 | 21,0.06213992461562157 24 | 22,0.050351567566394806 25 | 23,0.046191755682229996 26 | 24,0.0452505424618721 27 | 25,0.04339016228914261 28 | 26,0.047645583748817444 29 | 27,0.04091186448931694 30 | 28,0.06276704370975494 31 | 29,0.0394023172557354 32 | 30,0.032897692173719406 33 | 31,0.05288257449865341 34 | 32,0.037675805389881134 35 | 33,0.02930859848856926 36 | 34,0.044806353747844696 37 | 35,0.04138825833797455 38 | 36,0.02581314742565155 39 | 37,0.02315927855670452 40 | 38,0.03387376293540001 41 | 39,0.0430319719016552 42 | 40,0.04433375224471092 43 | 41,0.045861538499593735 44 | 42,0.029445501044392586 45 | 43,0.020773859694600105 46 | 44,0.026288527995347977 47 | 45,0.021795017644762993 48 | 46,0.026146283373236656 49 | 47,0.02181907743215561 50 | 48,0.018022725358605385 51 | 49,0.027558287605643272 52 | 50,0.01664908602833748 53 | 51,0.04948771372437477 54 | 52,0.026512224227190018 55 | 53,0.017121510580182076 56 | 54,0.020479785278439522 57 | 55,0.015235587023198605 58 | 56,0.028288215398788452 59 | 57,0.017076455056667328 60 | 58,0.023221315816044807 61 | 59,0.0178204458206892 62 | 60,0.019662339240312576 63 | 61,0.023307666182518005 64 | 62,0.017612949013710022 65 | 63,0.01633664406836033 66 | 64,0.017592953518033028 67 | 65,0.01883736625313759 68 | 66,0.023244166746735573 69 | 67,0.019905615597963333 70 | 68,0.014730055816471577 71 | 69,0.017280064523220062 72 | 70,0.013783654198050499 73 | 71,0.016492627561092377 74 | 72,0.013050176203250885 75 | 73,0.020375322550535202 76 | 74,0.028159262612462044 77 | 75,0.017110781744122505 78 | 76,0.01654202677309513 79 | 77,0.016318734735250473 80 | 78,0.013315634801983833 81 | 79,0.017005078494548798 82 | 80,0.01931796595454216 83 | 81,0.01838141307234764 84 | 82,0.020580215379595757 85 | 83,0.01781565509736538 86 | 84,0.013733717612922192 87 | 85,0.01635901629924774 88 | 86,0.011716589331626892 89 | 87,0.01409885659813881 90 | 88,0.031949616968631744 91 | 89,0.014977261424064636 92 | 90,0.017175894230604172 93 | 91,0.012445149011909962 94 | 92,0.012392153032124043 95 | 93,0.018580805510282516 96 | 94,0.013020232319831848 97 | 95,0.0138942776247859 98 | 96,0.011398358270525932 99 | 97,0.010370803065598011 100 | 98,0.016310814768075943 101 | 99,0.011302262544631958 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-C_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.6763265132904053 3 | 1,0.6662256717681885 
4 | 2,0.6904189586639404 5 | 3,0.7393811941146851 6 | 4,0.7652138471603394 7 | 5,0.7761942148208618 8 | 6,0.7851055860519409 9 | 7,0.818626344203949 10 | 8,0.8226867914199829 11 | 9,0.8481512665748596 12 | 10,0.8518550395965576 13 | 11,0.8763121366500854 14 | 12,0.8796267509460449 15 | 13,0.8858419060707092 16 | 14,0.884300708770752 17 | 15,0.8089097142219543 18 | 16,0.8512205481529236 19 | 17,0.904886782169342 20 | 18,0.9082679152488708 21 | 19,0.8915237188339233 22 | 20,0.9042062759399414 23 | 21,0.9124149680137634 24 | 22,0.9171150326728821 25 | 23,0.9065332412719727 26 | 24,0.9217579960823059 27 | 25,0.9157729744911194 28 | 26,0.8733690977096558 29 | 27,0.8974945545196533 30 | 28,0.9113619923591614 31 | 29,0.9270499348640442 32 | 30,0.8720290660858154 33 | 31,0.8984068036079407 34 | 32,0.9080531597137451 35 | 33,0.8689388632774353 36 | 34,0.9200780987739563 37 | 35,0.9195512533187866 38 | 36,0.9221932888031006 39 | 37,0.9293866157531738 40 | 38,0.9258051514625549 41 | 39,0.9126376509666443 42 | 40,0.9222773313522339 43 | 41,0.9124565124511719 44 | 42,0.9179772734642029 45 | 43,0.9307128190994263 46 | 44,0.9246641397476196 47 | 45,0.9348067045211792 48 | 46,0.9213384985923767 49 | 47,0.9287809729576111 50 | 48,0.831829845905304 51 | 49,0.9304068684577942 52 | 50,0.9356265664100647 53 | 51,0.9124912619590759 54 | 52,0.9317522048950195 55 | 53,0.9316222667694092 56 | 54,0.9337998628616333 57 | 55,0.9366926550865173 58 | 56,0.9284136295318604 59 | 57,0.928440511226654 60 | 58,0.916506826877594 61 | 59,0.9359679222106934 62 | 60,0.9274511337280273 63 | 61,0.9352914094924927 64 | 62,0.9270524978637695 65 | 63,0.9349729418754578 66 | 64,0.9202319383621216 67 | 65,0.9261514544487 68 | 66,0.9250791072845459 69 | 67,0.9325145483016968 70 | 68,0.9334625005722046 71 | 69,0.9353182911872864 72 | 70,0.9284601211547852 73 | 71,0.9404886364936829 74 | 72,0.9401715397834778 75 | 73,0.9297958612442017 76 | 74,0.9270153641700745 77 | 75,0.9365927577018738 78 | 76,0.9232490658760071 79 | 77,0.9357755184173584 80 | 78,0.9342222213745117 81 | 79,0.9350659251213074 82 | 80,0.9152461886405945 83 | 81,0.9330258965492249 84 | 82,0.931658148765564 85 | 83,0.934105396270752 86 | 84,0.9379234910011292 87 | 85,0.934994101524353 88 | 86,0.9331852197647095 89 | 87,0.9187940359115601 90 | 88,0.9337748885154724 91 | 89,0.9427176713943481 92 | 90,0.9429637789726257 93 | 91,0.9360564947128296 94 | 92,0.9370009303092957 95 | 93,0.9311383366584778 96 | 94,0.9381106495857239 97 | 95,0.9409337639808655 98 | 96,0.9395820498466492 99 | 97,0.9384344220161438 100 | 98,0.9377941489219666 101 | 99,0.9418402910232544 102 | -------------------------------------------------------------------------------- /utils/csv_output/exp-C_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_exp-A 2 | 0,0.4088100492954254 3 | 1,0.40818893909454346 4 | 2,0.3914930522441864 5 | 3,0.34070777893066406 6 | 4,0.3161851763725281 7 | 5,0.29517441987991333 8 | 6,0.28945615887641907 9 | 7,0.2522716224193573 10 | 8,0.2526489496231079 11 | 9,0.22011427581310272 12 | 10,0.21841426193714142 13 | 11,0.182856485247612 14 | 12,0.18107640743255615 15 | 13,0.1732380986213684 16 | 14,0.17659448087215424 17 | 15,0.28085488080978394 18 | 16,0.2234230488538742 19 | 17,0.1508968323469162 20 | 18,0.14544695615768433 21 | 19,0.16891327500343323 22 | 20,0.149412602186203 23 | 21,0.13722364604473114 24 | 22,0.13284939527511597 25 | 23,0.14590886235237122 26 | 24,0.12616318464279175 27 | 
25,0.13587354123592377 28 | 26,0.1963968276977539 29 | 27,0.16659493744373322 30 | 28,0.14130817353725433 31 | 29,0.11832747608423233 32 | 30,0.22080330550670624 33 | 31,0.1572859287261963 34 | 32,0.145426943898201 35 | 33,0.19852502644062042 36 | 34,0.12609125673770905 37 | 35,0.12563377618789673 38 | 36,0.12439163774251938 39 | 37,0.11828288435935974 40 | 38,0.11780350655317307 41 | 39,0.14312675595283508 42 | 40,0.12684880197048187 43 | 41,0.13849447667598724 44 | 42,0.13454076647758484 45 | 43,0.10838907957077026 46 | 44,0.12185433506965637 47 | 45,0.10938802361488342 48 | 46,0.1305336356163025 49 | 47,0.11265148967504501 50 | 48,0.26578739285469055 51 | 49,0.10984429717063904 52 | 50,0.0994388535618782 53 | 51,0.1371806114912033 54 | 52,0.1092693954706192 55 | 53,0.10701253265142441 56 | 54,0.11263889819383621 57 | 55,0.10263968259096146 58 | 56,0.1110641360282898 59 | 57,0.12382184714078903 60 | 58,0.12753084301948547 61 | 59,0.10167315602302551 62 | 60,0.11349555850028992 63 | 61,0.10248947143554688 64 | 62,0.11823619157075882 65 | 63,0.10283500701189041 66 | 64,0.13350807130336761 67 | 65,0.117850162088871 68 | 66,0.12022719532251358 69 | 67,0.10828783363103867 70 | 68,0.10623616725206375 71 | 69,0.10600855201482773 72 | 70,0.1161767914891243 73 | 71,0.0964265987277031 74 | 72,0.10098222643136978 75 | 73,0.11030583083629608 76 | 74,0.11764663457870483 77 | 75,0.1002907007932663 78 | 76,0.13107113540172577 79 | 77,0.10344909131526947 80 | 78,0.10391756147146225 81 | 79,0.10440479964017868 82 | 80,0.13059674203395844 83 | 81,0.107668437063694 84 | 82,0.12885096669197083 85 | 83,0.10540973395109177 86 | 84,0.10174394398927689 87 | 85,0.1025443896651268 88 | 86,0.10397017002105713 89 | 87,0.23722244799137115 90 | 88,0.11846856027841568 91 | 89,0.09231862425804138 92 | 90,0.09290765970945358 93 | 91,0.10736748576164246 94 | 92,0.10157272219657898 95 | 93,0.10897624492645264 96 | 94,0.10176310688257217 97 | 95,0.09583033621311188 98 | 96,0.09740269929170609 99 | 97,0.10151658952236176 100 | 98,0.10172227025032043 101 | 99,0.09174448996782303 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_2_bin_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,bins/logs/rot-y_multibin 2 | 0,0.6364774256944656 3 | 1,0.6404567956924438 4 | 2,0.6424201428890228 5 | 3,0.6379750967025757 6 | 4,0.6433458924293518 7 | 5,0.6362844705581665 8 | 6,0.6365243047475815 9 | 7,0.645372211933136 10 | 8,0.6465469300746918 11 | 9,0.6473226547241211 12 | 10,0.6529091596603394 13 | 11,0.6549556851387024 14 | 12,0.6539251804351807 15 | 13,0.659386545419693 16 | 14,0.6594007611274719 17 | 15,0.6648734211921692 18 | 16,0.6645374000072479 19 | 17,0.665499359369278 20 | 18,0.6671071946620941 21 | 19,0.6664603650569916 22 | 20,0.6683927178382874 23 | 21,0.6704432964324951 24 | 22,0.6681660115718842 25 | 23,0.6700771152973175 26 | 24,0.6656463146209717 27 | 25,0.668890655040741 28 | 26,0.670161634683609 29 | 27,0.6691921949386597 30 | 28,0.6686470359563828 31 | 29,0.669218510389328 32 | 30,0.6717070937156677 33 | 31,0.6678430140018463 34 | 32,0.6708354353904724 35 | 33,0.6728760749101639 36 | 34,0.6752214431762695 37 | 35,0.6751722991466522 38 | 36,0.6748073101043701 39 | 37,0.674422100186348 40 | 38,0.6742368936538696 41 | 39,0.6756955087184906 42 | 40,0.6774636209011078 43 | 41,0.6771893203258514 44 | 42,0.6738326847553253 45 | 43,0.6757023185491562 46 | 44,0.6778578460216522 47 | 
45,0.6763264536857605 48 | 46,0.6749691665172577 49 | 47,0.6760756075382233 50 | 48,0.674308717250824 51 | 49,0.6778042018413544 52 | 50,0.6763716042041779 53 | 51,0.6784817427396774 54 | 52,0.6773827821016312 55 | 53,0.6757663190364838 56 | 54,0.6777926385402679 57 | 55,0.6796364486217499 58 | 56,0.6765303015708923 59 | 57,0.6782780587673187 60 | 58,0.678018718957901 61 | 59,0.6800373196601868 62 | 60,0.6777323782444 63 | 61,0.6774869561195374 64 | 62,0.6752618998289108 65 | 63,0.6797653436660767 66 | 64,0.6799356043338776 67 | 65,0.6783954799175262 68 | 66,0.6804667711257935 69 | 67,0.6818868219852448 70 | 68,0.6782740652561188 71 | 69,0.6773740947246552 72 | 70,0.6791952252388 73 | 71,0.6820371747016907 74 | 72,0.679493322968483 75 | 73,0.674627423286438 76 | 74,0.6797760725021362 77 | 75,0.6792221516370773 78 | 76,0.6816938519477844 79 | 77,0.6776654571294785 80 | 78,0.6786770820617676 81 | 79,0.6779235601425171 82 | 80,0.6790441274642944 83 | 81,0.6800603717565536 84 | 82,0.6811229586601257 85 | 83,0.6796829402446747 86 | 84,0.6800612211227417 87 | 85,0.67886883020401 88 | 86,0.6799986958503723 89 | 87,0.6806543171405792 90 | 88,0.6793988645076752 91 | 89,0.682193785905838 92 | 90,0.6812185347080231 93 | 91,0.6816729605197906 94 | 92,0.6814039647579193 95 | 93,0.6836266815662384 96 | 94,0.678368479013443 97 | 95,0.6825132071971893 98 | 96,0.6832332611083984 99 | 97,0.6810125410556793 100 | 98,0.6815325915813446 101 | 99,0.6790782958269119 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_2_bin_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,bins/logs/rot-y_multibin 2 | 0,0.3316621631383896 3 | 1,0.2729806452989578 4 | 2,0.24421586096286774 5 | 3,0.2254568636417389 6 | 4,0.209992453455925 7 | 5,0.19606579840183258 8 | 6,0.18352707475423813 9 | 7,0.17003832757472992 10 | 8,0.15657736361026764 11 | 9,0.1422332376241684 12 | 10,0.12412147969007492 13 | 11,0.10632858797907829 14 | 12,0.09271307662129402 15 | 13,0.08152966573834419 16 | 14,0.0731319971382618 17 | 15,0.06322785280644894 18 | 16,0.06505078636109829 19 | 17,0.05762944370508194 20 | 18,0.05158030986785889 21 | 19,0.04399864189326763 22 | 20,0.04838421568274498 23 | 21,0.0396678913384676 24 | 22,0.03798241168260574 25 | 23,0.037777822464704514 26 | 24,0.04431829787790775 27 | 25,0.04527454264461994 28 | 26,0.044715916737914085 29 | 27,0.03783428855240345 30 | 28,0.035094243474304676 31 | 29,0.05491584725677967 32 | 30,0.033265551552176476 33 | 31,0.03444904275238514 34 | 32,0.03198978491127491 35 | 33,0.020693916827440262 36 | 34,0.02302937302738428 37 | 35,0.02017770241945982 38 | 36,0.030752831138670444 39 | 37,0.027422327548265457 40 | 38,0.024082018993794918 41 | 39,0.019513120874762535 42 | 40,0.02173994481563568 43 | 41,0.018894623033702374 44 | 42,0.032811044715344906 45 | 43,0.02339213900268078 46 | 44,0.017983808647841215 47 | 45,0.01748627796769142 48 | 46,0.019887814298272133 49 | 47,0.018690168857574463 50 | 48,0.023831534199416637 51 | 49,0.018034442327916622 52 | 50,0.02436262834817171 53 | 51,0.01744937477633357 54 | 52,0.017836561892181635 55 | 53,0.02179720811545849 56 | 54,0.020141726825386286 57 | 55,0.017092255409806967 58 | 56,0.014833168592303991 59 | 57,0.017104195430874825 60 | 58,0.014533726032823324 61 | 59,0.013726493809372187 62 | 60,0.02151683857664466 63 | 61,0.02043248899281025 64 | 62,0.015788627322763205 65 | 63,0.01589789381250739 66 | 64,0.013752827886492014 67 | 
65,0.013523598201572895 68 | 66,0.016683723777532578 69 | 67,0.011422434356063604 70 | 68,0.013150636106729507 71 | 69,0.014682593289762735 72 | 70,0.01244552107527852 73 | 71,0.012582309544086456 74 | 72,0.012404580134898424 75 | 73,0.01839954685419798 76 | 74,0.011357018258422613 77 | 75,0.011058616451919079 78 | 76,0.010606983909383416 79 | 77,0.014090724289417267 80 | 78,0.015067188534885645 81 | 79,0.012413931544870138 82 | 80,0.010873433202505112 83 | 81,0.010501126758754253 84 | 82,0.008670778013765812 85 | 83,0.014228471089154482 86 | 84,0.015257004648447037 87 | 85,0.015413884073495865 88 | 86,0.019776672590523958 89 | 87,0.016291688662022352 90 | 88,0.010909699834883213 91 | 89,0.008943103719502687 92 | 90,0.014403123408555984 93 | 91,0.01642521284520626 94 | 92,0.013752772007137537 95 | 93,0.00868172850459814 96 | 94,0.012429169844835997 97 | 95,0.010037017986178398 98 | 96,0.01344647677615285 99 | 97,0.011567545821890235 100 | 98,0.01044416707009077 101 | 99,0.009896555915474892 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_2_bin_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,bins/logs/rot-y_multibin 2 | 0,0.52231664955616 3 | 1,0.6856265068054199 4 | 2,0.6680701673030853 5 | 3,0.44650907069444656 6 | 4,0.6695874035358429 7 | 5,0.6520446836948395 8 | 6,0.5348782986402512 9 | 7,0.8330416083335876 10 | 8,0.6165135204792023 11 | 9,0.7227248847484589 12 | 10,0.6442227959632874 13 | 11,0.7836908996105194 14 | 12,0.6652472019195557 15 | 13,0.7385971248149872 16 | 14,0.755437821149826 17 | 15,0.5745496451854706 18 | 16,0.6846127510070801 19 | 17,0.760746568441391 20 | 18,0.6910718679428101 21 | 19,0.6482223719358444 22 | 20,0.6651661396026611 23 | 21,0.6197859346866608 24 | 22,0.6904831230640411 25 | 23,0.5992656350135803 26 | 24,0.7162262499332428 27 | 25,0.7096388638019562 28 | 26,0.6811275780200958 29 | 27,0.6431783139705658 30 | 28,0.6754511296749115 31 | 29,0.642986387014389 32 | 30,0.5951425433158875 33 | 31,0.5924495607614517 34 | 32,0.6003582179546356 35 | 33,0.5761773884296417 36 | 34,0.7707976698875427 37 | 35,0.6962075233459473 38 | 36,0.545121043920517 39 | 37,0.6146388053894043 40 | 38,0.5596119612455368 41 | 39,0.721345067024231 42 | 40,0.6791097521781921 43 | 41,0.5403576344251633 44 | 42,0.728301614522934 45 | 43,0.669027104973793 46 | 44,0.7031333446502686 47 | 45,0.7283302843570709 48 | 46,0.5566553771495819 49 | 47,0.6872655153274536 50 | 48,0.7243080735206604 51 | 49,0.5610879063606262 52 | 50,0.7801924645900726 53 | 51,0.5882973372936249 54 | 52,0.6688424348831177 55 | 53,0.6768398880958557 56 | 54,0.7815794348716736 57 | 55,0.6238159090280533 58 | 56,0.544615775346756 59 | 57,0.7623897790908813 60 | 58,0.7996519804000854 61 | 59,0.7408036589622498 62 | 60,0.6795912384986877 63 | 61,0.60232874751091 64 | 62,0.6893229186534882 65 | 63,0.5726227164268494 66 | 64,0.5969439595937729 67 | 65,0.7894322574138641 68 | 66,0.7549681961536407 69 | 67,0.6846739947795868 70 | 68,0.7578261196613312 71 | 69,0.6775938868522644 72 | 70,0.7651573419570923 73 | 71,0.6301376223564148 74 | 72,0.6226622611284256 75 | 73,0.7911657691001892 76 | 74,0.6913823783397675 77 | 75,0.7981168031692505 78 | 76,0.6893911957740784 79 | 77,0.6048889309167862 80 | 78,0.5491914004087448 81 | 79,0.6675737798213959 82 | 80,0.6392944157123566 83 | 81,0.5539171472191811 84 | 82,0.6662276536226273 85 | 83,0.8012245893478394 86 | 84,0.6885189116001129 87 | 
85,0.62159264087677 88 | 86,0.734470397233963 89 | 87,0.6867640614509583 90 | 88,0.5443483963608742 91 | 89,0.6180028915405273 92 | 90,0.7310459315776825 93 | 91,0.5560125708580017 94 | 92,0.7762938141822815 95 | 93,0.7440352737903595 96 | 94,0.6655671447515488 97 | 95,0.5815018713474274 98 | 96,0.737959623336792 99 | 97,0.5871940404176712 100 | 98,0.5400440692901611 101 | 99,0.7042824625968933 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_2_bin_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,bins/logs/rot-y_multibin 2 | 0,0.9716113358736038 3 | 1,0.29656296968460083 4 | 2,0.2878064513206482 5 | 3,0.23549100756645203 6 | 4,0.2119506150484085 7 | 5,0.2157125324010849 8 | 6,0.19199103116989136 9 | 7,0.1922469139099121 10 | 8,0.17774584144353867 11 | 9,0.23308104276657104 12 | 10,0.15016307681798935 13 | 11,0.14047975093126297 14 | 12,0.1546884924173355 15 | 13,0.12178680300712585 16 | 14,0.12607698142528534 17 | 15,0.14293596893548965 18 | 16,0.1333567127585411 19 | 17,0.10018004849553108 20 | 18,0.1670440398156643 21 | 19,0.10738372057676315 22 | 20,0.1667959913611412 23 | 21,0.15547170862555504 24 | 22,0.19834943860769272 25 | 23,0.12914417684078217 26 | 24,0.1475038044154644 27 | 25,0.16057062149047852 28 | 26,0.2995956726372242 29 | 27,9.12888989597559 30 | 28,4.022879522293806 31 | 29,0.0947624109685421 32 | 30,0.10702608153223991 33 | 31,0.13143296539783478 34 | 32,0.09518911316990852 35 | 33,0.7653288654983044 36 | 34,0.09936877340078354 37 | 35,0.19854556024074554 38 | 36,0.10389238968491554 39 | 37,0.34588684514164925 40 | 38,0.8653148449957371 41 | 39,4.9264020547270775 42 | 40,2.87305923178792 43 | 41,0.11811546981334686 44 | 42,0.08074861019849777 45 | 43,0.07469425350427628 46 | 44,0.11323663592338562 47 | 45,0.22992784157395363 48 | 46,0.9011892899870872 49 | 47,0.09686580300331116 50 | 48,0.08644737675786018 51 | 49,0.16559535637497902 52 | 50,0.16024744510650635 53 | 51,0.07448841258883476 54 | 52,0.08844548091292381 55 | 53,0.08455567061901093 56 | 54,0.08670477941632271 57 | 55,1.2567328810691833 58 | 56,1.08005191385746 59 | 57,13.647068928927183 60 | 58,0.07277059182524681 61 | 59,32.528064500540495 62 | 60,1.9966972470283508 63 | 61,1.8272605016827583 64 | 62,0.3905223309993744 65 | 63,0.16984988749027252 66 | 64,0.3941705971956253 67 | 65,0.39730004966259 68 | 66,1.1367217637598515 69 | 67,335.9109493121505 70 | 68,12.22239138931036 71 | 69,1.68156224116683 72 | 70,0.0960344597697258 73 | 71,0.0839514397084713 74 | 72,0.10921547189354897 75 | 73,0.14044076204299927 76 | 74,0.09037190675735474 77 | 75,0.09744089096784592 78 | 76,4.345095701515675 79 | 77,46.17459120973945 80 | 78,1.2582806907594204 81 | 79,10.404278460890055 82 | 80,21.199325058609247 83 | 81,1901.178213916719 84 | 82,28.361846555024385 85 | 83,8.593322768807411 86 | 84,0.10195677727460861 87 | 85,1.4251599311828613 88 | 86,418.9629546366632 89 | 87,18.929166689515114 90 | 88,20.826321303844452 91 | 89,1.2972214370965958 92 | 90,5.640415642410517 93 | 91,7.3656711876392365 94 | 92,0.6004504188895226 95 | 93,0.7657100260257721 96 | 94,0.30230629816651344 97 | 95,0.6624750159680843 98 | 96,0.09269993007183075 99 | 97,0.07171614095568657 100 | 98,0.07990240305662155 101 | 99,0.06913416087627411 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_4_bin_cleaned_training_accuracy.csv: 
-------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.49435684084892273 3 | 1,0.5011619329452515 4 | 2,0.5049319863319397 5 | 3,0.49482452869415283 6 | 4,0.503984808921814 7 | 5,0.49121618270874023 8 | 6,0.49521175026893616 9 | 7,0.5083014369010925 10 | 8,0.5075821280479431 11 | 9,0.5053611397743225 12 | 10,0.5084657669067383 13 | 11,0.503389835357666 14 | 12,0.49751150608062744 15 | 13,0.5030765533447266 16 | 14,0.5008565187454224 17 | 15,0.5056978464126587 18 | 16,0.5044467449188232 19 | 17,0.5048827528953552 20 | 18,0.5034015774726868 21 | 19,0.5003069043159485 22 | 20,0.5022529363632202 23 | 21,0.5028795003890991 24 | 22,0.5034143924713135 25 | 23,0.5041339993476868 26 | 24,0.5015771985054016 27 | 25,0.5000021457672119 28 | 26,0.5027535557746887 29 | 27,0.5022955536842346 30 | 28,0.4988979995250702 31 | 29,0.5010260939598083 32 | 30,0.5010988116264343 33 | 31,0.5005591511726379 34 | 32,0.5015374422073364 35 | 33,0.4996916353702545 36 | 34,0.49983495473861694 37 | 35,0.5002853274345398 38 | 36,0.5003752112388611 39 | 37,0.49928537011146545 40 | 38,0.5006262063980103 41 | 39,0.4996160864830017 42 | 40,0.5017666220664978 43 | 41,0.5019720196723938 44 | 42,0.4991300702095032 45 | 43,0.4979267418384552 46 | 44,0.5013131499290466 47 | 45,0.5014316439628601 48 | 46,0.49841588735580444 49 | 47,0.4966474771499634 50 | 48,0.49993985891342163 51 | 49,0.5001904368400574 52 | 50,0.5008471012115479 53 | 51,0.49945005774497986 54 | 52,0.4975368082523346 55 | 53,0.4992374777793884 56 | 54,0.5027960538864136 57 | 55,0.5016849637031555 58 | 56,0.5001382827758789 59 | 57,0.4997164011001587 60 | 58,0.5009957551956177 61 | 59,0.5010706186294556 62 | 60,0.4996848702430725 63 | 61,0.4998314380645752 64 | 62,0.49899664521217346 65 | 63,0.5002076029777527 66 | 64,0.5010014176368713 67 | 65,0.5021624565124512 68 | 66,0.5004079937934875 69 | 67,0.5024847388267517 70 | 68,0.49926692247390747 71 | 69,0.497900128364563 72 | 70,0.5012137293815613 73 | 71,0.5004401803016663 74 | 72,0.49891290068626404 75 | 73,0.4981353282928467 76 | 74,0.5023921728134155 77 | 75,0.4989829361438751 78 | 76,0.4986993670463562 79 | 77,0.49945732951164246 80 | 78,0.5010159015655518 81 | 79,0.5001115202903748 82 | 80,0.49789804220199585 83 | 81,0.49881312251091003 84 | 82,0.5006778836250305 85 | 83,0.4995092749595642 86 | 84,0.49924421310424805 87 | 85,0.4989585876464844 88 | 86,0.500891387462616 89 | 87,0.500886857509613 90 | 88,0.5002598762512207 91 | 89,0.49956637620925903 92 | 90,0.5022526383399963 93 | 91,0.5017555952072144 94 | 92,0.5008655190467834 95 | 93,0.5017520785331726 96 | 94,0.4978599548339844 97 | 95,0.5025416612625122 98 | 96,0.5024111866950989 99 | 97,0.5011456608772278 100 | 98,0.5001981258392334 101 | 99,0.4986412823200226 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_4_bin_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.336965411901474 3 | 1,0.2765127420425415 4 | 2,0.24654190242290497 5 | 3,0.22871440649032593 6 | 4,0.2138243168592453 7 | 5,0.19717861711978912 8 | 6,0.18577954173088074 9 | 7,0.17149658501148224 10 | 8,0.15641136467456818 11 | 9,0.13823726773262024 12 | 10,0.11819185316562653 13 | 11,0.10455338656902313 14 | 12,0.09242311120033264 15 | 13,0.07834954559803009 16 | 14,0.07315950840711594 17 | 15,0.062476079910993576 18 | 16,0.053763438016176224 19 | 17,0.05183437466621399 20 | 
18,0.045281339436769485 21 | 19,0.04197997227311134 22 | 20,0.05168071761727333 23 | 21,0.03809330239892006 24 | 22,0.03269248455762863 25 | 23,0.032864317297935486 26 | 24,0.03266817331314087 27 | 25,0.04119502753019333 28 | 26,0.053123872727155685 29 | 27,0.03945056349039078 30 | 28,0.03122532181441784 31 | 29,0.0635588988661766 32 | 30,0.032232701778411865 33 | 31,0.024552270770072937 34 | 32,0.026799287647008896 35 | 33,0.01888345740735531 36 | 34,0.02709309011697769 37 | 35,0.019405094906687737 38 | 36,0.03660287335515022 39 | 37,0.032695069909095764 40 | 38,0.025506654754281044 41 | 39,0.01995375007390976 42 | 40,0.025789974257349968 43 | 41,0.01821177452802658 44 | 42,0.036575719714164734 45 | 43,0.029879730194807053 46 | 44,0.02071373350918293 47 | 45,0.01505003310739994 48 | 46,0.021942229941487312 49 | 47,0.019875094294548035 50 | 48,0.02594662643969059 51 | 49,0.020626435056328773 52 | 50,0.028405990451574326 53 | 51,0.020614158362150192 54 | 52,0.02068614773452282 55 | 53,0.02098756842315197 56 | 54,0.02757357619702816 57 | 55,0.021707072854042053 58 | 56,0.0142069635912776 59 | 57,0.01679847575724125 60 | 58,0.01607084833085537 61 | 59,0.01571953296661377 62 | 60,0.02802596241235733 63 | 61,0.023775288835167885 64 | 62,0.012249690480530262 65 | 63,0.017295747995376587 66 | 64,0.017161089926958084 67 | 65,0.011533372104167938 68 | 66,0.022480670362710953 69 | 67,0.01412288099527359 70 | 68,0.013402200303971767 71 | 69,0.013700799085199833 72 | 70,0.014851067215204239 73 | 71,0.016466490924358368 74 | 72,0.014786130748689175 75 | 73,0.01098654791712761 76 | 74,0.011747024022042751 77 | 75,0.014080558903515339 78 | 76,0.014077951200306416 79 | 77,0.010861201211810112 80 | 78,0.009431061334908009 81 | 79,0.009803513996303082 82 | 80,0.01124445628374815 83 | 81,0.012456942349672318 84 | 82,0.009581610560417175 85 | 83,0.01898716762661934 86 | 84,0.021184537559747696 87 | 85,0.019906584173440933 88 | 86,0.02690681256353855 89 | 87,0.02215573750436306 90 | 88,0.011950727552175522 91 | 89,0.009951531887054443 92 | 90,0.01736011542379856 93 | 91,0.022156791761517525 94 | 92,0.01806703954935074 95 | 93,0.009561102837324142 96 | 94,0.015622434206306934 97 | 95,0.010797925293445587 98 | 96,0.018337609246373177 99 | 97,0.015436126850545406 100 | 98,0.011200637556612492 101 | 99,0.009753896854817867 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_4_bin_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.38093069195747375 3 | 1,0.587740957736969 4 | 2,0.5500872135162354 5 | 3,0.1052144318819046 6 | 4,0.5575112104415894 7 | 5,0.5216537714004517 8 | 6,0.2847365438938141 9 | 7,0.8807137608528137 10 | 8,0.4416270852088928 11 | 9,0.675245463848114 12 | 10,0.5053696632385254 13 | 11,0.7476599812507629 14 | 12,0.5209071040153503 15 | 13,0.6634050011634827 16 | 14,0.6991780400276184 17 | 15,0.34389305114746094 18 | 16,0.5550498962402344 19 | 17,0.696209728717804 20 | 18,0.558992326259613 21 | 19,0.47874173521995544 22 | 20,0.5182306170463562 23 | 21,0.4226059317588806 24 | 22,0.5640531778335571 25 | 23,0.36757731437683105 26 | 24,0.62542724609375 27 | 25,0.597924530506134 28 | 26,0.5450040698051453 29 | 27,0.44812309741973877 30 | 28,0.5367583632469177 31 | 29,0.44413185119628906 32 | 30,0.3720720410346985 33 | 31,0.3724272549152374 34 | 32,0.35781335830688477 35 | 33,0.3134117126464844 36 | 34,0.7065476179122925 37 | 35,0.5571811199188232 
38 | 36,0.25097984075546265 39 | 37,0.38388317823410034 40 | 38,0.28363361954689026 41 | 39,0.594810426235199 42 | 40,0.5197136998176575 43 | 41,0.2500999867916107 44 | 42,0.6133952736854553 45 | 43,0.4889870584011078 46 | 44,0.5678533315658569 47 | 45,0.6177371740341187 48 | 46,0.2784538269042969 49 | 47,0.5391901731491089 50 | 48,0.604894757270813 51 | 49,0.27997416257858276 52 | 50,0.7181318402290344 53 | 51,0.3281596302986145 54 | 52,0.4958387613296509 55 | 53,0.5220991373062134 56 | 54,0.7163916230201721 57 | 55,0.3933463990688324 58 | 56,0.24729633331298828 59 | 57,0.6752121448516846 60 | 58,0.7510302662849426 61 | 59,0.6301937103271484 62 | 60,0.5140539407730103 63 | 61,0.366352915763855 64 | 62,0.5326842665672302 65 | 63,0.292441189289093 66 | 64,0.3493868410587311 67 | 65,0.7262594103813171 68 | 66,0.6553086638450623 69 | 67,0.5179692506790161 70 | 68,0.6637434363365173 71 | 69,0.5045992136001587 72 | 70,0.6825988292694092 73 | 71,0.4113005995750427 74 | 72,0.3941526710987091 75 | 73,0.7341476678848267 76 | 74,0.5345930457115173 77 | 75,0.7430847883224487 78 | 76,0.5293629765510559 79 | 77,0.3670382797718048 80 | 78,0.2740624248981476 81 | 79,0.47987115383148193 82 | 80,0.4242591857910156 83 | 81,0.24768860638141632 84 | 82,0.48176953196525574 85 | 83,0.7487767338752747 86 | 84,0.5272279381752014 87 | 85,0.41194236278533936 88 | 86,0.6368169784545898 89 | 87,0.5189234614372253 90 | 88,0.23904462158679962 91 | 89,0.3744748830795288 92 | 90,0.6097939014434814 93 | 91,0.28121674060821533 94 | 92,0.6905983686447144 95 | 93,0.6353582739830017 96 | 94,0.4890918433666229 97 | 95,0.31259167194366455 98 | 96,0.6214494705200195 99 | 97,0.31570401787757874 100 | 98,0.23829668760299683 101 | 99,0.5497159957885742 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_4_bin_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.3439714014530182 3 | 1,0.3100118041038513 4 | 2,0.2905428111553192 5 | 3,0.24458937346935272 6 | 4,0.21663212776184082 7 | 5,0.20995958149433136 8 | 6,0.19643522799015045 9 | 7,0.1990828663110733 10 | 8,0.18670082092285156 11 | 9,0.21133574843406677 12 | 10,0.1388557106256485 13 | 11,0.13806672394275665 14 | 12,0.154269739985466 15 | 13,0.12204053997993469 16 | 14,0.12024927139282227 17 | 15,0.1442117840051651 18 | 16,0.1393846869468689 19 | 17,0.09866513311862946 20 | 18,0.22290289402008057 21 | 19,0.1178457960486412 22 | 20,0.11783464252948761 23 | 21,0.19876106083393097 24 | 22,0.24539975821971893 25 | 23,0.17097270488739014 26 | 24,0.10696014016866684 27 | 25,0.21087747812271118 28 | 26,0.47428828477859497 29 | 27,18.16415786743164 30 | 28,7.931071758270264 31 | 29,0.09539051353931427 32 | 30,0.08269508928060532 33 | 31,0.10650962591171265 34 | 32,0.1030060201883316 35 | 33,1.444541573524475 36 | 34,0.12038403004407883 37 | 35,0.29037612676620483 38 | 36,0.09760013967752457 39 | 37,0.611809253692627 40 | 38,1.6444226503372192 41 | 39,9.772391319274902 42 | 40,5.6738786697387695 43 | 41,0.10852603614330292 44 | 42,0.08662863820791245 45 | 43,0.08189094066619873 46 | 44,0.1465359330177307 47 | 45,0.3768543004989624 48 | 46,1.7048652172088623 49 | 47,0.12248839437961578 50 | 48,0.09084158390760422 51 | 49,0.2413727343082428 52 | 50,0.2402401864528656 53 | 51,0.07653157413005829 54 | 52,0.086482934653759 55 | 53,0.08508908003568649 56 | 54,0.10576577484607697 57 | 55,2.4441640377044678 58 | 56,2.0855298042297363 59 | 
57,27.222293853759766 60 | 58,0.076727956533432 61 | 59,64.98594665527344 62 | 60,3.922269344329834 63 | 61,3.571258068084717 64 | 62,0.696333646774292 65 | 63,0.27320000529289246 66 | 64,0.7228024005889893 67 | 65,0.6693872809410095 68 | 66,2.209949016571045 69 | 67,671.7283935546875 70 | 68,24.35675811767578 71 | 69,3.2921228408813477 72 | 70,0.11236879229545593 73 | 71,0.10055643320083618 74 | 72,0.1407698541879654 75 | 73,0.21086421608924866 76 | 74,0.08961521089076996 77 | 75,0.13055475056171417 78 | 76,8.615706443786621 79 | 77,92.24380493164062 80 | 78,2.41709303855896 81 | 79,20.729703903198242 82 | 80,42.33164978027344 83 | 81,3802.28271484375 84 | 82,56.62899398803711 85 | 83,16.92357063293457 86 | 84,0.08018677681684494 87 | 85,1.505804181098938 88 | 86,837.8457641601562 89 | 87,37.75509262084961 90 | 88,41.41953659057617 91 | 89,0.12437942624092102 92 | 90,11.207476615905762 93 | 91,0.5920124650001526 94 | 92,1.0174299478530884 95 | 93,0.43141597509384155 96 | 94,0.5239037275314331 97 | 95,1.2411810159683228 98 | 96,0.11608587205410004 99 | 97,0.0727127343416214 100 | 98,0.06857077032327652 101 | 99,0.06297268718481064 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_affinity_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.570686399936676 3 | 1,0.6837776303291321 4 | 2,0.7190074920654297 5 | 3,0.7467174530029297 6 | 4,0.7654244303703308 7 | 5,0.7853276133537292 8 | 6,0.8061771392822266 9 | 7,0.8246386647224426 10 | 8,0.845759391784668 11 | 9,0.8675978183746338 12 | 10,0.8870587348937988 13 | 11,0.8970209360122681 14 | 12,0.9207183718681335 15 | 13,0.9305022954940796 16 | 14,0.932826817035675 17 | 15,0.9512914419174194 18 | 16,0.9534869194030762 19 | 17,0.9505635499954224 20 | 18,0.9621323943138123 21 | 19,0.9585966467857361 22 | 20,0.9689239859580994 23 | 21,0.9635128378868103 24 | 22,0.9740110039710999 25 | 23,0.9779072403907776 26 | 24,0.9564751982688904 27 | 25,0.965908944606781 28 | 26,0.9682741761207581 29 | 27,0.9745286107063293 30 | 28,0.9806993007659912 31 | 29,0.9747578501701355 32 | 30,0.9788405299186707 33 | 31,0.9813305735588074 34 | 32,0.9848085045814514 35 | 33,0.9847663640975952 36 | 34,0.9831085801124573 37 | 35,0.9811550378799438 38 | 36,0.970558226108551 39 | 37,0.9818985462188721 40 | 38,0.9843637347221375 41 | 39,0.978312075138092 42 | 40,0.9807984828948975 43 | 41,0.981677770614624 44 | 42,0.9778510332107544 45 | 43,0.9843696355819702 46 | 44,0.9749293923377991 47 | 45,0.9669321179389954 48 | 46,0.9861887097358704 49 | 47,0.988936722278595 50 | 48,0.989205002784729 51 | 49,0.9869343638420105 52 | 50,0.9882535934448242 53 | 51,0.9883961081504822 54 | 52,0.9869046211242676 55 | 53,0.9879512786865234 56 | 54,0.9804926514625549 57 | 55,0.9843206405639648 58 | 56,0.9880526065826416 59 | 57,0.983494758605957 60 | 58,0.9863997101783752 61 | 59,0.9888461828231812 62 | 60,0.986798882484436 63 | 61,0.9884769916534424 64 | 62,0.9880291223526001 65 | 63,0.9865804314613342 66 | 64,0.9879496097564697 67 | 65,0.9887548685073853 68 | 66,0.9892851114273071 69 | 67,0.9886768460273743 70 | 68,0.9885974526405334 71 | 69,0.9862238168716431 72 | 70,0.9737057685852051 73 | 71,0.9867151379585266 74 | 72,0.9895062446594238 75 | 73,0.9905867576599121 76 | 74,0.9909699559211731 77 | 75,0.9901915788650513 78 | 76,0.9893408417701721 79 | 77,0.9887121319770813 80 | 78,0.9895479083061218 81 | 79,0.9859102368354797 82 | 
80,0.9900866150856018 83 | 81,0.991176426410675 84 | 82,0.9904757142066956 85 | 83,0.9904802441596985 86 | 84,0.9881252646446228 87 | 85,0.9871011972427368 88 | 86,0.9890764355659485 89 | 87,0.9858620166778564 90 | 88,0.9887582659721375 91 | 89,0.9904652237892151 92 | 90,0.989891767501831 93 | 91,0.9885685443878174 94 | 92,0.9881336092948914 95 | 93,0.9911155104637146 96 | 94,0.9912552833557129 97 | 95,0.9910354018211365 98 | 96,0.9910221695899963 99 | 97,0.9874783158302307 100 | 98,0.9899893999099731 101 | 99,0.9903985857963562 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_affinity_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.9952832460403442 3 | 1,0.8148464560508728 4 | 2,0.7343795299530029 5 | 3,0.6698437333106995 6 | 4,0.6232214570045471 7 | 5,0.5781002640724182 8 | 6,0.5344484448432922 9 | 7,0.49725449085235596 10 | 8,0.45188939571380615 11 | 9,0.3972921371459961 12 | 10,0.3475957214832306 13 | 11,0.3198709785938263 14 | 12,0.2577061951160431 15 | 13,0.22631388902664185 16 | 14,0.22425183653831482 17 | 15,0.16845698654651642 18 | 16,0.16238553822040558 19 | 17,0.1697872132062912 20 | 18,0.1357550472021103 21 | 19,0.14464960992336273 22 | 20,0.11221583187580109 23 | 21,0.12472321093082428 24 | 22,0.09333083778619766 25 | 23,0.0791763961315155 26 | 24,0.14551542699337006 27 | 25,0.11894777417182922 28 | 26,0.11014596372842789 29 | 27,0.0896896943449974 30 | 28,0.06655145436525345 31 | 29,0.08706305176019669 32 | 30,0.07236847281455994 33 | 31,0.06474213302135468 34 | 32,0.049053482711315155 35 | 33,0.050418511033058167 36 | 34,0.05651324614882469 37 | 35,0.06372500211000443 38 | 36,0.09503919631242752 39 | 37,0.060934990644454956 40 | 38,0.05004074051976204 41 | 39,0.07194311916828156 42 | 40,0.0632430911064148 43 | 41,0.060111574828624725 44 | 42,0.0727812871336937 45 | 43,0.05129951983690262 46 | 44,0.08206295967102051 47 | 45,0.11001972109079361 48 | 46,0.043077509850263596 49 | 47,0.031995151191949844 50 | 48,0.030304886400699615 51 | 49,0.0401468500494957 52 | 50,0.033113032579422 53 | 51,0.033214326947927475 54 | 52,0.04081788659095764 55 | 53,0.03558500483632088 56 | 54,0.06233423948287964 57 | 55,0.04816588759422302 58 | 56,0.03509446233510971 59 | 57,0.05093689262866974 60 | 58,0.040978651493787766 61 | 59,0.03127192705869675 62 | 60,0.03972320258617401 63 | 61,0.03297094628214836 64 | 62,0.03584013879299164 65 | 63,0.03924640268087387 66 | 64,0.03421425074338913 67 | 65,0.03034297004342079 68 | 66,0.029371777549386024 69 | 67,0.03221123293042183 70 | 68,0.03296941518783569 71 | 69,0.03997176140546799 72 | 70,0.08548048883676529 73 | 71,0.039131924510002136 74 | 72,0.027096638455986977 75 | 73,0.022717341780662537 76 | 74,0.020745953544974327 77 | 75,0.02390940859913826 78 | 76,0.027935607358813286 79 | 77,0.02933054231107235 80 | 78,0.025865549221634865 81 | 79,0.04205409809947014 82 | 80,0.024422017857432365 83 | 81,0.019868874922394753 84 | 82,0.022962862625718117 85 | 83,0.02281695231795311 86 | 84,0.031415507197380066 87 | 85,0.03633364662528038 88 | 86,0.02750328741967678 89 | 87,0.039902620017528534 90 | 88,0.028676712885499 91 | 89,0.02258164808154106 92 | 90,0.0248915646225214 93 | 91,0.03138952702283859 94 | 92,0.032387711107730865 95 | 93,0.019435109570622444 96 | 94,0.01876223459839821 97 | 95,0.019477752968668938 98 | 96,0.01967127062380314 99 | 97,0.032083019614219666 100 | 98,0.024478362873196602 101 
| 99,0.022054214030504227 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_affinity_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.5623152256011963 3 | 1,0.6777945756912231 4 | 2,0.6817294359207153 5 | 3,0.7329896092414856 6 | 4,0.7504016160964966 7 | 5,0.7156466841697693 8 | 6,0.7684159874916077 9 | 7,0.8024331331253052 10 | 8,0.8252727389335632 11 | 9,0.777657151222229 12 | 10,0.8259439468383789 13 | 11,0.8734250664710999 14 | 12,0.8683710098266602 15 | 13,0.8560364246368408 16 | 14,0.8957272171974182 17 | 15,0.8963173031806946 18 | 16,0.8974648714065552 19 | 17,0.8989467620849609 20 | 18,0.8659020066261292 21 | 19,0.9082461595535278 22 | 20,0.9142299890518188 23 | 21,0.8958823680877686 24 | 22,0.914530873298645 25 | 23,0.8698350191116333 26 | 24,0.8950868248939514 27 | 25,0.8936523199081421 28 | 26,0.8640329837799072 29 | 27,0.9218565821647644 30 | 28,0.9133979678153992 31 | 29,0.919867992401123 32 | 30,0.9231693148612976 33 | 31,0.9282336235046387 34 | 32,0.9231120347976685 35 | 33,0.9210495948791504 36 | 34,0.9128158688545227 37 | 35,0.9187522530555725 38 | 36,0.9106098413467407 39 | 37,0.9228474497795105 40 | 38,0.9230226874351501 41 | 39,0.9034092426300049 42 | 40,0.9110963940620422 43 | 41,0.9259164929389954 44 | 42,0.9218043088912964 45 | 43,0.927440345287323 46 | 44,0.7328872680664062 47 | 45,0.9312864542007446 48 | 46,0.934135377407074 49 | 47,0.9389386773109436 50 | 48,0.933924674987793 51 | 49,0.9288673400878906 52 | 50,0.9301970601081848 53 | 51,0.923916220664978 54 | 52,0.9001660346984863 55 | 53,0.9160008430480957 56 | 54,0.9176336526870728 57 | 55,0.9324433207511902 58 | 56,0.9286559820175171 59 | 57,0.9137378931045532 60 | 58,0.9340324997901917 61 | 59,0.9340272545814514 62 | 60,0.9332389831542969 63 | 61,0.9140461683273315 64 | 62,0.9208590984344482 65 | 63,0.9277499914169312 66 | 64,0.9390178322792053 67 | 65,0.9273501038551331 68 | 66,0.9263347387313843 69 | 67,0.939673900604248 70 | 68,0.9362928867340088 71 | 69,0.871730625629425 72 | 70,0.9263086915016174 73 | 71,0.9335660338401794 74 | 72,0.9417216777801514 75 | 73,0.9399980306625366 76 | 74,0.9409054517745972 77 | 75,0.9342096447944641 78 | 76,0.9407311081886292 79 | 77,0.9376429915428162 80 | 78,0.9334056377410889 81 | 79,0.9328042268753052 82 | 80,0.93473881483078 83 | 81,0.9348406791687012 84 | 82,0.9347317218780518 85 | 83,0.9358330368995667 86 | 84,0.9310874938964844 87 | 85,0.9327551126480103 88 | 86,0.9335057139396667 89 | 87,0.9308958053588867 90 | 88,0.9295884966850281 91 | 89,0.9281008243560791 92 | 90,0.9207355380058289 93 | 91,0.9084452390670776 94 | 92,0.9362289309501648 95 | 93,0.9420661330223083 96 | 94,0.9394167065620422 97 | 95,0.9414842128753662 98 | 96,0.9397277235984802 99 | 97,0.9260644316673279 100 | 98,0.9347217679023743 101 | 99,0.9316471815109253 102 | -------------------------------------------------------------------------------- /utils/csv_output/multi_affinity_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,logs/rot-y_multibin 2 | 0,0.9566107988357544 3 | 1,0.9349859952926636 4 | 2,0.9544162154197693 5 | 3,0.693359375 6 | 4,0.647921085357666 7 | 5,0.738761305809021 8 | 6,0.6144521832466125 9 | 7,0.5397984385490417 10 | 8,0.5056890249252319 11 | 9,0.609933078289032 12 | 10,0.47301584482192993 13 | 11,0.3623213768005371 14 | 12,0.3696376383304596 15 | 
13,0.42134037613868713 16 | 14,0.3111552596092224 17 | 15,0.31755250692367554 18 | 16,0.3099622130393982 19 | 17,0.3131531774997711 20 | 18,0.4136754274368286 21 | 19,0.2731379568576813 22 | 20,0.26159366965293884 23 | 21,0.32456478476524353 24 | 22,0.26171281933784485 25 | 23,0.39260274171829224 26 | 24,0.31107786297798157 27 | 25,0.3470202684402466 28 | 26,0.4111035466194153 29 | 27,0.24302223324775696 30 | 28,0.268155038356781 31 | 29,0.24394281208515167 32 | 30,0.23135332763195038 33 | 31,0.2266709953546524 34 | 32,0.2409190982580185 35 | 33,0.24191614985466003 36 | 34,0.26231345534324646 37 | 35,0.25146183371543884 38 | 36,0.27390503883361816 39 | 37,0.23755191266536713 40 | 38,0.2324133813381195 41 | 39,0.286697119474411 42 | 40,0.27889150381088257 43 | 41,0.22617678344249725 44 | 42,0.27206742763519287 45 | 43,0.21789655089378357 46 | 44,0.7644999027252197 47 | 45,0.20869702100753784 48 | 46,0.2001061886548996 49 | 47,0.1834302693605423 50 | 48,0.2005392611026764 51 | 49,0.21436305344104767 52 | 50,0.2118368297815323 53 | 51,0.22672806680202484 54 | 52,0.39496830105781555 55 | 53,0.2462703436613083 56 | 54,0.2514236569404602 57 | 55,0.21703113615512848 58 | 56,0.2053987979888916 59 | 57,0.26278260350227356 60 | 58,0.19733364880084991 61 | 59,0.19463451206684113 62 | 60,0.19977514445781708 63 | 61,0.2657962143421173 64 | 62,0.25492164492607117 65 | 63,0.21325618028640747 66 | 64,0.1824818253517151 67 | 65,0.22122937440872192 68 | 66,0.22585780918598175 69 | 67,0.17696760594844818 70 | 68,0.1892920583486557 71 | 69,0.38816478848457336 72 | 70,0.2299787700176239 73 | 71,0.19785816967487335 74 | 72,0.17479097843170166 75 | 73,0.18013785779476166 76 | 74,0.17989209294319153 77 | 75,0.1970243901014328 78 | 76,0.18335077166557312 79 | 77,0.19219189882278442 80 | 78,0.20430727303028107 81 | 79,0.2025090456008911 82 | 80,0.1957627683877945 83 | 81,0.18818572163581848 84 | 82,0.19622159004211426 85 | 83,0.19631867110729218 86 | 84,0.20534726977348328 87 | 85,0.20600399374961853 88 | 86,0.20096248388290405 89 | 87,0.20661213994026184 90 | 88,0.2433222234249115 91 | 89,0.2462420016527176 92 | 90,1.616150975227356 93 | 91,0.2899421453475952 94 | 92,0.19249682128429413 95 | 93,0.1741327941417694 96 | 94,0.1803906261920929 97 | 95,0.17353951930999756 98 | 96,0.18973854184150696 99 | 97,0.2250894159078598 100 | 98,0.20002999901771545 101 | 99,0.20366095006465912 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,alpha_single_bin_with_pos_enc,alpha_tricosine_with_pos_enc,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.6400677561759949,0.6471931338310242,0.6863211989402771,0.6087185740470886 3 | 1,0.7232633829116821,0.6914137601852417,0.6302832961082458,0.6958130598068237 4 | 2,0.7462820410728455,0.709906816482544,0.6411057710647583,0.7039565443992615 5 | 3,0.706973671913147,0.7336570024490356,0.7182706594467163,0.7380108833312988 6 | 4,0.7784066200256348,0.7820504307746887,0.7391350865364075,0.7535611391067505 7 | 5,0.8170434236526489,0.8000261783599854,0.7422938346862793,0.7423034906387329 8 | 6,0.8061135411262512,0.8115348219871521,0.7725706100463867,0.7695603966712952 9 | 7,0.8340672850608826,0.8120900988578796,0.8110686540603638,0.7968102097511292 10 | 8,0.8531450629234314,0.8335540294647217,0.8150075674057007,0.8172760009765625 11 | 
9,0.8446401953697205,0.8488985896110535,0.8109203577041626,0.8314341306686401 12 | 10,0.8282425403594971,0.8593789339065552,0.8503283858299255,0.8426434397697449 13 | 11,0.8690404295921326,0.8738688230514526,0.8578314185142517,0.8571656942367554 14 | 12,0.8806560635566711,0.8773562908172607,0.8608093857765198,0.8531160354614258 15 | 13,0.8850160837173462,0.8827734589576721,0.860576868057251,0.8651279211044312 16 | 14,0.8985598683357239,0.8902415633201599,0.8846560716629028,0.873566210269928 17 | 15,0.8992338180541992,0.895653247833252,0.8804288506507874,0.884806215763092 18 | 16,0.8934822678565979,0.8705500364303589,0.8850955963134766,0.886089563369751 19 | 17,0.8971868753433228,0.9003416895866394,0.8874266743659973,0.882472574710846 20 | 18,0.9090531468391418,0.8941176533699036,0.893240749835968,0.8976846933364868 21 | 19,0.9070605039596558,0.89914470911026,0.886725664138794,0.8968603014945984 22 | 20,0.8910845518112183,0.8934242129325867,0.8946811556816101,0.8992015719413757 23 | 21,0.9054089784622192,0.9064927101135254,0.898626446723938,0.896363377571106 24 | 22,0.9032750725746155,0.9062277674674988,0.8853469491004944,0.9030553102493286 25 | 23,0.9071454405784607,0.902522623538971,0.8925518989562988,0.8931100964546204 26 | 24,0.9042013883590698,0.8937796354293823,0.909287691116333,0.8942973017692566 27 | 25,0.9139474034309387,0.9185557961463928,0.8913444876670837,0.9045402407646179 28 | 26,0.9179392457008362,0.917791485786438,0.9071851372718811,0.9003372192382812 29 | 27,0.9172322750091553,0.9074113368988037,0.9030707478523254,0.9082906246185303 30 | 28,0.9133330583572388,0.8826454281806946,0.9103443026542664,0.905849814414978 31 | 29,0.9189529418945312,0.9021663069725037,0.9129875302314758,0.9037789702415466 32 | 30,0.9128819108009338,0.9212689995765686,0.9091049432754517,0.9094458818435669 33 | 31,0.906994104385376,0.9171106219291687,0.9148454666137695,0.9113092422485352 34 | 32,0.9222358465194702,0.9155635237693787,0.9080615639686584,0.9112011194229126 35 | 33,0.9175286293029785,0.9177125692367554,0.9078899025917053,0.9072751402854919 36 | 34,0.9094294905662537,0.9172350168228149,0.9115075469017029,0.9092580676078796 37 | 35,0.9247190356254578,0.918796718120575,0.9144414067268372,0.9130107164382935 38 | 36,0.9254876375198364,0.9213650226593018,0.9142535328865051,0.9151101112365723 39 | 37,0.9283514618873596,0.9237658977508545,0.9153316617012024,0.9076852202415466 40 | 38,0.9238529205322266,0.9178661108016968,0.9156718850135803,0.9126120209693909 41 | 39,0.9059971570968628,0.9197582006454468,0.9173747301101685,0.9147352576255798 42 | 40,0.9172156453132629,0.9195657968521118,0.909166693687439,0.9143138527870178 43 | 41,0.9297339916229248,0.9198476672172546,0.9068711996078491,0.9194060564041138 44 | 42,0.928586483001709,0.916400134563446,0.9137561917304993,0.9142179489135742 45 | 43,0.9291079044342041,0.9200621247291565,0.9138803482055664,0.9205396175384521 46 | 44,0.9250819683074951,0.9177462458610535,0.914042055606842,0.9106838703155518 47 | 45,0.9221975207328796,0.9250417947769165,0.9221339225769043,0.9200465083122253 48 | 46,0.9329376816749573,0.9222911596298218,0.9102650880813599,0.914573609828949 49 | 47,0.9268278479576111,0.9279425740242004,0.9054320454597473,0.9144244194030762 50 | 48,0.9247205853462219,0.9259448647499084,0.91727215051651,0.9233770966529846 51 | 49,0.9382876753807068,0.924592912197113,0.9109329581260681,0.9232364892959595 52 | 50,0.9404057860374451,0.9161821603775024,0.9211392402648926,0.9236119985580444 53 | 
51,0.9358689785003662,0.9234479665756226,0.919653058052063,0.9221207499504089 54 | 52,0.9372283816337585,0.9222918152809143,0.8753345608711243,0.9247457981109619 55 | 53,0.9376057386398315,0.9245427250862122,0.9248461127281189,0.9022936820983887 56 | 54,0.9297599792480469,0.9248082041740417,0.9295516014099121,0.9141382575035095 57 | 55,0.9273845553398132,0.9286539554595947,0.9177818894386292,0.9126678109169006 58 | 56,0.9296010136604309,0.9348684549331665,0.9172423481941223,0.9249310493469238 59 | 57,0.9265130162239075,0.9182918071746826,0.9225333333015442,0.9107521176338196 60 | 58,0.9324873685836792,0.9307242631912231,0.924489438533783,0.9170426726341248 61 | 59,0.9409798979759216,0.9278944134712219,0.9199726581573486,0.9156734943389893 62 | 60,0.9165357947349548,0.9251298308372498,0.9209211468696594,0.9161543250083923 63 | 61,0.9278584718704224,0.9216277003288269,0.9271728992462158,0.900689423084259 64 | 62,0.9336904287338257,0.9304068684577942,0.9072903990745544,0.9164056777954102 65 | 63,0.9304336905479431,0.9303541779518127,0.9272565245628357,0.9242128729820251 66 | 64,0.9407385587692261,0.9289416074752808,0.9243087768554688,0.9207677245140076 67 | 65,0.9240184426307678,0.9291926026344299,0.9227352738380432,0.9206200838088989 68 | 66,0.9343522191047668,0.9251654148101807,0.9278071522712708,0.9224875569343567 69 | 67,0.9323389530181885,0.9425731301307678,0.9194933772087097,0.924934983253479 70 | 68,0.9370900392532349,0.9283608198165894,0.9167822599411011,0.9192843437194824 71 | 69,0.9152613282203674,0.9189719557762146,0.916300356388092,0.9276775121688843 72 | 70,0.9416686296463013,0.9379631876945496,0.9255526661872864,0.9231894016265869 73 | 71,0.9356262683868408,0.9279413819313049,0.9286400675773621,0.9252322912216187 74 | 72,0.9361514449119568,0.9363983273506165,0.9257145524024963,0.9328538179397583 75 | 73,0.938615620136261,0.9348746538162231,0.9297013878822327,0.9187564253807068 76 | 74,0.9395661950111389,0.9328617453575134,0.92349773645401,0.920699954032898 77 | 75,0.9200454354286194,0.9376638531684875,0.9160712361335754,0.9281585216522217 78 | 76,0.9346734881401062,0.9231322407722473,0.9261691570281982,0.9171883463859558 79 | 77,0.9414622783660889,0.9192685484886169,0.931832492351532,0.9251331686973572 80 | 78,0.9413303732872009,0.9306217432022095,0.9271131157875061,0.9298701882362366 81 | 79,0.9411513209342957,0.9266195297241211,0.9155550599098206,0.9068107604980469 82 | 80,0.9373226761817932,0.9205257296562195,0.9271982908248901,0.9259641170501709 83 | 81,0.9327431917190552,0.9354149699211121,0.9287874102592468,0.9248186945915222 84 | 82,0.9347732663154602,0.9355024695396423,0.9284207224845886,0.9232234954833984 85 | 83,0.9389911890029907,0.9283201098442078,0.9283326864242554,0.9246184825897217 86 | 84,0.9425088167190552,0.9363265037536621,0.9258661866188049,0.9246215224266052 87 | 85,0.9355118274688721,0.939597487449646,0.9197660088539124,0.9238834381103516 88 | 86,0.9445977210998535,0.9257534742355347,0.9290109872817993,0.9223376512527466 89 | 87,0.932367205619812,0.9369345307350159,0.9299178719520569,0.9254543781280518 90 | 88,0.9417551755905151,0.9366289973258972,0.9252886176109314,0.9215586185455322 91 | 89,0.9365430474281311,0.9158395528793335,0.9284940958023071,0.9192836284637451 92 | 90,0.9302768707275391,0.9359331130981445,0.9226723909378052,0.9205099940299988 93 | 91,0.941059410572052,0.9339913725852966,0.92503422498703,0.9241330623626709 94 | 92,0.9381124377250671,0.9428820610046387,0.9320996403694153,0.9212391972541809 95 | 
93,0.9361398816108704,0.9312087297439575,0.9298760890960693,0.9261152148246765 96 | 94,0.9385824799537659,0.940757155418396,0.923100471496582,0.9318003058433533 97 | 95,0.9349018931388855,0.9356358051300049,0.9283947944641113,0.9070702195167542 98 | 96,0.9404476881027222,0.9231966137886047,0.929309606552124,0.9303291440010071 99 | 97,0.9446843862533569,0.9418991804122925,0.9278473854064941,0.917888343334198 100 | 98,0.9414491057395935,0.9357419013977051,0.9284554719924927,0.9146331548690796 101 | 99,0.940292239189148,0.9411863088607788,0.9256574511528015,0.9306854009628296 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_second_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5330743193626404,0.4995960593223572 3 | 1,0.6623765230178833,0.5065120458602905 4 | 2,0.6978092193603516,0.507071852684021 5 | 3,0.7258195877075195,0.5336099863052368 6 | 4,0.7463541626930237,0.6249013543128967 7 | 5,0.7706411480903625,0.6845522522926331 8 | 6,0.7969285845756531,0.7189503312110901 9 | 7,0.8245146870613098,0.7412046194076538 10 | 8,0.8515299558639526,0.764265775680542 11 | 9,0.8781068921089172,0.786479115486145 12 | 10,0.8986018896102905,0.8075795769691467 13 | 11,0.915123701095581,0.830508291721344 14 | 12,0.9328026175498962,0.8582687377929688 15 | 13,0.940780520439148,0.8793695569038391 16 | 14,0.9482492804527283,0.895136296749115 17 | 15,0.9581180214881897,0.914872944355011 18 | 16,0.9613436460494995,0.9300346374511719 19 | 17,0.9686024785041809,0.9399552941322327 20 | 18,0.9723981022834778,0.9428710341453552 21 | 19,0.9720681309700012,0.9533848166465759 22 | 20,0.9756850004196167,0.9612845182418823 23 | 21,0.9786124229431152,0.9556169509887695 24 | 22,0.9806358814239502,0.9667525291442871 25 | 23,0.98103266954422,0.9721457958221436 26 | 24,0.9814091324806213,0.9718517065048218 27 | 25,0.9834623336791992,0.9763031005859375 28 | 26,0.9835328459739685,0.9786550998687744 29 | 27,0.9814990758895874,0.973927915096283 30 | 28,0.9863042235374451,0.9809465408325195 31 | 29,0.985540509223938,0.9844163060188293 32 | 30,0.9881553649902344,0.9823783040046692 33 | 31,0.9853358864784241,0.9827666282653809 34 | 32,0.9874443411827087,0.9848591685295105 35 | 33,0.9869722723960876,0.9838681817054749 36 | 34,0.9898384809494019,0.9857177138328552 37 | 35,0.9884466528892517,0.9841392040252686 38 | 36,0.9853475689888,0.9873312711715698 39 | 37,0.9908441305160522,0.9814872741699219 40 | 38,0.9891785979270935,0.9759257435798645 41 | 39,0.9889490008354187,0.9879662990570068 42 | 40,0.9904429316520691,0.9887861013412476 43 | 41,0.9885872006416321,0.9876943230628967 44 | 42,0.9919919371604919,0.9809208512306213 45 | 43,0.9918577075004578,0.9884217977523804 46 | 44,0.9879346489906311,0.9889755845069885 47 | 45,0.9903157353401184,0.9883526563644409 48 | 46,0.9917426109313965,0.9886467456817627 49 | 47,0.991865873336792,0.9879354238510132 50 | 48,0.9900221824645996,0.9892691373825073 51 | 49,0.9921199679374695,0.9891642928123474 52 | 50,0.9918316006660461,0.9872278571128845 53 | 51,0.9910182356834412,0.9894484281539917 54 | 52,0.9923532009124756,0.9903212785720825 55 | 53,0.9909812808036804,0.9882053136825562 56 | 54,0.9928857088088989,0.991295576095581 57 | 55,0.9943292140960693,0.9872449636459351 58 | 56,0.9931513071060181,0.9922265410423279 59 | 57,0.989457368850708,0.9928170442581177 60 | 
58,0.9933512210845947,0.9934793710708618 61 | 59,0.9939045906066895,0.9912370443344116 62 | 60,,0.9918880462646484 63 | 61,,0.9913289546966553 64 | 62,,0.9903188347816467 65 | 63,,0.9905444383621216 66 | 64,,0.9937067627906799 67 | 65,,0.9941499829292297 68 | 66,,0.9902061223983765 69 | 67,,0.9942779541015625 70 | 68,,0.99359130859375 71 | 69,,0.9940575361251831 72 | 70,,0.9917435646057129 73 | 71,,0.9900504946708679 74 | 72,,0.9942225813865662 75 | 73,,0.9933336973190308 76 | 74,,0.9933016300201416 77 | 75,,0.9936275482177734 78 | 76,,0.9923629760742188 79 | 77,,0.9934442043304443 80 | 78,,0.9945631623268127 81 | 79,,0.9933801293373108 82 | 80,,0.9899559617042542 83 | 81,,0.9808568358421326 84 | 82,,0.9857321381568909 85 | 83,,0.9916608333587646 86 | 84,,0.9934998750686646 87 | 85,,0.9947727918624878 88 | 86,,0.9942541122436523 89 | 87,,0.994752824306488 90 | 88,,0.9948300719261169 91 | 89,,0.9950224757194519 92 | 90,,0.9931086897850037 93 | 91,,0.9946547746658325 94 | 92,,0.9941185116767883 95 | 93,,0.994958221912384 96 | 94,,0.9945772886276245 97 | 95,,0.9931212067604065 98 | 96,,0.9946855306625366 99 | 97,,0.9953170418739319 100 | 98,,0.9941219687461853 101 | 99,,0.9950582981109619 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_second_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5111646056175232,0.5128739476203918 3 | 1,0.4216247498989105,0.5018191933631897 4 | 2,0.3869202733039856,0.5002676844596863 5 | 3,0.35751545429229736,0.49594226479530334 6 | 4,0.3327171206474304,0.44617101550102234 7 | 5,0.30713680386543274,0.396493524312973 8 | 6,0.281408429145813,0.3616492450237274 9 | 7,0.25207945704460144,0.33409684896469116 10 | 8,0.2210589498281479,0.3088935315608978 11 | 9,0.19124145805835724,0.28653624653816223 12 | 10,0.16528549790382385,0.26433223485946655 13 | 11,0.14286434650421143,0.24221310019493103 14 | 12,0.12078681588172913,0.21494115889072418 15 | 13,0.10743484646081924,0.18869933485984802 16 | 14,0.09653738141059875,0.16777817904949188 17 | 15,0.08210841566324234,0.14358636736869812 18 | 16,0.07591087371110916,0.12519167363643646 19 | 17,0.0638948529958725,0.11095239967107773 20 | 18,0.05859529227018356,0.1060684323310852 21 | 19,0.05871021747589111,0.09092000126838684 22 | 20,0.052366334944963455,0.080001100897789 23 | 21,0.04677761718630791,0.08799809217453003 24 | 22,0.04288214445114136,0.07049990445375443 25 | 23,0.04175477474927902,0.06237361207604408 26 | 24,0.040743038058280945,0.06151946261525154 27 | 25,0.03691461309790611,0.0544448047876358 28 | 26,0.03649268299341202,0.049240779131650925 29 | 27,0.039186473935842514,0.05664141848683357 30 | 28,0.03191370144486427,0.045831430703401566 31 | 29,0.03245559707283974,0.03942932188510895 32 | 30,0.02794009819626808,0.042371589690446854 33 | 31,0.03265884891152382,0.04195097088813782 34 | 32,0.02879723161458969,0.037625472992658615 35 | 33,0.029637448489665985,0.038791220635175705 36 | 34,0.02415432780981064,0.0360955074429512 37 | 35,0.02689778245985508,0.0385596789419651 38 | 36,0.03176720067858696,0.032303497195243835 39 | 37,0.022681625559926033,0.04119671881198883 40 | 38,0.025174342095851898,0.05137767270207405 41 | 39,0.02560197375714779,0.030678877606987953 42 | 40,0.02261424995958805,0.028607692569494247 43 | 41,0.025726616382598877,0.029636366292834282 44 | 42,0.01965138129889965,0.04319433867931366 45 | 
43,0.019819576293230057,0.02970944344997406 46 | 44,0.02665870077908039,0.027844930067658424 47 | 45,0.022305307909846306,0.029249845072627068 48 | 46,0.020358867943286896,0.028197938576340675 49 | 47,0.01939980685710907,0.02930355630815029 50 | 48,0.022975562140345573,0.026662569493055344 51 | 49,0.019229520112276077,0.027083581313490868 52 | 50,0.01984352059662342,0.030973022803664207 53 | 51,0.021169841289520264,0.026892799884080887 54 | 52,0.017927031964063644,0.02550783008337021 55 | 53,0.02116110920906067,0.028857396915555 56 | 54,0.017539598047733307,0.02307927794754505 57 | 55,0.015034118667244911,0.029737001284956932 58 | 56,0.017045065760612488,0.02079075388610363 59 | 57,0.023106098175048828,0.019281789660453796 60 | 58,0.01668313518166542,0.018227415159344673 61 | 59,0.01583978906273842,0.02280046045780182 62 | 60,,0.021107690408825874 63 | 61,,0.021917235106229782 64 | 62,,0.023867711424827576 65 | 63,,0.02359965071082115 66 | 64,,0.01721753552556038 67 | 65,,0.015953779220581055 68 | 66,,0.023223156109452248 69 | 67,,0.0161072239279747 70 | 68,,0.017537618055939674 71 | 69,,0.016453713178634644 72 | 70,,0.0204198956489563 73 | 71,,0.024196015670895576 74 | 72,,0.015634696930646896 75 | 73,,0.01765541173517704 76 | 74,,0.01794680766761303 77 | 75,,0.017291119322180748 78 | 76,,0.01955888420343399 79 | 77,,0.01739179715514183 80 | 78,,0.015088708139955997 81 | 79,,0.017688659951090813 82 | 80,,0.0276735071092844 83 | 81,,0.0388454832136631 84 | 82,,0.03151237964630127 85 | 83,,0.020263563841581345 86 | 84,,0.01669233664870262 87 | 85,,0.014432905241847038 88 | 86,,0.015141843818128109 89 | 87,,0.014302792958915234 90 | 88,,0.013969631865620613 91 | 89,,0.01342889852821827 92 | 90,,0.017190247774124146 93 | 91,,0.01414207648485899 94 | 92,,0.01542562898248434 95 | 93,,0.01393946073949337 96 | 94,,0.014502927660942078 97 | 95,,0.017610354349017143 98 | 96,,0.014504551887512207 99 | 97,,0.012853540480136871 100 | 98,,0.015876956284046173 101 | 99,,0.013260497711598873 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_second_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5401935577392578,0.5061125159263611 3 | 1,0.6693888306617737,0.507260262966156 4 | 2,0.7104347348213196,0.5096264481544495 5 | 3,0.7140364050865173,0.5159751772880554 6 | 4,0.702231764793396,0.6190560460090637 7 | 5,0.7769100666046143,0.6973691582679749 8 | 6,0.8022670149803162,0.6757805943489075 9 | 7,0.7993789911270142,0.7498257756233215 10 | 8,0.8367089629173279,0.7632125616073608 11 | 9,0.8396608829498291,0.7693992257118225 12 | 10,0.8579839468002319,0.78679358959198 13 | 11,0.8885736465454102,0.8141860365867615 14 | 12,0.880791425704956,0.817328691482544 15 | 13,0.886802077293396,0.8359704613685608 16 | 14,0.893208920955658,0.8623818159103394 17 | 15,0.8943783640861511,0.8718006014823914 18 | 16,0.9023328423500061,0.8415833711624146 19 | 17,0.8964871764183044,0.8894969820976257 20 | 18,0.9069052338600159,0.8908239603042603 21 | 19,0.9121700525283813,0.8827806115150452 22 | 20,0.9087314605712891,0.8906440138816833 23 | 21,0.905256986618042,0.9017173051834106 24 | 22,0.9152396321296692,0.9046811461448669 25 | 23,0.9064427614212036,0.8832094669342041 26 | 24,0.9027177691459656,0.8944185972213745 27 | 25,0.9199612140655518,0.909101665019989 28 | 26,0.9113211035728455,0.9030346274375916 29 | 
27,0.885046124458313,0.9052313566207886 30 | 28,0.9236015677452087,0.9067085981369019 31 | 29,0.907525897026062,0.8763922452926636 32 | 30,0.9182515740394592,0.9051503539085388 33 | 31,0.912943422794342,0.9155370593070984 34 | 32,0.9062497019767761,0.9045754671096802 35 | 33,0.9152827262878418,0.8775880932807922 36 | 34,0.9270886778831482,0.9058632850646973 37 | 35,0.9186521768569946,0.9003616571426392 38 | 36,0.9221734404563904,0.9090725779533386 39 | 37,0.9267725348472595,0.8768558502197266 40 | 38,0.9183965921401978,0.9143580794334412 41 | 39,0.9234006404876709,0.9163610339164734 42 | 40,0.9123709797859192,0.9111243486404419 43 | 41,0.9284425973892212,0.8838428258895874 44 | 42,0.925999641418457,0.9203583598136902 45 | 43,0.9178467392921448,0.903468668460846 46 | 44,0.9170321822166443,0.9072157740592957 47 | 45,0.9133639335632324,0.9160839319229126 48 | 46,0.9290410876274109,0.9238162040710449 49 | 47,0.9163879752159119,0.918948233127594 50 | 48,0.9288790822029114,0.9098387360572815 51 | 49,0.9230549335479736,0.9122633337974548 52 | 50,0.9212501049041748,0.9192684292793274 53 | 51,0.924383282661438,0.912359356880188 54 | 52,0.92362380027771,0.9235591292381287 55 | 53,0.9254149794578552,0.904768705368042 56 | 54,0.9289409518241882,0.9053817391395569 57 | 55,0.9262305498123169,0.9190903902053833 58 | 56,0.9220956563949585,0.9148341417312622 59 | 57,0.92946457862854,0.9303833842277527 60 | 58,0.9174981713294983,0.9265962839126587 61 | 59,0.9256591796875,0.8596237897872925 62 | 60,,0.9209739565849304 63 | 61,,0.9272665977478027 64 | 62,,0.9063639044761658 65 | 63,,0.9281643629074097 66 | 64,,0.9185977578163147 67 | 65,,0.9209501147270203 68 | 66,,0.9221509695053101 69 | 67,,0.9215956330299377 70 | 68,,0.9082291722297668 71 | 69,,0.9169037342071533 72 | 70,,0.9208183288574219 73 | 71,,0.9208436608314514 74 | 72,,0.9176328778266907 75 | 73,,0.92002934217453 76 | 74,,0.9115009307861328 77 | 75,,0.8643436431884766 78 | 76,,0.9243096709251404 79 | 77,,0.9257158637046814 80 | 78,,0.9284163117408752 81 | 79,,0.9214773774147034 82 | 80,,0.8993902802467346 83 | 81,,0.9061789512634277 84 | 82,,0.9230576157569885 85 | 83,,0.9198920130729675 86 | 84,,0.9235404133796692 87 | 85,,0.9204784035682678 88 | 86,,0.9132751822471619 89 | 87,,0.9222601056098938 90 | 88,,0.9280800223350525 91 | 89,,0.9315210580825806 92 | 90,,0.9199073314666748 93 | 91,,0.9253250360488892 94 | 92,,0.9217073917388916 95 | 93,,0.9278841018676758 96 | 94,,0.9229528307914734 97 | 95,,0.926822304725647 98 | 96,,0.9011359214782715 99 | 97,,0.9265130758285522 100 | 98,,0.9177498817443848 101 | 99,,0.9141339659690857 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_second_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.7645803093910217,0.5029675960540771 3 | 1,0.41367876529693604,0.5001790523529053 4 | 2,0.3685796856880188,0.4992886483669281 5 | 3,0.41583356261253357,1.0882266759872437 6 | 4,0.39034366607666016,0.517177939414978 7 | 5,0.29554083943367004,0.3718084990978241 8 | 6,0.27002015709877014,0.5759989619255066 9 | 7,0.269722580909729,0.3251943290233612 10 | 8,0.23762843012809753,0.30800846219062805 11 | 9,0.23984605073928833,0.3135545551776886 12 | 10,0.20973944664001465,0.28753527998924255 13 | 11,0.1665385216474533,0.25594761967658997 14 | 12,0.17994564771652222,0.26571446657180786 15 | 13,0.17138603329658508,0.22926373779773712 
16 | 14,0.1613759994506836,0.20637653768062592 17 | 15,0.16675761342048645,0.19010606408119202 18 | 16,0.15599648654460907,0.2532972991466522 19 | 17,0.1628173291683197,0.18144434690475464 20 | 18,0.14740678668022156,0.16619999706745148 21 | 19,0.13977879285812378,0.17971719801425934 22 | 20,0.14608746767044067,0.17468731105327606 23 | 21,0.14616075158119202,0.1532881110906601 24 | 22,0.13266326487064362,0.1517837792634964 25 | 23,0.15505030751228333,0.22196951508522034 26 | 24,0.15675731003284454,0.1892639547586441 27 | 25,0.13234390318393707,0.1555720716714859 28 | 26,0.14197856187820435,0.14760373532772064 29 | 27,0.18725013732910156,0.1472039520740509 30 | 28,0.1236983984708786,0.2754785120487213 31 | 29,0.15128731727600098,1.8085730075836182 32 | 30,0.1332901120185852,0.14927014708518982 33 | 31,0.14252246916294098,0.21206828951835632 34 | 32,0.14615420997142792,1.7127617597579956 35 | 33,0.13629435002803802,0.26204177737236023 36 | 34,0.11975676566362381,2.0483908653259277 37 | 35,0.13404598832130432,0.1800844967365265 38 | 36,0.12748375535011292,0.1776786893606186 39 | 37,0.12377557158470154,0.19329480826854706 40 | 38,0.13421784341335297,0.13871540129184723 41 | 39,0.1264767050743103,0.13911570608615875 42 | 40,0.14412111043930054,0.1834503710269928 43 | 41,0.1190158948302269,0.7069637179374695 44 | 42,0.12194713205099106,0.12534412741661072 45 | 43,0.1357245296239853,0.15192225575447083 46 | 44,0.138009175658226,0.3144014775753021 47 | 45,0.14462047815322876,0.20168018341064453 48 | 46,0.12020426243543625,0.12923778593540192 49 | 47,0.14218515157699585,0.1550372838973999 50 | 48,0.11873059719800949,0.553214967250824 51 | 49,0.128644660115242,0.1482481211423874 52 | 50,0.12742403149604797,0.1246175542473793 53 | 51,0.12347240746021271,0.20570209622383118 54 | 52,0.1247677281498909,0.11649321019649506 55 | 53,0.12405825406312943,0.19980409741401672 56 | 54,0.1155853122472763,0.35816529393196106 57 | 55,0.12252841144800186,0.13724622130393982 58 | 56,0.12937553226947784,0.6270830035209656 59 | 57,0.11610288918018341,0.1136346086859703 60 | 58,0.13636945188045502,0.2782679796218872 61 | 59,0.12257698178291321,2.0219902992248535 62 | 60,,0.22134897112846375 63 | 61,,0.23291337490081787 64 | 62,,0.77008056640625 65 | 63,,0.16710159182548523 66 | 64,,0.562247097492218 67 | 65,,0.1449926793575287 68 | 66,,0.1823757290840149 69 | 67,,0.47904378175735474 70 | 68,,1.9259496927261353 71 | 69,,1.7460676431655884 72 | 70,,0.8488378524780273 73 | 71,,0.16285116970539093 74 | 72,,0.12482190132141113 75 | 73,,0.15893828868865967 76 | 74,,26.68888282775879 77 | 75,,87.39239501953125 78 | 76,,4.248983860015869 79 | 77,,2.7435503005981445 80 | 78,,0.10877091437578201 81 | 79,,0.7845163345336914 82 | 80,,0.9499209523200989 83 | 81,,0.15503203868865967 84 | 82,,0.14084576070308685 85 | 83,,0.14334289729595184 86 | 84,,0.13007409870624542 87 | 85,,0.19383440911769867 88 | 86,,0.25536832213401794 89 | 87,,0.2899816632270813 90 | 88,,0.255639910697937 91 | 89,,0.2945609390735626 92 | 90,,0.231873020529747 93 | 91,,1.0148862600326538 94 | 92,,0.3910515010356903 95 | 93,,0.5180171132087708 96 | 94,,0.5936006307601929 97 | 95,,0.304121196269989 98 | 96,,5.148640155792236 99 | 97,,0.29820311069488525 100 | 98,,0.4059883654117584 101 | 99,,0.33647143840789795 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_third_cleaned_training_accuracy.csv: -------------------------------------------------------------------------------- 1 | 
step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5069392323493958,0.5131821036338806 3 | 1,0.5914482474327087,0.6163303852081299 4 | 2,0.668931245803833,0.6795750856399536 5 | 3,0.7064706087112427,0.7129862308502197 6 | 4,0.7340694665908813,0.738973081111908 7 | 5,0.7529110908508301,0.7598472237586975 8 | 6,0.7665548920631409,0.7748094797134399 9 | 7,0.7850309014320374,0.7970722913742065 10 | 8,0.8017645478248596,0.8250371813774109 11 | 9,0.8173126578330994,0.8451623916625977 12 | 10,0.8412020802497864,0.8737176656723022 13 | 11,0.8679425716400146,0.8953843712806702 14 | 12,0.8944702744483948,0.9108232855796814 15 | 13,0.9057718515396118,0.9213153719902039 16 | 14,0.9263859391212463,0.9357801675796509 17 | 15,0.9370405673980713,0.9462885856628418 18 | 16,0.9486057758331299,0.9487836956977844 19 | 17,0.9528551697731018,0.9574114680290222 20 | 18,0.9368091225624084,0.9655669927597046 21 | 19,0.9648351669311523,0.9714136123657227 22 | 20,0.9670649766921997,0.9716728925704956 23 | 21,0.9685007333755493,0.9730938076972961 24 | 22,0.9673242568969727,0.9560797214508057 25 | 23,0.9749833941459656,0.9652602672576904 26 | 24,0.9793066382408142,0.9720667600631714 27 | 25,0.9691371917724609,0.982894241809845 28 | 26,0.9828124046325684,0.972553014755249 29 | 27,0.9863329529762268,0.976801335811615 30 | 28,0.9810602068901062,0.9772748947143555 31 | 29,0.9776567220687866,0.9804896116256714 32 | 30,0.9851019382476807,0.9872362613677979 33 | 31,0.9854405522346497,0.9893558025360107 34 | 32,0.9749614596366882,0.9887269139289856 35 | 33,0.9895057678222656,0.9869893789291382 36 | 34,0.9880584478378296,0.9830347299575806 37 | 35,0.9877234101295471,0.9855568408966064 38 | 36,0.9883785247802734,0.9891884922981262 39 | 37,0.9879624247550964,0.9889956116676331 40 | 38,0.9887155890464783,0.9836001992225647 41 | 39,0.9873650670051575,0.9904085993766785 42 | 40,0.9881359934806824,0.9905091524124146 43 | 41,0.9811151623725891,0.9882407188415527 44 | 42,0.9861599802970886,0.9866761565208435 45 | 43,0.98692786693573,0.9925112128257751 46 | 44,0.9795823693275452,0.9913694262504578 47 | 45,0.9924575686454773,0.991726279258728 48 | 46,0.9931233525276184,0.9927942156791687 49 | 47,0.9939773082733154,0.9912031292915344 50 | 48,0.992217481136322,0.9911028742790222 51 | 49,0.9915516376495361,0.9899910092353821 52 | 50,0.9856163859367371,0.9902493953704834 53 | 51,0.990985095500946,0.9878776669502258 54 | 52,0.9942293763160706,0.9915951490402222 55 | 53,0.9917809367179871,0.9917677044868469 56 | 54,0.992010235786438,0.9926860928535461 57 | 55,0.9918158054351807,0.9938988089561462 58 | 56,0.9916228652000427,0.9919005632400513 59 | 57,0.9879317879676819,0.9930049777030945 60 | 58,0.9947538375854492,0.9930987358093262 61 | 59,0.9938991069793701,0.9915943741798401 62 | 60,0.9919843077659607,0.993638813495636 63 | 61,0.9941264986991882,0.9952437877655029 64 | 62,0.995318591594696,0.9913675785064697 65 | 63,0.9935200810432434,0.9933028817176819 66 | 64,0.9901033043861389,0.9951298832893372 67 | 65,0.9931090474128723,0.9954197406768799 68 | 66,0.9941926598548889,0.9916074275970459 69 | 67,0.9950458407402039,0.9936627149581909 70 | 68,0.9944517612457275,0.9937908053398132 71 | 69,0.9949027299880981,0.9948269724845886 72 | 70,0.9948976635932922,0.9961203932762146 73 | 71,0.9939064979553223,0.9720012545585632 74 | 72,0.990359365940094,0.9873551726341248 75 | 73,0.9949988126754761,0.9935866594314575 76 | 74,0.995802640914917,0.9954845905303955 77 | 75,0.996362566947937,0.996104896068573 78 | 
76,0.9937642216682434,0.9962460398674011 79 | 77,0.9947280287742615,0.9965746402740479 80 | 78,0.9955654740333557,0.9948673844337463 81 | 79,0.9962960481643677,0.9962607622146606 82 | 80,0.9907768964767456,0.9962096810340881 83 | 81,0.9950898885726929,0.9939010143280029 84 | 82,0.996989369392395,0.9947488307952881 85 | 83,0.9964908361434937,0.9954239726066589 86 | 84,0.9937142729759216,0.9958418011665344 87 | 85,0.9961052536964417,0.9959908127784729 88 | 86,0.9967987537384033,0.9940893054008484 89 | 87,0.9937946796417236,0.9935274720191956 90 | 88,0.9946946501731873,0.9953696131706238 91 | 89,0.9957744479179382,0.9967403411865234 92 | 90,0.9970495700836182,0.9938063025474548 93 | 91,0.9953664541244507,0.9923964142799377 94 | 92,0.9951341152191162,0.9934335350990295 95 | 93,0.9962100982666016,0.995935320854187 96 | 94,0.9972308278083801,0.9965331554412842 97 | 95,0.9963667392730713,0.99539715051651 98 | 96,0.9958866834640503,0.99607253074646 99 | 97,0.9967323541641235,0.9939910769462585 100 | 98,0.9958980679512024,0.9958271384239197 101 | 99,0.9961298108100891,0.9968826174736023 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_third_cleaned_training_loss.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5184474587440491,0.514799177646637 3 | 1,0.4715733826160431,0.4557076096534729 4 | 2,0.41410791873931885,0.4030498266220093 5 | 3,0.3762640357017517,0.36939841508865356 6 | 4,0.34415072202682495,0.34097719192504883 7 | 5,0.32406190037727356,0.3162393867969513 8 | 6,0.3049965798854828,0.2980538010597229 9 | 7,0.28549525141716003,0.27813565731048584 10 | 8,0.2664185166358948,0.25035151839256287 11 | 9,0.25155285000801086,0.2295449823141098 12 | 10,0.23187905550003052,0.19609911739826202 13 | 11,0.20055866241455078,0.16988499462604523 14 | 12,0.1708531379699707,0.14898909628391266 15 | 13,0.15555861592292786,0.13485212624073029 16 | 14,0.12953726947307587,0.1144777312874794 17 | 15,0.11497560888528824,0.10016879439353943 18 | 16,0.09799551218748093,0.09578156471252441 19 | 17,0.09096068143844604,0.08377540856599808 20 | 18,0.11350774765014648,0.0719563215970993 21 | 19,0.0734831765294075,0.06102084740996361 22 | 20,0.06953892856836319,0.061435699462890625 23 | 21,0.06718103587627411,0.058218590915203094 24 | 22,0.06880258023738861,0.08469975739717484 25 | 23,0.05626221373677254,0.07000214606523514 26 | 24,0.048438191413879395,0.06064785644412041 27 | 25,0.06674529612064362,0.04198189452290535 28 | 26,0.04253034666180611,0.05645068734884262 29 | 27,0.03584432974457741,0.05202118307352066 30 | 28,0.04493755102157593,0.047727085649967194 31 | 29,0.04978388920426369,0.04577414318919182 32 | 30,0.037608351558446884,0.03175966069102287 33 | 31,0.037020888179540634,0.02701359987258911 34 | 32,0.0540175698697567,0.02800225280225277 35 | 33,0.028850894421339035,0.031936146318912506 36 | 34,0.031115688383579254,0.03908330947160721 37 | 35,0.03252720832824707,0.034762192517519 38 | 36,0.030313456431031227,0.027766278013586998 39 | 37,0.030566595494747162,0.026986490935087204 40 | 38,0.0298234224319458,0.03705243393778801 41 | 39,0.031657181680202484,0.024335958063602448 42 | 40,0.0298311710357666,0.024372708052396774 43 | 41,0.04318074882030487,0.02805987000465393 44 | 42,0.03396302089095116,0.03197963163256645 45 | 43,0.03204477205872536,0.019923226907849312 46 | 44,0.044857706874608994,0.022028425708413124 47 | 
45,0.022139130160212517,0.021764669567346573 48 | 46,0.019915595650672913,0.018804283812642097 49 | 47,0.018209176138043404,0.021811828017234802 50 | 48,0.02179943397641182,0.022738711908459663 51 | 49,0.023381168022751808,0.024597270414233208 52 | 50,0.03295963630080223,0.02336309291422367 53 | 51,0.024334359914064407,0.02850559912621975 54 | 52,0.017392411828041077,0.021160980686545372 55 | 53,0.02226928435266018,0.020042408257722855 56 | 54,0.02183220535516739,0.018999557942152023 57 | 55,0.02187538333237171,0.016389040276408195 58 | 56,0.022676149383187294,0.02046029083430767 59 | 57,0.029443342238664627,0.018234187737107277 60 | 58,0.016126055270433426,0.017772922292351723 61 | 59,0.01829379051923752,0.02080548368394375 62 | 60,0.02136377803981304,0.01668667234480381 63 | 61,0.017617180943489075,0.01303642988204956 64 | 62,0.014825316146016121,0.02118167094886303 65 | 63,0.018557650968432426,0.017345614731311798 66 | 64,0.02599746733903885,0.013564799912273884 67 | 65,0.019067218527197838,0.01254743430763483 68 | 66,0.01666824147105217,0.020264113321900368 69 | 67,0.01547351572662592,0.015856368467211723 70 | 68,0.016389723867177963,0.016044892370700836 71 | 69,0.015427169390022755,0.01383983064442873 72 | 70,0.01529749296605587,0.01083699706941843 73 | 71,0.017362017184495926,0.0589444674551487 74 | 72,0.024625660851597786,0.028273390606045723 75 | 73,0.015119004063308239,0.016501452773809433 76 | 74,0.013472403399646282,0.01268282625824213 77 | 75,0.012603718787431717,0.01102281454950571 78 | 76,0.017466170713305473,0.010829572565853596 79 | 77,0.01572555862367153,0.010034679435193539 80 | 78,0.0136650325730443,0.01369120366871357 81 | 79,0.01213579811155796,0.010743323713541031 82 | 80,0.023458033800125122,0.010639132931828499 83 | 81,0.015183388255536556,0.017199642956256866 84 | 82,0.010648544877767563,0.014086942188441753 85 | 83,0.011591319926083088,0.012065197341144085 86 | 84,0.017331009730696678,0.011337104253470898 87 | 85,0.012700527906417847,0.011130772531032562 88 | 86,0.010863824747502804,0.014538935385644436 89 | 87,0.01708102412521839,0.015924684703350067 90 | 88,0.01519015897065401,0.012490171007812023 91 | 89,0.013147921301424503,0.009570535272359848 92 | 90,0.010452006943523884,0.015555928461253643 93 | 91,0.013497067615389824,0.01779227703809738 94 | 92,0.01416314672678709,0.016168154776096344 95 | 93,0.012125177308917046,0.011051509529352188 96 | 94,0.010188112035393715,0.009676710702478886 97 | 95,0.011657116934657097,0.012051807716488838 98 | 96,0.012722537852823734,0.010768063366413116 99 | 97,0.010952242650091648,0.015180829912424088 100 | 98,0.012630262412130833,0.011353857815265656 101 | 99,0.012429894879460335,0.008863753639161587 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_third_cleaned_validation_accuracy.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.49831199645996094,0.4967435896396637 3 | 1,0.5381414890289307,0.5735529065132141 4 | 2,0.6150274276733398,0.6692438721656799 5 | 3,0.7116842865943909,0.7303169369697571 6 | 4,0.7498611211776733,0.7546316981315613 7 | 5,0.750446081161499,0.7455857396125793 8 | 6,0.7749678492546082,0.7502431869506836 9 | 7,0.7901824116706848,0.7981840372085571 10 | 8,0.8056091070175171,0.8056427836418152 11 | 9,0.8199124932289124,0.8482809662818909 12 | 10,0.7745903730392456,0.8459877371788025 13 | 11,0.8339253067970276,0.8239230513572693 14 | 
12,0.8433789014816284,0.8765159845352173 15 | 13,0.8603834509849548,0.8686511516571045 16 | 14,0.884220540523529,0.8777010440826416 17 | 15,0.8998560905456543,0.8966220021247864 18 | 16,0.8882896304130554,0.9046463370323181 19 | 17,0.8819013237953186,0.9129425883293152 20 | 18,0.9069419503211975,0.9090491533279419 21 | 19,0.9092644453048706,0.9057987928390503 22 | 20,0.9082050323486328,0.8651010394096375 23 | 21,0.910660445690155,0.9021901488304138 24 | 22,0.8653603792190552,0.8849061727523804 25 | 23,0.9157181978225708,0.8889091610908508 26 | 24,0.9137357473373413,0.9041980504989624 27 | 25,0.9096946716308594,0.9189567565917969 28 | 26,0.9153738021850586,0.8899221420288086 29 | 27,0.9207668900489807,0.923848032951355 30 | 28,0.825003981590271,0.8955215215682983 31 | 29,0.9159859418869019,0.8690326809883118 32 | 30,0.9046393632888794,0.9253801703453064 33 | 31,0.9233131408691406,0.9299674034118652 34 | 32,0.9257145524024963,0.9112995266914368 35 | 33,0.9269300699234009,0.9140422344207764 36 | 34,0.9217751622200012,0.9126996994018555 37 | 35,0.9214078783988953,0.9231101274490356 38 | 36,0.9262787699699402,0.9289594888687134 39 | 37,0.9196728467941284,0.8490774035453796 40 | 38,0.9160096049308777,0.9299008846282959 41 | 39,0.9228806495666504,0.9106549024581909 42 | 40,0.8979875445365906,0.9211599826812744 43 | 41,0.9108622074127197,0.9275705814361572 44 | 42,0.9177919030189514,0.9265955686569214 45 | 43,0.9078817963600159,0.9293557405471802 46 | 44,0.9309134483337402,0.9284927248954773 47 | 45,0.9358762502670288,0.9286373257637024 48 | 46,0.9341212511062622,0.9284398555755615 49 | 47,0.9287315607070923,0.9099222421646118 50 | 48,0.9287952184677124,0.9272200465202332 51 | 49,0.9283444881439209,0.9262991547584534 52 | 50,0.9171375632286072,0.9188938140869141 53 | 51,0.927761435508728,0.9270948171615601 54 | 52,0.9308364987373352,0.9328721761703491 55 | 53,0.9245731234550476,0.9258183836936951 56 | 54,0.9261247515678406,0.9323674440383911 57 | 55,0.9298816323280334,0.9349023699760437 58 | 56,0.8876102566719055,0.9250092506408691 59 | 57,0.9285379648208618,0.9334782958030701 60 | 58,0.9300395846366882,0.9402839541435242 61 | 59,0.9327507019042969,0.9312987923622131 62 | 60,0.928963840007782,0.9329085946083069 63 | 61,0.9373693466186523,0.9401975870132446 64 | 62,0.9358710050582886,0.9326238036155701 65 | 63,0.9282122254371643,0.9382452964782715 66 | 64,0.9331640601158142,0.9383229613304138 67 | 65,0.9226546883583069,0.9289769530296326 68 | 66,0.936234712600708,0.9381951093673706 69 | 67,0.9349684119224548,0.9355139136314392 70 | 68,0.9344094395637512,0.9379714727401733 71 | 69,0.9276459217071533,0.9416619539260864 72 | 70,0.9322375655174255,0.9300509691238403 73 | 71,0.922532856464386,0.9166184663772583 74 | 72,0.9356030225753784,0.9300719499588013 75 | 73,0.9336538910865784,0.9371998310089111 76 | 74,0.9361178278923035,0.93988037109375 77 | 75,0.9372519850730896,0.9370654821395874 78 | 76,0.9285513162612915,0.9375638365745544 79 | 77,0.9382128715515137,0.9405808448791504 80 | 78,0.9339078664779663,0.9386417269706726 81 | 79,0.9333094358444214,0.9402540922164917 82 | 80,0.9248196482658386,0.9377831816673279 83 | 81,0.9394784569740295,0.9275939464569092 84 | 82,0.9427780508995056,0.9347148537635803 85 | 83,0.9338074922561646,0.9394181370735168 86 | 84,0.937708854675293,0.9371258020401001 87 | 85,0.9412825107574463,0.9417241215705872 88 | 86,0.9376865029335022,0.9324027895927429 89 | 87,0.9242527484893799,0.9313969016075134 90 | 88,0.9403479099273682,0.9396719932556152 91 | 
89,0.9426299929618835,0.9371857643127441 92 | 90,0.9410635232925415,0.9327054023742676 93 | 91,0.923166811466217,0.9211581349372864 94 | 92,0.939449667930603,0.935433566570282 95 | 93,0.9382390975952148,0.9396108984947205 96 | 94,0.9384470582008362,0.9352085590362549 97 | 95,0.9393512010574341,0.9349372982978821 98 | 96,0.938664972782135,0.9294068217277527 99 | 97,0.9324997067451477,0.9322015643119812 100 | 98,0.9356473684310913,0.9388805627822876 101 | 99,0.9408318400382996,0.9435174465179443 102 | -------------------------------------------------------------------------------- /utils/csv_output/pos_enc_third_cleaned_validation_loss.csv: -------------------------------------------------------------------------------- 1 | step,rot-y_single-bin_with_pos_enc,rot-y_tricosine_with_pos_enc 2 | 0,0.5002307295799255,1191.887939453125 3 | 1,0.6038916707038879,0.519759476184845 4 | 2,0.5084132552146912,0.47406935691833496 5 | 3,0.36601701378822327,0.3526161313056946 6 | 4,0.3216087520122528,0.33162349462509155 7 | 5,0.33310312032699585,0.33579838275909424 8 | 6,0.28943759202957153,0.32216793298721313 9 | 7,0.280355304479599,0.2791596055030823 10 | 8,0.2653195261955261,0.2654419541358948 11 | 9,0.2413780242204666,0.22008106112480164 12 | 10,0.3041571378707886,0.2274307757616043 13 | 11,0.235697939991951,0.2560203969478607 14 | 12,0.22664332389831543,0.18803910911083221 15 | 13,0.20946568250656128,0.191775843501091 16 | 14,0.17770163714885712,0.18594588339328766 17 | 15,0.15543246269226074,0.15882118046283722 18 | 16,0.170287624001503,0.14704373478889465 19 | 17,0.18955743312835693,0.13403572142124176 20 | 18,0.14414535462856293,0.1383538395166397 21 | 19,0.1444927453994751,0.14464424550533295 22 | 20,0.14298495650291443,0.20210771262645721 23 | 21,0.14038260281085968,0.1493901014328003 24 | 22,0.20434686541557312,0.17417792975902557 25 | 23,0.13563083112239838,0.1680738478899002 26 | 24,0.13207462430000305,0.18283189833164215 27 | 25,0.14398355782032013,0.1562415361404419 28 | 26,0.1338098645210266,0.17260584235191345 29 | 27,0.12417607754468918,0.12095233798027039 30 | 28,0.430066853761673,0.1637059897184372 31 | 29,0.13068804144859314,0.24913398921489716 32 | 30,0.15643742680549622,0.1194002777338028 33 | 31,0.12211297452449799,0.1072801947593689 34 | 32,0.11901120841503143,0.15500371158123016 35 | 33,0.1151551678776741,0.13585932552814484 36 | 34,0.12406258285045624,0.1405380815267563 37 | 35,0.12777958810329437,0.4739367365837097 38 | 36,0.11760304123163223,0.11273427307605743 39 | 37,0.12900061905384064,1.4445438385009766 40 | 38,0.13366059958934784,0.11167611926794052 41 | 39,0.12220095843076706,0.16263000667095184 42 | 40,0.1581667810678482,0.18717649579048157 43 | 41,0.13833463191986084,0.11234565824270248 44 | 42,0.1294640600681305,0.11504467576742172 45 | 43,0.14693410694599152,0.10920660197734833 46 | 44,0.1117297112941742,0.11376838386058807 47 | 45,0.1035657599568367,0.1140546202659607 48 | 46,0.10430563241243362,0.11155211925506592 49 | 47,0.11294056475162506,0.13810689747333527 50 | 48,0.114687979221344,0.11157132685184479 51 | 49,0.1142527237534523,0.12008128315210342 52 | 50,0.13049566745758057,0.13039571046829224 53 | 51,0.11419155448675156,0.11168774962425232 54 | 52,0.10943692922592163,0.10250359028577805 55 | 53,0.12236760556697845,0.11115673929452896 56 | 54,0.11983725428581238,0.10415364801883698 57 | 55,0.11450605094432831,0.10302695631980896 58 | 56,0.17533916234970093,0.11904110759496689 59 | 57,0.11124803870916367,0.10412539541721344 60 | 
58,0.11467138677835464,0.09479480236768723 61 | 59,0.10708357393741608,0.10598216950893402 62 | 60,0.11628138273954391,0.10692422837018967 63 | 61,0.09930109977722168,0.09330224990844727 64 | 62,0.10162851214408875,0.10640821605920792 65 | 63,0.11433802545070648,0.09624210000038147 66 | 64,0.1045493483543396,0.09512428194284439 67 | 65,0.1290111243724823,0.11168298870325089 68 | 66,0.09991801530122757,0.09320318698883057 69 | 67,0.10529084503650665,0.10021660476922989 70 | 68,0.10512533783912659,0.09530051797628403 71 | 69,0.11507755517959595,0.09392766654491425 72 | 70,0.10758216679096222,0.10750102251768112 73 | 71,0.12224903702735901,0.13006427884101868 74 | 72,0.10172666609287262,0.10732830315828323 75 | 73,0.10643792897462845,0.0991373062133789 76 | 74,0.10016395151615143,0.09317849576473236 77 | 75,0.10016903281211853,0.09630747139453888 78 | 76,0.11081475019454956,0.09839734435081482 79 | 77,0.10017886012792587,0.09040658175945282 80 | 78,0.1071295216679573,0.09129421412944794 81 | 79,0.10537565499544144,0.09191980212926865 82 | 80,0.11910008639097214,0.09584474563598633 83 | 81,0.09768813848495483,0.114784374833107 84 | 82,0.09089819341897964,0.10289433598518372 85 | 83,0.10619005560874939,0.09337140619754791 86 | 84,0.10179585218429565,0.09843302518129349 87 | 85,0.09513769298791885,0.08952416479587555 88 | 86,0.09806564450263977,0.11103919893503189 89 | 87,0.12283480167388916,0.10711817443370819 90 | 88,0.09591327607631683,0.09089471399784088 91 | 89,0.09175701439380646,0.09943599998950958 92 | 90,0.09226634353399277,0.11285731941461563 93 | 91,0.12296686321496964,0.7228661179542542 94 | 92,0.09566739201545715,0.19299447536468506 95 | 93,0.10046564042568207,0.13795165717601776 96 | 94,0.09764374792575836,0.14411406219005585 97 | 95,0.09861532598733902,0.1264881044626236 98 | 96,0.09975668042898178,4.671625137329102 99 | 97,0.11349575221538544,2.4732205867767334 100 | 98,0.10212110728025436,0.2653728425502777 101 | 99,0.09752339124679565,0.21265871822834015 102 | -------------------------------------------------------------------------------- /utils/data_checker.py: -------------------------------------------------------------------------------- 1 | import argparse, os 2 | import zipfile 3 | import tensorflow as tf 4 | 5 | dataset_urls = { 6 | "training/image_2": ( 7 | "https://s3.eu-central-1.amazonaws.com/avg-kitti/data_object_image_2.zip" 8 | ), 9 | "training/label_2": ( 10 | "https://s3.eu-central-1.amazonaws.com/avg-kitti/data_object_label_2.zip" 11 | ), 12 | } 13 | 14 | parser = argparse.ArgumentParser(description='KITTI Dataset Checker') 15 | parser.add_argument( 16 | '-f', '--datasetfolder', type=str, help='Root Data folder to Check for', default='dataset' 17 | ) 18 | args = parser.parse_args() 19 | 20 | for dataset in dataset_urls.keys(): 21 | dataset_path = os.path.abspath(os.path.join(args.datasetfolder, dataset)) 22 | if os.path.exists(dataset_path) and os.listdir(dataset_path): 23 | print(dataset_path, "exists.") 24 | else: 25 | print(dataset_path, "does not exist.") 26 | if not os.path.exists(args.datasetfolder): 27 | os.makedirs(args.datasetfolder) 28 | url = dataset_urls[dataset] 29 | print("Downloading dataset to", args.datasetfolder) 30 | file_path = os.path.abspath(os.path.join(args.datasetfolder, url.rsplit('/', 1)[-1])) 31 | path_to_downloaded_file = tf.keras.utils.get_file(file_path, url) 32 | print("Dataset downloaded to ", path_to_downloaded_file) 33 | with zipfile.ZipFile(path_to_downloaded_file, 'r') as zip_ref: 34 | zip_ref.extractall(args.datasetfolder) 
35 | print("Dataset extracted.") 36 | if os.path.exists(path_to_downloaded_file): 37 | os.remove(path_to_downloaded_file) 38 | -------------------------------------------------------------------------------- /utils/train_utils.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import os, pathlib 3 | import regex as re 4 | 5 | def setup_gpu(): 6 | # set up tensorflow GPU 7 | tf.config.list_physical_devices('GPU') 8 | gpus = tf.config.experimental.list_physical_devices('GPU') 9 | if gpus: 10 | try: 11 | # Currently, memory growth needs to be the same across GPUs 12 | for gpu in gpus: 13 | tf.config.experimental.set_memory_growth(gpu, True) 14 | logical_gpus = tf.config.experimental.list_logical_devices('GPU') 15 | config = tf.compat.v1.ConfigProto( 16 | gpu_options=tf.compat.v1.GPUOptions(per_process_gpu_memory_fraction=0.8) 17 | # device_count = {'GPU': 1} 18 | ) 19 | config.gpu_options.allow_growth = True 20 | session = tf.compat.v1.Session(config=config) 21 | tf.compat.v1.keras.backend.set_session(session) 22 | print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs") 23 | except RuntimeError as e: 24 | # Memory growth must be set before GPUs have been initialized 25 | print(e) 26 | 27 | # this static method take a python parser as input 28 | def setup_cmd_arg(parser): 29 | parser.add_argument( 30 | '--predict', 31 | dest='predict', 32 | type=str, 33 | default="rot-y", 34 | help='The target angle to be predicted. Options are rot-y, alpha', 35 | ) 36 | parser.add_argument( 37 | '--converter', 38 | dest='orientation', 39 | type=str, 40 | help=( 41 | 'Orientation conversion type of the model. ' 42 | 'Options are alpha, rot-y, tricosine, multibin, voting-bin, single-bin' 43 | ), 44 | ) 45 | parser.add_argument( 46 | '--batch_size', 47 | dest='batch_size', 48 | type=int, 49 | default=8, 50 | help='Define the batch size for training. Default value is 8', 51 | ) 52 | parser.add_argument( 53 | '--epoch', 54 | dest='num_epoch', 55 | type=int, 56 | default=100, 57 | help='Number of epoch used for training. Default value is 100', 58 | ) 59 | parser.add_argument( 60 | '--kitti_dir', 61 | dest='kitti_dir', 62 | type=str, 63 | default='dataset', 64 | help=( 65 | 'path to kitti dataset directory. Its subdirectory should have training/ and testing/.' 66 | ' Default path is dataset/' 67 | ), 68 | ) 69 | parser.add_argument( 70 | '--training_record', 71 | dest='training_record', 72 | type=str, 73 | default='training_record', 74 | help=( 75 | 'root directory of all training record, parent of weights and logs directory. ' 76 | 'Default path is training_record' 77 | ), 78 | ) 79 | parser.add_argument( 80 | '--log_dir', 81 | dest='log_dir', 82 | type=str, 83 | help='path to tensorboard logs directory. Default path is training_record/logs', 84 | ) 85 | parser.add_argument( 86 | '--weight_dir', 87 | dest='weight_dir', 88 | type=str, 89 | help='Relative path to save weights. Default path is training_record/weights', 90 | ) 91 | parser.add_argument( 92 | '--val_split', 93 | dest='val_split', 94 | type=float, 95 | default=0.2, 96 | help='Fraction of the dataset used for validation. 
-------------------------------------------------------------------------------- /utils/train_utils.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import os, pathlib 3 | import regex as re 4 | 5 | def setup_gpu(): 6 | # set up tensorflow GPU 7 | # configure every visible GPU before any model is built 8 | gpus = tf.config.experimental.list_physical_devices('GPU') 9 | if gpus: 10 | try: 11 | # Currently, memory growth needs to be the same across GPUs 12 | for gpu in gpus: 13 | tf.config.experimental.set_memory_growth(gpu, True) 14 | logical_gpus = tf.config.experimental.list_logical_devices('GPU') 15 | config = tf.compat.v1.ConfigProto( 16 | gpu_options=tf.compat.v1.GPUOptions(per_process_gpu_memory_fraction=0.8) 17 | # device_count = {'GPU': 1} 18 | ) 19 | config.gpu_options.allow_growth = True 20 | session = tf.compat.v1.Session(config=config) 21 | tf.compat.v1.keras.backend.set_session(session) 22 | print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs") 23 | except RuntimeError as e: 24 | # Memory growth must be set before GPUs have been initialized 25 | print(e) 26 | 27 | # Attaches all training CLI arguments to the given argparse parser and returns the parsed args 28 | def setup_cmd_arg(parser): 29 | parser.add_argument( 30 | '--predict', 31 | dest='predict', 32 | type=str, 33 | default="rot-y", 34 | help='The target angle to be predicted. Options are rot-y, alpha', 35 | ) 36 | parser.add_argument( 37 | '--converter', 38 | dest='orientation', 39 | type=str, 40 | help=( 41 | 'Orientation conversion type of the model. ' 42 | 'Options are alpha, rot-y, tricosine, multibin, voting-bin, single-bin' 43 | ), 44 | ) 45 | parser.add_argument( 46 | '--batch_size', 47 | dest='batch_size', 48 | type=int, 49 | default=8, 50 | help='Define the batch size for training. Default value is 8', 51 | ) 52 | parser.add_argument( 53 | '--epoch', 54 | dest='num_epoch', 55 | type=int, 56 | default=100, 57 | help='Number of epochs used for training. Default value is 100', 58 | ) 59 | parser.add_argument( 60 | '--kitti_dir', 61 | dest='kitti_dir', 62 | type=str, 63 | default='dataset', 64 | help=( 65 | 'path to kitti dataset directory. Its subdirectory should have training/ and testing/.' 66 | ' Default path is dataset/' 67 | ), 68 | ) 69 | parser.add_argument( 70 | '--training_record', 71 | dest='training_record', 72 | type=str, 73 | default='training_record', 74 | help=( 75 | 'root directory of all training record, parent of weights and logs directory. ' 76 | 'Default path is training_record' 77 | ), 78 | ) 79 | parser.add_argument( 80 | '--log_dir', 81 | dest='log_dir', 82 | type=str, 83 | help='path to tensorboard logs directory. Default path is training_record/logs', 84 | ) 85 | parser.add_argument( 86 | '--weight_dir', 87 | dest='weight_dir', 88 | type=str, 89 | help='Relative path to save weights. Default path is training_record/weights', 90 | ) 91 | parser.add_argument( 92 | '--val_split', 93 | dest='val_split', 94 | type=float, 95 | default=0.2, 96 | help='Fraction of the dataset used for validation. Default val_split is 0.2', 97 | ) 98 | parser.add_argument( 99 | '--resume', 100 | dest='resume', 101 | type=lambda s: s.lower() in ('true', '1', 'yes'),  # plain type=bool treats any non-empty string, even 'False', as True 102 | default=False, 103 | help='Resume from previous training under training_record directory', 104 | ) 105 | parser.add_argument( 106 | '--add_pos_enc', 107 | dest='add_pos_enc', 108 | type=lambda s: s.lower() in ('true', '1', 'yes'), 109 | default=False, 110 | help='Add positional encoding to input', 111 | ) 112 | parser.add_argument("--use_angular_loss", dest='use_angular_loss', type=lambda s: s.lower() in ('true', '1', 'yes'), default=False) 113 | parser.add_argument( 114 | "--add_depth_map", 115 | dest="add_depth_map", 116 | type=lambda s: s.lower() in ('true', '1', 'yes'), 117 | default=False, 118 | help="If true, use depth maps as an extra input; they are expected under training/predic_depth (see check_args).", 119 | ) 120 | args = parser.parse_args() 121 | return args 122 | 123 | def timer(start, end): 124 | hours, rem = divmod(end - start, 3600) 125 | minutes, seconds = divmod(rem, 60) 126 | print("{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), seconds)) 127 | 128 | def check_args(args, depth_path_dir, label_dir, img_dir): 129 | 130 | if not os.path.isdir(args.kitti_dir): 131 | raise Exception('kitti_dir is not a directory.') 132 | if args.orientation not in [ 133 | 'tricosine', 134 | 'alpha', 135 | 'rot-y', 136 | 'multibin', 137 | 'voting-bin', 138 | 'single-bin', 139 | 'exp-A' 140 | ]: 141 | raise Exception('Invalid Orientation Type.') 142 | if not 0.0 <= args.val_split <= 1.0: 143 | raise Exception('val_split must be in the range [0.0, 1.0]') 144 | if args.add_depth_map and (not os.path.isdir(depth_path_dir)): 145 | raise Exception( 146 | "Unable to find depth maps. Please put depth maps under" 147 | " /kitti_dataset/training/predic_depth" 148 | ) 149 | 150 | def find_latest_epoch_and_weights(weights_directory, verbose=True): 151 | sub_directories = list(weights_directory.iterdir()) 152 | if len(sub_directories) == 0: 153 | raise Exception( 154 | 'No previous training record found. Please enter the correct directory or remove' 155 | ' the --resume option' 156 | ) 157 | sub_directories.sort() 158 | latest_training_dir = sub_directories[-1] 159 | weight_files = [str(path) for path in latest_training_dir.iterdir()] 160 | weight_files.sort()  # lexicographic sort; relies on zero-padded epoch numbers in the filenames 161 | latest_weight = weight_files[-1] 162 | latest_epoch = re.search(r'epoch-(\d+)-', latest_weight).group(1) 163 | if verbose: 164 | print('-----------------------------------------------') 165 | print(f'Resume training from directory: {latest_training_dir}') 166 | print(f'Resume training from epoch number: {latest_epoch}') 167 | return latest_training_dir, latest_epoch, latest_weight
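A minimal sketch of how these helpers are presumably wired together by a training entry point; the import path and call order here are assumptions (see model/training.py for the actual usage):

```python
import argparse
from utils.train_utils import setup_gpu, setup_cmd_arg

# Parse the training CLI arguments defined in setup_cmd_arg, then configure
# the GPUs before any model is constructed.
parser = argparse.ArgumentParser(description='KITTI Orientation Training')
args = setup_cmd_arg(parser)
setup_gpu()
print(args.predict, args.orientation, args.batch_size, args.num_epoch)
```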
-------------------------------------------------------------------------------- /utils/view_tensorboard.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "Enter Weights Directory: " weights_dir 3 | read -p "Enter Port num: " port_num 4 | echo "tensorboard --logdir $weights_dir --port $port_num" 5 | tensorboard --logdir "$weights_dir" --port "$port_num"
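The same launch can be done without interactive prompts; this is a hedged sketch using TensorBoard's Python API (`tensorboard.program`), and the `training_record/logs` path is only an assumption based on the `--log_dir` default described in train_utils.py:

```python
from tensorboard import program

# Launch TensorBoard on a background thread and print the URL it serves on.
tb = program.TensorBoard()
tb.configure(argv=[None, '--logdir', 'training_record/logs', '--port', '6006'])
url = tb.launch()
print(f'TensorBoard listening on {url}')
input('Press Enter to stop...')  # the server thread is a daemon, so keep the process alive
```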
-------------------------------------------------------------------------------- /utils/visualize_loss_function.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import tensorflow as tf 3 | import sys, math 4 | import pathlib 5 | 6 | sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) 7 | 8 | from model.orientation_converters import * 9 | import model.loss_function as loss_function 10 | from pprint import pprint 11 | import matplotlib.pyplot as plt 12 | 13 | import pandas as pd 14 | 15 | 16 | def generate_loss_csv(batch_size, ground_truth, visualize=False): 17 | y_true = np.full(batch_size, ground_truth) 18 | y_pred = np.linspace(-math.pi, math.pi, batch_size) 19 | output_batch = np.zeros((7, batch_size))  # one row per loss: multibin, tricosine, single-bin, voting-bin, angular, rot-y, l2 20 | # ===== multibin ===== 21 | multibin_scaling_factor = 1.5 22 | multibin_true = tf.convert_to_tensor([np.concatenate(radians_to_multibin(angle), axis=-1) for angle in y_true], dtype=tf.float32) 23 | multibin_pred = tf.convert_to_tensor([np.concatenate(radians_to_multibin(angle), axis=-1) for angle in y_pred], dtype=tf.float32) 24 | # multibin_true = tf.convert_to_tensor( 25 | # [np.concatenate(radians_to_multi_affinity_bin(angle), axis=-1) for angle in y_true], 26 | # dtype=tf.float32, 27 | # ) 28 | # multibin_pred = tf.convert_to_tensor( 29 | # [np.concatenate(radians_to_multi_affinity_bin(angle), axis=-1) for angle in y_pred], 30 | # dtype=tf.float32, 31 | # ) 32 | multibin_losses = multibin_scaling_factor * loss_function.loss_multi_affinity__( 33 | multibin_true, multibin_pred 34 | ) 35 | output_batch[0, :] = multibin_losses 36 | # ===== tricosine ===== 37 | tricosine_true = tf.convert_to_tensor( 38 | [radians_to_tricosine(angle) for angle in y_true], dtype=tf.float32 39 | ) 40 | tricosine_pred = tf.convert_to_tensor( 41 | [radians_to_tricosine(angle) for angle in y_pred], dtype=tf.float32 42 | ) 43 | tricosine_losses = loss_function.loss_tricosine_(tricosine_true, tricosine_pred) 44 | output_batch[1, :] = tricosine_losses 45 | # ===== single bin ===== 46 | singlebin_true = tf.convert_to_tensor( 47 | [radians_to_single_bin(angle) for angle in y_true], dtype=tf.float32 48 | ) 49 | singlebin_pred = tf.convert_to_tensor( 50 | [radians_to_single_bin(angle) for angle in y_pred], dtype=tf.float32 51 | ) 52 | singlebin_losses = loss_function.loss_single_bin_l2_(singlebin_true, singlebin_pred) 53 | output_batch[2, :] = singlebin_losses 54 | # ===== voting bin ===== 55 | votingbin_true = tf.convert_to_tensor( 56 | [np.concatenate(radians_to_voting_bin(angle), -1) for angle in y_true], 57 | dtype=tf.float32, 58 | ) 59 | votingbin_pred = tf.convert_to_tensor( 60 | [np.concatenate(radians_to_voting_bin(angle), -1) for angle in y_pred], 61 | dtype=tf.float32, 62 | ) 63 | votingbin_losses = loss_function.loss_voting_bin_(votingbin_true, votingbin_pred) 64 | output_batch[3, :] = votingbin_losses 65 | # ====== angular and roty loss ====== 66 | angular_loss_scaling_factor = 1 67 | roty_true = tf.convert_to_tensor([angle for angle in y_true], dtype=tf.float32) 68 | roty_pred = tf.convert_to_tensor([angle for angle in y_pred], dtype=tf.float32) 69 | angular_losses = loss_function.loss_alpha_rot_y_angular_( 70 | roty_true, roty_pred 71 | ) 72 | 73 | rot_y_scaling_factor = 1 #1 / 20 74 | output_batch[4, :] = angular_loss_scaling_factor * angular_losses 75 | roty_losses = loss_function.loss_alpha_rot_y_l2_( 76 | roty_true[:, np.newaxis], roty_pred[:, np.newaxis] 77 | ) 78 | output_batch[5, :] = rot_y_scaling_factor * roty_losses 79 | # ====== l2 loss ====== 80 | l2_scaling_factor = 1 / 20 81 | l2_true = y_true[:, np.newaxis] 82 | l2_pred = y_pred[:, np.newaxis] 83 | l2_loss = l2_scaling_factor * loss_function.l2_loss(l2_true, l2_pred) 84 | output_batch[6, :] = l2_loss 85 | 86 | # ===== exp-A bin ===== 87 | exp_A_true = tf.convert_to_tensor( 88 | [np.concatenate(radians_to_expA(angle), -1) for angle in y_true], 89 | dtype=tf.float32, 90 | ) 91 | exp_A_pred = tf.convert_to_tensor( 92 | [np.concatenate(radians_to_expA(angle), -1) for angle in y_pred], 93 | dtype=tf.float32, 94 | ) 95 | exp_A_losses = loss_function.loss_exp_A_(exp_A_true, exp_A_pred) 96 | # output_batch[7, :] = exp_A_losses  # disabled: output_batch above only allocates rows 0-6 97 | 98 | 99 | 100 | if visualize: 101 | # plot exp-A 102 | plt.figure() 103 | plt.plot(y_pred, exp_A_losses) 104 | plt.title(f'Exp-A Loss Function When GT={np.round(ground_truth, 3)}') 105 | plt.xlabel('Predicted Value') 106 | plt.ylabel('Loss') 107 | plt.savefig(f"Exp-A_gt_{str(np.round(ground_truth,3))}.png") 108 | # # plot l2 109 | # plt.figure() 110 | # plt.plot(y_pred, tricosine_losses) 111 | # plt.title('L2 Loss Function When GT=0') 112 | # plt.xlabel('Predicted Value') 113 | # plt.ylabel('Loss') 114 | # plt.savefig(f"l2_gt_{str(np.round(ground_truth,3))}.png") 115 | 116 | # # plot angular_losses 117 | # plt.figure() 118 | # plt.plot(y_pred, angular_losses) 119 | # plt.title('Angular Loss Function When GT=0') 120 | # plt.xlabel('Predicted Value') 121 | # plt.ylabel('Loss') 122 | # plt.savefig(f"Angular_gt_{str(np.round(ground_truth,3))}.png") 123 | 124 | return output_batch.T, y_pred 125 | 126 | 127 | if __name__ == "__main__": 128 | 129 | 130 | BATCH_SIZE = 900 131 | generate_loss_csv(BATCH_SIZE, 0, True) 132 | # df_list = [] 133 | # for gt in [0, 0.5 * math.pi, math.pi]: 134 | # loss_array, y_pred = generate_loss_csv(BATCH_SIZE, gt, True) 135 | # output_dir = pathlib.Path("loss_function_graph") 136 | # df = pd.DataFrame( 137 | # data=loss_array, 138 | # index=np.array(y_pred), 139 | # columns=[ 140 | # "Multibin Loss", 141 | # "Tricosine Loss", 142 | # "SingleBin Loss", 143 | # "VotingBin Loss", 144 | # "Angular Loss", 145 | # "RotY Loss", 146 | # "L2 Loss", 147 | # ], 148 | # ) 149 | # df_list.append(df) 150 | # all_df = pd.concat(df_list, axis=1) 151 | # all_df.to_csv(output_dir/"loss_function.csv") 152 | --------------------------------------------------------------------------------
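Finally, a hedged sketch of how the cleaned CSV exports under utils/csv_output/ can be plotted; the column names are copied from the CSV headers above, and the output filename is illustrative:

```python
import pandas as pd
import matplotlib.pyplot as plt

# Each cleaned CSV holds one curve per experiment, indexed by training step.
# Runs that stopped early (e.g. the single-bin column in the pos_enc_second
# files ends at step 59) are read as NaN, which matplotlib simply skips.
df = pd.read_csv('utils/csv_output/pos_enc_third_cleaned_validation_accuracy.csv')
plt.plot(df['step'], df['rot-y_single-bin_with_pos_enc'], label='single-bin + pos. enc.')
plt.plot(df['step'], df['rot-y_tricosine_with_pos_enc'], label='tricosine + pos. enc.')
plt.xlabel('Epoch')
plt.ylabel('Validation accuracy')
plt.legend()
plt.savefig('pos_enc_third_validation_accuracy.png')
```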