├── LICENSE ├── README.md ├── assets └── report │ ├── DeepblueAI-Challenge report for MMAct Challenge.pdf │ ├── OPPO_mmact_challenge_report.pdf │ └── Raphael_unified_action_recognition_with_various_modalities.pdf ├── css ├── animate.css ├── bootstrap.min.css ├── font-awesome.min.css ├── line-icons.css ├── magnific-popup.css ├── main.css ├── nivo-lightbox.css ├── owl.carousel.css ├── owl.theme.css ├── responsive.css └── slicknav.css ├── evaluation ├── eval_mmact_trimmed.py ├── eval_mmact_untrimmed.py ├── trimmed_val_scene_gt.json ├── trimmed_val_scene_sample_format.json ├── trimmed_val_view_gt.json ├── trimmed_val_view_sample_format.json ├── untrimmed_val_gt.json └── untrimmed_val_sample_submission.json ├── fonts ├── FontAwesome.otf ├── Linearicons-Free.eot ├── Linearicons-Free.svg ├── Linearicons-Free.ttf ├── Linearicons-Free.woff ├── Linearicons-Free.woff2 ├── fontawesome-webfont.eot ├── fontawesome-webfont.svg ├── fontawesome-webfont.ttf ├── fontawesome-webfont.woff └── fontawesome-webfont.woff2 ├── img ├── bg1.png ├── dummy.png ├── email.png ├── hero-area.png ├── logo.png ├── logo.psd ├── mmactpaper.png └── team │ ├── Korpela.png │ ├── hirokatsu_min.png │ ├── ito.jpeg │ ├── kenshohara.png │ ├── maekawa-face-2018.png │ ├── nakamura.png │ ├── quankong.png │ ├── sample.png │ ├── satoshinichi.jpg │ └── sinha.jpeg ├── index.html ├── js ├── bootstrap.min.js ├── contact-form-script.js ├── form-validator.min.js ├── jquery-min.js ├── jquery.counterup.min.js ├── jquery.easing.min.js ├── jquery.magnific-popup.min.js ├── jquery.mixitup.js ├── jquery.nav.js ├── jquery.slicknav.js ├── jquery.stellar.min.js ├── jquery.vide.js ├── main.js ├── nivo-lightbox.js ├── owl.carousel.js ├── popper.min.js ├── scrolling-nav.js ├── smoothscroll.js ├── waypoints.min.js └── wow.js ├── php └── form-process.php ├── scss ├── main.scss └── responsive.scss └── utils ├── pose_frame_extraction.py ├── sample_clip └── carrying.mp4 └── time_series_classifiers.py /README.md: -------------------------------------------------------------------------------- 1 | # MMAct Challenge 2 | ## MMAct Challenge 2021 with ActivityNet @ CVPR'21 3 | 4 | The [MMAct Challenge 2021](https://mmact19.github.io/challenge/) will be hosted in the [CVPR'21 International Challenge on Activity Recognition (ActivityNet) Workshop](http://activity-net.org/challenges/2021/index.html). 5 | This challenge asks participants to propose cross-modal video action recognition/localization approaches that address the shortcomings of visual-only approaches using the [MMAct Dataset](https://mmact19.github.io/2019/). 6 | 7 | ### Dataset structure 8 | After extracting the dataset, you will see the following structure. 9 | 10 | For videos, data is stored under each session folder. Example of untrimmed videos under the `untrimmed/video` folder: 11 | ``` 12 | trainval/ 13 | ├── cam1 14 | │ ├── subject1 15 | │ │ ├── scene1 16 | │ │ │ ├── session1 17 | │ │ │ │ └── 20181016-133948.mp4 18 | │ │ │ ├── session2 19 | │ │ │ │ └── 20181016-134634.mp4 20 | │ │ │ └── session3 21 | │ │ │ └── 20181016-140459.mp4 22 | ... 23 | ``` 24 | For sensors, sensor data (.csv) is stored under each session folder, organized by sensor type: `acc_phone_clip, gyro_clip, orientation_clip` contain acceleration, gyroscope, and orientation data from a smartphone in the right pants pocket, and `acc_watch_clip` contains acceleration data from a smartwatch worn on the right hand. Example of untrimmed sensor data under `untrimmed/sensor` is shown below. Note that sensor data has NO view definition.
25 | ``` 26 | sensor/ 27 | ├── acc_phone_clip 28 | │ ├── subject1 29 | │ │ ├── scene1 30 | │ │ │ ├── session1 31 | │ │ │ ├── session2 32 | │ │ │ ├── session3 33 | ... 34 | ``` 35 | For trimmed data, the annotation is the file name itself. For untrimmed data, annotations are stored under the `untrimmed/annotation` folder, and the file name of each untrimmed video is its end timestamp. The `train` and `val` splits for each sub-task are stored in `trimmed/splits` and `untrimmed/splits`, respectively. Camera views with the same session index `subjectXX/sceneYY/sessionZZ` share the same annotation in `annotation/trainval/subjectXX/sceneYY/sessionZZ/`. The folder structure under `untrimmed/annotation` is: 36 | ``` 37 | trainval/ 38 | ├── subject1 39 | │ ├── scene1 40 | │ │ ├── session1 41 | │ │ │ └── subject1_scene1_session1.txt 42 | │ │ ├── session2 43 | │ │ │ └── subject1_scene1_session2.txt 44 | │ │ └── session3 45 | │ │ └── subject1_scene1_session3.txt 46 | ... 47 | ``` 48 | In an untrimmed video annotation file, each line has the format `[start timestamp]-[end timestamp]-[action_name]`, such as: 49 | ``` 50 | 2018/10/16 13:33:45.170-2018/10/16 13:33:49.891-standing 51 | 2018/10/16 13:33:55.362-2018/10/16 13:34:00.323-crouching 52 | 2018/10/16 13:34:06.132-2018/10/16 13:34:14.522-walking 53 | 2018/10/16 13:34:19.402-2018/10/16 13:34:25.114-running 54 | 2018/10/16 13:34:33.226-2018/10/16 13:34:38.762-checking_time 55 | 2018/10/16 13:34:46.450-2018/10/16 13:34:51.698-waving_hand 56 | 2018/10/16 13:34:57.226-2018/10/16 13:35:04.075-using_phone 57 | ... 58 | ``` 59 | 60 | ### Pose keypoints related frame extraction 61 | Human pose keypoint data are stored under the `trimmed/pose` folder. Keypoints are provided in JSON format, extracted with [openpifpaf](https://github.com/vita-epfl/openpifpaf) and manually checked. All coordinates are in pixel coordinates. The `keypoints` entry is in COCO format, with a triple of `(x, y, c)` (`c` for confidence) for every joint listed under `coco-person-keypoints`. To extract the video frames whose indices match the frame indices stored in the keypoint JSON files, use the sample script below: 62 | ``` 63 | python utils/pose_frame_extraction.py 64 | ``` 65 | 66 | ### Toolkit for sensor processing 67 | As an entry point for sensor data processing, we provide a complete example that shows how to create time-series sliding-window data, how to merge sensors with different sampling rates by datetime when early fusion is needed, and how to use the pre-processed data to train/test a time-series classifier. Once the `MMAct trimmed cross-scene dataset` and `MMAct untrimmed cross-session dataset` are prepared, run the classifier ([InceptionTime](https://github.com/hfawaz/InceptionTime)) training and testing example with: 68 | ``` 69 | python utils/time_series_classifiers.py 70 | ``` 71 | 72 | ### Evaluation for Validation Set 73 | We provide JSON ground truth files and evaluation scripts for the validation sets of both tasks under `evaluation/`. Files named `evaluation/[file_name]_gt.json` are the ground truth files; a sketch of their structure is given below.
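The trimmed ground truth files roughly take the shape sketched below. This is a minimal illustration inferred from how `evaluation/eval_mmact_trimmed.py` reads the `annotations` field; the video ids here are placeholders, so treat the provided `*_gt.json` files as the authoritative reference. For the untrimmed task, each annotation entry additionally carries a `"segment": [start_seconds, end_seconds]` field, mirroring the prediction format shown later in this README.
```
{
    "annotations": {
        "some_video_id": [
            {
                "label": "walking"
            }
        ],
        "another_video_id": [
            {
                "label": "talking"
            }
        ]
    }
}
```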
74 | To evaluate Task1 Action Recognition on the validation set, run: 75 | ``` 76 | python evaluation/eval_mmact_trimmed.py --gt ground_truth_file --pred your_prediction_file 77 | ``` 78 | Example run for Task1 cross-view validation with the sample format file `evaluation/trimmed_val_view_sample_format.json`: 79 | ``` 80 | python evaluation/eval_mmact_trimmed.py --gt trimmed_val_view_gt.json --pred trimmed_val_view_sample_format.json 81 | ``` 82 | 83 | To evaluate Task2 Temporal Localization on the validation set, run: 84 | ``` 85 | python evaluation/eval_mmact_untrimmed.py --gt ground_truth_file --pred prediction_file 86 | ``` 87 | Example run for Task2 with the sample format file `evaluation/untrimmed_val_sample_submission.json`: 88 | ``` 89 | python evaluation/eval_mmact_untrimmed.py --gt untrimmed_val_gt.json --pred untrimmed_val_sample_submission.json 90 | ``` 91 | 92 | ### Test set Submission format for Leaderboard 93 | The [Task1 Action Recognition Leaderboard](https://competitions.codalab.org/competitions/31438) receives results on the trimmed action recognition test set for the competition. Users need to submit two results, one for `cross-view` and one for `cross-scene`. 94 | The submission files for both splits share the same format, as follows: 95 | ``` 96 | { 97 | "results": { 98 | "nljxzmeshydtlonl": [ 99 | { 100 | "label": "walking", #one prediction per video is required 101 | "score": 0.5 102 | } 103 | ], 104 | "hvuapypvzwsjutrf": [ 105 | { 106 | "label": "talking", 107 | "score": 0.5 108 | } 109 | ], 110 | "hiukqqolgmtcnisi": [ 111 | { 112 | "label": "throwing", 113 | "score": 0.5 114 | } 115 | ] 116 | } 117 | } 118 | ``` 119 | 120 | The [Task2 Temporal Localization Leaderboard](https://competitions.codalab.org/competitions/31385) receives results on the untrimmed video temporal localization test set for the competition. The submission format example is: 121 | ``` 122 | { 123 | "results": { 124 | "mynbiqpmzjplsgqe": [{ 125 | "label": "standing", 126 | "score": 0.40685554496254395, 127 | "segment": [ 128 | 62.03, #start seconds, 0.0 is the starting time of the given video. 129 | 66.32 #end seconds 130 | ] 131 | }, 132 | { 133 | "label": "crouching", 134 | "score": 0.5805843080181547, 135 | "segment": [ 136 | 70.58, 137 | 75.12 138 | ] 139 | } 140 | ] 141 | } 142 | } 143 | ``` 144 | 145 | ### Reference 146 | Please cite the following paper if you use the code or dataset.
147 | ``` 148 | @InProceedings{Kong_2019_ICCV, 149 | author = {Kong, Quan and Wu, Ziming and Deng, Ziwei and Klinkigt, Martin and Tong, Bin and Murakami, Tomokazu}, 150 | title = {MMAct: A Large-Scale Dataset for Cross Modal Human Action Understanding}, 151 | booktitle = {The IEEE International Conference on Computer Vision (ICCV)}, 152 | month = {October}, 153 | year = {2019} 154 | } 155 | ``` 156 | -------------------------------------------------------------------------------- /assets/report/DeepblueAI-Challenge report for MMAct Challenge.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/assets/report/DeepblueAI-Challenge report for MMAct Challenge.pdf -------------------------------------------------------------------------------- /assets/report/OPPO_mmact_challenge_report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/assets/report/OPPO_mmact_challenge_report.pdf -------------------------------------------------------------------------------- /assets/report/Raphael_unified_action_recognition_with_various_modalities.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/assets/report/Raphael_unified_action_recognition_with_various_modalities.pdf -------------------------------------------------------------------------------- /css/line-icons.css: -------------------------------------------------------------------------------- 1 | @font-face { 2 | font-family: 'Linearicons-Free'; 3 | src:url('../fonts/Linearicons-Free.eot?w118d'); 4 | src:url('../fonts/Linearicons-Free.eot?#iefixw118d') format('embedded-opentype'), 5 | url('../fonts/Linearicons-Free.woff2?w118d') format('woff2'), 6 | url('../fonts/Linearicons-Free.woff?w118d') format('woff'), 7 | url('../fonts/Linearicons-Free.ttf?w118d') format('truetype'), 8 | url('../fonts/Linearicons-Free.svg?w118d#Linearicons-Free') format('svg'); 9 | font-weight: normal; 10 | font-style: normal; 11 | } 12 | 13 | .lnr { 14 | font-family: 'Linearicons-Free'; 15 | speak: none; 16 | font-style: normal; 17 | font-weight: normal; 18 | font-variant: normal; 19 | text-transform: none; 20 | line-height: 1; 21 | 22 | /* Better Font Rendering =========== */ 23 | -webkit-font-smoothing: antialiased; 24 | -moz-osx-font-smoothing: grayscale; 25 | } 26 | 27 | .lnr-home:before { 28 | content: "\e800"; 29 | } 30 | .lnr-apartment:before { 31 | content: "\e801"; 32 | } 33 | .lnr-pencil:before { 34 | content: "\e802"; 35 | } 36 | .lnr-magic-wand:before { 37 | content: "\e803"; 38 | } 39 | .lnr-drop:before { 40 | content: "\e804"; 41 | } 42 | .lnr-lighter:before { 43 | content: "\e805"; 44 | } 45 | .lnr-poop:before { 46 | content: "\e806"; 47 | } 48 | .lnr-sun:before { 49 | content: "\e807"; 50 | } 51 | .lnr-moon:before { 52 | content: "\e808"; 53 | } 54 | .lnr-cloud:before { 55 | content: "\e809"; 56 | } 57 | .lnr-cloud-upload:before { 58 | content: "\e80a"; 59 | } 60 | .lnr-cloud-download:before { 61 | content: "\e80b"; 62 | } 63 | .lnr-cloud-sync:before { 64 | content: "\e80c"; 65 | } 66 | .lnr-cloud-check:before { 67 | content: "\e80d"; 68 | } 69 | .lnr-database:before { 70 | content: "\e80e"; 71 | } 72 | .lnr-lock:before { 73 | content: "\e80f"; 74 | } 75 | .lnr-cog:before { 76 | 
content: "\e810"; 77 | } 78 | .lnr-trash:before { 79 | content: "\e811"; 80 | } 81 | .lnr-dice:before { 82 | content: "\e812"; 83 | } 84 | .lnr-heart:before { 85 | content: "\e813"; 86 | } 87 | .lnr-star:before { 88 | content: "\e814"; 89 | } 90 | .lnr-star-half:before { 91 | content: "\e815"; 92 | } 93 | .lnr-star-empty:before { 94 | content: "\e816"; 95 | } 96 | .lnr-flag:before { 97 | content: "\e817"; 98 | } 99 | .lnr-envelope:before { 100 | content: "\e818"; 101 | } 102 | .lnr-paperclip:before { 103 | content: "\e819"; 104 | } 105 | .lnr-inbox:before { 106 | content: "\e81a"; 107 | } 108 | .lnr-eye:before { 109 | content: "\e81b"; 110 | } 111 | .lnr-printer:before { 112 | content: "\e81c"; 113 | } 114 | .lnr-file-empty:before { 115 | content: "\e81d"; 116 | } 117 | .lnr-file-add:before { 118 | content: "\e81e"; 119 | } 120 | .lnr-enter:before { 121 | content: "\e81f"; 122 | } 123 | .lnr-exit:before { 124 | content: "\e820"; 125 | } 126 | .lnr-graduation-hat:before { 127 | content: "\e821"; 128 | } 129 | .lnr-license:before { 130 | content: "\e822"; 131 | } 132 | .lnr-music-note:before { 133 | content: "\e823"; 134 | } 135 | .lnr-film-play:before { 136 | content: "\e824"; 137 | } 138 | .lnr-camera-video:before { 139 | content: "\e825"; 140 | } 141 | .lnr-camera:before { 142 | content: "\e826"; 143 | } 144 | .lnr-picture:before { 145 | content: "\e827"; 146 | } 147 | .lnr-book:before { 148 | content: "\e828"; 149 | } 150 | .lnr-bookmark:before { 151 | content: "\e829"; 152 | } 153 | .lnr-user:before { 154 | content: "\e82a"; 155 | } 156 | .lnr-users:before { 157 | content: "\e82b"; 158 | } 159 | .lnr-shirt:before { 160 | content: "\e82c"; 161 | } 162 | .lnr-store:before { 163 | content: "\e82d"; 164 | } 165 | .lnr-cart:before { 166 | content: "\e82e"; 167 | } 168 | .lnr-tag:before { 169 | content: "\e82f"; 170 | } 171 | .lnr-phone-handset:before { 172 | content: "\e830"; 173 | } 174 | .lnr-phone:before { 175 | content: "\e831"; 176 | } 177 | .lnr-pushpin:before { 178 | content: "\e832"; 179 | } 180 | .lnr-map-marker:before { 181 | content: "\e833"; 182 | } 183 | .lnr-map:before { 184 | content: "\e834"; 185 | } 186 | .lnr-location:before { 187 | content: "\e835"; 188 | } 189 | .lnr-calendar-full:before { 190 | content: "\e836"; 191 | } 192 | .lnr-keyboard:before { 193 | content: "\e837"; 194 | } 195 | .lnr-spell-check:before { 196 | content: "\e838"; 197 | } 198 | .lnr-screen:before { 199 | content: "\e839"; 200 | } 201 | .lnr-smartphone:before { 202 | content: "\e83a"; 203 | } 204 | .lnr-tablet:before { 205 | content: "\e83b"; 206 | } 207 | .lnr-laptop:before { 208 | content: "\e83c"; 209 | } 210 | .lnr-laptop-phone:before { 211 | content: "\e83d"; 212 | } 213 | .lnr-power-switch:before { 214 | content: "\e83e"; 215 | } 216 | .lnr-bubble:before { 217 | content: "\e83f"; 218 | } 219 | .lnr-heart-pulse:before { 220 | content: "\e840"; 221 | } 222 | .lnr-construction:before { 223 | content: "\e841"; 224 | } 225 | .lnr-pie-chart:before { 226 | content: "\e842"; 227 | } 228 | .lnr-chart-bars:before { 229 | content: "\e843"; 230 | } 231 | .lnr-gift:before { 232 | content: "\e844"; 233 | } 234 | .lnr-diamond:before { 235 | content: "\e845"; 236 | } 237 | .lnr-linearicons:before { 238 | content: "\e846"; 239 | } 240 | .lnr-dinner:before { 241 | content: "\e847"; 242 | } 243 | .lnr-coffee-cup:before { 244 | content: "\e848"; 245 | } 246 | .lnr-leaf:before { 247 | content: "\e849"; 248 | } 249 | .lnr-paw:before { 250 | content: "\e84a"; 251 | } 252 | .lnr-rocket:before { 253 | content: "\e84b"; 
254 | } 255 | .lnr-briefcase:before { 256 | content: "\e84c"; 257 | } 258 | .lnr-bus:before { 259 | content: "\e84d"; 260 | } 261 | .lnr-car:before { 262 | content: "\e84e"; 263 | } 264 | .lnr-train:before { 265 | content: "\e84f"; 266 | } 267 | .lnr-bicycle:before { 268 | content: "\e850"; 269 | } 270 | .lnr-wheelchair:before { 271 | content: "\e851"; 272 | } 273 | .lnr-select:before { 274 | content: "\e852"; 275 | } 276 | .lnr-earth:before { 277 | content: "\e853"; 278 | } 279 | .lnr-smile:before { 280 | content: "\e854"; 281 | } 282 | .lnr-sad:before { 283 | content: "\e855"; 284 | } 285 | .lnr-neutral:before { 286 | content: "\e856"; 287 | } 288 | .lnr-mustache:before { 289 | content: "\e857"; 290 | } 291 | .lnr-alarm:before { 292 | content: "\e858"; 293 | } 294 | .lnr-bullhorn:before { 295 | content: "\e859"; 296 | } 297 | .lnr-volume-high:before { 298 | content: "\e85a"; 299 | } 300 | .lnr-volume-medium:before { 301 | content: "\e85b"; 302 | } 303 | .lnr-volume-low:before { 304 | content: "\e85c"; 305 | } 306 | .lnr-volume:before { 307 | content: "\e85d"; 308 | } 309 | .lnr-mic:before { 310 | content: "\e85e"; 311 | } 312 | .lnr-hourglass:before { 313 | content: "\e85f"; 314 | } 315 | .lnr-undo:before { 316 | content: "\e860"; 317 | } 318 | .lnr-redo:before { 319 | content: "\e861"; 320 | } 321 | .lnr-sync:before { 322 | content: "\e862"; 323 | } 324 | .lnr-history:before { 325 | content: "\e863"; 326 | } 327 | .lnr-clock:before { 328 | content: "\e864"; 329 | } 330 | .lnr-download:before { 331 | content: "\e865"; 332 | } 333 | .lnr-upload:before { 334 | content: "\e866"; 335 | } 336 | .lnr-enter-down:before { 337 | content: "\e867"; 338 | } 339 | .lnr-exit-up:before { 340 | content: "\e868"; 341 | } 342 | .lnr-bug:before { 343 | content: "\e869"; 344 | } 345 | .lnr-code:before { 346 | content: "\e86a"; 347 | } 348 | .lnr-link:before { 349 | content: "\e86b"; 350 | } 351 | .lnr-unlink:before { 352 | content: "\e86c"; 353 | } 354 | .lnr-thumbs-up:before { 355 | content: "\e86d"; 356 | } 357 | .lnr-thumbs-down:before { 358 | content: "\e86e"; 359 | } 360 | .lnr-magnifier:before { 361 | content: "\e86f"; 362 | } 363 | .lnr-cross:before { 364 | content: "\e870"; 365 | } 366 | .lnr-menu:before { 367 | content: "\e871"; 368 | } 369 | .lnr-list:before { 370 | content: "\e872"; 371 | } 372 | .lnr-chevron-up:before { 373 | content: "\e873"; 374 | } 375 | .lnr-chevron-down:before { 376 | content: "\e874"; 377 | } 378 | .lnr-chevron-left:before { 379 | content: "\e875"; 380 | } 381 | .lnr-chevron-right:before { 382 | content: "\e876"; 383 | } 384 | .lnr-arrow-up:before { 385 | content: "\e877"; 386 | } 387 | .lnr-arrow-down:before { 388 | content: "\e878"; 389 | } 390 | .lnr-arrow-left:before { 391 | content: "\e879"; 392 | } 393 | .lnr-arrow-right:before { 394 | content: "\e87a"; 395 | } 396 | .lnr-move:before { 397 | content: "\e87b"; 398 | } 399 | .lnr-warning:before { 400 | content: "\e87c"; 401 | } 402 | .lnr-question-circle:before { 403 | content: "\e87d"; 404 | } 405 | .lnr-menu-circle:before { 406 | content: "\e87e"; 407 | } 408 | .lnr-checkmark-circle:before { 409 | content: "\e87f"; 410 | } 411 | .lnr-cross-circle:before { 412 | content: "\e880"; 413 | } 414 | .lnr-plus-circle:before { 415 | content: "\e881"; 416 | } 417 | .lnr-circle-minus:before { 418 | content: "\e882"; 419 | } 420 | .lnr-arrow-up-circle:before { 421 | content: "\e883"; 422 | } 423 | .lnr-arrow-down-circle:before { 424 | content: "\e884"; 425 | } 426 | .lnr-arrow-left-circle:before { 427 | content: "\e885"; 428 | } 
429 | .lnr-arrow-right-circle:before { 430 | content: "\e886"; 431 | } 432 | .lnr-chevron-up-circle:before { 433 | content: "\e887"; 434 | } 435 | .lnr-chevron-down-circle:before { 436 | content: "\e888"; 437 | } 438 | .lnr-chevron-left-circle:before { 439 | content: "\e889"; 440 | } 441 | .lnr-chevron-right-circle:before { 442 | content: "\e88a"; 443 | } 444 | .lnr-crop:before { 445 | content: "\e88b"; 446 | } 447 | .lnr-frame-expand:before { 448 | content: "\e88c"; 449 | } 450 | .lnr-frame-contract:before { 451 | content: "\e88d"; 452 | } 453 | .lnr-layers:before { 454 | content: "\e88e"; 455 | } 456 | .lnr-funnel:before { 457 | content: "\e88f"; 458 | } 459 | .lnr-text-format:before { 460 | content: "\e890"; 461 | } 462 | .lnr-text-format-remove:before { 463 | content: "\e891"; 464 | } 465 | .lnr-text-size:before { 466 | content: "\e892"; 467 | } 468 | .lnr-bold:before { 469 | content: "\e893"; 470 | } 471 | .lnr-italic:before { 472 | content: "\e894"; 473 | } 474 | .lnr-underline:before { 475 | content: "\e895"; 476 | } 477 | .lnr-strikethrough:before { 478 | content: "\e896"; 479 | } 480 | .lnr-highlight:before { 481 | content: "\e897"; 482 | } 483 | .lnr-text-align-left:before { 484 | content: "\e898"; 485 | } 486 | .lnr-text-align-center:before { 487 | content: "\e899"; 488 | } 489 | .lnr-text-align-right:before { 490 | content: "\e89a"; 491 | } 492 | .lnr-text-align-justify:before { 493 | content: "\e89b"; 494 | } 495 | .lnr-line-spacing:before { 496 | content: "\e89c"; 497 | } 498 | .lnr-indent-increase:before { 499 | content: "\e89d"; 500 | } 501 | .lnr-indent-decrease:before { 502 | content: "\e89e"; 503 | } 504 | .lnr-pilcrow:before { 505 | content: "\e89f"; 506 | } 507 | .lnr-direction-ltr:before { 508 | content: "\e8a0"; 509 | } 510 | .lnr-direction-rtl:before { 511 | content: "\e8a1"; 512 | } 513 | .lnr-page-break:before { 514 | content: "\e8a2"; 515 | } 516 | .lnr-sort-alpha-asc:before { 517 | content: "\e8a3"; 518 | } 519 | .lnr-sort-amount-asc:before { 520 | content: "\e8a4"; 521 | } 522 | .lnr-hand:before { 523 | content: "\e8a5"; 524 | } 525 | .lnr-pointer-up:before { 526 | content: "\e8a6"; 527 | } 528 | .lnr-pointer-right:before { 529 | content: "\e8a7"; 530 | } 531 | .lnr-pointer-down:before { 532 | content: "\e8a8"; 533 | } 534 | .lnr-pointer-left:before { 535 | content: "\e8a9"; 536 | } 537 | -------------------------------------------------------------------------------- /css/magnific-popup.css: -------------------------------------------------------------------------------- 1 | /* Magnific Popup CSS */ 2 | .mfp-bg { 3 | top: 0; 4 | left: 0; 5 | width: 100%; 6 | height: 100%; 7 | z-index: 1042; 8 | overflow: hidden; 9 | position: fixed; 10 | background: #0b0b0b; 11 | opacity: 0.8; } 12 | 13 | .mfp-wrap { 14 | top: 0; 15 | left: 0; 16 | width: 100%; 17 | height: 100%; 18 | z-index: 1043; 19 | position: fixed; 20 | outline: none !important; 21 | -webkit-backface-visibility: hidden; } 22 | 23 | .mfp-container { 24 | text-align: center; 25 | position: absolute; 26 | width: 100%; 27 | height: 100%; 28 | left: 0; 29 | top: 0; 30 | padding: 0 8px; 31 | box-sizing: border-box; } 32 | 33 | .mfp-container:before { 34 | content: ''; 35 | display: inline-block; 36 | height: 100%; 37 | vertical-align: middle; } 38 | 39 | .mfp-align-top .mfp-container:before { 40 | display: none; } 41 | 42 | .mfp-content { 43 | position: relative; 44 | display: inline-block; 45 | vertical-align: middle; 46 | margin: 0 auto; 47 | text-align: left; 48 | z-index: 1045; } 49 | 50 | 
.mfp-inline-holder .mfp-content, 51 | .mfp-ajax-holder .mfp-content { 52 | width: 100%; 53 | cursor: auto; } 54 | 55 | .mfp-ajax-cur { 56 | cursor: progress; } 57 | 58 | .mfp-zoom-out-cur, .mfp-zoom-out-cur .mfp-image-holder .mfp-close { 59 | cursor: -moz-zoom-out; 60 | cursor: -webkit-zoom-out; 61 | cursor: zoom-out; } 62 | 63 | .mfp-zoom { 64 | cursor: pointer; 65 | cursor: -webkit-zoom-in; 66 | cursor: -moz-zoom-in; 67 | cursor: zoom-in; } 68 | 69 | .mfp-auto-cursor .mfp-content { 70 | cursor: auto; } 71 | 72 | .mfp-close, 73 | .mfp-arrow, 74 | .mfp-preloader, 75 | .mfp-counter { 76 | -webkit-user-select: none; 77 | -moz-user-select: none; 78 | user-select: none; } 79 | 80 | .mfp-loading.mfp-figure { 81 | display: none; } 82 | 83 | .mfp-hide { 84 | display: none !important; } 85 | 86 | .mfp-preloader { 87 | color: #CCC; 88 | position: absolute; 89 | top: 50%; 90 | width: auto; 91 | text-align: center; 92 | margin-top: -0.8em; 93 | left: 8px; 94 | right: 8px; 95 | z-index: 1044; } 96 | .mfp-preloader a { 97 | color: #CCC; } 98 | .mfp-preloader a:hover { 99 | color: #FFF; } 100 | 101 | .mfp-s-ready .mfp-preloader { 102 | display: none; } 103 | 104 | .mfp-s-error .mfp-content { 105 | display: none; } 106 | 107 | button.mfp-close, 108 | button.mfp-arrow { 109 | overflow: visible; 110 | cursor: pointer; 111 | background: transparent; 112 | border: 0; 113 | -webkit-appearance: none; 114 | display: block; 115 | outline: none; 116 | padding: 0; 117 | z-index: 1046; 118 | box-shadow: none; 119 | touch-action: manipulation; } 120 | 121 | button::-moz-focus-inner { 122 | padding: 0; 123 | border: 0; } 124 | 125 | .mfp-close { 126 | width: 44px; 127 | height: 44px; 128 | line-height: 44px; 129 | position: absolute; 130 | right: 0; 131 | top: 0; 132 | text-decoration: none; 133 | text-align: center; 134 | opacity: 0.65; 135 | padding: 0 0 18px 10px; 136 | color: #FFF; 137 | font-style: normal; 138 | font-size: 28px; 139 | font-family: Arial, Baskerville, monospace; } 140 | .mfp-close:hover, 141 | .mfp-close:focus { 142 | opacity: 1; } 143 | .mfp-close:active { 144 | top: 1px; } 145 | 146 | .mfp-close-btn-in .mfp-close { 147 | color: #333; } 148 | 149 | .mfp-image-holder .mfp-close, 150 | .mfp-iframe-holder .mfp-close { 151 | color: #FFF; 152 | right: -6px; 153 | text-align: right; 154 | padding-right: 6px; 155 | width: 100%; } 156 | 157 | .mfp-counter { 158 | position: absolute; 159 | top: 0; 160 | right: 0; 161 | color: #CCC; 162 | font-size: 12px; 163 | line-height: 18px; 164 | white-space: nowrap; } 165 | 166 | .mfp-arrow { 167 | position: absolute; 168 | opacity: 0.65; 169 | margin: 0; 170 | top: 50%; 171 | margin-top: -55px; 172 | padding: 0; 173 | width: 90px; 174 | height: 110px; 175 | -webkit-tap-highlight-color: transparent; } 176 | .mfp-arrow:active { 177 | margin-top: -54px; } 178 | .mfp-arrow:hover, 179 | .mfp-arrow:focus { 180 | opacity: 1; } 181 | .mfp-arrow:before, 182 | .mfp-arrow:after { 183 | content: ''; 184 | display: block; 185 | width: 0; 186 | height: 0; 187 | position: absolute; 188 | left: 0; 189 | top: 0; 190 | margin-top: 35px; 191 | margin-left: 35px; 192 | border: medium inset transparent; } 193 | .mfp-arrow:after { 194 | border-top-width: 13px; 195 | border-bottom-width: 13px; 196 | top: 8px; } 197 | .mfp-arrow:before { 198 | border-top-width: 21px; 199 | border-bottom-width: 21px; 200 | opacity: 0.7; } 201 | 202 | .mfp-arrow-left { 203 | left: 0; } 204 | .mfp-arrow-left:after { 205 | border-right: 17px solid #FFF; 206 | margin-left: 31px; } 207 | .mfp-arrow-left:before 
{ 208 | margin-left: 25px; 209 | border-right: 27px solid #3F3F3F; } 210 | 211 | .mfp-arrow-right { 212 | right: 0; } 213 | .mfp-arrow-right:after { 214 | border-left: 17px solid #FFF; 215 | margin-left: 39px; } 216 | .mfp-arrow-right:before { 217 | border-left: 27px solid #3F3F3F; } 218 | 219 | .mfp-iframe-holder { 220 | padding-top: 40px; 221 | padding-bottom: 40px; } 222 | .mfp-iframe-holder .mfp-content { 223 | line-height: 0; 224 | width: 100%; 225 | max-width: 900px; } 226 | .mfp-iframe-holder .mfp-close { 227 | top: -40px; } 228 | 229 | .mfp-iframe-scaler { 230 | width: 100%; 231 | height: 0; 232 | overflow: hidden; 233 | padding-top: 56.25%; } 234 | .mfp-iframe-scaler iframe { 235 | position: absolute; 236 | display: block; 237 | top: 0; 238 | left: 0; 239 | width: 100%; 240 | height: 100%; 241 | box-shadow: 0 0 8px rgba(0, 0, 0, 0.6); 242 | background: #000; } 243 | 244 | /* Main image in popup */ 245 | img.mfp-img { 246 | width: auto; 247 | max-width: 100%; 248 | height: auto; 249 | display: block; 250 | line-height: 0; 251 | box-sizing: border-box; 252 | padding: 40px 0 40px; 253 | margin: 0 auto; } 254 | 255 | /* The shadow behind the image */ 256 | .mfp-figure { 257 | line-height: 0; } 258 | .mfp-figure:after { 259 | content: ''; 260 | position: absolute; 261 | left: 0; 262 | top: 40px; 263 | bottom: 40px; 264 | display: block; 265 | right: 0; 266 | width: auto; 267 | height: auto; 268 | z-index: -1; 269 | box-shadow: 0 0 8px rgba(0, 0, 0, 0.6); 270 | background: #444; } 271 | .mfp-figure small { 272 | color: #BDBDBD; 273 | display: block; 274 | font-size: 12px; 275 | line-height: 14px; } 276 | .mfp-figure figure { 277 | margin: 0; } 278 | 279 | .mfp-bottom-bar { 280 | margin-top: -36px; 281 | position: absolute; 282 | top: 100%; 283 | left: 0; 284 | width: 100%; 285 | cursor: auto; } 286 | 287 | .mfp-title { 288 | text-align: left; 289 | line-height: 18px; 290 | color: #F3F3F3; 291 | word-wrap: break-word; 292 | padding-right: 36px; } 293 | 294 | .mfp-image-holder .mfp-content { 295 | max-width: 100%; } 296 | 297 | .mfp-gallery .mfp-image-holder .mfp-figure { 298 | cursor: pointer; } 299 | 300 | @media screen and (max-width: 800px) and (orientation: landscape), screen and (max-height: 300px) { 301 | /** 302 | * Remove all paddings around the image on small screen 303 | */ 304 | .mfp-img-mobile .mfp-image-holder { 305 | padding-left: 0; 306 | padding-right: 0; } 307 | .mfp-img-mobile img.mfp-img { 308 | padding: 0; } 309 | .mfp-img-mobile .mfp-figure:after { 310 | top: 0; 311 | bottom: 0; } 312 | .mfp-img-mobile .mfp-figure small { 313 | display: inline; 314 | margin-left: 5px; } 315 | .mfp-img-mobile .mfp-bottom-bar { 316 | background: rgba(0, 0, 0, 0.6); 317 | bottom: 0; 318 | margin: 0; 319 | top: auto; 320 | padding: 3px 5px; 321 | position: fixed; 322 | box-sizing: border-box; } 323 | .mfp-img-mobile .mfp-bottom-bar:empty { 324 | padding: 0; } 325 | .mfp-img-mobile .mfp-counter { 326 | right: 5px; 327 | top: 3px; } 328 | .mfp-img-mobile .mfp-close { 329 | top: 0; 330 | right: 0; 331 | width: 35px; 332 | height: 35px; 333 | line-height: 35px; 334 | background: rgba(0, 0, 0, 0.6); 335 | position: fixed; 336 | text-align: center; 337 | padding: 0; } } 338 | 339 | @media all and (max-width: 900px) { 340 | .mfp-arrow { 341 | -webkit-transform: scale(0.75); 342 | transform: scale(0.75); } 343 | .mfp-arrow-left { 344 | -webkit-transform-origin: 0; 345 | transform-origin: 0; } 346 | .mfp-arrow-right { 347 | -webkit-transform-origin: 100%; 348 | transform-origin: 100%; } 349 | 
.mfp-container { 350 | padding-left: 6px; 351 | padding-right: 6px; } } 352 | -------------------------------------------------------------------------------- /css/nivo-lightbox.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * Nivo Lightbox v1.3.1 3 | * http://dev7studios.com/nivo-lightbox 4 | * 5 | * Copyright 2013, Dev7studios 6 | * Free to use and abuse under the MIT license. 7 | * http://www.opensource.org/licenses/mit-license.php 8 | */ 9 | 10 | .nivo-lightbox-overlay { 11 | position: fixed; 12 | top: 0; 13 | left: 0; 14 | z-index: 99999; 15 | width: 100%; 16 | height: 100%; 17 | overflow: hidden; 18 | visibility: hidden; 19 | opacity: 0; 20 | background: rgba(0, 0, 0, 0.8); 21 | -webkit-box-sizing: border-box; 22 | -moz-box-sizing: border-box; 23 | box-sizing: border-box; 24 | } 25 | .nivo-lightbox-overlay.nivo-lightbox-open { 26 | visibility: visible; 27 | opacity: 1; 28 | } 29 | .nivo-lightbox-wrap { 30 | position: absolute; 31 | top: 10%; 32 | bottom: 10%; 33 | left: 10%; 34 | right: 10%; 35 | } 36 | .nivo-lightbox-content { 37 | width: 100%; 38 | height: 100%; 39 | } 40 | .nivo-lightbox-title-wrap { 41 | position: absolute; 42 | bottom: 0; 43 | left: 0; 44 | width: 100%; 45 | z-index: 99999; 46 | text-align: center; 47 | } 48 | .nivo-lightbox-nav { display: none; } 49 | .nivo-lightbox-prev { 50 | position: absolute; 51 | top: 50%; 52 | left: 0; 53 | } 54 | .nivo-lightbox-next { 55 | position: absolute; 56 | top: 50%; 57 | right: 0; 58 | } 59 | .nivo-lightbox-close { 60 | position: absolute; 61 | top: 2%; 62 | right: 2%; 63 | } 64 | 65 | .nivo-lightbox-image { text-align: center; } 66 | .nivo-lightbox-image img { 67 | max-width: 100%; 68 | max-height: 100%; 69 | width: auto; 70 | height: auto; 71 | vertical-align: middle; 72 | } 73 | .nivo-lightbox-content iframe { 74 | width: 100%; 75 | height: 100%; 76 | } 77 | .nivo-lightbox-inline, 78 | .nivo-lightbox-ajax { 79 | max-height: 100%; 80 | overflow: auto; 81 | -webkit-box-sizing: border-box; 82 | -moz-box-sizing: border-box; 83 | box-sizing: border-box; 84 | /* https://bugzilla.mozilla.org/show_bug.cgi?id=308801 */ 85 | } 86 | .nivo-lightbox-error { 87 | display: table; 88 | text-align: center; 89 | width: 100%; 90 | height: 100%; 91 | color: #fff; 92 | text-shadow: 0 1px 1px #000; 93 | } 94 | .nivo-lightbox-error p { 95 | display: table-cell; 96 | vertical-align: middle; 97 | } 98 | 99 | /* Effects 100 | **********************************************/ 101 | .nivo-lightbox-notouch .nivo-lightbox-effect-fade, 102 | .nivo-lightbox-notouch .nivo-lightbox-effect-fadeScale, 103 | .nivo-lightbox-notouch .nivo-lightbox-effect-slideLeft, 104 | .nivo-lightbox-notouch .nivo-lightbox-effect-slideRight, 105 | .nivo-lightbox-notouch .nivo-lightbox-effect-slideUp, 106 | .nivo-lightbox-notouch .nivo-lightbox-effect-slideDown, 107 | .nivo-lightbox-notouch .nivo-lightbox-effect-fall { 108 | -webkit-transition: all 0.2s ease-in-out; 109 | -moz-transition: all 0.2s ease-in-out; 110 | -ms-transition: all 0.2s ease-in-out; 111 | -o-transition: all 0.2s ease-in-out; 112 | transition: all 0.2s ease-in-out; 113 | } 114 | 115 | /* fadeScale */ 116 | .nivo-lightbox-effect-fadeScale .nivo-lightbox-wrap { 117 | -webkit-transition: all 0.3s; 118 | -moz-transition: all 0.3s; 119 | -ms-transition: all 0.3s; 120 | -o-transition: all 0.3s; 121 | transition: all 0.3s; 122 | -webkit-transform: scale(0.7); 123 | -moz-transform: scale(0.7); 124 | -ms-transform: scale(0.7); 125 | transform: scale(0.7); 126 | } 127 | 
.nivo-lightbox-effect-fadeScale.nivo-lightbox-open .nivo-lightbox-wrap { 128 | -webkit-transform: scale(1); 129 | -moz-transform: scale(1); 130 | -ms-transform: scale(1); 131 | transform: scale(1); 132 | } 133 | 134 | /* slideLeft / slideRight / slideUp / slideDown */ 135 | .nivo-lightbox-effect-slideLeft .nivo-lightbox-wrap, 136 | .nivo-lightbox-effect-slideRight .nivo-lightbox-wrap, 137 | .nivo-lightbox-effect-slideUp .nivo-lightbox-wrap, 138 | .nivo-lightbox-effect-slideDown .nivo-lightbox-wrap { 139 | -webkit-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); 140 | -moz-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); 141 | -ms-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); 142 | -o-transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); 143 | transition: all 0.3s cubic-bezier(0.25, 0.5, 0.5, 0.9); 144 | } 145 | .nivo-lightbox-effect-slideLeft .nivo-lightbox-wrap { 146 | -webkit-transform: translateX(-10%); 147 | -moz-transform: translateX(-10%); 148 | -ms-transform: translateX(-10%); 149 | transform: translateX(-10%); 150 | } 151 | .nivo-lightbox-effect-slideRight .nivo-lightbox-wrap { 152 | -webkit-transform: translateX(10%); 153 | -moz-transform: translateX(10%); 154 | -ms-transform: translateX(10%); 155 | transform: translateX(10%); 156 | } 157 | .nivo-lightbox-effect-slideLeft.nivo-lightbox-open .nivo-lightbox-wrap, 158 | .nivo-lightbox-effect-slideRight.nivo-lightbox-open .nivo-lightbox-wrap { 159 | -webkit-transform: translateX(0); 160 | -moz-transform: translateX(0); 161 | -ms-transform: translateX(0); 162 | transform: translateX(0); 163 | } 164 | .nivo-lightbox-effect-slideDown .nivo-lightbox-wrap { 165 | -webkit-transform: translateY(-10%); 166 | -moz-transform: translateY(-10%); 167 | -ms-transform: translateY(-10%); 168 | transform: translateY(-10%); 169 | } 170 | .nivo-lightbox-effect-slideUp .nivo-lightbox-wrap { 171 | -webkit-transform: translateY(10%); 172 | -moz-transform: translateY(10%); 173 | -ms-transform: translateY(10%); 174 | transform: translateY(10%); 175 | } 176 | .nivo-lightbox-effect-slideUp.nivo-lightbox-open .nivo-lightbox-wrap, 177 | .nivo-lightbox-effect-slideDown.nivo-lightbox-open .nivo-lightbox-wrap { 178 | -webkit-transform: translateY(0); 179 | -moz-transform: translateY(0); 180 | -ms-transform: translateY(0); 181 | transform: translateY(0); 182 | } 183 | 184 | /* fall */ 185 | .nivo-lightbox-body-effect-fall .nivo-lightbox-effect-fall { 186 | -webkit-perspective: 1000px; 187 | -moz-perspective: 1000px; 188 | perspective: 1000px; 189 | } 190 | .nivo-lightbox-effect-fall .nivo-lightbox-wrap { 191 | -webkit-transition: all 0.3s ease-out; 192 | -moz-transition: all 0.3s ease-out; 193 | -ms-transition: all 0.3s ease-out; 194 | -o-transition: all 0.3s ease-out; 195 | transition: all 0.3s ease-out; 196 | -webkit-transform: translateZ(300px); 197 | -moz-transform: translateZ(300px); 198 | -ms-transform: translateZ(300px); 199 | transform: translateZ(300px); 200 | } 201 | .nivo-lightbox-effect-fall.nivo-lightbox-open .nivo-lightbox-wrap { 202 | -webkit-transform: translateZ(0); 203 | -moz-transform: translateZ(0); 204 | -ms-transform: translateZ(0); 205 | transform: translateZ(0); 206 | } 207 | .icon-close{ 208 | font-size: 24px; 209 | } 210 | -------------------------------------------------------------------------------- /css/owl.carousel.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Core Owl Carousel CSS File 3 | * v1.3.3 4 | */ 5 | 6 | /* clearfix */ 7 | .owl-carousel 
.owl-wrapper:after { 8 | content: "."; 9 | display: block; 10 | clear: both; 11 | visibility: hidden; 12 | line-height: 0; 13 | height: 0; 14 | } 15 | /* display none until init */ 16 | .owl-carousel{ 17 | display: none; 18 | position: relative; 19 | width: 100%; 20 | -ms-touch-action: pan-y; 21 | } 22 | .owl-carousel .owl-wrapper{ 23 | display: none; 24 | position: relative; 25 | -webkit-transform: translate3d(0px, 0px, 0px); 26 | } 27 | .owl-carousel .owl-wrapper-outer{ 28 | overflow: hidden; 29 | position: relative; 30 | width: 100%; 31 | } 32 | .owl-carousel .owl-wrapper-outer.autoHeight{ 33 | -webkit-transition: height 500ms ease-in-out; 34 | -moz-transition: height 500ms ease-in-out; 35 | -ms-transition: height 500ms ease-in-out; 36 | -o-transition: height 500ms ease-in-out; 37 | transition: height 500ms ease-in-out; 38 | } 39 | 40 | .owl-carousel .owl-item{ 41 | float: left; 42 | } 43 | .owl-controls .owl-page, 44 | .owl-controls .owl-buttons div{ 45 | cursor: pointer; 46 | } 47 | .owl-controls { 48 | -webkit-user-select: none; 49 | -khtml-user-select: none; 50 | -moz-user-select: none; 51 | -ms-user-select: none; 52 | user-select: none; 53 | -webkit-tap-highlight-color: rgba(0, 0, 0, 0); 54 | } 55 | 56 | /* mouse grab icon */ 57 | .grabbing { 58 | cursor:url(grabbing.png) 8 8, move; 59 | } 60 | 61 | /* fix */ 62 | .owl-carousel .owl-wrapper, 63 | .owl-carousel .owl-item{ 64 | -webkit-backface-visibility: hidden; 65 | -moz-backface-visibility: hidden; 66 | -ms-backface-visibility: hidden; 67 | -webkit-transform: translate3d(0,0,0); 68 | -moz-transform: translate3d(0,0,0); 69 | -ms-transform: translate3d(0,0,0); 70 | } 71 | 72 | /* Feel free to change duration */ 73 | .animated { 74 | -webkit-animation-duration : 1000 ms ; 75 | animation-duration : 1000 ms ; 76 | -webkit-animation-fill-mode : both ; 77 | animation-fill-mode : both ; 78 | } 79 | /* .owl-animated-out - only for current item */ 80 | /* This is very important class. Use z-index if you want move Out item above In item */ 81 | .owl-animated-out { 82 | z-index : 1 83 | } 84 | /* .owl-animated-in - only for upcoming item 85 | /* This is very important class. 
Use z-index if you want move In item above Out item */ 86 | .owl-animated-in { 87 | z-index : 0 88 | } 89 | /* .fadeOut is style taken from Animation.css and this is how it looks in owl.carousel.css: */ 90 | .fadeOut { 91 | -webkit-animation-name : fadeOut ; 92 | animation-name : fadeOut ; 93 | } 94 | @-webkit-keyframes fadeOut { 95 | 0% { 96 | opacity : 1 ; 97 | } 98 | 100% { 99 | opacity : 0 ; 100 | } 101 | } 102 | @keyframes fadeOut { 103 | 0% { 104 | opacity : 1 ; 105 | } 106 | 100% { 107 | opacity : 0 ; 108 | } 109 | } -------------------------------------------------------------------------------- /css/owl.theme.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Owl Carousel Owl Demo Theme 3 | * v1.3.3 4 | */ 5 | 6 | .owl-theme .owl-controls{ 7 | margin-top: 10px; 8 | text-align: center; 9 | } 10 | 11 | /* Styling Next and Prev buttons */ 12 | 13 | .owl-theme .owl-controls .owl-buttons div{ 14 | color: #FFF; 15 | display: inline-block; 16 | zoom: 1; 17 | *display: inline;/*IE7 life-saver */ 18 | margin: 5px; 19 | padding: 3px 10px; 20 | font-size: 12px; 21 | -webkit-border-radius: 30px; 22 | -moz-border-radius: 30px; 23 | border-radius: 30px; 24 | background: #869791; 25 | filter: Alpha(Opacity=50);/*IE7 fix*/ 26 | opacity: 0.5; 27 | } 28 | /* Clickable class fix problem with hover on touch devices */ 29 | /* Use it for non-touch hover action */ 30 | .owl-theme .owl-controls.clickable .owl-buttons div:hover{ 31 | filter: Alpha(Opacity=100);/*IE7 fix*/ 32 | opacity: 1; 33 | text-decoration: none; 34 | } 35 | 36 | /* Styling Pagination*/ 37 | 38 | .owl-theme .owl-controls .owl-page{ 39 | display: inline-block; 40 | zoom: 1; 41 | *display: inline;/*IE7 life-saver */ 42 | } 43 | .owl-theme .owl-controls .owl-page span{ 44 | display: block; 45 | width: 12px; 46 | height: 12px; 47 | margin: 5px 7px; 48 | filter: Alpha(Opacity=50);/*IE7 fix*/ 49 | opacity: 0.5; 50 | -webkit-border-radius: 20px; 51 | -moz-border-radius: 20px; 52 | border-radius: 20px; 53 | background: #869791; 54 | } 55 | 56 | .owl-theme .owl-controls .owl-page.active span, 57 | .owl-theme .owl-controls.clickable .owl-page:hover span{ 58 | filter: Alpha(Opacity=100);/*IE7 fix*/ 59 | opacity: 1; 60 | } 61 | 62 | /* If PaginationNumbers is true */ 63 | 64 | .owl-theme .owl-controls .owl-page span.owl-numbers{ 65 | height: auto; 66 | width: auto; 67 | color: #FFF; 68 | padding: 2px 10px; 69 | font-size: 12px; 70 | -webkit-border-radius: 30px; 71 | -moz-border-radius: 30px; 72 | border-radius: 30px; 73 | } 74 | 75 | /* preloading images */ 76 | .owl-item.loading{ 77 | min-height: 150px; 78 | background: url(AjaxLoader.gif) no-repeat center center 79 | } -------------------------------------------------------------------------------- /css/responsive.css: -------------------------------------------------------------------------------- 1 | /* only small desktops */ 2 | /* tablets */ 3 | /* only small tablets */ 4 | @media (min-width: 768px) and (max-width: 991px) { 5 | #hero-area .contents h1 { 6 | font-size: 28px; 7 | } 8 | .item-boxes h4 { 9 | font-size: 18px; 10 | } 11 | #features .show-box img { 12 | display: none; 13 | } 14 | .pricing-table .pricing-details ul li { 15 | font-size: 12px; 16 | } 17 | .counters .facts-item .fact-count h4 { 18 | font-size: 14px; 19 | } 20 | .single-team { 21 | margin-bottom: 30px; 22 | } 23 | .meta-tags span { 24 | margin-right: 3px; 25 | } 26 | .meta-tags span i { 27 | margin-right: 0px; 28 | } 29 | #blog .blog-item-text h3 { 30 | font-size: 14px; 
31 | line-height: 20px; 32 | } 33 | } 34 | 35 | /* mobile or only mobile */ 36 | @media (max-width: 767px) { 37 | .section-header .section-title { 38 | font-size: 20px; 39 | } 40 | #hero-area .contents { 41 | padding: 100px 0 80px; 42 | } 43 | #hero-area .contents h1 { 44 | font-size: 18px; 45 | line-height: 30px; 46 | } 47 | #features .show-box img { 48 | display: none; 49 | } 50 | #features .box-item .text h4 { 51 | font-size: 14px; 52 | } 53 | .controls { 54 | margin: 0px; 55 | } 56 | .controls .btn { 57 | font-size: 13px; 58 | padding: 7px 7px; 59 | } 60 | .video-promo .video-promo-content h2 { 61 | font-size: 22px; 62 | } 63 | .pricing-table { 64 | margin-bottom: 15px; 65 | } 66 | .counters .facts-item { 67 | margin-bottom: 30px; 68 | } 69 | .single-team { 70 | margin-bottom: 15px; 71 | } 72 | .contact-us { 73 | margin-bottom: 30px; 74 | } 75 | .contact-us h3 { 76 | font-size: 28px; 77 | } 78 | .footer-links li a { 79 | margin-right: 10px; 80 | } 81 | .copyright { 82 | float: left; 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /css/slicknav.css: -------------------------------------------------------------------------------- 1 | /*! 2 | * SlickNav Responsive Mobile Menu v1.0.3 3 | * (c) 2015 Josh Cope 4 | * licensed under MIT 5 | */ 6 | .slicknav_btn { 7 | cursor: pointer; 8 | display: block; 9 | float: right; 10 | height: 35px; 11 | line-height: 50px; 12 | padding: 10px 0 9px; 13 | position: relative; 14 | vertical-align: middle; 15 | width: 44px; 16 | } 17 | .slicknav_btn .slicknav_icon-bar + .slicknav_icon-bar { 18 | margin-top: 0.188em; } 19 | 20 | .slicknav_menu { 21 | *zoom: 1; } 22 | .slicknav_menu .slicknav_menutxt { 23 | display: block; 24 | line-height: 1.188em; 25 | float: left; } 26 | .slicknav_menu .slicknav_icon { 27 | margin: 0.188em 0 0 0.438em; } 28 | .slicknav_menu .slicknav_no-text { 29 | margin: 0; } 30 | .slicknav_menu .slicknav_icon-bar { 31 | display: block; 32 | width: 1.125em; 33 | height: 0.125em; 34 | margin: 0 auto; 35 | -webkit-border-radius: 1px; 36 | -moz-border-radius: 1px; 37 | border-radius: 1px; 38 | -webkit-transition: all 0.2s linear; 39 | -moz-transition: all 0.2s linear; 40 | -o-transition: all 0.2s linear; 41 | transition: all 0.2s linear; 42 | } 43 | .slicknav_menu:before { 44 | content: " "; 45 | display: table; } 46 | .slicknav_menu:after { 47 | content: " "; 48 | display: table; 49 | clear: both; } 50 | 51 | .slicknav_nav { 52 | clear: both; } 53 | .slicknav_nav ul { 54 | display: block; } 55 | .slicknav_nav li { 56 | display: block; 57 | line-height: 30px; 58 | } 59 | .slicknav_nav li:not(:first-child){ 60 | } 61 | .slicknav_nav .slicknav_arrow { 62 | background: #ededed none repeat scroll 0 0; 63 | float: right; 64 | text-align: center; 65 | width: 35px; 66 | color: #666; 67 | } 68 | .slicknav_nav .slicknav_item { 69 | cursor: pointer; } 70 | .slicknav_nav .slicknav_item a { 71 | display: inline; } 72 | .slicknav_nav .slicknav_row { 73 | display: block; } 74 | .slicknav_nav a { 75 | display: block; } 76 | .slicknav_nav .slicknav_parent-link a { 77 | display: inline; } 78 | 79 | .slicknav_brand { 80 | float: left; } 81 | 82 | .slicknav_menu { 83 | font-size: 16px; 84 | box-sizing: border-box; 85 | } 86 | .slicknav_menu ul{ 87 | background: #fff; 88 | } 89 | .slicknav_menu * { 90 | box-sizing: border-box; } 91 | .slicknav_menu .slicknav_menutxt { 92 | color: #fff; 93 | font-weight: bold; 94 | text-shadow: 0 1px 3px #000; } 95 | 96 | .slicknav_btn { 97 | border: 1px solid; 98 | 
border-radius: 0; 99 | margin: 16px 15px; 100 | -webkit-transition: all 0.2s linear; 101 | -moz-transition: all 0.2s linear; 102 | -o-transition: all 0.2s linear; 103 | transition: all 0.2s linear; 104 | } 105 | 106 | .slicknav_nav { 107 | color: #fff; 108 | font-size: 0.875em; 109 | list-style: outside none none; 110 | margin: 0; 111 | max-height: 350px; 112 | padding: 0; 113 | width: 100%; 114 | } 115 | .slicknav_nav ul { 116 | list-style: none; 117 | overflow: hidden; 118 | padding: 0; 119 | margin: 0 0 0 20px; } 120 | .slicknav_nav .slicknav_row { 121 | padding: 5px 10px; 122 | margin: 2px 5px; } 123 | .slicknav_nav a { 124 | padding: 5px 15px; 125 | margin: 2px 5px; 126 | text-decoration: none; 127 | color: #666; } 128 | .slicknav_nav .slicknav_txtnode { 129 | margin-left: 15px; } 130 | .slicknav_nav .slicknav_item a { 131 | padding: 0; 132 | margin: 0; } 133 | .slicknav_nav .slicknav_parent-link a { 134 | padding: 0; 135 | margin: 0; } 136 | 137 | .slicknav_brand { 138 | color: #fff; 139 | font-size: 18px; 140 | line-height: 30px; 141 | padding: 7px 12px; 142 | height: 44px; } 143 | -------------------------------------------------------------------------------- /evaluation/eval_mmact_trimmed.py: -------------------------------------------------------------------------------- 1 | """ 2 | MMAct Trimmed Video Action Recognition evaluation code. 3 | This code is based on ActivityNet Trimmed Activity Recognition Task evaluation code. 4 | """ 5 | import numpy as np 6 | import os 7 | import pandas as pd 8 | import json 9 | import argparse 10 | from joblib import Parallel, delayed 11 | 12 | def interpolated_prec_rec(prec, rec): 13 | """Interpolated AP - VOCdevkit from VOC 2011. 14 | """ 15 | mprec = np.hstack([[0], prec, [0]]) 16 | mrec = np.hstack([[0], rec, [1]]) 17 | for i in range(len(mprec) - 1)[::-1]: 18 | mprec[i] = max(mprec[i], mprec[i + 1]) 19 | idx = np.where(mrec[1::] != mrec[0:-1])[0] + 1 20 | ap = np.sum((mrec[idx] - mrec[idx - 1]) * mprec[idx]) 21 | return ap 22 | 23 | ################################################################################ 24 | # Metrics 25 | ################################################################################ 26 | 27 | def compute_average_precision_classification(ground_truth, prediction): 28 | """Compute average precision (classification task) between ground truth and 29 | predictions data frames. If multiple predictions occur for the same 30 | predicted segment, only the one with the highest score is matched as 31 | true positive. This code is greatly inspired by Pascal VOC devkit. 32 | Parameters 33 | ---------- 34 | ground_truth : df 35 | Data frame containing the ground truth instances. 36 | Required fields: ['video-id'] 37 | prediction : df 38 | Data frame containing the prediction instances. 39 | Required fields: ['video-id', 'score'] 40 | Outputs 41 | ------- 42 | ap : float 43 | Average precision score. 44 | """ 45 | npos = float(len(ground_truth)) 46 | lock_gt = np.ones(len(ground_truth)) * -1 47 | # Sort predictions by decreasing score order. 48 | sort_idx = prediction['score'].values.argsort()[::-1] 49 | prediction = prediction.loc[sort_idx].reset_index(drop=True) 50 | 51 | # Initialize true positive and false positive vectors. 52 | tp = np.zeros(len(prediction)) 53 | fp = np.zeros(len(prediction)) 54 | 55 | # Assigning true positives to ground truth instances.
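    # Predictions are visited in descending score order; each ground truth
    # instance can be claimed at most once (tracked via lock_gt), so any further
    # prediction for an already-matched video counts as a false positive.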
56 | for idx in range(len(prediction)): 57 | this_pred = prediction.loc[idx] 58 | gt_idx = ground_truth['video-id'] == this_pred['video-id'] 59 | # Check if there is at least one ground truth in the video associated. 60 | if not gt_idx.any(): 61 | fp[idx] = 1 62 | continue 63 | this_gt = ground_truth.loc[gt_idx].reset_index() 64 | if lock_gt[this_gt['index']] >= 0: 65 | fp[idx] = 1 66 | else: 67 | tp[idx] = 1 68 | lock_gt[this_gt['index']] = idx 69 | 70 | # Computing prec-rec 71 | tp = np.cumsum(tp).astype(float) 72 | fp = np.cumsum(fp).astype(float) 73 | rec = tp / npos 74 | prec = tp / (tp + fp) 75 | return interpolated_prec_rec(prec, rec) 76 | 77 | def import_ground_truth(ground_truth_filename): 78 | """Reads ground truth file, checks if it is well formatted, and returns 79 | the ground truth instances and the activity classes. 80 | Parameters 81 | ---------- 82 | ground_truth_filename : str 83 | Full path to the ground truth json file. 84 | Outputs 85 | ------- 86 | ground_truth : df 87 | Data frame containing the ground truth instances. 88 | activity_index : dict 89 | Dictionary containing class index. 90 | """ 91 | with open(ground_truth_filename, 'r') as fobj: 92 | data = json.load(fobj) 93 | 94 | # Initialize data frame 95 | activity_index, cidx = {}, 0 96 | video_lst, label_lst = [], [] 97 | 98 | for videoid, v in data['annotations'].items(): 99 | for ann in v: 100 | if ann['label'] not in activity_index: 101 | activity_index[ann['label']] = cidx 102 | cidx += 1 103 | video_lst.append(videoid) 104 | label_lst.append(activity_index[ann['label']]) 105 | ground_truth = pd.DataFrame({'video-id': video_lst, 106 | 'label': label_lst}) 107 | ground_truth = ground_truth.drop_duplicates().reset_index(drop=True) 108 | return ground_truth, activity_index 109 | 110 | def import_prediction(ground_truth_filename, prediction_filename): 111 | """Reads prediction file, checks if it is well formatted, and returns 112 | the prediction instances. 113 | Parameters 114 | ---------- 115 | prediction_filename : str 116 | Full path to the prediction json file. 117 | Outputs 118 | ------- 119 | prediction : df 120 | Data frame containing the prediction instances. 121 | """ 122 | ground_truth, activity_index = import_ground_truth(ground_truth_filename) 123 | 124 | with open(prediction_filename, 'r') as fobj: 125 | data = json.load(fobj) 126 | 127 | # Initialize data frame 128 | video_lst, label_lst, score_lst = [], [], [] 129 | for videoid, v in data['results'].items(): 130 | for result in v: 131 | label = activity_index[result['label']] 132 | video_lst.append(videoid) 133 | label_lst.append(label) 134 | score_lst.append(result['score']) 135 | prediction = pd.DataFrame({'video-id': video_lst, 136 | 'label': label_lst, 137 | 'score': score_lst}) 138 | return prediction 139 | 140 | def wrapper_compute_average_precision(ground_truth,prediction,activity_index): 141 | """Computes average precision for each class in the subset.
142 | """ 143 | ap = np.zeros(len(activity_index.items())) 144 | for activity, cidx in activity_index.items(): 145 | gt_idx = ground_truth['label'] == cidx 146 | pred_idx = prediction['label'] == cidx 147 | ap[cidx] = compute_average_precision_classification( 148 | ground_truth.loc[gt_idx].reset_index(drop=True), 149 | prediction.loc[pred_idx].reset_index(drop=True)) 150 | return ap 151 | 152 | def main(): 153 | parser = argparse.ArgumentParser(description='Take input of ground truth file and submission file.') 154 | parser.add_argument('--gt', metavar='gt', type=str, help='ground truth json file path') 155 | parser.add_argument('--pred', metavar='pred', type=str, help='prediction file path') 156 | args = parser.parse_args() 157 | 158 | ground_truth, activity_index = import_ground_truth(args.gt) 159 | prediction = import_prediction(args.gt,args.pred) 160 | 161 | ap = wrapper_compute_average_precision(ground_truth,prediction,activity_index) 162 | 163 | print ('[RESULTS] Performance on MMAct trimmed video ' 164 | 'action recognition task.') 165 | print('\tMean Average Precision: {}'.format(ap.mean())) 166 | 167 | if __name__ == '__main__': 168 | main() 169 | -------------------------------------------------------------------------------- /evaluation/eval_mmact_untrimmed.py: -------------------------------------------------------------------------------- 1 | """ 2 | MMAct Temporal Localization evaluation code. 3 | This code is based on ActivityNet Temporal Action Localization evaluation code. 4 | """ 5 | import numpy as np 6 | import os 7 | import pandas as pd 8 | import json 9 | import argparse 10 | from joblib import Parallel, delayed 11 | 12 | def interpolated_prec_rec(prec, rec): 13 | """Interpolated AP - VOCdevkit from VOC 2011. 14 | """ 15 | mprec = np.hstack([[0], prec, [0]]) 16 | mrec = np.hstack([[0], rec, [1]]) 17 | for i in range(len(mprec) - 1)[::-1]: 18 | mprec[i] = max(mprec[i], mprec[i + 1]) 19 | idx = np.where(mrec[1::] != mrec[0:-1])[0] + 1 20 | ap = np.sum((mrec[idx] - mrec[idx - 1]) * mprec[idx]) 21 | return ap 22 | 23 | def segment_iou(target_segment, candidate_segments): 24 | """Compute the temporal intersection over union between a 25 | target segment and all the test segments. 26 | Parameters 27 | ---------- 28 | target_segment : 1d array 29 | Temporal target segment containing [starting, ending] times. 30 | candidate_segments : 2d array 31 | Temporal candidate segments containing N x [starting, ending] times. 32 | Outputs 33 | ------- 34 | tiou : 1d array 35 | Temporal intersection over union score of the N's candidate segments. 36 | """ 37 | tt1 = np.maximum(target_segment[0], candidate_segments[:, 0]) 38 | tt2 = np.minimum(target_segment[1], candidate_segments[:, 1]) 39 | # Intersection including Non-negative overlap score. 40 | segments_intersection = (tt2 - tt1).clip(0) 41 | # Segment union. 42 | segments_union = (candidate_segments[:, 1] - candidate_segments[:, 0]) \ 43 | + (target_segment[1] - target_segment[0]) - segments_intersection 44 | # Compute overlap as the ratio of the intersection 45 | # over union of two segments. 46 | tIoU = segments_intersection.astype(float) / segments_union 47 | return tIoU 48 | 49 | def import_ground_truth(ground_truth_filename): 50 | """Reads ground truth file, checks if it is well formatted, and returns 51 | the ground truth instances and the activity classes. 52 | Parameters 53 | ---------- 54 | ground_truth_filename : str 55 | Full path to the ground truth json file. 
56 | Outputs 57 | ------- 58 | ground_truth : df 59 | Data frame containing the ground truth instances. 60 | activity_index : dict 61 | Dictionary containing class index. 62 | """ 63 | with open(ground_truth_filename, 'r') as fobj: 64 | data = json.load(fobj) 65 | # Checking format 66 | #if not all([field in data.keys() for field in self.gt_fields]): 67 | # raise IOError('Please input a valid ground truth file.') 68 | 69 | # Read ground truth data. 70 | activity_index, cidx = {}, 0 71 | video_lst, t_start_lst, t_end_lst, label_lst = [], [], [], [] 72 | annotations = data['annotations'] 73 | videos = annotations.keys() 74 | for video_id in videos: 75 | for ann in annotations[video_id]: 76 | if ann['label'] not in activity_index: 77 | activity_index[ann['label']] = cidx 78 | cidx += 1 79 | video_lst.append(video_id) 80 | t_start_lst.append(float(ann['segment'][0])) 81 | t_end_lst.append(float(ann['segment'][1])) 82 | label_lst.append(activity_index[ann['label']]) 83 | 84 | ground_truth = pd.DataFrame({'video-id': video_lst, 85 | 't-start': t_start_lst, 86 | 't-end': t_end_lst, 87 | 'label': label_lst}) 88 | return ground_truth, activity_index 89 | 90 | def import_prediction(prediction_filename, activity_index): 91 | """Reads prediction file, checks if it is well formatted, and returns 92 | the prediction instances. 93 | Parameters 94 | ---------- 95 | prediction_filename : str 96 | Full path to the prediction json file. 97 | Outputs 98 | ------- 99 | prediction : df 100 | Data frame containing the prediction instances. 101 | """ 102 | with open(prediction_filename, 'r') as fobj: 103 | data = json.load(fobj) 104 | # Checking format... 105 | #if not all([field in data.keys() for field in self.pred_fields]): 106 | # raise IOError('Please input a valid prediction file.') 107 | 108 | # Read predictions. 109 | video_lst, t_start_lst, t_end_lst, label_lst, score_lst = [], [], [], [], [] 110 | predictions = data['results'] 111 | videos = predictions.keys() 112 | for video_id in videos: 113 | for ann in predictions[video_id]: 114 | #if ann['label'] not in activity_index: 115 | # activity_index[ann['label']] = cidx 116 | # cidx += 1 117 | video_lst.append(video_id) 118 | t_start_lst.append(float(ann['segment'][0])) 119 | t_end_lst.append(float(ann['segment'][1])) 120 | score_lst.append(float(ann['score'])) 121 | label_lst.append(activity_index[ann['label']]) 122 | prediction = pd.DataFrame({'video-id': video_lst, 123 | 't-start': t_start_lst, 124 | 't-end': t_end_lst, 125 | 'label': label_lst, 126 | 'score': score_lst}) 127 | return prediction 128 | 129 | def get_predictions_with_label(prediction_by_label, label_name, cidx): 130 | """Get all predictions of the given label. Return an empty DataFrame if there 131 | are no predictions with the given label. 132 | """ 133 | try: 134 | return prediction_by_label.get_group(cidx).reset_index(drop=True) 135 | except: 136 | print('Warning: No predictions of label \'%s\' were provided.' % label_name) 137 | return pd.DataFrame() 138 | 139 | 140 | def compute_average_precision_detection(ground_truth, prediction, tiou_thresholds=np.linspace(0.5, 0.95, 10)): 141 | """Compute average precision (detection task) between ground truth and 142 | predictions data frames. If multiple predictions occur for the same 143 | predicted segment, only the one with the highest score is matched as 144 | true positive. This code is greatly inspired by Pascal VOC devkit. 145 | Parameters 146 | ---------- 147 | ground_truth : df 148 | Data frame containing the ground truth instances.
149 |         Required fields: ['video-id', 't-start', 't-end']
150 |     prediction : df
151 |         Data frame containing the prediction instances.
152 |         Required fields: ['video-id', 't-start', 't-end', 'score']
153 |     tiou_thresholds : 1darray, optional
154 |         Temporal intersection over union thresholds.
155 |     Outputs
156 |     -------
157 |     ap : 1darray
158 |         Average precision score for each tIoU threshold.
159 |     """
160 |     ap = np.zeros(len(tiou_thresholds))
161 |     if prediction.empty:
162 |         return ap
163 | 
164 |     npos = float(len(ground_truth))
165 |     lock_gt = np.ones((len(tiou_thresholds), len(ground_truth))) * -1
166 |     # Sort predictions by decreasing score order.
167 |     sort_idx = prediction['score'].values.argsort()[::-1]
168 |     prediction = prediction.loc[sort_idx].reset_index(drop=True)
169 | 
170 |     # Initialize true positive and false positive vectors.
171 |     tp = np.zeros((len(tiou_thresholds), len(prediction)))
172 |     fp = np.zeros((len(tiou_thresholds), len(prediction)))
173 | 
174 |     # Group ground truth by video id for faster lookup.
175 |     ground_truth_gbvn = ground_truth.groupby('video-id')
176 | 
177 |     # Assign true positives to ground truth instances.
178 |     for idx, this_pred in prediction.iterrows():
179 | 
180 |         try:
181 |             # Check that the video of this prediction has at least one ground truth instance.
182 |             ground_truth_videoid = ground_truth_gbvn.get_group(this_pred['video-id'])
183 |         except KeyError:
184 |             fp[:, idx] = 1
185 |             continue
186 | 
187 |         this_gt = ground_truth_videoid.reset_index()
188 |         tiou_arr = segment_iou(this_pred[['t-start', 't-end']].values,
189 |                                this_gt[['t-start', 't-end']].values)
190 |         # Visit ground truth segments in decreasing order of tIoU with this prediction.
191 |         tiou_sorted_idx = tiou_arr.argsort()[::-1]
192 |         for tidx, tiou_thr in enumerate(tiou_thresholds):
193 |             for jdx in tiou_sorted_idx:
194 |                 if tiou_arr[jdx] < tiou_thr:
195 |                     fp[tidx, idx] = 1
196 |                     break
197 |                 if lock_gt[tidx, this_gt.loc[jdx]['index']] >= 0:
198 |                     continue
199 |                 # Assign as true positive after the filters above.
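                # Note: lock_gt records which ground truth segment has already been
                # matched at each tIoU threshold, so a ground truth instance can be
                # claimed by at most one prediction (the highest-scoring one, since
                # predictions are visited in decreasing score order).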
200 |                 tp[tidx, idx] = 1
201 |                 lock_gt[tidx, this_gt.loc[jdx]['index']] = idx
202 |                 break
203 | 
204 |             if fp[tidx, idx] == 0 and tp[tidx, idx] == 0:
205 |                 fp[tidx, idx] = 1
206 | 
207 |     tp_cumsum = np.cumsum(tp, axis=1).astype(float)
208 |     fp_cumsum = np.cumsum(fp, axis=1).astype(float)
209 |     recall_cumsum = tp_cumsum / npos
210 | 
211 |     precision_cumsum = tp_cumsum / (tp_cumsum + fp_cumsum)
212 | 
213 |     for tidx in range(len(tiou_thresholds)):
214 |         ap[tidx] = interpolated_prec_rec(precision_cumsum[tidx, :], recall_cumsum[tidx, :])
215 | 
216 |     return ap
217 | 
218 | 
219 | def main():
220 |     parser = argparse.ArgumentParser(description='Take input of ground truth file and submission file.')
221 |     parser.add_argument('--gt', metavar='gt', type=str, help='ground truth json file path')
222 |     parser.add_argument('--pred', metavar='pred', type=str, help='prediction file path')
223 |     args = parser.parse_args()
224 |     tiou_thresholds = np.linspace(0.5, 0.95, 10)
225 |     #print(args.gt)
226 |     gt, activity_idx = import_ground_truth(args.gt)
227 |     pred = import_prediction(args.pred, activity_idx)
228 |     gt_by_label = gt.groupby('label')
229 |     pred_by_label = pred.groupby('label')
230 |     ap = np.zeros((len(tiou_thresholds), len(activity_idx)))
231 |     results = Parallel(n_jobs=len(activity_idx))(
232 |         delayed(compute_average_precision_detection)(
233 |             ground_truth=gt_by_label.get_group(cidx).reset_index(drop=True),
234 |             prediction=get_predictions_with_label(pred_by_label, label_name, cidx),
235 |             tiou_thresholds=tiou_thresholds,
236 |         ) for label_name, cidx in activity_idx.items())
237 |     for i, cidx in enumerate(activity_idx.values()):
238 |         ap[:, cidx] = results[i]
239 |     #print(ap)
240 |     mAP = ap.mean(axis=1)
241 |     #print(mAP)
242 |     average_mAP = mAP.mean()
243 |     print('Performance Evaluation')
244 |     print('\tAverage-mAP: {}'.format(average_mAP))
245 | 
246 | 
247 | if __name__ == '__main__':
248 |     main()
249 | 
--------------------------------------------------------------------------------
/fonts/FontAwesome.otf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/FontAwesome.otf
--------------------------------------------------------------------------------
/fonts/Linearicons-Free.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/Linearicons-Free.eot
--------------------------------------------------------------------------------
/fonts/Linearicons-Free.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/Linearicons-Free.ttf
--------------------------------------------------------------------------------
/fonts/Linearicons-Free.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/Linearicons-Free.woff
--------------------------------------------------------------------------------
/fonts/Linearicons-Free.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/Linearicons-Free.woff2
-------------------------------------------------------------------------------- /fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /img/bg1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/bg1.png -------------------------------------------------------------------------------- /img/dummy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/dummy.png -------------------------------------------------------------------------------- /img/email.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/email.png -------------------------------------------------------------------------------- /img/hero-area.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/hero-area.png -------------------------------------------------------------------------------- /img/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/logo.png -------------------------------------------------------------------------------- /img/logo.psd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/logo.psd -------------------------------------------------------------------------------- /img/mmactpaper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/mmactpaper.png -------------------------------------------------------------------------------- /img/team/Korpela.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/Korpela.png -------------------------------------------------------------------------------- /img/team/hirokatsu_min.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/hirokatsu_min.png -------------------------------------------------------------------------------- /img/team/ito.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/ito.jpeg -------------------------------------------------------------------------------- /img/team/kenshohara.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/kenshohara.png -------------------------------------------------------------------------------- /img/team/maekawa-face-2018.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/maekawa-face-2018.png -------------------------------------------------------------------------------- /img/team/nakamura.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/nakamura.png -------------------------------------------------------------------------------- /img/team/quankong.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/quankong.png -------------------------------------------------------------------------------- /img/team/sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/sample.png -------------------------------------------------------------------------------- /img/team/satoshinichi.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/satoshinichi.jpg -------------------------------------------------------------------------------- /img/team/sinha.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mmact19/challenge/74cc4a45f2656d579639378165e695f04a6e8eee/img/team/sinha.jpeg -------------------------------------------------------------------------------- /js/contact-form-script.js: -------------------------------------------------------------------------------- 1 | $("#contactForm").validator().on("submit", function (event) { 2 | if (event.isDefaultPrevented()) { 3 | // handle the invalid form... 4 | formError(); 5 | submitMSG(false, "Did you fill in the form properly?"); 6 | } else { 7 | // everything looks good! 
8 | event.preventDefault(); 9 | submitForm(); 10 | } 11 | }); 12 | 13 | 14 | function submitForm(){ 15 | // Initiate Variables With Form Content 16 | var name = $("#name").val(); 17 | var email = $("#email").val(); 18 | var msg_subject = $("#msg_subject").val(); 19 | var message = $("#message").val(); 20 | 21 | 22 | $.ajax({ 23 | type: "POST", 24 | url: "php/form-process.php", 25 | data: "name=" + name + "&email=" + email + "&msg_subject=" + msg_subject + "&message=" + message, 26 | success : function(text){ 27 | if (text == "success"){ 28 | formSuccess(); 29 | } else { 30 | formError(); 31 | submitMSG(false,text); 32 | } 33 | } 34 | }); 35 | } 36 | 37 | function formSuccess(){ 38 | $("#contactForm")[0].reset(); 39 | submitMSG(true, "Message Submitted!") 40 | } 41 | 42 | function formError(){ 43 | $("#contactForm").removeClass().addClass('shake animated').one('webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend', function(){ 44 | $(this).removeClass(); 45 | }); 46 | } 47 | 48 | function submitMSG(valid, msg){ 49 | if(valid){ 50 | var msgClasses = "h3 text-center tada animated text-success"; 51 | } else { 52 | var msgClasses = "h3 text-center text-danger"; 53 | } 54 | $("#msgSubmit").removeClass().addClass(msgClasses).text(msg); 55 | } -------------------------------------------------------------------------------- /js/form-validator.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Validator v0.8.1 for Bootstrap 3, by @1000hz 3 | * Copyright 2015 Cina Saffary 4 | * Licensed under http://opensource.org/licenses/MIT 5 | * 6 | * https://github.com/1000hz/bootstrap-validator 7 | */ 8 | 9 | +function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b),f=c.data("bs.validator");(f||"destroy"!=b)&&(f||c.data("bs.validator",f=new d(this,e)),"string"==typeof b&&f[b]())})}var c=':input:not([type="submit"], button):enabled:visible',d=function(b,c){this.$element=a(b),this.options=c,c.errors=a.extend({},d.DEFAULTS.errors,c.errors);for(var e in c.custom)if(!c.errors[e])throw new Error("Missing default error message for custom validator: "+e);a.extend(d.VALIDATORS,c.custom),this.$element.attr("novalidate",!0),this.toggleSubmit(),this.$element.on("input.bs.validator change.bs.validator focusout.bs.validator",a.proxy(this.validateInput,this)),this.$element.on("submit.bs.validator",a.proxy(this.onSubmit,this)),this.$element.find("[data-match]").each(function(){var b=a(this),c=b.data("match");a(c).on("input.bs.validator",function(){b.val()&&b.trigger("input.bs.validator")})})};d.DEFAULTS={delay:500,html:!1,disable:!0,custom:{},errors:{match:"Does not match",minlength:"Not long enough"},feedback:{success:"glyphicon-ok",error:"glyphicon-warning-sign"}},d.VALIDATORS={"native":function(a){var b=a[0];return b.checkValidity?b.checkValidity():!0},match:function(b){var c=b.data("match");return!b.val()||b.val()===a(c).val()},minlength:function(a){var b=a.data("minlength");return!a.val()||a.val().length>=b}},d.prototype.validateInput=function(b){var c=a(b.target),d=c.data("bs.validator.errors");if(c.is('[type="radio"]')&&(c=this.$element.find('input[name="'+c.attr("name")+'"]')),this.$element.trigger(b=a.Event("validate.bs.validator",{relatedTarget:c[0]})),!b.isDefaultPrevented()){var 
e=this;this.runValidators(c).done(function(f){c.data("bs.validator.errors",f),f.length?e.showErrors(c):e.clearErrors(c),d&&f.toString()===d.toString()||(b=f.length?a.Event("invalid.bs.validator",{relatedTarget:c[0],detail:f}):a.Event("valid.bs.validator",{relatedTarget:c[0],detail:d}),e.$element.trigger(b)),e.toggleSubmit(),e.$element.trigger(a.Event("validated.bs.validator",{relatedTarget:c[0]}))})}},d.prototype.runValidators=function(b){function c(a){return b.data(a+"-error")||b.data("error")||"native"==a&&b[0].validationMessage||g.errors[a]}var e=[],f=a.Deferred(),g=this.options;return b.data("bs.validator.deferred")&&b.data("bs.validator.deferred").reject(),b.data("bs.validator.deferred",f),a.each(d.VALIDATORS,a.proxy(function(a,d){if((b.data(a)||"native"==a)&&!d.call(this,b)){var f=c(a);!~e.indexOf(f)&&e.push(f)}},this)),!e.length&&b.val()&&b.data("remote")?this.defer(b,function(){var d={};d[b.attr("name")]=b.val(),a.get(b.data("remote"),d).fail(function(a,b,d){e.push(c("remote")||d)}).always(function(){f.resolve(e)})}):f.resolve(e),f.promise()},d.prototype.validate=function(){var a=this.options.delay;return this.options.delay=0,this.$element.find(c).trigger("input.bs.validator"),this.options.delay=a,this},d.prototype.showErrors=function(b){var c=this.options.html?"html":"text";this.defer(b,function(){var d=b.closest(".form-group"),e=d.find(".help-block.with-errors"),f=d.find(".form-control-feedback"),g=b.data("bs.validator.errors");g.length&&(g=a("
'+ $this.options.errorMessage +'