├── .gitignore ├── LICENSE ├── README.md ├── osunn_structure.jpg ├── v6.2 ├── 00_environment_buildup.ipynb ├── 01_osumap_loader.ipynb ├── 02_osurhythm_estimator.ipynb ├── 03_osurhythm_momentum_estimator.ipynb ├── 04_osurhythm_slider_classifier.ipynb ├── 05_newsong_importer.ipynb ├── 06_osurhythm_evaluator.ipynb ├── 07_osuflow_evaluator_from_rhythm.ipynb ├── README.md ├── flow_dataset.npz ├── install ├── install.bat ├── load_map.js ├── maplist creator.py ├── maplist.txt ├── momentum_minmax.npy ├── newcombo.js ├── osureader.py ├── package-lock.json ├── package.json ├── plthelper.py ├── requirements.txt ├── saved_rhythm_model ├── saved_rhythm_model_momentums └── tfhelper.py └── v7.0 ├── 01_Training.ipynb ├── 02_Mapmaking.ipynb ├── Colab.ipynb ├── Colab_Training.ipynb ├── README.md ├── TimingAnlyz.exe ├── act_data_prep.py ├── act_final.py ├── act_flow_ds.py ├── act_gan.py ├── act_modding.py ├── act_newmap_prep.py ├── act_rhythm_calc.py ├── act_taiko_hitsounds.py ├── act_timing.py ├── act_train_rhythm.py ├── act_train_speed.py ├── assets └── template.osu ├── audio_tools.py ├── bass.dll ├── gen_maplist.js ├── hitsound_tools.py ├── include └── id3reader_p3.py ├── install ├── install.bat ├── load_map.js ├── losses.py ├── lost_losses.py ├── mania_Colab.ipynb ├── mania_Mapmaking.ipynb ├── mania_act_data_prep.py ├── mania_act_final.py ├── mania_act_rhythm_calc.py ├── mania_analyze.py ├── mania_audio_tools.py ├── mania_setup_colab.py ├── map_analyze.py ├── maplist.txt ├── maplist_maker ├── html │ ├── Inter-Regular.osu.woff │ ├── Inter-Regular.osu.woff2 │ ├── Torus-Regular.osu.otf │ ├── font-face.css │ ├── index.html │ ├── main.css │ └── main.js ├── osu-db-parser │ ├── index.js │ └── src │ │ ├── OsuDB.js │ │ ├── Reader.js │ │ └── Struct.js ├── osuDBGetter.js └── osuPathFinder.js ├── metadata.py ├── models ├── catch │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── cryo │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── default │ ├── flow_dataset.npz │ └── 
rhythm_model ├── flower │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── hard │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── inst │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── lowbpm │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── mania_highkey │ ├── maps.txt │ └── rhythm_model ├── mania_lowkey │ ├── maps.txt │ └── rhythm_model ├── mania_pattern │ ├── mania_pattern_dataset.npz │ └── maps.txt ├── normal │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── sota │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── taiko │ ├── flow_dataset.npz │ ├── hs_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── tvsize │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model └── vtuber │ ├── flow_dataset.npz │ ├── maps.txt │ └── rhythm_model ├── newcombo.js ├── os_tools.py ├── package-lock.json ├── package.json ├── package_colab.json ├── plot_tools.py ├── requirements.txt ├── requirements_colab.txt ├── rhythm_loader.py ├── setup_colab.py ├── slider_tools.py ├── stream_tools.py └── timing.py /.gitignore: -------------------------------------------------------------------------------- 1 | # ignore map files 2 | v*.*/mapdata/*.npz 3 | 4 | # intermediate results 5 | v*.*/evaluatedRhythm.json 6 | v*.*/mapthis.json 7 | v*.*/mapthis.npz 8 | v*.*/rhythm_data.npz 9 | v*.*/wavdata.json 10 | v*.*/wavfile.wav 11 | v*.*/temp_json_file.json 12 | v*.*/temp/* 13 | v*.*/timing.osu 14 | v*.*/audio.mp3 15 | 16 | # generated map 17 | v*.*/*.osu 18 | 19 | # model 20 | v*.*/flow_dataset.npz 21 | v*.*/saved_rhythm_model 22 | v*.*/hs_dataset.npz 23 | v*.*/mania_pattern_dataset.npz 24 | # saved_rhythm_model_momentums 25 | # saved_slider_model1 26 | # saved_slider_model2 27 | # momentum_minmax.npy 28 | 29 | # maps 30 | # *.osu 31 | # *.osz 32 | # *.osb 33 | 34 | # ffmpeg 35 | ffmpeg.exe 36 | 37 | # as the name suggests 38 | unused/ 39 | 40 | # did not work 41 | v*.*/16_osurhythm_evaluator_lesser.ipynb 42 | v*.*/saved_rhythm_model_lesser 
43 | v*.*/saved_rhythm_model_momentums_lesser 44 | v*.*/momentum_minmax_lesser.json 45 | js/ 46 | docker/ 47 | test/ 48 | 49 | # generated files 50 | v*.*/node_modules/ 51 | v*.*/.ipynb_checkpoints/ 52 | **/__pycache__/ 53 | v*.*/logs/ 54 | *.sublime-project 55 | *.sublime-workspace 56 | .vscode/ 57 | 58 | # local reference files 59 | full_maplist.txt 60 | osunn.pptx 61 | predictor_input.json 62 | predictor_output.json 63 | test/ -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # osumapper 2 | 3 | An automatic beatmap generator using Tensorflow / Deep Learning. 4 | 5 | Demo map 1 (low BPM): https://osu.ppy.sh/beatmapsets/1290030 6 | 7 | Demo map 2 (high BPM): https://osu.ppy.sh/beatmapsets/1290026 8 | 9 | ## Colaboratory 10 | 11 | https://colab.research.google.com/github/kotritrona/osumapper/blob/master/v7.0/Colab.ipynb 12 | 13 | For mania mode: [mania_Colab.ipynb](https://colab.research.google.com/github/kotritrona/osumapper/blob/master/v7.0/mania_Colab.ipynb) 14 | 15 | ## Complete guide for a newcomer in osu! mapping 16 | 17 | https://github.com/kotritrona/osumapper/wiki/Complete-guide:-creating-beatmap-using-osumapper 18 | 19 | ## Installation & Model Running 20 | 21 | - Refer to https://github.com/kotritrona/osumapper/tree/master/v6.2 for version 6.2 22 | - Refer to https://github.com/kotritrona/osumapper/tree/master/v7.0 for version 7.0 23 | 24 | ## Important tip for model training 25 | 26 | Don't train with every single map in your osu!. That's not how machine learning works! 27 | 28 | I would suggest you select only maps you think are well made, for instance a mapset that contains all 5.0 ~ 6.5☆ maps mapped by (insert mapper name). 29 | 30 | ## Maplist.txt creation: 31 | - I have made a maplist generator under `v7.0/` folder. Run `node gen_maplist.js` under the directory to start. 
The other way to create a maplist.txt file for training the model is to use the `maplist creator.py` script (found in the v6.2 folder). Running it overwrites the maplist.txt in that folder with a new one built from the maps in the songs folder you specify.
60 | - Beatmap Converter 61 | - uses node.js to convert map data between JSON and .osu formats 62 | 63 | ## Citing 64 | 65 | If you want to cite osumapper in a scholarly work, please cite the github page. I'm not going to write a paper for it. -------------------------------------------------------------------------------- /osunn_structure.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/osunn_structure.jpg -------------------------------------------------------------------------------- /v6.2/01_osumap_loader.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "deletable": true, 7 | "editable": true 8 | }, 9 | "source": [ 10 | "### osu!nn #1: Map Dataset Reader\n", 11 | "\n", 12 | "This notebook reads a file \"maplist.txt\", then reads the .osu files and the relevant music files to convert into some data.\n", 13 | "\n", 14 | "Data that feeds the Deep Neural Network.\n", 15 | "\n", 16 | "Last edit: 2019/4/22" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": { 22 | "deletable": true, 23 | "editable": true 24 | }, 25 | "source": [ 26 | "First of all, we need to install FFmpeg and specify its path here. It is needed to convert the .mp3 files to .wavs which Python can read.\n", 27 | "\n", 28 | "It's also fine to use any other converter, such as LAME: just edit the 24th line of osureader.py (starting with \"subprocess.call\") for the converter's parameters.\n", 29 | "\n", 30 | "**Then, fill maplist.txt with the paths of .osu files you want to train with.** Otherwise it cannot find any of the maps because the maps are on my computer. The default model is trained with the Sota dataset including 44 maps of Sota Fujimori music.\n", 31 | "\n", 32 | "After that run the grid below to convert the maps." 
33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": { 39 | "collapsed": false, 40 | "deletable": true, 41 | "editable": true 42 | }, 43 | "outputs": [], 44 | "source": [ 45 | "import os, re, time\n", 46 | "from osureader import * \n", 47 | "\n", 48 | "# set the ffmpeg path here!!\n", 49 | "# add \"r\" before the path string\n", 50 | "\n", 51 | "GLOBAL_VARS[\"ffmpeg_path\"] = r\"D:\\StudyData\\Tensorflow\\ffmpeg\\bin\\ffmpeg.exe\";\n", 52 | "\n", 53 | "# in linux, it is installed globally, so use this\n", 54 | "# GLOBAL_VARS[\"ffmpeg_path\"] = \"ffmpeg\";\n", 55 | "\n", 56 | "mapdata_path = \"mapdata/\";\n", 57 | "\n", 58 | "# check if it works\n", 59 | "test_process_path(GLOBAL_VARS[\"ffmpeg_path\"]);\n", 60 | "\n", 61 | "# check if nodejs works\n", 62 | "test_process_path(\"node\");\n", 63 | "\n", 64 | "# the divisor parameter\n", 65 | "divisor = 4;\n", 66 | "\n", 67 | "# make sure the mapdata folder exists\n", 68 | "if not os.path.isdir(mapdata_path):\n", 69 | " os.mkdir(mapdata_path);\n", 70 | "\n", 71 | "with open(\"maplist.txt\") as fp:\n", 72 | " fcont = fp.readlines();\n", 73 | "\n", 74 | "# The following part is something I used to filter maps with difficulty names\n", 75 | "results = [];\n", 76 | "# exclude_words = [\"Easy\", \"Normal\", \"Hard\", \"Taiko\", \"Salad\", \"Platter\", \"Overdose\", \"Rain\", \"4K\", \"5K\", \"6K\", \"7K\", \"8K\", \"9K\",\n", 77 | "# \"Kantan\", \"Futsuu\", \"Muzukashii\", \"Oni\", \"Field \"];\n", 78 | "for line in fcont:\n", 79 | "# if re.search(\"TV\", line):\n", 80 | "# apd = True;\n", 81 | "# for kw in exclude_words:\n", 82 | "# if kw.lower() in line.strip().lower():\n", 83 | "# apd = False;\n", 84 | "# break;\n", 85 | "# if apd:\n", 86 | "# results.append(line.strip());\n", 87 | " results.append(line);\n", 88 | "\n", 89 | "# Remove the originally existing npzs\n", 90 | "for file in os.listdir(mapdata_path):\n", 91 | " if file.endswith(\".npz\"):\n", 92 | " 
os.remove(os.path.join(mapdata_path, file));\n", 93 | "\n", 94 | "print(\"Number of filtered maps: {}\".format(len(results)));\n", 95 | "\n", 96 | "for k, mname in enumerate(results):\n", 97 | " try:\n", 98 | " start = time.time()\n", 99 | " read_and_save_osu_file(mname.strip(), filename=os.path.join(mapdata_path, str(k)), divisor=divisor);\n", 100 | " end = time.time()\n", 101 | " print(\"Map data #\" + str(k) + \" saved! time = \" + str(end - start) + \" secs\");\n", 102 | " except Exception as e:\n", 103 | " print(\"Error on #{}, path = {}, error = {}\".format(str(k), mname.strip(), e));\n", 104 | "\n", 105 | "# If some map causes bug please tell me!! https://discord.gg/npmSy7K" 106 | ] 107 | } 108 | ], 109 | "metadata": { 110 | "kernelspec": { 111 | "display_name": "Python 3", 112 | "language": "python", 113 | "name": "python3" 114 | }, 115 | "language_info": { 116 | "codemirror_mode": { 117 | "name": "ipython", 118 | "version": 3 119 | }, 120 | "file_extension": ".py", 121 | "mimetype": "text/x-python", 122 | "name": "python", 123 | "nbconvert_exporter": "python", 124 | "pygments_lexer": "ipython3", 125 | "version": "3.5.2" 126 | } 127 | }, 128 | "nbformat": 4, 129 | "nbformat_minor": 2 130 | } 131 | -------------------------------------------------------------------------------- /v6.2/05_newsong_importer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "deletable": true, 7 | "editable": true 8 | }, 9 | "source": [ 10 | "### osu!nn #5: New Map Reader\n", 11 | "\n", 12 | "Reads the data from the music. 
This data will be used to create a whole map!\n", 13 | "\n", 14 | "Final edit: 2018/8/16" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": { 20 | "deletable": true, 21 | "editable": true 22 | }, 23 | "source": [ 24 | "Before you read data from the music, it needs timing.\n", 25 | "\n", 26 | "Luckily there are some BPM analyzers on the web, and those are pretty accurate, so no need of Deep Learning for that!\n", 27 | "\n", 28 | "The analyzer I used (in '14) was MixMeister BPM analyzer - its problem is lack of output for offset. There should be something better than that now!\n", 29 | "\n", 30 | "And of course, it is still better to time it yourself; some music has multiple timing sections, and BPM analyzers don't seem to support that.\n", 31 | "\n", 32 | "After timing, save the empty .osu file, and fill in the file_path variable below.\n", 33 | "\n", 34 | "You should also adjust the map parameters; namely, slider velocity. The model doesn't care about the others." 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "metadata": { 40 | "deletable": true, 41 | "editable": true 42 | }, 43 | "source": [ 44 | "Also, FFmpeg path needed here." 
45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 4, 50 | "metadata": { 51 | "collapsed": false, 52 | "deletable": true, 53 | "editable": true 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "import os, re, time\n", 58 | "from osureader import *\n", 59 | "\n", 60 | "# set the ffmpeg path here!!\n", 61 | "# add \"r\" before the path string\n", 62 | "\n", 63 | "GLOBAL_VARS[\"ffmpeg_path\"] = r\"D:\\StudyData\\Tensorflow\\ffmpeg\\bin\\ffmpeg.exe\";\n", 64 | "\n", 65 | "# linux\n", 66 | "# GLOBAL_VARS[\"ffmpeg_path\"] = \"ffmpeg\";\n", 67 | "\n", 68 | "# check if it works\n", 69 | "test_process_path(GLOBAL_VARS[\"ffmpeg_path\"]);\n", 70 | "\n", 71 | "divisor = 4;\n", 72 | "\n", 73 | "def read_new_map(file_path):\n", 74 | " start = time.time()\n", 75 | " read_and_save_osu_tester_file(file_path.strip(), filename=\"mapthis\", divisor=divisor);\n", 76 | " end = time.time()\n", 77 | " print(\"Map data saved! time = \" + str(end - start) + \" secs.\");" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 5, 83 | "metadata": { 84 | "collapsed": false, 85 | "deletable": true, 86 | "editable": true 87 | }, 88 | "outputs": [ 89 | { 90 | "name": "stdout", 91 | "output_type": "stream", 92 | "text": [ 93 | "Map data saved! 
time = 3.715170383453369 secs.\n" 94 | ] 95 | } 96 | ], 97 | "source": [ 98 | "# input file here!\n", 99 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\whitegreat poem\\\\SeikoP - Shirotae no Uta ([CSGA]Ar3sgice) [(() = ())();].osu\";\n", 100 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\597684 Blue Reflection - Overdose\\\\Asano Hayato - OVERDOSE ([CSGA]Ar3sgice) [BR+].osu\"\n", 101 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\Albireo Lake\\\\Asano Hayato - Albireo Lake ([CSGA]Ar3sgice) [ORibt_].osu\"\n", 102 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\LetsLoveIkimashou\\\\NanamoriGoraku-bu - Let's Love~ de Ikimashou ([CSGA]Ar3sgice) [1234].osu\"\n", 103 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\My Maps\\\\ask for alms\\\\emroots - ask for alms ([CSGA]Ar3sgice) [,,,,,].osu\"\n", 104 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\My Maps\\\\cosminox192\\\\Erik McClure - Cosminox ([CSGA]Ar3sgice) [xxxxx].osu\"\n", 105 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\952035 Nakae Mitsuki - Alchemia\\\\Nakae Mitsuki - Alchemia (Shurelia) [Aristocrat].osu\"\n", 106 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\Need You Back\\\\ave;new feat. 
Sakura Saori - Need You Back ([CSGA]Ar3sgice) [history.setState()].osu\"\n", 107 | "file_path = \"D:\\\\osu!\\\\Songs\\\\Reboot Tactics\\\\sweet ARMS - Reboot Tactics ([CSGA]Ar3sgice) [Ctrl+Alt+Delete].osu\"\n", 108 | "\n", 109 | "# start here!!!\n", 110 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\Nhato - Hello World\\\\Nhato - Hello World ([CSGA]Ar3sgice) [Unaaaa].osu\"\n", 111 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\My Maps\\\\LookHome2\\\\emroots - Look to the Hometown ([CSGA]Ar3sgice) [233].osu\"\n", 112 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\Scarlet -gravitation-\\\\ave;new - Scarlet -gravitation- ([CSGA]Ar3sgice) [Scarlet Rainbow].osu\"\n", 113 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\shinri\\\\Yano Tatsuya - Shinri e no Kestui ~Lydie~ ([CSGA]Ar3sgice) [Divine Strength].osu\"\n", 114 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\Onaji Hoshi wo Miagete\\\\Yanagawa Kazuki - Onaji Hoshi wo Miagete ~Lydie~ ([CSGA]Ar3sgice) [Star8].osu\"\n", 115 | "\n", 116 | "# file_path = \"D:\\\\osu!\\\\Songs\\\\You're not a heroine, it's me\\\\Asano Hayato - You're not a heroine, it's me. ([CSGA]Ar3sgice) [Megumi].osu\"\n", 117 | "\n", 118 | "read_new_map(file_path);" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "metadata": { 124 | "deletable": true, 125 | "editable": true 126 | }, 127 | "source": [ 128 | "That's it! We can now proceed to the next notebook." 
129 | ] 130 | } 131 | ], 132 | "metadata": { 133 | "kernelspec": { 134 | "display_name": "Python 3", 135 | "language": "python", 136 | "name": "python3" 137 | }, 138 | "language_info": { 139 | "codemirror_mode": { 140 | "name": "ipython", 141 | "version": 3 142 | }, 143 | "file_extension": ".py", 144 | "mimetype": "text/x-python", 145 | "name": "python", 146 | "nbconvert_exporter": "python", 147 | "pygments_lexer": "ipython3", 148 | "version": "3.5.2" 149 | } 150 | }, 151 | "nbformat": 4, 152 | "nbformat_minor": 2 153 | } 154 | -------------------------------------------------------------------------------- /v6.2/06_osurhythm_evaluator.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "deletable": true, 7 | "editable": true 8 | }, 9 | "source": [ 10 | "### osu!nn #6: Rhythm Predictor\n", 11 | "\n", 12 | "Calculates a map's rhythm from the music and the timing.\n", 13 | "\n", 14 | "Synthesis of \"rhythmData\"\n", 15 | "* rhythmModel x 1\n", 16 | "* momentumModel x 1\n", 17 | "* timingData x 1\n", 18 | "* (Music) x 1\n", 19 | "\n", 20 | "Synthesis Time: ~2 seconds\n", 21 | "\n", 22 | "Final edit: 2018/8/16" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 1, 28 | "metadata": { 29 | "collapsed": true, 30 | "deletable": true, 31 | "editable": true 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "import tensorflow as tf\n", 36 | "from tensorflow import keras\n", 37 | "import pandas as pd\n", 38 | "import numpy as np\n", 39 | "import matplotlib.pyplot as plt\n", 40 | "import os, re" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": { 46 | "deletable": true, 47 | "editable": true 48 | }, 49 | "source": [ 50 | "Some parameters here. 
\n", 51 | "\n", 52 | "\"note_density\" determines how many notes will be placed on the timeline, ranges from 0 to 1.\n", 53 | "\n", 54 | "\"slider_favor\" determines how the model favors sliders against circles, ranges from -1 to 1.\n", 55 | "\n", 56 | "\"dist_multiplier\" determines how the flow model has your distance multiplied. ranges from 0 to +∞. Of course +∞ is not advisable.\n", 57 | "\n", 58 | "\"divisor_favor\" determines how the model favors notes to be on X divisors starting from a beat (white, blue, red, blue), ranges from -1 to 1 each.\n", 59 | "\n", 60 | "Ranges not inclusive." 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 2, 66 | "metadata": { 67 | "collapsed": true, 68 | "deletable": true, 69 | "editable": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "# TODO parameter here!!\n", 74 | "dist_multiplier = 1;\n", 75 | "note_density = 0.36;\n", 76 | "slider_favor = 0;\n", 77 | "divisor = 4;\n", 78 | "divisor_favor = [0] * divisor;" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": 3, 84 | "metadata": { 85 | "collapsed": false, 86 | "deletable": true, 87 | "editable": true 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "model = tf.keras.models.load_model(\n", 92 | " \"saved_rhythm_model\",\n", 93 | " custom_objects=None,\n", 94 | " compile=False\n", 95 | ");\n", 96 | "model.compile(loss='mse',\n", 97 | " optimizer=tf.optimizers.RMSprop(0.001),\n", 98 | " metrics=[keras.metrics.mae]);" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 4, 104 | "metadata": { 105 | "collapsed": false, 106 | "deletable": true, 107 | "editable": true 108 | }, 109 | "outputs": [], 110 | "source": [ 111 | "# root = \"..\\\\osureader\\\\mapdata_test\";\n", 112 | "fn = \"mapthis.npz\";\n", 113 | "\n", 114 | "def read_npz(fn):\n", 115 | " with np.load(fn) as data:\n", 116 | " wav_data = data[\"wav\"];\n", 117 | " wav_data = np.swapaxes(wav_data, 2, 3);\n", 118 | " ticks = data[\"ticks\"];\n", 
119 | " timestamps = data[\"timestamps\"];\n", 120 | " extra = data[\"extra\"];\n", 121 | " \n", 122 | " # Extra vars\n", 123 | " bpms = extra[0];\n", 124 | " slider_lengths = extra[1];\n", 125 | " ex1 = (60000 / bpms) / 500 - 1;\n", 126 | " ex2 = bpms / 120 - 1;\n", 127 | " ex3 = slider_lengths / 150 - 1;\n", 128 | " \n", 129 | " div_data = np.array([divisor_array(k) + [ex1[k], ex2[k], ex3[k]] for k in ticks]);\n", 130 | " return wav_data, div_data, ticks, timestamps;\n", 131 | "\n", 132 | "def divisor_array(k):\n", 133 | " d_range = list(range(0, divisor));\n", 134 | " return [int(k % divisor == d) for d in d_range];\n", 135 | "\n", 136 | "test_data, div_data, ticks, timestamps = read_npz(fn);" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 5, 142 | "metadata": { 143 | "collapsed": false, 144 | "deletable": true, 145 | "editable": true 146 | }, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "1116 notes predicted.\n" 153 | ] 154 | } 155 | ], 156 | "source": [ 157 | "# Make time intervals from test data\n", 158 | "time_interval = 16;\n", 159 | "if test_data.shape[0]%time_interval > 0:\n", 160 | " test_data = test_data[:-(test_data.shape[0]%time_interval)];\n", 161 | " div_data = div_data[:-(div_data.shape[0]%time_interval)];\n", 162 | "test_data2 = np.reshape(test_data, (-1, time_interval, test_data.shape[1], test_data.shape[2], test_data.shape[3]))\n", 163 | "div_data2 = np.reshape(div_data, (-1, time_interval, div_data.shape[1]))\n", 164 | "\n", 165 | "test_predictions = model.predict([test_data2, div_data2]);\n", 166 | "preds = test_predictions.reshape(-1, test_predictions.shape[2]);\n", 167 | "\n", 168 | "# Favor sliders a little\n", 169 | "preds[:, 2] += slider_favor;\n", 170 | "divs = div_data2.reshape(-1, div_data2.shape[2]);\n", 171 | "margin = np.sum([divisor_favor[k] * divs[:, k] for k in range(0, divisor)]);\n", 172 | "\n", 173 | "preds[:, 0] += margin;\n", 174 | "\n", 
175 | "# Predict is_obj using note_density\n", 176 | "obj_preds = preds[:, 0];\n", 177 | "target_count = np.round(note_density * obj_preds.shape[0]).astype(int);\n", 178 | "borderline = np.sort(obj_preds)[obj_preds.shape - target_count];\n", 179 | "is_obj_pred = np.expand_dims(np.where(preds[:, 0] > borderline, 1, 0), axis=1);\n", 180 | "\n", 181 | "obj_type_pred = np.sign(preds[:, 1:4] - np.tile(np.expand_dims(np.max(preds[:, 1:4], axis=1), 1), (1, 3))) + 1;\n", 182 | "others_pred = (1 + np.sign(preds[:, 4:test_predictions.shape[1]] + 0.5)) / 2;\n", 183 | "another_pred_result = np.concatenate([is_obj_pred, is_obj_pred * obj_type_pred, others_pred], axis=1);\n", 184 | "\n", 185 | "print(\"{} notes predicted.\".format(np.sum(is_obj_pred)));" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": 6, 191 | "metadata": { 192 | "collapsed": false, 193 | "deletable": true, 194 | "editable": true 195 | }, 196 | "outputs": [], 197 | "source": [ 198 | "def load_momentum_minmax(fn):\n", 199 | " data = np.load(fn);\n", 200 | " return data;\n", 201 | "mommax, mommin = load_momentum_minmax(\"momentum_minmax.npy\");\n", 202 | "\n", 203 | "momentum_model = tf.keras.models.load_model(\n", 204 | " \"saved_rhythm_model_momentums\",\n", 205 | " custom_objects=None,\n", 206 | " compile=False\n", 207 | ");\n", 208 | "momentum_model.compile(loss='mse',\n", 209 | " optimizer=tf.optimizers.RMSprop(0.001),\n", 210 | " metrics=[keras.metrics.mae]);" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": 7, 216 | "metadata": { 217 | "collapsed": false, 218 | "deletable": true, 219 | "editable": true 220 | }, 221 | "outputs": [], 222 | "source": [ 223 | "momentum_predictions_output = momentum_model.predict([test_data2, div_data2]);\n", 224 | "momentum_predictions = (momentum_predictions_output.reshape(-1, 2) + 1) / 2 / 0.8 * (mommax - mommin) + mommin;" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": { 230 | "deletable": 
true, 231 | "editable": true 232 | }, 233 | "source": [ 234 | "Save the rhythm data and progress to #7." 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": 8, 240 | "metadata": { 241 | "collapsed": true, 242 | "deletable": true, 243 | "editable": true 244 | }, 245 | "outputs": [], 246 | "source": [ 247 | "np.savez_compressed(\"rhythm_data\", objs = is_obj_pred[:, 0], predictions = another_pred_result, timestamps = timestamps, ticks = ticks, momenta = momentum_predictions, sv = (div_data[:,6] + 1) * 150, dist_multiplier = dist_multiplier);" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": 9, 253 | "metadata": { 254 | "collapsed": false, 255 | "deletable": true, 256 | "editable": true 257 | }, 258 | "outputs": [], 259 | "source": [ 260 | "import json\n", 261 | "\n", 262 | "rhythm_json = {\n", 263 | " \"objs\": is_obj_pred[:, 0].tolist(), \n", 264 | " \"predictions\": another_pred_result.tolist(),\n", 265 | " \"timestamps\": timestamps.tolist(),\n", 266 | " \"ticks\": ticks.tolist(),\n", 267 | " \"momenta\": momentum_predictions.tolist(),\n", 268 | " \"sv\": ((div_data[:,6] + 1) * 150).tolist(),\n", 269 | " \"distMultiplier\": dist_multiplier\n", 270 | "};\n", 271 | "with open(\"evaluatedRhythm.json\", \"w\") as er:\n", 272 | " json.dump(rhythm_json, er);" 273 | ] 274 | } 275 | ], 276 | "metadata": { 277 | "kernelspec": { 278 | "display_name": "Python 3", 279 | "language": "python", 280 | "name": "python3" 281 | }, 282 | "language_info": { 283 | "codemirror_mode": { 284 | "name": "ipython", 285 | "version": 3 286 | }, 287 | "file_extension": ".py", 288 | "mimetype": "text/x-python", 289 | "name": "python", 290 | "nbconvert_exporter": "python", 291 | "pygments_lexer": "ipython3", 292 | "version": "3.5.2" 293 | } 294 | }, 295 | "nbformat": 4, 296 | "nbformat_minor": 2 297 | } 298 | -------------------------------------------------------------------------------- /v6.2/README.md: 
-------------------------------------------------------------------------------- 1 | # osumapper v6.2 2 | 3 | This version uses Tensorflow v2.0.0-beta1. 4 | 5 | v6.2 demo map: https://osu.ppy.sh/beatmapsets/834264 6 | 7 | ## Installation: 8 | - install Python (3.5.x or 3.6.x or 3.7.x) and Jupyter notebook 9 | - install [node.js](https://nodejs.org/) 10 | - install [ffmpeg](https://ffmpeg.org/download.html) 11 | - git clone or download this repository 12 | - cd into this folder 13 | - run `install.bat` if you are on Windows 14 | - run `./install` if on Linux 15 | 16 | ## Running: 17 | - run the notebooks 01, 02, 03 for training 18 | - run the notebooks 05, 06, 07 for creating map 19 | - notebook 04 is unused for now 20 | -------------------------------------------------------------------------------- /v6.2/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v6.2/flow_dataset.npz -------------------------------------------------------------------------------- /v6.2/install: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | pip install -r requirements.txt 3 | npm i -------------------------------------------------------------------------------- /v6.2/install.bat: -------------------------------------------------------------------------------- 1 | pip install -r requirements.txt 2 | npm i -------------------------------------------------------------------------------- /v6.2/maplist creator.py: -------------------------------------------------------------------------------- 1 | import glob 2 | 3 | osupath = input('Please enter the path to your osu songs folder for training: ') 4 | verbose = input('Show verbose output? 
y/n: ') 5 | files = glob.glob(osupath + '/**/*.osu', recursive = True) 6 | numwritten = 0 7 | f = open('maplist.txt','w+') 8 | 9 | for filename in glob.iglob(osupath + '/**/*.osu', recursive = True): 10 | if(verbose == 'y' or verbose == 'Y' or verbose == 'Yes' or verbose == 'YES'): 11 | print(filename) 12 | f.write('\n' + filename) 13 | numwritten+=1 14 | 15 | print('#######################################################################################') 16 | print('Wrote ' + str(numwritten) + ' map paths to maplist.txt') 17 | input('maplist.txt generated in the same directory as this script, press enter to exit') 18 | -------------------------------------------------------------------------------- /v6.2/maplist.txt: -------------------------------------------------------------------------------- 1 | # .osu paths here! -------------------------------------------------------------------------------- /v6.2/momentum_minmax.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v6.2/momentum_minmax.npy -------------------------------------------------------------------------------- /v6.2/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ipynb", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "big-integer": { 8 | "version": "1.6.34", 9 | "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.34.tgz", 10 | "integrity": "sha512-+w6B0Uo0ZvTSzDkXjoBCTNK0oe+aVL+yPi7kwGZm8hd8+Nj1AFPoxoq1Bl/mEu/G/ivOkUc1LRqVR0XeWFUzuA==" 11 | }, 12 | "complex.js": { 13 | "version": "2.0.11", 14 | "resolved": "https://registry.npmjs.org/complex.js/-/complex.js-2.0.11.tgz", 15 | "integrity": "sha512-6IArJLApNtdg1P1dFtn3dnyzoZBEF0MwMnrfF1exSBRpZYoy4yieMkpZhQDC0uwctw48vii0CFVyHfpgZ/DfGw==" 16 | }, 17 | "fraction.js": { 18 | "version": 
"4.0.9", 19 | "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.0.9.tgz", 20 | "integrity": "sha512-qP1sNwdrcA+Vs5TTvGETuaaUmz4Tm48V6Jc+8Oh/gqvkb1d42s99w5kvSrZkIATp/mz3rV4CTef6xINkCofu+A==" 21 | }, 22 | "polynomial": { 23 | "version": "1.4.3", 24 | "resolved": "https://registry.npmjs.org/polynomial/-/polynomial-1.4.3.tgz", 25 | "integrity": "sha512-Yf9er7dXiA5jTVaaJc9oGSFB41JW7wigbc1m/nUQ0bOzz0gaY0Ti3HSrvIc4K6vwJ6MsN4eja+8ytLno1z/y1A==", 26 | "requires": { 27 | "big-integer": "1.6.34", 28 | "complex.js": "2.0.11", 29 | "fraction.js": "4.0.9", 30 | "quaternion": "1.0.5" 31 | } 32 | }, 33 | "quaternion": { 34 | "version": "1.0.5", 35 | "resolved": "https://registry.npmjs.org/quaternion/-/quaternion-1.0.5.tgz", 36 | "integrity": "sha512-StmkfFTHZ2CUA9b2qVCOaPZnuiR4W7YgRtrdCefICs4z+ny1qG3lpQ6UN33aC54h0r+i4D7sikvdg8Dv2DLGpw==" 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /v6.2/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "osumapper", 3 | "version": "6.2.0", 4 | "description": "An automatic beatmap generator using Tensorflow / Deep Learning.", 5 | "main": "load_map.js", 6 | "dependencies": { 7 | "polynomial": "^1.4.3" 8 | }, 9 | "devDependencies": {}, 10 | "scripts": { 11 | "test": "echo \"Error: no test specified\" && exit 1" 12 | }, 13 | "keywords": [], 14 | "repository": { 15 | "type": "git", 16 | "url": "git+https://github.com/kotritrona/osumapper.git" 17 | }, 18 | "author": "kotritrona", 19 | "license": "Apache-2.0" 20 | } -------------------------------------------------------------------------------- /v6.2/plthelper.py: -------------------------------------------------------------------------------- 1 | #define TRUE 0 2 | #define FALSE 1 3 | 4 | import matplotlib.pyplot as plt 5 | import matplotlib.lines as lines 6 | import matplotlib.transforms as mtransforms 7 | import matplotlib.text as mtext 8 | 9 | 10 | class 
MyLine(lines.Line2D): 11 | def __init__(self, *args, **kwargs): 12 | # we'll update the position when the line data is set 13 | self.text = mtext.Text(0, 0, '') 14 | lines.Line2D.__init__(self, *args, **kwargs) 15 | 16 | # we can't access the label attr until *after* the line is 17 | # inited 18 | self.text.set_text(self.get_label()) 19 | 20 | def set_figure(self, figure): 21 | self.text.set_figure(figure) 22 | lines.Line2D.set_figure(self, figure) 23 | 24 | def set_axes(self, axes): 25 | self.text.set_axes(axes) 26 | lines.Line2D.set_axes(self, axes) 27 | 28 | def set_transform(self, transform): 29 | # 2 pixel offset 30 | texttrans = transform + mtransforms.Affine2D().translate(2, 2) 31 | self.text.set_transform(texttrans) 32 | lines.Line2D.set_transform(self, transform) 33 | 34 | def set_data(self, x, y): 35 | if len(x): 36 | self.text.set_position((x[-1], y[-1])) 37 | 38 | lines.Line2D.set_data(self, x, y) 39 | 40 | def draw(self, renderer): 41 | # draw my label at the end of the line with 2 pixel offset 42 | lines.Line2D.draw(self, renderer) 43 | self.text.draw(renderer) 44 | 45 | def plot_history(history): 46 | plt.figure() 47 | plt.xlabel('Epoch') 48 | plt.ylabel('Mean Abs Error [Limitless]') 49 | plt.plot(history.epoch, np.array(history.history['loss']), 50 | label='Train Loss') 51 | plt.plot(history.epoch, np.array(history.history['val_loss']), 52 | label = 'Val loss') 53 | plt.legend() 54 | plt.show() -------------------------------------------------------------------------------- /v6.2/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.16.2 2 | SoundFile==0.10.2 3 | matplotlib==3.0.3 4 | pandas==0.24.2 5 | tensorflow==2.0.0-beta1 6 | scikit-learn==0.19.1 7 | scipy==1.1.0 -------------------------------------------------------------------------------- /v6.2/saved_rhythm_model: -------------------------------------------------------------------------------- 
# -*- coding: utf-8 -*-

"""TensorFlow loss helpers for the osumapper flow model."""

import tensorflow as tf

def stack_loss(tensor):
    """Mean penalty for note pairs that almost -- but not exactly -- stack.

    ``tensor[:, :, 0:2]`` holds x/y positions normalized to 0..1; they are
    scaled back to the 512x384 osu! playfield here.  Any pair of notes whose
    distance lies strictly between ``precise_limit`` and ``stack_limit``
    pixels contributes a penalty that grows linearly as they get closer.
    """
    # Encode positions as complex numbers so tf.abs gives euclidean distance.
    complex_list = tf.complex(tensor[:, :, 0] * 512, tensor[:, :, 1] * 384)
    stack_limit = 30
    precise_limit = 1
    a = []
    for k in range(tensor.shape[1]):
        w = tf.tile(tf.expand_dims(complex_list[:, k], axis=1), [1, tensor.shape[1]])
        r = tf.abs(w - complex_list)
        # 1.0 where precise_limit < distance < stack_limit, else 0.0
        rless = tf.cast(tf.less(r, stack_limit), tf.float32) * tf.cast(tf.greater(r, precise_limit), tf.float32)
        rmean = tf.reduce_mean(rless * (stack_limit - r) / stack_limit)
        a.append(rmean)
    b = tf.reduce_sum(a)
    return b

# This polygon loss was an attempt to make the map less likely to overlap each other.
# The idea is: calculate the area of polygon formed from the note positions;
# If it is big, then it is good - they form a convex shape, no overlap.
# ... of course it totally doesn't work like that.
def polygon_loss(tensor):
    """Shoelace-formula area of the polygon traced by the note positions.

    Fix: the original computed ``(x_i + x_{i+1}) * (y_{i+1} - x_i)``,
    subtracting an x coordinate where the shoelace formula requires the
    y coordinate (``y_i``), so the returned value was not the polygon area.
    """
    tensor_this = tensor[:, :, 0:2]
    # Positions rotated by one along the note axis, wrapping the first
    # note around to the end to close the polygon.
    tensor_next = tf.concat([tensor[:, 1:, 0:2], tensor[:, 0:1, 0:2]], axis=1)
    # Shoelace terms: (x_i + x_{i+1}) * (y_{i+1} - y_i)
    sa = (tensor_this[:, :, 0] + tensor_next[:, :, 0]) * (tensor_next[:, :, 1] - tensor_this[:, :, 1])
    surface = tf.abs(tf.reduce_sum(sa, axis=1)) / 2
    return surface
https://discord.gg/npmSy7K" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "## osumapper #2: rhythm model\n", 48 | "\n", 49 | "Train a rhythm model that decides where to place circles/sliders based on music.\n", 50 | "\n", 51 | "If you're using GPU and it reports a memory error, try setting batch_size parameter to a smaller value (that GPU can handle)." 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "from act_train_rhythm import *;\n", 61 | "\n", 62 | "train_params_p2 = {\n", 63 | " \"divisor\" : 4,\n", 64 | " \"train_epochs\" : 16,\n", 65 | " \"train_batch_size\" : None, # Default is 32 or based on machine specs\n", 66 | " \"plot_history\" : True,\n", 67 | " \"too_many_maps_threshold\" : 200,\n", 68 | " \"train_epochs_many_maps\" : 6,\n", 69 | " \"data_split_count\" : 80\n", 70 | "};\n", 71 | "model_p2 = step2_build_model()" 72 | ] 73 | }, 74 | { 75 | "cell_type": "markdown", 76 | "metadata": {}, 77 | "source": [ 78 | "Train the model and evaluate.
\n", 79 | "is_note_start accuracy should be about 0.8 to 0.9 based on my tests, others should be lower.\n", 80 | "\n", 81 | "**Note:** I changed the metrics from F1 to AUC in this version!! 0.5=guessing 1=perfect for AUC" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": null, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "model_p2 = step2_train_model(model_p2, train_params_p2)\n", 91 | "step2_evaluate(model_p2)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "Done! now save the model to the disk." 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": {}, 105 | "outputs": [], 106 | "source": [ 107 | "step2_save(model_p2)" 108 | ] 109 | }, 110 | { 111 | "cell_type": "markdown", 112 | "metadata": {}, 113 | "source": [ 114 | "## osumapper #3: flow dataset construction\n", 115 | "\n", 116 | "Construct a dataset for the map flow generator." 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": {}, 123 | "outputs": [], 124 | "source": [ 125 | "from act_flow_ds import *;\n", 126 | "\n", 127 | "flow_dataset_params = step3_set_params(note_group_size=10, step_size=5);\n", 128 | "maps_flow = step3_read_maps_flow(flow_dataset_params);\n", 129 | "step3_save_flow_dataset(maps_flow);\n", 130 | "\n", 131 | "# hitsounds dataset, only for taiko maps\n", 132 | "# maps_hs_af, maps_hs = step3_read_maps_hs(flow_dataset_params);\n", 133 | "# step3_save_hs_dataset(maps_hs_af, maps_hs);\n", 134 | "\n", 135 | "# pattern dataset, only for mania (remove the flow part for mania)\n", 136 | "# data = step3_read_maps_pattern([]);\n", 137 | "# step3_save_pattern_dataset(data);" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "metadata": {}, 143 | "source": [ 144 | "That's it! The models are trained. Start making a new map with the other notebook." 
145 | ] 146 | } 147 | ], 148 | "metadata": { 149 | "kernelspec": { 150 | "display_name": "Python 3", 151 | "language": "python", 152 | "name": "python3" 153 | }, 154 | "language_info": { 155 | "codemirror_mode": { 156 | "name": "ipython", 157 | "version": 3 158 | }, 159 | "file_extension": ".py", 160 | "mimetype": "text/x-python", 161 | "name": "python", 162 | "nbconvert_exporter": "python", 163 | "pygments_lexer": "ipython3", 164 | "version": "3.8.3" 165 | } 166 | }, 167 | "nbformat": 4, 168 | "nbformat_minor": 2 169 | } 170 | -------------------------------------------------------------------------------- /v7.0/02_Mapmaking.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## osumapper #4: New Map Reader\n" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "Set the input file string to a timed (having the right BPM/offset) .osu file.\n", 15 | "\n", 16 | "It converts the map/music to Python readable format." 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "from act_newmap_prep import *\n", 26 | "\n", 27 | "# input file here! (don't remove the \"r\" before string)\n", 28 | "file_path = r'..\\..\\test_data\\test.osu'\n", 29 | "\n", 30 | "# Or use auto timing with music file only!!\n", 31 | "\n", 32 | "# from act_timing import *;\n", 33 | "# music_path = r\"..\\..\\test_data\\audio.mp3\"\n", 34 | "# file_path = get_timed_osu_file(music_path, game_mode=0);\n", 35 | "\n", 36 | "step4_read_new_map(file_path);" 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [ 43 | "## osumapper #5: Rhythm Predictor\n", 44 | "\n", 45 | "Calculates a map's rhythm based on the music and timing." 
46 | ] 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "metadata": {}, 51 | "source": [ 52 | "Parameters:\n", 53 | "\n", 54 | "\"note_density\" determines how many notes will be placed on the timeline, ranges from 0 to 1.
\n", 55 | "\"slider_favor\" determines how the model favors sliders against circles, ranges from -1 to 1.
\n", 56 | "\"dist_multiplier\" determines the distance snap. ranges from 0 to +∞. Of course 0/+∞ are not advisable.
\n", 57 | "\"divisor_favor\" determines how the model favors notes to be on X divisors starting from a beat (white, blue, red, blue), ranges from -1 to 1 each.
\n", 58 | "\"slider_max_ticks\" determines the max amount of time a slider can slide, ranges from 1 to +∞." 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": null, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "from act_rhythm_calc import *\n", 68 | "\n", 69 | "model = step5_load_model();\n", 70 | "npz = step5_load_npz();\n", 71 | "params = step5_set_params(dist_multiplier=1, note_density=0.35, slider_favor=0, divisor_favor=[0] * 4, slider_max_ticks=8);\n", 72 | "\n", 73 | "predictions = step5_predict_notes(model, npz, params);\n", 74 | "converted = step5_convert_sliders(predictions, params);" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "step5_save_predictions(converted);" 84 | ] 85 | }, 86 | { 87 | "cell_type": "markdown", 88 | "metadata": {}, 89 | "source": [ 90 | "## osumapper #6: Map flow generator\n", 91 | "\n", 92 | "Generate the final map using a Generative Adversarial Network.\n", 93 | "\n", 94 | "Parameters:\n", 95 | "\n", 96 | "- note_distance_basis: the baseline for distance snap between notes\n", 97 | "- max_ticks_for_ds: max number of time ticks (each 1/4) that it uses the distance snap\n", 98 | "- next_from_slider_end: use slider end instead of slider head for calculating distance\n", 99 | "- box_loss_border, box_loss_value: it's like a barrier on the map edges that bounces off the circles\n", 100 | "- divisor, note_group_size: don't change unless you're using a special model built for it\n", 101 | "- good_epoch, max_epoch: controls the training time. less time makes it faster but risks less quality\n", 102 | "- g_\\*, c_\\*: hyperparameters used by GAN. 
No one knows how they work but they mysterically affect the result" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": { 109 | "scrolled": true 110 | }, 111 | "outputs": [], 112 | "source": [ 113 | "from act_gan import *;\n", 114 | "\n", 115 | "gan_params = {\n", 116 | " \"divisor\" : 4,\n", 117 | " \"good_epoch\" : 12,\n", 118 | " \"max_epoch\" : 30,\n", 119 | " \"note_group_size\" : 10,\n", 120 | " \"g_epochs\" : 1,\n", 121 | " \"c_epochs\" : 1,\n", 122 | " \"g_batch\" : 50,\n", 123 | " \"g_input_size\" : 50,\n", 124 | " \"c_true_batch\" : 140,\n", 125 | " \"c_false_batch\" : 5,\n", 126 | " \"c_randfalse_batch\" : 5,\n", 127 | " \"note_distance_basis\" : 200,\n", 128 | " \"next_from_slider_end\" : False,\n", 129 | " \"max_ticks_for_ds\" : 1,\n", 130 | " \"box_loss_border\" : 0.1,\n", 131 | " \"box_loss_value\" : 0.4,\n", 132 | " \"box_loss_weight\" : 1\n", 133 | "};\n", 134 | "\n", 135 | "step6_set_gan_params(gan_params);\n", 136 | "osu_a, data = step6_run_all();" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "### Since the generation will take a while...\n", 144 | "\n", 145 | "we can appreciate a nice picture of Cute Sophie!!\n", 146 | "\n", 147 | "" 148 | ] 149 | }, 150 | { 151 | "cell_type": "markdown", 152 | "metadata": {}, 153 | "source": [ 154 | "Do a little modding to the map.\n", 155 | "\n", 156 | "Parameters:\n", 157 | "\n", 158 | "- stream_regularizer: fix bad streams. integer for modes (0,1,2,3,4) 0=inactive\n", 159 | "- slider_mirror: mirror slider ends if they go outside map area. 
(0,1) 0=inactive 1=active" 160 | ] 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": null, 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "from act_modding import *\n", 169 | "\n", 170 | "modding_params = {\n", 171 | " \"stream_regularizer\" : 1,\n", 172 | " \"slider_mirror\" : 1\n", 173 | "}\n", 174 | "\n", 175 | "osu_a, data = step7_modding(osu_a, data, modding_params);" 176 | ] 177 | }, 178 | { 179 | "cell_type": "markdown", 180 | "metadata": {}, 181 | "source": [ 182 | "Finally, save the data into an .osu file!" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": null, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "from act_final import *\n", 192 | "\n", 193 | "saved_osu_name = step8_save_osu_file(osu_a, data);\n", 194 | "\n", 195 | "# for taiko mode only (comment out the above line and use below)\n", 196 | "# from act_taiko_hitsounds import *\n", 197 | "# taiko_hitsounds_params = step8_taiko_hitsounds_set_params(divisor=4, metronome_count=4)\n", 198 | "# hitsounds = step8_apply_taiko_hitsounds(osu_a, data, params=taiko_hitsounds_params)\n", 199 | "# saved_osu_name = step8_save_osu_file(osu_a, data, hitsounds=hitsounds);\n", 200 | "\n", 201 | "# clean up the folder\n", 202 | "step8_clean_up();" 203 | ] 204 | }, 205 | { 206 | "cell_type": "markdown", 207 | "metadata": {}, 208 | "source": [ 209 | "If it works alright, you should have a nice .osu file under the folder of these notebooks now!\n", 210 | "\n", 211 | "If it does not work, please tell me the problem so probably I could fix it!\n", 212 | "\n", 213 | "For bug reports and feedbacks either report it on github or use discord:
\n", 214 | "[https://discord.com/invite/npmSy7K](https://discord.com/invite/npmSy7K)" 215 | ] 216 | } 217 | ], 218 | "metadata": { 219 | "kernelspec": { 220 | "display_name": "Python 3", 221 | "language": "python", 222 | "name": "python3" 223 | }, 224 | "language_info": { 225 | "codemirror_mode": { 226 | "name": "ipython", 227 | "version": 3 228 | }, 229 | "file_extension": ".py", 230 | "mimetype": "text/x-python", 231 | "name": "python", 232 | "nbconvert_exporter": "python", 233 | "pygments_lexer": "ipython3", 234 | "version": "3.8.3" 235 | } 236 | }, 237 | "nbformat": 4, 238 | "nbformat_minor": 2 239 | } 240 | -------------------------------------------------------------------------------- /v7.0/Colab_Training.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## osumapper: create osu! map using Tensorflow and Colab\n", 8 | "\n", 9 | "### Model Training\n", 10 | "\n", 11 | "Github: https://github.com/kotritrona/osumapper" 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": { 17 | "id": "EroyvoE7qr_P" 18 | }, 19 | "source": [ 20 | "### Step 0: Installation\n", 21 | "\n", 22 | "First of all, check the Notebook Settings under Edit tab.
\n", 23 | "Activate GPU to make the training faster.\n", 24 | "\n", 25 | "Then, clone the git repository and install dependencies." 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": { 32 | "id": "3APpbRI8qrxm" 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "%cd /content/\n", 37 | "!git clone https://github.com/kotritrona/osumapper.git\n", 38 | "%cd osumapper/v7.0\n", 39 | "!apt install -y ffmpeg\n", 40 | "!apt install -y nodejs\n", 41 | "!cp requirements_colab.txt requirements.txt\n", 42 | "!cp package_colab.json package.json\n", 43 | "!pip install -r requirements.txt\n", 44 | "!npm install" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": { 50 | "id": "76KQyHYgrFEy" 51 | }, 52 | "source": [ 53 | "### Step 1: Upload training maps\n", 54 | "\n", 55 | "Write the maplist.txt and run the first block of `01_Training.ipynb` (`act_data_prep.step1_load_maps()`) locally.
\n", 56 | "After that, make a folder `NPZ/` under your google drive, and upload the generated npz files under local `mapdata/` in there." 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": { 63 | "id": "aFWVEtE2vtoT" 64 | }, 65 | "outputs": [], 66 | "source": [ 67 | "# Wait for the upload to finish" 68 | ] 69 | }, 70 | { 71 | "cell_type": "markdown", 72 | "metadata": { 73 | "id": "83RcU3yap-N_" 74 | }, 75 | "source": [ 76 | "Mount your google drive in Colaboratory.
\n", 77 | "It will ask you for an auth code.\n" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": { 84 | "id": "XF6WtFFupmyD" 85 | }, 86 | "outputs": [], 87 | "source": [ 88 | "from google.colab import drive\n", 89 | "drive.mount('/gdrive')" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": { 95 | "id": "5OjRVBotq9A7" 96 | }, 97 | "source": [ 98 | "Copy .npz files to the training data folder." 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": { 105 | "id": "Tx9X_LIZqGGi" 106 | }, 107 | "outputs": [], 108 | "source": [ 109 | "# One of mkdir or rm will pop an error. Ignore it.\n", 110 | "!mkdir mapdata/\n", 111 | "!rm mapdata/*.npz\n", 112 | "!cp /gdrive/'My Drive'/NPZ/*.npz mapdata/\n", 113 | "print(\"Copy complete!\")" 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": { 119 | "id": "FNQZjKoer8Fy" 120 | }, 121 | "source": [ 122 | "## Step 2: rhythm model\n", 123 | "\n", 124 | "(after this point it's copypaste from `01_Training.ipynb` from the second block)\n", 125 | "\n", 126 | "Train a rhythm model that decides where to place circles/sliders based on music.\n", 127 | "\n", 128 | "If you're using GPU and it reports a memory error, try setting batch_size parameter to a smaller value (that GPU can handle)." 
129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": null, 134 | "metadata": { 135 | "id": "svgt9Fs2r7iy" 136 | }, 137 | "outputs": [], 138 | "source": [ 139 | "from act_train_rhythm import *;\n", 140 | "\n", 141 | "train_params = {\n", 142 | " \"divisor\" : 4,\n", 143 | " \"train_epochs\" : 32,\n", 144 | " \"train_batch_size\" : None, # Default is 32 or based on machine specs\n", 145 | " \"plot_history\" : True,\n", 146 | " \"too_many_maps_threshold\" : 240,\n", 147 | " \"train_epochs_many_maps\" : 6,\n", 148 | " \"data_split_count\" : 80\n", 149 | "};\n", 150 | "model = step2_build_model()" 151 | ] 152 | }, 153 | { 154 | "cell_type": "markdown", 155 | "metadata": { 156 | "id": "Qv88gsdasKYh" 157 | }, 158 | "source": [ 159 | "Train the model and evaluate.\n", 160 | "is_note_start accuracy should be about 0.8 to 0.9 based on my tests, others should be lower.\n", 161 | "\n", 162 | "**Note:** I changed the metrics from F1 to AUC in this version!! 0.5=guessing 1=perfect for AUC" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": null, 168 | "metadata": { 169 | "id": "y4IAxnpUqqy9" 170 | }, 171 | "outputs": [], 172 | "source": [ 173 | "model = step2_train_model(model, train_params)\n", 174 | "step2_evaluate(model)" 175 | ] 176 | }, 177 | { 178 | "cell_type": "markdown", 179 | "metadata": { 180 | "id": "FNxOeuT2sOz5" 181 | }, 182 | "source": [ 183 | "Done! now save the model to the disk." 
184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": { 190 | "id": "LPnmz5twsPOJ" 191 | }, 192 | "outputs": [], 193 | "source": [ 194 | "from google.colab import files\n", 195 | "step2_save(model)\n", 196 | "\n", 197 | "files.download(\"saved_rhythm_model\")" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": { 203 | "id": "CfeE3AjbsVoi" 204 | }, 205 | "source": [ 206 | "## Step 3: flow dataset construction\n", 207 | "\n", 208 | "Construct a dataset for the map flow generator." 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": null, 214 | "metadata": { 215 | "id": "hxUhLFoEsWRx" 216 | }, 217 | "outputs": [], 218 | "source": [ 219 | "from act_flow_ds import *;\n", 220 | "\n", 221 | "flow_dataset_params = step3_set_params(note_group_size=10, step_size=5);\n", 222 | "maps_flow = step3_read_maps_flow(flow_dataset_params);\n", 223 | "step3_save_flow_dataset(maps_flow);\n", 224 | "files.download(\"flow_dataset.npz\")\n", 225 | "\n", 226 | "# hitsounds dataset, only for taiko maps\n", 227 | "# maps_hs_af, maps_hs = step3_read_maps_hs(flow_dataset_params);\n", 228 | "# step3_save_hs_dataset(maps_hs_af, maps_hs);\n", 229 | "# files.download(\"hs_dataset.npz\")\n", 230 | "\n", 231 | "# pattern dataset, only for mania (remove the flow part for mania)\n", 232 | "# data = step3_read_maps_pattern([]);\n", 233 | "# step3_save_pattern_dataset(data);\n", 234 | "# files.download(\"mania_pattern_dataset.npz\")" 235 | ] 236 | }, 237 | { 238 | "cell_type": "markdown", 239 | "metadata": { 240 | "id": "QsGdBZ-UtPVk" 241 | }, 242 | "source": [ 243 | "Replace the default model files to use it in Colab map creator." 
244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": null, 249 | "metadata": { 250 | "id": "pSI3WWi_tPqY" 251 | }, 252 | "outputs": [], 253 | "source": [ 254 | "!cp saved_rhythm_model models/default/rhythm_model\n", 255 | "!cp flow_dataset.npz models/default/flow_dataset.npz\n", 256 | "# !cp hs_dataset.npz models/default/hs_dataset.npz\n", 257 | "# !cp mania_pattern_dataset.npz models/default/mania_pattern_dataset.npz" 258 | ] 259 | }, 260 | { 261 | "cell_type": "markdown", 262 | "metadata": { 263 | "id": "O_pA6cIusoXQ" 264 | }, 265 | "source": [ 266 | "That's it! The models are trained. Start making a new map with the other notebook.\n", 267 | "\n", 268 | "For bug reports and feedbacks either report it on github or use discord:
\n", 269 | "[https://discord.com/invite/npmSy7K](https://discord.com/invite/npmSy7K)" 270 | ] 271 | } 272 | ], 273 | "metadata": { 274 | "accelerator": "GPU", 275 | "colab": { 276 | "collapsed_sections": [], 277 | "name": "Colab Training.ipynb", 278 | "provenance": [] 279 | }, 280 | "kernelspec": { 281 | "display_name": "Python 3", 282 | "language": "python", 283 | "name": "python3" 284 | }, 285 | "language_info": { 286 | "codemirror_mode": { 287 | "name": "ipython", 288 | "version": 3 289 | }, 290 | "file_extension": ".py", 291 | "mimetype": "text/x-python", 292 | "name": "python", 293 | "nbconvert_exporter": "python", 294 | "pygments_lexer": "ipython3", 295 | "version": "3.8.3" 296 | } 297 | }, 298 | "nbformat": 4, 299 | "nbformat_minor": 1 300 | } 301 | -------------------------------------------------------------------------------- /v7.0/README.md: -------------------------------------------------------------------------------- 1 | # osumapper v7.0 2 | 3 | This version uses Tensorflow v2.3.1. 4 | 5 | v7.0 demo map 1 (low BPM): https://osu.ppy.sh/beatmapsets/1290030 6 | 7 | v7.0 demo map 2 (high BPM): https://osu.ppy.sh/beatmapsets/1290026 8 | 9 | ## Colaboratory 10 | 11 | https://colab.research.google.com/github/kotritrona/osumapper/blob/master/v7.0/Colab.ipynb 12 | 13 | For mania mode: [mania_Colab.ipynb](https://colab.research.google.com/github/kotritrona/osumapper/blob/master/v7.0/mania_Colab.ipynb) 14 | 15 | ## Complete guide for a newcomer in osu! 
mapping 16 | 17 | https://github.com/kotritrona/osumapper/wiki/Complete-guide:-creating-beatmap-using-osumapper 18 | 19 | ## Installation 20 | 21 | Windows 22 | 23 | - install [Anaconda3](https://www.anaconda.com/products/individual#windows) 24 | - install [node.js](https://nodejs.org/) 25 | - git clone or download this repository 26 | - use Anaconda Prompt and cd into this directory (osumapper/v7.0/) 27 | - run `install.bat` 28 | 29 | Linux (Ubuntu) 30 | 31 | - install Python 3.8 32 | - run `./install` 33 | 34 | Other Linux 35 | 36 | - install Python 3.8 37 | - Open `install` file with a text editor 38 | - change "apt" to the correct package manager 39 | - run `./install` 40 | 41 | ## Running 42 | 43 | - start Jupyter Notebook 44 | - run 01_Training.ipynb for training 45 | - run 02_Mapmaking.ipynb for map making 46 | 47 | ## Maplist Generator 48 | 49 | - Run `node gen_maplist.js` under the directory to use the maplist generator 50 | 51 | ## Training in Colaboratory 52 | 53 | - You have to generate .npz map data using the first code block of 01_Training.ipynb and upload them to Google Drive 54 | - After that, use https://colab.research.google.com/github/kotritrona/osumapper/blob/master/v7.0/Colab_Training.ipynb 55 | 56 | ## Difference from previous versions 57 | 58 | - Cleaned up code, removed much useless code 59 | - Moved code from notebook to python files and integrated pipeline together 60 | - Uses librosa to read audio file 61 | - Removed soundfile and pandas dependency 62 | - Added TimingAnalyz support to achieve full auto-mapping (great tool made by [statementreply](https://osu.ppy.sh/users/126198)) 63 | 64 | ## Citing 65 | 66 | If you want to cite osumapper in a scholarly work, please cite the github page. I'm not going to write a paper for it. 
# -*- coding: utf-8 -*-

#
# Part 1 action script
#

from audio_tools import *
from os_tools import *

import os, re, time

mapdata_path = "mapdata/"

# Beat divisor (subdivisions per beat) used when quantizing map data.
# A GLOBAL dict may be injected by an external driver (e.g. the Colab setup
# scripts) -- TODO confirm; fall back to 4 when it is absent or lacks the key.
try:
    divisor = GLOBAL["divisor"]
except (NameError, KeyError):
    divisor = 4

def step1_load_maps():
    """Convert every beatmap listed in maplist.txt to an .npz under mapdata/.

    Side effects: creates mapdata/ if missing, deletes any existing .npz
    files in it, then writes one numbered .npz per successfully parsed map.
    Requires node and ffmpeg to be reachable on PATH.
    """
    # fix the path..?
    fix_path()

    # Make sure node is callable and the output folder exists.
    test_process_path("node")
    if not os.path.isdir(mapdata_path):
        os.mkdir(mapdata_path)

    # Verify the js dependencies are installed.
    test_node_modules()

    # Verify ffmpeg is callable.
    test_process_path("ffmpeg", "-version")

    # Read the map list, dropping blank lines.  Fix: the maplist generator
    # writes '\n' + filename for every entry, so the first line of the file
    # is always empty; the original fed that empty path to the parser and
    # logged a spurious error for map #0.
    with open("maplist.txt", encoding="utf8") as fp:
        results = [line.strip() for line in fp.readlines() if line.strip()]

    # Remove stale map data from previous runs.
    for file in os.listdir(mapdata_path):
        if file.endswith(".npz"):
            os.remove(os.path.join(mapdata_path, file))

    print("Number of filtered maps: {}".format(len(results)))

    for k, mname in enumerate(results):
        try:
            start = time.time()
            read_and_save_osu_file(mname, filename=os.path.join(mapdata_path, str(k)), divisor=divisor)
            end = time.time()
            print("Map data #" + str(k) + " saved! time = " + str(end - start) + " secs")
        except Exception as e:
            # One bad map should not abort the whole batch; report and go on.
            print("Error on #{}, path = {}, error = {}".format(str(k), mname, e))
54 | """ 55 | artist = metadata["artist"]; 56 | title = metadata["title"]; 57 | creator = metadata["creator"]; 58 | diffname = metadata["diffname"]; 59 | outname = (artist+" - " if len(artist) > 0 else "") + title + " (" + creator + ") [" + diffname + "].osu"; 60 | outname = re.sub("[^a-zA-Z0-9\(\)\[\] \.\,\!\~\`\{\}\-\_\=\+\&\^\@\#\$\%\;\']","", outname); 61 | return outname; 62 | 63 | def step8_save_osu_file(osu_map, data, hitsounds=None): 64 | """ 65 | Save trained map to disk, using filename generated from its metadata. 66 | """ 67 | osu_obj_array = convert_to_osu_obj(osu_map, data, hitsounds=hitsounds); 68 | 69 | with open("mapthis.json", encoding="utf-8") as map_json: 70 | map_dict = json.load(map_json); 71 | map_meta = map_dict["meta"]; 72 | filename = get_osu_file_name(map_meta); 73 | map_dict["obj"] = osu_obj_array; 74 | 75 | with open('mapthis.json', 'w', encoding="utf-8") as outfile: 76 | json.dump(map_dict, outfile, ensure_ascii=False); 77 | 78 | c = run_command(["node", "load_map.js", "c", "mapthis.json", filename]); 79 | if(len(c) > 1): 80 | print(c.decode("utf-8")); 81 | 82 | print("finished on: {}".format(datetime.datetime.now())); 83 | 84 | return filename; 85 | 86 | def step8_clean_up(): 87 | # clean up intermediate files 88 | for item in ["mapthis.json", "audio.mp3", "timing.osu", "rhythm_data.npz", "mapthis.npz", "temp_json_file.json", "wavfile.wav", "temp/temp_json_file.json", "temp/wavfile.wav", "evaluatedRhythm.json"]: 89 | try: 90 | os.remove(item); 91 | except: 92 | pass -------------------------------------------------------------------------------- /v7.0/act_flow_ds.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Part 4 Save Flow Dataset 5 | # 6 | 7 | import numpy as np; 8 | import os; 9 | 10 | root = "mapdata/"; 11 | 12 | divisor = 4; 13 | 14 | def step3_set_params(note_group_size=10, step_size=5): 15 | return note_group_size, step_size; 16 | 17 | def 
# TICK, TIME, TYPE, X, Y, IN_DX, IN_DY, OUT_DX, OUT_DY
def step3_read_maps_flow(params):
    """
    Read every .npz map file under `root` and cut its flow data into
    overlapping training chunks.

    params : (chunk_size, step_size) as returned by step3_set_params.
    Returns an array of shape (num_chunks, chunk_size, 6) holding the
    X, Y, OUT_DX, OUT_DY, END_X, END_Y columns, with positions scaled
    into the 512x384 osu! playfield and TICK reduced modulo `divisor`.
    Raises ValueError when no usable map data is found.
    """
    chunk_size, step_size = params

    max_x = 512
    max_y = 384

    chunks = []
    for file in os.listdir(root):
        if file.endswith(".npz"):
            flow_data = read_map_npz_flow(os.path.join(root, file))
            for i in range(0, (flow_data.shape[0] - chunk_size) // step_size):
                chunks.append(flow_data[i * step_size:i * step_size + chunk_size])

    if not chunks:
        # Fail early with a clear message instead of the cryptic IndexError
        # the slicing below would otherwise raise on an empty array.
        raise ValueError("no usable .npz map data found under '{}'".format(root))

    # normalize the TICK col and the coordinate cols
    result = np.array(chunks)
    result[:, :, 0] %= divisor
    result[:, :, 3] /= max_x
    result[:, :, 4] /= max_y
    result[:, :, 9] /= max_x
    result[:, :, 10] /= max_y

    # columns: TICK, TIME, TYPE, X, Y, IN_DX, IN_DY, OUT_DX, OUT_DY, END_X, END_Y
    # keep only X, Y, OUT_DX, OUT_DY, END_X, END_Y
    # (the redundant second np.array() copy of the old code is removed)
    used_indices = [3, 4, 7, 8, 9, 10]
    return result[:, :, used_indices]
def array_to_flags(arr):
    """Pack a sequence of 0/1 values into an integer bitmask (index 0 = LSB)."""
    flags = 0
    for position, bit in enumerate(arr):
        flags += bit * 2 ** position
    return flags
def step7_modding(obj_array, data, params):
    """
    Part 7: apply post-processing ("modding") passes to the generated map.

    params may contain:
      stream_regularizer - mode for stream_regularizer (defaults to 0)
      slider_mirror      - mode for slider_mirror (defaults to 0)
    Returns the modded (obj_array, data).
    """
    # Default BOTH mode switches. The old code only defaulted
    # "stream_regularizer", so a params dict without "slider_mirror"
    # raised KeyError below.
    if "stream_regularizer" not in params:
        params["stream_regularizer"] = 0
    if "slider_mirror" not in params:
        # NOTE(review): assumes mode 0 is a no-op, mirroring the
        # stream_regularizer default — confirm in slider_tools.
        params["slider_mirror"] = 0

    obj_array, data = stream_regularizer(obj_array, data, mode=params["stream_regularizer"])
    obj_array, data = slider_mirror(obj_array, data, mode=params["slider_mirror"])

    return obj_array, data
def step5_predict_notes(model, npz, params):
    """
    Run the rhythm model over the preprocessed music data and decide, for
    every tick, whether to place an object and which type it should be.

    model  : loaded keras rhythm model
    npz    : (wav_data, div_data, ticks, timestamps) from step5_load_npz
    params : tuple from step5_set_params
    Returns (is_obj_pred, predictions, timestamps, ticks, div_data,
    dist_multiplier) for the slider-conversion step.
    """
    # Get npz data
    test_data, div_data, ticks, timestamps = npz

    dist_multiplier, note_density, slider_favor, divisor_favor, slider_max_ticks = params

    # Make time intervals from test data (truncate to a multiple of 16 ticks)
    time_interval = 16
    if test_data.shape[0] % time_interval > 0:
        test_data = test_data[:-(test_data.shape[0] % time_interval)]
        div_data = div_data[:-(div_data.shape[0] % time_interval)]
    test_data2 = np.reshape(test_data, (-1, time_interval, test_data.shape[1], test_data.shape[2], test_data.shape[3]))
    div_data2 = np.reshape(div_data, (-1, time_interval, div_data.shape[1]))

    test_predictions = model.predict([test_data2, div_data2])
    preds = test_predictions.reshape(-1, test_predictions.shape[2])

    # Favor sliders a little
    preds[:, 2] += slider_favor
    divs = div_data2.reshape(-1, div_data2.shape[2])
    # Per-tick divisor bonus. axis=0 keeps one value per tick; without it
    # np.sum collapsed the list of per-tick arrays into a single scalar.
    margin = np.sum([divisor_favor[k] * divs[:, k] for k in range(0, divisor)], axis=0)

    preds[:, 0] += margin

    # Predict is_obj using note_density: keep roughly the top target_count ticks.
    obj_preds = preds[:, 0]
    target_count = np.round(note_density * obj_preds.shape[0]).astype(int)
    # Fix: index with shape[0] (an int) — the old code used obj_preds.shape
    # (a tuple), which raises TypeError. Clamp so note_density of 0 or 1
    # cannot index out of bounds.
    borderline_index = int(np.clip(obj_preds.shape[0] - target_count, 0, obj_preds.shape[0] - 1))
    borderline = np.sort(obj_preds)[borderline_index]
    is_obj_pred = np.expand_dims(np.where(preds[:, 0] > borderline, 1, 0), axis=1)

    # One-hot-ish object type: 2 at the argmax column of (circle, slider, spinner).
    obj_type_pred = np.sign(preds[:, 1:4] - np.tile(np.expand_dims(np.max(preds[:, 1:4], axis=1), 1), (1, 3))) + 1
    # Remaining columns thresholded at -0.5 into {0, 1} flags.
    others_pred = (1 + np.sign(preds[:, 4:test_predictions.shape[1]] + 0.5)) / 2
    another_pred_result = np.concatenate([is_obj_pred, is_obj_pred * obj_type_pred, others_pred], axis=1)

    print("{} notes predicted.".format(np.sum(is_obj_pred)))

    return is_obj_pred, another_pred_result, timestamps, ticks, div_data, dist_multiplier
unfiltered_timestamps[obj_indices]; 118 | first_step_sv = unfiltered_sv[obj_indices]; 119 | 120 | first_step_is_slider = first_step_predictions[:, 2]; 121 | first_step_is_spinner = first_step_predictions[:, 3]; 122 | first_step_is_note_end = first_step_predictions[:, 4]; 123 | 124 | # convert notes with is_slider flag to sliders 125 | # if there is next note, slide to next note 126 | # else, slide for [max] ticks 127 | 128 | skip_this = False; 129 | new_obj_indices = []; 130 | slider_ticks = []; 131 | for i in range(len(first_step_objs)): 132 | if skip_this or not first_step_objs[i]: # not first_step_objs = slider end 133 | first_step_is_slider[i] = 0; 134 | skip_this = False; 135 | continue; 136 | if first_step_is_slider[i]: # this one is a slider!! 137 | if i == first_step_objs.shape[0]-1: # Last Note. 138 | new_obj_indices.append(i); 139 | slider_ticks.append(slider_max_ticks); 140 | continue; 141 | if first_step_ticks[i+1] >= first_step_ticks[i] + slider_max_ticks + 1: # too long! end here 142 | new_obj_indices.append(i); 143 | slider_ticks.append(slider_max_ticks); 144 | else: 145 | skip_this = True; # skip the next note or slider end, and slide to that tick 146 | new_obj_indices.append(i); 147 | slider_ticks.append(max(1, first_step_ticks[i+1] - first_step_ticks[i])); 148 | else: # not a slider! 149 | new_obj_indices.append(i); 150 | slider_ticks.append(0); 151 | 152 | # Filter the removed objects out! 
def step5_save_predictions(data):
    """Write the rhythm-step results to rhythm_data.npz for the flow step."""
    # Unpack explicitly so a malformed tuple still fails loudly.
    (objs, predictions, ticks, timestamps, is_slider, is_spinner,
     is_note_end, sv, slider_ticks, dist_multiplier) = data

    arrays = {
        "objs": objs,
        "predictions": predictions,
        "ticks": ticks,
        "timestamps": timestamps,
        "is_slider": is_slider,
        "is_spinner": is_spinner,
        "is_note_end": is_note_end,
        "sv": sv,
        "slider_ticks": slider_ticks,
        "dist_multiplier": dist_multiplier,
    }
    np.savez_compressed("rhythm_data", **arrays)
def get_timed_osu_file(music_path, input_filename = "assets/template.osu", output_filename = "timing.osu", game_mode = 0, mania_key_count = None):
    """
    Fill the .osu template with metadata read from the music file's ID3 tags
    plus an auto-detected BPM/offset, write it out, and copy the audio next
    to it as audio.mp3.

    music_path       : path of the source mp3
    input_filename   : template file containing {placeholder} markers
    output_filename  : .osu file to write
    game_mode        : osu! game mode (0=std, 1=taiko, 2=ctb, 3=mania)
    mania_key_count  : if set, used verbatim as CircleSize (mania key count)
    Returns output_filename.
    """
    with open(input_filename) as osu_file:
        osu_text = osu_file.read()

    rdr = id3.Reader(music_path)
    artist = rdr.get_value("performer")
    if artist is None:
        artist = "unknown"
    title = rdr.get_value("title")
    if title is None:
        # fall back to the file name without its extension
        title = re.sub(r"\.[^\.]*$", "", os.path.basename(music_path))

    bpm, offset = get_timing(music_path)

    # str.replace instead of re.sub: the placeholders are literal text, and
    # ID3 tag values were used as re.sub *replacement* strings, where "\"
    # and "\g<...>" are special — a tag containing them corrupted the output
    # or raised re.error.
    osu_text = osu_text.replace("{audio_filename}", "audio.mp3")
    osu_text = osu_text.replace("{game_mode}", str(game_mode))
    osu_text = osu_text.replace("{artist}", artist)
    osu_text = osu_text.replace("{title}", title)
    osu_text = osu_text.replace("{version}", get_difficulty_name())
    # Difficulty settings are randomized within sane editor ranges.
    osu_text = osu_text.replace("{hp_drain}", "{}".format(np.random.randint(0, 101) / 10))
    if mania_key_count is None:
        osu_text = osu_text.replace("{circle_size}", "{}".format(np.random.randint(30, 51) / 10))
    else:
        osu_text = osu_text.replace("{circle_size}", "{}".format(mania_key_count))
    osu_text = osu_text.replace("{overall_difficulty}", "{}".format(np.random.randint(50, 91) / 10))
    osu_text = osu_text.replace("{approach_rate}", "{}".format(np.random.randint(70, 96) / 10))
    osu_text = osu_text.replace("{slider_velocity}", "{}".format(np.random.randint(12, 26) / 10))
    osu_text = osu_text.replace("{tickLength}", "{}".format(60000 / bpm))
    osu_text = osu_text.replace("{offset}", "{}".format(int(offset)))
    osu_text = osu_text.replace("{colors}", get_colors())
    osu_text = osu_text.replace("{hit_objects}", "")

    with open(output_filename, 'w', encoding="utf8") as osu_output:
        osu_output.write(osu_text)

    copy(music_path, "./audio.mp3")

    return output_filename
| [HitObjects] 56 | {hit_objects} -------------------------------------------------------------------------------- /v7.0/bass.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/bass.dll -------------------------------------------------------------------------------- /v7.0/gen_maplist.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | 'use strict'; 4 | 5 | const osuPathFinder = require("./maplist_maker/osuPathFinder"); 6 | const osuDBGetter = require("./maplist_maker/osuDBGetter"); 7 | const express = require('express'); 8 | const bodyParser = require('body-parser'); 9 | const opn = require('opn'); 10 | const fs = require('fs'); 11 | 12 | async function main() { 13 | const osuPaths = await osuPathFinder(); 14 | const osuDB = osuDBGetter(osuPaths.db); 15 | 16 | let app = express(); 17 | app.use("/beatmaps", function(req, res, next) { 18 | res.set("Content-Type", "application/json"); 19 | res.send(JSON.stringify({ 20 | path: osuPaths.root, 21 | maps: osuDB.beatmaps 22 | })); 23 | res.end(); 24 | }); 25 | app.use(bodyParser.json({limit: '200mb'})); 26 | app.use("/save", function(req, res, next) { 27 | try { 28 | let contents = req.body.contents; 29 | fs.writeFileSync("maplist.txt", contents); 30 | res.send("saved"); 31 | } 32 | catch(e) { 33 | res.send("fail"); 34 | } 35 | res.end(); 36 | }); 37 | app.use("/", express.static('maplist_maker/html')); 38 | 39 | app.use(function(req, res, next) { 40 | res.status(404); 41 | res.send("404"); 42 | res.end(); 43 | }); 44 | 45 | app.listen(3424, function () { 46 | opn("http://127.0.0.1:3424/"); 47 | console.log('Node server listening on port 3424!'); 48 | }); 49 | } 50 | main(); -------------------------------------------------------------------------------- /v7.0/hitsound_tools.py: 
def get_circle_hitsounds(map_json, **kwargs):
    """
    Reads JSON map data and creates a list of hitsound groups.

    It only reads circle hitsounds because it's dedicated for taiko mode.
    osu mode hitsounds use another function in load_map.js.
    it will not work for osu mode because of custom hitsounds.

    Returns a list of np.array rows, each [avail_flags, hs_0 .. hs_{4*divisor-1}].
    """
    length = kwargs.get("length", -1)
    divisor = kwargs.get("divisor", 4)
    tick_times = get_map_timing_array(map_json, length=length, divisor=divisor)

    objs = map_json["obj"]
    obj_times = [obj["time"] for obj in objs]

    hs_groups = []
    hs_group = []

    hs_avails = []
    hs_avail = []

    po = 0
    for i, tick in enumerate(tick_times):
        metronome = get_metronome_count(map_json, tick)
        # Flush the previous group at every metronome boundary.
        # NOTE(review): the final (possibly partial) group accumulated after
        # the last boundary is dropped, as in the original — confirm intent.
        if i % (metronome * divisor) == 0:
            if len(hs_group) > 0:
                hs_groups.append(hs_group)
                hs_avails.append(hs_avail)
                hs_group = []
                hs_avail = []

        # advance to the first object not earlier than (tick - 5) ms
        while obj_times[po] < tick - 5 and po < len(obj_times) - 1:
            po += 1
        if tick - 5 <= obj_times[po] <= tick + 5:  # found a note on this tick
            hs_group.append(objs[po]["hitsounds"])
            hs_avail.append(1)
        else:
            hs_group.append(0)
            hs_avail.append(0)

    # pad/trim everything to 4 metronomes
    group_len = 4 * divisor
    for i, hs_group in enumerate(hs_groups):
        hs_avail = hs_avails[i]
        # Fix: pad each list by its OWN deficit. The old code padded hs_group
        # first and then computed hs_avail's padding from hs_group's *new*
        # length (always 0), so hs_avail was never actually padded.
        hs_groups[i] = (hs_group + [0] * (group_len - len(hs_group)))[:group_len]
        hs_avails[i] = (hs_avail + [0] * (group_len - len(hs_avail)))[:group_len]

    # convert hs_avail to bit flags (index 0 = LSB)
    hs_avail_flags = [sum(k * 2 ** i for i, k in enumerate(hs_avail)) for hs_avail in hs_avails]

    return [np.array([hs_avail_flags[i]] + hs_groups[i]) for i in range(len(hs_groups))]
def apply_hitsounds(hs_avail_flags, hs_data, ticks, divisor=4, metronome_count=4):
    """
    Assign a hitsound to every note tick by sampling, one metronome period
    (divisor * metronome_count ticks) at a time, a compatible hitsound
    group from the dataset.

    hs_avail_flags, hs_data : from read_hitsound_dataset
    ticks                   : sorted note ticks, one entry per note
    Returns one hitsound value per entry of ticks.
    """
    max_tick = ticks[-1]
    group_offsets = []   # within-period offsets of the ticks that carry notes
    sampled_groups = []
    metronome_offset = 0

    for tick in range(max_tick + 1):
        if tick in ticks:
            group_offsets.append(metronome_offset)

        metronome_offset += 1

        if metronome_offset >= divisor * metronome_count or tick == max_tick:
            hs_group = get_random_hitsound_group(hs_avail_flags, hs_data, group_offsets)
            sampled_groups.append(hs_group)
            group_offsets = []
            # Fix: restart the within-period counter. The old code never
            # reset it, so after the first period every tick flushed a new
            # group and the offsets no longer matched positions inside the
            # metronome, misaligning hs_full[ticks] below.
            metronome_offset = 0

    hs_full = np.concatenate(sampled_groups, axis=0)
    hs_objs = hs_full[ticks]

    return hs_objs
-------------------------------------------------------------------------------- /v7.0/losses.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Part 7 Loss Functions 5 | # 6 | 7 | import tensorflow as tf 8 | from tensorflow.python.keras.utils import losses_utils 9 | from tensorflow.python.keras.losses import LossFunctionWrapper 10 | 11 | # A regularizer to keep the map inside the box. 12 | # It's so the sliders and notes don't randomly fly out of the screen! 13 | def inblock_loss(vg, border, value): 14 | wall_var_l = tf.where(tf.less(vg, border), tf.square(value - vg), 0 * vg); 15 | wall_var_r = tf.where(tf.greater(vg, 1 - border), tf.square(vg - (1 - value)), 0 * vg); 16 | return tf.reduce_mean(tf.reduce_mean(wall_var_l + wall_var_r, axis=2), axis=1); 17 | 18 | # Loss functions and mapping layer, to adapt to TF 2.0 19 | class GenerativeCustomLoss(LossFunctionWrapper): 20 | """ 21 | This loss function is used in the generative model. 22 | It uses "1 - classification" as loss = good if it's classified as true sample, bad if classified as false. 23 | """ 24 | def __init__(self, 25 | reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE, 26 | name='generative_custom_loss'): 27 | 28 | def loss_function_for_generative_model(y_true, y_pred): 29 | classification = y_pred; 30 | loss1 = 1 - tf.reduce_mean(classification, axis=1); 31 | return loss1; 32 | 33 | super(GenerativeCustomLoss, self).__init__(loss_function_for_generative_model, name=name, reduction=reduction) 34 | 35 | class BoxCustomLoss(LossFunctionWrapper): 36 | """ 37 | Checks if note_start and note_end positions are within boundaries. 38 | If it gets close to the boundary then this loss function will produce positive value. Otherwise it is zero. 
39 | """ 40 | def __init__(self, 41 | border, 42 | value, 43 | reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE, 44 | name='generative_custom_loss'): 45 | 46 | self.loss_border = border 47 | self.loss_value = value 48 | 49 | def box_loss(y_true, y_pred): 50 | map_part = y_pred; 51 | return inblock_loss(map_part[:, :, 0:2], self.loss_border, self.loss_value) + inblock_loss(map_part[:, :, 4:6], self.loss_border, self.loss_value) 52 | 53 | super(BoxCustomLoss, self).__init__(box_loss, name=name, reduction=reduction) 54 | 55 | class AlwaysZeroCustomLoss(LossFunctionWrapper): 56 | """ 57 | Why does TF not include this? This is very useful in certain situations 58 | """ 59 | def __init__(self, 60 | reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE, 61 | name='generative_custom_loss'): 62 | 63 | def alw_zero(y_true, y_pred): 64 | return tf.convert_to_tensor(0, dtype=tf.float32); 65 | 66 | super(AlwaysZeroCustomLoss, self).__init__(alw_zero, name=name, reduction=reduction) 67 | 68 | -------------------------------------------------------------------------------- /v7.0/lost_losses.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # redundant loss calculation 5 | # 6 | 7 | import tensorflow as tf 8 | 9 | def stack_loss(tensor): 10 | complex_list = tf.complex(tensor[:, :, 0] * 512, tensor[:, :, 1] * 384); 11 | stack_limit = 30; 12 | precise_limit = 1; 13 | a = []; 14 | for k in range(tensor.shape[1]): 15 | w = tf.tile(tf.expand_dims(complex_list[:, k], axis=1), [1, tensor.shape[1]]); 16 | r = tf.abs(w - complex_list); 17 | rless = tf.cast(tf.less(r, stack_limit), tf.float32) * tf.cast(tf.greater(r, precise_limit), tf.float32); 18 | rmean = tf.reduce_mean(rless * (stack_limit - r) / stack_limit); 19 | a.append(rmean); 20 | b = tf.reduce_sum(a); 21 | return b; 22 | 23 | # This polygon loss was an attempt to make the map less likely to overlap each other. 
# This polygon loss was an attempt to make the map less likely to overlap each other.
# The idea is: calculate the area of polygon formed from the note positions;
# If it is big, then it is good - they form a convex shape, no overlap.
def polygon_loss(tensor):
    """
    Shoelace (trapezoid form) polygon area of the note positions, per batch item.

    tensor : (batch, notes, >=2) where [:, :, 0:2] are x/y positions.
    Returns a (batch,) tensor of absolute polygon areas.
    """
    tensor_this = tensor[:, :, 0:2]
    # each note paired with the next, wrapping around to the first
    tensor_next = tf.concat([tensor[:, 1:, 0:2], tensor[:, 0:1, 0:2]], axis=1)
    # Shoelace term is (x_i + x_{i+1}) * (y_{i+1} - y_i). The old code
    # subtracted tensor_this[:, :, 0] (x_i) instead of y_i, so the result
    # was not the polygon area at all — likely why it "totally didn't work".
    sa = (tensor_this[:, :, 0] + tensor_next[:, :, 0]) * (tensor_next[:, :, 1] - tensor_this[:, :, 1])
    surface = tf.abs(tf.reduce_sum(sa, axis=1)) / 2
    return surface
(don't remove the \"r\" before string)\n", 28 | "file_path = r'..\\..\\test_data\\test_mania.osu'\n", 29 | "file_path = r'D:\\osu!\\Songs\\beatmap-637381895878231005-03.Mystic Pendulum\\RURUTIA - Mystic Pendulum ([CSGA]Ar3sgice) [Mania 1K].osu'\n", 30 | "\n", 31 | "# Or use auto timing with music file only!!\n", 32 | "\n", 33 | "# from act_timing import *;\n", 34 | "# music_path = r\"..\\..\\test_data\\audio.mp3\"\n", 35 | "# file_path = get_timed_osu_file(music_path, game_mode=3, mania_key_count=9);\n", 36 | "\n", 37 | "step4_read_new_map(file_path);" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## osumapper #5: Rhythm Predictor\n", 45 | "\n", 46 | "Calculates a map's rhythm based on the music and timing." 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "Parameters:\n", 54 | "\n", 55 | "\"note_density\" determines how many notes will be placed on the timeline, ranges from 0 to 1.
\n", 56 | "\"hold_favor\" determines how the model favors holds against circles, ranges from -1 to 1.
\n", 57 | "\"divisor_favor\" determines how the model favors notes to be on X divisors starting from a beat (white, blue, red, blue), ranges from -1 to 1 each.
\n", 58 | "\"hold_max_ticks\" determines the max amount of time a hold can hold off, ranges from 1 to +∞.
\n", 59 | "\"hold_min_return\" determines the final granularity of the pattern dataset, ranges from 1 to +∞.
\n", 60 | "\"rotate_mode\" determines how the patterns from the dataset gets rotated. modes (0,1,2,3,4)\n", 61 | "- 0 = no rotation\n", 62 | "- 1 = random\n", 63 | "- 2 = mirror\n", 64 | "- 3 = circulate\n", 65 | "- 4 = circulate + mirror" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "from mania_act_rhythm_calc import *\n", 75 | "\n", 76 | "model = step5_load_model();\n", 77 | "npz = step5_load_npz();\n", 78 | "params = step5_set_params(note_density=0.6, hold_favor=0.2, divisor_favor=[0] * divisor, hold_max_ticks=8, hold_min_return=1, rotate_mode=4);\n", 79 | "\n", 80 | "predictions = step5_predict_notes(model, npz, params);" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "notes_each_key = step5_build_pattern(predictions, params);" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "Do a little modding to the map.\n", 97 | "\n", 98 | "Parameters:\n", 99 | "\n", 100 | "- key_fix: remove continuous notes on single key modes (0,1,2,3) 0=inactive 1=remove late note 2=remove early note 3=divert
\n", 101 | " should be set to 0 for low key count" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "modding_params = {\n", 111 | " \"key_fix\" : 3\n", 112 | "}\n", 113 | "\n", 114 | "notes_each_key = mania_modding(notes_each_key, modding_params);\n", 115 | "notes, key_count = merge_objects_each_key(notes_each_key)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "Finally, save the data into an .osu file!" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "from mania_act_final import *\n", 132 | "\n", 133 | "saved_osu_name = step8_save_osu_mania_file(notes, key_count);\n", 134 | "\n", 135 | "# clean up the folder\n", 136 | "step8_clean_up();" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "If it works alright, you should have a nice .osu file under the folder of these notebooks now!\n", 144 | "\n", 145 | "If it does not work, please tell me the problem so probably I could fix it!\n", 146 | "\n", 147 | "For bug reports and feedbacks either report it on github or use discord:
\n", 148 | "[https://discord.com/invite/npmSy7K](https://discord.com/invite/npmSy7K)\n", 149 | "\n", 150 | "" 151 | ] 152 | } 153 | ], 154 | "metadata": { 155 | "kernelspec": { 156 | "display_name": "Python 3", 157 | "language": "python", 158 | "name": "python3" 159 | }, 160 | "language_info": { 161 | "codemirror_mode": { 162 | "name": "ipython", 163 | "version": 3 164 | }, 165 | "file_extension": ".py", 166 | "mimetype": "text/x-python", 167 | "name": "python", 168 | "nbconvert_exporter": "python", 169 | "pygments_lexer": "ipython3", 170 | "version": "3.8.3" 171 | } 172 | }, 173 | "nbformat": 4, 174 | "nbformat_minor": 2 175 | } 176 | -------------------------------------------------------------------------------- /v7.0/mania_act_data_prep.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Part 1 action script 5 | # 6 | 7 | from mania_audio_tools import *; 8 | 9 | import os, re, time; 10 | 11 | mapdata_path = "mapdata/"; 12 | try: 13 | divisor = GLOBAL["divisor"]; 14 | except: 15 | divisor = 4; 16 | 17 | def step1_load_maps(): 18 | # Test paths and node 19 | test_process_path("node"); 20 | if not os.path.isdir(mapdata_path): 21 | os.mkdir(mapdata_path); 22 | 23 | # Open maplist 24 | with open("maplist.txt", encoding="utf8") as fp: 25 | fcont = fp.readlines(); 26 | 27 | # Reset results 28 | results = []; 29 | for line in fcont: 30 | results.append(line); 31 | 32 | # Remove maps 33 | for file in os.listdir(mapdata_path): 34 | if file.endswith(".npz"): 35 | os.remove(os.path.join(mapdata_path, file)); 36 | 37 | print("Number of filtered maps: {}".format(len(results))); 38 | 39 | for k, mname in enumerate(results): 40 | try: 41 | start = time.time() 42 | read_and_save_osu_file(mname.strip(), filename=os.path.join(mapdata_path, str(k)), divisor=divisor); 43 | end = time.time() 44 | print("Map data #" + str(k) + " saved! 
def get_osu_file_name(metadata):
    """Construct the output .osu file name from map metadata.

    Follows osu!'s "Artist - Title (Creator) [Diffname].osu" convention;
    the artist part (and its " - " separator) is omitted when empty.
    Characters outside a filesystem-safe whitelist are stripped.

    Args:
        metadata: dict with "artist", "title", "creator", "diffname" strings.

    Returns:
        The sanitized file name string.
    """
    artist = metadata["artist"];
    title = metadata["title"];
    creator = metadata["creator"];
    diffname = metadata["diffname"];
    outname = (artist+" - " if len(artist) > 0 else "") + title + " (" + creator + ") [" + diffname + "].osu";
    # bug fix: use a raw string for the regex; the original non-raw string
    # relied on invalid escape sequences like "\(" (DeprecationWarning now,
    # SyntaxError in future Python versions)
    outname = re.sub(r"[^a-zA-Z0-9\(\)\[\] \.\,\!\~\`\{\}\-\_\=\+\&\^\@\#\$\%\;\']","", outname);
    return outname;
def step8_clean_up():
    """Best-effort removal of the intermediate files left by the pipeline.

    Missing files (or files that cannot be removed) are silently skipped.
    """
    # bug fix: this module only imports re/json/datetime at top level, so
    # `os` was not guaranteed to be in scope (it may or may not come from
    # `from os_tools import *`); the bare `except` silently swallowed the
    # resulting NameError, making the cleanup a potential no-op.
    import os;
    # clean up intermediate files
    for item in ["mapthis.json", "audio.mp3", "timing.osu", "rhythm_data.npz", "mapthis.npz", "temp_json_file.json", "wavfile.wav", "temp/temp_json_file.json", "temp/wavfile.wav", "evaluatedRhythm.json"]:
        try:
            os.remove(item);
        except OSError:
            # file absent or not removable: deliberately best-effort
            pass
def get_freqs(sig, fft_size):
    """FFT the signal and split each positive-frequency bin into magnitude/phase.

    Args:
        sig: 1-D sample array (np.fft.fft zero-pads / truncates to fft_size).
        fft_size: FFT length; only the first fft_size//2 bins are returned.

    Returns:
        (La, Lg): magnitude and phase arrays, each of length fft_size//2.
    """
    Lf = np.fft.fft(sig, fft_size);
    # keep only the positive-frequency half; the original code sliced this
    # already-halved array a second time, which was a redundant no-op
    Lc = Lf[0:fft_size//2];
    La = np.abs(Lc);
    Lg = np.angle(Lc);
    return La, Lg;
def get_wav_data_at(ms, sig, samplerate, fft_size=2048, freq_low=0, freq_high=-1):
    """Spectral snapshot (magnitude, phase) of the audio around time `ms`.

    freq_low / freq_high bound the returned frequency range in Hz;
    freq_high == -1 means "up to the Nyquist frequency" (samplerate // 2).
    """
    if freq_high == -1:
        freq_high = samplerate//2;
    # grab an fft_size-sample window centered on the timestamp
    window = slice_wave_at(ms, sig, samplerate, fft_size);

    # NOTE: maps are usually not mapped to stereo, so a mono mix
    # (lrmix) was considered here but is unused in this version

    magnitudes, phases = get_freqs(window, fft_size);

    # convert the Hz bounds into FFT bin indices and crop both arrays
    bin_lo = fft_size*freq_low//samplerate;
    bin_hi = fft_size*freq_high//samplerate;
    return magnitudes[bin_lo:bin_hi], phases[bin_lo:bin_hi];
def mania_transformed_lst_data(data):
    """Re-encode the note_type column (index 3) as flag columns.

    Each input row is [TICK, TIME, NOTE, NOTE_TYPE, ...6 extras]; the single
    NOTE_TYPE value is expanded into five flag columns so every output row is
    [d0, d1, d2, f0, f1, f2, f3, f4, d4, ..., d9].

    Replaces the original copy-pasted if/elif chain with a lookup table
    (same output for every input).
    """
    # note_type -> (flag columns 3..7); unknown types map to all zeros
    flags_by_type = {
        1: (1, 0, 0, 1, 0),
        2: (0, 1, 0, 0, 0),
        3: (1, 1, 0, 1, 0),
        4: (0, 0, 0, 1, 0),
    };
    transformed_data = [];
    for d in data:
        flags = flags_by_type.get(d[3], (0, 0, 0, 0, 0));
        transformed_data.append([d[0], d[1], d[2], *flags, d[4], d[5], d[6], d[7], d[8], d[9]]);
    return transformed_data;
NOTE, IS_CIRCLE, IS_SLIDER, IS_SPINNER, IS_NOTE_END, UNUSED, SLIDING, SPINNING, MOMENTUM, EX1, EX2, EX3], 139 | # 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 140 | # - "wav" array, shape of [len(snapsize), MAPTICKS, 2, fft_size//4] 141 | # - "pattern" array, shape [num_groups, main_metronome * divisor, 2 * key_count + 1] 142 | # [:, :, 0] pattern_avail_hold 143 | # [:, :, 1:1+key_count] pattern_note_begin 144 | # [:, :, 1+key_count:1+2*key_count] pattern_note_end 145 | # 146 | # MAPTICKS = (Total map time + 3000) / tickLength / (divisor = 4) - EMPTY_TICKS 147 | # EMPTY_TICKS = ticks where no note around in 5 secs 148 | """ 149 | osu_dict, wav_file = read_osu_file(path, convert = True); 150 | data, pattern_data = get_map_notes_and_patterns(osu_dict, divisor=divisor); 151 | timestamps = [c[1] for c in data]; 152 | wav_data = read_wav_data(timestamps, wav_file, snapint=[-0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3], fft_size = 128); 153 | # in order to match first dimension 154 | wav_data = np.swapaxes(wav_data, 0, 1); 155 | 156 | # change the representation of note_type 157 | # a bit of copypaste code because I changed the data structure many times here 158 | transformed_data = mania_transformed_lst_data(data); 159 | 160 | np.savez_compressed(filename, lst = transformed_data, wav = wav_data, pattern = pattern_data); 161 | 162 | def read_and_save_osu_tester_file(path, filename = "saved", json_name="mapthis.json", divisor=4): 163 | osu_dict, wav_file = read_osu_file(path, convert = True, json_name=json_name); 164 | sig, samplerate = librosa.load(wav_file, sr=None, mono=True); 165 | file_len = (sig.shape[0] / samplerate * 1000 - 3000); 166 | 167 | # ticks = ticks from each uninherited timing section 168 | ticks, timestamps, tick_lengths, slider_lengths = get_all_ticks_and_lengths_from_ts(osu_dict["timing"]["uts"], osu_dict["timing"]["ts"], file_len, divisor=divisor); 169 | 170 | # old version to determine ticks (all from start) 171 | # ticks = np.array([i for i,k in enumerate(timestamps)]); 
def load_pretrained_model(model_name):
    """Return the configuration bundle for a pretrained mania model.

    Args:
        model_name: "default", "lowkey" or "highkey"; any unknown name
            falls back to the "default" entry (whose paths are built from
            the given name, mirroring the original behavior).

    Returns:
        dict with "rhythm_model" path, "pattern_dataset" path,
        "rhythm_param" list and "modding" options.
    """
    model_data = {
        "default" : {
            "rhythm_model" : "models/{}/rhythm_model".format(model_name),
            "pattern_dataset" : "models/{}/mania_pattern_dataset.npz".format(model_name),
            "rhythm_param" : [0.5, 0.2, [0, 0, 0, 0], 8, 1, 4],
            "modding" : {
                "key_fix" : 3
            }
        },
        "lowkey" : {
            # fixed paths: the original applied pointless .format() calls
            # to strings with no placeholders (dead copy-paste)
            "rhythm_model" : "models/mania_lowkey/rhythm_model",
            "pattern_dataset" : "models/mania_pattern/mania_pattern_dataset.npz",
            "rhythm_param" : [0.65, 0.4, [0, 0, 0, 0], 8, 5, 4],
            "modding" : {
                "key_fix" : 0
            }
        },
        "highkey" : {
            "rhythm_model" : "models/mania_highkey/rhythm_model",
            "pattern_dataset" : "models/mania_pattern/mania_pattern_dataset.npz",
            "rhythm_param" : [0.45, 0.12, [0, 0, 0, 0], 8, 5, 4],
            "modding" : {
                "key_fix" : 3
            }
        }
    };
    return model_data.get(model_name, model_data["default"]);
-------------------------------------------------------------------------------- /v7.0/maplist_maker/html/font-face.css: -------------------------------------------------------------------------------- 1 | @font-face{ 2 | font-family: Inter; 3 | font-style: normal; 4 | font-display: swap; 5 | src: url(Inter-Regular.osu.woff2) format("woff2"), 6 | url(Inter-Regular.osu.woff) format("woff"); 7 | } 8 | @font-face{ 9 | font-family: Torus; 10 | font-style: normal; 11 | font-display: swap; 12 | src: url(Torus-Regular.osu.otf); 13 | } -------------------------------------------------------------------------------- /v7.0/maplist_maker/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | osumapper maplist generator 7 | 8 | 9 | 10 | 12 | 13 | 14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
Filter:
58 |
Mode:
63 |
Mode:
68 | 69 | 70 |
Value:
71 |
Reverse:
72 |
73 |
74 | 75 | 76 | 77 | 78 | 79 | 80 |
81 | 82 |
83 |
84 | 85 |
86 |
.col-right {
  /* bug fix: the original had no semicolon after "4/5", making the browser
     parse "grid-column: 4/5 display" as one invalid declaration and drop
     both properties (the column placement AND the grid display) */
  grid-column: 4/5;
  display: grid;
  grid-template-columns: auto;
  grid-template-rows: 1fr;
  grid-gap: 5px;
}
.textarea-output { 87 | height: 100%; 88 | width: 100%; 89 | } 90 | 91 | .checkbox-wrapper { 92 | line-height: 40px; 93 | } 94 | 95 | .filtered-count { 96 | text-align: right; 97 | line-height: 40px; 98 | padding-right: 20px; 99 | } 100 | 101 | table { 102 | border-spacing: 10px; 103 | border-collapse: separate; 104 | } 105 | 106 | .td-maplist { 107 | max-width: calc(10.7142857vw - 33px); 108 | height: 25px; 109 | white-space: nowrap; 110 | margin-bottom: 5px; 111 | text-overflow: ellipsis; 112 | overflow: hidden; 113 | background: rgba(230, 230, 255, .2); 114 | padding: 0 3px 0 2px; 115 | } 116 | .maplist-table-wrapper { 117 | display: grid; 118 | grid-template-columns: 1fr 1fr 1fr 1fr 5px; 119 | grid-template-rows: auto; 120 | grid-gap: 5px; 121 | overflow-y: scroll; 122 | overflow-x: hidden; 123 | } -------------------------------------------------------------------------------- /v7.0/maplist_maker/html/main.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const propertyRetrievers = [ 4 | a => a.artist_name, 5 | a => a.artist_name_unicode, 6 | a => a.song_title, 7 | a => a.song_title_unicode, 8 | a => a.creator_name, 9 | a => a.difficulty, 10 | a => a.song_source, 11 | a => a.song_tags, 12 | a => a.artist_name + "\t" + a.artist_name_unicode + "\t" + a.song_title + "\t" + a.song_title_unicode + "\t" + a.creator_name + "\t" + a.difficulty + "\t" + a.song_source + "\t" + a.song_tags, 13 | a => a.ranked_status, 14 | a => a.n_hitcircles, 15 | a => a.n_sliders, 16 | a => a.n_spinners, 17 | a => a.n_hitcircles + a.n_sliders + a.n_spinners, 18 | a => a.timing_points.length > 0 ? 
// Render a 2-D array into the given column containers.
// Each row of `array` is spread across the selectors in `baseQSList`
// (one cell <div> per column); existing container contents are cleared
// first. When `actionFunction` is provided, every cell gets a click
// handler bound to its row index.
function writeTable(array, baseQSList, className, actionFunction) {
    for (const selector of baseQSList) {
        q(selector).innerHTML = "";
    }
    array.forEach((row, rowIndex) => {
        row.forEach((cellValue, colIndex) => {
            const cell = ce("div");
            cell.className = className || "";
            cell.textContent = cellValue;
            if (actionFunction) {
                cell.addEventListener("click", actionFunction.bind(null, rowIndex));
            }
            q(baseQSList[colIndex]).appendChild(cell);
        });
    });
}
// Return the subset of `maps` matching one filter condition.
// `type` picks a property retriever; text properties use the text
// comparators (equals / contains / regex), numeric ones the numeric
// comparators (>= / <= / ==). `rev` inverts the match.
function filterMaps(maps, type, method, value, rev) {
    const isText = propertyIsText(type);
    const compare = isText ? propertyComparatorsText[method] : propertyComparatorsNumber[method];
    if (!isText) {
        value = parseFloat(value);
    }
    let matches;
    if (isText && method == 2) {
        // regex mode: the comparator expects a RegExp as its second argument
        // (constructed per call, exactly as the original did)
        matches = m => compare(propertyRetrievers[type](m), new RegExp(value, "i"));
    } else {
        matches = m => compare(propertyRetrievers[type](m), value);
    }
    return rev ? maps.filter(m => !matches(m)) : maps.filter(matches);
}
// Keep a uniformly random half of the current map set and refresh the list.
// bug fix: the original used sort(_ => Math.random() - 0.5), a well-known
// biased shuffle (comparison sorts require a consistent comparator and the
// bias depends on the engine's sort algorithm); replaced with Fisher-Yates.
function randomHalf() {
    const shuffled = currentMapset.slice();
    for (let i = shuffled.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        [shuffled[i], shuffled[j]] = [shuffled[j], shuffled[i]];
    }
    currentMapset = shuffled.slice(0, Math.floor(shuffled.length / 2));
    updateMapDisplay();
}
runFilterAction()); 198 | q(".button-reset-search").addEventListener("click", evt => resetFilters()); 199 | q(".button-remove-submode").addEventListener("click", evt => removeSubmode()); 200 | q(".button-random-half").addEventListener("click", evt => randomHalf()); 201 | q(".button-save-maplist").addEventListener("click", evt => saveMaplist(q(".textarea-output").value)); 202 | q(".button-add-to-maplist").addEventListener("click", evt => addToMaplist(currentMapset)); 203 | 204 | } 205 | 206 | init(); -------------------------------------------------------------------------------- /v7.0/maplist_maker/osu-db-parser/index.js: -------------------------------------------------------------------------------- 1 | const OsuDBParser = require("./src/OsuDB"); 2 | 3 | module.exports = OsuDBParser; -------------------------------------------------------------------------------- /v7.0/maplist_maker/osu-db-parser/src/OsuDB.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const osuBuffer = require('osu-buffer'); 3 | const Reader = require("./Reader"); 4 | const { osuDbStruct, collectionsStruct } = require("./Struct"); 5 | 6 | class OsuDBParser { 7 | /** 8 | * @param {Buffer} osuDbBuffer 9 | * @param {Buffer} osuCollectionBuffer 10 | */ 11 | constructor(osuDbBuffer=null, osuCollectionBuffer=null) { 12 | this.reader = new Reader(); 13 | 14 | this.canGetDBData = (osuDbBuffer !== null) 15 | this.canGetCollectionData = (osuCollectionBuffer !== null) 16 | 17 | if (this.canGetDBData) { 18 | this.dbfile = osuBuffer.from(osuDbBuffer); 19 | let dbosuData = this.reader.UnmarshalPacket(this.dbfile, osuDbStruct) 20 | dbosuData.isLocked = !dbosuData.isLocked 21 | this.osuDBData = dbosuData 22 | } 23 | if (this.canGetCollectionData) { 24 | this.collectionDB = osuBuffer.from(osuCollectionBuffer); 25 | let collectionData = this.reader.UnmarshalPacket(this.collectionDB, collectionsStruct) 26 | this.collectionData = collectionData 
27 | } 28 | } 29 | 30 | /** 31 | * Set a buffer and parse him ;d 32 | * @param {String} type 33 | * @param {OsuBuffer} buffer 34 | * @return {Boolean} 35 | */ 36 | setBuffer(type, buffer) { 37 | switch(type) { 38 | case "osudb": { 39 | try { 40 | this.dbfile = osuBuffer.from(buffer); 41 | let dbosuData = this.reader.UnmarshalPacket(this.dbfile, osuDbStruct) 42 | dbosuData.isLocked = !dbosuData.isLocked 43 | this.osuDBData = dbosuData 44 | this.canGetDBData = true; 45 | } catch (e) { 46 | console.log("Error while we had tried parse osu!.db") 47 | console.log(e); 48 | } 49 | break; 50 | } 51 | case "collection": { 52 | try { 53 | this.collectionDB = osuBuffer.from(osuCollectionBuffer); 54 | let collectionData = this.reader.UnmarshalPacket(this.collectionDB, collectionsStruct) 55 | this.collectionData = collectionData 56 | this.canGetCollectionData = true; 57 | } catch (e) { 58 | console.log("Error while we had tried parse collection.db") 59 | console.log(e); 60 | } 61 | break; 62 | } 63 | } 64 | return true; 65 | } 66 | 67 | /** 68 | * Get osu DB data if present 69 | * @return {Object} 70 | */ 71 | getOsuDBData() { 72 | return (this.canGetDBData) ? this.osuDBData : null; 73 | } 74 | 75 | /** 76 | * Get collection DB data if present 77 | * @return {Object} 78 | */ 79 | getCollectionData() { 80 | return (this.canGetCollectionData) ? this.collectionData : null; 81 | } 82 | } 83 | 84 | module.exports = OsuDBParser 85 | -------------------------------------------------------------------------------- /v7.0/maplist_maker/osu-db-parser/src/Reader.js: -------------------------------------------------------------------------------- 1 | /* Reader base from osu-packet! 
*/ 2 | const OsuBuffer = require('osu-buffer'); 3 | 4 | class Reader { 5 | constructor() { 6 | } 7 | 8 | /** 9 | * Reads a set of data from a buffer 10 | * @param {OsuBuffer} buff 11 | * @param {Object} layout 12 | * @param {null|Number|Boolean|Object|Array|String} requires 13 | * @param {Object|Array} data 14 | * @return {Object|Array} 15 | */ 16 | Read(buff, layout, data = {}) { 17 | switch (layout.type.toLowerCase()) { 18 | case 'int8': 19 | data = buff.ReadInt8(); 20 | break; 21 | case 'uint8': 22 | data = buff.ReadUInt8(); 23 | break; 24 | case 'int16': 25 | data = buff.ReadInt16(); 26 | break; 27 | case 'uint16': 28 | data = buff.ReadUInt16(); 29 | break; 30 | case 'int32': 31 | data = buff.ReadInt32(); 32 | break; 33 | case 'uint32': 34 | data = buff.ReadUInt32(); 35 | break; 36 | case 'int64': 37 | data = buff.ReadInt64(); 38 | break; 39 | case 'uint64': 40 | data = buff.ReadUInt64(); 41 | break; 42 | case 'string': 43 | data = buff.ReadOsuString(); 44 | break; 45 | case 'float': 46 | data = buff.ReadFloat(); 47 | break; 48 | case 'double': 49 | data = buff.ReadDouble(); 50 | break; 51 | case 'boolean': 52 | data = buff.ReadBoolean(); 53 | break; 54 | case 'byte': 55 | data = buff.ReadByte(); 56 | break; 57 | case 'int32array': { 58 | let len = buff.ReadInt16(); 59 | data = []; 60 | for (let i = 0; i < len; i++) { 61 | data.push(buff.ReadInt32()); 62 | } 63 | break; 64 | } 65 | case "collections": { 66 | let collectionsCount = data['collectionscount']; 67 | data = []; 68 | for (let i=0; i < collectionsCount; i++) { 69 | let collection = { 70 | 'name': buff.ReadOsuString(), 71 | 'beatmapsCount': buff.ReadInt32(), 72 | 'beatmapsMd5': [] 73 | } 74 | 75 | for (let i=0; i= 20140609) { 130 | let difficulties = [] 131 | 132 | for(let i = 0; i<4; i++) { 133 | let length = buff.ReadInt32() 134 | let diffs = {} 135 | for(let i=0; i { 231 | if(item.uses) { 232 | let needelements = item.uses.split(",") 233 | let dater = {} 234 | for (let datak of needelements) { 235 | 
dater[datak] = data[datak] 236 | } 237 | 238 | data[item.name] = this.Read(buff, item, item.uses ? dater : null); 239 | } else { 240 | data[item.name] = this.Read(buff, item); 241 | } 242 | }); 243 | } else if (layout instanceof Object) { 244 | data = this.Read(buff, layout); 245 | } 246 | return data; 247 | } 248 | 249 | } 250 | 251 | module.exports = Reader; -------------------------------------------------------------------------------- /v7.0/maplist_maker/osu-db-parser/src/Struct.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | osuDbStruct: [ 3 | {name: 'osuver', type: 'int32'}, 4 | {name: 'folder_count', type: 'int32'}, 5 | {name: 'is_locked', type: 'boolean'}, 6 | {name: 'date_unlock_ticks', type: 'int64'}, 7 | {name: 'username', type: 'string'}, 8 | {name: 'beatmaps_count', type: 'int32'}, 9 | {name: 'beatmaps', type: 'beatmaps', uses: 'osuver,beatmaps_count'}, 10 | {name: 'userperms', type: 'int32'} 11 | ], 12 | collectionsStruct: [ 13 | {name: 'osuver', type: 'int32'}, 14 | {name: 'collectionscount', type: 'int32'}, 15 | {name: 'collection', type: 'collections', uses: 'collectionscount'} 16 | ] 17 | } -------------------------------------------------------------------------------- /v7.0/maplist_maker/osuDBGetter.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require("fs"); 4 | const OsuDBParser = require("./osu-db-parser/index.js"); 5 | 6 | function getOsuDB(path) { 7 | let osuDBbuffer = Buffer.from(fs.readFileSync(path)); 8 | const osuDB = new OsuDBParser(osuDBbuffer); 9 | 10 | return osuDB.getOsuDBData(); 11 | } 12 | 13 | module.exports = getOsuDB; -------------------------------------------------------------------------------- /v7.0/maplist_maker/osuPathFinder.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let regedit = require("regedit"); 4 | const 
readline = require("readline"); 5 | 6 | async function findOsuPath() { 7 | try { 8 | let result = await new Promise((res, rej) => { 9 | regedit.list('HKCR\\osu!\\shell\\open\\command', (e, r) => e ? rej(e) : res(r)); 10 | }); 11 | let osuExePath = result['HKCR\\osu!\\shell\\open\\command'].values[''].value.replace(/^["']/, "").replace(/['"]? ?"%1"/, ""); 12 | return { 13 | exe: osuExePath, 14 | root: osuExePath.replace(/osu!\.exe$/, ""), 15 | db: osuExePath.replace(/osu!\.exe$/, "osu!.db") 16 | }; 17 | } 18 | catch(e) { 19 | const rl = readline.createInterface({ 20 | input: process.stdin, 21 | output: process.stdout 22 | }); 23 | return await new Promise(res => { 24 | rl.question("Input osu! install path (ex: C:\\osu!): ", function(p) { 25 | p = p.replace(/\//, "\\"); 26 | if(!/\\$/.test(p)) { 27 | p += "\\"; 28 | } 29 | rl.close(); 30 | res({ 31 | exe: p + "osu!.exe", 32 | root: p, 33 | db: p + "osu!.db" 34 | }); 35 | }); 36 | }); 37 | } 38 | } 39 | 40 | module.exports = findOsuPath; -------------------------------------------------------------------------------- /v7.0/metadata.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Metadata of .osu 5 | # 6 | 7 | import numpy as np 8 | 9 | def get_difficulty_name(): 10 | """ 11 | Obtain a random difficulty name 12 | """ 13 | diffs = ["Easy", "Normal", "Hard", "Insane", "Lunatic", "Extra", "Beginner", "Hyper", "Another", "Basic", "Novice", "Advanced", 14 | "Hell", "Expert", "Extra Stage", "Collab", "Colab", "FOUR DIMENSIONS", ".-- .-. --- -. --. .-- .- -.--"] 15 | return diffs[np.random.randint(0,len(diffs))] 16 | 17 | def hsv_to_rgb(h, s, v): 18 | """ 19 | Taken from stackoverflow 24852345 20 | """ 21 | if s == 0.0: return (v, v, v) 22 | i = int(h*6.) 
23 | f = (h*6.)-i; p,q,t = v*(1.-s), v*(1.-s*f), v*(1.-s*(1.-f)); i%=6 24 | if i == 0: return (v, t, p) 25 | if i == 1: return (q, v, p) 26 | if i == 2: return (p, v, t) 27 | if i == 3: return (p, q, v) 28 | if i == 4: return (t, p, v) 29 | if i == 5: return (v, p, q) 30 | 31 | def hsv_to_rgb_255(h, s, v): 32 | return tuple(round(255 * i) for i in hsv_to_rgb(h, s, v)) 33 | 34 | def get_color(): 35 | return "{},{},{}".format(*hsv_to_rgb_255(np.random.random(), 0.5, 1)) 36 | 37 | def get_colors(): 38 | """ 39 | Obtain a list of 5-8 random bright colors 40 | """ 41 | count = np.random.randint(4,9) 42 | text_list = [] 43 | for i in range(1, 1+count): 44 | text_list.append("Combo{} : {},{},{}".format(i, *hsv_to_rgb_255(np.random.random(), 0.4 + np.random.random() * 0.4, 1))) 45 | return "\n".join(text_list) 46 | -------------------------------------------------------------------------------- /v7.0/models/catch/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/catch/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/catch/maps.txt: -------------------------------------------------------------------------------- 1 | Afilia Saga - S.M.L (TV size) ([_-Kukkai-_]) [Greaper's Overdose].osu 2 | Afilia Saga - S.M.L (TV size) ([_-Kukkai-_]) [Overdose].osu 3 | Afilia Saga - S.M.L (TV size) ([_-Kukkai-_]) [Rain].osu 4 | BLANKFIELD - Retrospective City (Nelly) [Collab Rain].osu 5 | BLANKFIELD - Retrospective City (Nelly) [Overdose].osu 6 | BLANKFIELD - Retrospective City (Nelly) [Platter].osu 7 | boy pablo - wtf (-Joni-) [rain].osu 8 | Chito (CV Minase Inori), Yuuri (CV Kubo Yurika) - More One Night (Assertive Hardcore Bootleg) [long ver.] 
(Spectator) [One More Time].osu 9 | Chito (CV Minase Inori), Yuuri (CV Kubo Yurika) - More One Night (Assertive Hardcore Bootleg) [long ver.] (Spectator) [Rei's Platter].osu 10 | Chito (CV Minase Inori), Yuuri (CV Kubo Yurika) - More One Night (Assertive Hardcore Bootleg) [long ver.] (Spectator) [tasuke's Rain].osu 11 | ColorsSlash - Colors Power ni Omakasero! (Sober Bear Remix) (Jemzuu) [Overdose].osu 12 | ColorsSlash - Colors Power ni Omakasero! (Sober Bear Remix) (Jemzuu) [Rain].osu 13 | Fractal Dreamers - Ex Nihilo (wonjae) [Dapu's Rain].osu 14 | Fractal Dreamers - Ex Nihilo (wonjae) [Genesis].osu 15 | halca - Kokuhaku Bungee Jump (Spectator) [Overdose].osu 16 | halca - Kokuhaku Bungee Jump (Spectator) [Platter].osu 17 | halca - Kokuhaku Bungee Jump (Spectator) [Rain].osu 18 | Hoshimachi Suisei - comet (Nelly) [hex's platter].osu 19 | Hoshimachi Suisei - comet (Nelly) [jemzuu's rain].osu 20 | Hoshimachi Suisei - comet (Nelly) [overdose].osu 21 | Hyuji feat. LIQU@. - Mermaid girl (Tropical Remix) (Ascendance) [Brunoob's MAXIMUM].osu 22 | Hyuji feat. LIQU@. - Mermaid girl (Tropical Remix) (Ascendance) [GRAVITY].osu 23 | Hyuji feat. LIQU@. - Mermaid girl (Tropical Remix) (Ascendance) [Rew's ADVANCED].osu 24 | Hyuji feat. LIQU@. - Mermaid girl (Tropical Remix) (Ascendance) [Spec's EXHAUST].osu 25 | Infected Mushroom - Never Mind (WildOne94) [Forget About It!].osu 26 | Kagemori Michiru (CV Morohoshi Sumire) - Ready to (TV Size) (Chatie) [Baron's Rain].osu 27 | Kagemori Michiru (CV Morohoshi Sumire) - Ready to (TV Size) (Chatie) [Greaper's Overdose].osu 28 | Kagemori Michiru (CV Morohoshi Sumire) - Ready to (TV Size) (Chatie) [Overdose].osu 29 | KikuoHana - Nobore! Susume! 
Takai Tou (dika312) [The Grand Tower].osu 30 | Kola Kid - press start (-Joni-) [Dapu's Rain].osu 31 | Kola Kid - press start (-Joni-) [Overdose].osu 32 | Little Glee Monster - OVER (TV Size) (Secre) [Platter].osu 33 | Little Glee Monster - OVER (TV Size) (Secre) [Rain].osu 34 | Little_Glee_Monster_-_OVER_TV_Size_Secre_Bout_of_Friendship.osu 35 | Miki Sayaka vs. Miki Sayaka (fw. Miki Sayaka) - squartatrice (Ascendance) [celerih's Downfall].osu 36 | Miki Sayaka vs. Miki Sayaka (fw. Miki Sayaka) - squartatrice (Ascendance) [celerih's Rain].osu 37 | Miki Sayaka vs. Miki Sayaka (fw. Miki Sayaka) - squartatrice (Ascendance) [Deif's Overdose].osu 38 | Miki Sayaka vs. Miki Sayaka (fw. Miki Sayaka) - squartatrice (Ascendance) [ZiRoX's Platter].osu 39 | MIMI - Nanimo nai Youna (JBHyperion) [Platter].osu 40 | MIMI - Nanimo nai Youna (JBHyperion) [Rain].osu 41 | MIMI feat. umu. - Ai no Sukima (Cut Ver.) (Mniam) [Platter].osu 42 | MIMI feat. umu. - Ai no Sukima (Cut Ver.) (Mniam) [Rain].osu 43 | MIMI feat. umu. - Ai no Sukima (Cut Ver.) (Mniam) [Sorrow].osu 44 | Nakanoke no Itsutsugo - Gotoubun no Kimochi (Imai Lisa) [Hexuluous' Rain].osu 45 | Nakanoke no Itsutsugo - Gotoubun no Kimochi (Imai Lisa) [Overdose].osu 46 | Nakanoke no Itsutsugo - Gotoubun no Kimochi (Imai Lisa) [Sc4's Platter].osu 47 | nanobii - Rainbow Road (Brunoob) [Kukkai's Rain].osu 48 | nanobii - Rainbow Road (Brunoob) [Special].osu 49 | nora2r - Memory Of Sunrise (JBHyperion) [Overdose].osu 50 | nora2r - Memory Of Sunrise (JBHyperion) [Rain].osu 51 | onoken - Amnolys (Syamu) [Jemzuu's Anomaly].osu 52 | onoken - Amnolys (Syamu) [Platter].osu 53 | onoken - Amnolys (Syamu) [Rain].osu 54 | P4koo - Hyperlight. (feat. Tsuyuri Karin) (Jemzuu) [Hyperion's Platter.].osu 55 | P4koo - Hyperlight. (feat. Tsuyuri Karin) (Jemzuu) [Spec's Rain.].osu 56 | P4koo - Hyperlight. (feat. 
Tsuyuri Karin) (Jemzuu) [Specuu's Overdose.].osu 57 | Pixel - Running Hell (Rocma) [Quote].osu 58 | Pixel - Running Hell (Rocma) [Roxy's Rain].osu 59 | sakuzyo - Magical Musical Master (Rocma) [Deluge].osu 60 | sakuzyo - Magical Musical Master (Rocma) [Nelly's Rain].osu 61 | sakuzyo - Magical Musical Master (Rocma) [Overdose].osu 62 | sakuzyo - Magical Musical Master (Rocma) [Platter].osu 63 | Shimotsuki Haruka - ReCall (-Luminate) [Overdose].osu 64 | Shimotsuki Haruka - ReCall (-Luminate) [Rain].osu 65 | Shimotsuki Haruka - ReCall (-Luminate) [Time Leap].osu 66 | ShinRa-Bansho - Junjou Armeria (Jemzuu) [Fragile Cherry Blossom].osu 67 | Shinra-Bansho - Junjou Armeria (Jemzuu) [Specuu's Rain].osu 68 | ShinRa-Bansho - Netaminity Theatre 666 (Spectator) [Eternal Jealousy Theatre].osu 69 | ShinRa-Bansho - Netaminity Theatre 666 (Spectator) [Specuu's Rain].osu 70 | siqlo - Me & U (Jemzuu) [Rain].osu 71 | siqlo - Me & U (Jemzuu) [Rocma's Overdose].osu 72 | siqlo - Me & U (Jemzuu) [Us].osu 73 | SPYAIR - I'm a Believer (TV Size) (Dako) [Overdose].osu 74 | SPYAIR - I'm a Believer (TV Size) (Dako) [Platter].osu 75 | SPYAIR - I'm a Believer (TV Size) (Dako) [Rain].osu 76 | SPYAIR - Imagination (TV Size) (Secre) [Dako's Rain].osu 77 | SPYAIR - Imagination (TV Size) (Secre) [Imagine].osu 78 | SPYAIR - Imagination (TV Size) (Secre) [Overdose].osu 79 | SPYAIR - Imagination (TV Size) (Secre) [Platter].osu 80 | SPYAIR - Imagination (TV Size) (Secre) [Rain].osu 81 | Suzuki Konomi - Realize (TV Size) (Greaper) [Overdose].osu 82 | Suzuki Konomi - Realize (TV Size) (Greaper) [Platter].osu 83 | Suzuki Konomi - Realize (TV Size) (Greaper) [Rain].osu 84 | Suzuki Konomi - Realize (TV Size) (Greaper) [Secre's Overdose].osu 85 | Suzuyu - Euphorium (-Joakh) [Spec's Rain].osu 86 | Suzuyu - Euphorium (-Joakh) [White Memory].osu 87 | team Umifure - DEEP BLUE TOWN e Oide yo (Spectator) [Overdose].osu 88 | team Umifure - DEEP BLUE TOWN e Oide yo (Spectator) [Platter].osu 89 | team Umifure - DEEP 
BLUE TOWN e Oide yo (Spectator) [Rain].osu 90 | Teminite & Evilwave - Rattlesnake (Absolute Zero) [Collab Overdose].osu 91 | Teminite & Evilwave - Rattlesnake (Absolute Zero) [Collab Rain].osu 92 | Teminite & Evilwave - Rattlesnake (Absolute Zero) [Hyperion's Platter].osu 93 | TERRASPEX - AMAZING BREAK (Spectator) [CRYSTAL SPEC'S DELUGE].osu 94 | TERRASPEX - AMAZING BREAK (Spectator) [NELLY & DU5T'S PLATTER].osu 95 | TERRASPEX - AMAZING BREAK (Spectator) [OVERDOSE].osu 96 | TERRASPEX - AMAZING BREAK (Spectator) [SANYI'S RAIN].osu 97 | Thaehan - Yuujou (Ascendance) [Platter].osu 98 | Thaehan - Yuujou (Ascendance) [Rain].osu 99 | Thaehan - Yuujou (Ascendance) [Sinnoh's Overdose].osu 100 | Thaehan - Yuujou (Ascendance) [Sinnoh's Platter].osu 101 | Thaehan - Yuujou (Ascendance) [Sinnoh's Rain].osu 102 | Tia - Deal with the devil (TV Size) (Nelly) [Du5t's Rain].osu 103 | Tia - Deal with the devil (TV Size) (Nelly) [Greaper's Light Rain].osu 104 | Tia - Deal with the devil (TV Size) (Nelly) [Overdose].osu 105 | Uinyasu, Occhoko Bunny - Aa Kenran no Yume ga Gotoku (Epsilon Remix) (-Luminate) [Platter].osu 106 | Uinyasu, Occhoko Bunny - Aa Kenran no Yume ga Gotoku (Epsilon Remix) (-Luminate) [Rain].osu 107 | Umeboshi Chazuke - ICHIBANBOSHIROCKET (Jemzuu) [OVERDOSE].osu 108 | Umeboshi Chazuke - ICHIBANBOSHIROCKET (Jemzuu) [PLATTER].osu 109 | Umeboshi Chazuke - ICHIBANBOSHIROCKET (Jemzuu) [RAIN].osu 110 | UNDEAD CORPORATION - Embraced by the Flame (Daletto) [Hex's Platter].osu 111 | UNDEAD CORPORATION - Embraced by the Flame (Daletto) [Rain].osu 112 | xi - Double Helix (Crowley) [Polymerized Nucleotide].osu 113 | YUC'e - Future Candy ([_-Kukkai-_]) [Dapu's Platter].osu 114 | YUC'e - Future Candy ([_-Kukkai-_]) [Nelly's Overdose].osu 115 | YUC'e - Future Candy ([_-Kukkai-_]) [Rain].osu 116 | YUC'e - Future Candy ([_-Kukkai-_]) [Sanyi's Overdose].osu 117 | Yuizuki Sora - Koi, Hitokuchi. (amulet-pp) [Platter].osu 118 | Yuizuki Sora - Koi, Hitokuchi. 
(amulet-pp) [Rain].osu 119 | Yuizuki Sora - Koi, Hitokuchi. (amulet-pp) [Syamu's Overdose].osu 120 | Yunomi - Wakusei Rabbit (feat. TORIENA) (-Luminate) [Hyperion's Platter].osu 121 | Yunomi - Wakusei Rabbit (feat. TORIENA) (-Luminate) [Planetarium].osu 122 | Yunomi - Wakusei Rabbit (feat. TORIENA) (-Luminate) [Rain].osu 123 | Zekk - MAHOROBA (Jemzuu) [BEYOND].osu 124 | Zekk - MAHOROBA (Jemzuu) [FUTURE].osu 125 | Zekk - MAHOROBA (Jemzuu) [PRESENT].osu 126 | ZUTOMAYO - Humanoid (Jemzuu) [Imai Lisa & Du5t's Platter].osu 127 | ZUTOMAYO - Humanoid (Jemzuu) [Liyac's Overdose].osu 128 | ZUTOMAYO - Humanoid (Jemzuu) [Paranoid].osu 129 | ZUTOMAYO - Humanoid (Jemzuu) [Rain].osu -------------------------------------------------------------------------------- /v7.0/models/catch/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/catch/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/cryo/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/cryo/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/cryo/maps.txt: -------------------------------------------------------------------------------- 1 | 10 - platonic colors (cRyo[iceeicee]) [www].osu 2 | 123 - DUNNO (cRyo[iceeicee]) [www].osu 3 | 3L - Be Happy (cRyo[iceeicee]) [Happy!!!].osu 4 | ALiCE'S EMOTiON - Mami Mami Zone (cRyo[iceeicee]) [Insane].osu 5 | ALiCE'S EMOTiON - Tsuioku Summer Night (ELEMENTAS Remix) (cRyo[iceeicee]) [Insane].osu 6 | Aso Natsuko - Lovely Girls Anthem -EuroBeatRemix- (cRyo[iceeicee]) [Insane].osu 7 | ave;new - unknown (cRyo[iceeicee]) [www].osu 8 | ave;new feat. 
Avenew Project - Lovely Angel!! (cRyo[iceeicee]) [www].osu 9 | Chata - Shinjuku Maze (cRyo[iceeicee]) [hoLysoup ^^].osu 10 | choucho - 218 (cRyo[iceeicee]) [hmm].osu 11 | Daiichi Uchuu Sokudo - Superorbital (cRyo[iceeicee]) [www].osu 12 | Daito Giken - Miracle Highway (cRyo[iceeicee]) [Insane].osu 13 | DJ Dean - If I could be you (Dyamorph rmx) (cRyo[iceeicee]).osu 14 | EastNewSound - Kyun Kyun Tamaran Inaba-tan! (cRyo[iceeicee]) [www].osu 15 | Faylan - Last vision for last (TV Size) (cRyo[iceeicee]) [www].osu 16 | Hibino Maki, Aoba Ringo & Isuzu Asuka - Let's GO (cRyo[iceeicee]) [Insane].osu 17 | Hoobastank - Out Of Control (cRyo[iceeicee]) [Insane].osu 18 | Horie Yui - Sweet & Sweet CHERRY (TV Size) (cRyo[iceeicee]) [Insane].osu 19 | ICHIKO - I SAY YES (TV Size) (cRyo[iceeicee]) [www].osu 20 | Kitamura Eri - Shirushi (cRyo[iceeicee]) [Insane].osu 21 | KOKOMI - Windalia (cRyo[iceeicee]) [hmm].osu 22 | KOKOMI - Windalia (cRyo[iceeicee]) [www].osu 23 | KOTOKO - Flower (Short Ver.) (cRyo[iceeicee]) [Insane].osu 24 | KOTOKO - Oboetete Ii yo (cRyo[iceeicee]) [Insane].osu 25 | Kuribayashi Minami - Unreal Paradise (cRyo[iceeicee]) [hmm].osu 26 | Kurosaki Maon - Emergence! (cRyo[iceeicee]) [Insane].osu 27 | LiLA'c Records - Maze of Vapor (cRyo[iceeicee]) [www].osu 28 | Meramipop - algorhythm (cRyo[iceeicee]) [Insane].osu 29 | monoROSETTA - Koi no recipe to Et cetera (cRyo[iceeicee]) [Insane].osu 30 | Mu - Panorama chime (Short Ver.) (cRyo[iceeicee]) [Insane].osu 31 | Nakatsugawa Ui - Hoshizora e Tsuzuku Michi mo (cRyo[iceeicee]) [Insane].osu 32 | Nanahira - Fukkura Fuwaama Cake (cRyo[iceeicee]) [Collab].osu 33 | Nanahira - neko to switch (cRyo[iceeicee]) [Insane].osu 34 | Nanahira - Tobidase! 
Sweet Sweet Magic (cRyo[iceeicee]) [Insane].osu 35 | Nanahira, Mameko & Riko - Straight LOVE (cRyo[iceeicee]) [www].osu 36 | Pizuya's Cell - Losing your season (cRyo[iceeicee]) [Insane].osu 37 | Ryu feat.Ma15 - Smile 0 Yen (cRyo[iceeicee]) [Insane].osu 38 | Sakakibara Yui - 1 (cRyo[iceeicee]) [YUUUUUU].osu 39 | sphere - REALOVEREALIFE (cRyo[iceeicee]) [hmm].osu 40 | Spilling Star - MOLT (cRyo[iceeicee]) [Insane].osu 41 | Spilling Star - MOLT (cRyo[iceeicee]) [yyy].osu 42 | Tamura Yukari - You & Me feat.motsu from m.o.v.e (cRyo[iceeicee]) [Insane].osu 43 | Tamura Yukari feat. motsu from m.o.v.e - You & Me (cRyo[iceeicee]) [Insane].osu 44 | Tsukimura Mayu - Furefurepponpon! (cRyo[iceeicee]) [www].osu 45 | TWGOK - Elsie ^^ (cRyo[iceeicee]) [www].osu 46 | U - (cRyo[iceeicee]) [hmm].osu 47 | Yanagi Nagi - KILLER SONG (cRyo[iceeicee]) [].osu 48 | yanaginagi - Toaru kaizokuou no kimagure (cRyo[iceeicee]) [Insane].osu 49 | ZAQ - Alteration (cRyo[iceeicee]) [Insane].osu -------------------------------------------------------------------------------- /v7.0/models/cryo/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/cryo/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/default/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/default/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/default/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/default/rhythm_model 
-------------------------------------------------------------------------------- /v7.0/models/flower/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/flower/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/flower/maps.txt: -------------------------------------------------------------------------------- 1 | AKI AKANE - Hankyou no Barrier Seed (Milan-) [Expert].osu 2 | AKI AKANE - Hankyou no Barrier Seed (Milan-) [Hard].osu 3 | AKI AKANE - Hankyou no Barrier Seed (Milan-) [Insane].osu 4 | AKI AKANE - Hankyou no Barrier Seed (Milan-) [pishi's Extra].osu 5 | AKI AKANE - Hankyou no Barrier Seed (Milan-) [Saut's Extra].osu 6 | BlackY - FLOWER -SPRING Long VER.- (GimBab) [Spring].osu 7 | BLANKFIELD - Flowering Game Night (eiri-) [Extra].osu 8 | BLANKFIELD - Flowering Game Night (eiri-) [Hard].osu 9 | BLANKFIELD - Flowering Game Night (eiri-) [Lunatic].osu 10 | ChamJam - Clover wish (TV Size) (Asphyre) [Flowery Yearning].osu 11 | ChamJam - Clover wish (TV size) (Asphyre) [Shizuku's Wish].osu 12 | ChamJam - Clover wish (TV Size) (Nyantiaz) [Walao's Lovestruck].osu 13 | ClariS - Anemone (Azunyan-) [Florae].osu 14 | ClariS - Anemone -TV MIX- (Neoskylove) [Insane].osu 15 | ClariS - Anemone -TV MIX- (xxdeathx) [Classroom ClariS].osu 16 | ClariS - Anemone -TV MIX- (xxdeathx) [wkyik's Insane].osu 17 | Dance Gavin Dance - Tree Village (Dilectus) [Cool's Extra].osu 18 | Dance Gavin Dance - Tree Village (Dilectus) [Extra].osu 19 | Dance Gavin Dance - Tree Village (Dilectus) [Insane].osu 20 | Dance Gavin Dance - Tree Village (Dilectus) [Mathew's Hard].osu 21 | DUSTCELL - Anemone (Sparhten) [Agatsu's Extra].osu 22 | DUSTCELL - Anemone (Sparhten) [Collab Hard].osu 23 | DUSTCELL - Anemone (Sparhten) [Irrational].osu 24 | DUSTCELL - Anemone (Sparhten) [Neon's Insane].osu 
25 | Elu, SisterCleaire - Sunday Sunday Fruit Fool (Yugu) [Sweet].osu 26 | Eternal Melody - Rose Bud -I. into the sorrow- -II. into the fear of desire- (Plaudible) [The Blossoms of Redemption].osu 27 | Farhan feat. Gumi - Dandelion (Stixy) [browiec's Flower].osu 28 | Farhan feat. Gumi - Dandelion (Stixy) [Extra].osu 29 | Farhan feat. Gumi - Dandelion (Stixy) [PikA's Insane].osu 30 | fhana - Anemone no Hana (Sotarks) [Kalijaaz's Expert].osu 31 | fhana - Anemone no Hana (Sotarks) [Kowari's Hard].osu 32 | fhana - Anemone no Hana (Sotarks) [Melancholy].osu 33 | fhana - Anemone no Hana (Sotarks) [Reform's Insane].osu 34 | fhana - Comet Lucifer ~The Seed and the Sower~ (Nao Tomori) [Celsius' Insane].osu 35 | fhana - Comet Lucifer ~The Seed and the Sower~ (Nao Tomori) [Nathan's Extra].osu 36 | fhana - Comet Lucifer ~The Seed and the Sower~ (Nao Tomori) [toshitoshi's Hard].osu 37 | fhana - Comet Lucifer ~The Seed and the Sower~ (Sotarks) [Extra].osu 38 | fhana - Comet Lucifer ~The Seed and the Sower~ (Sotarks) [toybot's Insane].osu 39 | Fractal Dreamers - Gardens Under A Spring Sky ([Mahua]) [gary00737's Insane].osu 40 | Fractal Dreamers - Gardens Under A Spring Sky ([Mahua]) [Otomahua's Collab Another].osu 41 | Fractal Dreamers - Gardens Under A Spring Sky ([Mahua]) [Otomahua's Extra].osu 42 | Fractal Dreamers - Gardens Under A Spring Sky ([Mahua]) [Under].osu 43 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [AF's Another].osu 44 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [August's Extreme].osu 45 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [depzdai's Insane].osu 46 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [Hard].osu 47 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [Insane].osu 48 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [kiry's Expert].osu 49 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [Serendipity].osu 50 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [ser's 
Extreme].osu 51 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [Tatsuo's Extreme].osu 52 | Fractal Dreamers - Gardens Under A Spring Sky (_Epreus) [thiev's Extra].osu 53 | Fractal Dreamers - Gardens Under A Spring Sky (Lasse) [Another].osu 54 | Fractal Dreamers - Gardens Under A Spring Sky (Lasse) [Hard].osu 55 | Fractal Dreamers - Gardens Under A Spring Sky (Lasse) [Hyper].osu 56 | fripSide - floral summer (Dored) [Collab Insane].osu 57 | fripSide - floral summer (Dored) [Dalou's Floral Angel].osu 58 | fripSide - floral summer (Dored) [Kamiya's Insane].osu 59 | fripSide - floral summer (Dored) [Rio's Insane].osu 60 | ginkiha - Paved Garden (Leader) [Collab Insane].osu 61 | ginkiha - Paved Garden (Leader) [Extra].osu 62 | ginkiha - Paved Garden (Leader) [Hard].osu 63 | ginkiha - Paved Garden (Leader) [lfj's Extra].osu 64 | ginkiha - Paved Garden (Smug Nanachi) [extra].osu 65 | ginkiha - Paved Garden (Smug Nanachi) [fiore].osu 66 | ginkiha - Paved Garden (Smug Nanachi) [hard].osu 67 | ginkiha - Paved Garden (Smug Nanachi) [insane].osu 68 | goreshit - looming shadow of a tree long gone (grumd) [Extra].osu 69 | goreshit - looming shadow of a tree long gone (grumd) [Insane].osu 70 | Hanazono Tae (CV Otsuka Sae) - Hanazono Denki Guitar!!! (Akitoshi) [Deppy's Expert].osu 71 | Hanazono Tae (CV Otsuka Sae) - Hanazono Denki Guitar!!! (Akitoshi) [Expert].osu 72 | Hanazono Tae (CV Otsuka Sae) - Hanazono Denki Guitar!!! (Akitoshi) [Hard].osu 73 | Hanazono Tae (CV Otsuka Sae) - Hanazono Denki Guitar!!! (Akitoshi) [Insane].osu 74 | Hatsuki Yura - Ryuu to Sakura Miko (Beren) [Cilvery's Extra].osu 75 | Hatsuki Yura - Ryuu to Sakura Miko (Beren) [Extra].osu 76 | Hatsuki Yura - Ryuu to Sakura Miko (Beren) [Regraz's Insane].osu 77 | Hatsuki Yura - Ryuu to Sakura Miko (Beren) [vivicat's Hard].osu 78 | himmel feat. YooSanHyakurei - Maple Wind (- Matha -) [abstracted].osu 79 | In Love With A Ghost - Flowers feat. 
nori (LightsOut) [Reaching for Love].osu 80 | Kano - Ivy (GoldenMine) [Taeyang's Insane].osu 81 | Lifetheory - Daisy (Zare) [Blossom].osu 82 | Lifetheory - Daisy (Zare) [Bud].osu 83 | Look Vibrant - Cauliflower (P1Twist) [Collab Insane].osu 84 | Look Vibrant - Cauliflower (P1Twist) [Regret].osu 85 | Meramipop - Bad Apple!! (Hard) [Appleroval].osu 86 | MuryokuP - A Tree Without A Branch (Melwoine) [Axarious' Extra].osu 87 | MuryokuP - A Tree Without A Branch (Melwoine) [Collab Harder].osu 88 | MuryokuP - A Tree Without A Branch (Melwoine) [Collab insane].osu 89 | MuryokuP - A Tree Without A Branch (Melwoine) [keevy's hard].osu 90 | MuryokuP - A Tree Without A Branch (Melwoine) [Mun's Extreme].osu 91 | MuryokuP - A Tree Without A Branch (Melwoine) [Solros].osu 92 | nasu asaco - flora (Short Ver.) (Lunala) [Paradise Seeker].osu 93 | Nekomata Master - Greening (Lulu-) [Nokashi's Insane].osu 94 | Nekomata Master - Greening (Lulu-) [Tranquility].osu 95 | Nekomata Master - Greening (Ultima Fox) [Hobbes2's Insane].osu 96 | Nekomata Master - Greening (Ultima Fox) [Serenity].osu 97 | Nightcore - Flower Dance (Ickey) [Hard].osu 98 | No Life Negotiator - LOST GARDEN (Mordred) [Awakening].osu 99 | No Life Negotiator - LOST GARDEN (Mordred) [Hard].osu 100 | No Life Negotiator - LOST GARDEN (Mordred) [Kalibe's Insane].osu 101 | paraoka feat. 
harunya - secret garden (Xilver15) [Ascending Current].osu 102 | PastelPalettes - Kyu~maiflower (Petal) [Agatsu's Insane].osu 103 | PastelPalettes - Kyu~maiflower (Petal) [byd's Hard].osu 104 | PastelPalettes - Kyu~maiflower (Petal) [Petals].osu 105 | Qrispy Joybox feat.mao - Umeyukiyo (Beren) [Another].osu 106 | Qrispy Joybox feat.mao - Umeyukiyo (Beren) [Hyper].osu 107 | Qrispy Joybox feat.mao - Umeyukiyo (LKs) [Azure's Hard].osu 108 | Qrispy Joybox feat.mao - Umeyukiyo (LKs) [Insane].osu 109 | Rex Orange County - Sunflower (-town-) [Blossom].osu 110 | Rin'ca - Pleasure garden (Log Off Now) [Eternal Paradise].osu 111 | Rin'ca - Pleasure garden (Log Off Now) [Mochi's Insane].osu 112 | Rin'ca - Pleasure garden (Nagi Hisakawa) [Everlasting].osu 113 | Rin'ca - Pleasure garden (Nagi Hisakawa) [Hard].osu 114 | Sasaki Sayaka - Sakura, Reincarnation (Kowari) [Insane].osu 115 | Sasaki Sayaka - Sakura, Reincarnation (Kowari) [Petals].osu 116 | SawanoHiroyuki[nZk]Tielle - Amazing Trees (Agatsu) [collab].osu 117 | SawanoHiroyuki[nZk]Tielle - Amazing Trees -extended ver.- (TT Mouse) [Vitality].osu 118 | S-C-U feat. Qrispy Joybox - anemone (Irreversible) [Fang's Insane].osu 119 | S-C-U feat. Qrispy Joybox - anemone (Irreversible) [Hyper].osu 120 | S-C-U feat. Qrispy Joybox - anemone (Irreversible) [ktgster's Extreme].osu 121 | S-C-U feat. 
Qrispy Joybox - anemone (Irreversible) [Spring].osu 122 | Shimotsuki Haruka - Akahitoha (ImpurePug) [AIR's Blossom].osu 123 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Crystal's Another].osu 124 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Hyper].osu 125 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Koiyuki's Leaf].osu 126 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Lv9's Insane].osu 127 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Petal].osu 128 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Rio & z1's Another].osu 129 | ShinRa-Bansho - Tsuki ni Murakumo Hana ni Kaze ShinRa-Bansho Ver (UberFazz) [Entry's Hard].osu 130 | ShinRa-Bansho - Tsuki ni Murakumo Hana ni Kaze ShinRa-Bansho Ver (UberFazz) [Kw's Extra Stage].osu 131 | ShinRa-Bansho - Tsuki ni Murakumo Hana ni Kaze ShinRa-Bansho Ver (UberFazz) [Last Remote].osu 132 | ShinRa-Bansho - Tsuki ni Murakumo Hana ni Kaze ShinRa-Bansho Ver (UberFazz) [Lunatic].osu 133 | SOOOO - a decayed garden sinks into the isolated sea (Minorsonek) [Everyone is under the same sun].osu 134 | Sound Souler - Bubble Flower (hypercyte) [Expert].osu 135 | Sound Souler - Bubble Flower (hypercyte) [Insane].osu 136 | Sound Souler - Bubble Flower (Sylas) [_Epreus' Extra].osu 137 | Sound Souler - Bubble Flower (Sylas) [effervescence].osu 138 | Sound Souler - Bubble Flower (Sylas) [hard].osu 139 | Sound Souler - Bubble Flower (Sylas) [Insane].osu 140 | Street - Sakura Fubuki (Cherry Blossom) [Another].osu 141 | Street - Sakura Fubuki (Cherry Blossom) [Sakura no Hana].osu 142 | Street - Sakura Fubuki (eiri-) [Bloom].osu 143 | Street - Sakura Fubuki (eiri-) [Insane].osu 144 | Tamura Yukari & Hanazawa Kana - Mogitate Fruit Girls (Raisa12) [kami's Insane].osu 145 | Tamura Yukari & Hanazawa Kana - Mogitate Fruit Girls (TV Size) (Guy) [Insane].osu 146 | THE BINARY - Hana ni Ame o, Kimi ni Uta o (NeonLights) [Collab Hard].osu 147 | THE BINARY - Hana ni Ame o, Kimi ni Uta o (NeonLights) [Frontier's Insane].osu 148 | THE BINARY - Hana ni Ame o, Kimi ni 
Uta o (NeonLights) [Nines' Extra].osu 149 | Yooh - LIFE Garden (Extended Mix) (ktgster) [Bloom].osu 150 | Yorushika - Flower And Badger Game (MrPotato) [Remembrance].osu 151 | Yorushika - Flower and Badger Game (Ryuusei Aika) [Moonlight Sonata, Blooming Lavender and You].osu -------------------------------------------------------------------------------- /v7.0/models/flower/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/flower/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/hard/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/hard/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/hard/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/hard/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/inst/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/inst/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/inst/maps.txt: -------------------------------------------------------------------------------- 1 | 3R2 - Devillic Sphere (Innovation) [Extra].osu 2 | 3R2 - Devillic Sphere (Innovation) [Will's Insane].osu 3 | ak+q - Axium Crisis (MrSergio) [Collapse].osu 4 | ak+q - Axium Crisis (MrSergio) [Hard].osu 5 | ak+q - Axium Crisis 
(MrSergio) [Marcuddles' Insane].osu 6 | ak+q - Ignotus (Ryuusei Aika) [Dynamix's QUANTUM].osu 7 | ak+q - Ignotus (Ryuusei Aika) [FUTURE+].osu 8 | ak+q - Ignotus (Ryuusei Aika) [Lobelia's PRESENT+].osu 9 | ak+q - Ignotus (Ryuusei Aika) [Yugu's FUTURE].osu 10 | ak+q - Intro (-Sylvari) [Future].osu 11 | ak+q - Vexaria (Pentori) [_83's Light Insane].osu 12 | ak+q - Vexaria (Pentori) [Electoz's Hard].osu 13 | ak+q - Vexaria (Pentori) [Insane].osu 14 | ak+q - Vexaria (Pentori) [Miura's Another].osu 15 | ak+q - Vexaria (Pentori) [Sharu's Expert].osu 16 | ak+q - Vivid Theory (Cubby) [Future].osu 17 | ak+q - Vivid Theory (Cubby) [Insane].osu 18 | ak+q - Vivid Theory (Cubby) [LCFC's Memory].osu 19 | ak+q - Vivid Theory (Cubby) [Yusomi's Hard].osu 20 | Aoi - aterlbus (z1085684963) [1112's INSANE].osu 21 | Aoi - aterlbus (z1085684963) [Dynamix's MEGA].osu 22 | Aoi - aterlbus (z1085684963) [rui's HARD].osu 23 | Atsushi - Kessen (Yusomi) [Cub's Expert].osu 24 | Atsushi - Kessen (Yusomi) [Firika's Another].osu 25 | Atsushi - Kessen (Yusomi) [Insane].osu 26 | Atsushi - Kessen (Yusomi) [Lv9's Insane].osu 27 | Atsushi - Kessen (Yusomi) [Muya's Expert].osu 28 | cosMo@Bousou-P - End Mark ni Kibou to Namida wo soete (SnowNiNo_) [ADVANCED].osu 29 | cosMo@Bousou-P - End Mark ni Kibou to Namida wo soete (SnowNiNo_) [Internal's EXPERT].osu 30 | cosMo@Bousou-P - End Mark ni Kibou to Namida wo soete (SnowNiNo_) [lululu's MASTER].osu 31 | eyemedia - Bloody Purity (tsuka) [Expert].osu 32 | eyemedia - Bloody Purity (tsuka) [Hard].osu 33 | Eyemedia - Holy Knight (Kloyd) [Expert].osu 34 | Eyemedia - Holy Knight (Kloyd) [Insane].osu 35 | Feryquitous - Quon (-[Pino]-) [Celestial Crown].osu 36 | Feryquitous - Quon (-[Pino]-) [Cub's Aethereal Extra].osu 37 | Feryquitous - Quon (-[Pino]-) [Hard].osu 38 | Feryquitous - Quon (-[Pino]-) [Necho's Insane].osu 39 | Hideyuki Ono - Two Pianists (Nozhomi) [Hard].osu 40 | Hideyuki Ono - Two Pianists (Nozhomi) [Insane].osu 41 | Hideyuki Ono - Two Pianists 
(Nozhomi) [Virtuosu!].osu 42 | HyuN - Infinity Heaven (Niva) [Ad Infinitum].osu 43 | HyuN - Infinity Heaven (Niva) [Hard].osu 44 | HyuN - Infinity Heaven (Niva) [Mirash's Insane].osu 45 | IAHN - Feel It (Yugu) [Dynamix's Expert].osu 46 | IAHN - Feel It (Yugu) [Insane].osu 47 | IAHN - Feel It (Yugu) [Rua].osu 48 | Ice vs. Morimori Atsushi - REUNION -Duo Blade Against- (Princess Kisses) [Extra].osu 49 | Ice vs. Morimori Atsushi - REUNION -Duo Blade Against- (Princess Kisses) [Hard].osu 50 | Ice vs. Morimori Atsushi - REUNION -Duo Blade Against- (Princess Kisses) [Insane].osu 51 | M2U - Moon Halo (Hailie) [Expert].osu 52 | M2U - Moon Halo (Hailie) [ylvy's Insane].osu 53 | M2U - Stellar (Ultima Fox) [defiance's Insane].osu 54 | M2U - Stellar (Ultima Fox) [Starlight].osu 55 | Mitsuyoshi Takenobu no Ani - Amphisbaena (toybot) [Aeril's Hard].osu 56 | Mitsuyoshi Takenobu no Ani - Amphisbaena (toybot) [Demonic Another].osu 57 | Mitsuyoshi Takenobu no Ani - Amphisbaena (toybot) [Dynamix's Extra].osu 58 | Mitsuyoshi Takenobu no Ani - Amphisbaena (toybot) [Insane].osu 59 | Mitsuyoshi Takenobu no Ani - Amphisbaena (toybot) [Rumi's Extra].osu 60 | Mutsuhiko Izumi - Green Green Dance (ztrot) [Extreme].osu 61 | Nekomata Master - Sayonara Heaven (deetz) [EX].osu 62 | Nekomata Master - Scars of FAUNA (Ambient) [ailv's Hard].osu 63 | Nekomata Master - Scars of FAUNA (Ambient) [Extreme].osu 64 | Nekomata Master - Scars of FAUNA (Ambient) [moph's Insane].osu 65 | Nekomata Master+ - encounter (Sing) [Hard].osu 66 | Nekomata Master+ - encounter (Sing) [Insane].osu 67 | Nekomata Master+ - encounter (Sing) [Irre's Blooming Extra].osu 68 | Nekomata Master+ - encounter (Sing) [Priti's Ultra].osu 69 | Nekomata Master+ - POINT ZERO (Beomsan) [Black Another].osu 70 | Nekomata Master+ - POINT ZERO (Beomsan) [Down's Another].osu 71 | Nekomata Master+ - POINT ZERO (Beomsan) [Hyper].osu 72 | Onoken - Biotonic (Hailie) [Expert].osu 73 | Onoken - Biotonic (Hailie) [ylvy's Insane].osu 74 | onoken - 
Cristalisia (Mir) [Fragments].osu 75 | onoken - Cristalisia (Mir) [Insane].osu 76 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Mikkuri) [Extra].osu 77 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Mikkuri) [Hard].osu 78 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Mikkuri) [Insane].osu 79 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Mikkuri) [iyasine's Insane].osu 80 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Regraz) [Niva's HYPER].osu 81 | Project Grimoire - Caliburne ~Story of the Legendary sword~ (Regraz) [SWORD].osu 82 | Project Grimoire - Excalibur ~Revived Resolution~ (SMOKELIND) [~Expert~].osu 83 | Project Grimoire - Excalibur ~Revived Resolution~ (SMOKELIND) [~Hard~].osu 84 | Project Grimoire - Excalibur ~Revived Resolution~ (SMOKELIND) [~Insane~].osu 85 | Project Grimoire - Excalibur ~Revived resolution~ (SMOKELIND) [~Yooh's Extra~].osu 86 | Project Grimoire - Excalibur ~Revived resolution~ (SnowNiNo_) [EXPERT].osu 87 | Project Grimoire - Excalibur ~Revived resolution~ (SnowNiNo_) [KittyAdventure's ADVANCED].osu 88 | Project Grimoire - Excalibur ~Revived resolution~ (SnowNiNo_) [Kyuukai's MASTER].osu 89 | Project Grimoire - Excalibur ~Revived resolution~ (SnowNiNo_) [LEGENDARY SWORD].osu 90 | Rabpit - Sacred (Matrix) [Insane].osu 91 | Rabpit - Saika (tsuka) [Expert].osu 92 | Rabpit - Saika (tsuka) [Hard].osu 93 | REDALiCE - Acceleration (Leader) [EXPERT].osu 94 | REDALiCE - Acceleration (Leader) [HARD].osu 95 | REDALiCE - Acceleration (Leader) [MASTER].osu 96 | REDALiCE - MERLIN (Pentori) [Cub's Insane].osu 97 | REDALiCE - MERLIN (Pentori) [Extra].osu 98 | REDALiCE - MERLIN (Pentori) [Hard].osu 99 | Sakuzyo - Amenohoakari (Firis Mistlud) [Fiura's Miyabi].osu 100 | Sakuzyo - Amenohoakari (Firis Mistlud) [how2miss' Insane].osu 101 | Sakuzyo - Amenohoakari (Firis Mistlud) [Minorsonek's Insane].osu 102 | Sakuzyo - Fracture Ray (DJ Lucky) [Cub's Extra].osu 103 | Sakuzyo - 
Fracture Ray (DJ Lucky) [Extra].osu 104 | Sakuzyo - Fracture Ray (DJ Lucky) [Icekalt's Extra].osu 105 | Sakuzyo - Fracture Ray (DJ Lucky) [Insane].osu 106 | Sakuzyo - Fracture Ray (DJ Lucky) [Nuvolina's Hard].osu 107 | Sakuzyo - Imprinting (eiri-) [Cyndere's Insane].osu 108 | Sakuzyo - Imprinting (eiri-) [Dementation's Extra].osu 109 | Sakuzyo - Imprinting (eiri-) [FrenZ's Hard].osu 110 | Sakuzyo - Imprinting (eiri-) [Trynna's Insane].osu 111 | Seiryu - Ultramarine (RLC) [Another].osu 112 | Seiryu - Ultramarine (RLC) [Esti's Advanced].osu 113 | Seiryu - Ultramarine (RLC) [yf's Expert].osu 114 | Seiryu - Ultramarine (RLC) [Zexous' Hyper].osu 115 | sky_delta - Midnight City Warfare (lcfc) [Extra].osu 116 | sky_delta - Midnight City Warfare (lcfc) [Extreme].osu 117 | sky_delta - Midnight City Warfare (lcfc) [Insane].osu 118 | Sound Souler - Paradise (kwk) [Asaiga & kwk's Expert].osu 119 | Sound Souler - Paradise (kwk) [Future].osu 120 | Sound Souler - Paradise (kwk) [Hobbes2's Insane].osu 121 | Sound Souler - Paradise (kwk) [Mir's Insane].osu 122 | Sound Souler - Paradise (kwk) [Tsumia's Extra].osu 123 | Sta - Platinum (FrenZ396) [Diamond].osu 124 | Starving Trancer - New Gravity (fanzhen0019) [Expert].osu 125 | Street - Sakura Fubuki (eiri-) [Bloom].osu 126 | Street - Sakura Fubuki (eiri-) [Insane].osu 127 | Swan Lake Orchestra - Hakuchou no Mizuumi (AngelHoney) [Another].osu 128 | Swan Lake Orchestra - Hakuchou no Mizuumi (AngelHoney) [Extra].osu 129 | Swan Lake Orchestra - Hakuchou no Mizuumi (AngelHoney) [Hyper].osu 130 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [Azzedd's MASTER].osu 131 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [EXPERT].osu 132 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [Jean-Mi's MASTER].osu 133 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [Kard's EXPERT].osu 134 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [Ren's EXPERT].osu 135 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [Shizuqua's ADVANCED].osu 136 | t+pazolite - Oshama Scramble! 
(DTM9 Nowa) [STINGY'S MASTER].osu 137 | t+pazolite - Oshama Scramble! (DTM9 Nowa) [thiev's EXPERT].osu 138 | Tsukasa - Visions (Flower) [Insane].osu 139 | Tsukasa (Arte Refact) - Fragrance (Lugu) [Fall].osu 140 | xi - ANiMA (liaoxingyao) [Dynamix's LV.8].osu 141 | xi - ANiMA (liaoxingyao) [LV.11].osu 142 | xi - ANiMA (liaoxingyao) [Spring's LV.10].osu 143 | xi - Glorious Crown (Monstrata) [Atsuro's Extra].osu 144 | xi - Glorious Crown (Monstrata) [Hobbes2's Insane].osu 145 | xi - Glorious Crown (Monstrata) [Logic's Hard].osu 146 | Zeami - Music Revolver (KanaRin) [cRyo].osu 147 | Zeami - Music Revolver (KanaRin) [Kana].osu -------------------------------------------------------------------------------- /v7.0/models/inst/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/inst/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/lowbpm/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/lowbpm/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/lowbpm/maps.txt: -------------------------------------------------------------------------------- 1 | AAA - Ashita no Hikari (TV Size) (Yudragen) [Hard].osu 2 | AAA - Ashita no Hikari (TV Size) (Yudragen) [Insane].osu 3 | Aimer - Repray (TV Size) (-Mikan) [Hard].osu 4 | Aimer - Repray (TV Size) (-Mikan) [Our lost love].osu 5 | Aoi (CV Iguchi Yuka), Hinata (CV Asumi Kana) - Irochigai no Tsubasa (TV Size) (climbx3145) [Hard].osu 6 | AZKi - Aoi Yume (domSaur) [Hard].osu 7 | AZKi - Aoi Yume (domSaur) [Insane].osu 8 | ChamJam - Clover wish (TV Size) (Asphyre) [Flowery Yearning].osu 9 | ChamJam - Clover wish (TV size) 
(Asphyre) [Shizuku's Wish].osu 10 | ChamJam - Clover wish (TV Size) (Nyantiaz) [Walao's Lovestruck].osu 11 | CIX - WIN (TV Size) (- ascended -) [CHAMPIONS].osu 12 | CIX - WIN (TV Size) (- ascended -) [HARD].osu 13 | CIX - WIN (TV Size) (- ascended -) [INSANE].osu 14 | ClariS - Anemone -TV MIX- (Neoskylove) [Insane].osu 15 | ClariS - Anemone -TV MIX- (xxdeathx) [Classroom ClariS].osu 16 | ClariS - Anemone -TV MIX- (xxdeathx) [Hard].osu 17 | ClariS - Anemone -TV MIX- (xxdeathx) [wkyik's Insane].osu 18 | DUSTCELL - Anemone (Sparhten) [Collab Hard].osu 19 | Ed Sheeran feat. Sakura Miko - Shape of You (Akane Hime) [Brain Damage].osu 20 | Elu, SisterCleaire - Sunday Sunday Fruit Fool (Yugu) [Sweet].osu 21 | fhana - Anemone no Hana (Sotarks) [Kowari's Hard].osu 22 | flumpool - Chiisana Hibi (TV Size) (Ipas) [Insane].osu 23 | Fujita Akane, Miyamoto Yume, Ozawa Ari - Precious You (TV Size) (Roger) [Creamy's Insane].osu 24 | Galileo Galilei - Aoi Shiori (TV Size) (Chompy) [Flower].osu 25 | Galileo Galilei - Aoi Shiori (TV Size) (Chompy) [Gero's Hyper].osu 26 | Galileo Galilei - Circle Game (TV Size) (browiec) [Flower].osu 27 | Gawr Gura - Ride On Time (REDALiCE's CITY POP SHARK Remix!!) (Doya) [a].osu 28 | Gawr Gura - Ride On Time (REDALiCE's CITY POP SHARK Remix!!) (Doya) [hard].osu 29 | Given - Marutsuke (TV Size) (Blitzifyyy) [Hard].osu 30 | Given - Marutsuke (TV Size) (Blitzifyyy) [Wonder].osu 31 | halca - FIRST DROP (TV Size) (dkblaze) [Insane].osu 32 | halca - FIRST DROP (TV Size) (dkblaze) [Yuuma's Hard].osu 33 | halca - FIRST DROP (TV Size) (Dustinati) [Hard].osu 34 | halca - FIRST DROP (TV Size) (Dustinati) [Insane].osu 35 | halca - Houkago no Liberty (TV Size) (dkblaze) [Kowari's Hard].osu 36 | Hatano Wataru - Heart Signal (TV Size) (Lobelia) [Beren's Insane].osu 37 | Hatano Wataru - Heart Signal (TV Size) (Lobelia) [Rainbow's Hard].osu 38 | Hatano Wataru - Heart Signal (TV Size) (Lobelia) [Sandrew's Insane].osu 39 | himmel feat. 
YooSanHyakurei - Maple Wind (- Matha -) [abstracted].osu 40 | Hoshimachi Suisei - NEXT COLOR PLANET (Amateurre) [Meep's Hard].osu 41 | Hoshimachi Suisei - Tenkyuu, Suisei wa Yoru o Mataide (captin1) [meiikyuu's Hard].osu 42 | Inugami Korone - Koro Funk! (Petal) [Nana's hard].osu 43 | Inugami Korone - Koro Funk! (Petal) [PaRaDogi's Light Insane].osu 44 | Kano - Hikari no Michishirube (TV Size) (aidanbh123) [Insane].osu 45 | Kitamura Eri - Shirushi (TV Size) (Shocko) [Hard].osu 46 | KMNZ - VR - Virtual Reality (prod.by Snail's House) (Calvaria) [Insane].osu 47 | KSUKE - Contradiction (feat. Tyler Carter) (TV Size) (- ascended -) [HARD].osu 48 | KSUKE - Contradiction (feat. Tyler Carter) (TV Size) (- ascended -) [INSANE].osu 49 | KSUKE - Contradiction (feat. Tyler Carter) (TV Size) (AirinCat) [Hard].osu 50 | KSUKE - Contradiction (feat. Tyler Carter) (TV Size) (AirinCat) [Insane].osu 51 | Kudou Chitose - I beg you (Cut Ver.) (Yugu) [Hard].osu 52 | Kudou Chitose - I beg you (Cut ver.) (Yugu) [I miss you].osu 53 | Meramipop - Bad Apple!! (Hard) [Appleroval].osu 54 | Mili - sustain++; (TV Size) (SMOKELIND) [world.getRelationship(me, you).end();].osu 55 | Morinaka Kazaki - Hana no Youni (Yugu) [-Eriri-'s Insane].osu 56 | Morinaka Kazaki - Hana no Youni (Yugu) [Hard].osu 57 | Morohoshi Sumire - Masshiro (TV Size) (kunka) [Nely's Hard].osu 58 | Morohoshi Sumire - Masshiro (TV Size) (kunka) [Sekaidrea's Insane].osu 59 | MYTH & ROID - shadowgraph (TV Size) (Involute) [Hard].osu 60 | MYTH & ROID - shadowgraph (TV Size) (Involute) [Myth].osu 61 | nasu asaco - flora (Short Ver.) (Lunala) [Gust's Hyper].osu 62 | nasu asaco - flora (Short Ver.) 
(Lunala) [Paradise Seeker].osu 63 | Nekomata Master - Sayonara Heaven (deetz) [EX].osu 64 | Nekomata Master - Sayonara Heaven (deetz) [Hyper].osu 65 | Nekomata Okayu - flos (Akane Hime) [amare].osu 66 | Nekomata Okayu - Flos (TasTy-Anime) [MoguMogu].osu 67 | nonoc - Memento (TV Size) (fieryrage) [ayynna's another].osu 68 | nonoc - Memento (TV Size) (fieryrage) [flask's hard].osu 69 | nonoc - Memento (TV Size) (fieryrage) [hooni's insane].osu 70 | nonoc - Memento (TV Size) (Kuki1537) [Hard].osu 71 | nonoc - Memento (TV Size) (Kuki1537) [PikA's Insane].osu 72 | nonoc - Memento (TV Size) (Sylas) [Insane].osu 73 | onoken - Cristalisia (Mir) [Hard].osu 74 | onoken - Cristalisia (Mir) [Insane].osu 75 | Otonashi Diva - Zoetrope (eiri-) [Enerugi's Hard].osu 76 | Otonashi Diva - Zoetrope (eiri-) [Faito's Insane].osu 77 | plingmin - This world is yours (TV Size) (hypercyte) [Terminal].osu 78 | Project Grimoire - Excalibur ~Revived resolution~ (SnowNiNo_) [KittyAdventure's ADVANCED].osu 79 | RAMM ni Haiyoru Kuuko-san to Kuune-san - Sister, Friend, Lover (TV Size) (Zer0-G) [Hard].osu 80 | RAMM ni Haiyoru Kuuko-san to Kuune-san - Sister, Friend, Lover (TV Size) (Zer0-G) [Insane].osu 81 | Ray - Rakuen PROJECT (TV Size) (PaRaDogi) [Hinsvar's Hard].osu 82 | Ray - Rakuen PROJECT (TV Size) (PaRaDogi) [Insane].osu 83 | REDALiCE - Pekorap Tropical House Remix (Yuuma) [Hard].osu 84 | Rindou Mikoto - Happy Halloween (Yugu) [kanor's Hard].osu 85 | Rindou Mikoto - Happy Halloween (Yugu) [Trick or Treat].osu 86 | Sakai Mikio - Identity (TV Size) (Fixxis) [Kowari's Hard].osu 87 | Sanshuu Chuugaku Yuushabu - Aurora Days (TV Size) (lewski) [Sprout].osu 88 | Shiina Yuika - Shiina Yuika o Matteimasu. (Yugu) [ClariS' Hard].osu 89 | Shiina Yuika - Shiina Yuika o Matteimasu. 
(Yugu) [Strawberry Daifuku~].osu 90 | Shikata Akiko - Akatsuki (TV Size) (-Arche) [Dawn].osu 91 | Shikata Akiko - Akatsuki (TV Size) (-Arche) [Kalibe's Light Insane].osu 92 | Shikata Akiko - Akatsuki (TV Size) (-Arche) [Scub's Insane].osu 93 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Hyper].osu 94 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Lv9's Insane].osu 95 | Shimotsuki Haruka - Akahitoha (ImpurePug) [Scub's Hard].osu 96 | Shin Sakiura feat. AAAMYYY - NIGHT RUNNING (TV Size) (Peter) [Hard].osu 97 | Shin Sakiura feat. AAAMYYY - NIGHT RUNNING (TV Size) (Peter) [Night].osu 98 | Shin Sakiura feat. AAAMYYY - NIGHT RUNNING (TV Size) (Peter) [Pieseu's Light Insane].osu 99 | Shirakami Fubuki - Im. Scatman (Faust) [TUBIRUBIRU PARAPARA PULLLL].osu 100 | Sonar Pocket - GIRIGIRI (TV size) (Yudragen) [HARD].osu 101 | Sonar Pocket - GIRIGIRI (TV size) (Yudragen) [INSANE].osu 102 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [6006's Hard].osu 103 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [KawaMilr's Insane].osu 104 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [Morinaga's Insane].osu 105 | Sota Fujimori - Title BGM Loop ([_Crystal]) [Extreme].osu 106 | Sota Fujimori - Title BGM Loop ([_Crystal]) [Intensity's Extreme].osu 107 | Sota Fujimori - Title BGM Loop ([_Crystal]) [Oracle's Extreme].osu 108 | Sota Fujimori - Title BGM Loop ([_Crystal]) [wkyik's Advanced].osu 109 | Sound Souler - Paradise (kwk) [Hard].osu 110 | Sound Souler - Paradise (kwk) [Hobbes2's Insane].osu 111 | Sta - Platinum (FrenZ396) [Diamond].osu 112 | Sta - Platinum (FrenZ396) [Gold].osu 113 | Stray Kids - SLUMP -Japanese ver.- (TV Size) (Sylvarus) [Hard].osu 114 | Stray Kids - SLUMP -Japanese ver.- (TV Size) (Sylvarus) [Insane].osu 115 | Stray Kids - TOP -Japanese ver.- (TV Size) (AirinCat) [Hard].osu 116 | Stray Kids - TOP -Japanese ver.- (TV Size) (AirinCat) [Light Insane].osu 117 | Suzuki Masayuki - DADDY ! DADDY ! DO ! feat. 
Suzuki Airi (TV Size) (hypercyte) [Confession].osu 118 | Suzuki Masayuki - DADDY ! DADDY ! DO ! feat. Suzuki Airi (TV Size) (hypercyte) [Fixxis' Hard].osu 119 | Suzuki Masayuki - DADDY ! DADDY ! DO ! feat. Suzuki Airi (TV Size) (hypercyte) [Insane].osu 120 | Tamura Naomi - Yuzurenai Negai (TV Size) (bossandy) [leo16834's Hard].osu 121 | Tamura Naomi - Yuzurenai Negai (TV Size) (bossandy) [Magic Knight].osu 122 | THE DU - Crazy Noisy Bizarre Town (TV Size) (Cloudchaser) [Atsuro's Insane ~Heaven's Door~].osu 123 | THE DU - Crazy Noisy Bizarre Town (TV Size) (Cloudchaser) [Hard ~Star Platinum~].osu 124 | the peggies - Centimeter (TV Size) (-[Shady]-) [Diamond].osu 125 | the peggies - Centimeter (TV Size) (-[Shady]-) [FuJu's Hard].osu 126 | the peggies - Centimeter (TV Size) (Blitzifyyy) [Hard].osu 127 | the peggies - Centimeter (TV Size) (dkblaze) [Insane].osu 128 | the peggies - Centimeter (TV Size) (dkblaze) [Kowari's Hard].osu 129 | Todo Kohaku - Mela! (Hinsvar) [Darling].osu 130 | Todo Kohaku - Mela! (Hinsvar) [Hard].osu 131 | Todo Kohaku - Mela! 
(Hinsvar) [Insane].osu 132 | Tokoyami Towa - brilliant (Hinsvar) [Hard].osu 133 | Tokoyami Towa - -ERROR (Amateurre) [Mocaotic's Insane].osu 134 | Tokoyami Towa - -ERROR (Amateurre) [xidorn's Hard].osu 135 | Tsukishima Kirari (CV Kusumi Koharu) - Balalaika (TV Size) (Yorita Yoshino) [Regraz's Hard].osu 136 | Tsukishima Kirari (CV Kusumi Koharu) - Balalaika (TV Size) (Yorita Yoshino) [Yugu's Insane].osu 137 | Tsunomaki Watame - Bubble Love (Yorita Yoshino) [Collab Insane].osu 138 | Tsunomaki Watame - Bubble Love (Yorita Yoshino) [Hard].osu 139 | Tsunomaki Watame - Hololive Ierukana (Rikuima) [Insane].osu 140 | Usada Pekora, Sakura Miko, Houshou Marine - Shiawase Usagi PekoMikoMarine (Yuuma) [Happiness].osu 141 | yanaginagi - Zoetrope (TV Size) (NeKroMan4ik) [Hard].osu 142 | Yasuda Rei - Mirror (TV Size) (GIDZ) [Chaoz's Light Insane].osu 143 | Yasuda Rei - Mirror (TV Size) (GIDZ) [Insane].osu 144 | Yasuda Rei - through the dark (TV Size) (- ascended -) [ascended cycopton's hard].osu 145 | Yasuda Rei - through the dark (TV Size) (- ascended -) [light insane].osu 146 | Yasuda Rei - through the dark (TV Size) (- ascended -) [run, little light].osu 147 | Yorushika - Flower And Badger Game (MrPotato) [Remembrance].osu 148 | Yorushika - Flower and Badger Game (Ryuusei Aika) [Moonlight Sonata, Blooming Lavender and You].osu 149 | YuNi - Toumei Seisai (-Mikan) [gambatte's Insane].osu 150 | YuNi - Toumei Seisai (-Mikan) [Garden's Hard].osu 151 | Yunomi & nicamoq feat. Minato Aqua - Indoor Kei Nara Trackmaker (ArThasCD) [Hyper].osu 152 | Yunomi & nicamoq feat. 
Minato Aqua - Indoor Kei Nara Trackmaker (ArThasCD) [Insane].osu 153 | YURiKA - Nemureru Honnou (TV Size) (eiri-) [Insane].osu -------------------------------------------------------------------------------- /v7.0/models/lowbpm/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/lowbpm/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/mania_highkey/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/mania_highkey/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/mania_lowkey/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/mania_lowkey/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/mania_pattern/mania_pattern_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/mania_pattern/mania_pattern_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/normal/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/normal/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/normal/rhythm_model: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/normal/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/sota/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/sota/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/sota/maps.txt: -------------------------------------------------------------------------------- 1 | 96 & Sota ft. Mayumi Morinaga - In The Breeze (fanzhen0019) [1].osu 2 | 96 & Sota ft. Mayumi Morinaga - In The Breeze (fanzhen0019) [Expert].osu 3 | BEMANI Sound Team Sota F. - Love You More (Ritzeh) [Challenge].osu 4 | BEMANI Sound Team Sota F. - Love You More (Ritzeh) [Extra].osu 5 | BEMANI Sound Team Sota Fujimori - OZONE (Ameth Rianno) [scubbrizkiancul ft. anoy, the information.].osu 6 | BEMANI Sound Team Sota Fujimori - OZONE (Feiri) [Cub's O3].osu 7 | BEMANI Sound Team Sota Fujimori - VOLTEXES IV (Flask) [HEAVENLY].osu 8 | Camellia vs Expander - AZISAI (Ongaku) [ORIENTAL].osu 9 | Camellia Vs. Expander - AZISAI (Jacob) [Another].osu 10 | Camellia Vs. Expander - AZISAI (Long ver.) 
(sionKotori) [ORIENTAL].osu 11 | dj MAX STEROID - Arabian Rave Night (Avena) [Exote's Extra].osu 12 | dj MAX STEROID - Arabian Rave Night (Avena) [Ultra].osu 13 | dj MAX STEROID - Arabian Rave Night (Macuilxochitl) [Another].osu 14 | DOUBLE IMPACT - DOUBLE IMPACT (7odoa) [EXTREME AR10].osu 15 | DOUBLE IMPACT - DOUBLE IMPACT (7odoa) [EXTREME].osu 16 | Expander - aura (Midge) [Beyond Extreme].osu 17 | Expander - aura (Midge) [Kotori's Extreme].osu 18 | Expander - diagram (Moecho) [LEGGENDARIA].osu 19 | Expander - Move That Body (fanzhen0019) [EXTREME].osu 20 | Expander - subtractive (anna apple) [12345].osu 21 | Expander - subtractive (anna apple) [Ayaya's 123456].osu 22 | kors k - Playing With Fire (Sota Fujimori Remix) (xlni) [Pyrotechnics].osu 23 | PyramiC - Cleopatrysm (Regou) [6th's Extra].osu 24 | PyramiC - Cleopatrysm (Regou) [Extra].osu 25 | PyramiC - Cleopatrysm (Regou) [Frey's Insane].osu 26 | Ryu vs. Sota - Go Beyond (Cherry Blossom) [Go Beyond !!!].osu 27 | Ryu vs. Sota - Go Beyond!! (Anto) [Expert!!].osu 28 | Ryu vs. Sota - Go Beyond!! (Anto) [Nuvolina's Expert!!].osu 29 | Ryu vs. Sota - Go Beyond!! (Anto) [Transcension!!].osu 30 | Ryu Vs. Sota - Go Beyond!! (Chordzi) [Extra].osu 31 | Ryu vs. Sota - Go Beyond!! (Extended RRVer.) (ZheVulture) [Euphoria].osu 32 | Ryu Vs. Sota - Go Beyond!! (Yonaka-) [Black Another].osu 33 | Ryu Vs. Sota - Go Beyond!! (Yonaka-) [ShiiTsuin's Another].osu 34 | Sota F. 
- New Decade (Chocoliti) [Insane].osu 35 | Sota Fujimori - ACCELERATE (ak74) [shenme jiba].osu 36 | Sota Fujimori - ACCELERATE (Ambrew) [Deceleration Extreme].osu 37 | Sota Fujimori - ACCELERATE (Bluekrait) [690 style].osu 38 | Sota Fujimori - ACCELERATE (Chaoslitz) [Ametrin's Extra (#9)].osu 39 | Sota Fujimori - ACCELERATE (Chaoslitz) [Extra (#1)].osu 40 | Sota Fujimori - ACCELERATE (Chaoslitz) [Koiyuki's Extra (#2)].osu 41 | Sota Fujimori - ACCELERATE (Chaoslitz) [Loli's Extra (#5)].osu 42 | Sota Fujimori - ACCELERATE (Chaoslitz) [nika mika's Extra (#6)].osu 43 | Sota Fujimori - ACCELERATE (Chaoslitz) [Skystar's Expert (#8)].osu 44 | Sota Fujimori - ACCELERATE (Chaoslitz) [yf's Extra (#3)].osu 45 | Sota Fujimori - ACCELERATE (J1NX1337) [Expert].osu 46 | Sota Fujimori - ACCELERATE (mindmaster107) [EXTRA].osu 47 | Sota Fujimori - ACCELERATE (mindmaster107) [-JORDAN-'S EXPERT].osu 48 | Sota Fujimori - ACCELERATE (mindmaster107) [WHIPLASH].osu 49 | Sota Fujimori - ACCELERATE (Otosaka-Yu) [Another].osu 50 | Sota Fujimori - ACCELERATE (Otosaka-Yu) [cub's Another].osu 51 | Sota Fujimori - ACCELERATE (Otosaka-Yu) [Necho's Ex].osu 52 | Sota Fujimori - ACCELERATE (Otosaka-Yu) [ZZH's Extra].osu 53 | Sota Fujimori - ACCELERATE (Quantum Rosta) [Expert].osu 54 | Sota Fujimori - ACCELERATE (SnowNiNo_) [Extra].osu 55 | Sota Fujimori - ANDROMEDA -SF_2011 Mix- (g0zz) [andromeda].osu 56 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [Delight].osu 57 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [fanzhen's Another].osu 58 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [-kevincela-'s Extra].osu 59 | Sota Fujimori - DANCE ALL NIGHT (- Yoshimaro -) [Pho's Extra].osu 60 | Sota Fujimori - DANCE ALL NIGHT (ninfia) [Another].osu 61 | Sota Fujimori - Entrapment -Extended Mix- (den0saur) [taku x den0saur x lcfc Bound].osu 62 | Sota Fujimori - Entrapment -Extended Mix- (Laurakko) [extra].osu 63 | Sota Fujimori - Entrapment -Extended Mix- (Laurakko) [taku x den0saur x lcfc Bound].osu 
64 | Sota Fujimori - Give Me MORE!! (Flower) [Extreme].osu 65 | Sota Fujimori - Modular Technology (Feiri) [Crack].osu 66 | Sota Fujimori - Modular Technology (Feiri) [Extra].osu 67 | Sota Fujimori - Modular Technology (Sebu) [Vut].osu 68 | Sota Fujimori - Mother Ship (Enyoti) [Velocity].osu 69 | Sota Fujimori - Mother Ship (Flower) [EXTREME].osu 70 | Sota Fujimori - Move That Body (vita2) [HARDTECH].osu 71 | Sota Fujimori - Move That Body -Extended Mix- (Amamiya Yuko) [Extreme].osu 72 | Sota Fujimori - Move That Body -Extended Mix- (Amamiya Yuko) [RLC's Extra].osu 73 | Sota Fujimori - Move That Body -Extended Mix- (Amamiya Yuko) [RLC's Insane].osu 74 | Sota Fujimori - Move That Body -Extended Mix- (Spring Roll) [Normal].osu 75 | Sota Fujimori - New Century -Extended Mix- (Bluekrait) [690 style].osu 76 | Sota Fujimori - OZONE (Feiri) [Ascension].osu 77 | Sota Fujimori - OZONE (Feiri) [Extra].osu 78 | Sota Fujimori - OZONE (Feiri) [Master].osu 79 | Sota Fujimori - polygon (_RyuK) [Zoooom].osu 80 | Sota Fujimori - polygon (6th) [6th].osu 81 | Sota Fujimori - polygon (Ahntaea) [yes].osu 82 | Sota Fujimori - polygon (Extended) (Naidaaka) [owo].osu 83 | Sota Fujimori - polygon (gtfo) [Convex Polygon].osu 84 | Sota Fujimori - polygon (jesneit) [darovinci's fractal].osu 85 | Sota Fujimori - polygon (jesneit) [Polygon's overped].osu 86 | Sota Fujimori - polygon (Kaifin) [Bonzi's Ultra].osu 87 | Sota Fujimori - polygon (Kaifin) [Expert].osu 88 | Sota Fujimori - polygon (Kaifin) [Extra].osu 89 | Sota Fujimori - polygon (Kaifin) [fanzhen's Another].osu 90 | Sota Fujimori - polygon (Kaifin) [Incrementally Hi-Speed Extreme].osu 91 | Sota Fujimori - polygon (Kaifin) [Lolirii's Collab Expert].osu 92 | Sota Fujimori - polygon (Kaifin) [unhinged].osu 93 | Sota Fujimori - polygon (Sebu) [-GN's pentagon].osu 94 | Sota Fujimori - polygon (Sebu) [heptagon].osu 95 | Sota Fujimori - polygon (Sebu) [-PC's hexagon].osu 96 | Sota Fujimori - polygon (Syameimaru-Aya) [Extreme].osu 97 | Sota 
Fujimori - subtractive -Extended Mix- (Midge) [~eargas.m_].osu 98 | Sota Fujimori - Transport (Thexes) [TECH-TRANCE].osu 99 | Sota Fujimori - Transport -Extended Mix- (HDJump) [SpecialDelivery].osu 100 | Sota Fujimori - VOLTEXES III (Extended Mix) (C00L) [Nemesis].osu 101 | Sota Fujimori - VOLTEXES III (Jacob) [GRAVITY].osu 102 | Sota Fujimori - VOLTEXES III (Nerova Riuz GX) [GRAVITY].osu 103 | Sota Fujimori - VOLTEXES III (Scarlett) [Draramar].osu 104 | Sota Fujimori - VOLTEXES III (Scarlett) [scr style ar9.3].osu 105 | Sota Fujimori - VOLTEXES III (Scarlett) [scr style].osu 106 | Sota Fujimori - VOLTEXES III (Yuzuki Tokine) [Noriko's boom].osu 107 | Sota Fujimori - VOLTEXES III (Yuzuki Tokine) [Splect's EXTRAME].osu 108 | Sota Fujimori - VOLTEXES III (Yuzuki Tokine) [Yuzuki's INFINITE].osu 109 | Sota Fujimori - WOBBLE IMPACT (ksg) [Black Another].osu 110 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [BURST].osu 111 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Celine's Extra].osu 112 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Drop's Wobble Extra].osu 113 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [faygo's Another].osu 114 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Fir's Extra].osu 115 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [na's lowermost fulfillment extra].osu 116 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [-PC's Extra].osu 117 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Sebu Fujimori].osu 118 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [taku's Extra].osu 119 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Yokes' Expert].osu 120 | Sota Fujimori - WOBBLE IMPACT (Lulu-) [Zetera's Super Hard].osu 121 | Sota Fujimori - WOBBLE IMPACT (Pahare) [Extreme].osu 122 | Sota Fujimori - WOBBLE IMPACT (taku) [Pro].osu 123 | Sota Fujimori - WOBBLE IMPACT (taku) [REMAP].osu 124 | Sota Fujimori - WOBBLE IMPACT -Extended Mix- (handsome) [EXPAND].osu 125 | Sota Fujimori - WOBBLE IMPACT -Extended Mix- (Midge) [M5].osu 126 | Sota Fujimori 2nd Season - GLITTER (Extended Mix) (buhei) [Extreme].osu 
-------------------------------------------------------------------------------- /v7.0/models/sota/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/sota/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/taiko/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/taiko/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/taiko/hs_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/taiko/hs_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/taiko/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/taiko/rhythm_model -------------------------------------------------------------------------------- /v7.0/models/tvsize/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/tvsize/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/tvsize/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/tvsize/rhythm_model 
-------------------------------------------------------------------------------- /v7.0/models/vtuber/flow_dataset.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/vtuber/flow_dataset.npz -------------------------------------------------------------------------------- /v7.0/models/vtuber/rhythm_model: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kotritrona/osumapper/db1eeabccef4adf822551580731b9ec3d4caec68/v7.0/models/vtuber/rhythm_model -------------------------------------------------------------------------------- /v7.0/os_tools.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # OS related library functions 5 | # 6 | 7 | import re, os, subprocess, json; 8 | 9 | def run_command(str_array): 10 | x = subprocess.Popen(str_array, stdout=subprocess.PIPE, stderr=subprocess.PIPE); 11 | err = x.stderr.read() 12 | if len(err) > 1: 13 | print(err.decode("utf8")) 14 | return x.stdout.read(); 15 | 16 | def fix_path(): 17 | path = os.path.dirname(__file__) 18 | if len(path) > 1: 19 | os.chdir(path) 20 | 21 | def test_node_modules(): 22 | has_node_modules = os.path.isdir("node_modules/") 23 | if not has_node_modules: 24 | print("node_modules not found! 
please run `npm install` first.") 25 | assert has_node_modules -------------------------------------------------------------------------------- /v7.0/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "osumapper", 3 | "version": "7.0.0", 4 | "description": "An automatic beatmap generator using Tensorflow / Deep Learning.", 5 | "main": "load_map.js", 6 | "dependencies": { 7 | "express": "^4.17.1", 8 | "opn": "^6.0.0", 9 | "osu-buffer": "^1.3.5", 10 | "polynomial": "^1.4.3", 11 | "regedit": "^3.0.3" 12 | }, 13 | "devDependencies": {}, 14 | "scripts": { 15 | "test": "echo \"Error: no test specified\" && exit 1" 16 | }, 17 | "keywords": [], 18 | "repository": { 19 | "type": "git", 20 | "url": "git+https://github.com/kotritrona/osumapper.git" 21 | }, 22 | "author": "kotritrona", 23 | "license": "Apache-2.0" 24 | } 25 | -------------------------------------------------------------------------------- /v7.0/package_colab.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "osumapper", 3 | "version": "7.0.0", 4 | "description": "An automatic beatmap generator using Tensorflow / Deep Learning.", 5 | "main": "load_map.js", 6 | "dependencies": { 7 | "polynomial": "^1.4.3" 8 | }, 9 | "devDependencies": {}, 10 | "scripts": { 11 | "test": "echo \"Error: no test specified\" && exit 1" 12 | }, 13 | "keywords": [], 14 | "repository": { 15 | "type": "git", 16 | "url": "git+https://github.com/kotritrona/osumapper.git" 17 | }, 18 | "author": "kotritrona", 19 | "license": "Apache-2.0" 20 | } 21 | -------------------------------------------------------------------------------- /v7.0/plot_tools.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Part 7 Plot helpers 5 | # 6 | 7 | import matplotlib.pyplot as plt 8 | import matplotlib.lines as lines 9 | import matplotlib.transforms as mtransforms 10 | 
import matplotlib.text as mtext 11 | 12 | 13 | class MyLine(lines.Line2D): 14 | def __init__(self, *args, **kwargs): 15 | # we'll update the position when the line data is set 16 | self.text = mtext.Text(0, 0, '') 17 | lines.Line2D.__init__(self, *args, **kwargs) 18 | 19 | # we can't access the label attr until *after* the line is 20 | # inited 21 | self.text.set_text(self.get_label()) 22 | 23 | def set_figure(self, figure): 24 | self.text.set_figure(figure) 25 | lines.Line2D.set_figure(self, figure) 26 | 27 | def set_axes(self, axes): 28 | self.text.set_axes(axes) 29 | lines.Line2D.set_axes(self, axes) 30 | 31 | def set_transform(self, transform): 32 | # 2 pixel offset 33 | texttrans = transform + mtransforms.Affine2D().translate(2, 2) 34 | self.text.set_transform(texttrans) 35 | lines.Line2D.set_transform(self, transform) 36 | 37 | def set_data(self, x, y): 38 | if len(x): 39 | self.text.set_position((x[-1], y[-1])) 40 | 41 | lines.Line2D.set_data(self, x, y) 42 | 43 | def draw(self, renderer): 44 | # draw my label at the end of the line with 2 pixel offset 45 | lines.Line2D.draw(self, renderer) 46 | self.text.draw(renderer) 47 | 48 | def plot_history(history): 49 | plt.figure() 50 | plt.xlabel('Epoch') 51 | plt.ylabel('Mean Abs Error [Limitless]') 52 | plt.plot(history.epoch, np.array(history.history['loss']), 53 | label='Train Loss') 54 | plt.plot(history.epoch, np.array(history.history['val_loss']), 55 | label = 'Val loss') 56 | plt.legend() 57 | plt.show() -------------------------------------------------------------------------------- /v7.0/requirements.txt: -------------------------------------------------------------------------------- 1 | librosa==0.8.0 2 | numpy==1.18.5 3 | matplotlib==3.2.2 4 | tensorflow>=2.3.1 5 | scikit_learn==0.23.2 6 | -------------------------------------------------------------------------------- /v7.0/requirements_colab.txt: -------------------------------------------------------------------------------- 1 | librosa==0.8.0 2 | 
matplotlib==3.2.2 3 | scikit_learn==0.23.2 4 | -------------------------------------------------------------------------------- /v7.0/rhythm_loader.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Part 7 function base 5 | # 6 | 7 | import numpy as np 8 | 9 | def read_map_predictions(fn): 10 | with np.load(fn) as data: 11 | objs = data["objs"]; 12 | predictions = data["predictions"]; 13 | ticks = data["ticks"]; 14 | timestamps = data["timestamps"]; 15 | is_slider = data["is_slider"]; 16 | is_spinner = data["is_spinner"]; 17 | is_note_end = data["is_note_end"]; 18 | sv = data["sv"]; 19 | slider_ticks = data["slider_ticks"]; 20 | dist_multiplier = data["dist_multiplier"]; 21 | return objs, predictions, ticks, timestamps, is_slider, is_spinner, is_note_end, sv, slider_ticks, dist_multiplier; -------------------------------------------------------------------------------- /v7.0/slider_tools.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Slider helpers 5 | # 6 | 7 | import numpy as np 8 | 9 | def slider_mirror(obj_array, data, mode=0): 10 | """ 11 | Mirror sliders if the slider end is out of bounds. 
12 | """ 13 | if mode == 0: 14 | return obj_array, data; 15 | 16 | _, _, _, _, is_slider, _, _, _, _, _, _, _ = data; 17 | 18 | min_x = 0 19 | min_y = 0 20 | max_x = 512 21 | max_y = 384 22 | 23 | for i,obj in enumerate(obj_array): 24 | if is_slider[i]: 25 | if obj[4] < min_x or obj[4] > max_x: 26 | obj[4] = obj[0] + (obj[0] - obj[4]) 27 | obj[2] = -obj[2] 28 | if obj[5] < min_y or obj[5] > max_y: 29 | obj[5] = obj[1] + (obj[1] - obj[5]) 30 | obj[3] = -obj[3] 31 | 32 | return obj_array, data; 33 | -------------------------------------------------------------------------------- /v7.0/stream_tools.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # 4 | # Stream helpers 5 | # 6 | 7 | import numpy as np 8 | 9 | def point_distance(p1, p2): 10 | x1, y1 = p1 11 | x2, y2 = p2 12 | return np.sqrt((x1-x2)**2 + (y1-y2)**2) 13 | 14 | def find_center(p1, p2, r): 15 | x1, y1 = p1 16 | x2, y2 = p2 17 | x3 = (x1 + x2)/2 18 | y3 = (y1 + y2)/2 19 | L = np.sqrt((x1-x2)**2 + (y1-y2)**2) 20 | D = np.sqrt(r**2-(L/2)**2) 21 | x4 = x3 + D*(y1-y2)/L 22 | y4 = y3 + D*(x2-x1)/L 23 | x5 = x3 - D*(y1-y2)/L 24 | y5 = y3 - D*(x2-x1)/L 25 | return (x4, y4), (x5, y5) 26 | 27 | def find_angle(p1, p2): 28 | x1, y1 = p1 29 | x2, y2 = p2 30 | return np.arctan2(y2 - y1, x2 - x1) 31 | 32 | def get_arc_from_points_and_radius(p1, p2, r, direction=0): 33 | if direction == 0: 34 | c, _ = find_center(p1, p2, r) 35 | else: 36 | _, c = find_center(p1, p2, r) 37 | a1 = find_angle(c, p1) 38 | a2 = find_angle(c, p2) 39 | return c, a1, a2; 40 | 41 | def get_point_from_arc(c, r, a): 42 | x, y = c; 43 | return (x + np.cos(a) * r, y + np.sin(a) * r); 44 | 45 | def arc_interpolate(p1, p2, k, r=None, direction=None): 46 | if r is None: 47 | r = point_distance(p1, p2)/2 / np.random.random() 48 | elif r == -1: 49 | r = point_distance(p1, p2)/2 / np.random.random() / np.random.random() 50 | if direction is None: 51 | direction = np.random.randint(0,2) 
52 | c, a1, a2 = get_arc_from_points_and_radius(p1, p2, r, direction) 53 | if a1-a2 > np.pi: 54 | a2 += np.pi * 2 55 | if a1-a2 < -np.pi: 56 | a2 -= np.pi * 2 57 | return get_point_from_arc(c, r, a1 * (1-k) + a2 * k) 58 | 59 | def stream_regularizer(obj_array, data, mode=0): 60 | divisor = 4; 61 | 62 | if mode == 0: 63 | return obj_array, data; 64 | 65 | _, _, ticks, timestamps, is_slider, _, _, _, _, _, _, _ = data; 66 | 67 | starting_obj = -1; 68 | prev_tick = -32768; 69 | stream_count = 0; 70 | for i,obj in enumerate(obj_array): 71 | if ticks[i] - prev_tick == 1: 72 | stream_count += 1; 73 | else: 74 | if (mode == 3 or mode == 4) and stream_count >= 3: 75 | ending_obj = i-1; 76 | p1 = (obj_array[starting_obj][0], obj_array[starting_obj][1]); 77 | p2 = (obj_array[ending_obj][0], obj_array[ending_obj][1]); 78 | r = point_distance(p1, p2)/2 / np.random.random() / np.random.random(); 79 | direction = np.random.randint(0,2); 80 | for k in range(stream_count - 2): 81 | current_obj = starting_obj + k + 1; 82 | pk = arc_interpolate(p1, p2, (k+1) / (stream_count-1), r, direction); 83 | obj_array[current_obj][0], obj_array[current_obj][1] = pk; 84 | 85 | starting_obj = i; 86 | stream_count = 1; 87 | 88 | if mode == 4 and stream_count >= 5 and (ticks[i] % divisor == 0): 89 | if np.random.random() < 0.5: 90 | ending_obj = i; 91 | p1 = (obj_array[starting_obj][0], obj_array[starting_obj][1]); 92 | p2 = (obj_array[ending_obj][0], obj_array[ending_obj][1]); 93 | r = point_distance(p1, p2)/2 / np.random.random(); 94 | direction = np.random.randint(0,2); 95 | for k in range(stream_count - 2): 96 | current_obj = starting_obj + k + 1; 97 | pk = arc_interpolate(p1, p2, (k+1) / (stream_count-1), r, direction); 98 | obj_array[current_obj][0], obj_array[current_obj][1] = pk; 99 | 100 | starting_obj = i; 101 | stream_count = 1; 102 | 103 | if mode == 1 and stream_count == 3: 104 | if (ticks[starting_obj] % divisor == 0) or (ticks[starting_obj] % divisor == 2): 105 | # Move middle 
def parse_timing_analyzer_output(result):
    """Extract (bpm, offset) floats from TimingAnlyz.exe's text output.

    result: the analyzer's stdout, decoded to str. Expects lines of the
    form "BPM: <number>" and "Offset: <number>"; raises IndexError when
    either field is missing, as the original code did.
    """
    # Raw strings: "\W" in a plain literal is an invalid string escape
    # (SyntaxWarning in modern Python) even though it happened to work.
    bpm = float(re.findall(r"BPM:\W*([0-9\.]+)", result)[0])
    ofs = float(re.findall(r"Offset:\W*([0-9\.]+)", result)[0])
    return bpm, ofs

def get_timing(music_path):
    """
    Obtain timing (bpm, offset) by running TimingAnlyz.exe.

    When the first pass detects a BPM within 0.05 of an integer, the
    analyzer is re-run pinned to the rounded BPM to refine the offset.
    The duplicated regex parsing of the original is factored into
    parse_timing_analyzer_output.
    """
    result = run_command(["TimingAnlyz.exe", music_path, "0"]).decode("utf-8")
    bpm, ofs = parse_timing_analyzer_output(result)
    if np.abs(bpm - np.round(bpm)) < 0.05:
        result = run_command(["TimingAnlyz.exe", music_path, str(np.round(bpm))]).decode("utf-8")
        bpm, ofs = parse_timing_analyzer_output(result)
    return bpm, ofs