├── .gitignore
├── RECOGNITION_CATS_and_DOGS_1.ipynb
├── archive1
│   ├── main
│   └── main.zip
└── ensembledmodel.py

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
--------------------------------------------------------------------------------

/archive1/main:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------

/archive1/main.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/diglukhikh/neural-network-for-classifier/405c48b37c9a0cd28f14a7943a98db1403bab40f/archive1/main.zip
--------------------------------------------------------------------------------

/ensembledmodel.py:
--------------------------------------------------------------------------------
# Download the training and validation files from the links below:
# https://drive.google.com/file/d/10_ScZ-zbKTTDlOVSNCVKembLZX-CKnDp/view?usp=sharing
# https://drive.google.com/file/d/1F4lqUJ5UKQKMJhUbx0ICKwrEljjt975_/view?usp=sharing

from keras.models import Model
from keras.layers import Dense, Input, concatenate

import numpy

# Load the training data: every column except the last holds features,
# the last column holds the integer class label.
train = numpy.loadtxt("newtrain.csv", delimiter=",")
s = train.shape[1] - 1
print(s)
cl = int(numpy.amax(train[:, s]) + 1)
print(f'Number of classes: {cl}')

X1 = train[:, 0:s]

Y = train[:, s]
print(Y)

# Branch 1: three dense layers of decreasing width.
model_in1 = Input(shape=(s,))
model_out11 = Dense(s*30, activation="relu", name="layer1")(model_in1)
model_out111 = Dense(s*30, activation="swish", name="layer111")(model_out11)
model_out1 = Dense(s*15, activation="relu", name="layer11")(model_out111)
model1 = Model(model_in1, model_out1)

# Branch 2: two dense layers.
model_in2 = Input(shape=(s,))
model_out22 = Dense(s*15, activation="relu", name="layer2")(model_in2)
model_out2 = Dense(s*8, activation="swish", name="layer22")(model_out22)
model2 = Model(model_in2, model_out2)

# Branch 3: a single dense layer.
model_in3 = Input(shape=(s,))
model_out3 = Dense(s*8, activation="swish", name="layer3")(model_in3)
model3 = Model(model_in3, model_out3)

# Branch 4: a single, narrower dense layer.
model_in4 = Input(shape=(s,))
model_out4 = Dense(s*4, activation="swish", name="layer4")(model_in4)
model4 = Model(model_in4, model_out4)

# Merge the four branches and add a softmax classification head.
concatenated = concatenate([model_out1, model_out2, model_out3, model_out4])
out = Dense(cl, activation="softmax", name="outputlayer")(concatenated)

merged_model = Model([model_in1, model_in2, model_in3, model_in4], out)
merged_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=["accuracy"])
# The same feature matrix is fed to all four input branches.
merged_model.fit([X1, X1, X1, X1], Y, batch_size=5, epochs=10, verbose=1, validation_split=0.15)
merged_model.save("address")  # placeholder path: replace with the desired save location
--------------------------------------------------------------------------------
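
A minimal, hypothetical sketch of how the trained ensemble could be evaluated on the validation file mentioned in the header comments of ensembledmodel.py. The file name "newtest.csv", the reuse of the placeholder save path "address", and the assumption that the validation CSV follows the same layout as newtrain.csv (features in every column except the last, integer label in the last column) are illustrative assumptions, not part of the original repository.

# Hypothetical evaluation sketch; assumes the same column layout as newtrain.csv.
from keras.models import load_model
import numpy

test = numpy.loadtxt("newtest.csv", delimiter=",")  # assumed name for the validation CSV
s = test.shape[1] - 1
X_test = test[:, 0:s]
Y_test = test[:, s]

# Load the model saved by ensembledmodel.py (placeholder path reused here).
model = load_model("address")

# The ensemble has four inputs, each fed the same feature matrix.
loss, accuracy = model.evaluate([X_test, X_test, X_test, X_test], Y_test, verbose=1)
print(f"Validation loss: {loss:.4f}, accuracy: {accuracy:.4f}")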