├── src ├── NLPann.h ├── actvfunc.h ├── rnn.h ├── actvfunc.cpp ├── lstm.h ├── gru.h ├── bp.h ├── seq2vec.h ├── seq2seq.h ├── nlpmain.cpp ├── rnn.cpp ├── char2vec.h ├── gru.cpp ├── bp.cpp ├── lstm.cpp └── mainassist.h ├── .gitignore ├── README.md ├── LICENSE └── makefile /src/NLPann.h: -------------------------------------------------------------------------------- 1 | /*NLPann.h header file by ValK*/ 2 | /*2019/3/29 version0.1*/ 3 | #ifndef __NLPANN_H__ 4 | #define __NLPANN_H__ 5 | 6 | #include "bp.h" 7 | #include "rnn.h" 8 | #include "lstm.h" 9 | #include "gru.h" 10 | #include "actvfunc.h" 11 | #include "seq2seq.h" 12 | #include "seq2vec.h" 13 | #include "char2vec.h" 14 | 15 | #endif 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | *.d 3 | 4 | # Compiled Object files 5 | *.slo 6 | *.lo 7 | *.o 8 | *.obj 9 | 10 | # Precompiled Headers 11 | *.gch 12 | *.pch 13 | 14 | # Compiled Dynamic libraries 15 | *.so 16 | *.dylib 17 | *.dll 18 | 19 | # Fortran module files 20 | *.mod 21 | *.smod 22 | 23 | # Compiled Static libraries 24 | *.lai 25 | *.la 26 | *.a 27 | *.lib 28 | 29 | # Executables 30 | *.exe 31 | *.out 32 | *.app 33 | 34 | .vscode 35 | easynlp 36 | ObjData.dat -------------------------------------------------------------------------------- /src/actvfunc.h: -------------------------------------------------------------------------------- 1 | /*actvfunc.h header file made by ValK*/ 2 | /*2019/5/7 version 1.0*/ 3 | #ifndef __ACTIVATEFUNCTION_H__ 4 | #define __ACTIVATEFUNCTION_H__ 5 | 6 | #include 7 | 8 | double sigmoid(double); 9 | double diffsigmoid(double); 10 | double tanh(double); 11 | double difftanh(double); 12 | double relu(double); 13 | double diffrelu(double); 14 | double leakyrelu(double); 15 | double diffleakyrelu(double); 16 | double elu(double); 17 | double diffelu(double); 18 | double 
clipgrad(double); 19 | 20 | #endif 21 | -------------------------------------------------------------------------------- /src/rnn.h: -------------------------------------------------------------------------------- 1 | /*rnn.h header file made by ValK*/ 2 | /*2019/3/24 version 0.1*/ 3 | #ifndef __RNN_H__ 4 | #define __RNN_H__ 5 | 6 | #include 7 | #include 8 | 9 | struct rnn_neuron 10 | { 11 | double *in,*out,bia,*wi,*wh,*diff; 12 | double transdiff,*transwi,*transwh,transbia; 13 | }; 14 | 15 | class NormalRNN 16 | { 17 | protected: 18 | int INUM; 19 | int HNUM; 20 | int MAXTIME; 21 | public: 22 | rnn_neuron *hide; 23 | NormalRNN(int,int,int); 24 | ~NormalRNN(); 25 | void Init(); 26 | void Datain(const std::string&); 27 | void Dataout(const std::string&); 28 | }; 29 | class DeepRNN 30 | { 31 | protected: 32 | int INUM; 33 | int HNUM; 34 | int DEPTH; 35 | int MAXTIME; 36 | public: 37 | rnn_neuron *hlink; 38 | rnn_neuron **hide; 39 | DeepRNN(int,int,int,int); 40 | ~DeepRNN(); 41 | void Init(); 42 | void Datain(const std::string&); 43 | void Dataout(const std::string&); 44 | }; 45 | 46 | #endif 47 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # easyNLP 2 | 3 | Do NLP without coding! (in fact it is a library of common models) 4 | 5 | In this program I will use dynamic arrays to build neural networks that can do NLP tasks. (refactoring...) 6 | 7 | ## Build 8 | 9 | This project does not rely on any other third-party libraries. 10 | 11 | Make sure you have `make` and `gcc` or `clang` in your environment. Then use this command: 12 | 13 | ```bash 14 | make easynlp 15 | ``` 16 | 17 | You can also pass `-j` to run make with multiple parallel jobs. 18 | 19 | ## Future Work 20 | 21 | This repo is quite old and I'm trying to make it run correctly. (Even just running this program takes a lot of time...) 22 | 23 | `nlpmain.cpp` and `mainassist.h` are useless.
If you really want to use this project, do only this: 24 | 25 | ```C++ 26 | #include "src/NLPann.h" 27 | ``` 28 | 29 | Add `NLPann.h` as the header; then you can use all the models in it. 30 | 31 | The documentation will be added later, but before that I'll make this project more like a `C++` project than a `C` project... 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 ValKmjolnir 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /src/actvfunc.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "actvfunc.h" 3 | #include 4 | 5 | double sigmoid(double x){ 6 | return 1.0/(1.0+std::exp(-x)); 7 | } 8 | double diffsigmoid(double x){ 9 | x=1.0/(1.0+std::exp(-x)); 10 | return x*(1-x); 11 | } 12 | double tanh(double x){ 13 | double t1=std::exp(x); 14 | double t2=1.0/t1; 15 | return (t1-t2)/(t1+t2); 16 | } 17 | double difftanh(double x){ 18 | x=tanh(x); 19 | return 1-x*x; 20 | } 21 | double relu(double x){ 22 | return x>0? x:0; 23 | } 24 | double diffrelu(double x){ 25 | return x>0? 1:0; 26 | } 27 | double leakyrelu(double x){ 28 | return x>0? x:0.01*x; 29 | } 30 | double diffleakyrelu(double x){ 31 | return x>0? 1:0.01; 32 | } 33 | double elu(double x){ 34 | return x>0? (1+x):std::exp(x); 35 | } 36 | double diffelu(double x){ 37 | return x>0? 1:std::exp(x); 38 | } 39 | double clipgrad(double x){ 40 | double upper_threshold=0.01; 41 | double lower_threshold=0.000001; 42 | double sign=x>0? 
1:-1; 43 | x*=sign; 44 | if(x>upper_threshold){ 45 | return sign*upper_threshold; 46 | }else if(x 8 | #include 9 | 10 | struct lstm_neuron 11 | { 12 | double *cell; 13 | double *out; 14 | double *fog_in,*fog_out,fog_bia,*fog_wi,*fog_wh,*fog_diff; 15 | double *sig_in,*sig_out,sig_bia,*sig_wi,*sig_wh,*sig_diff; 16 | double *tan_in,*tan_out,tan_bia,*tan_wi,*tan_wh,*tan_diff; 17 | double *out_in,*out_out,out_bia,*out_wi,*out_wh,*out_diff; 18 | double fog_transbia,*fog_transwi,*fog_transwh; 19 | double sig_transbia,*sig_transwi,*sig_transwh; 20 | double tan_transbia,*tan_transwi,*tan_transwh; 21 | double out_transbia,*out_transwi,*out_transwh; 22 | }; 23 | 24 | class NormalLSTM 25 | { 26 | protected: 27 | int INUM; 28 | int HNUM; 29 | int MAXTIME; 30 | public: 31 | lstm_neuron *hide; 32 | NormalLSTM(int,int,int); 33 | ~NormalLSTM(); 34 | void Init(); 35 | void Datain(const std::string&); 36 | void Dataout(const std::string&); 37 | }; 38 | 39 | class DeepLSTM 40 | { 41 | protected: 42 | int INUM; 43 | int HNUM; 44 | int DEPTH; 45 | int MAXTIME; 46 | public: 47 | lstm_neuron *hlink; 48 | lstm_neuron **hide; 49 | DeepLSTM(int,int,int,int); 50 | ~DeepLSTM(); 51 | void ConstructorAssist(); 52 | void DestructorAssist(); 53 | void Init(); 54 | void Datain(const std::string&); 55 | void Dataout(const std::string&); 56 | }; 57 | 58 | #endif 59 | -------------------------------------------------------------------------------- /src/gru.h: -------------------------------------------------------------------------------- 1 | /*gru.h header file made by ValK*/ 2 | /*2019/5/7 version 1.1*/ 3 | #ifndef __GRU_H__ 4 | #define __GRU_H__ 5 | 6 | #include "rnn.h" 7 | #include 8 | #include 9 | 10 | struct gru_neuron 11 | { 12 | double *out; 13 | double *sig_update_in, *sig_update_out, *sig_update_wi, *sig_update_wh ; 14 | double *sig_replace_in,*sig_replace_out,*sig_replace_wi,*sig_replace_wh; 15 | double *tan_replace_in,*tan_replace_out,*tan_replace_wi,*tan_replace_wh; 16 | double 
sig_update_bia, sig_update_transbia ; 17 | double sig_replace_bia,sig_replace_transbia; 18 | double tan_replace_bia,tan_replace_transbia; 19 | double *sig_update_diff, *sig_update_transwi, *sig_update_transwh ; 20 | double *sig_replace_diff,*sig_replace_transwi,*sig_replace_transwh; 21 | double *tan_replace_diff,*tan_replace_transwi,*tan_replace_transwh; 22 | }; 23 | 24 | class NormalGRU 25 | { 26 | protected: 27 | int INUM; 28 | int HNUM; 29 | int MAXTIME; 30 | public: 31 | gru_neuron *hide; 32 | NormalGRU(int,int,int); 33 | ~NormalGRU(); 34 | void Init(); 35 | void Datain(const std::string&); 36 | void Dataout(const std::string&); 37 | }; 38 | 39 | class DeepGRU 40 | { 41 | protected: 42 | int INUM; 43 | int HNUM; 44 | int DEPTH; 45 | int MAXTIME; 46 | public: 47 | gru_neuron *hlink; 48 | gru_neuron **hide; 49 | DeepGRU(int,int,int,int); 50 | ~DeepGRU(); 51 | void ConstructorAssist(); 52 | void DestructorAssist(); 53 | void Init(); 54 | void Datain(const std::string&); 55 | void Dataout(const std::string&); 56 | }; 57 | 58 | #endif 59 | -------------------------------------------------------------------------------- /src/bp.h: -------------------------------------------------------------------------------- 1 | /*bp.h header file made by ValK*/ 2 | /*2019/5/5 version 1.0*/ 3 | #ifndef __BP_H__ 4 | #define __BP_H__ 5 | 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | struct neuron 12 | { 13 | double in,out,bia,diff; 14 | double *w; 15 | }; 16 | 17 | class NormalBP 18 | { 19 | private: 20 | int INUM; 21 | int HNUM; 22 | int ONUM; 23 | int batch_size; 24 | neuron *hide; 25 | neuron *output; 26 | double *input; 27 | double *expect; 28 | double lr; 29 | double error; 30 | std::string func_name; 31 | double ActivateFunction(double); 32 | double DiffFunction(double); 33 | public: 34 | NormalBP(int,int,int); 35 | ~NormalBP(); 36 | void Init(); 37 | void Calc(); 38 | void ErrorCalc(); 39 | double GetError(); 40 | void Training(); 41 | void Datain(const 
std::string&); 42 | void Dataout(const std::string&); 43 | void SetFunction(const std::string&); 44 | void SetLearningrate(double); 45 | void TotalWork(const std::string&,const std::string&,const std::string&); 46 | }; 47 | class DeepBP 48 | { 49 | private: 50 | int INUM; 51 | int HNUM; 52 | int ONUM; 53 | int DEPTH; 54 | int batch_size; 55 | neuron *hlink; 56 | neuron **hide; 57 | neuron *output; 58 | double *input; 59 | double *expect; 60 | double lr; 61 | double error; 62 | std::string func_name; 63 | double ActivateFunction(double); 64 | double DiffFunction(double); 65 | public: 66 | DeepBP(int,int,int,int); 67 | ~DeepBP(); 68 | void Init(); 69 | void Calc(); 70 | void ErrorCalc(); 71 | double GetError(); 72 | void Training(); 73 | void Datain(const std::string&); 74 | void Dataout(const std::string&); 75 | void SetFunction(const std::string&); 76 | void SetLearningrate(double); 77 | void TotalWork(const std::string&,const std::string&,const std::string&); 78 | }; 79 | 80 | #endif 81 | -------------------------------------------------------------------------------- /src/seq2vec.h: -------------------------------------------------------------------------------- 1 | /*seq2vec.h header file by ValK*/ 2 | /*2019/5/7 version1.4*/ 3 | #ifndef __SEQ2VEC_H__ 4 | #define __SEQ2VEC_H__ 5 | 6 | #include 7 | #include 8 | #include 9 | #include "bp.h" 10 | #include "rnn.h" 11 | #include "lstm.h" 12 | #include "gru.h" 13 | #include "actvfunc.h" 14 | 15 | class Seq2Vec 16 | { 17 | protected: 18 | int INUM; 19 | int HNUM; 20 | int ONUM; 21 | int DEPTH; 22 | int MAXTIME; 23 | int batch_size; 24 | double lr; 25 | double **input; 26 | double *expect; 27 | double error; 28 | double maxerror; 29 | neuron *output; 30 | std::string func_name; 31 | public: 32 | virtual void SetFunction(const std::string&)=0; 33 | virtual void SetBatchSize(const int)=0; 34 | virtual void SetLearningRate(const double)=0; 35 | virtual void Calc(const std::string&,const int)=0; 36 | virtual void 
Training(const std::string&,const int)=0; 37 | virtual void ErrorCalc()=0; 38 | virtual void Datain(const std::string&,const std::string&,const std::string&)=0; 39 | virtual void Dataout(const std::string&,const std::string&,const std::string&)=0; 40 | virtual void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&)=0; 41 | }; 42 | 43 | class NormalSeq2Vec:public Seq2Vec 44 | { 45 | private: 46 | NormalRNN *rnnencoder; 47 | NormalLSTM *lstmencoder; 48 | NormalGRU *gruencoder; 49 | public: 50 | NormalSeq2Vec(const std::string&,int,int,int,int); 51 | ~NormalSeq2Vec(); 52 | void SetFunction(const std::string&); 53 | void SetLearningRate(const double); 54 | void SetBatchSize(const int); 55 | void Calc(const std::string&,const int); 56 | void Training(const std::string&,const int); 57 | void ErrorCalc(); 58 | void Datain(const std::string&,const std::string&,const std::string&); 59 | void Dataout(const std::string&,const std::string&,const std::string&); 60 | void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&); 61 | }; 62 | 63 | class DeepSeq2Vec:public Seq2Vec 64 | { 65 | private: 66 | DeepRNN *rnnencoder; 67 | DeepLSTM *lstmencoder; 68 | DeepGRU *gruencoder; 69 | public: 70 | DeepSeq2Vec(const std::string&,int,int,int,int,int); 71 | ~DeepSeq2Vec(); 72 | void SetFunction(const std::string&); 73 | void SetLearningRate(const double); 74 | void SetBatchSize(const int); 75 | void Calc(const std::string&,const int); 76 | void Training(const std::string&,const int); 77 | void ErrorCalc(); 78 | void Datain(const std::string&,const std::string&,const std::string&); 79 | void Dataout(const std::string&,const std::string&,const std::string&); 80 | void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&); 81 | }; 82 | 83 | void Seq2VecDataMaker(const std::string&,const std::string&,const std::string&,const int); 84 
| 85 | #endif 86 | -------------------------------------------------------------------------------- /src/seq2seq.h: -------------------------------------------------------------------------------- 1 | /*seq2seq.h header file by ValK*/ 2 | /*2019/5/7 version1.5*/ 3 | #ifndef __SEQ2SEQ_H__ 4 | #define __SEQ2SEQ_H__ 5 | 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "rnn.h" 12 | #include "lstm.h" 13 | #include "gru.h" 14 | #include "actvfunc.h" 15 | 16 | /*abstract class Seq2Seq*/ 17 | struct seq_neuron 18 | { 19 | double *in,*out,*w,bia,*diff; 20 | double *transw,transbia; 21 | }; 22 | 23 | class Seq2Seq 24 | { 25 | protected: 26 | int INUM; 27 | int HNUM; 28 | int ONUM; 29 | int DEPTH; 30 | int MAXTIME; 31 | int batch_size; 32 | double lr; 33 | double **input; 34 | double **expect; 35 | double error; 36 | double maxerror; 37 | seq_neuron *output; 38 | std::string func_name; 39 | //output function is set as softmax so this doesn't work 40 | public: 41 | virtual void SetBatchSize(const int)=0; 42 | //this decides how many batches of data are used in training process 43 | virtual void SetLearningRate(const double)=0; 44 | //this decides the speed of updating model,but if you set a large rate,network may be corrupted 45 | virtual void Calc(const std::string&,const int,const int)=0; 46 | //used to calculate the forward propagation 47 | virtual void Training(const std::string&,const int,const int)=0; 48 | //used to calculate the back propagation through time 49 | virtual void ErrorCalc(const int)=0; 50 | //used to calculate the loss of training set 51 | virtual void SetFunction(const std::string&)=0; 52 | //set output activate function but the function has been set as softmax so this doesn't work 53 | virtual void Datain(const std::string&,const std::string&,const std::string&,const std::string&)=0; 54 | //input data before calculation 55 | virtual void Dataout(const std::string&,const std::string&,const std::string&,const std::string&)=0; 56 | 
//output data to save data.But if network is too large,this may take a lot of time 57 | virtual void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&,const std::string&)=0; 58 | }; 59 | /*NormalSeq2Seq with only one hidden layer*/ 60 | class NormalSeq2Seq:public Seq2Seq 61 | { 62 | private: 63 | NormalRNN *rnnencoder; 64 | NormalRNN *rnndecoder; 65 | NormalLSTM *lstmencoder; 66 | NormalLSTM *lstmdecoder; 67 | NormalGRU *gruencoder; 68 | NormalGRU *grudecoder; 69 | public: 70 | NormalSeq2Seq(const std::string&,int,int,int,int); 71 | ~NormalSeq2Seq(); 72 | void SetBatchSize(const int); 73 | void SetLearningRate(const double); 74 | void Calc(const std::string&,const int,const int); 75 | void Training(const std::string&,const int,const int); 76 | void ErrorCalc(const int); 77 | void SetFunction(const std::string&); 78 | void Datain(const std::string&,const std::string&,const std::string&,const std::string&); 79 | void Dataout(const std::string&,const std::string&,const std::string&,const std::string&); 80 | void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&,const std::string&); 81 | }; 82 | /*DeepSeq2Seq with deep neural networks*/ 83 | class DeepSeq2Seq:public Seq2Seq 84 | { 85 | private: 86 | DeepRNN *rnnencoder; 87 | DeepRNN *rnndecoder; 88 | DeepLSTM *lstmencoder; 89 | DeepLSTM *lstmdecoder; 90 | DeepGRU *gruencoder; 91 | DeepGRU *grudecoder; 92 | public: 93 | DeepSeq2Seq(const std::string&,int,int,int,int,int); 94 | ~DeepSeq2Seq(); 95 | void SetBatchSize(const int); 96 | void SetLearningRate(const double); 97 | void Calc(const std::string&,const int,const int); 98 | void Training(const std::string&,const int,const int); 99 | void ErrorCalc(const int); 100 | void SetFunction(const std::string&); 101 | void Datain(const std::string&,const std::string&,const std::string&,const std::string&); 102 | void Dataout(const std::string&,const std::string&,const 
std::string&,const std::string&); 103 | void TotalWork(const std::string&,const std::string&,const std::string&,const std::string&,const std::string&,const std::string&); 104 | }; 105 | 106 | #endif 107 | -------------------------------------------------------------------------------- /src/nlpmain.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | #include "NLPann.h" 10 | #include "mainassist.h" 11 | 12 | //PrintHelp() can be called if needed 13 | void help(){ 14 | std::cout 15 | <<">> [Help]"<> You can find this help with cmd:\"h\" or \"help\""<> [Tips] [easyNLP-2022 version 1.5 by ValK]"<> You can find tips with cmd:\"t\" or \"tips\""<> "; 61 | std::cin>>cmd; 62 | if(cmd=="h"||cmd=="help"){ 63 | help(); 64 | } 65 | else if(cmd=="t"||cmd=="tips"){ 66 | warn(); 67 | } 68 | else if(cmd=="d"){ 69 | manager.DeleteObj(); 70 | }else if(cmd=="c"){ 71 | manager.MakeData(); 72 | manager.ObjDataOut(); 73 | }else if(cmd=="l"){ 74 | manager.PrintAllObj(); 75 | }else if(cmd=="r"){ 76 | manager.RunModule(); 77 | }else if(cmd=="f"){ 78 | manager.FindObj(); 79 | }else if(cmd=="e"){ 80 | manager.EditObj(); 81 | }else if(cmd=="lr"){ 82 | manager.ChangeLearningRate(); 83 | }else if(cmd=="bs"){ 84 | manager.ChangeBatchSize(); 85 | }else if(cmd=="mk"){ 86 | int maxtime; 87 | char Filename[100]; 88 | char Sequencedata[100]; 89 | char Trainingdata[100]; 90 | std::cout<<">> Please input the name of text data:"; 91 | std::cin>>Filename; 92 | std::cout<<">> Please input the name of sequence data(input data):"; 93 | std::cin>>Sequencedata; 94 | std::cout<<">> Please input the name of training data:"; 95 | std::cin>>Trainingdata; 96 | if(!fopen(Filename,"r")||!fopen(Sequencedata,"w")||!fopen(Trainingdata,"w")){ 97 | std::cout<<">> [Error] Cannot open file."<> Please input the length of every input sequence:"; 100 | std::cin>>maxtime; 101 | 
Seq2VecDataMaker(Filename,Sequencedata,Trainingdata,maxtime); 102 | } 103 | }else if(cmd=="ft"){ 104 | char Typename[100]; 105 | std::cout<<">> Which type of networks would you like to find?\neasyNLP>>"; 106 | std::cin>>Typename; 107 | manager.FindSpecialObj(Typename); 108 | }else if(cmd=="q"){ 109 | std::cout<<">> [Quiting] Please wait."<> [Error] Undefined command."< 6 | #include 7 | #include 8 | #include 9 | 10 | NormalRNN::NormalRNN(int InputlayerNum,int HiddenlayerNum,int Maxtime) 11 | { 12 | MAXTIME=Maxtime; 13 | INUM=InputlayerNum; 14 | HNUM=HiddenlayerNum; 15 | 16 | hide=new rnn_neuron[HNUM]; 17 | for(int i=0;i> [Error] Cannot open file."<>hide[i].out[0]; 70 | fin>>hide[i].bia; 71 | for(int j=0;j>hide[i].wi[j]; 73 | for(int j=0;j>hide[i].wh[j]; 75 | } 76 | fin.close(); 77 | return; 78 | } 79 | 80 | void NormalRNN::Dataout(const std::string& filename) 81 | { 82 | std::ofstream fout(filename); 83 | if(fout.fail()) 84 | { 85 | std::cout<<">> [Error] Cannot open file."<> [Error] Cannot open file."<>hlink[i].out[0]; 201 | fin>>hlink[i].bia; 202 | for(int j=0;j>hlink[i].wi[j]; 204 | for(int j=0;j>hlink[i].wh[j]; 206 | } 207 | for(int d=0;d>hide[i][d].out[0]; 211 | fin>>hide[i][d].bia; 212 | for(int j=0;j>hide[i][d].wi[j]; 215 | fin>>hide[i][d].wh[j]; 216 | } 217 | } 218 | fin.close(); 219 | return; 220 | } 221 | 222 | void DeepRNN::Dataout(const std::string& filename) 223 | { 224 | std::ofstream fout(filename); 225 | if(fout.fail()) 226 | { 227 | std::cout<<">> [Error] Cannot open file."< 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include "bp.h" 11 | #include "actvfunc.h" 12 | 13 | class Char2Vec 14 | { 15 | private: 16 | int INUM; 17 | int HNUM; 18 | int ONUM; 19 | std::vector> cnt; 20 | std::vector input; 21 | std::vector expect; 22 | double lr; 23 | std::vector hide; 24 | std::vector output; 25 | public: 26 | Char2Vec(const int hnum=256) 27 | { 28 | lr=0.1; 29 | INUM=95; 30 | ONUM=95; 31 | HNUM=hnum; 32 | cnt.resize(95); 33 | for(int 
i=0;i<95;i++) 34 | cnt[i].resize(95,0); 35 | input.resize(95); 36 | expect.resize(95); 37 | hide.resize(HNUM); 38 | output.resize(ONUM); 39 | for(int i=0;i> [Char2Vec-95char] Initializing completed.\n"; 83 | } 84 | else 85 | Datain(dataFilename); 86 | CountChar(TrainingdataName); 87 | Mainwork(dataFilename); 88 | Print(); 89 | } 90 | 91 | void Char2Vec::Mainwork(const std::string& filename) 92 | { 93 | int epoch=0; 94 | double maxerror=1e8; 95 | double error=1e8; 96 | double softmax=0; 97 | 98 | double max_cnt=-1; 99 | for(auto& i:cnt) 100 | for(auto j:i) 101 | if(j>max_cnt) 102 | max_cnt=j; 103 | // limit to 0~95 104 | if(max_cnt!=0) 105 | for(auto& i:cnt) 106 | for(auto& j:i) 107 | j=j/max_cnt*95; 108 | while(maxerror>0.1) 109 | { 110 | epoch++; 111 | maxerror=0; 112 | for(int i=0;i<95;i++) 113 | { 114 | for(int j=0;j> Epoch "<> Finish training by "<> Final output in progress..."<> Training complete."<> [Error] Cannot open data file!"<>hide[i].bia; 199 | for(int j=0;j>hide[i].w[j]; 201 | } 202 | for(int i=0;i>output[i].bia; 205 | for(int j=0;j>output[i].w[j]; 207 | } 208 | fin.close(); 209 | return; 210 | } 211 | void Char2Vec::Dataout(const std::string& filename) 212 | { 213 | std::ofstream fout(filename); 214 | if(fout.fail()) 215 | { 216 | std::cout<<">> [Error] Cannot open data file!"<> [Result-Char2Vec-95char]"<0.1) 246 | { 247 | has_related=true; 248 | break; 249 | } 250 | if(!has_related) 251 | continue; 252 | std::cout<<" |"<<(char)(i+32)<<": "; 253 | for(int j=0;j0.05) 255 | std::cout<<"|"<<(char)(j+32)<<':'<> [Error] Cannot open data file!"<=32&&temp[i-1]<=126&&temp[i]>=32&&temp[i]<=126) 281 | cnt[temp[i-1]-32][temp[i]-32]++; 282 | } 283 | std::cout<<">> [Info] character counting complete."<> [Error] Cannot open data file!"< 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | NormalGRU::NormalGRU(int InputlayerNum,int HiddenlayerNum,int Maxtime) 13 | { 14 | INUM=InputlayerNum; 15 | HNUM=HiddenlayerNum; 16 | MAXTIME=Maxtime; 17 | 
hide=new gru_neuron[HNUM]; 18 | for(int i=0;i> [Error] Cannot open file."<>hide[i].out[0]; 85 | fin>>hide[i].sig_update_bia; 86 | fin>>hide[i].sig_replace_bia; 87 | fin>>hide[i].tan_replace_bia; 88 | for(int j=0;j>hide[i].sig_update_wi[j]; 91 | fin>>hide[i].sig_replace_wi[j]; 92 | fin>>hide[i].tan_replace_wi[j]; 93 | } 94 | for(int j=0;j>hide[i].sig_update_wh[j]; 97 | fin>>hide[i].sig_replace_wh[j]; 98 | fin>>hide[i].tan_replace_wh[j]; 99 | } 100 | } 101 | fin.close(); 102 | return; 103 | } 104 | 105 | void NormalGRU::Dataout(const std::string& filename) 106 | { 107 | std::ofstream fout(filename); 108 | if(fout.fail()) 109 | { 110 | std::cout<<">> [Error] Cannot open file."<> [Error] Cannot open file."<>hlink[i].out[0]; 266 | fin>>hlink[i].sig_update_bia; 267 | fin>>hlink[i].sig_replace_bia; 268 | fin>>hlink[i].tan_replace_bia; 269 | for(int j=0;j>hlink[i].sig_update_wi[j]; 272 | fin>>hlink[i].sig_replace_wi[j]; 273 | fin>>hlink[i].tan_replace_wi[j]; 274 | } 275 | for(int j=0;j>hlink[i].sig_update_wh[j]; 278 | fin>>hlink[i].sig_replace_wh[j]; 279 | fin>>hlink[i].tan_replace_wh[j]; 280 | } 281 | } 282 | for(int d=0;d>hide[i][d].out[0]; 286 | fin>>hide[i][d].sig_update_bia; 287 | fin>>hide[i][d].sig_replace_bia; 288 | fin>>hide[i][d].tan_replace_bia; 289 | for(int j=0;j>hide[i][d].sig_update_wi[j]; 292 | fin>>hide[i][d].sig_replace_wi[j]; 293 | fin>>hide[i][d].tan_replace_wi[j]; 294 | fin>>hide[i][d].sig_update_wh[j]; 295 | fin>>hide[i][d].sig_replace_wh[j]; 296 | fin>>hide[i][d].tan_replace_wh[j]; 297 | } 298 | } 299 | fin.close(); 300 | return; 301 | } 302 | 303 | void DeepGRU::Dataout(const std::string& filename) 304 | { 305 | std::ofstream fout(filename); 306 | if(fout.fail()) 307 | { 308 | std::cout<<">> [Error] Cannot open file."< 7 | #include 8 | #include 9 | #include 10 | 11 | NormalBP::NormalBP(int inputlayer_num,int hiddenlayer_num,int outputlayer_num) 12 | { 13 | error=1e8; 14 | lr=0; 15 | func_name="Unknown"; 16 | INUM=inputlayer_num; 17 | 
HNUM=hiddenlayer_num; 18 | ONUM=outputlayer_num; 19 | batch_size=1; 20 | input=new double[INUM]; 21 | expect=new double[ONUM]; 22 | hide=new neuron[HNUM]; 23 | output=new neuron[ONUM]; 24 | for(int i=0;i> [Error] You haven't chose a correct funtion."; 47 | exit(-1); 48 | } 49 | else if(func_name=="sigmoid") 50 | return sigmoid(x); 51 | else if(func_name=="tanh") 52 | return tanh(x); 53 | else if(func_name=="relu") 54 | return relu(x); 55 | else if(func_name=="leakyrelu") 56 | return leakyrelu(x); 57 | else if(func_name=="elu") 58 | return elu(x); 59 | else 60 | { 61 | std::cout<<">> [Error] You haven't chose a correct funtion."; 62 | exit(-1); 63 | } 64 | } 65 | 66 | double NormalBP::DiffFunction(double x) 67 | { 68 | if(func_name=="Unknown") 69 | { 70 | std::cout<<">> [Error] You haven't chose a correct funtion."; 71 | exit(-1); 72 | } 73 | else if(func_name=="sigmoid") 74 | return diffsigmoid(x); 75 | else if(func_name=="tanh") 76 | return difftanh(x); 77 | else if(func_name=="relu") 78 | return diffrelu(x); 79 | else if(func_name=="leakyrelu") 80 | return diffleakyrelu(x); 81 | else if(func_name=="elu") 82 | return diffelu(x); 83 | else 84 | { 85 | std::cout<<">> [Error] You haven't chose a correct funtion."; 86 | exit(-1); 87 | } 88 | } 89 | 90 | void NormalBP::Init() 91 | { 92 | srand(unsigned(time(NULL))); 93 | for(int i=0;i> [Error] Cannot open file."<>hide[i].bia; 188 | for(int j=0;j>hide[i].w[j]; 190 | } 191 | for(int i=0;i>output[i].bia; 194 | for(int j=0;j>output[i].w[j]; 196 | } 197 | fin.close(); 198 | } 199 | 200 | void NormalBP::Dataout(const std::string& filename) 201 | { 202 | std::ofstream fout(filename); 203 | if(fout.fail()) 204 | { 205 | std::cout<<">> [Error] Cannot open file."<> Output finished"<> [NormalBP] Initializing completed.\n"; 239 | } 240 | else 241 | Datain(dataFilename); 242 | double maxerror=1e8; 243 | int epoch=0; 244 | while(maxerror>0.01) 245 | { 246 | epoch++; 247 | maxerror=0; 248 | std::ifstream finq(QuestiondataName); 249 | 
std::ifstream fint(TrainingdataName); 250 | if(finq.fail()||fint.fail()) 251 | { 252 | std::cout<<">> [Error] Cannot open data file!"<> [Error] Lack "<>input[i]; 260 | for(int i=0;i>expect[i]; 262 | Calc(); 263 | ErrorCalc(); 264 | Training(); 265 | maxerror+=error; 266 | } 267 | finq.close(); 268 | fint.close(); 269 | if(epoch%10==0) 270 | { 271 | std::cout<<">> Epoch "<> Final output in progress..."<> Training complete."<> [Error] You haven't chose a correct funtion."; 331 | exit(-1); 332 | } 333 | else if(func_name=="sigmoid") 334 | return sigmoid(x); 335 | else if(func_name=="tanh") 336 | return tanh(x); 337 | else if(func_name=="relu") 338 | return relu(x); 339 | else if(func_name=="leakyrelu") 340 | return leakyrelu(x); 341 | else if(func_name=="elu") 342 | return elu(x); 343 | else 344 | { 345 | std::cout<<">> [Error] You haven't chose a correct funtion."; 346 | exit(-1); 347 | } 348 | } 349 | 350 | double DeepBP::DiffFunction(double x) 351 | { 352 | if(func_name=="Unknown") 353 | { 354 | std::cout<<">> [Error] You haven't chose a correct funtion."; 355 | exit(-1); 356 | } 357 | else if(func_name=="sigmoid") 358 | return diffsigmoid(x); 359 | else if(func_name=="tanh") 360 | return difftanh(x); 361 | else if(func_name=="relu") 362 | return diffrelu(x); 363 | else if(func_name=="leakyrelu") 364 | return diffleakyrelu(x); 365 | else if(func_name=="elu") 366 | return diffelu(x); 367 | else 368 | { 369 | std::cout<<">> [Error] You haven't chose a correct funtion."; 370 | exit(-1); 371 | } 372 | } 373 | 374 | void DeepBP::Init() 375 | { 376 | srand(unsigned(time(NULL))); 377 | for(int i=0;i=0;d--) 461 | for(int i=0;i> [Error] Cannot open file."<>hlink[i].bia; 509 | for(int j=0;j>hlink[i].w[j]; 511 | } 512 | for(int d=0;d>hide[i][d].bia; 516 | for(int j=0;j>hide[i][d].w[j]; 518 | } 519 | for(int i=0;i>output[i].bia; 522 | for(int j=0;j>output[i].w[j]; 524 | } 525 | fin.close(); 526 | } 527 | 528 | void DeepBP::Dataout(const std::string& filename) 529 | { 530 | 
std::ofstream fout(filename); 531 | if(fout.fail()) 532 | { 533 | std::cout<<">> [Error] Cannot open file."<> Output finished"<> [DeepBP] Initializing completed.\n"; 574 | } 575 | else 576 | Datain(dataFilename); 577 | double maxerror=1e8; 578 | int epoch=0; 579 | while(maxerror>0.01) 580 | { 581 | epoch++; 582 | maxerror=0; 583 | std::ifstream finq(QuestiondataName); 584 | std::ifstream fint(TrainingdataName); 585 | if(finq.fail()||fint.fail()) 586 | { 587 | std::cout<<">> [Error] Cannot open data file!"<> [Error] Lack "<>input[i]; 595 | for(int i=0;i>expect[i]; 597 | Calc(); 598 | ErrorCalc(); 599 | Training(); 600 | maxerror+=error; 601 | } 602 | finq.close(); 603 | fint.close(); 604 | if(epoch%10==0) 605 | { 606 | std::cout<<">> Epoch "<> Final output in progress..."<> Training complete."< 6 | #include 7 | #include 8 | #include 9 | 10 | NormalLSTM::NormalLSTM(int InputlayerNum,int HiddenlayerNum,int Maxtime) 11 | { 12 | MAXTIME=Maxtime; 13 | INUM=InputlayerNum; 14 | HNUM=HiddenlayerNum; 15 | hide=new lstm_neuron[HNUM]; 16 | for(int i=0;i> [Error] Cannot open file."<>hide[i].cell[0]; 142 | fin>>hide[i].out[0]; 143 | fin>>hide[i].fog_bia; 144 | fin>>hide[i].sig_bia; 145 | fin>>hide[i].tan_bia; 146 | fin>>hide[i].out_bia; 147 | for(int j=0;j>hide[i].fog_wi[j]; 150 | fin>>hide[i].sig_wi[j]; 151 | fin>>hide[i].tan_wi[j]; 152 | fin>>hide[i].out_wi[j]; 153 | } 154 | for(int j=0;j>hide[i].fog_wh[j]; 157 | fin>>hide[i].sig_wh[j]; 158 | fin>>hide[i].tan_wh[j]; 159 | fin>>hide[i].out_wh[j]; 160 | } 161 | } 162 | fin.close(); 163 | return; 164 | } 165 | 166 | void NormalLSTM::Dataout(const std::string& filename) 167 | { 168 | std::ofstream fout(filename); 169 | if(fout.fail()) 170 | { 171 | std::cout<<">> [Error] Cannot open file."<> [Error] Cannot open file."<>hlink[i].cell[0]; 456 | fin>>hlink[i].out[0]; 457 | fin>>hlink[i].fog_bia; 458 | fin>>hlink[i].sig_bia; 459 | fin>>hlink[i].tan_bia; 460 | fin>>hlink[i].out_bia; 461 | for(int j=0;j>hlink[i].fog_wi[j]; 464 | 
fin>>hlink[i].sig_wi[j]; 465 | fin>>hlink[i].tan_wi[j]; 466 | fin>>hlink[i].out_wi[j]; 467 | } 468 | for(int j=0;j>hlink[i].fog_wh[j]; 471 | fin>>hlink[i].sig_wh[j]; 472 | fin>>hlink[i].tan_wh[j]; 473 | fin>>hlink[i].out_wh[j]; 474 | } 475 | } 476 | for(int d=0;d>hide[i][d].cell[0]; 480 | fin>>hide[i][d].out[0]; 481 | fin>>hide[i][d].fog_bia; 482 | fin>>hide[i][d].sig_bia; 483 | fin>>hide[i][d].tan_bia; 484 | fin>>hide[i][d].out_bia; 485 | for(int j=0;j>hide[i][d].fog_wi[j]; 488 | fin>>hide[i][d].sig_wi[j]; 489 | fin>>hide[i][d].tan_wi[j]; 490 | fin>>hide[i][d].out_wi[j]; 491 | 492 | fin>>hide[i][d].fog_wh[j]; 493 | fin>>hide[i][d].sig_wh[j]; 494 | fin>>hide[i][d].tan_wh[j]; 495 | fin>>hide[i][d].out_wh[j]; 496 | } 497 | } 498 | fin.close(); 499 | return; 500 | } 501 | 502 | void DeepLSTM::Dataout(const std::string& filename) 503 | { 504 | std::ofstream fout(filename); 505 | if(fout.fail()) 506 | { 507 | std::cout<<">> [Error] Cannot open file."< 7 | #include 8 | #include 9 | #include 10 | 11 | struct ObjElement 12 | { 13 | char ObjName[40]; 14 | char FileName_1[40]; 15 | char FileName_2[40]; 16 | char FileName_3[40]; 17 | char FileName_4[40]; 18 | char FileName_5[40]; 19 | char FileName_6[40]; 20 | char FileName_7[40]; 21 | char FileName_8[40]; 22 | char Function[10]; 23 | int INUM; 24 | int HNUM; 25 | int ONUM; 26 | int DEPTH; 27 | int MAXTIME; 28 | int NetworkType; 29 | int BatchSize; 30 | double LearningRate; 31 | }; 32 | 33 | class UserObject 34 | { 35 | private: 36 | ObjElement Obj; 37 | public: 38 | UserObject *p; 39 | UserObject( 40 | const char *objname="NULL", 41 | const char *filename1="NULL", 42 | const char *filename2="NULL", 43 | const char *filename3="NULL", 44 | const char *filename4="NULL", 45 | const char *filename5="NULL", 46 | const char *filename6="NULL", 47 | const char *filename7="NULL", 48 | const char *filename8="NULL", 49 | const char *function="NULL", 50 | int inum=0, 51 | int hnum=0, 52 | int onum=0, 53 | int depth=0, 54 | int maxtime=0, 
55 | int networktype=0, 56 | int batchsize=0, 57 | double lr=0.1 58 | ) 59 | { 60 | strcpy(Obj.ObjName,objname); 61 | strcpy(Obj.FileName_1,filename1); 62 | strcpy(Obj.FileName_2,filename2); 63 | strcpy(Obj.FileName_3,filename3); 64 | strcpy(Obj.FileName_4,filename4); 65 | strcpy(Obj.FileName_5,filename5); 66 | strcpy(Obj.FileName_6,filename6); 67 | strcpy(Obj.FileName_7,filename7); 68 | strcpy(Obj.FileName_8,filename8); 69 | strcpy(Obj.Function,function); 70 | Obj.INUM=inum; 71 | Obj.HNUM=hnum; 72 | Obj.ONUM=onum; 73 | Obj.DEPTH=depth; 74 | Obj.MAXTIME=maxtime; 75 | Obj.NetworkType=networktype; 76 | Obj.BatchSize=batchsize; 77 | Obj.LearningRate=lr; 78 | p=NULL; 79 | } 80 | void PrintObj() 81 | { 82 | std::cout<<" ------------------------------------------------------"<p=NULL; 214 | } 215 | 216 | ObjManager::~ObjManager() 217 | { 218 | UserObject *Node=Head; 219 | UserObject *Temp; 220 | while(Node->p!=NULL) 221 | { 222 | Temp=Node; 223 | Node=Node->p; 224 | delete Temp; 225 | } 226 | delete Node; 227 | } 228 | 229 | void ObjManager::ObjDataIn() 230 | { 231 | UserObject *Node=Head; 232 | if(!fopen("ObjData.dat","r")) 233 | { 234 | FILE* fd=fopen("ObjData.dat","w"); 235 | fclose(fd); 236 | std::cout<<">> [init] Initializing completed."<> [Error] Cannot open important data \"ObjData.dat\" or this data maybe lost!"<p=new UserObject( 250 | Datatemp.ObjName, 251 | Datatemp.FileName_1, 252 | Datatemp.FileName_2, 253 | Datatemp.FileName_3, 254 | Datatemp.FileName_4, 255 | Datatemp.FileName_5, 256 | Datatemp.FileName_6, 257 | Datatemp.FileName_7, 258 | Datatemp.FileName_8, 259 | Datatemp.Function, 260 | Datatemp.INUM, 261 | Datatemp.HNUM, 262 | Datatemp.ONUM, 263 | Datatemp.DEPTH, 264 | Datatemp.MAXTIME, 265 | Datatemp.NetworkType, 266 | Datatemp.BatchSize, 267 | Datatemp.LearningRate 268 | ); 269 | Node=Node->p; 270 | Node->p=NULL; 271 | } 272 | fin.close(); 273 | return; 274 | } 275 | 276 | void ObjManager::ObjDataOut() 277 | { 278 | UserObject *Node=Head; 279 | 
std::ofstream fout("ObjData.dat",std::ios::binary); 280 | if(fout.fail()) 281 | { 282 | std::cout<<">> [Error] Cannot open important data \"Objdata.dat\" or this data maybe lost!"<p!=NULL) 286 | { 287 | Node=Node->p; 288 | fout.write((char*)Node->getObjPointer(),sizeof(ObjElement)); 289 | } 290 | fout.close(); 291 | return; 292 | } 293 | 294 | void ObjManager::MakeData() 295 | { 296 | UserObject *Node=Head; 297 | std::cout<<">> Name of your project: "; 298 | std::cin>>Datatemp.ObjName; 299 | while(Node->p!=NULL) 300 | { 301 | Node=Node->p; 302 | if(Node->CheckObjName(Datatemp.ObjName)) 303 | { 304 | std::cout<<">> [Error] You have already created this project!"<PrintObj(); 307 | return; 308 | } 309 | } 310 | if(!ObjChoose()) 311 | { 312 | std::cout<<">> [Quiting]"<p=new UserObject( 316 | Datatemp.ObjName, 317 | Datatemp.FileName_1, 318 | Datatemp.FileName_2, 319 | Datatemp.FileName_3, 320 | Datatemp.FileName_4, 321 | Datatemp.FileName_5, 322 | Datatemp.FileName_6, 323 | Datatemp.FileName_7, 324 | Datatemp.FileName_8, 325 | Datatemp.Function, 326 | Datatemp.INUM, 327 | Datatemp.HNUM, 328 | Datatemp.ONUM, 329 | Datatemp.DEPTH, 330 | Datatemp.MAXTIME, 331 | Datatemp.NetworkType, 332 | Datatemp.BatchSize, 333 | Datatemp.LearningRate 334 | ); 335 | Node=Node->p; 336 | Node->p=NULL; 337 | std::cout<<">> New project is established successfully."<>"<> [Choice] Input your choice: "; 363 | std::cin>>Command; 364 | if(Command=="1") 365 | { 366 | std::cout<<">>"<