├── README.md ├── datasets ├── iris_multi │ ├── iris.data │ └── iris.names ├── robot_multi │ ├── Wall-following.names │ └── sensor_readings_24.data ├── sonar_binary │ ├── sonar.all-data │ └── sonar.names ├── soybean_multi │ ├── soybean-large.data │ └── soybean-large.names └── wdbc_binary │ ├── wdbc.data │ └── wdbc.names ├── eca.py ├── fnn.py ├── helpers ├── Metrics.py ├── __pycache__ │ ├── Metrics.cpython-37.pyc │ ├── MyDataset.cpython-37.pyc │ └── MyDataset.cpython-38.pyc └── myDataset.py ├── lab2.py ├── lab3.py ├── lab3_eval.py ├── lab4_cgan.py ├── lab4_gan.py ├── lab4_wgan.py ├── models ├── BaseModels.py ├── ConvNetMNIST.py ├── ConvNetMNIST_BN.py ├── ConvNetSVHN2.py └── __pycache__ │ ├── BaseModels.cpython-37.pyc │ ├── BaseModels.cpython-38.pyc │ ├── ConvNetMNIST.cpython-38.pyc │ └── ConvNetSVHN2.cpython-37.pyc ├── perception.py ├── reference ├── Perceptron.py └── load_data.py └── releases ├── experiment1.zip ├── experiment2.zip ├── experiment3.zip └── experiment4.zip /README.md: -------------------------------------------------------------------------------- 1 | # Recommended specification: 2 | python - 3.7.4 3 | numpy - 1.19.4 4 | cudatoolkit - 10.2.89 5 | cudnn - 7.6.5 6 | pytorch - 1.7.0 7 | 8 | This code is designed to run in a multi-GPU environment. If that is not available to you, please adjust the variable "device_id" accordingly. 9 | 10 | Please run each .py file separately from the project root directory. 11 | 12 | Larger datasets such as MNIST are not included in the "datasets" folder; please update the dataset paths inside the code. 13 | 14 | For experiment 3, please run "lab3.py" to collect results and then run "lab3_eval.py" to visualize them. All results can be found in the "results/" folder. 15 | 16 | For experiment 4, result generation and curve generation are supported only for the GAN and WGAN models.
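If a multi-GPU machine is not available, device selection along the lines of the "device_id" note above could look like the following minimal sketch (illustrative only, not code taken from the scripts; each .py file may handle this differently):

```python
import torch

# Illustrative device selection: fall back gracefully when the requested GPU
# index does not exist on this machine. "device_id" mirrors the variable name
# mentioned in the README; set it to a valid GPU index for your setup.
device_id = 0
if torch.cuda.is_available() and device_id < torch.cuda.device_count():
    device = torch.device(f"cuda:{device_id}")
else:
    device = torch.device("cpu")  # CPU fallback for single-GPU or CPU-only machines

model = torch.nn.Linear(10, 2).to(device)  # any module can then be moved to the chosen device
```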
17 | -------------------------------------------------------------------------------- /datasets/iris_multi/iris.data: -------------------------------------------------------------------------------- 1 | 5.1,3.5,1.4,0.2,Iris-setosa 2 | 4.9,3.0,1.4,0.2,Iris-setosa 3 | 4.7,3.2,1.3,0.2,Iris-setosa 4 | 4.6,3.1,1.5,0.2,Iris-setosa 5 | 5.0,3.6,1.4,0.2,Iris-setosa 6 | 5.4,3.9,1.7,0.4,Iris-setosa 7 | 4.6,3.4,1.4,0.3,Iris-setosa 8 | 5.0,3.4,1.5,0.2,Iris-setosa 9 | 4.4,2.9,1.4,0.2,Iris-setosa 10 | 4.9,3.1,1.5,0.1,Iris-setosa 11 | 5.4,3.7,1.5,0.2,Iris-setosa 12 | 4.8,3.4,1.6,0.2,Iris-setosa 13 | 4.8,3.0,1.4,0.1,Iris-setosa 14 | 4.3,3.0,1.1,0.1,Iris-setosa 15 | 5.8,4.0,1.2,0.2,Iris-setosa 16 | 5.7,4.4,1.5,0.4,Iris-setosa 17 | 5.4,3.9,1.3,0.4,Iris-setosa 18 | 5.1,3.5,1.4,0.3,Iris-setosa 19 | 5.7,3.8,1.7,0.3,Iris-setosa 20 | 5.1,3.8,1.5,0.3,Iris-setosa 21 | 5.4,3.4,1.7,0.2,Iris-setosa 22 | 5.1,3.7,1.5,0.4,Iris-setosa 23 | 4.6,3.6,1.0,0.2,Iris-setosa 24 | 5.1,3.3,1.7,0.5,Iris-setosa 25 | 4.8,3.4,1.9,0.2,Iris-setosa 26 | 5.0,3.0,1.6,0.2,Iris-setosa 27 | 5.0,3.4,1.6,0.4,Iris-setosa 28 | 5.2,3.5,1.5,0.2,Iris-setosa 29 | 5.2,3.4,1.4,0.2,Iris-setosa 30 | 4.7,3.2,1.6,0.2,Iris-setosa 31 | 4.8,3.1,1.6,0.2,Iris-setosa 32 | 5.4,3.4,1.5,0.4,Iris-setosa 33 | 5.2,4.1,1.5,0.1,Iris-setosa 34 | 5.5,4.2,1.4,0.2,Iris-setosa 35 | 4.9,3.1,1.5,0.1,Iris-setosa 36 | 5.0,3.2,1.2,0.2,Iris-setosa 37 | 5.5,3.5,1.3,0.2,Iris-setosa 38 | 4.9,3.1,1.5,0.1,Iris-setosa 39 | 4.4,3.0,1.3,0.2,Iris-setosa 40 | 5.1,3.4,1.5,0.2,Iris-setosa 41 | 5.0,3.5,1.3,0.3,Iris-setosa 42 | 4.5,2.3,1.3,0.3,Iris-setosa 43 | 4.4,3.2,1.3,0.2,Iris-setosa 44 | 5.0,3.5,1.6,0.6,Iris-setosa 45 | 5.1,3.8,1.9,0.4,Iris-setosa 46 | 4.8,3.0,1.4,0.3,Iris-setosa 47 | 5.1,3.8,1.6,0.2,Iris-setosa 48 | 4.6,3.2,1.4,0.2,Iris-setosa 49 | 5.3,3.7,1.5,0.2,Iris-setosa 50 | 5.0,3.3,1.4,0.2,Iris-setosa 51 | 7.0,3.2,4.7,1.4,Iris-versicolor 52 | 6.4,3.2,4.5,1.5,Iris-versicolor 53 | 6.9,3.1,4.9,1.5,Iris-versicolor 54 | 5.5,2.3,4.0,1.3,Iris-versicolor 55 | 6.5,2.8,4.6,1.5,Iris-versicolor 56 | 5.7,2.8,4.5,1.3,Iris-versicolor 57 | 6.3,3.3,4.7,1.6,Iris-versicolor 58 | 4.9,2.4,3.3,1.0,Iris-versicolor 59 | 6.6,2.9,4.6,1.3,Iris-versicolor 60 | 5.2,2.7,3.9,1.4,Iris-versicolor 61 | 5.0,2.0,3.5,1.0,Iris-versicolor 62 | 5.9,3.0,4.2,1.5,Iris-versicolor 63 | 6.0,2.2,4.0,1.0,Iris-versicolor 64 | 6.1,2.9,4.7,1.4,Iris-versicolor 65 | 5.6,2.9,3.6,1.3,Iris-versicolor 66 | 6.7,3.1,4.4,1.4,Iris-versicolor 67 | 5.6,3.0,4.5,1.5,Iris-versicolor 68 | 5.8,2.7,4.1,1.0,Iris-versicolor 69 | 6.2,2.2,4.5,1.5,Iris-versicolor 70 | 5.6,2.5,3.9,1.1,Iris-versicolor 71 | 5.9,3.2,4.8,1.8,Iris-versicolor 72 | 6.1,2.8,4.0,1.3,Iris-versicolor 73 | 6.3,2.5,4.9,1.5,Iris-versicolor 74 | 6.1,2.8,4.7,1.2,Iris-versicolor 75 | 6.4,2.9,4.3,1.3,Iris-versicolor 76 | 6.6,3.0,4.4,1.4,Iris-versicolor 77 | 6.8,2.8,4.8,1.4,Iris-versicolor 78 | 6.7,3.0,5.0,1.7,Iris-versicolor 79 | 6.0,2.9,4.5,1.5,Iris-versicolor 80 | 5.7,2.6,3.5,1.0,Iris-versicolor 81 | 5.5,2.4,3.8,1.1,Iris-versicolor 82 | 5.5,2.4,3.7,1.0,Iris-versicolor 83 | 5.8,2.7,3.9,1.2,Iris-versicolor 84 | 6.0,2.7,5.1,1.6,Iris-versicolor 85 | 5.4,3.0,4.5,1.5,Iris-versicolor 86 | 6.0,3.4,4.5,1.6,Iris-versicolor 87 | 6.7,3.1,4.7,1.5,Iris-versicolor 88 | 6.3,2.3,4.4,1.3,Iris-versicolor 89 | 5.6,3.0,4.1,1.3,Iris-versicolor 90 | 5.5,2.5,4.0,1.3,Iris-versicolor 91 | 5.5,2.6,4.4,1.2,Iris-versicolor 92 | 6.1,3.0,4.6,1.4,Iris-versicolor 93 | 5.8,2.6,4.0,1.2,Iris-versicolor 94 | 5.0,2.3,3.3,1.0,Iris-versicolor 95 | 5.6,2.7,4.2,1.3,Iris-versicolor 96 | 5.7,3.0,4.2,1.2,Iris-versicolor 97 | 
5.7,2.9,4.2,1.3,Iris-versicolor 98 | 6.2,2.9,4.3,1.3,Iris-versicolor 99 | 5.1,2.5,3.0,1.1,Iris-versicolor 100 | 5.7,2.8,4.1,1.3,Iris-versicolor 101 | 6.3,3.3,6.0,2.5,Iris-virginica 102 | 5.8,2.7,5.1,1.9,Iris-virginica 103 | 7.1,3.0,5.9,2.1,Iris-virginica 104 | 6.3,2.9,5.6,1.8,Iris-virginica 105 | 6.5,3.0,5.8,2.2,Iris-virginica 106 | 7.6,3.0,6.6,2.1,Iris-virginica 107 | 4.9,2.5,4.5,1.7,Iris-virginica 108 | 7.3,2.9,6.3,1.8,Iris-virginica 109 | 6.7,2.5,5.8,1.8,Iris-virginica 110 | 7.2,3.6,6.1,2.5,Iris-virginica 111 | 6.5,3.2,5.1,2.0,Iris-virginica 112 | 6.4,2.7,5.3,1.9,Iris-virginica 113 | 6.8,3.0,5.5,2.1,Iris-virginica 114 | 5.7,2.5,5.0,2.0,Iris-virginica 115 | 5.8,2.8,5.1,2.4,Iris-virginica 116 | 6.4,3.2,5.3,2.3,Iris-virginica 117 | 6.5,3.0,5.5,1.8,Iris-virginica 118 | 7.7,3.8,6.7,2.2,Iris-virginica 119 | 7.7,2.6,6.9,2.3,Iris-virginica 120 | 6.0,2.2,5.0,1.5,Iris-virginica 121 | 6.9,3.2,5.7,2.3,Iris-virginica 122 | 5.6,2.8,4.9,2.0,Iris-virginica 123 | 7.7,2.8,6.7,2.0,Iris-virginica 124 | 6.3,2.7,4.9,1.8,Iris-virginica 125 | 6.7,3.3,5.7,2.1,Iris-virginica 126 | 7.2,3.2,6.0,1.8,Iris-virginica 127 | 6.2,2.8,4.8,1.8,Iris-virginica 128 | 6.1,3.0,4.9,1.8,Iris-virginica 129 | 6.4,2.8,5.6,2.1,Iris-virginica 130 | 7.2,3.0,5.8,1.6,Iris-virginica 131 | 7.4,2.8,6.1,1.9,Iris-virginica 132 | 7.9,3.8,6.4,2.0,Iris-virginica 133 | 6.4,2.8,5.6,2.2,Iris-virginica 134 | 6.3,2.8,5.1,1.5,Iris-virginica 135 | 6.1,2.6,5.6,1.4,Iris-virginica 136 | 7.7,3.0,6.1,2.3,Iris-virginica 137 | 6.3,3.4,5.6,2.4,Iris-virginica 138 | 6.4,3.1,5.5,1.8,Iris-virginica 139 | 6.0,3.0,4.8,1.8,Iris-virginica 140 | 6.9,3.1,5.4,2.1,Iris-virginica 141 | 6.7,3.1,5.6,2.4,Iris-virginica 142 | 6.9,3.1,5.1,2.3,Iris-virginica 143 | 5.8,2.7,5.1,1.9,Iris-virginica 144 | 6.8,3.2,5.9,2.3,Iris-virginica 145 | 6.7,3.3,5.7,2.5,Iris-virginica 146 | 6.7,3.0,5.2,2.3,Iris-virginica 147 | 6.3,2.5,5.0,1.9,Iris-virginica 148 | 6.5,3.0,5.2,2.0,Iris-virginica 149 | 6.2,3.4,5.4,2.3,Iris-virginica 150 | 5.9,3.0,5.1,1.8,Iris-virginica 151 | 152 | -------------------------------------------------------------------------------- /datasets/iris_multi/iris.names: -------------------------------------------------------------------------------- 1 | 1. Title: Iris Plants Database 2 | Updated Sept 21 by C.Blake - Added discrepency information 3 | 4 | 2. Sources: 5 | (a) Creator: R.A. Fisher 6 | (b) Donor: Michael Marshall (MARSHALL%PLU@io.arc.nasa.gov) 7 | (c) Date: July, 1988 8 | 9 | 3. Past Usage: 10 | - Publications: too many to mention!!! Here are a few. 11 | 1. Fisher,R.A. "The use of multiple measurements in taxonomic problems" 12 | Annual Eugenics, 7, Part II, 179-188 (1936); also in "Contributions 13 | to Mathematical Statistics" (John Wiley, NY, 1950). 14 | 2. Duda,R.O., & Hart,P.E. (1973) Pattern Classification and Scene Analysis. 15 | (Q327.D83) John Wiley & Sons. ISBN 0-471-22361-1. See page 218. 16 | 3. Dasarathy, B.V. (1980) "Nosing Around the Neighborhood: A New System 17 | Structure and Classification Rule for Recognition in Partially Exposed 18 | Environments". IEEE Transactions on Pattern Analysis and Machine 19 | Intelligence, Vol. PAMI-2, No. 1, 67-71. 20 | -- Results: 21 | -- very low misclassification rates (0% for the setosa class) 22 | 4. Gates, G.W. (1972) "The Reduced Nearest Neighbor Rule". IEEE 23 | Transactions on Information Theory, May 1972, 431-433. 24 | -- Results: 25 | -- very low misclassification rates again 26 | 5. See also: 1988 MLC Proceedings, 54-64. 
Cheeseman et al's AUTOCLASS II 27 | conceptual clustering system finds 3 classes in the data. 28 | 29 | 4. Relevant Information: 30 | --- This is perhaps the best known database to be found in the pattern 31 | recognition literature. Fisher's paper is a classic in the field 32 | and is referenced frequently to this day. (See Duda & Hart, for 33 | example.) The data set contains 3 classes of 50 instances each, 34 | where each class refers to a type of iris plant. One class is 35 | linearly separable from the other 2; the latter are NOT linearly 36 | separable from each other. 37 | --- Predicted attribute: class of iris plant. 38 | --- This is an exceedingly simple domain. 39 | --- This data differs from the data presented in Fishers article 40 | (identified by Steve Chadwick, spchadwick@espeedaz.net ) 41 | The 35th sample should be: 4.9,3.1,1.5,0.2,"Iris-setosa" 42 | where the error is in the fourth feature. 43 | The 38th sample: 4.9,3.6,1.4,0.1,"Iris-setosa" 44 | where the errors are in the second and third features. 45 | 46 | 5. Number of Instances: 150 (50 in each of three classes) 47 | 48 | 6. Number of Attributes: 4 numeric, predictive attributes and the class 49 | 50 | 7. Attribute Information: 51 | 1. sepal length in cm 52 | 2. sepal width in cm 53 | 3. petal length in cm 54 | 4. petal width in cm 55 | 5. class: 56 | -- Iris Setosa 57 | -- Iris Versicolour 58 | -- Iris Virginica 59 | 60 | 8. Missing Attribute Values: None 61 | 62 | Summary Statistics: 63 | Min Max Mean SD Class Correlation 64 | sepal length: 4.3 7.9 5.84 0.83 0.7826 65 | sepal width: 2.0 4.4 3.05 0.43 -0.4194 66 | petal length: 1.0 6.9 3.76 1.76 0.9490 (high!) 67 | petal width: 0.1 2.5 1.20 0.76 0.9565 (high!) 68 | 69 | 9. Class Distribution: 33.3% for each of 3 classes. 70 | -------------------------------------------------------------------------------- /datasets/robot_multi/Wall-following.names: -------------------------------------------------------------------------------- 1 | 1. Title of Database: Wall-Following navigation task with mobile robot SCITOS-G5 2 | 3 | 2. Sources: 4 | (a) Creators: Ananda Freire, Marcus Veloso and Guilherme Barreto 5 | Department of Teleinformatics Engineering 6 | Federal University of Ceará 7 | Fortaleza, Ceará, Brazil 8 | 9 | (b) Donors of database: Ananda Freire (anandalf@gmail.com) 10 | Guilherme Barreto (guilherme@deti.ufc.br) 11 | 12 | (c) Date received: August, 2010 13 | 14 | 3. Past Usage: 15 | (a) Ananda L. Freire, Guilherme A. Barreto, Marcus Veloso and Antonio T. Varela (2009), 16 | "Short-Term Memory Mechanisms in Neural Network Learning of Robot Navigation 17 | Tasks: A Case Study". Proceedings of the 6th Latin American Robotics Symposium (LARS'2009), 18 | Valparaíso-Chile, pages 1-6, DOI: 10.1109/LARS.2009.5418323 19 | 20 | 4. Relevant Information Paragraph: 21 | -- The data were collected as the SCITOS G5 navigates through the room following the wall in a clockwise 22 | direction, for 4 rounds. To navigate, the robot uses 24 ultrasound sensors arranged circularly around its "waist". 23 | The numbering of the ultrasound sensors starts at the front of the robot and increases in clockwise direction. 24 | 25 | -- The provided files comprise three diferent data sets. The first one contains the raw values of the measurements 26 | of all 24 ultrasound sensors and the corresponding class label (see Section 7). Sensor readings are sampled at a 27 | rate of 9 samples per second. 
28 | 29 | The second one contains four sensor readings named 'simplified distances' and the corresponding class label (see Section 7). 30 | These simplified distances are referred to as the 'front distance', 'left distance', 'right distance' and 'back distance'. 31 | They consist, respectively, of the minimum sensor readings among those within 60 degree arcs located at the front, left, 32 | right and back parts of the robot. 33 | 34 | The third one contains only the front and left simplified distances and the corresponding class label (see Section 7). 35 | 36 | -- It is worth mentioning that the 24 ultrasound readings and the simplified distances were collected at the same 37 | time step, so each file has the same number of rows (one for each sampling time step). 38 | 39 | -- The wall-following task and data gathering were designed to test the hypothesis that this apparently simple navigation task 40 | is indeed a non-linearly separable classification task. Thus, linear classifiers, such as the Perceptron network, are not able 41 | to learn the task and command the robot around the room without collisions. Nonlinear neural classifiers, such as the MLP network, 42 | are able to learn the task and command the robot successfully without collisions. 43 | 44 | -- If some kind of short-term memory mechanism is provided to the neural classifiers, their performances are improved in general. 45 | For example, if past inputs are provided together with current sensor readings, even the Perceptron becomes able to 46 | learn the task and command the robot succesfully. If a recurrent neural network, such as the Elman network, is used to 47 | learn the task, the resulting dynamical classifier is able to learn the task using less hidden neurons than the MLP network. 48 | 49 | -- Files with different number of sensor readings were built in order to evaluate the performance of the classifiers 50 | with respect to the number of inputs. 51 | 52 | 5. Number of Instances: 5456 53 | 54 | 6. Number of Attributes 55 | -- sensor_readings_24.data: 24 numeric attributes and the class. 56 | -- sensor_readings_4.data: 4 numeric attributes and the class. 57 | -- sensor_readings_2.data: 2 numeric attributes and the class. 58 | 59 | 7. For Each Attribute: 60 | -- File sensor_readings_24.data: 61 | 1. US1: ultrasound sensor at the front of the robot (reference angle: 180°) - (numeric: real) 62 | 2. US2: ultrasound reading (reference angle: -165°) - (numeric: real) 63 | 3. US3: ultrasound reading (reference angle: -150°) - (numeric: real) 64 | 4. US4: ultrasound reading (reference angle: -135°) - (numeric: real) 65 | 5. US5: ultrasound reading (reference angle: -120°) - (numeric: real) 66 | 6. US6: ultrasound reading (reference angle: -105°) - (numeric: real) 67 | 7. US7: ultrasound reading (reference angle: -90°) - (numeric: real) 68 | 8. US8: ultrasound reading (reference angle: -75°) - (numeric: real) 69 | 9. US9: ultrasound reading (reference angle: -60°) - (numeric: real) 70 | 10. US10: ultrasound reading (reference angle: -45°) - (numeric: real) 71 | 11. US11: ultrasound reading (reference angle: -30°) - (numeric: real) 72 | 12. US12: ultrasound reading (reference angle: -15°) - (numeric: real) 73 | 13. US13: reading of ultrasound sensor situated at the back of the robot (reference angle: 0°) - (numeric: real) 74 | 14. US14: ultrasound reading (reference angle: 15°) - (numeric: real) 75 | 15. US15: ultrasound reading (reference angle: 30°) - (numeric: real) 76 | 16. 
US16: ultrasound reading (reference angle: 45°) - (numeric: real) 77 | 17. US17: ultrasound reading (reference angle: 60°) - (numeric: real) 78 | 18. US18: ultrasound reading (reference angle: 75°) - (numeric: real) 79 | 19. US19: ultrasound reading (reference angle: 90°) - (numeric: real) 80 | 20. US20: ultrasound reading (reference angle: 105°) - (numeric: real) 81 | 21. US21: ultrasound reading (reference angle: 120°) - (numeric: real) 82 | 22. US22: ultrasound reading (reference angle: 135°) - (numeric: real) 83 | 23. US23: ultrasound reading (reference angle: 150°) - (numeric: real) 84 | 24. US24: ultrasound reading (reference angle: 165°) - (numeric: real) 85 | 25. Class: 86 | -- Move-Forward 87 | -- Slight-Right-Turn 88 | -- Sharp-Right-Turn 89 | -- Slight-Left-Turn 90 | 91 | -- File sensor_readings_4.data: 92 | 1. SD_front: minimum sensor reading within a 60 degree arc located at the front of the robot - (numeric: real) 93 | 2. SD_left: minimum sensor reading within a 60 degree arc located at the left of the robot - (numeric: real) 94 | 3. SD_right: minimum sensor reading within a 60 degree arc located at the right of the robot - (numeric: real) 95 | 4. SD_back: minimum sensor reading within a 60 degree arc located at the back of the robot - (numeric: real) 96 | 5. Class: 97 | -- Move-Forward 98 | -- Slight-Right-Turn 99 | -- Sharp-Right-Turn 100 | -- Slight-Left-Turn 101 | 102 | -- File sensor_readings_2.data: 103 | 1. SD_front: minimum sensor reading within a 60 degree arc located at the front of the robot - (numeric: real) 104 | 2. SD_left: minimum sensor reading within a 60 degree arc located at the left of the robot - (numeric: real) 105 | 3. Class: 106 | -- Move-Forward 107 | -- Slight-Right-Turn 108 | -- Sharp-Right-Turn 109 | -- Slight-Left-Turn 110 | 111 | -- Summary Statistics: 112 | -- File sensor_readings_24.data: 113 | Max Min Mean SD 114 | US1 5.0000 0.40000 1.47162 0.80280 115 | US2 5.0250 0.43700 2.32704 1.41015 116 | US3 5.0290 0.47000 2.48935 1.24743 117 | US4 5.0170 0.83300 2.79650 1.30937 118 | US5 5.0000 1.12000 2.95855 1.33922 119 | US6 5.0050 1.11400 2.89307 1.28258 120 | US7 5.0080 1.12200 3.35111 1.41369 121 | US8 5.0870 0.85900 2.54040 1.11155 122 | US9 5.0000 0.83600 3.12562 1.35697 123 | US10 5.0220 0.81000 2.83239 1.30784 124 | US11 5.0190 0.78300 2.54940 1.38203 125 | US12 5.0000 0.77800 2.07778 1.24930 126 | US13 5.0030 0.77000 2.12578 1.40717 127 | US14 5.0000 0.75600 2.19049 1.57687 128 | US15 5.0000 0.49500 2.20577 1.71543 129 | US16 5.0000 0.42400 1.20211 1.09857 130 | US17 5.0000 0.37300 0.98983 0.94207 131 | US18 5.0000 0.35400 0.91027 0.88953 132 | US19 5.0000 0.34000 1.05811 1.14463 133 | US20 5.0000 0.35500 1.07632 1.14150 134 | US21 5.0000 0.38000 1.01592 0.88744 135 | US22 5.0000 0.37000 1.77803 1.57169 136 | US23 5.0000 0.36700 1.55505 1.29145 137 | US24 5.0000 0.37700 1.57851 1.15048 138 | 139 | -- File sensor_readings_4.data: 140 | Max Min Mean SD 141 | SD_front 5 0.49500 1.29031 0.62670 142 | SD_left 5 0.34000 0.68127 0.34259 143 | SD_right 5 0.83600 1.88182 0.56253 144 | SD_back 5 0.36700 1.27369 0.82175 145 | 146 | -- File sensor_readings_2.data: 147 | Max Min Mean SD 148 | SD_front 5 0.49500 1.29031 0.62670 149 | SD_left 5 0.34000 0.68127 0.34259 150 | 151 | 152 | 8. Missing Attribute Values: none 153 | 154 | 9. Class Distribution: 155 | -- Move-Forward: 2205 samples (40.41%). 156 | -- Slight-Right-Turn: 826 samples (15.13%). 157 | -- Sharp-Right-Turn: 2097 samples (38.43%). 158 | -- Slight-Left-Turn: 328 samples (6.01%). 
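As a quick orientation for loading the file described above, a minimal sketch follows, assuming the usual comma-separated UCI layout (24 numeric readings followed by the class string); the repository's own loader in "helpers/myDataset.py" may handle this differently:

```python
import pandas as pd

# Hypothetical loader for sensor_readings_24.data: 24 ultrasound readings plus a class label.
cols = [f"US{i}" for i in range(1, 25)] + ["Class"]
df = pd.read_csv("datasets/robot_multi/sensor_readings_24.data", header=None, names=cols)

# Map the four movement classes listed above to integer targets for a classifier.
label_map = {"Move-Forward": 0, "Slight-Right-Turn": 1,
             "Sharp-Right-Turn": 2, "Slight-Left-Turn": 3}
X = df[cols[:-1]].to_numpy()
y = df["Class"].map(label_map).to_numpy()
print(X.shape, y.shape)  # expected: (5456, 24) (5456,)
```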
159 | 160 | -------------------------------------------------------------------------------- /datasets/sonar_binary/sonar.names: -------------------------------------------------------------------------------- 1 | NAME: Sonar, Mines vs. Rocks 2 | 3 | SUMMARY: This is the data set used by Gorman and Sejnowski in their study 4 | of the classification of sonar signals using a neural network [1]. The 5 | task is to train a network to discriminate between sonar signals bounced 6 | off a metal cylinder and those bounced off a roughly cylindrical rock. 7 | 8 | SOURCE: The data set was contributed to the benchmark collection by Terry 9 | Sejnowski, now at the Salk Institute and the University of California at 10 | San Deigo. The data set was developed in collaboration with R. Paul 11 | Gorman of Allied-Signal Aerospace Technology Center. 12 | 13 | MAINTAINER: Scott E. Fahlman 14 | 15 | PROBLEM DESCRIPTION: 16 | 17 | The file "sonar.mines" contains 111 patterns obtained by bouncing sonar 18 | signals off a metal cylinder at various angles and under various 19 | conditions. The file "sonar.rocks" contains 97 patterns obtained from 20 | rocks under similar conditions. The transmitted sonar signal is a 21 | frequency-modulated chirp, rising in frequency. The data set contains 22 | signals obtained from a variety of different aspect angles, spanning 90 23 | degrees for the cylinder and 180 degrees for the rock. 24 | 25 | Each pattern is a set of 60 numbers in the range 0.0 to 1.0. Each number 26 | represents the energy within a particular frequency band, integrated over 27 | a certain period of time. The integration aperture for higher frequencies 28 | occur later in time, since these frequencies are transmitted later during 29 | the chirp. 30 | 31 | The label associated with each record contains the letter "R" if the object 32 | is a rock and "M" if it is a mine (metal cylinder). The numbers in the 33 | labels are in increasing order of aspect angle, but they do not encode the 34 | angle directly. 35 | 36 | METHODOLOGY: 37 | 38 | This data set can be used in a number of different ways to test learning 39 | speed, quality of ultimate learning, ability to generalize, or combinations 40 | of these factors. 41 | 42 | In [1], Gorman and Sejnowski report two series of experiments: an 43 | "aspect-angle independent" series, in which the whole data set is used 44 | without controlling for aspect angle, and an "aspect-angle dependent" 45 | series in which the training and testing sets were carefully controlled to 46 | ensure that each set contained cases from each aspect angle in 47 | appropriate proportions. 48 | 49 | For the aspect-angle independent experiments the combined set of 208 cases 50 | is divided randomly into 13 disjoint sets with 16 cases in each. For each 51 | experiment, 12 of these sets are used as training data, while the 13th is 52 | reserved for testing. The experiment is repeated 13 times so that every 53 | case appears once as part of a test set. The reported performance is an 54 | average over the entire set of 13 different test sets, each run 10 times. 55 | 56 | It was observed that this random division of the sample set led to rather 57 | uneven performance. A few of the splits gave poor results, presumably 58 | because the test set contains some samples from aspect angles that are 59 | under-represented in the corresponding training set. 
This motivated Gorman 60 | and Sejnowski to devise a different set of experiments in which an attempt 61 | was made to balance the training and test sets so that each would have a 62 | representative number of samples from all aspect angles. Since detailed 63 | aspect angle information was not present in the data base of samples, the 64 | 208 samples were first divided into clusters, using a 60-dimensional 65 | Euclidian metric; each of these clusters was then divided between the 66 | 104-member training set and the 104-member test set. 67 | 68 | The actual training and testing samples used for the "aspect angle 69 | dependent" experiments are marked in the data files. The reported 70 | performance is an average over 10 runs with this single division of the 71 | data set. 72 | 73 | A standard back-propagation network was used for all experiments. The 74 | network had 60 inputs and 2 output units, one indicating a cylinder and the 75 | other a rock. Experiments were run with no hidden units (direct 76 | connections from each input to each output) and with a single hidden layer 77 | with 2, 3, 6, 12, or 24 units. Each network was trained by 300 epochs over 78 | the entire training set. 79 | 80 | The weight-update formulas used in this study were slightly different from 81 | the standard form. A learning rate of 2.0 and momentum of 0.0 was used. 82 | Errors less than 0.2 were treated as zero. Initial weights were uniform 83 | random values in the range -0.3 to +0.3. 84 | 85 | RESULTS: 86 | 87 | For the angle independent experiments, Gorman and Sejnowski report the 88 | following results for networks with different numbers of hidden units: 89 | 90 | Hidden % Right on Std. % Right on Std. 91 | Units Training set Dev. Test Set Dev. 92 | ------ ------------ ---- ---------- ---- 93 | 0 89.4 2.1 77.1 8.3 94 | 2 96.5 0.7 81.9 6.2 95 | 3 98.8 0.4 82.0 7.3 96 | 6 99.7 0.2 83.5 5.6 97 | 12 99.8 0.1 84.7 5.7 98 | 24 99.8 0.1 84.5 5.7 99 | 100 | For the angle-dependent experiments Gorman and Sejnowski report the 101 | following results: 102 | 103 | Hidden % Right on Std. % Right on Std. 104 | Units Training set Dev. Test Set Dev. 105 | ------ ------------ ---- ---------- ---- 106 | 0 79.3 3.4 73.1 4.8 107 | 2 96.2 2.2 85.7 6.3 108 | 3 98.1 1.5 87.6 3.0 109 | 6 99.4 0.9 89.3 2.4 110 | 12 99.8 0.6 90.4 1.8 111 | 24 100.0 0.0 89.2 1.4 112 | 113 | Not surprisingly, the network's performance on the test set was somewhat 114 | better when the aspect angles in the training and test sets were balanced. 115 | 116 | Gorman and Sejnowski further report that a nearest neighbor classifier on 117 | the same data gave an 82.7% probability of correct classification. 118 | 119 | Three trained human subjects were each tested on 100 signals, chosen at 120 | random from the set of 208 returns used to create this data set. Their 121 | responses ranged between 88% and 97% correct. However, they may have been 122 | using information from the raw sonar signal that is not preserved in the 123 | processed data sets presented here. 124 | 125 | REFERENCES: 126 | 127 | 1. Gorman, R. P., and Sejnowski, T. J. (1988). "Analysis of Hidden Units 128 | in a Layered Network Trained to Classify Sonar Targets" in Neural Networks, 129 | Vol. 1, pp. 75-89. 
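For readers who want to mirror the "aspect-angle independent" arrangement described in the METHODOLOGY section, the split can be sketched as follows (illustrative only; it reproduces the 13 disjoint folds of 16 cases each, with the reported figures being averages over the 13 test folds, each run 10 times):

```python
import numpy as np

# Shuffle the 208 case indices and cut them into 13 disjoint folds of 16 cases each.
rng = np.random.default_rng(0)
folds = rng.permutation(208).reshape(13, 16)

for k in range(13):
    test_idx = folds[k]                                                   # 16 held-out cases
    train_idx = np.concatenate([folds[j] for j in range(13) if j != k])   # remaining 192 cases
    print(k, train_idx.shape, test_idx.shape)
```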
130 | -------------------------------------------------------------------------------- /datasets/soybean_multi/soybean-large.data: -------------------------------------------------------------------------------- 1 | diaporthe-stem-canker,6,0,2,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 2 | diaporthe-stem-canker,4,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 3 | diaporthe-stem-canker,3,0,2,1,0,1,0,2,1,2,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 4 | diaporthe-stem-canker,3,0,2,1,0,1,0,2,0,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 5 | diaporthe-stem-canker,6,0,2,1,0,2,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 6 | diaporthe-stem-canker,5,0,2,1,0,3,0,1,0,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 7 | diaporthe-stem-canker,5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 8 | diaporthe-stem-canker,4,0,2,1,1,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 9 | diaporthe-stem-canker,6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 10 | diaporthe-stem-canker,4,0,2,1,0,2,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 11 | charcoal-rot,6,0,0,2,0,1,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 12 | charcoal-rot,4,0,0,1,1,1,3,1,1,1,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 13 | charcoal-rot,3,0,0,1,0,1,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 14 | charcoal-rot,6,0,0,1,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 15 | charcoal-rot,6,0,0,2,0,1,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 16 | charcoal-rot,5,0,0,2,1,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 17 | charcoal-rot,6,0,0,2,1,0,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 18 | charcoal-rot,4,0,0,1,0,2,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 19 | charcoal-rot,3,0,0,2,0,2,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 20 | charcoal-rot,5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 21 | rhizoctonia-root-rot,1,1,2,0,0,2,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 22 | rhizoctonia-root-rot,1,1,2,0,0,1,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 23 | rhizoctonia-root-rot,3,0,2,0,1,3,1,2,0,1,1,0,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 24 | rhizoctonia-root-rot,0,1,2,0,0,0,1,1,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 25 | rhizoctonia-root-rot,0,1,2,0,0,1,1,2,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 26 | rhizoctonia-root-rot,1,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 27 | rhizoctonia-root-rot,1,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 28 | rhizoctonia-root-rot,2,1,2,0,0,2,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 29 | rhizoctonia-root-rot,1,1,2,0,0,1,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 30 | rhizoctonia-root-rot,2,1,2,0,0,1,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 31 | phytophthora-rot,0,1,2,1,1,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,0 32 | phytophthora-rot,1,1,2,1,?,3,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 33 | phytophthora-rot,2,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 34 | phytophthora-rot,1,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 35 | phytophthora-rot,2,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 36 | 
phytophthora-rot,3,1,2,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 37 | phytophthora-rot,0,1,1,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 38 | phytophthora-rot,3,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 39 | phytophthora-rot,2,1,1,1,?,0,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 40 | phytophthora-rot,2,1,2,0,0,1,1,2,0,1,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 41 | phytophthora-rot,2,1,2,1,?,1,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 42 | phytophthora-rot,1,1,2,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 43 | phytophthora-rot,0,1,2,1,0,3,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 44 | phytophthora-rot,0,1,1,1,1,2,1,2,1,0,1,1,0,2,2,0,0,0,1,1,2,2,0,1,0,0,0,3,4,0,0,0,0,0,0 45 | phytophthora-rot,3,1,2,0,0,1,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 46 | phytophthora-rot,2,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 47 | phytophthora-rot,0,1,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 48 | phytophthora-rot,2,1,1,2,?,2,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 49 | phytophthora-rot,2,1,2,1,1,1,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,0 50 | phytophthora-rot,0,1,2,1,0,3,1,1,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 51 | phytophthora-rot,1,1,2,1,0,0,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 52 | phytophthora-rot,1,1,2,1,?,0,1,?,?,?,1,1,0,2,2,0,0,0,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 53 | phytophthora-rot,3,1,2,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 54 | phytophthora-rot,2,1,2,1,?,1,1,?,?,?,1,1,0,2,2,0,0,0,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 55 | phytophthora-rot,3,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 56 | phytophthora-rot,1,1,2,1,1,3,1,2,0,1,1,1,0,2,2,0,0,0,1,1,1,2,0,1,0,0,0,3,4,0,0,0,0,0,0 57 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 58 | phytophthora-rot,2,1,2,2,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 59 | phytophthora-rot,3,1,1,2,?,2,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 60 | phytophthora-rot,1,1,2,2,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 61 | phytophthora-rot,2,1,2,2,?,3,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 62 | phytophthora-rot,3,1,1,1,?,0,1,?,?,?,1,1,0,2,2,0,0,0,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 63 | phytophthora-rot,2,1,2,0,0,1,1,2,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 64 | phytophthora-rot,3,1,1,1,?,1,1,?,?,?,1,1,0,2,2,0,0,0,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 65 | phytophthora-rot,2,1,2,2,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 66 | phytophthora-rot,1,1,2,0,0,0,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 67 | phytophthora-rot,3,1,2,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 68 | phytophthora-rot,3,1,2,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 69 | phytophthora-rot,3,1,1,0,0,2,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 70 | phytophthora-rot,3,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 71 | brown-stem-rot,4,0,0,1,0,1,3,1,1,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 72 | brown-stem-rot,4,0,0,1,0,1,3,1,1,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 73 | brown-stem-rot,3,1,0,0,0,3,0,1,1,2,1,0,0,2,2,0,0,0,1,0,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 74 | 
brown-stem-rot,5,0,0,2,0,1,3,1,1,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 75 | brown-stem-rot,5,0,0,2,0,2,3,1,1,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 76 | brown-stem-rot,4,0,0,1,0,3,2,1,0,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 77 | brown-stem-rot,5,0,0,1,1,3,3,1,0,2,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 78 | brown-stem-rot,6,0,1,1,1,2,0,1,1,0,1,0,0,2,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 79 | brown-stem-rot,5,1,0,0,0,3,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 80 | brown-stem-rot,5,1,0,1,0,1,3,1,1,0,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 81 | brown-stem-rot,4,0,1,0,1,2,3,1,1,2,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 82 | brown-stem-rot,4,1,0,0,0,3,2,1,1,1,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 83 | brown-stem-rot,4,0,0,1,0,2,0,1,1,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 84 | brown-stem-rot,3,1,0,0,0,2,0,2,0,1,1,1,2,0,1,0,0,0,1,0,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 85 | brown-stem-rot,5,0,0,1,0,3,2,1,0,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 86 | brown-stem-rot,4,0,0,1,0,3,3,1,1,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 87 | brown-stem-rot,4,0,0,1,0,3,2,1,0,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 88 | brown-stem-rot,4,0,0,1,0,1,2,1,1,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 89 | brown-stem-rot,4,0,0,1,0,1,2,1,0,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 90 | brown-stem-rot,3,0,0,1,0,3,2,1,0,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 91 | powdery-mildew,5,0,0,1,1,3,3,1,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 92 | powdery-mildew,6,0,1,0,1,0,0,0,1,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 93 | powdery-mildew,1,1,0,1,0,3,3,1,2,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 94 | powdery-mildew,6,1,1,0,0,2,2,0,1,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 95 | powdery-mildew,4,1,1,0,0,2,2,0,2,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 96 | powdery-mildew,6,0,0,1,1,1,1,1,0,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 97 | powdery-mildew,2,1,1,0,0,2,2,0,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 98 | powdery-mildew,6,1,0,1,0,1,1,1,1,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 99 | powdery-mildew,5,1,0,1,0,1,1,1,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 100 | powdery-mildew,1,1,0,1,0,1,1,1,2,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 101 | downy-mildew,6,0,2,0,1,2,1,0,1,2,0,1,2,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 102 | downy-mildew,2,0,2,1,1,1,1,1,1,2,0,1,2,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 103 | downy-mildew,1,0,2,1,1,3,2,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 104 | downy-mildew,4,1,2,2,0,2,2,1,0,1,0,1,1,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 105 | downy-mildew,1,0,2,0,1,0,0,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 106 | downy-mildew,2,1,2,0,0,3,0,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 107 | downy-mildew,2,1,2,1,0,2,0,1,0,1,0,1,2,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 108 | downy-mildew,4,1,2,2,0,2,1,0,1,2,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 109 | downy-mildew,4,1,2,0,0,1,2,1,0,1,0,1,2,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 110 | downy-mildew,5,1,2,1,0,3,2,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 111 | brown-spot,1,1,2,2,1,3,3,1,0,2,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 112 | brown-spot,2,0,2,1,0,2,3,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 113 | 
brown-spot,2,0,2,1,0,2,3,1,1,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 114 | brown-spot,2,0,2,1,0,1,0,1,2,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 115 | brown-spot,1,1,2,2,1,3,3,1,1,1,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 116 | brown-spot,1,1,2,1,0,2,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 117 | brown-spot,0,1,2,2,1,3,3,1,2,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 118 | brown-spot,2,0,2,1,0,2,3,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 119 | brown-spot,1,0,2,1,0,2,3,1,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 120 | brown-spot,2,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 121 | brown-spot,5,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 122 | brown-spot,1,1,2,1,0,3,3,1,1,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 123 | brown-spot,1,0,2,1,0,3,3,1,2,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 124 | brown-spot,4,0,2,1,0,1,3,1,0,0,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 125 | brown-spot,1,0,2,1,0,2,3,1,0,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 126 | brown-spot,4,1,2,1,0,3,3,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 127 | brown-spot,2,0,2,1,0,3,3,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 128 | brown-spot,0,1,1,1,1,2,2,0,2,1,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 129 | brown-spot,1,1,1,1,1,2,0,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 130 | brown-spot,1,1,2,1,0,1,0,1,2,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 131 | brown-spot,1,0,2,1,0,1,3,1,0,0,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 132 | brown-spot,2,0,2,1,0,3,3,1,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 133 | brown-spot,3,0,2,1,0,2,3,2,2,1,0,1,2,0,1,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0 134 | brown-spot,2,1,2,2,1,3,1,1,1,0,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 135 | brown-spot,1,0,2,1,0,2,3,1,2,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 136 | brown-spot,1,1,2,1,0,2,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 137 | brown-spot,5,0,2,1,0,1,3,1,0,0,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 138 | brown-spot,4,1,1,1,1,2,2,0,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0 139 | brown-spot,3,1,2,1,0,1,3,1,0,2,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 140 | brown-spot,1,0,2,1,0,3,3,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 141 | brown-spot,4,0,2,1,0,2,3,2,1,1,0,1,2,0,1,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0 142 | brown-spot,2,1,2,1,0,2,3,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 143 | brown-spot,2,1,1,1,1,0,0,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 144 | brown-spot,3,1,2,1,0,3,1,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 145 | brown-spot,3,0,2,1,0,3,3,2,2,0,0,1,2,0,1,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0 146 | brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 147 | brown-spot,3,0,2,1,0,3,1,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 148 | brown-spot,3,1,2,1,0,3,1,1,0,2,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 149 | brown-spot,2,1,2,1,0,3,3,2,2,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 150 | brown-spot,5,1,2,1,0,3,3,2,0,2,0,1,2,0,1,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0 151 | bacterial-blight,5,0,2,1,1,3,3,1,1,0,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 152 | bacterial-blight,4,0,2,2,1,2,3,1,1,1,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 153 | 
bacterial-blight,2,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 154 | bacterial-blight,3,0,1,1,0,1,2,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 155 | bacterial-blight,3,0,1,1,0,3,2,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 156 | bacterial-blight,3,0,2,1,1,2,1,1,1,0,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 157 | bacterial-blight,3,0,1,1,0,1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 158 | bacterial-blight,4,0,2,1,1,0,3,1,1,1,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 159 | bacterial-blight,2,0,1,1,0,3,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 160 | bacterial-blight,4,1,2,2,1,2,1,1,1,2,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 161 | bacterial-pustule,2,1,1,2,0,2,2,0,0,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2 162 | bacterial-pustule,3,0,2,0,1,2,3,1,1,1,1,1,2,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 163 | bacterial-pustule,2,0,1,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 164 | bacterial-pustule,4,1,2,1,0,3,0,1,0,2,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1 165 | bacterial-pustule,3,0,2,1,1,1,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1 166 | bacterial-pustule,3,1,1,0,0,2,0,0,0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 167 | bacterial-pustule,3,0,1,1,1,2,3,0,0,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0 168 | bacterial-pustule,3,1,2,1,0,0,2,1,0,2,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1 169 | bacterial-pustule,4,0,1,1,1,1,3,0,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0 170 | bacterial-pustule,5,1,1,1,0,2,0,0,1,2,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 171 | purple-seed-stain,6,0,2,0,1,2,2,0,0,0,0,0,0,2,2,0,0,0,1,1,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 172 | purple-seed-stain,6,0,2,0,0,2,2,0,1,1,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 173 | purple-seed-stain,4,0,2,1,1,1,1,0,1,2,0,0,0,2,2,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 174 | purple-seed-stain,4,0,2,1,1,0,0,0,0,1,0,1,2,0,0,0,0,0,0,1,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 175 | purple-seed-stain,4,0,2,0,0,0,0,0,0,2,0,1,2,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 176 | purple-seed-stain,6,0,2,2,0,2,2,0,0,1,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 177 | purple-seed-stain,3,0,2,0,1,0,0,0,0,1,0,0,0,2,2,0,0,0,0,1,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 178 | purple-seed-stain,3,0,2,1,1,3,3,0,1,1,0,0,0,2,2,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 179 | purple-seed-stain,5,0,2,1,0,1,1,0,0,0,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 180 | purple-seed-stain,4,0,2,1,0,0,0,0,1,1,0,0,0,2,2,0,0,0,1,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 181 | anthracnose,5,1,2,1,0,3,3,1,1,0,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,0,0,0,0,0,0 182 | anthracnose,5,1,2,2,1,2,2,0,1,2,0,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,1,1,1,0,1,0 183 | anthracnose,6,0,2,1,0,1,1,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 184 | anthracnose,2,1,2,2,1,0,0,1,0,0,1,1,0,2,2,0,0,0,1,0,2,1,0,1,0,0,0,0,0,0,0,0,0,0,0 185 | anthracnose,3,0,2,1,0,3,3,1,0,0,1,1,0,2,2,0,0,0,1,0,3,2,1,1,0,0,0,1,2,1,1,1,0,0,0 186 | anthracnose,4,1,2,2,1,2,2,1,0,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,0,0,0,0,0,0 187 | anthracnose,6,0,2,1,0,2,2,1,0,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 188 | anthracnose,1,0,2,1,0,1,1,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,0,0,1,0,1,0,0,0 189 | anthracnose,6,1,2,1,0,2,2,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 190 | anthracnose,5,0,2,1,0,1,1,1,2,2,1,1,0,2,2,0,0,0,1,0,3,2,1,1,0,0,0,1,2,1,1,1,0,0,0 191 | 
anthracnose,5,1,2,2,1,3,3,0,1,2,1,1,0,2,2,0,0,0,1,1,3,2,1,1,0,0,0,1,2,0,0,0,0,0,0 192 | anthracnose,0,0,2,1,0,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,0,0,1,1,0,1,0,0 193 | anthracnose,6,0,2,1,0,2,2,0,0,0,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,1,0,1,1,1,0 194 | anthracnose,5,1,2,1,0,1,1,1,0,1,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,1,1,0,1,1,0 195 | anthracnose,5,0,2,1,0,2,2,1,0,2,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,0,0,0,0,0,0 196 | anthracnose,6,1,2,2,1,0,0,1,0,1,0,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,0,0,0,0,0,0 197 | anthracnose,5,0,2,1,0,1,1,1,0,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 198 | anthracnose,6,1,2,2,1,3,3,0,2,1,0,1,0,2,2,0,0,0,1,1,3,2,1,1,0,0,0,1,2,0,0,0,0,0,0 199 | anthracnose,5,1,2,1,0,3,3,1,0,1,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,1,1,0,1,1,0 200 | anthracnose,5,1,2,1,0,2,2,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,1,1,0,1,1,0 201 | phyllosticta-leaf-spot,3,1,1,1,0,0,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 202 | phyllosticta-leaf-spot,3,0,0,1,1,0,2,0,0,1,0,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 203 | phyllosticta-leaf-spot,3,1,1,1,0,0,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 204 | phyllosticta-leaf-spot,3,0,0,1,1,2,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 205 | phyllosticta-leaf-spot,3,1,1,2,0,3,2,0,1,1,0,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 206 | phyllosticta-leaf-spot,2,0,0,1,1,0,3,0,2,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 207 | phyllosticta-leaf-spot,1,0,0,2,1,3,2,1,1,1,0,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 208 | phyllosticta-leaf-spot,2,1,1,1,0,2,2,1,1,1,0,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 209 | phyllosticta-leaf-spot,2,0,0,2,1,3,0,1,1,0,1,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 210 | phyllosticta-leaf-spot,2,1,1,2,0,3,3,0,2,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 211 | alternarialeaf-spot,4,1,2,1,0,1,1,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 212 | alternarialeaf-spot,4,0,1,1,0,3,3,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 213 | alternarialeaf-spot,3,0,2,1,0,0,0,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 214 | alternarialeaf-spot,6,0,2,2,0,3,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 215 | alternarialeaf-spot,6,0,1,1,1,2,2,0,2,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 216 | alternarialeaf-spot,5,0,2,2,0,3,3,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 217 | alternarialeaf-spot,6,0,1,1,0,3,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 218 | alternarialeaf-spot,5,1,2,2,0,3,1,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 219 | alternarialeaf-spot,6,0,2,2,0,3,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 220 | alternarialeaf-spot,6,0,2,2,0,3,2,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 221 | alternarialeaf-spot,5,0,2,2,0,2,3,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 222 | alternarialeaf-spot,4,1,2,1,0,3,0,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 223 | alternarialeaf-spot,6,0,2,1,0,1,1,0,1,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 224 | alternarialeaf-spot,5,0,2,2,0,2,2,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 225 | alternarialeaf-spot,5,1,2,1,0,0,0,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 226 | alternarialeaf-spot,4,0,2,1,0,2,2,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 227 | alternarialeaf-spot,4,0,2,1,0,1,1,1,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 228 | 
alternarialeaf-spot,5,0,2,1,0,2,1,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 229 | alternarialeaf-spot,6,0,2,2,0,3,2,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 230 | alternarialeaf-spot,6,1,2,2,0,1,1,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 231 | alternarialeaf-spot,5,1,2,2,0,3,1,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 232 | alternarialeaf-spot,5,1,2,2,0,3,3,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 233 | alternarialeaf-spot,4,1,2,1,0,2,1,0,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 234 | alternarialeaf-spot,6,1,1,2,0,2,2,0,2,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 235 | alternarialeaf-spot,4,1,2,1,0,1,2,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 236 | alternarialeaf-spot,6,1,2,2,0,2,1,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 237 | alternarialeaf-spot,4,1,2,1,0,0,3,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 238 | alternarialeaf-spot,4,0,2,2,0,3,3,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 239 | alternarialeaf-spot,5,0,2,2,0,2,3,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 240 | alternarialeaf-spot,3,0,2,1,0,0,0,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 241 | alternarialeaf-spot,5,0,2,1,0,1,2,0,1,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 242 | alternarialeaf-spot,5,0,2,2,0,1,1,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 243 | alternarialeaf-spot,4,0,2,2,0,1,1,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 244 | alternarialeaf-spot,5,1,2,1,0,3,3,0,1,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 245 | alternarialeaf-spot,6,0,2,1,0,2,1,0,0,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 246 | alternarialeaf-spot,5,0,2,1,0,0,3,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 247 | alternarialeaf-spot,6,0,2,1,0,0,3,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 248 | alternarialeaf-spot,5,1,2,2,0,2,1,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 249 | alternarialeaf-spot,5,0,2,1,0,3,0,0,1,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 250 | alternarialeaf-spot,6,0,2,1,0,1,2,0,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 251 | frog-eye-leaf-spot,6,0,1,2,0,3,3,0,0,0,0,1,2,0,1,0,0,0,1,0,3,2,1,1,0,0,0,1,2,1,0,0,0,0,0 252 | frog-eye-leaf-spot,4,0,1,2,0,1,1,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 253 | frog-eye-leaf-spot,5,0,1,1,0,2,1,0,0,0,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,0,0,0,0,0,0,0,0 254 | frog-eye-leaf-spot,5,1,2,1,0,3,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 255 | frog-eye-leaf-spot,6,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 256 | frog-eye-leaf-spot,4,0,1,1,0,3,3,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 257 | frog-eye-leaf-spot,3,0,2,1,0,2,3,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 258 | frog-eye-leaf-spot,5,0,2,2,0,2,2,0,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 259 | frog-eye-leaf-spot,5,0,2,1,0,1,1,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 260 | frog-eye-leaf-spot,5,0,2,2,0,2,3,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 261 | frog-eye-leaf-spot,5,0,2,1,0,0,1,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 262 | frog-eye-leaf-spot,4,0,2,1,0,2,3,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 263 | frog-eye-leaf-spot,4,0,2,2,0,1,1,1,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 264 | frog-eye-leaf-spot,4,0,2,1,0,2,1,1,1,1,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 265 | 
frog-eye-leaf-spot,3,1,2,1,0,3,2,1,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 266 | frog-eye-leaf-spot,5,0,2,1,0,3,0,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 267 | frog-eye-leaf-spot,5,0,2,2,0,1,1,0,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 268 | frog-eye-leaf-spot,4,0,2,2,0,1,2,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 269 | frog-eye-leaf-spot,5,0,2,2,0,2,1,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 270 | frog-eye-leaf-spot,5,0,2,1,0,3,0,1,0,0,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 271 | frog-eye-leaf-spot,3,0,2,1,0,1,2,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 272 | frog-eye-leaf-spot,6,0,1,2,0,3,3,0,1,0,0,1,2,0,1,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,0,1,1,1,0 273 | frog-eye-leaf-spot,5,0,1,1,0,1,3,1,2,0,0,1,2,0,1,0,0,0,1,0,3,0,1,0,0,0,0,1,1,0,0,0,0,0,0 274 | frog-eye-leaf-spot,5,0,2,1,0,3,2,1,0,0,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 275 | frog-eye-leaf-spot,5,1,2,1,0,3,3,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 276 | frog-eye-leaf-spot,3,1,2,1,0,3,0,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 277 | frog-eye-leaf-spot,6,1,2,2,0,3,1,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 278 | frog-eye-leaf-spot,4,0,2,1,0,1,2,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 279 | frog-eye-leaf-spot,4,0,2,2,0,1,0,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 280 | frog-eye-leaf-spot,6,1,2,2,0,3,0,0,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 281 | frog-eye-leaf-spot,5,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 282 | frog-eye-leaf-spot,4,0,2,1,0,0,1,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 283 | frog-eye-leaf-spot,4,0,2,1,0,2,3,1,1,1,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 284 | frog-eye-leaf-spot,4,1,1,2,0,1,1,0,2,2,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 285 | frog-eye-leaf-spot,4,0,2,1,0,2,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 286 | frog-eye-leaf-spot,5,1,2,1,0,1,2,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 287 | frog-eye-leaf-spot,4,0,2,2,0,1,3,1,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 288 | frog-eye-leaf-spot,5,0,2,1,0,1,2,0,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 289 | frog-eye-leaf-spot,5,0,2,2,0,2,0,0,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 290 | frog-eye-leaf-spot,5,1,2,1,0,2,3,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 291 | diaporthe-pod-&-stem-blight,5,0,2,2,?,3,3,?,?,0,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,0,1,1,1,1,? 292 | diaporthe-pod-&-stem-blight,6,0,2,2,?,2,3,?,?,1,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 293 | diaporthe-pod-&-stem-blight,5,0,2,2,?,3,3,?,?,0,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 294 | diaporthe-pod-&-stem-blight,1,1,1,2,?,3,0,?,?,2,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,0,1,1,1,1,? 295 | diaporthe-pod-&-stem-blight,5,?,2,2,?,2,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 296 | diaporthe-pod-&-stem-blight,5,0,2,2,?,2,3,?,?,0,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 
297 | cyst-nematode,2,?,?,?,?,2,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 298 | cyst-nematode,3,?,?,?,?,3,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 299 | cyst-nematode,4,?,?,?,?,3,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 300 | cyst-nematode,3,?,?,?,?,2,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 301 | cyst-nematode,3,?,?,?,?,2,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 302 | cyst-nematode,4,?,?,?,?,2,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 303 | 2-4-d-injury,?,?,?,?,?,?,?,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 304 | herbicide-injury,1,1,?,0,?,1,0,?,?,?,1,1,2,1,1,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 305 | herbicide-injury,0,1,?,0,?,0,3,?,?,?,1,1,0,2,2,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 306 | herbicide-injury,1,1,?,0,?,0,0,?,?,?,1,1,0,2,2,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 307 | herbicide-injury,1,1,?,0,?,1,3,?,?,?,1,1,2,1,1,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 308 | diaporthe-stem-canker,6,0,2,1,0,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 309 | diaporthe-stem-canker,3,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 310 | diaporthe-stem-canker,4,0,2,1,0,3,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 311 | diaporthe-stem-canker,5,0,2,1,0,1,0,1,0,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 312 | diaporthe-stem-canker,3,0,2,1,0,3,0,1,0,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 313 | diaporthe-stem-canker,5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 314 | diaporthe-stem-canker,5,0,2,1,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 315 | diaporthe-stem-canker,3,0,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 316 | diaporthe-stem-canker,4,0,2,1,0,3,0,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0 317 | diaporthe-stem-canker,6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0 318 | charcoal-rot,4,0,0,1,0,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 319 | charcoal-rot,5,0,0,2,0,3,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 320 | charcoal-rot,4,0,0,2,0,3,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 321 | charcoal-rot,5,0,0,2,0,0,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 322 | charcoal-rot,5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 323 | charcoal-rot,3,0,0,2,1,0,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 324 | charcoal-rot,4,0,0,2,1,1,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 325 | charcoal-rot,5,0,0,2,1,2,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 326 | charcoal-rot,6,0,0,2,1,3,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 327 | charcoal-rot,6,0,0,2,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0 328 | rhizoctonia-root-rot,0,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,1 329 | rhizoctonia-root-rot,0,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 330 | rhizoctonia-root-rot,0,1,2,0,0,2,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 331 | rhizoctonia-root-rot,2,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 332 | rhizoctonia-root-rot,1,1,2,0,0,2,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 333 | rhizoctonia-root-rot,2,1,2,0,0,3,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0 334 | rhizoctonia-root-rot,2,1,2,0,0,2,1,1,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 335 
| rhizoctonia-root-rot,4,0,2,0,1,0,1,2,0,2,1,1,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 336 | rhizoctonia-root-rot,0,1,2,0,0,1,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 337 | rhizoctonia-root-rot,2,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0 338 | phytophthora-rot,2,1,1,0,0,3,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,0 339 | phytophthora-rot,1,1,2,0,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,0 340 | phytophthora-rot,2,1,2,1,1,3,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,0 341 | phytophthora-rot,1,1,2,1,1,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,0 342 | phytophthora-rot,3,1,1,1,?,2,1,?,?,?,1,1,0,2,2,0,0,0,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 343 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,0,2,2,0,0,0,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 344 | phytophthora-rot,0,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 345 | phytophthora-rot,1,1,2,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 346 | phytophthora-rot,1,1,2,1,?,0,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 347 | phytophthora-rot,4,1,1,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 348 | phytophthora-rot,1,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,2,0,0,0,?,?,?,?,?,?,?,1 349 | phytophthora-rot,2,1,2,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,0,0,0,0,?,?,?,?,?,?,?,1 350 | phytophthora-rot,3,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 351 | phytophthora-rot,4,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 352 | phytophthora-rot,1,1,2,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,2,0,0,0,?,?,?,?,?,?,?,1 353 | phytophthora-rot,1,1,2,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,2,0,0,0,?,?,?,?,?,?,?,1 354 | phytophthora-rot,1,1,2,2,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,2,0,0,0,?,?,?,?,?,?,?,1 355 | phytophthora-rot,2,1,1,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,1,2,?,2,0,0,0,?,?,?,?,?,?,?,1 356 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 357 | phytophthora-rot,3,1,1,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 358 | phytophthora-rot,1,1,2,2,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 359 | phytophthora-rot,2,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 360 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 361 | phytophthora-rot,4,1,1,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 362 | phytophthora-rot,1,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 363 | phytophthora-rot,2,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 364 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 365 | phytophthora-rot,4,1,1,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 366 | phytophthora-rot,1,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 367 | phytophthora-rot,2,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 368 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 369 | phytophthora-rot,1,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 370 | phytophthora-rot,2,1,2,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 371 | phytophthora-rot,3,1,2,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 372 | phytophthora-rot,1,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,0,2,?,2,0,0,0,?,?,?,?,?,?,?,1 373 | 
phytophthora-rot,2,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,0,0,0,0,?,?,?,?,?,?,?,1 374 | phytophthora-rot,3,1,1,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 375 | phytophthora-rot,4,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 376 | phytophthora-rot,1,1,2,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,2,0,0,0,?,?,?,?,?,?,?,1 377 | phytophthora-rot,2,1,2,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 378 | phytophthora-rot,3,1,1,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 379 | phytophthora-rot,4,1,1,1,?,1,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 380 | phytophthora-rot,1,1,2,2,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,2,2,?,2,0,0,0,?,?,?,?,?,?,?,1 381 | phytophthora-rot,2,1,2,2,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 382 | phytophthora-rot,3,1,1,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 383 | phytophthora-rot,2,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 384 | phytophthora-rot,3,1,1,1,?,2,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 385 | phytophthora-rot,3,1,1,1,?,3,1,?,?,?,1,1,?,?,?,?,?,?,1,?,3,2,?,0,0,0,0,?,?,?,?,?,?,?,1 386 | brown-stem-rot,3,0,0,0,1,1,0,2,0,0,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 387 | brown-stem-rot,4,0,1,0,1,2,2,1,1,1,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 388 | brown-stem-rot,3,0,0,0,1,3,2,2,0,1,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 389 | brown-stem-rot,5,0,0,1,1,3,0,1,0,0,1,1,2,0,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 390 | brown-stem-rot,6,0,0,1,1,2,1,1,1,1,1,1,2,0,1,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 391 | brown-stem-rot,4,0,1,0,1,3,2,2,0,2,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 392 | brown-stem-rot,5,0,0,2,0,2,3,1,0,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 393 | brown-stem-rot,5,0,0,1,0,1,0,1,1,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 394 | brown-stem-rot,4,0,0,1,0,3,2,1,0,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 395 | brown-stem-rot,5,0,0,1,0,2,3,1,0,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 396 | brown-stem-rot,5,0,0,1,0,2,2,1,1,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 397 | brown-stem-rot,3,1,1,0,0,2,0,2,0,2,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 398 | brown-stem-rot,5,1,1,1,0,2,3,2,0,0,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 399 | brown-stem-rot,4,1,0,0,0,2,2,2,0,1,1,1,2,0,1,0,0,0,1,0,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 400 | brown-stem-rot,5,1,1,0,0,3,3,1,1,2,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 401 | brown-stem-rot,4,1,1,0,0,3,1,1,1,0,1,0,0,2,2,0,0,0,1,0,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 402 | brown-stem-rot,6,1,1,1,0,3,3,1,1,1,1,0,0,2,2,0,0,0,1,1,0,3,0,0,0,1,0,0,4,0,0,0,0,0,0 403 | brown-stem-rot,3,0,0,1,0,2,2,1,0,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 404 | brown-stem-rot,5,0,0,2,0,3,0,1,0,2,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 405 | brown-stem-rot,3,0,0,1,0,2,2,1,1,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 406 | brown-stem-rot,4,0,0,1,0,1,3,1,0,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 407 | brown-stem-rot,5,0,0,2,0,2,0,1,1,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 408 | brown-stem-rot,4,0,0,1,0,1,0,1,0,0,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 409 | brown-stem-rot,5,0,0,2,0,3,2,1,1,1,0,1,0,2,2,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 410 | powdery-mildew,1,0,0,0,1,0,0,0,0,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 411 | 
powdery-mildew,2,0,1,1,1,1,1,1,1,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 412 | powdery-mildew,3,0,2,0,1,2,2,0,0,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 413 | powdery-mildew,4,0,0,1,1,3,3,1,0,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 414 | powdery-mildew,5,0,1,0,1,0,0,0,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 415 | powdery-mildew,4,0,1,0,1,2,2,0,1,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 416 | powdery-mildew,3,1,0,1,0,3,3,1,1,2,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 417 | powdery-mildew,4,1,1,0,0,0,0,0,2,0,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 418 | powdery-mildew,2,1,1,0,0,0,0,0,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 419 | powdery-mildew,5,1,0,1,0,3,3,1,0,1,0,1,0,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 420 | downy-mildew,3,0,2,0,1,2,2,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 421 | downy-mildew,4,0,2,1,1,3,3,1,1,2,0,1,2,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 422 | downy-mildew,5,0,2,1,1,1,0,1,0,1,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 423 | downy-mildew,2,0,2,0,1,0,3,1,1,2,0,1,2,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 424 | downy-mildew,3,0,2,1,1,1,0,1,0,1,0,1,2,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 425 | downy-mildew,6,1,2,0,0,1,3,0,1,2,0,1,2,0,1,0,1,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 426 | downy-mildew,3,1,2,2,0,3,1,1,1,2,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 427 | downy-mildew,5,1,2,1,0,2,3,0,1,2,0,1,1,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 428 | downy-mildew,3,1,2,1,0,1,1,0,1,2,0,1,2,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 429 | downy-mildew,5,1,2,0,0,3,3,0,1,2,0,1,2,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0 430 | brown-spot,1,1,2,1,0,3,2,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 431 | brown-spot,2,0,2,1,0,1,1,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 432 | brown-spot,3,0,2,1,0,2,3,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 433 | brown-spot,2,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 434 | brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 435 | brown-spot,5,0,2,1,0,2,3,2,1,1,0,1,2,0,1,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0 436 | brown-spot,2,0,2,1,0,3,3,1,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 437 | brown-spot,1,0,2,1,0,2,3,1,2,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 438 | brown-spot,2,1,1,1,1,2,2,1,1,2,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 439 | brown-spot,3,1,2,2,1,1,1,0,2,1,0,1,2,0,1,0,0,0,1,0,3,1,0,0,0,0,0,1,2,0,0,0,0,0,0 440 | brown-spot,0,0,1,1,0,0,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 441 | brown-spot,1,0,2,2,0,1,1,0,1,1,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 442 | brown-spot,2,0,1,1,0,2,2,1,2,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 443 | brown-spot,3,0,2,2,0,3,3,0,0,1,1,1,2,0,1,0,0,0,1,0,3,1,0,0,0,0,0,0,1,0,0,0,0,0,0 444 | brown-spot,4,0,1,1,0,1,0,1,1,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,1,2,0,0,0,0,0,0 445 | brown-spot,0,0,2,2,0,2,1,1,2,1,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 446 | brown-spot,1,0,1,1,0,3,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 447 | brown-spot,2,0,2,2,0,1,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 448 | brown-spot,0,0,1,1,0,2,0,0,2,2,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 449 | brown-spot,1,1,2,2,1,3,1,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 450 | brown-spot,1,0,2,1,0,1,1,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 451 | 
brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 452 | brown-spot,3,1,2,1,0,3,3,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 453 | brown-spot,4,0,2,1,0,2,1,1,0,0,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 454 | brown-spot,5,0,2,1,0,3,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 455 | brown-spot,1,1,2,1,0,2,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 456 | brown-spot,2,0,2,1,0,3,3,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 457 | brown-spot,3,0,2,1,0,2,3,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 458 | brown-spot,1,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 459 | brown-spot,2,0,2,1,0,1,1,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 460 | brown-spot,3,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 461 | brown-spot,1,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 462 | brown-spot,2,0,2,1,0,2,3,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 463 | brown-spot,3,0,2,1,0,3,3,1,0,1,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 464 | brown-spot,1,1,2,1,0,2,1,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 465 | brown-spot,2,0,2,1,0,3,2,1,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 466 | brown-spot,3,0,2,1,0,2,3,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 467 | brown-spot,4,1,2,1,0,3,1,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 468 | brown-spot,5,0,2,1,0,1,2,1,0,0,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 469 | brown-spot,1,0,2,1,0,2,3,1,0,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 470 | brown-spot,2,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 471 | brown-spot,3,0,2,1,0,2,1,1,0,0,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 472 | brown-spot,4,0,2,1,0,3,3,1,0,1,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 473 | brown-spot,5,1,2,1,0,3,3,1,0,2,0,1,2,0,1,0,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 474 | brown-spot,1,0,2,1,0,1,1,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 475 | brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 476 | brown-spot,3,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 477 | brown-spot,1,0,2,1,0,1,1,1,0,0,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 478 | brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 479 | brown-spot,3,1,2,1,0,3,3,1,0,2,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 480 | brown-spot,1,0,2,1,0,1,3,1,0,0,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 481 | brown-spot,2,0,2,1,0,2,2,1,0,1,0,1,2,0,1,1,0,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0 482 | bacterial-blight,3,0,2,1,1,1,1,1,1,1,0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 483 | bacterial-blight,4,1,1,1,0,2,2,0,0,2,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 484 | bacterial-blight,2,1,1,1,0,1,0,0,0,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 485 | bacterial-blight,4,0,1,2,0,3,2,0,0,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 486 | bacterial-blight,5,1,2,1,1,1,3,1,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 487 | bacterial-blight,3,0,1,1,0,2,0,0,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 488 | bacterial-blight,4,0,2,1,1,3,1,1,1,1,0,1,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 489 | bacterial-blight,3,0,2,1,1,1,2,1,1,1,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 490 | bacterial-blight,4,1,1,1,0,2,3,0,0,2,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 491 | 
bacterial-blight,5,0,2,1,1,3,0,1,1,1,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 492 | bacterial-pustule,3,0,1,0,1,3,3,0,0,1,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 493 | bacterial-pustule,1,1,1,1,0,1,0,0,1,2,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1 494 | bacterial-pustule,2,0,1,2,1,2,1,0,0,1,0,1,2,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 495 | bacterial-pustule,3,1,1,1,0,3,2,0,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 496 | bacterial-pustule,2,0,2,1,1,3,1,1,1,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 497 | bacterial-pustule,2,0,2,1,1,1,3,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 498 | bacterial-pustule,1,0,1,1,1,3,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0 499 | bacterial-pustule,2,1,2,2,0,1,2,1,1,2,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 500 | bacterial-pustule,5,0,1,2,1,0,1,0,0,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0 501 | bacterial-pustule,2,1,2,0,0,1,2,1,0,2,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 502 | purple-seed-stain,5,0,2,2,1,2,2,0,0,1,0,1,2,0,0,0,0,0,0,1,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 503 | purple-seed-stain,6,0,2,0,1,3,3,0,1,2,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 504 | purple-seed-stain,5,0,2,2,1,1,1,0,1,2,0,1,2,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 505 | purple-seed-stain,4,0,2,2,1,0,0,0,0,2,0,1,2,0,0,0,0,0,0,1,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 506 | purple-seed-stain,5,0,2,0,0,1,1,0,0,1,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 507 | purple-seed-stain,6,0,2,1,0,2,2,0,0,2,0,0,0,2,2,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 508 | purple-seed-stain,3,0,2,2,0,3,3,0,0,1,0,0,0,2,2,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 509 | purple-seed-stain,3,0,2,0,0,3,3,0,0,2,0,0,0,2,2,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 510 | purple-seed-stain,5,0,2,2,0,1,1,0,1,2,0,0,0,2,2,0,0,0,0,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0 511 | purple-seed-stain,6,0,2,1,0,3,3,0,1,2,0,1,2,0,0,0,0,0,1,0,0,3,0,0,0,0,0,1,1,1,0,1,0,0,0 512 | anthracnose,0,0,2,1,0,0,0,0,0,0,0,1,0,2,2,0,0,0,1,0,2,1,0,1,0,0,0,0,0,1,0,1,1,1,0 513 | anthracnose,2,0,2,1,0,2,2,0,2,2,1,1,0,2,2,0,0,0,1,0,3,1,0,1,0,0,0,0,0,1,0,1,1,1,0 514 | anthracnose,4,0,2,1,0,0,0,0,1,1,0,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,1,1,0,1,1,0 515 | anthracnose,1,0,2,1,0,0,0,0,2,1,1,1,0,2,2,0,0,0,1,0,2,1,0,1,0,0,0,0,0,1,0,1,0,1,0 516 | anthracnose,4,1,2,2,1,1,1,1,0,0,0,1,0,2,2,0,0,0,1,1,3,2,1,1,0,0,0,1,2,1,1,0,1,0,0 517 | anthracnose,6,0,2,1,0,3,3,1,0,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 518 | anthracnose,6,1,2,1,0,1,1,1,1,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 519 | anthracnose,6,1,2,1,0,3,3,1,0,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 520 | anthracnose,5,1,2,1,0,2,2,1,0,2,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 521 | anthracnose,5,0,2,1,0,3,3,1,1,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 522 | anthracnose,4,0,2,1,0,2,2,1,1,2,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,0,0,0,0,0,0 523 | anthracnose,3,1,2,2,1,1,1,0,1,2,1,1,0,2,2,0,0,0,1,1,3,2,1,1,0,0,0,1,2,0,0,0,0,0,0 524 | anthracnose,4,1,2,2,1,1,1,0,1,0,1,1,0,2,2,0,0,0,1,1,3,2,1,1,0,0,0,1,2,0,0,0,0,0,0 525 | anthracnose,5,1,2,2,1,2,2,1,0,1,0,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,1,2,0,0,0,0,0,0 526 | anthracnose,6,1,2,2,1,3,3,0,1,2,0,1,0,2,2,0,0,0,1,0,3,2,1,1,0,0,0,1,2,0,0,0,0,0,0 527 | anthracnose,4,0,2,1,0,1,1,2,0,0,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,0,0,0,0,0,0 528 | anthracnose,5,1,2,1,0,2,2,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 529 | anthracnose,6,0,2,1,0,3,3,1,0,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 530 
| anthracnose,4,1,2,1,0,1,1,2,1,1,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,0,0,0,0,0,0 531 | anthracnose,5,0,2,1,0,2,2,1,0,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 532 | anthracnose,6,1,2,1,0,3,3,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,1,1,0,1,1,0 533 | anthracnose,5,0,2,1,0,3,3,1,0,0,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,0,0,0,0,0,0 534 | anthracnose,5,1,2,1,0,1,1,1,1,1,0,0,0,2,2,0,0,0,1,0,3,2,0,0,0,0,0,1,2,0,0,0,0,0,0 535 | anthracnose,5,0,2,1,0,1,1,1,0,1,0,0,0,2,2,0,0,0,1,0,3,2,1,0,0,0,0,1,2,0,0,0,0,0,0 536 | phyllosticta-leaf-spot,1,0,0,1,1,0,0,1,0,0,0,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 537 | phyllosticta-leaf-spot,3,0,0,1,1,2,3,0,0,2,1,1,2,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 538 | phyllosticta-leaf-spot,1,1,1,1,0,2,3,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 539 | phyllosticta-leaf-spot,2,1,1,1,0,2,0,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 540 | phyllosticta-leaf-spot,2,0,0,2,1,1,2,0,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 541 | phyllosticta-leaf-spot,2,0,0,2,1,1,3,0,0,2,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 542 | phyllosticta-leaf-spot,3,1,1,2,0,1,0,0,0,0,1,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 543 | phyllosticta-leaf-spot,4,1,1,2,0,1,2,0,1,1,1,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 544 | phyllosticta-leaf-spot,2,1,1,2,0,3,0,0,1,1,0,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 545 | phyllosticta-leaf-spot,4,1,1,2,0,1,3,0,1,1,0,1,2,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 546 | alternarialeaf-spot,6,0,2,2,0,2,2,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 547 | alternarialeaf-spot,5,0,2,2,0,2,2,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 548 | alternarialeaf-spot,6,1,2,2,0,2,3,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 549 | alternarialeaf-spot,5,1,2,1,0,2,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 550 | alternarialeaf-spot,6,0,2,1,0,3,3,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 551 | alternarialeaf-spot,5,0,2,1,0,2,2,0,0,0,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 552 | alternarialeaf-spot,6,0,2,1,0,3,3,0,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 553 | alternarialeaf-spot,5,0,2,1,0,3,0,0,1,1,0,1,2,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 554 | alternarialeaf-spot,5,1,2,1,1,0,0,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 555 | alternarialeaf-spot,5,1,2,1,1,1,1,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 556 | alternarialeaf-spot,4,0,2,1,1,0,0,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 557 | alternarialeaf-spot,6,1,1,2,1,1,1,0,2,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 558 | alternarialeaf-spot,4,1,2,1,0,2,2,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 559 | alternarialeaf-spot,6,0,1,1,1,0,0,0,2,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 560 | alternarialeaf-spot,5,0,2,1,1,2,2,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 561 | alternarialeaf-spot,6,0,1,2,1,3,3,0,2,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0 562 | alternarialeaf-spot,4,1,2,2,0,0,0,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 563 | alternarialeaf-spot,5,1,1,2,1,0,0,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 564 | alternarialeaf-spot,6,0,2,1,0,1,1,0,2,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 565 | alternarialeaf-spot,5,1,1,2,1,2,2,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 566 | alternarialeaf-spot,4,0,2,1,0,1,0,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 567 | 
alternarialeaf-spot,5,0,2,2,0,2,1,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 568 | alternarialeaf-spot,6,1,2,2,0,3,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 569 | alternarialeaf-spot,5,0,2,2,0,1,3,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 570 | alternarialeaf-spot,5,0,2,2,0,2,0,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 571 | alternarialeaf-spot,5,1,2,2,0,3,1,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 572 | alternarialeaf-spot,5,0,2,2,0,2,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 573 | alternarialeaf-spot,5,1,2,2,0,3,0,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 574 | alternarialeaf-spot,5,0,2,2,0,1,1,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 575 | alternarialeaf-spot,4,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 576 | alternarialeaf-spot,5,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 577 | alternarialeaf-spot,6,0,2,2,0,1,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 578 | alternarialeaf-spot,3,0,2,1,0,2,1,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 579 | alternarialeaf-spot,4,1,2,1,0,3,2,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 580 | alternarialeaf-spot,5,0,2,2,0,1,3,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 581 | alternarialeaf-spot,6,0,2,2,0,2,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 582 | alternarialeaf-spot,4,1,2,1,0,3,1,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 583 | alternarialeaf-spot,4,0,2,1,0,1,2,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 584 | alternarialeaf-spot,5,0,2,2,0,2,3,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 585 | alternarialeaf-spot,5,1,2,2,0,3,0,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 586 | alternarialeaf-spot,5,0,2,2,0,1,1,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 587 | alternarialeaf-spot,6,0,2,2,0,2,2,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 588 | alternarialeaf-spot,6,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 589 | alternarialeaf-spot,5,0,2,2,0,1,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 590 | alternarialeaf-spot,6,0,2,2,0,2,1,1,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 591 | alternarialeaf-spot,5,1,2,2,0,3,2,0,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 592 | alternarialeaf-spot,6,0,2,2,0,3,3,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 593 | alternarialeaf-spot,5,0,2,2,0,3,1,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 594 | alternarialeaf-spot,6,1,2,2,0,3,2,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 595 | alternarialeaf-spot,5,0,2,2,0,2,1,1,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 596 | alternarialeaf-spot,6,0,2,2,0,2,2,0,1,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 597 | frog-eye-leaf-spot,4,1,2,1,0,3,0,1,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 598 | frog-eye-leaf-spot,4,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 599 | frog-eye-leaf-spot,3,0,2,1,0,1,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 600 | frog-eye-leaf-spot,4,0,2,1,0,1,3,1,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 601 | frog-eye-leaf-spot,4,1,2,1,0,3,1,1,1,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 602 | frog-eye-leaf-spot,4,1,2,1,0,2,1,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 603 | frog-eye-leaf-spot,4,1,2,1,0,3,0,1,0,2,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 604 | 
frog-eye-leaf-spot,4,0,1,1,1,3,1,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 605 | frog-eye-leaf-spot,6,0,1,1,0,2,3,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 606 | frog-eye-leaf-spot,5,0,1,1,1,0,1,0,0,0,0,1,2,0,1,0,0,0,1,0,3,0,0,1,0,0,0,1,1,0,0,0,0,0,0 607 | frog-eye-leaf-spot,3,0,2,1,1,0,0,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 608 | frog-eye-leaf-spot,5,0,2,1,1,2,2,0,2,0,0,1,2,0,1,0,0,0,1,1,3,1,0,0,0,0,0,0,0,0,0,0,0,0,0 609 | frog-eye-leaf-spot,3,1,2,1,1,0,0,1,1,2,1,1,2,0,1,0,0,0,0,1,2,1,0,1,0,0,0,1,1,0,0,0,0,0,0 610 | frog-eye-leaf-spot,5,0,2,1,1,2,2,0,0,0,0,1,2,0,1,0,0,0,1,0,3,1,0,1,0,0,0,1,1,0,0,0,0,0,0 611 | frog-eye-leaf-spot,4,0,2,2,1,1,0,1,2,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 612 | frog-eye-leaf-spot,4,0,2,2,1,3,2,0,1,0,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 613 | frog-eye-leaf-spot,3,1,2,2,1,2,0,1,0,2,1,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 614 | frog-eye-leaf-spot,4,1,2,2,1,3,0,1,2,2,1,1,2,0,1,0,0,0,0,1,3,1,0,1,0,0,0,0,0,0,0,0,0,0,0 615 | frog-eye-leaf-spot,3,0,2,2,0,2,2,1,0,1,0,1,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 616 | frog-eye-leaf-spot,3,0,2,1,0,1,0,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 617 | frog-eye-leaf-spot,4,0,2,2,0,2,1,1,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 618 | frog-eye-leaf-spot,5,1,2,1,0,3,2,0,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 619 | frog-eye-leaf-spot,6,0,2,2,0,1,3,0,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 620 | frog-eye-leaf-spot,4,0,2,1,0,2,0,1,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 621 | frog-eye-leaf-spot,5,1,2,2,0,3,1,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 622 | frog-eye-leaf-spot,6,0,2,1,0,1,2,0,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 623 | frog-eye-leaf-spot,4,0,2,2,0,2,3,1,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 624 | frog-eye-leaf-spot,5,1,2,1,0,3,0,0,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 625 | frog-eye-leaf-spot,6,0,2,2,0,1,1,0,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 626 | frog-eye-leaf-spot,4,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 627 | frog-eye-leaf-spot,5,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 628 | frog-eye-leaf-spot,6,0,2,1,0,1,0,0,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 629 | frog-eye-leaf-spot,3,0,2,2,0,2,1,1,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 630 | frog-eye-leaf-spot,4,1,2,1,0,3,2,1,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 631 | frog-eye-leaf-spot,5,0,2,2,0,1,3,0,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 632 | frog-eye-leaf-spot,6,0,2,2,0,2,0,0,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 633 | frog-eye-leaf-spot,3,1,2,1,0,3,1,1,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 634 | frog-eye-leaf-spot,4,0,2,1,0,1,2,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 635 | frog-eye-leaf-spot,5,0,2,1,0,2,3,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 636 | frog-eye-leaf-spot,6,1,2,2,0,3,0,0,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 637 | frog-eye-leaf-spot,3,0,2,1,0,1,1,1,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 638 | frog-eye-leaf-spot,4,0,2,1,0,2,2,1,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 639 | frog-eye-leaf-spot,5,1,2,2,0,3,3,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 640 | frog-eye-leaf-spot,4,0,2,1,0,1,0,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 641 | 
frog-eye-leaf-spot,5,0,2,2,0,2,1,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 642 | frog-eye-leaf-spot,6,1,2,1,0,3,2,0,0,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 643 | frog-eye-leaf-spot,4,0,2,2,0,1,3,1,1,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 644 | frog-eye-leaf-spot,5,0,2,1,0,2,0,0,0,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 645 | frog-eye-leaf-spot,6,1,2,2,0,3,1,0,1,2,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 646 | frog-eye-leaf-spot,4,0,2,1,0,1,2,1,0,0,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 647 | frog-eye-leaf-spot,5,0,2,2,0,2,3,0,1,1,0,1,2,0,1,0,0,0,1,0,3,2,0,1,0,0,0,1,1,0,0,0,0,0,0 648 | diaporthe-pod-&-stem-blight,6,?,2,2,?,2,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 649 | diaporthe-pod-&-stem-blight,6,?,2,2,?,1,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 650 | diaporthe-pod-&-stem-blight,5,?,2,2,?,1,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 651 | diaporthe-pod-&-stem-blight,6,?,2,2,?,3,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 652 | diaporthe-pod-&-stem-blight,6,?,2,2,?,0,3,?,?,?,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 653 | diaporthe-pod-&-stem-blight,1,1,1,2,?,0,0,?,?,2,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,0,1,1,1,1,? 654 | diaporthe-pod-&-stem-blight,5,0,2,2,?,1,3,?,?,0,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 655 | diaporthe-pod-&-stem-blight,5,0,2,2,?,3,3,?,?,0,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 656 | diaporthe-pod-&-stem-blight,6,0,2,2,?,3,3,?,?,1,0,0,?,?,?,?,?,?,1,?,0,0,1,0,0,0,0,1,2,1,1,1,1,1,? 657 | cyst-nematode,2,?,?,?,?,1,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 658 | cyst-nematode,3,?,?,?,?,2,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 659 | cyst-nematode,4,?,?,?,?,1,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 660 | cyst-nematode,2,?,?,?,?,2,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 661 | cyst-nematode,3,?,?,?,?,2,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 662 | cyst-nematode,4,?,?,?,?,3,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 663 | cyst-nematode,4,?,?,?,?,3,1,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 664 | cyst-nematode,3,?,?,?,?,3,2,?,?,?,1,1,?,?,?,?,?,?,0,?,?,?,?,?,?,?,?,2,?,1,0,?,1,?,2 665 | 2-4-d-injury,5,?,?,?,?,?,1,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 666 | 2-4-d-injury,0,?,?,?,?,?,0,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 667 | 2-4-d-injury,1,?,?,?,?,?,1,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 668 | 2-4-d-injury,2,?,?,?,?,?,2,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 669 | 2-4-d-injury,3,?,?,?,?,?,3,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 670 | 2-4-d-injury,4,?,?,?,?,?,0,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 671 | 2-4-d-injury,6,?,?,?,?,?,2,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 672 | 2-4-d-injury,0,?,?,?,?,?,3,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 673 | 2-4-d-injury,1,?,?,?,?,?,0,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 674 | 2-4-d-injury,2,?,?,?,?,?,1,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 675 | 2-4-d-injury,3,?,?,?,?,?,2,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 676 | 2-4-d-injury,4,?,?,?,?,?,3,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 677 | 2-4-d-injury,5,?,?,?,?,?,0,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 
678 | 2-4-d-injury,6,?,?,?,?,?,1,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 679 | 2-4-d-injury,0,?,?,?,?,?,2,?,?,?,?,1,0,2,2,?,1,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,? 680 | herbicide-injury,0,1,?,0,?,0,0,?,?,?,1,1,0,2,2,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 681 | herbicide-injury,2,1,?,0,?,0,0,?,?,?,1,1,0,2,2,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 682 | herbicide-injury,0,1,?,0,?,1,3,?,?,?,1,1,2,1,1,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 683 | herbicide-injury,2,1,?,0,?,1,3,?,?,?,1,1,2,1,1,0,1,?,1,?,?,?,?,?,?,?,?,3,?,?,?,?,?,?,1 684 | -------------------------------------------------------------------------------- /datasets/soybean_multi/soybean-large.names: -------------------------------------------------------------------------------- 1 | 1. Title: Large Soybean Database 2 | 3 | 2. Sources: 4 | (a) R.S. Michalski and R.L. Chilausky "Learning by Being Told and 5 | Learning from Examples: An Experimental Comparison of the Two 6 | Methods of Knowledge Acquisition in the Context of Developing 7 | an Expert System for Soybean Disease Diagnosis", International 8 | Journal of Policy Analysis and Information Systems, Vol. 4, 9 | No. 2, 1980. 10 | (b) Donor: Ming Tan & Jeff Schlimmer (Jeff.Schlimmer%cs.cmu.edu) 11 | (c) Date: 11 July 1988 12 | 13 | 3. Past Usage: 14 | 1. See above. 15 | 2. Tan, M., & Eshelman, L. (1988). Using weighted networks to represent 16 | classification knowledge in noisy domains. Proceedings of the Fifth 17 | International Conference on Machine Learning (pp. 121-134). Ann Arbor, 18 | Michigan: Morgan Kaufmann. 19 | -- IWN recorded a 97.1% classification accuracy 20 | -- 290 training and 340 test instances 21 | 3. Fisher,D.H. & Schlimmer,J.C. (1988). Concept Simplification and 22 | Predictive Accuracy. Proceedings of the Fifth 23 | International Conference on Machine Learning (pp. 22-28). Ann Arbor, 24 | Michigan: Morgan Kaufmann. 25 | -- Notes why this database is highly predictable 26 | 27 | 4. Relevant Information Paragraph: 28 | There are 19 classes, only the first 15 of which have been used in prior 29 | work. The folklore seems to be that the last four classes are 30 | unjustified by the data since they have so few examples. 31 | There are 35 categorical attributes, some nominal and some ordered. The 32 | value ``dna'' means does not apply. The values for attributes are 33 | encoded numerically, with the first value encoded as ``0,'' the second as 34 | ``1,'' and so forth. An unknown values is encoded as ``?''. 35 | 36 | 5. Number of Instances: 307 37 | 38 | 6. Number of Attributes: 35 (all have been nominalized) 39 | 40 | 7. Attribute Information: 41 | -- 19 Classes 42 | diaporthe-stem-canker, charcoal-rot, rhizoctonia-root-rot, 43 | phytophthora-rot, brown-stem-rot, powdery-mildew, 44 | downy-mildew, brown-spot, bacterial-blight, 45 | bacterial-pustule, purple-seed-stain, anthracnose, 46 | phyllosticta-leaf-spot, alternarialeaf-spot, 47 | frog-eye-leaf-spot, diaporthe-pod-&-stem-blight, 48 | cyst-nematode, 2-4-d-injury, herbicide-injury. 49 | 50 | 1. date: april,may,june,july,august,september,october,?. 51 | 2. plant-stand: normal,lt-normal,?. 52 | 3. precip: lt-norm,norm,gt-norm,?. 53 | 4. temp: lt-norm,norm,gt-norm,?. 54 | 5. hail: yes,no,?. 55 | 6. crop-hist: diff-lst-year,same-lst-yr,same-lst-two-yrs, 56 | same-lst-sev-yrs,?. 57 | 7. area-damaged: scattered,low-areas,upper-areas,whole-field,?. 58 | 8. severity: minor,pot-severe,severe,?. 59 | 9. seed-tmt: none,fungicide,other,?. 60 | 10. germination: 90-100%,80-89%,lt-80%,?. 61 | 11. 
plant-growth: norm,abnorm,?. 62 | 12. leaves: norm,abnorm. 63 | 13. leafspots-halo: absent,yellow-halos,no-yellow-halos,?. 64 | 14. leafspots-marg: w-s-marg,no-w-s-marg,dna,?. 65 | 15. leafspot-size: lt-1/8,gt-1/8,dna,?. 66 | 16. leaf-shread: absent,present,?. 67 | 17. leaf-malf: absent,present,?. 68 | 18. leaf-mild: absent,upper-surf,lower-surf,?. 69 | 19. stem: norm,abnorm,?. 70 | 20. lodging: yes,no,?. 71 | 21. stem-cankers: absent,below-soil,above-soil,above-sec-nde,?. 72 | 22. canker-lesion: dna,brown,dk-brown-blk,tan,?. 73 | 23. fruiting-bodies: absent,present,?. 74 | 24. external decay: absent,firm-and-dry,watery,?. 75 | 25. mycelium: absent,present,?. 76 | 26. int-discolor: none,brown,black,?. 77 | 27. sclerotia: absent,present,?. 78 | 28. fruit-pods: norm,diseased,few-present,dna,?. 79 | 29. fruit spots: absent,colored,brown-w/blk-specks,distort,dna,?. 80 | 30. seed: norm,abnorm,?. 81 | 31. mold-growth: absent,present,?. 82 | 32. seed-discolor: absent,present,?. 83 | 33. seed-size: norm,lt-norm,?. 84 | 34. shriveling: absent,present,?. 85 | 35. roots: norm,rotted,galls-cysts,?. 86 | 87 | 8. Number of Missing Attribute Values: (denoted by "?") 88 | (Problem: these don't appear to be correct! Needs to be updated.) 89 | 1. date: 0 90 | 2. plant-stand: 1 91 | 3. precip: 8 92 | 4. temp: 11 93 | 5. hail: 7 94 | 6. crop-hist: 41 95 | 7. area-damaged: 1 96 | 8. severity: 1 97 | 9. seed-tmt: 41 98 | 10. germination: 41 99 | 11. plant-growth: 36 100 | 12. leaves: 1 101 | 13. leafspots-halo: 0 102 | 14. leafspots-marg: 25 103 | 15. leafspot-size: 25 104 | 16. leaf-shread: 25 105 | 17. leaf-malf: 26 106 | 18. leaf-mild: 25 107 | 19. stem: 30 108 | 20. lodging: 1 109 | 21. stem-cankers: 41 110 | 22. canker-lesion: 11 111 | 23. fruiting-bodies: 11 112 | 24. external decay: 35 113 | 25. mycelium: 11 114 | 26. int-discolor: 11 115 | 27. sclerotia: 11 116 | 28. fruit-pods: 11 117 | 29. fruit spots: 25 118 | 30. seed: 35 119 | 31. mold-growth: 29 120 | 32. seed-discolor: 29 121 | 33. seed-size: 35 122 | 34. shriveling: 29 123 | 35. roots: 35 124 | 125 | 9. Class Distribution: 126 | 1. diaporthe-stem-canker: 10 127 | 2. charcoal-rot: 10 128 | 3. rhizoctonia-root-rot: 10 129 | 4. phytophthora-rot: 40 130 | 5. brown-stem-rot: 20 131 | 6. powdery-mildew: 10 132 | 7. downy-mildew: 10 133 | 8. brown-spot: 40 134 | 9. bacterial-blight: 10 135 | 10. bacterial-pustule: 10 136 | 11. purple-seed-stain: 10 137 | 12. anthracnose: 20 138 | 13. phyllosticta-leaf-spot: 10 139 | 14. alternarialeaf-spot: 40 140 | 15. frog-eye-leaf-spot: 40 141 | 16. diaporthe-pod-&-stem-blight: 6 142 | 17. cyst-nematode: 6 143 | 18. 2-4-d-injury: 1 144 | 19. herbicide-injury: 4 145 | -------------------------------------------------------------------------------- /datasets/wdbc_binary/wdbc.names: -------------------------------------------------------------------------------- 1 | 1. Title: Wisconsin Diagnostic Breast Cancer (WDBC) 2 | 3 | 2. Source Information 4 | 5 | a) Creators: 6 | 7 | Dr. William H. Wolberg, General Surgery Dept., University of 8 | Wisconsin, Clinical Sciences Center, Madison, WI 53792 9 | wolberg@eagle.surgery.wisc.edu 10 | 11 | W. Nick Street, Computer Sciences Dept., University of 12 | Wisconsin, 1210 West Dayton St., Madison, WI 53706 13 | street@cs.wisc.edu 608-262-6619 14 | 15 | Olvi L. Mangasarian, Computer Sciences Dept., University of 16 | Wisconsin, 1210 West Dayton St., Madison, WI 53706 17 | olvi@cs.wisc.edu 18 | 19 | b) Donor: Nick Street 20 | 21 | c) Date: November 1995 22 | 23 | 3. 
Past Usage: 24 | 25 | first usage: 26 | 27 | W.N. Street, W.H. Wolberg and O.L. Mangasarian 28 | Nuclear feature extraction for breast tumor diagnosis. 29 | IS&T/SPIE 1993 International Symposium on Electronic Imaging: Science 30 | and Technology, volume 1905, pages 861-870, San Jose, CA, 1993. 31 | 32 | OR literature: 33 | 34 | O.L. Mangasarian, W.N. Street and W.H. Wolberg. 35 | Breast cancer diagnosis and prognosis via linear programming. 36 | Operations Research, 43(4), pages 570-577, July-August 1995. 37 | 38 | Medical literature: 39 | 40 | W.H. Wolberg, W.N. Street, and O.L. Mangasarian. 41 | Machine learning techniques to diagnose breast cancer from 42 | fine-needle aspirates. 43 | Cancer Letters 77 (1994) 163-171. 44 | 45 | W.H. Wolberg, W.N. Street, and O.L. Mangasarian. 46 | Image analysis and machine learning applied to breast cancer 47 | diagnosis and prognosis. 48 | Analytical and Quantitative Cytology and Histology, Vol. 17 49 | No. 2, pages 77-87, April 1995. 50 | 51 | W.H. Wolberg, W.N. Street, D.M. Heisey, and O.L. Mangasarian. 52 | Computerized breast cancer diagnosis and prognosis from fine 53 | needle aspirates. 54 | Archives of Surgery 1995;130:511-516. 55 | 56 | W.H. Wolberg, W.N. Street, D.M. Heisey, and O.L. Mangasarian. 57 | Computer-derived nuclear features distinguish malignant from 58 | benign breast cytology. 59 | Human Pathology, 26:792--796, 1995. 60 | 61 | See also: 62 | http://www.cs.wisc.edu/~olvi/uwmp/mpml.html 63 | http://www.cs.wisc.edu/~olvi/uwmp/cancer.html 64 | 65 | Results: 66 | 67 | - predicting field 2, diagnosis: B = benign, M = malignant 68 | - sets are linearly separable using all 30 input features 69 | - best predictive accuracy obtained using one separating plane 70 | in the 3-D space of Worst Area, Worst Smoothness and 71 | Mean Texture. Estimated accuracy 97.5% using repeated 72 | 10-fold crossvalidations. Classifier has correctly 73 | diagnosed 176 consecutive new patients as of November 74 | 1995. 75 | 76 | 4. Relevant information 77 | 78 | Features are computed from a digitized image of a fine needle 79 | aspirate (FNA) of a breast mass. They describe 80 | characteristics of the cell nuclei present in the image. 81 | A few of the images can be found at 82 | http://www.cs.wisc.edu/~street/images/ 83 | 84 | Separating plane described above was obtained using 85 | Multisurface Method-Tree (MSM-T) [K. P. Bennett, "Decision Tree 86 | Construction Via Linear Programming." Proceedings of the 4th 87 | Midwest Artificial Intelligence and Cognitive Science Society, 88 | pp. 97-101, 1992], a classification method which uses linear 89 | programming to construct a decision tree. Relevant features 90 | were selected using an exhaustive search in the space of 1-4 91 | features and 1-3 separating planes. 92 | 93 | The actual linear program used to obtain the separating plane 94 | in the 3-dimensional space is that described in: 95 | [K. P. Bennett and O. L. Mangasarian: "Robust Linear 96 | Programming Discrimination of Two Linearly Inseparable Sets", 97 | Optimization Methods and Software 1, 1992, 23-34]. 98 | 99 | 100 | This database is also available through the UW CS ftp server: 101 | 102 | ftp ftp.cs.wisc.edu 103 | cd math-prog/cpo-dataset/machine-learn/WDBC/ 104 | 105 | 5. Number of instances: 569 106 | 107 | 6. Number of attributes: 32 (ID, diagnosis, 30 real-valued input features) 108 | 109 | 7. 
Attribute information 110 | 111 | 1) ID number 112 | 2) Diagnosis (M = malignant, B = benign) 113 | 3-32) 114 | 115 | Ten real-valued features are computed for each cell nucleus: 116 | 117 | a) radius (mean of distances from center to points on the perimeter) 118 | b) texture (standard deviation of gray-scale values) 119 | c) perimeter 120 | d) area 121 | e) smoothness (local variation in radius lengths) 122 | f) compactness (perimeter^2 / area - 1.0) 123 | g) concavity (severity of concave portions of the contour) 124 | h) concave points (number of concave portions of the contour) 125 | i) symmetry 126 | j) fractal dimension ("coastline approximation" - 1) 127 | 128 | Several of the papers listed above contain detailed descriptions of 129 | how these features are computed. 130 | 131 | The mean, standard error, and "worst" or largest (mean of the three 132 | largest values) of these features were computed for each image, 133 | resulting in 30 features. For instance, field 3 is Mean Radius, field 134 | 13 is Radius SE, field 23 is Worst Radius. 135 | 136 | All feature values are recoded with four significant digits. 137 | 138 | 8. Missing attribute values: none 139 | 140 | 9. Class distribution: 357 benign, 212 malignant -------------------------------------------------------------------------------- /eca.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.utils.data import DataLoader 3 | from helpers.MyDataset import * 4 | from models.ConvNetSVHN2 import * 5 | from helpers.Metrics import * 6 | 7 | #test device status 8 | if torch.cuda.is_available(): 9 | device_id = 0 10 | torch.backends.cudnn.enabled = True 11 | torch.backends.cudnn.benchmark = True 12 | else: 13 | device_id = None 14 | 15 | #initialize model 16 | net = ConvNetSVHN2(128).to(device_id) 17 | 18 | #set hyperparameters 19 | loss_func = nn.CrossEntropyLoss().to(device_id) 20 | opt = torch.optim.SGD(net.parameters(),lr=0.001,momentum=0.9,weight_decay=0.0005) 21 | load_batch,train_batch = 1024,256 22 | 23 | #load dataset 24 | trainData = Svhn2Dataset('D://datasets/svhn-format2/','train') 25 | testData = Svhn2Dataset('D://datasets/svhn-format2/','t10k') 26 | 27 | trainLoader = DataLoader( 28 | dataset = trainData , 29 | batch_size = load_batch , 30 | shuffle = True , 31 | num_workers = 0 , 32 | drop_last = True 33 | ) 34 | testLoader = DataLoader( 35 | dataset = testData , 36 | batch_size = load_batch , 37 | shuffle = False , 38 | num_workers = 0 , 39 | drop_last = False 40 | ) 41 | 42 | if __name__=='__main__': 43 | for epoch in range(5): 44 | net = net.train() 45 | for _,(X,y) in enumerate(trainLoader): 46 | X,y=X.to(device_id),y.to(device_id) 47 | 48 | batch = 0 49 | while batchII',lbpath.read(8)) 151 | y = np.fromfile(lbpath,dtype=np.uint8) 152 | with open(images_path,'rb') as imgpath: 153 | magic,num,rows,cols = unpack('>IIII',imgpath.read(16)) 154 | X = np.fromfile(imgpath,dtype=np.uint8).reshape(len(y),1,28,28) 155 | 156 | if kind=='train': 157 | assert X.shape==(60000,1,28,28),'Data missed partially, expect (60000,1,28,28), but got '+str(X.shape)+' instead' 158 | assert y.shape==(60000,),'Data missed partially, expect (60000,), but got '+str(y.shape)+' instead' 159 | else: 160 | assert X.shape==(10000,1,28,28),'Data missed partially, expect (10000,1,28,28), but got '+str(X.shape)+' instead' 161 | assert y.shape==(10000,),'Data missed partially, expect (10000,), but got '+str(y.shape)+' instead' 162 | 163 | #convert all to tensors 164 | self.X = 
torch.from_numpy(X.astype(np.float32)) 165 | self.y = torch.from_numpy(y.astype(np.int64)) 166 | 167 | def __getitem__(self,index): 168 | return self.X[index],self.y[index] 169 | 170 | def __len__(self): 171 | return self.y.size(0) 172 | 173 | class Svhn2Dataset(Dataset): 174 | def __init__(self,path,kind): 175 | if kind=='train': 176 | data = loadmat(join(path,'train_32x32.mat')) 177 | X = data['X'].transpose((3,2,0,1)) 178 | y = data['y'].reshape(73257) 179 | 180 | assert X.shape==(73257,3,32,32),'Data missed partially, expect (73257,3,32,32), but got '+str(X.shape)+' instead' 181 | assert y.shape==(73257,),'Data missed partially, expect (73257,), but got '+str(y.shape)+' instead' 182 | else: 183 | data = loadmat(join(path,'test_32x32.mat')) 184 | X = data['X'].transpose((3,2,0,1)) 185 | y = data['y'].reshape(26032) 186 | 187 | assert X.shape==(26032,3,32,32),'Data missed partially, expect (26032,3,32,32), but got '+str(X.shape)+' instead' 188 | assert y.shape==(26032,),'Data missed partially, expect (26032,), but got '+str(y.shape)+' instead' 189 | 190 | #let label "10" be "0" 191 | y = np.where(y==10,0,y) 192 | 193 | #convert all to tensors 194 | self.X = torch.from_numpy(X.astype(np.float32)) 195 | self.y = torch.from_numpy(y.astype(np.int64)) 196 | 197 | def __getitem__(self,index): 198 | return self.X[index],self.y[index] 199 | 200 | def __len__(self): 201 | return self.y.size(0) 202 | -------------------------------------------------------------------------------- /lab2.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from sys import argv 4 | from helpers.MyDataset import * 5 | #from helpers.myfunc import * 6 | from torch.utils.data import DataLoader 7 | from models.ConvNetMNIST import * 8 | #import matplotlib.pyplot as plt 9 | from time import time 10 | from numpy import * 11 | 12 | #test device status 13 | device_status=torch.cuda.is_available() 14 | if device_status: 15 | device_id=0 16 | 17 | #initialize model 18 | net=ConvNetMNIST((1,28,28),128,10) 19 | if device_status: 20 | net=net.to(device_id) 21 | 22 | #set hyperparameters 23 | loss_func=nn.CrossEntropyLoss() 24 | opt=torch.optim.SGD(net.parameters(),lr=0.001) 25 | mini_batch=256 26 | 27 | #load dataset 28 | trainData=MnistDataset('D://datasets/mnist/','train') 29 | testData=MnistDataset('D://datasets/mnist/','t10k') 30 | 31 | trainLoader=DataLoader( 32 | dataset=trainData, 33 | batch_size=mini_batch, 34 | shuffle=True, 35 | num_workers=0 36 | ) 37 | testLoader=DataLoader( 38 | dataset=testData, 39 | batch_size=2048, 40 | shuffle=False, 41 | num_workers=0 42 | ) 43 | 44 | if __name__=='__main__': 45 | epochs,accuracys,losses=[],[],[] 46 | 47 | #train & test 48 | times = [] 49 | for epoch in range(2): 50 | loss_sum=0. 
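        # NOTE: the training loop below benchmarks the forward pass: each call to net(x)
        # is timed with time() and the mean per-batch time is printed, while the accuracy
        # evaluation and plotting code further down is left commented out.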
51 | net=net.train() 52 | for _,(x,y) in enumerate(trainLoader): 53 | if device_status: 54 | x,y=x.to(device_id),y.to(device_id) 55 | 56 | #calcualte estimated results 57 | opt.zero_grad() 58 | t1 = time() 59 | y_hat=net(x) 60 | t2 = time() 61 | times.append(t2-t1) 62 | 63 | #calculate loss and propagate back 64 | loss=loss_func(y_hat,y) 65 | loss.backward() 66 | opt.step() 67 | # loss_sum+=mini_batch*loss.item() 68 | print(mean(times)) 69 | 70 | # net=net.eval() 71 | # positive_n=0 72 | # for _,(x,y) in enumerate(testLoader): 73 | # if device_status: 74 | # x=x.to(device_id) 75 | 76 | # #predict 77 | # with torch.no_grad(): 78 | # y_hat=net(x) 79 | 80 | # #compare and count 81 | # for i in range(len(y)): 82 | # if torch.argmax(y_hat[i]).item()==y[i].item(): 83 | # positive_n+=1 84 | 85 | # print('epoch = %d accuracy = %f' %(epoch,positive_n/testData.__len__())) 86 | # epochs.append(epoch) 87 | # accuracys.append(positive_n/testData.__len__()) 88 | # losses.append(loss_sum/60000) 89 | 90 | # plt.plot(epochs,accuracys) 91 | # plt.savefig('./results/accuracy.jpg') 92 | # plt.close('all') 93 | # plt.plot(epochs,losses) 94 | # plt.savefig('./results/loss.jpg') 95 | 96 | #save parameters 97 | #torch.save(net.state_dict(),'./results/ConvNetMNIST.pkl') 98 | -------------------------------------------------------------------------------- /lab3.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.optim import * 3 | from torch.utils.data import DataLoader 4 | import numpy as np 5 | from tqdm import tqdm 6 | from helpers.MyDataset import * 7 | from helpers.Metrics import * 8 | from models.ConvNetMNIST import * 9 | from models.ConvNetMNIST_BN import * 10 | 11 | #test device status 12 | device_status = torch.cuda.is_available() 13 | device_id = 2 14 | 15 | #set enumerated class pools 16 | net_classes = [ConvNetMNIST,ConvNetMNIST_BN] 17 | opt_classes = [SGD,RMSprop,Adam] 18 | spec = { 19 | 'ConvNetMNIST' : '', 20 | 'ConvNetMNIST_BN' : '_bn' 21 | } 22 | 23 | #set hyperparameters 24 | loss_func = nn.CrossEntropyLoss() 25 | mini_batch = 128 26 | lamda = 0.0001 27 | 28 | #load dataset 29 | trainSet = MnistDataset('../datasets/mnist/','train') 30 | testSet = MnistDataset('../datasets/mnist/','t10k') 31 | trainLoader=DataLoader( 32 | dataset = trainSet , 33 | batch_size = mini_batch , 34 | shuffle = True , 35 | num_workers = 0 36 | ) 37 | testLoader=DataLoader( 38 | dataset = testSet , 39 | batch_size = mini_batch , 40 | shuffle = False , 41 | num_workers = 0 42 | ) 43 | 44 | if __name__=='__main__': 45 | 46 | #with L1 regularization 47 | for net_class in net_classes: 48 | for opt_class in opt_classes: 49 | 50 | #instantiate model and optimizer 51 | net = net_class( 52 | input_shape = (1,28,28) , 53 | num_feature = 128 , 54 | num_class = 10 55 | ) 56 | opt = opt_class(net.parameters(),lr=0.001) 57 | if device_status: 58 | net = net.to(device_id) 59 | 60 | accuracies,losses = [],[] 61 | for _ in tqdm(range(50),ncols=70): 62 | 63 | #train 64 | loss_sum = 0. 65 | net = net.train() 66 | for _,(X,y) in enumerate(trainLoader): 67 | if device_status: 68 | X,y = X.to(device_id),y.to(device_id) 69 | opt.zero_grad() 70 | y_hat=net(X) 71 | regularization = 0. 
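                # L1 penalty: accumulate the sum of absolute values of all parameters,
                # then scale it by lamda and add it to the cross-entropy loss below.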
72 | for param in net.parameters(): 73 | regularization += torch.sum(abs(param)) 74 | loss=loss_func(y_hat,y)+lamda*regularization 75 | loss.backward() 76 | opt.step() 77 | loss_sum += loss.item()*len(y) 78 | losses.append(loss_sum) 79 | 80 | #test 81 | y_pred = torch.empty(0,10,dtype=torch.float32) 82 | y_true = testSet.y 83 | net=net.eval() 84 | for _,(X,y) in enumerate(testLoader): 85 | if device_status: 86 | X = X.to(device_id) 87 | with torch.no_grad(): 88 | y_hat = net(X) 89 | if device_status: 90 | y_hat = y_hat.cpu() 91 | y_pred = torch.cat([y_pred,y_hat],dim=0) 92 | y_pred,y_true = y_pred.numpy(),y_true.numpy() 93 | accuracies.append(accuracy(y_pred,y_true,task='multi-classification')) 94 | accuracies = np.array(accuracies,dtype=np.float32) 95 | losses = np.array(losses,dtype=np.float32) 96 | np.save('results/accuracy_%s%s_L1.npy' %(opt_class.__name__,spec[net_class.__name__]),accuracies) 97 | np.save('results/loss_%s%s_L1.npy' %(opt_class.__name__,spec[net_class.__name__]),losses) 98 | 99 | #without L1 regularization 100 | for net_class in net_classes: 101 | for opt_class in opt_classes: 102 | 103 | #instantiate model and optimizer 104 | net = net_class( 105 | input_shape = (1,28,28) , 106 | num_feature = 128 , 107 | num_class = 10 108 | ) 109 | opt = opt_class(net.parameters(),lr=0.001) 110 | if device_status: 111 | net = net.to(device_id) 112 | 113 | accuracies,losses = [],[] 114 | for _ in tqdm(range(50),ncols=70): 115 | 116 | #train 117 | loss_sum = 0. 118 | net = net.train() 119 | for _,(X,y) in enumerate(trainLoader): 120 | if device_status: 121 | X,y = X.to(device_id),y.to(device_id) 122 | opt.zero_grad() 123 | y_hat=net(X) 124 | loss=loss_func(y_hat,y) 125 | loss.backward() 126 | opt.step() 127 | loss_sum += loss.item()*len(y) 128 | losses.append(loss_sum) 129 | 130 | #test 131 | y_pred = torch.empty(0,10,dtype=torch.float32) 132 | y_true = testSet.y 133 | net=net.eval() 134 | for _,(X,y) in enumerate(testLoader): 135 | if device_status: 136 | X = X.to(device_id) 137 | with torch.no_grad(): 138 | y_hat = net(X) 139 | if device_status: 140 | y_hat = y_hat.cpu() 141 | y_pred = torch.cat([y_pred,y_hat],dim=0) 142 | y_pred,y_true = y_pred.numpy(),y_true.numpy() 143 | accuracies.append(accuracy(y_pred,y_true,task='multi-classification')) 144 | accuracies = np.array(accuracies,dtype=np.float32) 145 | losses = np.array(losses,dtype=np.float32) 146 | np.save('results/accuracy_%s%s.npy' %(opt_class.__name__,spec[net_class.__name__]),accuracies) 147 | np.save('results/loss_%s%s.npy' %(opt_class.__name__,spec[net_class.__name__]),losses) 148 | 149 | print('done') 150 | -------------------------------------------------------------------------------- /lab3_eval.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.optim import * 3 | from torch.utils.data import DataLoader 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | from helpers.MyDataset import * 7 | from helpers.Metrics import * 8 | from models.ConvNetMNIST import * 9 | from models.ConvNetMNIST_BN import * 10 | 11 | #set enumerated class pools 12 | net_classes = [ConvNetMNIST,ConvNetMNIST_BN] 13 | opt_classes = [SGD,RMSprop,Adam] 14 | spec = { 15 | 'ConvNetMNIST' : '', 16 | 'ConvNetMNIST_BN' : '_bn' 17 | } 18 | color = { 19 | 'SGD' : 'r' , 20 | 'RMSprop' : 'g' , 21 | 'Adam' : 'b' , 22 | 'ConvNetMNIST' : {'':'r','_L1':'g'} , 23 | 'ConvNetMNIST_BN' : {'':'b','_L1':'black'} 24 | } 25 | 26 | if __name__=='__main__': 27 | index = np.arange(50) 28 | 29 | 
#accuracy with L1 regularization 30 | for net_class in net_classes: 31 | plt.close('all') 32 | for opt_class in opt_classes: 33 | accuracies = np.load('results/accuracy_%s%s_L1.npy' %(opt_class.__name__,spec[net_class.__name__])) 34 | plt.plot(index,accuracies,color[opt_class.__name__]) 35 | plt.savefig('results/accuracy%s_L1.jpg' % spec[net_class.__name__]) 36 | 37 | #accuracy without L1 regularization 38 | for net_class in net_classes: 39 | plt.close('all') 40 | for opt_class in opt_classes: 41 | accuracies = np.load('results/accuracy_%s%s.npy' %(opt_class.__name__,spec[net_class.__name__])) 42 | plt.plot(index,accuracies,color[opt_class.__name__]) 43 | plt.savefig('results/accuracy%s.jpg' % spec[net_class.__name__]) 44 | 45 | #loss with L1 regularization 46 | for net_class in net_classes: 47 | plt.close('all') 48 | for opt_class in opt_classes: 49 | losses = np.load('results/loss_%s%s_L1.npy' %(opt_class.__name__,spec[net_class.__name__])) 50 | plt.plot(index,losses,color[opt_class.__name__]) 51 | plt.savefig('results/loss%s_L1.jpg' % spec[net_class.__name__]) 52 | 53 | #loss without L1 regularization 54 | for net_class in net_classes: 55 | plt.close('all') 56 | for opt_class in opt_classes: 57 | losses = np.load('results/loss_%s%s.npy' %(opt_class.__name__,spec[net_class.__name__])) 58 | plt.plot(index,losses,color[opt_class.__name__]) 59 | plt.savefig('results/loss%s.jpg' % spec[net_class.__name__]) 60 | 61 | #accuracy for each optimizer 62 | for opt_class in opt_classes: 63 | plt.close('all') 64 | for net_class in net_classes: 65 | for regularization in ['','_L1']: 66 | accuracies = np.load('results/accuracy_%s%s%s.npy' %(opt_class.__name__,spec[net_class.__name__],regularization)) 67 | plt.plot(index,accuracies,color[net_class.__name__][regularization]) 68 | plt.savefig('results/accuracy_%s.jpg' % opt_class.__name__) 69 | 70 | #loss for each optimizer 71 | for opt_class in opt_classes: 72 | plt.close('all') 73 | for net_class in net_classes: 74 | for regularization in ['','_L1']: 75 | losses = np.load('results/loss_%s%s%s.npy' %(opt_class.__name__,spec[net_class.__name__],regularization)) 76 | plt.plot(index,losses,color[net_class.__name__][regularization]) 77 | plt.savefig('results/loss_%s.jpg' % opt_class.__name__) 78 | 79 | print('done') 80 | -------------------------------------------------------------------------------- /lab4_cgan.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | import numpy as np 4 | from torch.autograd import Variable 5 | from torchvision.utils import save_image 6 | from torch.utils.data import DataLoader 7 | from helpers.MyDataset import * 8 | 9 | trainData = MnistDataset('../datasets/mnist/','train') 10 | trainLoader=DataLoader( 11 | dataset = trainData , 12 | batch_size = 16 , 13 | shuffle = True , 14 | num_workers = 0 , 15 | drop_last = False 16 | ) 17 | 18 | img_shape = (1, 28, 28) 19 | 20 | class Generator(nn.Module): 21 | def __init__(self): 22 | super(Generator, self).__init__() 23 | 24 | self.label_emb = nn.Embedding(10, 10) 25 | 26 | def block(in_feat, out_feat, normalize=True): 27 | layers = [nn.Linear(in_feat, out_feat)] 28 | if normalize: 29 | layers.append(nn.BatchNorm1d(out_feat, 0.8)) 30 | layers.append(nn.LeakyReLU(0.2, inplace=True)) 31 | return layers 32 | 33 | self.model = nn.Sequential( 34 | *block(100 + 10, 128, normalize=False), 35 | *block(128, 512), 36 | *block(512, 512), 37 | *block(512, 512), 38 | *block(512, 1024), 39 | nn.Linear(1024, 
int(np.prod(img_shape))), 40 | nn.Tanh() 41 | ) 42 | 43 | def forward(self, noise, labels): 44 | # Concatenate label embedding and image to produce input 45 | gen_input = torch.cat((self.label_emb(labels), noise), -1) 46 | img = self.model(gen_input) 47 | img = img.view(img.size(0), *img_shape) 48 | return img 49 | 50 | 51 | class Discriminator(nn.Module): 52 | def __init__(self): 53 | super(Discriminator, self).__init__() 54 | 55 | self.label_embedding = nn.Embedding(10, 10) 56 | 57 | self.model = nn.Sequential( 58 | nn.Linear(10 + int(np.prod(img_shape)), 512), 59 | nn.LeakyReLU(0.2, inplace=True), 60 | nn.Linear(512, 512), 61 | nn.Dropout(0.4), 62 | nn.LeakyReLU(0.2, inplace=True), 63 | nn.Linear(512, 512), 64 | nn.Dropout(0.4), 65 | nn.LeakyReLU(0.2, inplace=True), 66 | nn.Linear(512, 512), 67 | nn.Dropout(0.4), 68 | nn.LeakyReLU(0.2, inplace=True), 69 | nn.Linear(512, 1), 70 | ) 71 | 72 | def forward(self, img, labels): 73 | # Concatenate label embedding and image to produce input 74 | d_in = torch.cat((img.view(img.size(0), -1), self.label_embedding(labels)), -1) 75 | validity = self.model(d_in) 76 | return validity 77 | 78 | ad_loss = torch.nn.MSELoss().cuda() 79 | generator = Generator().cuda() 80 | discriminator = Discriminator().cuda() 81 | 82 | opt_G = torch.optim.Adam(generator.parameters(), lr=0.0002, betas=(0.5,0.999)) 83 | opt_D = torch.optim.Adam(discriminator.parameters(), lr=0.0002, betas=(0.5,0.999)) 84 | 85 | 86 | FloatTensor = torch.cuda.FloatTensor 87 | LongTensor = torch.cuda.LongTensor 88 | 89 | n_epochs = 500 90 | def sample_image(n_row, batches_done): 91 | """Saves a grid of generated digits ranging from 0 to n_classes""" 92 | # Sample noise 93 | z = Variable(FloatTensor(np.random.normal(0, 1, (n_row ** 2, 100)))) 94 | # Get labels ranging from 0 to n_classes for n rows 95 | labels = np.array([num for _ in range(n_row) for num in range(n_row)]) 96 | labels = Variable(LongTensor(labels)) 97 | gen_imgs = generator(z, labels) 98 | save_image(gen_imgs.data, "results/cgan%d.jpg" % batches_done, nrow=n_row, normalize=True) 99 | 100 | 101 | for epoch in range(n_epochs): 102 | for i,(x,y) in enumerate(trainLoader): 103 | 104 | valid = Variable(FloatTensor(x.shape[0], 1).fill_(1.0), requires_grad=False) 105 | fake = Variable(FloatTensor(x.shape[0], 1).fill_(0.0), requires_grad=False) 106 | 107 | real_img=Variable(x.type(FloatTensor)) 108 | y = Variable(y.type(LongTensor)) 109 | 110 | opt_G.zero_grad() 111 | 112 | z = Variable(FloatTensor(np.random.normal(0, 1, (x.shape[0], 100)))) 113 | 114 | fake_img = Variable(LongTensor(np.random.randint(0, 10, x.shape[0]))) 115 | 116 | gen = generator(z,fake_img) 117 | validity = discriminator(gen,fake_img) 118 | g_loss = ad_loss(validity,valid) 119 | 120 | g_loss.backward() 121 | opt_G.step() 122 | 123 | opt_D.zero_grad() 124 | valid_real = discriminator(real_img,y) 125 | valid_loss = ad_loss(valid_real,valid) 126 | 127 | valid_fake = discriminator(gen.detach(),fake_img) 128 | fake_loss = ad_loss(valid_fake,fake) 129 | 130 | d_loss = (valid_loss+fake_loss)/2 131 | 132 | d_loss.backward() 133 | opt_D.step() 134 | 135 | if i==0: 136 | print('epoch = %d, loss_G = %f, loss_D = %f' %(epoch,d_loss.item(),g_loss.item())) 137 | 138 | batches_done = epoch * len(trainLoader) + i 139 | if batches_done % 1000 == 0: 140 | sample_image(n_row=10, batches_done=batches_done) 141 | -------------------------------------------------------------------------------- /lab4_gan.py: 
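lab4_gan.py below trains a plain fully-connected GAN on MNIST with BCELoss, and its image-dumping block is left commented out. To inspect samples after training, one possible sketch (assuming G, device_status and device_id from the script are in scope; the output path is illustrative) is:

import torch
from torchvision.utils import save_image

z = torch.randn(100, 100)
if device_status:
    z = z.to(device_id)
with torch.no_grad():
    fake = G(z)                                        #(100, 1, 28, 28); Hardsigmoid keeps values in [0, 1]
save_image(fake, 'results/gan_grid.jpg', nrow=10)      #10x10 grid of generated digits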
-------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from torch.utils.data import DataLoader 4 | from tqdm import tqdm 5 | import cv2 6 | import numpy as np 7 | import matplotlib.pyplot as plt 8 | from helpers.MyDataset import * 9 | 10 | #test device status 11 | device_status = torch.cuda.is_available() 12 | if device_status: 13 | device_id = 0 14 | torch.backends.cudnn.enabled = True 15 | torch.backends.cudnn.benchmark = True 16 | 17 | #initialize model 18 | G = nn.Sequential( 19 | nn.Linear(100,128) , 20 | nn.LeakyReLU(0.2,inplace=True) , 21 | 22 | nn.Linear(128,256) , 23 | nn.BatchNorm1d(256,0.8) , 24 | nn.LeakyReLU(0.2,inplace=True) , 25 | 26 | nn.Linear(256,512) , 27 | nn.BatchNorm1d(512,0.8) , 28 | nn.LeakyReLU(0.2,inplace=True) , 29 | 30 | nn.Linear(512,1024) , 31 | nn.BatchNorm1d(1024,0.8) , 32 | nn.LeakyReLU(0.2,inplace=True) , 33 | 34 | nn.Linear(1024,1*28*28) , 35 | nn.Hardsigmoid() , 36 | nn.Unflatten(1,(1,28,28)) 37 | ) 38 | D = nn.Sequential( 39 | nn.Flatten() , 40 | nn.Linear(1*28*28,512) , 41 | nn.LeakyReLU(0.2,inplace=True) , 42 | nn.Linear(512,256) , 43 | nn.LeakyReLU(0.2,inplace=True) , 44 | nn.Linear(256,1) , 45 | nn.Hardsigmoid() 46 | ) 47 | if device_status: 48 | G = G.to(device_id) 49 | D = D.to(device_id) 50 | 51 | #set hyperparameters 52 | mini_batch = 128 53 | opt_G = torch.optim.Adam(G.parameters(),lr=0.0001, betas=(0.5, 0.999)) 54 | opt_D = torch.optim.Adam(D.parameters(),lr=0.0001, betas=(0.5, 0.999)) 55 | loss_func = nn.BCELoss() 56 | if device_status: 57 | loss_func = loss_func.to(device_id) 58 | 59 | #load dataset 60 | trainData = MnistDataset('D://datasets/mnist/','t10k') 61 | trainLoader=DataLoader( 62 | dataset = trainData , 63 | batch_size = mini_batch , 64 | shuffle = True , 65 | num_workers = 0 , 66 | drop_last = True 67 | ) 68 | 69 | if __name__=='__main__': 70 | 71 | #ground truth 72 | true = torch.ones(mini_batch,1,dtype=torch.float32,requires_grad=False) 73 | false = torch.zeros(mini_batch,1,dtype=torch.float32,requires_grad=False) 74 | test_z = torch.randn(100,100) 75 | if device_status: 76 | true,false = true.to(device_id),false.to(device_id) 77 | test_z = test_z.to(device_id) 78 | 79 | losses = [] 80 | for epoch in range(101): 81 | loss_G,loss_D = 0.,0. 82 | for _,(real,_) in enumerate(trainLoader): 83 | z = torch.randn(mini_batch,100) 84 | if device_status: 85 | real = real.to(device_id) 86 | z = z.to(device_id) 87 | 88 | #train discriminator 89 | if loss_G<100: 90 | opt_D.zero_grad() 91 | loss_real = loss_func(D(real),true) 92 | loss_fake = loss_func(D(G(z)),false) 93 | loss = (loss_real+loss_fake)/2. 
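            #discriminator objective: the average of BCE(D(real), 1) and BCE(D(G(z)), 0);
            #backward() below fills gradients for both networks, but only opt_D.step() is applied,
            #so this step updates the discriminator alone -- the generator gets its own update further down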
94 | loss.backward(retain_graph=True) 95 | opt_D.step() 96 | loss_D += loss.item() 97 | 98 | #train generator 99 | opt_G.zero_grad() 100 | loss = loss_func(D(G(z)),true) 101 | loss.backward(retain_graph=False) 102 | opt_G.step() 103 | loss_G += loss.item() 104 | 105 | print('epoch = %d, loss_G = %f, loss_D = %f' %(epoch,loss_G,loss_D)) 106 | losses.append(loss_D) 107 | 108 | #test 109 | ''' 110 | if epoch%5==0: 111 | fake = G(test_z) 112 | 113 | if device_status: 114 | fake = fake.cpu() 115 | fake = fake.detach().numpy() 116 | 117 | plot = [] 118 | for i in range(10): 119 | row = [] 120 | for j in range(10): 121 | row.append(fake[i+10*j]) 122 | row = np.concatenate(row,axis=2) 123 | plot.append(row) 124 | plot = np.concatenate(plot,axis=1) 125 | plot = (plot*255.).astype(np.uint8) 126 | plot = plot.reshape(280,280) 127 | 128 | cv2.imwrite('results/gan%d.jpg' % epoch,plot) 129 | ''' 130 | index = np.arange(101) 131 | plt.plot(index,losses) 132 | plt.show() 133 | -------------------------------------------------------------------------------- /lab4_wgan.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from torch.utils.data import DataLoader 4 | from tqdm import tqdm 5 | import cv2 6 | import numpy as np 7 | import matplotlib.pyplot as plt 8 | from helpers.MyDataset import * 9 | 10 | #test device status 11 | device_status = torch.cuda.is_available() 12 | if device_status: 13 | device_id = 0 14 | torch.backends.cudnn.enabled = True 15 | torch.backends.cudnn.benchmark = True 16 | 17 | #initialize model 18 | G = nn.Sequential( 19 | nn.Linear(100,128) , 20 | nn.LeakyReLU(0.2,inplace=True) , 21 | 22 | nn.Linear(128,256) , 23 | nn.LeakyReLU(0.2,inplace=True) , 24 | 25 | nn.Linear(256,512) , 26 | nn.LeakyReLU(0.2,inplace=True) , 27 | 28 | nn.Linear(512,1024) , 29 | nn.LeakyReLU(0.2,inplace=True) , 30 | 31 | nn.Linear(1024,1*28*28) , 32 | nn.Hardsigmoid() , 33 | nn.Unflatten(1,(1,28,28)) 34 | ) 35 | D = nn.Sequential( 36 | nn.Flatten() , 37 | nn.Linear(1*28*28,512) , 38 | nn.LeakyReLU(0.2,inplace=True) , 39 | nn.Linear(512,256) , 40 | nn.LeakyReLU(0.2,inplace=True) , 41 | nn.Linear(256,1) , 42 | nn.Hardsigmoid() 43 | ) 44 | if device_status: 45 | G = G.to(device_id) 46 | D = D.to(device_id) 47 | 48 | #set hyperparameters 49 | mini_batch = 128 50 | opt_G = torch.optim.RMSprop(G.parameters(),lr=0.00001) 51 | opt_D = torch.optim.RMSprop(D.parameters(),lr=0.00001) 52 | 53 | #load dataset 54 | trainData = MnistDataset('D://datasets/mnist/','t10k') 55 | trainLoader=DataLoader( 56 | dataset = trainData , 57 | batch_size = mini_batch , 58 | shuffle = True , 59 | num_workers = 0 , 60 | drop_last = True 61 | ) 62 | 63 | if __name__=='__main__': 64 | 65 | #ground truth 66 | true = torch.ones(mini_batch,1,dtype=torch.float32,requires_grad=False) 67 | false = torch.zeros(mini_batch,1,dtype=torch.float32,requires_grad=False) 68 | test_z = torch.randn(100,100) 69 | if device_status: 70 | true,false = true.to(device_id),false.to(device_id) 71 | test_z = test_z.to(device_id) 72 | 73 | losses = [] 74 | for epoch in range(101): 75 | loss_G,loss_D = 0.,0. 
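        #WGAN schedule implemented below: the critic minimizes mean(D(G(z))) - mean(D(real))
        #(an estimate of the negative Wasserstein distance), its weights are then clamped to
        #[-0.01, 0.01] as a crude Lipschitz constraint, and the generator is stepped only on
        #every 5th batch (i % 5 == 0), i.e. n_critic = 5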
76 | for i,(real,_) in enumerate(trainLoader): 77 | z = torch.randn(mini_batch,100) 78 | if device_status: 79 | real = real.to(device_id) 80 | z = z.to(device_id) 81 | 82 | #train discriminator 83 | opt_D.zero_grad() 84 | loss = torch.mean(D(G(z)))-torch.mean(D(real)) 85 | loss.backward(retain_graph=True) 86 | opt_D.step() 87 | loss_D += loss.item() 88 | 89 | #Clip weights of discriminator 90 | for p in D.parameters(): 91 | p.data.clamp_(-0.01, 0.01) 92 | 93 | #train generator 94 | if i%5==0: 95 | opt_G.zero_grad() 96 | loss = -torch.mean(D(G(z))) 97 | loss.backward(retain_graph=False) 98 | opt_G.step() 99 | loss_G += loss.item() 100 | 101 | print('epoch = %d, loss_G = %f, loss_D = %f' %(epoch,loss_G,loss_D)) 102 | losses.append(loss_D) 103 | 104 | #test 105 | ''' 106 | if epoch%5==0: 107 | fake = G(test_z) 108 | 109 | if device_status: 110 | fake = fake.cpu() 111 | fake = fake.detach().numpy() 112 | 113 | plot = [] 114 | for i in range(10): 115 | row = [] 116 | for j in range(10): 117 | row.append(fake[i+10*j]) 118 | row = np.concatenate(row,axis=2) 119 | plot.append(row) 120 | plot = np.concatenate(plot,axis=1) 121 | plot = (plot*255.).astype(np.uint8) 122 | plot = plot.reshape(280,280) 123 | 124 | cv2.imwrite('results/wgan%d.jpg' % epoch,plot) 125 | ''' 126 | index = np.arange(101) 127 | plt.plot(index,losses) 128 | plt.show() 129 | -------------------------------------------------------------------------------- /models/BaseModels.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | import torch.nn.functional as F 3 | 4 | #convolutional module with identical input and output size 5 | class Conv2dSame(nn.Sequential): 6 | def __init__(self,in_channels,out_channels,kernel_size): 7 | assert type(kernel_size)==int,'Unsupported type '+str(type(kernel_size))+' for kernel_size' 8 | 9 | bound = kernel_size-1 10 | bound_l = bound//2 11 | bound_r = bound-bound_l 12 | 13 | super(Conv2dSame,self).__init__( 14 | nn.ReplicationPad2d((bound_l,bound_r,bound_l,bound_r)), 15 | nn.Conv2d(in_channels,out_channels,kernel_size) 16 | ) 17 | 18 | #Conv2dSame+ReLU+BatchNorm2d 19 | class Conv2dSame_BN_ReLU(nn.Sequential): 20 | def __init__(self,in_channels,out_channels,kernel_size): 21 | assert type(kernel_size)==int,'Unsupported type '+str(type(kernel_size))+' for kernel_size' 22 | 23 | super(Conv2dSame_BN_ReLU,self).__init__( 24 | Conv2dSame(in_channels,out_channels,kernel_size), 25 | nn.BatchNorm2d(out_channels) , 26 | nn.ReLU() 27 | ) 28 | -------------------------------------------------------------------------------- /models/ConvNetMNIST.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | from models.BaseModels import * 3 | 4 | #small convolutional network for MNIST dataset 5 | class ConvNetMNIST(nn.Module): 6 | def __init__(self,input_shape,num_feature,num_class,transfered=None): 7 | super(ConvNetMNIST,self).__init__() 8 | 9 | self.conv = nn.Sequential( #input_shape[0]*input_shape[1]*input_shape[2] 10 | Conv2dSame(in_channels=input_shape[0],out_channels=32,kernel_size=3), 11 | nn.ReLU(), 12 | Conv2dSame(in_channels=32,out_channels=64,kernel_size=3), 13 | nn.ReLU(), 14 | nn.MaxPool2d(2), 15 | nn.Dropout(0.25) 16 | ) #64*(input_shape[1]/2)*(input_shape[2]/2) 17 | 18 | self.fc1 = nn.Sequential( #64*(input_shape[1]/2)*(input_shape[2]/2) 19 | nn.Linear(64*(input_shape[1]//2)*(input_shape[2]//2),num_feature), 20 | nn.ReLU(), 21 | nn.Dropout(0.5) 22 | ) #num_feature 23 | 24 | self.fc2 = 
nn.Linear(num_feature,num_class) 25 | 26 | def forward(self,x,mode='all'): 27 | x = self.conv(x) 28 | x = x.view(x.size(0),-1) 29 | x = self.fc1(x) 30 | y = self.fc2(x) 31 | return y 32 | -------------------------------------------------------------------------------- /models/ConvNetMNIST_BN.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | from models.BaseModels import * 3 | 4 | #small convolutional network for MNIST dataset 5 | class ConvNetMNIST_BN(nn.Module): 6 | def __init__(self,input_shape,num_feature,num_class,transfered=None): 7 | super(ConvNetMNIST_BN,self).__init__() 8 | 9 | self.conv = nn.Sequential( #input_shape[0]*input_shape[1]*input_shape[2] 10 | Conv2dSame_BN_ReLU(in_channels=input_shape[0],out_channels=32,kernel_size=3), 11 | Conv2dSame_BN_ReLU(in_channels=32,out_channels=64,kernel_size=3), 12 | nn.MaxPool2d(2), 13 | nn.Dropout(0.25) 14 | ) #64*(input_shape[1]/2)*(input_shape[2]/2) 15 | 16 | self.fc1 = nn.Sequential( #64*(input_shape[1]/2)*(input_shape[2]/2) 17 | nn.Linear(64*(input_shape[1]//2)*(input_shape[2]//2),num_feature), 18 | nn.ReLU(), 19 | nn.Dropout(0.5) 20 | ) #num_feature 21 | 22 | self.fc2 = nn.Linear(num_feature,num_class) 23 | 24 | def forward(self,x,mode='all'): 25 | x = self.conv(x) 26 | x = x.view(x.size(0),-1) 27 | x = self.fc1(x) 28 | y = self.fc2(x) 29 | return y 30 | -------------------------------------------------------------------------------- /models/ConvNetSVHN2.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | from models.BaseModels import * 3 | 4 | class eca_layer(nn.Module): 5 | """Constructs an ECA module. 6 | Args: 7 | channel: Number of channels of the input feature map 8 | k_size: Adaptive selection of kernel size 9 | """ 10 | def __init__(self, channel, k_size=3): 11 | super(eca_layer, self).__init__() 12 | self.avg_pool = nn.AdaptiveAvgPool2d(1) 13 | self.conv = nn.Conv1d(1, 1, kernel_size=k_size, padding=(k_size - 1) // 2, bias=False) 14 | self.sigmoid = nn.Sigmoid() 15 | 16 | def forward(self, x): 17 | # x: input features with shape [b, c, h, w] 18 | b, c, h, w = x.size() 19 | 20 | # feature descriptor on the global spatial information 21 | y = self.avg_pool(x) 22 | 23 | # Two different branches of ECA module 24 | y = self.conv(y.squeeze(-1).transpose(-1, -2)).transpose(-1, -2).unsqueeze(-1) 25 | 26 | # Multi-scale information fusion 27 | y = self.sigmoid(y) 28 | 29 | return x * y.expand_as(x) 30 | 31 | #small convolutional network for SVHN dataset (format 2) 32 | class ConvNetSVHN2(nn.Module): 33 | def __init__(self,num_feature): 34 | super(ConvNetSVHN2,self).__init__() 35 | 36 | self.conv = nn.Sequential( #3*32*32 37 | Conv2dSame_BN_ReLU(3,32,3) , 38 | Conv2dSame_BN_ReLU(32,32,3) , 39 | nn.MaxPool2d(2) , 40 | nn.Dropout(0.3) , 41 | Conv2dSame_BN_ReLU(32,64,3) , 42 | Conv2dSame_BN_ReLU(64,64,3) , 43 | nn.MaxPool2d(2) , 44 | nn.Dropout(0.3) , 45 | Conv2dSame_BN_ReLU(64,128,3) , 46 | Conv2dSame_BN_ReLU(128,128,3) , 47 | nn.MaxPool2d(2) , 48 | nn.Dropout(0.3) 49 | ) #128*(32/8)*(32/8) 50 | 51 | self.eca = eca_layer(128) 52 | 53 | self.fc = nn.Sequential( #128*(32/8)*(32/8) 54 | nn.Linear(128,num_feature) , 55 | nn.ReLU() , 56 | nn.Dropout(0.3) , 57 | nn.Linear(num_feature,10) 58 | ) #10 59 | 60 | def forward(self,x): 61 | x = self.conv(x) 62 | #x = self.eca(x) 63 | #x = x.flatten(1,-1) 64 | x = x.mean(-1).mean(-1) 65 | x = self.fc(x) 66 | 67 | return x 68 | 
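A quick smoke test for ConvNetSVHN2 (a minimal sketch, run from the project root so the models package resolves; num_feature=256 and the batch size are illustrative choices, not values fixed by the repo):

import torch
from models.ConvNetSVHN2 import ConvNetSVHN2

net = ConvNetSVHN2(num_feature=256).eval()     #eval() freezes BatchNorm/Dropout for the check
x = torch.randn(8, 3, 32, 32)                  #dummy batch in SVHN format-2 shape
with torch.no_grad():
    logits = net(x)                            #conv stack, global average pooling, then the fc head
print(logits.shape)                            #expected: torch.Size([8, 10])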
-------------------------------------------------------------------------------- /models/__pycache__/BaseModels.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/models/__pycache__/BaseModels.cpython-37.pyc -------------------------------------------------------------------------------- /models/__pycache__/BaseModels.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/models/__pycache__/BaseModels.cpython-38.pyc -------------------------------------------------------------------------------- /models/__pycache__/ConvNetMNIST.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/models/__pycache__/ConvNetMNIST.cpython-38.pyc -------------------------------------------------------------------------------- /models/__pycache__/ConvNetSVHN2.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/models/__pycache__/ConvNetSVHN2.cpython-37.pyc -------------------------------------------------------------------------------- /perception.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from sys import argv 4 | from helpers.myDataset import * 5 | from helpers.myfunc import * 6 | from torch.utils.data import DataLoader 7 | 8 | #test device status 9 | device_status=torch.cuda.is_available() 10 | if device_status: 11 | device_id=0 12 | 13 | #initialize model 14 | net=nn.Sequential( 15 | nn.Linear(60,2) 16 | ) 17 | if device_status: 18 | net=net.to(device_id) 19 | 20 | #set hyperparameters 21 | loss_func=nn.CrossEntropyLoss() 22 | opt=torch.optim.Adam(net.parameters(),lr=0.0007) 23 | mini_batch=1 24 | 25 | #load dataset 26 | trainData=TrainData('sonar','./datasets/sonar_binary/sonar.all-data') 27 | testData=TestData('sonar','./datasets/sonar_binary/sonar.all-data') 28 | 29 | trainLoader=DataLoader( 30 | dataset=trainData, 31 | batch_size=mini_batch, 32 | shuffle=True, 33 | num_workers=0 34 | ) 35 | testLoader=DataLoader( 36 | dataset=testData, 37 | batch_size=testData.__len__(), 38 | shuffle=False, 39 | num_workers=0 40 | ) 41 | 42 | if __name__=='__main__': 43 | 44 | #train & test 45 | for epoch in range(100): 46 | net=net.train() 47 | for _,(x,y) in enumerate(trainLoader): 48 | if device_status: 49 | x,y=x.to(device_id),y.to(device_id) 50 | 51 | #calcualte estimated results 52 | opt.zero_grad() 53 | y_hat=net(x) 54 | 55 | #calculate loss and propagate back 56 | loss=loss_func(y_hat,y) 57 | loss.backward() 58 | opt.step() 59 | 60 | # if epoch%10!=0: 61 | # continue 62 | 63 | net=net.eval() 64 | positive_n=0 65 | for _,(x,y) in enumerate(testLoader): 66 | if device_status: 67 | x=x.to(device_id) 68 | 69 | #predict 70 | with torch.no_grad(): 71 | y_hat=net(x) 72 | 73 | #compare and count 74 | for i in range(len(y)): 75 | if torch.argmax(y_hat[i]).item()==y[i].item(): 76 | positive_n+=1 77 | 78 | print('epoch = %d accuracy = %f' %(epoch,positive_n/testData.__len__())) 79 | 80 | #save parameters 81 | 
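    #net.state_dict() holds just the Linear layer's weight and bias; to reuse the trained model,
    #rebuild the same nn.Sequential and call net.load_state_dict(torch.load('./results/perception.pkl'))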
torch.save(net.state_dict(),'./results/perception.pkl') 82 | -------------------------------------------------------------------------------- /reference/Perceptron.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | from load_data import load_data 5 | 6 | #sonar Attributes : 60 Classes : 2 7 | #wdbc Attributes : 30 Classes : 2 8 | 9 | dataset_path ={"sonar":"/home/ubuntu/zhangli/datasets/datasets/sonar_binary/sonar.all-data", 10 | "wdbc":"/home/ubuntu/zhangli/datasets/datasets/wdbc_binary/wdbc.data", 11 | } 12 | class Perceptron(nn.Module): 13 | def __init__(self, in_dim=2, out_dim=2): 14 | super(Perceptron, self).__init__() 15 | self.net = nn.Linear(in_dim, out_dim) 16 | #initialize the parameters 17 | for params in self.net.parameters(): 18 | nn.init.normal_(params, mean=0, std=0.01) 19 | #forward propagation of the input data through the model 20 | def forward(self, x): 21 | x = self.net(x) 22 | return x 23 | #train the weights 24 | def train(train_loader, model, criterion, optimizer): 25 | model.train() 26 | for i, (feature, label) in enumerate(train_loader): 27 | output = model(feature) 28 | loss = criterion(output, label) 29 | optimizer.zero_grad() 30 | loss.backward() 31 | optimizer.step() 32 | #validate the results 33 | def validate(val_loader, model, criterion): 34 | model.eval() 35 | correct = 0 36 | with torch.no_grad(): 37 | for i, (feature, label) in enumerate(val_loader): 38 | output = model(feature) 39 | #compute the prediction accuracy 40 | _, pred = output.topk(1, 1, True, True) 41 | correct += pred.eq(label.view(-1, 1)).sum(0, keepdim=True) 42 | return correct[0] 43 | 44 | 45 | if __name__ == '__main__': 46 | model = Perceptron(in_dim=30,out_dim=2) 47 | epoch = 100 48 | batch_size = 20 49 | learning_rate = 0.05 50 | #loss function 51 | criterion = nn.CrossEntropyLoss() 52 | #optimizer 53 | optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate) 54 | #load the training data 55 | train_data, val_data = load_data("wdbc",dataset_path["wdbc"]) 56 | #split the training data into mini-batches 57 | train_loader = torch.utils.data.DataLoader(train_data, batch_size=batch_size, shuffle=True) 58 | val_loader = torch.utils.data.DataLoader(val_data, batch_size=batch_size, shuffle=False) 59 | for i in range(epoch): 60 | train(train_loader, model, criterion, optimizer) 61 | correct = validate(val_loader, model, criterion) 62 | print("epoch:\t",i,"acc:\t",correct*100.0/len(val_data)) 63 | 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /reference/load_data.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | 4 | def load_data(data_name, file_path): 5 | if data_name == "sonar": 6 | #dataset = np.loadtxt("/home/ubuntu/zhangli/datasets/datasets/sonar_binary/sonar.all-data", dtype=str, skiprows=0, delimiter=',') 7 | dataset = np.loadtxt(file_path, dtype=str, skiprows=0, delimiter=',') 8 | feature = [dataset[i][0:-1] for i in range(len(dataset))] 9 | label = [0 if dataset[i][-1]=='R' else 1 for i in range(len(dataset))] 10 | train_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 != 0]).astype(float),dtype=torch.float32) 11 | val_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 == 0]).astype(float),dtype=torch.float32) 12 | train_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 != 0]) 13 | val_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 == 0]) 14 | elif data_name == "wdbc": 15 | #dataset = 
np.loadtxt("/home/ubuntu/zhangli/datasets/datasets/wdbc_binary/wdbc.data", dtype=str, skiprows=0, delimiter=',') 16 | dataset = np.loadtxt(file_path, dtype=str, skiprows=0, delimiter=',') 17 | feature = [dataset[i][2:] for i in range(len(dataset))] 18 | label = [0 if dataset[i][1]=='M' else 1 for i in range(len(dataset))] 19 | train_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 != 0]).astype(float),dtype=torch.float32) 20 | val_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 == 0]).astype(float),dtype=torch.float32) 21 | train_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 != 0]) 22 | val_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 == 0]) 23 | elif data_name == "soybean": 24 | #dataset = np.loadtxt("/home/ubuntu/zhangli/datasets/datasets/soybean_multi/soybean-large.data", dtype=str, skiprows=0, delimiter=',') 25 | dataset = np.loadtxt(file_path, dtype=str, skiprows=0, delimiter=',') 26 | for i in range(len(dataset)): 27 | for j in range(len(dataset[i])): 28 | if dataset[i][j] == '?': 29 | dataset[i][j] = '-1' 30 | feature = [dataset[i][1:] for i in range(len(dataset))] 31 | label = [] 32 | num = 0 33 | label_dict = {} 34 | for i in range(len(dataset)): 35 | if dataset[i][0] in label_dict: 36 | label.append(label_dict[dataset[i][0]]) 37 | else: 38 | label_dict[dataset[i][0]] = num 39 | label.append(num) 40 | num = num + 1 41 | train_feature = torch.tensor(np.matrix(feature[0:307]).astype(float),dtype=torch.float32) 42 | val_feature = torch.tensor(np.matrix(feature[307:]).astype(float),dtype=torch.float32) 43 | train_label = torch.tensor(label[0:307]) 44 | val_label = torch.tensor(label[307:]) 45 | elif data_name == "robot" or data_name == "iris": 46 | #dataset = np.loadtxt("/home/ubuntu/zhangli/datasets/datasets/robot_multi/sensor_readings_24.data", dtype=str, skiprows=0, delimiter=',') 47 | dataset = np.loadtxt(file_path, dtype=str, skiprows=0, delimiter=',') 48 | feature = [dataset[i][0:-1] for i in range(len(dataset))] 49 | label = [] 50 | num = 0 51 | label_dict = {} 52 | for i in range(len(dataset)): 53 | if dataset[i][-1] in label_dict: 54 | label.append(label_dict[dataset[i][-1]]) 55 | else: 56 | label_dict[dataset[i][-1]] = num 57 | label.append(num) 58 | num = num + 1 59 | train_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 != 0]).astype(float),dtype=torch.float32) 60 | val_feature = torch.tensor(np.matrix([feature[i] for i in range(len(feature)) if i % 3 == 0]).astype(float),dtype=torch.float32) 61 | train_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 != 0]) 62 | val_label = torch.tensor([label[i] for i in range(len(label)) if i % 3 == 0]) 63 | train_data = torch.utils.data.TensorDataset(train_feature,train_label) 64 | val_data = torch.utils.data.TensorDataset(val_feature,val_label) 65 | return train_data, val_data 66 | 67 | -------------------------------------------------------------------------------- /releases/experiment1.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/releases/experiment1.zip -------------------------------------------------------------------------------- /releases/experiment2.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/releases/experiment2.zip -------------------------------------------------------------------------------- /releases/experiment3.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/releases/experiment3.zip -------------------------------------------------------------------------------- /releases/experiment4.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lblaoke/TJU-DeepLearning2020/b7c56454cfe7c93c2de28e91e4fd4415fda571e3/releases/experiment4.zip --------------------------------------------------------------------------------