├── .gitignore
├── README.md
├── go
│   ├── perceptron.go
│   └── perceptron_test.go
├── python
│   ├── perceptron.py
│   └── requirements.txt
├── ruby
│   └── perceptron.rb
└── rust
    ├── .gitignore
    ├── Cargo.lock
    ├── Cargo.toml
    └── src
        └── main.rs

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.swp

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Perceptron Implementations

Here are two implementations of a Perceptron in **Python** and **Rust**. For
more information, please refer to the following two blog posts:

- https://blog.dbrgn.ch/2013/3/26/perceptrons-in-python/
- https://blog.dbrgn.ch/2015/5/15/perceptrons-in-rust/

## Contributed Implementations

- **Ruby** by @mhutter
- **Go** by @mhutter

## License

All code is released under the MIT license.

--------------------------------------------------------------------------------
/go/perceptron.go:
--------------------------------------------------------------------------------
package main

import (
	"fmt"
	"math/rand"
	"time"
)

const (
	// ETA is the learning rate
	ETA float64 = 0.2
	// N iterations
	N = 100
)

// TrainingDatum represents one training sample: a 3-component input
// vector (third component is the constant bias input) and the expected
// binary output.
type TrainingDatum struct {
	Input    [3]int8
	Expected int8
}

// dot returns the dot product of a 3-element input vector and the
// weight vector.
func dot(input [3]int8, weights []float64) float64 {
	return float64(input[0])*weights[0] +
		float64(input[1])*weights[1] +
		float64(input[2])*weights[2]
}

// heaviside step function: 0 for negative input, 1 otherwise.
func heaviside(in float64) int8 {
	if in < 0 {
		return 0
	}
	return 1
}

func main() {
	// Truth table for logical OR (third input is the bias term).
	// gofmt -s style: the element type is elided from each literal.
	trainingData := []TrainingDatum{
		{Input: [3]int8{0, 0, 1}, Expected: 0},
		{Input: [3]int8{0, 1, 1}, Expected: 1},
		{Input: [3]int8{1, 0, 1}, Expected: 1},
		{Input: [3]int8{1, 1, 1}, Expected: 1},
	}

	rand.Seed(time.Now().UnixNano())
	w := []float64{
		rand.Float64(),
		rand.Float64(),
		rand.Float64(),
	}

	fmt.Println("initial weight: ", w)

	// Perceptron learning rule: nudge the weights by ETA * error for N
	// randomly chosen samples.
	for i := 0; i < N; i++ {
		td := trainingData[rand.Intn(len(trainingData))]
		result := dot(td.Input, w)
		// td.Expected is already int8; no conversion needed.
		errorValue := td.Expected - heaviside(result)

		w[0] += float64(errorValue*td.Input[0]) * ETA
		w[1] += float64(errorValue*td.Input[1]) * ETA
		w[2] += float64(errorValue*td.Input[2]) * ETA
	}

	// Show how the trained perceptron classifies the training set.
	for _, td := range trainingData {
		result := dot(td.Input, w)

		fmt.Printf("%v: % .7f -> %d\n",
			td.Input[0:2],
			result,
			heaviside(result))
	}
	fmt.Println("final weight: ", w)
}

--------------------------------------------------------------------------------
/go/perceptron_test.go:
--------------------------------------------------------------------------------
package main

import "testing"

func TestHeaviside(t *testing.T) {
	data := map[float64]int8{
		-1000.1:             0,
		-0.0000000000000001: 0,
		0.0:                 1,
		0.00000000000000001: 1,
		133.7:               1,
	}

	for in, expected := range data {
		actual := heaviside(in)
		if actual != expected {
			t.Errorf("Expected: %d\n Got: %d", expected, actual)
		}
	}
}

--------------------------------------------------------------------------------
/python/perceptron.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
from random import choice
from numpy import array, dot, random

unit_step = lambda x: 0 if x < 0 else 1

# Truth table for logical OR; the third input component is the bias term.
training_data = [
    (array([0,0,1]), 0),
    (array([0,1,1]), 1),
    (array([1,0,1]), 1),
    (array([1,1,1]), 1),
]

w = random.rand(3)
errors = []
eta = 0.2
n = 100

# range (not the Python-2-only xrange) so the script runs on Python 3 too.
for i in range(n):
    x, expected = choice(training_data)
    result = dot(w, x)
    error = expected - unit_step(result)
    errors.append(error)
    w += eta * error * x

for x, _ in training_data:
    result = dot(x, w)
    print("{}: {} -> {}".format(x[:2], result, unit_step(result)))

--------------------------------------------------------------------------------
/python/requirements.txt:
--------------------------------------------------------------------------------
numpy

--------------------------------------------------------------------------------
/ruby/perceptron.rb:
--------------------------------------------------------------------------------
# A Perceptron in Ruby

# Some helper methods
module Refinements
  refine Numeric do
    def heaviside
      self < 0 ? 0 : 1
    end
  end

  refine Array do
    def dot(other)
      zip(other).map { |x, y| x * y }.inject(:+)
    end
  end
end

using Refinements

# definitions for output
OUTMAP = {
  -1 => '-',
  0 => '.',
  1 => '+'
}
OK = "\e[0;32m✓\e[m\e[m"
NOK = "\e[0;31m✗\e[m\e[m"

# constant data
TRAINING_DATA = [
  [[0, 0, 1], 0],
  [[0, 1, 1], 1],
  [[1, 0, 1], 1],
  [[1, 1, 1], 1]
]
ETA = 0.2
N = 100

# runtime data
weight = Array.new(3).map! { rand }
errors = []

puts "initial weight: #{weight}"

N.times do
  data, expected = TRAINING_DATA.sample
  result = data.dot(weight)
  error = expected - result.heaviside
  errors << error

  (0...weight.size).each do |i|
    weight[i] += ETA * error * data[i]
  end
end

# output for control
TRAINING_DATA.each do |data, expected|
  result = data.dot(weight)

  correct = result.heaviside == expected ? OK : NOK
  puts format('%s: % .7f -> %s %s',
              data[0, 2],
              result,
              result.heaviside,
              correct)
end

puts "final weight: #{weight}"

# display the progress
puts errors.map { |e| OUTMAP[e] }.join

--------------------------------------------------------------------------------
/rust/.gitignore:
--------------------------------------------------------------------------------
target

--------------------------------------------------------------------------------
/rust/Cargo.lock:
--------------------------------------------------------------------------------
[root]
name = "perceptron"
version = "0.0.1"
dependencies = [
 "rand 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "libc"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "rand"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
 "libc 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
]

--------------------------------------------------------------------------------
/rust/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "perceptron"
version = "0.0.1"
authors = ["Danilo Bargen "]

[dependencies]
rand = "0.3"

--------------------------------------------------------------------------------
/rust/src/main.rs:
--------------------------------------------------------------------------------
extern crate rand;

use rand::Rng;
use rand::distributions::{Range, IndependentSample};


/// Heaviside Step Function
trait Heaviside {
    fn heaviside(&self) -> i8;
}

/// Implement heaviside() for f64
impl Heaviside for f64 {
    fn heaviside(&self) -> i8 {
        (*self >= 0.0) as i8
    }
}


/// Dot product of input and weights
fn dot(input: (i8, i8, i8), weights: (f64, f64, f64)) -> f64 {
    input.0 as f64 * weights.0
        + input.1 as f64 * weights.1
        + input.2 as f64 * weights.2
}


/// One training sample: a 3-component input (third component is the
/// constant bias input) and the expected binary output.
struct TrainingDatum {
    input: (i8, i8, i8),
    expected: i8,
}


fn main() {
    println!("Hello, perceptron!");

    let mut rng = rand::thread_rng();

    // Provide some training data
    let training_data = [
        TrainingDatum { input: (0, 0, 1), expected: 0 },
        TrainingDatum { input: (0, 1, 1), expected: 1 },
        TrainingDatum { input: (1, 0, 1), expected: 1 },
        TrainingDatum { input: (1, 1, 1), expected: 1 },
    ];

    // Initialize weight vector with random data between 0 and 1
    let range = Range::new(0.0, 1.0);
    let mut w = (
        range.ind_sample(&mut rng),
        range.ind_sample(&mut rng),
        range.ind_sample(&mut rng),
    );

    // Learning rate
    let eta = 0.2;

    // Number of iterations
    let n = 100;

    // Training
    println!("Starting training phase with {} iterations...", n);
    for _ in 0..n {
        // Choose a random training sample
        let &TrainingDatum { input: x, expected } = rng.choose(&training_data).unwrap();

        // Calculate the dot product
        let result = dot(x, w);

        // Calculate the error
        let error = expected - result.heaviside();

        // Update the weights
        w.0 += eta * error as f64 * x.0 as f64;
        w.1 += eta * error as f64 * x.1 as f64;
        w.2 += eta * error as f64 * x.2 as f64;
    }

    // Show result
    for &TrainingDatum { input, .. } in &training_data {
        let result = dot(input, w);
        println!("{} OR {}: {:.*} -> {}", input.0, input.1, 8, result, result.heaviside());
    }
}


#[cfg(test)]
mod test {
    use super::Heaviside;

    #[test]
    fn heaviside_positive() {
        assert_eq!((0.5).heaviside(), 1i8);
    }

    #[test]
    fn heaviside_zero() {
        assert_eq!((0.0).heaviside(), 1i8);
    }

    #[test]
    fn heaviside_negative() {
        assert_eq!((-0.5).heaviside(), 0i8);
    }
}