├── MANIFEST.in ├── lifestyles ├── __version__.py └── models │ ├── cbc.py │ ├── cbc_hb.py │ └── ranking_based_hierarchical_bayes.py ├── requirements.txt ├── data ├── lemonade │ ├── comparisons.tsv │ ├── profiles.tsv │ └── selections.tsv └── computer_buyers │ ├── design_matrix.csv │ ├── personal_characteristics.csv │ └── likelihood_to_buy.csv ├── LICENSE └── README.md /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE 2 | -------------------------------------------------------------------------------- /lifestyles/__version__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.1' 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | numpy 3 | pymc3 4 | matplotlib 5 | -------------------------------------------------------------------------------- /data/lemonade/comparisons.tsv: -------------------------------------------------------------------------------- 1 | Comparisons Profile1 Profile2 Profile3 2 | 1 5 3 11 3 | 2 9 2 12 4 | 3 1 6 10 5 | 4 12 4 8 6 | 5 3 2 9 7 | 6 7 1 11 8 | 7 10 6 5 9 | 8 7 12 5 10 | 9 11 8 6 11 | 10 4 3 10 12 | 11 6 7 3 13 | 12 2 5 8 14 | 13 9 10 4 15 | 14 5 7 4 16 | 15 3 9 1 17 | 16 11 12 2 18 | 17 2 4 1 19 | 18 8 1 12 20 | 19 8 9 6 21 | 20 10 11 7 22 | -------------------------------------------------------------------------------- /data/lemonade/profiles.tsv: -------------------------------------------------------------------------------- 1 | Profile Temperature Sugar Lemon Intensity 2 | 1 Warm 2sugar no Medium 3 | 2 VeryWarm NoSugar yes Medium 4 | 3 Warm 1sugar no Strong 5 | 4 Warm NoSugar yes Low 6 | 5 Ice 2sugar yes Strong 7 | 6 Ice 1sugar no Low 8 | 7 Warm NoSugar no Strong 9 | 8 Warm 1sugar yes Medium 10 | 9 Warm 2sugar yes Low 11 | 10 VeryWarm 1sugar yes Strong 12 | 11 Ice NoSugar no Medium 13 | 12 VeryWarm 2sugar no Low 14 | -------------------------------------------------------------------------------- /data/computer_buyers/design_matrix.csv: -------------------------------------------------------------------------------- 1 | Hot line,RAM,Screen size,CPU speed,Hard disk,CD ROM,Cache,Color of unit,Availability,Warranty,Software,Guarantee,Price 2 | 1,-1,1,1,-1,-1,1,-1,-1,1,1,1,-1 3 | 1,1,1,-1,1,-1,1,1,-1,-1,-1,1,1 4 | 1,-1,-1,-1,1,1,1,-1,1,-1,1,1,1 5 | 1,1,-1,1,-1,1,1,1,1,1,-1,1,-1 6 | 1,1,-1,1,1,-1,-1,1,-1,1,1,-1,1 7 | -1,-1,-1,-1,1,-1,-1,1,1,1,-1,1,-1 8 | -1,-1,1,1,-1,1,-1,1,-1,-1,-1,1,1 9 | -1,1,-1,1,1,1,1,-1,-1,-1,-1,-1,-1 10 | -1,-1,1,1,1,-1,1,1,1,-1,1,-1,-1 11 | 1,-1,1,1,1,1,-1,-1,1,1,-1,-1,1 12 | -1,1,-1,1,-1,-1,-1,-1,1,-1,1,1,1 13 | 1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 14 | 1,1,1,-1,-1,1,-1,1,1,-1,1,-1,-1 15 | -1,1,1,-1,1,1,-1,-1,-1,1,1,1,-1 16 | -1,1,1,-1,-1,-1,1,-1,1,1,-1,-1,1 17 | -1,-1,-1,-1,-1,1,1,1,-1,1,1,-1,1 18 | 1,1,1,-1,-1,1,1,1,-1,-1,1,-1,1 19 | -1,1,-1,-1,1,1,-1,1,1,-1,1,1,1 20 | 1,1,-1,1,1,-1,-1,1,1,1,-1,-1,-1 21 | -1,-1,-1,1,-1,1,1,-1,1,-1,1,1,-1 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 Cameron Davidson-Pilon 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /data/lemonade/selections.tsv: -------------------------------------------------------------------------------- 1 | Comparisons Individual1 Individual2 Individual3 Individual4 Individual5 Individual6 Individual7 Individual8 Individual9 Individual10 2 | 1 1 3 2 2 1 1 3 3 2 1 3 | 2 1 2 3 3 1 3 2 3 2 1 4 | 3 2 3 2 1 1 3 2 3 1 2 5 | 4 2 2 2 1 1 1 2 1 3 2 6 | 5 1 2 3 1 3 2 2 3 2 3 7 | 6 3 1 3 2 2 1 3 3 2 2 8 | 7 2 2 2 2 3 1 2 1 2 2 9 | 8 3 1 2 2 3 2 1 2 3 3 10 | 9 1 1 3 1 1 1 3 1 1 3 11 | 10 2 1 1 2 2 3 3 3 2 2 12 | 11 1 2 1 2 1 3 1 2 3 1 13 | 12 2 1 1 2 2 1 1 1 1 2 14 | 13 2 3 3 3 1 2 2 2 2 2 15 | 14 1 3 3 2 1 2 3 1 3 1 16 | 15 3 1 2 1 2 3 1 3 1 1 17 | 16 1 1 2 2 2 2 3 2 3 1 18 | 17 1 1 2 3 3 1 1 2 3 3 19 | 18 3 2 3 3 2 1 2 3 2 2 20 | 19 3 3 3 3 1 2 3 1 3 3 21 | 20 2 2 1 2 2 1 2 2 2 2 22 | -------------------------------------------------------------------------------- /lifestyles/models/cbc.py: -------------------------------------------------------------------------------- 1 | from theano import tensor as tt 2 | import pandas as pd 3 | import pymc3 as pm 4 | 5 | """ 6 | Looking at what xlstat does, they make sure that all weights in a level sum to 0 7 | """ 8 | profiles = pd.get_dummies(pd.read_csv("data/lemonade/profiles.tsv", sep="\s+").set_index('Profile'), drop_first=True) 9 | comparisons = pd.read_csv("data/lemonade/comparisons.tsv", sep="\s+").set_index('Comparisons') 10 | selections = pd.read_csv("data/lemonade/selections.tsv", sep="\s+").set_index("Comparisons") 11 | 12 | 13 | first_choice = profiles.loc[comparisons['Profile1']] 14 | second_choice = profiles.loc[comparisons['Profile2']] 15 | third_choice = profiles.loc[comparisons['Profile3']] 16 | 17 | 18 | with pm.Model() as hierarchical_model: 19 | 20 | weights = pm.Normal("weights", 0, sd=10., shape=(profiles.shape[1], 1)) 21 | 22 | probs = tt.nnet.softmax(tt.stack([ 23 | tt.dot(first_choice, weights), 24 | tt.dot(second_choice, weights), 25 | tt.dot(third_choice, weights) 26 | ], axis=0).T) 27 | 28 | cs = [pm.Categorical("Obs%d" % i, probs, observed=(selections['Individual%i' % i] - 1).values) for i in xrange(1, 11)] 29 | 30 | with hierarchical_model: 31 | 32 | hierarchical_trace = pm.sample(40000, pm.Metropolis(), tune=2000) 33 | 34 | 35 | pm.plots.traceplot(hierarchical_trace) 36 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![lifestyles_logo](https://imgur.com/SjZBq1V.png) 2 | 3 | _lifestyles_ is a Python package for performing conjoint analysis. What is conjoint analysis? I'm glad you asked! Conjoint analysis is an alternative survey analysis technique. Instead of asking survey partcipants about how they feel about specific characteristics, instead the particpants are asked to evaluate holistically. For example, suppose you are interested in creating a new lemonade beverage, and you want to better understand what your potential customers' preferences are. We _could_ design a survey like: 4 | 5 | 6 | > Q1. How much sugar do you prefer in your lemonade? 7 | > - [ ] No sugar 8 | > - [ ] 1 sugar 9 | > - [ ] 2 sugar 10 | > 11 | > Q2. How much lemon do you prefer in your lemonade? 12 | > - [ ] None 13 | > - [ ] Some 14 | > 15 | > Q3. How much .... 16 | 17 | There are some drawbacks to this survey design. We have isolated the attributes of the lemonade, so participants must also compare in isolation. This isn't how consumers make choices. Instead they compare products holistically. Compare the above survey to this instead: 18 | 19 | > Q1. Which lemonade would you prefer to purchase? 20 | > - [ ] Some sugar, ice cold and strong lemon flavour 21 | > - [ ] No sugar, ice cold and mild lemon and mild mint flavour 22 | 23 | Or, something like: 24 | 25 | > Q2. On a scale of 1 to 10, how likely are you to purchase the following lemonade? 26 | > 27 | > Warm, honey-sweetened, with strong lemon flavour. 28 | > 29 | > 1 ♢ ♢ ♢ ♢ ♢ ♢ ♢ ♢ ♢ ♢ 10 30 | 31 | 32 | The latter surveys asks us to look at beverages, and not attributes. This is the much more common consumer task. Indeed, walking into a convience store for a lemonade implies the consumer will have to make these decisions. 33 | 34 | How can we analyze surveys like this? That's where conjoint analysis comes in. The statistical methods will decompose the consumers' choices into what attributes strongly correlate with purchase or selection. 35 | 36 | ### Work in Progress 37 | 38 | This library is a work-in-progress, and alpha-stage development. 39 | 40 | ### References 41 | - [Hierarchical Bayes Conjoint Analysis: Recovery of Partworth Heterogeneity from Reduced Experimental Designs. Peter J. Lenk; Wayne S](http://webuser.bus.umich.edu/plenk/HB%20Conjoint%20Lenk%20DeSarbo%20Green%20Young%20MS%201996.pdf). 42 | - [Software for Hierarchical Bayes 43 | Estimation for CBC Data](https://www.sawtoothsoftware.com/download/ssiweb/CBCHB_Manual.pdf). 44 | - [Case study into university pricing](https://conjoint.online/2017/04/20/pricing-case-study/) 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /data/computer_buyers/personal_characteristics.csv: -------------------------------------------------------------------------------- 1 | FEMALE,YEARS,OWN,TECH,APPLY,EXPERT 2 | 0,1,1,1,4,9 3 | 0,0,1,1,1,6 4 | 0,1,0,1,5,10 5 | 1,1,1,1,8,10 6 | 0,0,1,1,1,9 7 | 1,0,1,1,5,7 8 | 0,0,1,1,2,6 9 | 0,0,1,1,3,10 10 | 1,0,1,1,3,6 11 | 0,0,0,1,2,4 12 | 0,1,1,1,3,10 13 | 0,0,1,1,6,8 14 | 0,0,1,1,3,10 15 | 0,1,0,1,2,9 16 | 0,0,0,1,7,8 17 | 0,0,1,1,2,8 18 | 0,0,1,1,5,8 19 | 0,1,0,0,0,8 20 | 1,0,1,0,2,6 21 | 0,0,0,1,2,8 22 | 0,0,1,1,3,7 23 | 1,0,1,0,1,3 24 | 0,1,1,1,6,8 25 | 0,0,1,1,6,10 26 | 0,0,1,1,3,8 27 | 0,0,1,1,2,10 28 | 0,1,1,0,0,6 29 | 0,0,1,1,3,8 30 | 0,1,1,0,4,10 31 | 0,0,0,1,1,7 32 | 0,0,0,1,1,7 33 | 0,1,1,1,3,10 34 | 0,0,1,1,2,8 35 | 0,0,1,1,4,10 36 | 0,0,1,1,5,9 37 | 1,0,1,1,1,3 38 | 0,0,0,0,3,3 39 | 0,0,1,1,2,7 40 | 1,0,1,1,2,4 41 | 1,0,1,1,4,7 42 | 0,0,1,1,3,7 43 | 0,0,1,1,3,8 44 | 0,1,0,1,2,9 45 | 0,1,1,1,6,10 46 | 1,0,1,1,2,7 47 | 0,0,1,1,6,9 48 | 0,0,1,0,2,4 49 | 0,0,1,1,4,10 50 | 1,1,1,1,1,4 51 | 1,0,0,1,3,8 52 | 0,1,1,1,5,10 53 | 0,1,1,1,3,8 54 | 0,0,0,0,0,6 55 | 1,1,1,0,4,8 56 | 0,1,1,1,3,7 57 | 1,1,1,1,3,8 58 | 0,0,1,1,2,8 59 | 0,0,0,1,4,9 60 | 0,0,0,1,2,8 61 | 1,0,1,1,4,8 62 | 1,0,1,1,5,6 63 | 0,1,1,1,6,10 64 | 1,1,1,1,1,6 65 | 0,0,0,1,2,10 66 | 0,0,1,1,5,9 67 | 0,1,1,0,2,6 68 | 0,0,1,1,3,8 69 | 0,0,1,1,3,5 70 | 0,1,0,1,5,10 71 | 0,1,1,1,7,10 72 | 1,0,1,1,5,7 73 | 1,0,1,1,2,4 74 | 1,0,1,1,4,8 75 | 0,0,1,1,4,9 76 | 1,0,1,1,3,6 77 | 0,0,1,1,3,6 78 | 0,0,1,1,3,4 79 | 0,0,1,1,6,10 80 | 0,0,0,0,0,6 81 | 0,1,1,1,4,10 82 | 0,0,0,0,1,6 83 | 0,1,1,1,6,10 84 | 0,1,1,1,4,10 85 | 1,0,1,1,3,9 86 | 0,0,1,1,3,8 87 | 1,1,1,0,2,8 88 | 0,0,1,1,5,5 89 | 0,0,1,1,2,8 90 | 1,0,1,1,2,6 91 | 0,0,0,1,3,10 92 | 0,1,1,0,2,10 93 | 1,0,1,1,2,9 94 | 1,1,1,1,2,8 95 | 0,0,0,1,3,8 96 | 1,0,1,1,3,6 97 | 0,0,1,1,4,10 98 | 0,0,1,1,1,10 99 | 0,1,1,1,2,7 100 | 0,0,0,0,2,7 101 | 0,1,1,1,5,8 102 | 0,0,1,1,4,10 103 | 0,0,1,0,2,4 104 | 0,0,1,1,3,4 105 | 1,0,1,1,2,4 106 | 0,0,0,1,4,4 107 | 0,0,1,1,2,5 108 | 0,0,1,1,6,7 109 | 0,0,0,1,6,8 110 | 0,0,0,0,2,8 111 | 0,0,0,0,2,6 112 | 0,0,1,1,4,9 113 | 0,0,1,1,3,6 114 | 0,1,1,1,3,8 115 | 1,1,1,1,2,6 116 | 0,0,1,1,3,6 117 | 0,1,1,1,3,7 118 | 1,0,1,1,3,6 119 | 0,0,0,1,1,5 120 | 1,0,1,1,2,8 121 | 0,0,0,1,3,10 122 | 0,1,1,1,3,10 123 | 0,0,0,1,1,6 124 | 0,0,1,1,2,8 125 | 0,0,1,1,3,8 126 | 1,1,1,1,4,4 127 | 0,0,1,1,3,8 128 | 0,0,1,1,4,8 129 | 0,0,1,0,3,10 130 | 0,0,1,1,3,10 131 | 0,1,1,1,3,9 132 | 1,0,1,1,4,7 133 | 1,0,0,1,1,7 134 | 1,0,1,1,2,8 135 | 1,0,1,1,4,8 136 | 0,0,1,1,4,8 137 | 0,0,1,0,0,4 138 | 0,0,1,0,1,8 139 | 1,0,1,1,2,7 140 | 1,0,1,1,4,7 141 | 0,0,1,1,2,8 142 | 1,0,1,0,0,4 143 | 1,1,1,1,4,8 144 | 0,0,1,1,2,8 145 | 0,0,1,1,6,9 146 | 0,0,0,1,6,7 147 | 0,1,1,1,6,8 148 | 1,0,0,1,2,5 149 | 0,0,1,1,4,7 150 | 1,0,1,0,1,4 151 | 0,0,1,1,3,8 152 | 0,1,0,0,0,10 153 | 0,0,1,1,3,8 154 | 1,0,1,1,1,5 155 | 0,0,1,1,2,9 156 | 0,0,1,1,4,8 157 | 0,0,1,0,3,7 158 | 0,0,0,1,1,7 159 | 1,1,1,1,7,10 160 | 1,1,1,1,5,10 161 | 0,0,1,1,2,4 162 | 1,0,1,0,3,4 163 | 0,1,0,1,6,9 164 | 1,0,1,0,2,6 165 | 1,0,1,1,2,8 166 | 0,1,0,1,4,10 167 | 0,1,1,1,4,8 168 | 0,1,1,1,3,10 169 | 0,1,0,1,5,9 170 | 0,0,1,1,5,9 171 | 0,0,0,1,6,7 172 | 0,0,1,1,5,8 173 | 0,1,1,1,3,4 174 | 0,1,1,1,3,8 175 | 0,0,1,1,3,7 176 | 0,0,1,1,0,6 177 | 1,0,1,1,2,7 178 | 0,0,0,1,4,10 179 | 1,0,1,0,3,8 180 | 0,0,1,1,2,7 181 | 0,0,0,1,5,6 182 | 0,0,1,1,4,10 183 | 0,1,1,1,5,10 184 | 0,0,1,1,8,8 185 | 1,0,1,1,1,7 186 | 0,1,0,0,4,6 187 | 0,0,1,0,1,7 188 | 0,1,1,0,2,10 189 | 0,0,1,1,3,10 190 | 0,0,0,1,4,8 191 | 1,0,1,1,2,4 192 | -------------------------------------------------------------------------------- /lifestyles/models/cbc_hb.py: -------------------------------------------------------------------------------- 1 | from theano import tensor as tt 2 | import pandas as pd 3 | import pymc3 as pm 4 | import numpy as np 5 | 6 | 7 | def _create_observation_variable(individual_selections, choices, partsworth): 8 | """ 9 | This function handles creating the PyMC3 observation variables. It also gracefully handles missing observations in individual selections. 10 | 11 | `individual_selections` is a Series of the individuals selections made, starting from 0. It can contain NaNs which represent answer was not provided. 12 | 13 | `choices` is a DataFrame with a hierarchical index: level=0 enumerates the choices, and level=1 displays the profile at a specific choice. 14 | It's size is (n_questions, n_choices_per_question). 15 | 16 | `partsworth` is a slice of PyMC3 matrix. It represents the partsworth variables of a individual. Size is (n_profiles,) 17 | 18 | This computes the values exp(partsworth * profile_j) / sum[ exp(partsworth * profile_k ] for all j. 19 | """ 20 | nan_mask = pd.notnull(individual_selections) 21 | return pm.Categorical("Obs_%s" % individual_selections.name, 22 | tt.nnet.softmax(tt.stack([ 23 | tt.dot(choice.values, partsworth) for _, choice in choices[nan_mask.values].groupby(axis=1, level=0) 24 | ], axis=0).T), 25 | observed=individual_selections[nan_mask.values].values) 26 | 27 | 28 | def model(profiles, comparisons, selections, sample=2500, alpha_prior_std=10): 29 | all_attributes = pd.get_dummies(profiles).columns 30 | profiles_dummies = pd.get_dummies(profiles, drop_first=True) 31 | choices = pd.concat({profile: profiles_dummies.loc[comparisons[profile]].reset_index(drop=True) for profile in comparisons.columns}, axis=1) 32 | 33 | respondants = selections.columns 34 | n_attributes_in_model = profiles_dummies.shape[1] 35 | n_participants = selections.shape[1] 36 | 37 | with pm.Model(): 38 | 39 | # https://www.sawtoothsoftware.com/download/ssiweb/CBCHB_Manual.pdf 40 | # need to include the covariance matrix as a parent of `partsworth` 41 | alpha = pm.Normal('alpha', 0, sd=alpha_prior_std, shape=n_attributes_in_model, testval=np.random.randn(n_attributes_in_model)) 42 | partsworth = pm.MvNormal("partsworth", alpha, tau=np.eye(n_attributes_in_model), shape=(n_participants, n_attributes_in_model)) 43 | 44 | cs = [_create_observation_variable(selection, choices, partsworth[i, :]) for i, (_, selection) in enumerate(selections.iteritems())] 45 | 46 | trace = pm.sample(sample) 47 | return transform_trace_to_individual_summary_statistics(trace, respondants, profiles_dummies.columns, all_attributes) 48 | 49 | 50 | def transform_trace_to_individual_summary_statistics(trace, respondants, attributes_in_model, all_attributes): 51 | 52 | def create_linear_combination(df): 53 | name = df.name 54 | cols_to_impute_from = [c for c in attributes_in_model if c.startswith(name)] 55 | col_to_impute = [c for c in all_attributes if (c.startswith(name) and (c not in cols_to_impute_from))][0] 56 | df.loc[col_to_impute] = -df.loc[cols_to_impute_from].groupby(level=1).sum().values 57 | return df 58 | 59 | partsworth_trace = trace.get_values("partsworth") 60 | N, _, _ = partsworth_trace.shape 61 | sample_axis_index = range(N) 62 | df = pd.concat( 63 | [ 64 | pd.DataFrame(partsworth_trace[:, :, i], 65 | columns=respondants, 66 | index=pd.MultiIndex.from_product([[attr], sample_axis_index]) 67 | ) 68 | for i, attr in enumerate(attributes_in_model) 69 | ] 70 | ) 71 | 72 | df = df.reindex(pd.MultiIndex.from_product([all_attributes, sample_axis_index])) 73 | df = df.groupby(lambda i: i.split("_")[0], level=0, group_keys=False).apply(create_linear_combination) 74 | return df.groupby(level=0).describe().swaplevel(axis=1) 75 | 76 | 77 | def calculate_individual_importance(individual_summary_stats): 78 | pass 79 | 80 | 81 | if __name__ == "__main__": 82 | # data from https://help.xlstat.com/customer/en/portal/articles/2062399-choice-based-conjoint-analysis-with-hierarchical-bayes-cbc-hb- 83 | profiles = pd.read_csv("data/lemonade/profiles.tsv", sep="\s+").set_index('Profile') 84 | comparisons = pd.read_csv("data/lemonade/comparisons.tsv", sep="\s+").set_index('Comparisons') 85 | selections = pd.read_csv("data/lemonade/selections.tsv", sep="\s+").set_index("Comparisons") - 1 86 | ind_summary_stats = model(profiles, comparisons, selections) 87 | print ind_summary_stats 88 | -------------------------------------------------------------------------------- /lifestyles/models/ranking_based_hierarchical_bayes.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | This model looks like the following: 4 | 5 | Y_i = X_i \beta_i + \epsilon_i for i=1..n 6 | 7 | \beta_i = \Sigma z_i + \delta_i 8 | 9 | where z_i are individual characteristics of the participants and X_i and the characteristic of the 10 | objects (profiles) shown to user i. Y_i encode the responses. 11 | 12 | The matrix \Sigma represents the unobserved preferences, by participant characterstic 13 | and object characteristic. Thus, each participant has a specific preference vector, \beta_i 14 | 15 | Example output of \Sigma: 16 | 17 | Hot line RAM Screen size CPU speed Hard disk \ 18 | APPLY mean -0.007746 0.044346 0.010688 0.019450 0.017453 19 | std 0.052518 0.051294 0.053317 0.054735 0.053340 20 | EXPERT mean 0.036692 -0.044133 0.010544 0.038605 0.048322 21 | std 0.048852 0.047047 0.048433 0.047586 0.049023 22 | FEMALE mean 0.301551 -0.007823 -0.116979 -0.009133 -0.059673 23 | std 0.188446 0.184779 0.189560 0.192246 0.189363 24 | OWN mean 0.025637 -0.014327 0.046543 -0.288823 -0.022155 25 | std 0.193098 0.194544 0.202576 0.191677 0.205398 26 | TECH mean -0.092104 0.216937 -0.037374 0.111902 0.039594 27 | std 0.219524 0.229970 0.221848 0.225858 0.223730 28 | YEARS mean -0.031091 0.153652 0.052316 0.206464 -0.004942 29 | std 0.180996 0.181811 0.183941 0.178567 0.178801 30 | constant mean -0.171966 0.286664 0.071539 0.162412 -0.267776 31 | std 0.373027 0.364603 0.383818 0.370149 0.386035 32 | 33 | CD ROM Cache Color of unit Availability Warranty \ 34 | APPLY mean 0.030005 -0.024019 -0.007257 -0.023260 0.003275 35 | std 0.052011 0.053753 0.054286 0.052572 0.052126 36 | EXPERT mean -0.014856 0.037270 0.004334 -0.018608 -0.001328 37 | std 0.047208 0.049132 0.049493 0.046833 0.047744 38 | FEMALE mean -0.176534 -0.050838 0.001616 0.017704 0.114448 39 | std 0.189728 0.189738 0.190505 0.181998 0.189794 40 | OWN mean 0.126685 -0.046220 -0.029179 0.039143 -0.039319 41 | std 0.185128 0.189676 0.196250 0.189792 0.196129 42 | TECH mean -0.015982 0.070722 0.028498 0.121370 0.012058 43 | std 0.229553 0.218412 0.227133 0.227576 0.233561 44 | YEARS mean -0.070300 0.011943 -0.117638 -0.125296 -0.028073 45 | std 0.183298 0.186200 0.182523 0.177328 0.188354 46 | constant mean 0.475872 -0.166859 0.014595 0.215484 0.129247 47 | std 0.369968 0.386870 0.385288 0.366806 0.369530 48 | 49 | Software Guarantee Price constant 50 | APPLY mean 0.015383 0.000679 -0.010254 0.177272 51 | std 0.052421 0.053174 0.052620 0.051181 52 | EXPERT mean -0.014422 0.003025 0.052377 0.123728 53 | std 0.048148 0.048165 0.048086 0.048768 54 | FEMALE mean -0.048464 -0.001087 0.345567 0.043020 55 | std 0.194091 0.188258 0.188386 0.186807 56 | OWN mean 0.130883 -0.083200 -0.005931 -0.037465 57 | std 0.184213 0.195836 0.189383 0.194186 58 | TECH mean -0.064105 -0.134026 -0.097655 0.006192 59 | std 0.222548 0.227758 0.230823 0.233296 60 | YEARS mean 0.109149 -0.057188 -0.125526 -0.111568 61 | std 0.188263 0.182854 0.186182 0.185695 62 | constant mean 0.180477 0.286788 -1.506407 3.229457 63 | std 0.373614 0.362738 0.367813 0.383238 64 | 65 | 66 | 67 | http://webuser.bus.umich.edu/plenk/HB%20Conjoint%20Lenk%20DeSarbo%20Green%20Young%20MS%201996.pdf 68 | 69 | """ 70 | from theano import tensor as tt 71 | import pandas as pd 72 | import pymc3 as pm 73 | 74 | personal_characteristics = pd.read_csv("data/computer_buyers/personal_characteristics.csv") # (190, 6) 75 | personal_characteristics['constant'] = 1 76 | 77 | likelihood_to_buy = pd.read_csv("data/computer_buyers/likelihood_to_buy.csv") # (190, 20) 78 | 79 | design_matrix = pd.read_csv("data/computer_buyers/design_matrix.csv") 80 | design_matrix['constant'] = 1. 81 | 82 | N_PART_CHRC = personal_characteristics.shape[1] 83 | N_PARTICIPANTS = likelihood_to_buy.shape[0] 84 | N_COMP_CHRC = design_matrix.shape[1] 85 | N_PROFILES = likelihood_to_buy.shape[1] 86 | 87 | 88 | with pm.Model() as hierarchical_model: 89 | 90 | Sigma = pm.Normal("Sigma", 0, sd=10, shape=(N_PART_CHRC, N_COMP_CHRC)) # (7, 14) 91 | Beta = pm.Normal("Beta", tt.dot(personal_characteristics, Sigma), sd=1, shape=(N_PARTICIPANTS, N_COMP_CHRC)) # (190, 14) 92 | 93 | mean = tt.dot(Beta, design_matrix.T) # (190, 20) 94 | sd = pm.Uniform("Individual_variance", 1e-5, 1e2) 95 | y = pm.Normal("observations", mean, sd=sd, shape=(N_PARTICIPANTS, N_PROFILES), observed=likelihood_to_buy) # (190, 20) 96 | 97 | 98 | with hierarchical_model: 99 | hierarchical_trace = pm.sample(draws=2500, tune=1000, n_init=25000) 100 | 101 | 102 | df_mean = pd.DataFrame(hierarchical_trace.get_values("Sigma").mean(0), columns=design_matrix.columns, index=personal_characteristics.columns) 103 | df_std = pd.DataFrame(hierarchical_trace.get_values("Sigma").std(0), columns=design_matrix.columns, index=personal_characteristics.columns) 104 | 105 | df = pd.concat([df_mean, df_std], keys=['mean', 'std']).swaplevel().sort_index() 106 | -------------------------------------------------------------------------------- /data/computer_buyers/likelihood_to_buy.csv: -------------------------------------------------------------------------------- 1 | profile_1,profile_2,profile_3,profile_4,profile_5,profile_6,profile_7,profile_8,profile_9,profile_10,profile_11,profile_12,profile_13,profile_14,profile_15,profile_16,profile_17,profile_18,profile_19,profile_20 2 | 6,3,5,7,5,6,7,7,8,8,5,4,8,8,5,4,3,6,7,5 3 | 5,3,3,7,3,6,3,7,7,3,3,4,6,4,3,3,3,3,7,5 4 | 10,7,2,10,3,2,4,10,6,5,4,3,8,7,3,4,5,2,9,7 5 | 10,7,7,9,8,5,8,9,9,10,10,7,10,10,8,8,10,9,9,10 6 | 8,4,3,10,3,3,6,10,9,6,4,3,7,7,3,3,4,3,8,9 7 | 3,7,4,9,3,6,4,7,3,6,4,4,9,7,4,6,4,4,2,4 8 | 8,5,3,3,7,3,6,9,8,3,3,6,7,8,5,4,5,5,8,7 9 | 9,2,2,8,5,7,3,4,8,3,8,4,7,10,2,4,2,4,7,10 10 | 9,1,1,8,0,6,0,1,1,1,1,1,1,8,0,0,0,0,7,9 11 | 5,5,6,7,6,5,8,8,7,7,8,3,6,5,6,2,2,6,6,9 12 | 8,2,6,10,7,5,7,9,7,8,7,1,6,7,2,3,3,3,7,9 13 | 6,3,3,8,6,2,2,9,4,3,6,2,7,8,6,5,6,5,7,6 14 | 9,5,7,6,2,8,2,1,2,6,3,7,3,4,6,5,6,6,3,5 15 | 6,7,6,8,9,6,5,7,8,5,6,7,9,9,2,2,4,3,9,8 16 | 9,7,6,8,6,6,7,4,4,5,5,7,5,9,5,5,5,7,5,8 17 | 7,3,3,9,4,6,2,8,6,3,4,5,7,7,3,2,3,4,8,7 18 | 7,1,2,8,1,6,3,8,7,2,0,5,9,9,0,1,2,1,6,9 19 | 6,4,4,4,4,3,4,6,6,5,5,4,4,4,1,1,2,1,6,7 20 | 7,3,6,5,4,3,2,3,6,3,3,3,6,3,2,3,5,4,5,5 21 | 7,8,8,9,6,8,9,5,5,5,7,5,5,5,5,5,5,5,5,5 22 | 4,1,1,4,1,2,2,3,5,2,3,4,3,2,1,2,2,2,6,6 23 | 2,3,3,4,5,6,3,0,1,2,5,2,6,7,1,2,1,8,3,10 24 | 9,1,2,10,7,7,1,8,8,2,2,8,8,8,0,2,1,1,10,10 25 | 2,6,9,7,6,5,4,10,7,8,3,0,4,10,0,5,4,6,3,3 26 | 3,4,3,6,3,3,4,7,6,4,4,3,7,8,5,4,5,5,7,6 27 | 6,4,4,9,3,3,3,6,5,4,3,2,8,6,2,1,3,5,5,9 28 | 6,5,7,7,3,3,7,6,2,4,4,3,7,7,4,5,8,4,3,8 29 | 7,3,0,9,5,6,3,7,7,5,4,5,5,7,1,0,3,3,9,8 30 | 10,7,5,10,7,5,2,7,8,8,7,5,8,7,3,5,4,6,9,7 31 | 7,3,3,8,3,6,2,7,9,4,3,5,7,7,2,2,2,2,9,7 32 | 2,0,1,9,1,8,0,1,2,2,1,1,1,6,3,0,1,0,7,1 33 | 4,3,2,6,2,1,2,6,2,4,4,2,6,8,2,1,4,5,5,6 34 | 3,2,2,7,7,3,4,0,5,5,6,0,10,9,0,0,0,4,3,9 35 | 8,1,1,8,1,3,4,8,6,6,2,4,6,5,1,2,2,3,9,7 36 | 9,3,3,9,5,6,6,10,8,6,6,6,7,7,3,3,4,4,8,7 37 | 1,0,3,3,0,0,0,0,1,2,0,0,6,6,1,0,0,3,4,3 38 | 0,0,0,10,0,0,3,5,1,0,0,0,10,10,0,0,0,0,2,3 39 | 10,0,0,10,0,4,0,3,7,0,0,3,4,7,0,0,1,0,7,8 40 | 6,3,1,7,6,4,5,3,8,4,5,1,3,3,1,3,4,4,4,7 41 | 7,3,2,7,2,5,0,7,3,2,0,6,7,9,2,0,2,1,10,8 42 | 8,8,8,9,8,7,8,9,8,8,7,6,7,9,6,8,7,8,5,7 43 | 7,4,9,6,5,5,6,6,7,4,4,6,9,10,7,8,7,8,8,9 44 | 8,3,2,8,8,2,7,10,10,8,8,2,2,2,2,2,2,2,9,8 45 | 7,2,5,8,6,6,7,8,8,9,6,6,7,8,5,6,4,4,8,7 46 | 9,2,2,9,2,5,3,7,7,3,3,4,5,9,2,2,2,3,3,7 47 | 7,6,6,8,8,8,7,9,7,7,6,6,8,9,5,6,8,9,9,8 48 | 6,4,5,7,5,7,5,7,7,5,4,8,7,7,4,4,2,3,9,8 49 | 9,5,7,9,6,6,4,6,8,5,2,4,9,7,2,7,7,6,7,9 50 | 4,1,2,9,6,8,2,8,8,6,1,6,9,9,3,3,3,3,8,7 51 | 9,5,5,9,6,6,5,7,8,6,5,6,8,9,5,5,5,5,8,8 52 | 0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,5,0 53 | 4,1,1,9,0,9,0,8,9,0,0,3,9,8,0,0,0,0,7,5 54 | 8,0,0,6,0,0,0,0,0,0,0,1,4,0,0,0,0,0,5,0 55 | 9,4,5,7,4,5,7,6,4,5,3,6,8,7,5,4,8,4,5,6 56 | 5,6,6,7,6,2,3,6,5,4,7,2,7,7,5,3,4,7,2,7 57 | 5,5,4,5,10,5,4,6,5,4,10,10,8,8,8,7,10,6,10,10 58 | 3,6,2,2,3,2,3,2,9,10,5,3,4,9,5,2,4,3,3,3 59 | 3,3,1,9,4,8,0,10,10,1,1,0,9,10,3,5,3,6,8,9 60 | 7,3,4,9,1,4,5,6,3,3,3,6,8,10,0,3,2,4,8,8 61 | 5,1,4,9,2,2,3,3,2,9,2,2,7,9,2,3,6,2,2,2 62 | 7,3,0,10,2,4,2,10,8,3,7,7,10,10,6,2,0,2,10,8 63 | 7,1,2,6,4,4,3,6,7,4,3,2,5,8,2,2,4,4,3,8 64 | 8,3,4,5,4,2,1,2,4,3,1,4,7,4,1,1,4,1,6,2 65 | 7,5,7,6,7,5,7,8,7,9,5,5,6,9,5,6,6,8,7,7 66 | 10,3,0,7,4,7,2,7,10,4,2,0,8,9,1,1,0,0,3,4 67 | 7,0,0,0,0,0,0,0,0,0,0,7,0,9,0,0,0,0,0,8 68 | 7,3,2,9,7,3,3,9,8,6,6,2,3,3,2,2,3,3,9,8 69 | 3,2,3,6,2,3,3,3,4,4,2,2,8,5,2,3,3,3,4,9 70 | 8,2,3,9,7,2,3,5,7,2,2,1,1,4,1,1,1,1,9,10 71 | 7,2,5,6,3,7,2,7,4,4,2,3,6,8,2,2,3,3,3,3 72 | 8,3,4,9,5,0,3,2,2,2,3,1,10,4,3,3,9,5,6,8 73 | 2,6,8,10,3,2,3,4,3,9,8,4,3,3,3,1,1,3,10,5 74 | 6,4,6,6,3,6,2,6,8,6,6,4,8,8,4,4,4,2,7,8 75 | 8,3,4,9,2,4,6,9,8,5,2,6,8,9,4,3,7,4,5,9 76 | 4,4,4,10,4,10,4,10,9,4,3,9,9,9,3,4,4,4,8,9 77 | 10,5,5,9,6,4,0,8,8,1,2,8,10,10,0,2,4,5,8,9 78 | 2,2,2,2,2,2,2,2,2,2,2,2,8,7,2,2,2,2,5,7 79 | 6,8,8,9,9,5,6,8,9,8,5,6,7,8,6,6,4,6,9,5 80 | 5,2,5,6,2,3,4,4,4,4,2,0,4,8,4,3,5,6,7,6 81 | 6,4,6,6,3,6,3,9,7,5,2,4,6,6,3,3,3,3,6,4 82 | 5,2,2,0,10,10,10,3,2,2,4,1,5,5,5,5,4,4,7,10 83 | 5,5,4,7,6,6,3,9,5,2,4,4,7,8,4,3,4,5,10,5 84 | 8,3,1,8,6,3,6,10,9,8,7,6,7,6,5,4,6,5,8,8 85 | 1,8,8,1,8,9,1,10,10,8,1,1,1,10,1,1,1,7,9,1 86 | 9,3,2,6,2,2,3,7,3,6,3,0,0,1,0,0,0,2,0,5 87 | 10,6,4,8,5,6,4,6,6,6,6,6,6,5,4,4,5,5,8,6 88 | 2,2,3,6,4,7,3,8,9,2,1,9,10,9,0,1,2,3,9,8 89 | 3,1,1,9,3,6,5,9,6,7,3,6,8,10,3,4,4,4,7,10 90 | 0,0,5,7,5,0,8,9,0,0,0,5,6,6,0,5,5,0,0,6 91 | 10,0,2,9,3,6,2,10,8,1,2,4,8,6,3,3,3,3,6,9 92 | 3,3,4,8,1,1,5,9,5,4,1,1,2,6,1,1,1,2,3,8 93 | 6,5,7,6,6,6,0,0,0,0,4,0,0,0,0,0,0,0,10,0 94 | 7,4,4,7,5,4,6,7,8,4,3,4,5,5,4,4,4,4,8,7 95 | 9,2,1,8,2,8,2,9,7,1,2,7,9,9,0,1,2,1,9,9 96 | 3,2,6,8,2,3,3,2,2,1,2,3,3,2,1,2,5,3,4,2 97 | 1,0,0,0,0,0,0,5,0,0,0,0,5,5,0,0,0,0,0,0 98 | 5,1,2,7,0,5,0,8,8,2,0,9,10,10,2,2,1,0,10,9 99 | 9,5,6,8,6,8,6,8,7,9,9,7,8,7,3,3,6,7,6,7 100 | 6,3,3,2,3,5,5,3,8,1,1,2,3,6,2,1,1,1,3,2 101 | 7,3,3,7,4,4,4,6,6,7,5,2,4,4,4,3,3,3,5,5 102 | 8,6,8,8,5,5,5,8,4,8,3,4,8,8,3,7,7,4,5,7 103 | 3,4,5,8,4,8,5,8,8,4,5,8,9,8,4,4,4,4,7,9 104 | 8,3,5,9,9,9,3,10,10,9,6,5,8,8,8,7,7,7,8,9 105 | 5,5,3,10,6,8,4,7,7,6,7,7,8,10,7,6,6,6,9,8 106 | 6,3,1,8,5,7,1,7,3,3,3,6,9,7,4,3,3,3,9,7 107 | 10,0,0,10,0,10,0,10,10,0,0,10,10,10,0,0,0,0,10,10 108 | 7,3,3,8,7,4,3,3,7,8,6,1,1,3,1,0,0,3,8,7 109 | 8,5,7,9,6,6,7,8,5,9,5,3,8,10,1,2,6,5,6,7 110 | 3,3,4,7,3,2,2,6,5,7,3,2,2,1,1,1,1,1,5,7 111 | 10,0,2,5,0,10,4,5,5,2,1,3,10,8,2,1,1,0,5,5 112 | 6,4,5,8,4,6,5,9,7,5,4,5,6,5,2,6,5,5,7,9 113 | 0,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 114 | 6,2,5,6,5,3,2,3,7,4,3,3,7,8,2,5,4,3,5,8 115 | 7,2,1,3,1,4,1,6,4,2,3,6,8,10,2,3,3,1,6,9 116 | 0,1,1,2,2,2,1,5,6,3,4,3,8,9,3,4,5,5,6,7 117 | 9,3,2,8,0,4,2,5,6,0,0,4,6,9,0,0,0,3,7,7 118 | 2,1,3,2,1,2,2,2,3,3,1,3,4,3,2,1,2,2,3,3 119 | 4,4,8,5,4,5,5,8,8,4,4,3,4,4,7,8,4,4,4,8 120 | 2,3,2,7,2,7,2,4,5,3,1,6,6,4,1,4,3,6,10,7 121 | 3,3,3,6,3,4,4,6,6,4,3,3,6,6,3,1,3,2,6,5 122 | 8,7,6,8,6,8,7,9,10,7,2,4,8,10,4,5,6,6,9,8 123 | 6,4,6,6,6,5,6,6,8,6,7,7,8,9,6,5,6,6,7,8 124 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 125 | 6,3,4,9,2,4,3,6,7,2,2,4,5,6,1,3,3,2,3,5 126 | 4,1,1,8,2,2,2,2,1,1,1,2,8,7,4,2,2,2,8,9 127 | 2,3,4,9,2,2,5,10,3,5,3,1,8,8,2,4,6,6,5,6 128 | 2,3,2,6,4,5,7,5,6,4,5,2,8,9,3,3,7,8,8,6 129 | 5,4,4,6,3,6,6,5,7,4,2,4,7,10,3,3,3,3,4,5 130 | 3,2,5,2,5,2,4,2,8,5,3,3,8,8,2,5,6,5,4,4 131 | 10,6,5,8,5,3,3,7,8,6,4,2,3,5,3,4,5,3,6,7 132 | 6,2,2,10,10,1,7,10,8,8,8,0,2,3,0,0,2,2,8,8 133 | 3,0,0,5,0,3,0,7,3,2,3,0,2,3,0,1,0,1,8,3 134 | 5,2,5,7,5,4,5,5,4,4,4,4,5,5,5,5,5,5,5,6 135 | 7,6,6,10,4,7,6,6,10,8,6,6,6,6,5,5,5,5,9,10 136 | 7,7,7,8,7,6,6,6,8,7,6,7,7,8,6,8,8,8,6,7 137 | 1,0,2,2,1,2,0,1,3,1,1,1,3,1,1,1,1,2,2,4 138 | 6,2,4,6,2,3,4,6,5,3,3,7,6,5,3,4,4,4,7,9 139 | 6,6,6,8,7,7,6,6,6,6,6,6,6,6,6,6,6,6,8,7 140 | 2,5,4,8,1,2,6,5,5,7,2,1,6,4,1,1,5,3,5,4 141 | 8,6,6,8,7,6,5,6,6,8,5,6,7,8,6,6,6,6,7,7 142 | 8,3,6,6,2,6,3,6,7,5,3,4,7,7,1,3,4,3,4,6 143 | 8,6,5,3,3,7,3,8,6,3,2,3,3,5,1,2,2,2,4,7 144 | 7,8,5,8,3,3,1,1,7,2,1,3,8,4,2,1,1,2,3,2 145 | 0,8,2,4,4,2,3,8,8,5,3,1,3,8,5,1,8,4,6,5 146 | 7,3,3,6,3,4,5,10,6,4,4,5,9,10,3,8,9,9,8,10 147 | 6,7,2,9,6,2,4,9,8,3,5,4,6,7,4,2,8,5,8,8 148 | 3,3,3,3,2,4,3,4,8,4,4,4,4,5,3,3,3,3,7,3 149 | 9,6,9,9,7,6,3,8,8,8,7,6,8,9,6,8,9,8,8,9 150 | 2,2,2,2,7,2,1,1,1,4,1,2,3,2,2,2,3,2,4,2 151 | 7,5,6,7,3,5,5,4,6,6,6,7,9,9,5,6,6,6,6,7 152 | 8,0,0,8,0,6,0,8,10,0,0,4,6,7,0,0,0,0,9,7 153 | 8,6,5,8,6,7,6,8,8,7,6,6,7,6,6,4,5,4,7,6 154 | 8,2,0,9,9,3,5,4,2,6,8,0,6,8,7,1,3,4,5,1 155 | 4,3,5,7,6,3,4,3,1,4,3,2,8,7,3,4,4,4,2,7 156 | 6,0,0,10,0,8,0,8,8,0,0,8,8,10,0,0,0,0,8,9 157 | 5,4,8,8,4,6,7,5,5,6,6,5,8,7,2,7,8,7,4,8 158 | 2,1,2,10,0,0,0,10,3,0,0,0,3,3,2,2,3,0,1,8 159 | 4,1,3,3,3,1,2,5,4,2,2,2,9,9,2,4,4,4,4,10 160 | 6,3,4,6,5,4,2,8,7,6,4,6,7,9,4,3,5,7,8,6 161 | 6,1,1,7,1,7,2,7,9,2,1,4,7,10,7,1,3,2,7,6 162 | 6,3,8,8,8,8,8,8,6,8,8,4,9,8,4,8,10,7,6,7 163 | 8,3,3,10,8,5,4,10,10,8,7,1,4,6,1,1,4,3,9,7 164 | 6,5,6,7,5,3,4,7,8,4,4,8,6,7,4,4,4,4,7,9 165 | 9,5,4,9,6,3,4,5,5,7,6,5,9,6,6,5,6,4,8,6 166 | 8,7,5,7,6,4,7,9,8,8,6,5,7,7,6,4,5,6,6,6 167 | 0,0,0,6,1,1,1,8,2,0,1,2,7,10,3,2,2,1,4,6 168 | 2,0,0,4,0,0,3,7,4,0,0,0,8,5,0,0,4,3,7,7 169 | 3,1,5,7,4,3,6,6,2,5,4,5,5,6,3,5,5,4,5,5 170 | 3,1,7,9,1,1,4,6,2,8,2,2,9,8,2,1,2,6,7,8 171 | 7,3,5,8,3,4,6,4,3,6,4,4,7,9,3,3,4,5,5,10 172 | 4,4,1,8,4,7,7,9,9,7,4,3,6,9,2,4,5,5,8,8 173 | 2,2,4,4,2,2,7,8,2,5,0,1,8,9,0,3,7,6,1,6 174 | 7,2,6,8,8,5,6,7,8,3,8,1,5,9,4,4,7,6,7,7 175 | 3,2,2,9,3,2,4,7,6,5,3,3,4,5,4,3,4,4,7,9 176 | 2,2,9,10,2,3,7,8,4,7,4,0,7,9,3,8,7,10,2,10 177 | 7,2,1,8,1,0,0,0,0,8,0,7,10,0,0,0,5,0,5,0 178 | 4,2,5,4,5,6,7,8,7,9,8,8,2,5,2,3,4,5,6,6 179 | 8,5,5,8,6,7,4,7,5,7,5,5,7,8,4,6,6,6,8,6 180 | 6,2,3,7,1,5,3,6,7,1,2,1,6,7,4,4,8,5,5,8 181 | 6,4,6,8,3,4,6,7,7,3,3,5,7,8,3,2,5,6,7,9 182 | 8,4,6,9,6,8,8,9,8,8,7,8,9,9,6,5,6,6,9,10 183 | 4,5,2,4,6,5,5,7,7,7,3,4,7,7,5,2,6,7,10,7 184 | 8,2,2,9,4,6,2,7,9,1,5,4,3,5,3,0,3,1,9,8 185 | 4,3,4,7,5,6,6,7,7,4,4,5,8,8,3,5,6,6,6,8 186 | 8,4,2,9,5,4,1,8,7,4,1,1,9,10,1,1,1,1,8,9 187 | 3,2,4,8,2,3,2,9,3,3,2,3,9,10,1,2,2,3,4,9 188 | 9,2,1,9,1,3,5,9,9,6,5,7,5,8,4,5,5,4,6,6 189 | 10,6,6,10,6,10,7,10,10,6,5,8,10,10,4,6,6,7,10,10 190 | 6,3,2,8,3,1,6,8,9,6,5,4,4,6,3,2,3,3,8,7 191 | 5,0,0,5,0,2,0,7,3,0,0,1,1,5,2,0,0,0,6,2 192 | --------------------------------------------------------------------------------