├── README.md ├── chapter10 ├── 10-01-preprocessing-and-stats.py ├── 10-02-plot-product-per-user.py ├── 10-03-remove-outliers.py ├── 10-04-one-hot-encoding.py ├── 10-05-sklearn-kmeans.py ├── 10-06-sklearn-silhouette-score.py ├── 10-07-elbow-method.py ├── 10-08-keyword-freq-cluster-1.py ├── 10-09-keyword-freq-cluster-2.py ├── 10-10-cluster-stats.py ├── 10-11-agglomative-scipy.py ├── 10-13-agglomative-sklearn.py ├── chapter10.ipynb └── online_retail_utf.txt.zip ├── chapter11 ├── 11-1...11-3-preprocessing-step-by-step.py ├── 11-12-koreanwiki-w2v.py ├── 11-4-preprocessing-scikit.py ├── 11-5-logistic-regression.py ├── 11-6-lda.py ├── 11-6-topic-modeling.py ├── 11-7-part-of-speech-tagging.py ├── 11-8-named-entity-recognition.py └── korean_wiki │ └── README.md ├── chapter12 ├── 12-10-wals.py ├── 12-11-compute_GD.py ├── 12-12-traintest.py ├── 12-13-compute_als2.py ├── 12-14-error-plot.py ├── 12-15-recommend-by-lfa.py ├── 12-6-collect-movie-plot.py ├── 12-7-create-tokenizer.py ├── 12-8-recommend-by-contents.py ├── 12-9-als.py ├── README.md ├── chapter12.ipynb ├── ml-100k-plot.txt.zip └── ml2imdb.id.final ├── chapter13 ├── 13-1-kmeans.py ├── 13-13-deep-mnist.py ├── 13-2-eigenface.py ├── README.md ├── att_face.zip └── faces.csv ├── chapter8 ├── 8-1-vector-normalization-naive.py └── 8-2-vector-normalization-numpy.py └── chapter9 ├── chapter09_sklearn_example.ipynb ├── chapter09_tf_example.ipynb └── readme.md /README.md: -------------------------------------------------------------------------------- 1 | ## 처음 배우는 머신러닝 2 | 머신러닝 기초 지식에서 실전 예제와 문제 해결까지 3 | 4 | * 지은이 **김승연**, **정용주** 5 | * 펴낸이 **김태헌** 6 | * 펴낸곳 **한빛미디어(주)** 7 | 8 | Published by HANBIT Media, Inc. Printed in Korea Copyright © 2017 김승연, 정용주 & HANBIT Media, Inc. 이 책의 저작권은 김승연, 정용주와 한빛미디어 (주)에 있습니다. 저작권법에 의해 보호를 받는 저작물이므로 무단전재와 복제를 금합니다. 9 | -------------------------------------------------------------------------------- /chapter10/10-01-preprocessing-and-stats.py: -------------------------------------------------------------------------------- 1 | import time 2 | from scipy import stats 3 | 4 | 5 | # 데이터 구조 정의 6 | # 사용자 ID를 키로, 상품 코드의 셋을 밸류로 갖는 딕셔너리와 7 | # 상품 코드를 키로, 사용자 ID의 셋을 밸류로 갖는 딕셔너리 8 | user_product_dic = {} 9 | product_user_dic = {} 10 | 11 | # 상품 코드를 키로 가지고 상품명을 밸류로 갖는 딕셔너리 12 | # 군집화의 내용을 확인하는 단계에서 상품명을 사용합니다. 13 | product_id_name_dic = {} 14 | 15 | # 파일을 읽어 위에 정의한 데이터구조를 채웁니다. 16 | for line in open('online_retail_utf.txt'): 17 | 18 | # 데이터를 한 행씩 읽어서 필요한 항목을 저장합니다. 19 | line_items = line.strip().split('\t') 20 | user_code = line_items[6] 21 | product_id = line_items[1] 22 | product_name = line_items[2] 23 | 24 | # 사용자 ID가 없을 경우 무시합니다. 25 | if len(user_code) == 0: 26 | continue 27 | 28 | # 영국에서 구매한 사용자만 고려하므로, 국가가 united kingdom이 아닌 경우엔 무시합니다. 29 | country = line_items[7] 30 | if country != 'United Kingdom': 31 | continue 32 | 33 | # 연도 읽을 때 에러 처리. 파일 헤더를 무시합니다. 34 | try: 35 | invoice_year = time.strptime(line_items[4], '%m/%d/%y %H:%M').tm_year 36 | 37 | except ValueError: 38 | continue 39 | 40 | # 2011년에 일어난 구매가 아닌 것은 무시합니다. 41 | if invoice_year != 2011: 42 | continue 43 | 44 | # 읽은 정보로 데이터 구조를 채웁니다. 45 | # 상품 가짓수를 고려하므로 상품 코드를 셋으로 가지도록 하겠습니다. 46 | user_product_dic.setdefault(user_code, set()) 47 | user_product_dic[user_code].add(product_id) 48 | 49 | product_user_dic.setdefault(product_id, set()) 50 | product_user_dic[product_id].add(user_code) 51 | 52 | product_id_name_dic[product_id] = product_name 53 | 54 | # 데이터구조를 다 채웠으므로 각 사용자들이 구매한 상품 가짓수로 리스트를 만들어봅시다. 
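# Each value in user_product_dic is a set of product codes, so len(x) below is the
# number of distinct products per user, e.g. {'u1': {'p1', 'p2'}, 'u2': {'p3'}} -> [2, 1].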
55 | product_per_user_li = [len(x) for x in user_product_dic.values()] 56 | 57 | 58 | # 이 장에서 사용할 최종 사용자 수와 상품 가짓수를 출력해봅니다. 59 | print('# of users:', len(user_product_dic)) 60 | print('# of products:', len(product_user_dic)) 61 | 62 | # 각 사용자들이 구매한 상품 가짓수로 기초 통계량을 출력합니다. 63 | print(stats.describe(product_per_user_li)) 64 | -------------------------------------------------------------------------------- /chapter10/10-02-plot-product-per-user.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | import matplotlib.pyplot as plt 3 | # 사용자가 구매한 고유 상품 가짓수를 플롯해봅니다. 4 | plot_data_all = Counter(product_per_user_li) 5 | plot_data_x = list(plot_data_all.keys()) 6 | plot_data_y = list(plot_data_all.values()) 7 | plt.xlabel('고유 상품 가짓수') 8 | plt.ylabel('사용자 수') 9 | plt.scatter(plot_data_x, plot_data_y, marker='+') 10 | plt.show() 11 | -------------------------------------------------------------------------------- /chapter10/10-03-remove-outliers.py: -------------------------------------------------------------------------------- 1 | # 구매한 상품의 가짓수가 1인 사용자의 사용자 ID를 찾습니다. 2 | min_product_user_li =[k for k,v in user_product_dic.items() if len(v)==1] 3 | # 마찬가지로, 구매한 상품의 가짓수가 600개 이상인 사용자의 사용자 ID를 찾습니다. 4 | max_product_user_li =[k for k,v in user_product_dic.items() if len(v)>=600] 5 | print("# of users purchased one product:%d" % (len(min_product_user_li))) 6 | print("# of users purchased more than 600 product:%d" % (len(max_product_user_li))) 7 | # 찾아낸 사용자를 군집화에 사용할 user_product_dic에서 제외합니다. 8 | user_product_dic = {k:v for k,v in user_product_dic.items() if len(v)>1 and len(v)<600} 9 | print("# of left user:%d" % (len(user_product_dic))) 10 | # 남아 있는 사용자가 구매한 상품에도 0에서 시작하는 고유 ID를 부여합니다. 11 | # 데이터셋에서 제외된 사용자가 구매한 상품은 군집화에서 사용하지 않기 때문에 이러한 처리를 해야 합니다. 12 | id_product_dic = {} 13 | for product_set_li in user_product_dic.values(): 14 | for x in product_set_li: 15 | if x in id_product_dic: 16 | product_id = id_product_dic[x] 17 | else: 18 | id_product_dic.setdefault(x, len(id_product_dic)) 19 | print("# of left items:%d" % (len(id_product_dic))) 20 | -------------------------------------------------------------------------------- /chapter10/10-04-one-hot-encoding.py: -------------------------------------------------------------------------------- 1 | # 사용자 ID 참조를 위한 딕셔너리 2 | id_user_dic = {} 3 | 4 | # 군집화의 입력으로 사용할 리스트 5 | user_product_vec_li = [] 6 | 7 | # 군집화에서 사용할 총 고유 상품 가짓수. 즉, 원-핫 인코딩으로 변환할 피처의 가짓수 8 | all_product_count = len(id_product_dic) 9 | 10 | for user_code, product_per_user_set in user_product_dic.items(): 11 | # 고유 상품 가짓수를 길이로 하는 리스트 생성 12 | user_product_vec = [0] * all_product_count 13 | # id_user_dic의 길이를 이용하여 사용자 ID를 0부터 시작하는 user_id로 바꿉니다. 14 | id_user_dic[len(id_user_dic)] = user_code 15 | 16 | # 사용자가 구매한 상품 코드를 키로 하여 user_product_vec에서의 17 | # 해당 상품 코드의 상품 ID를 찾습니다. 그리고 값을 1로 세팅합니다. 18 | for product_name in product_per_user_set: 19 | user_product_vec[id_product_dic[product_name]] = 1 20 | 21 | # 한 사용자의 처리가 끝났으므로 이 사용자의 user_product_vec을 배열에 추가합니다. 22 | # 이때 배열의 인덱스는 새로 정의한 user_id가 됩니다. 23 | user_product_vec_li.append(user_product_vec) 24 | -------------------------------------------------------------------------------- /chapter10/10-05-sklearn-kmeans.py: -------------------------------------------------------------------------------- 1 | from sklearn.cluster import KMeans 2 | import random 3 | 4 | # 학습용과 평가용 데이터로 나누기 위해 사용자-상품 벡터를 셔플합니다. 
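# random.shuffle shuffles the list in place; calling random.seed() with a fixed value
# beforehand (an optional addition) makes the train/test split reproducible across runs.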
5 | random.shuffle(user_product_vec_li) 6 | 7 | # 학습용 데이터에 사용자 2500명을, 평가용 데이터에 나머지 사용자를 넣습니다. 8 | # 학습용 데이터에 있는 사용자 정보만을 가지고 클러스터를 만든 후 9 | # 평가용 데이터의 사용자가 어느 클러스터에 속하는지 알아봅니다. 10 | train_data = user_product_vec_li[:2500] 11 | test_data = user_product_vec_li[2500:] 12 | 13 | print("# of train data:% d, # of test_data: %d" % (len(train_data),len(test_data))) 14 | # 학습 데이터를 군집화하여 4개의 클러스터를 생성한 후 그 결과를 km_predict에 저장합니다. 15 | km_predict = KMeans(n_clusters=4, init='k-means++', n_init=10, max_iter=20).fit(train_data) 16 | 17 | # km_predict 의 predict 함수를 이용하여 평가 데이터가 전 단계에서 만든 4개의 클러스터 중 어느 곳에 18 | # 속하는지 살펴봅니다. 19 | km_predict_result = km_predict.predict(test_data) 20 | print(km_predict_result) 21 | -------------------------------------------------------------------------------- /chapter10/10-06-sklearn-silhouette-score.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from sklearn.metrics import silhouette_score 4 | test_data = np.array(user_product_vec_li) 5 | 6 | for k in range(2,9): 7 | km = KMeans(n_clusters=k).fit(test_data) 8 | print("score for %d clusters:%.3f" % (k, silhouette_score(test_data,km.labels_))) 9 | -------------------------------------------------------------------------------- /chapter10/10-07-elbow-method.py: -------------------------------------------------------------------------------- 1 | # 클러스터 수를 키로 하고 inertia를 값으로 하는 딕셔너리입니다. 2 | ssw_dic={} 3 | 4 | # 클러스터 수 K를 1부터 8까지 바꾸어가며 급내제곱합의 평균값을 계산하고, 5 | # K를 키로 지정하여 딕셔너리에 넣습니다. 6 | for k in range(1, 8): 7 | km= KMeans(n_clusters=k).fit(test_data) 8 | ssw_dic[k] = km.inertia_ 9 | print(km.inertia_) 10 | 11 | plot_data_x = list(ssw_dic.keys()) 12 | plot_data_y = list(ssw_dic.values()) 13 | plt.xlabel("# of clusters") 14 | plt.ylabel("within ss") 15 | plt.plot(plot_data_x, plot_data_y, linestyle="-", marker='o') 16 | plt.show() 17 | -------------------------------------------------------------------------------- /chapter10/10-08-keyword-freq-cluster-1.py: -------------------------------------------------------------------------------- 1 | def analyze_clusters_keywords(labels,product_id_name_dic,user_product_dic, id_user_dic): 2 | #각 클러스터의 아이디와, 해당 아이디의 클러스터 들어있는 유저 수를 출력합니다. 3 | print(Counter(labels)) 4 | cluster_item = {} 5 | 6 | for i in range(len(labels)): 7 | cluster_item.setdefault(labels[i], []) 8 | 9 | # 각 사용자의 임시 ID i에 대해 사용자 코드를 찾은 후 10 | # 그 사용자 코드와 연결된 구매상품의 ID를 참조한 후 11 | # 그 ID를 이용해 상품명을 찾아 12 | # 딕셔너리에 클러스터 ID를 키로, 상품명을 값으로 추가합니다. 13 | for x in user_product_dic[id_user_dic[i]]: 14 | cluster_item[labels[i]].extend([product_id_name_dic[x]]) 15 | 16 | for cluster_id, product_name in cluster_item.items(): 17 | 18 | # 각 클러스터안의 상품명을 join명령으로 합쳐 하나의 문자열로 만든 뒤 19 | # 스페이스 혹은 탭으로 스플릿하여 키워드로 분해합니다. 
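        # str.split() with no argument splits on any whitespace run, so spaces and tabs
        # inside the joined product-name string both act as keyword separators.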
20 | product_name_keyword = (" ").join(product_name).split() 21 | 22 | # 클러스터의 아이디와, 그 아이디를 가지는 클러스터에 속하는 유저들이 구매한 상품들의 상품명안에 23 | # 가장 자주 나타나는 단어 20개를 역순으로 출력합니다 24 | print("cluster_id:", cluster_id) 25 | print(Counter(product_name_keyword).most_common(20)) 26 | 27 | km=KMeans(n_clusters=2, n_init=10,max_iter=20) 28 | km.fit(test_data) 29 | analyze_clusters_keywords(km.labels_,product_id_name_dic,user_product_dic,id_user_dic) 30 | -------------------------------------------------------------------------------- /chapter10/10-09-keyword-freq-cluster-2.py: -------------------------------------------------------------------------------- 1 | def analyze_clusters_keywords_bigram(labels,product_id_name_dic,user_product_dic, id_user_dic): 2 | #각 클러스터의 아이디와, 해당 아이디의 클러스터 들어있는 유저 수를 출력합니다. 3 | print(Counter(labels)) 4 | cluster_item = {} 5 | 6 | for i in range(len(labels)): 7 | cluster_item.setdefault(labels[i], []) 8 | 9 | # 각 사용자의 임시 ID i에 대해 사용자 코드를 찾은 후 10 | # 그 사용자 코드와 연결된 구매상품의 ID를 참조한 후 11 | # 그 ID를 이용해 상품명을 찾아 12 | # 딕셔너리에 클러스터 ID를 키로, 상품명을 값으로 추가합니다. 13 | for x in user_product_dic[id_user_dic[i]]: 14 | cluster_item[labels[i]].extend([product_id_name_dic[x]]) 15 | 16 | for cluster_id, product_name in cluster_item.items(): 17 | # 각 클러스터 안의 상품명을 join 명령으로 합쳐 하나의 문자열로 만든 뒤 18 | # OF를 공백으로 리플레이스하고 19 | # 스페이스 혹은 탭으로 스플릿하여 키워드로 분해한 뒤 20 | # 연속되는 두 키워드를 합쳐서 하나의 키워드를 만듭니다. 21 | bigram = [] 22 | product_name_keyword = (' ').join(product_name).replace(' OF ', ' ').split() 23 | for i in range(0, len(product_name_keyword) - 1): 24 | bigram.append(' '.join(product_name_keyword[i:i + 2])) 25 | print('cluster_id:', cluster_id) 26 | print(Counter(bigram).most_common(20)) 27 | 28 | km=KMeans(n_clusters=2, n_init=10,max_iter=20) 29 | km.fit(user_product_vec_li) 30 | analyze_clusters_keywords_bigram(km.labels_,product_id_name_dic,user_product_dic,id_user_dic) 31 | -------------------------------------------------------------------------------- /chapter10/10-10-cluster-stats.py: -------------------------------------------------------------------------------- 1 | 2 | def analyze_clusters_product_count(label, user_product_dic, id_user_dic): 3 | product_len_dic = {} 4 | for i in range(0, len(label)): 5 | product_len_dic.setdefault(label[i], []) 6 | # 클러스터의 ID를 키로 하는 딕셔너리에 7 | # 그 클러스터에 속한 사용자가 구매한 고유 상품의 가짓수를 저장합니다. 8 | product_len_dic[label[i]].append(len(user_product_dic[id_user_dic[i]])) 9 | for k, v in product_len_dic.items(): 10 | print('cluster:', k) 11 | print(stats.describe(v)) 12 | 13 | analyze_clusters_product_count(km_plus.labels_, user_product_dic,id_user_dic) 14 | -------------------------------------------------------------------------------- /chapter10/10-11-agglomative-scipy.py: -------------------------------------------------------------------------------- 1 | 2 | from scipy.cluster.hierarchy import linkage 3 | from scipy.cluster.hierarchy import dendrogram 4 | 5 | # scipy의 집괴적 군집화 함수 6 | # 이번에는 두 클러스터에 속한 모든 샘플 간의 거리 평균을 7 | # 클러스터를 집괴하는 기준으로 합니다. 8 | # 거리 함수로는 유클리드 함수를 씁니다. 9 | 10 | row_clusters = linkage(test_data, method='complete',metric='euclidean') 11 | # 사용자 ID를 사용자 코드로 변환합니다. 12 | tmp_label=[] 13 | for i in range(len(id_user_dic)): 14 | tmp_label.append(id_user_dic[i]) 15 | 16 | # 플롯을 그립니다. 
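# Note: this snippet assumes matplotlib.pyplot is already imported as plt (as in 10-02);
# scipy's dendrogram draws onto the current matplotlib figure.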
17 | row_denr = dendrogram(row_clusters,labels=tmp_label) 18 | plt.tight_layout() 19 | plt.ylabel('euclid') 20 | plt.show() 21 | -------------------------------------------------------------------------------- /chapter10/10-13-agglomative-sklearn.py: -------------------------------------------------------------------------------- 1 | # 예제 10-13 2 | from sklearn.cluster import AgglomerativeClustering 3 | ward = AgglomerativeClustering(n_clusters=2, affinity='euclidean',linkage='ward') 4 | ward.fit(test_data) 5 | 6 | # 예제 10-14 7 | analyze_clusters_keywords_bigram(ward.labels_,product_id_name_dic,user_product_dic,id_user_dic) 8 | -------------------------------------------------------------------------------- /chapter10/online_retail_utf.txt.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/your-first-ml-book/Examples/42d4de60f27b47fc69f02edf6e02840cb1701386/chapter10/online_retail_utf.txt.zip -------------------------------------------------------------------------------- /chapter11/11-1...11-3-preprocessing-step-by-step.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # 예제 11-1 4 | # 단어집을 처리합니다. 5 | vocabulary = {} # 딕셔너리를 선언합니다. 6 | with open('SMSSpamCollection') as file_handle: # 파일을 엽니다. 7 | for line in file_handle: # 파일을 한 줄씩 읽습니다 8 | splits = line.split() # 한 줄을 빈 칸으로 쪼개서 리스트로 만듭니다. 9 | label = splits[0] # 맨 앞의 단어는 레이블이니까 따로 둡니다. 10 | text = splits[1:] 11 | 12 | # 전체 내용을 단어 단위로 살펴보고 13 | # 사전에 해당 단어가 없으면 추가 후 고유번호를 붙입니다. 14 | # 그리고 그 매핑을 vocabulary에 저장합니다({단어 -> 고유ID}). 15 | for word in text: 16 | lower = word.lower() 17 | if not lower in vocabulary: 18 | vocabulary[lower] = len(vocabulary) 19 | 20 | # 단어집의 내용을 출력합니다. 21 | print(vocabulary) 22 | 23 | # 예제 11-2 24 | # 각 문서의 피처 벡터를 뽑아서 features 리스트에 넣습니다. 25 | features = [] 26 | with open('SMSSpamCollection') as file_handle: 27 | for line in file_handle: # 파일을 한 줄씩 읽습니다. 28 | splits = line.split() 29 | feature = np.zeros(len(vocabulary)) # 0으로 채워진 numpy 벡터를 만듭니다 30 | text = splits[1:] 31 | for word in text: 32 | lower = word.lower() 33 | # vocabulary에 따라 각 피처가 몇 번 나왔는지 개수를 셉니다 34 | feature[vocabulary[lower]] += 1 35 | 36 | # 단어 빈도 피처이므로 문서에서 나온 총 단어 수로 전체 벡터를 나누어 피처를 만듭니다. 37 | feature = feature / sum(feature) 38 | features.append(feature) 39 | print(features) 40 | 41 | # 예제 11-3 42 | # 레이블을 처리합니다. 43 | labels = [] 44 | with open('SMSSpamCollection') as file_handle: 45 | for line in file_handle: # 파일을 한 줄씩 읽습니다 46 | splits = line.split() 47 | label = splits[0] 48 | if label == 'spam': # 맨 앞 단어(label)가 spam이면 1, 아니면 0을 추가합니다. 49 | labels.append(1) 50 | else: 51 | labels.append(0) 52 | -------------------------------------------------------------------------------- /chapter11/11-12-koreanwiki-w2v.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import time 3 | import glob 4 | import unicodedata 5 | from konlpy.tag import Mecab 6 | from gensim.models import Word2Vec 7 | 8 | # 모델의 파라미터들입니다. 9 | WINDOW=5 10 | EMBEDDING_SIZE=200 11 | BATCH_SIZE = 10000 12 | ITER = 10 13 | 14 | def read_text(fin): 15 | # 전처리된 위키백과 파일을 읽어 들입니다. 16 | corpus_li = [] 17 | mecab = Mecab(dicpath='/opt/local/lib/mecab/dic/mecab-ko-dic') 18 | for line in open(fin): 19 | # 깨지는 글자를 처리하기 위해 unicodedata.normalize 함수를 이용해 20 | # NFKC로변환합니다. 21 | line = unicodedata.normalize('NFKC', line) 22 | try: 23 | # 첫 글자가 숫자로 시작하는 문장을 말뭉치에 추가합니다. 
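            # int() raises ValueError when the first character is not a digit,
            # which sends non-numeric lines to the except branch below.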
24 | _ = int(line[0]) 25 | corpus_li.append(' '.join(mecab.nouns(line)) + '\n') 26 | 27 | except ValueError: 28 | # 첫 글자가 한글로 시작하는 문장을 말뭉치에 추가합니다. 29 | if ord(line[0]) >= ord('가') and ord(line[0]) <= ord('힇'): 30 | corpus_li.append(' '.join(mecab.nouns(line))+'\n') 31 | else: 32 | pass 33 | print('# of lines in corpus',len(corpus_li)) 34 | return(corpus_li) 35 | 36 | def train_word2vec(corpus_li, fout_model): 37 | # read_text에서 생성한 말뭉치를 이용해 word2vec을 학습시킵니다. 38 | model = Word2Vec(corpus_li, sg=1, size=EMBEDDING_SIZE, window=WINDOW, min_count=5, workers=3, batch_words=BATCH_SIZE, iter=ITER) 39 | model.init_sims(replace=True) #clean up memory 40 | model.save(fout_model) 41 | return(model) 42 | 43 | # 전처리된 파일을 한번에 읽어 들이기 위한 정규식 44 | input_pattern = '파일위치/korean_wiki/kowiki-latest-pages-articles.xml-88.txt' 45 | fin_li = glob.glob(input_pattern) 46 | 47 | for fin in fin_li: 48 | corpus_li = read_text(fin) 49 | 50 | # 모델학습 51 | model = train_word2vec(corpus_li, '파일위치/korean_wiki/test_model.txt') 52 | print(model.most_similar('프랑스', topn=20)) 53 | print(model.most_similar(positive=['한국','파리'], negative=['서울'])) 54 | -------------------------------------------------------------------------------- /chapter11/11-4-preprocessing-scikit.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from sklearn.feature_extraction.text import CountVectorizer 3 | from sklearn.feature_extraction.text import TfidfTransformer 4 | 5 | spam_header = 'spam\t' 6 | no_spam_header = 'ham\t' 7 | documents = [] 8 | labels = [] 9 | 10 | with open('SMSSpamCollection') as file_handle: 11 | for line in file_handle: 12 | # 각 줄에서 레이블 부분만 떼어내고 나머지를 documents에 넣습니다. 13 | if line.startswith(spam_header): 14 | labels.append(1) 15 | documents.append(line[len(spam_header):]) 16 | elif line.startswith(no_spam_header): 17 | labels.append(0) 18 | documents.append(line[len(no_spam_header):]) 19 | 20 | vectorizer = CountVectorizer() # 단어 횟수 피처를 만드는 클래스입니다. 21 | term_counts = vectorizer.fit_transform(documents) # 문서에서 단어 횟수를 셉니다. 22 | vocabulary = vectorizer.get_feature_names() 23 | 24 | # 단어 횟수 피처에서 단어 빈도 피처를 만드는 클래스입니다. 25 | # tf-idf에서 idf를 생성하지 않으면 단어 빈도(term frequency)가 만들어집니다. 26 | tf_transformer = TfidfTransformer(use_idf=False).fit(term_counts) 27 | features = tf_transformer.transform(term_counts) 28 | 29 | # 처리된 파일을 저장합니다. 앞으로의 예제에서 사용될 예정입니다. 30 | with open('processed.pickle', 'wb') as file_handle: 31 | pickle.dump((vocabulary, features, labels), file_handle) 32 | -------------------------------------------------------------------------------- /chapter11/11-5-logistic-regression.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | from sklearn.linear_model import LogisticRegression 3 | 4 | with open('processed.pickle', 'rb') as file_handle: 5 | vocabulary, features, labels = pickle.load(file_handle) 6 | 7 | # 학습-평가 데이터 나누기 8 | # 처음 50%를 학습으로 사용하고 나머지를 평가로 사용합니다. 
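# A plain head/tail split assumes the file order is uncorrelated with the label;
# sklearn.model_selection.train_test_split with shuffling is a common alternative.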
9 | total_number = len(labels) 10 | middle_index = total_number//2 11 | train_features = features[:middle_index,:] 12 | train_labels = labels[:middle_index] 13 | test_features = features[middle_index:,:] 14 | test_labels = labels[middle_index:] 15 | 16 | classifier = LogisticRegression() 17 | classifier.fit(train_features, train_labels) 18 | print('train accuracy: %4.4f' % classifier.score(train_features, train_labels)) 19 | print('test accuracy: %4.4f' % classifier.score(test_features, test_labels)) 20 | 21 | # 어떤 항목이 판별에 영향을 많이 줬는지 찾아보기 22 | weights = classifier.coef_[0, :] 23 | pairs = [] 24 | for index, value in enumerate(weights): 25 | pairs.append( (abs(value), vocabulary[index]) ) 26 | pairs.sort(key=lambda x: x[0], reverse=True) 27 | for pair in pairs[:20]: 28 | print('score %4.4f word: %s' % pair) 29 | -------------------------------------------------------------------------------- /chapter11/11-6-lda.py: -------------------------------------------------------------------------------- 1 | from sklearn.decomposition import LatentDirichletAllocation 2 | from sklearn.feature_extraction.text import CountVectorizer 3 | from sklearn.feature_extraction.text import TfidfTransformer 4 | 5 | spam_header = 'spam\t' 6 | no_spam_header = 'ham\t' 7 | documents = [] 8 | 9 | # 위와는 다르게 레이블을 만들 필요는 없습니다. 단순히 문서만을 추출합니다. 10 | with open('SMSSpamCollection') as file_handle: 11 | for line in file_handle: 12 | if line.startswith(spam_header): 13 | documents.append(line[len(spam_header):]) 14 | elif line.startswith(no_spam_header): 15 | documents.append(line[len(no_spam_header):]) 16 | 17 | # LDA는 단어 빈도 피쳐가 아닌 단어가 나온 갯수가 잘 동작하기 때문에 18 | # CountVectorizer를 사용합니다. 또한 토픽 모델에 도움이 되지 않는 19 | # 단어들(stop_words)을 자동으로 제거합니다. 20 | vectorizer = CountVectorizer(stop_words='english', max_features=2000) 21 | term_counts = vectorizer.fit_transform(documents) 22 | vocabulary = vectorizer.get_feature_names() 23 | 24 | # 토픽 모델을 학습합니다. 25 | topic_model = LatentDirichletAllocation(n_topics=10) 26 | topic_model.fit(term_counts) 27 | 28 | # 학습된 토픽들을 하나씩 출력합니다. 29 | topics = topic_model.components_ 30 | for topic_id, weights in enumerate(topics): 31 | print('topic %d' % topic_id, end=': ') 32 | pairs = [] 33 | for term_id, value in enumerate(weights): 34 | pairs.append( (abs(value), vocabulary[term_id]) ) 35 | pairs.sort(key=lambda x: x[0], reverse=True) 36 | for pair in pairs[:10]: 37 | print(pair[1], end=',') 38 | print() 39 | -------------------------------------------------------------------------------- /chapter11/11-6-topic-modeling.py: -------------------------------------------------------------------------------- 1 | from sklearn.decomposition import LatentDirichletAllocation 2 | from sklearn.feature_extraction.text import CountVectorizer 3 | from sklearn.feature_extraction.text import TfidfTransformer 4 | 5 | spam_header = 'spam\t' 6 | no_spam_header = 'ham\t' 7 | documents = [] 8 | 9 | # 위와는 다르게 레이블을 만들 필요는 없습니다. 단순히 문서만을 추출합니다. 10 | with open('SMSSpamCollection') as file_handle: 11 | for line in file_handle: 12 | if line.startswith(spam_header): 13 | documents.append(line[len(spam_header):]) 14 | elif line.startswith(no_spam_header): 15 | documents.append(line[len(no_spam_header):]) 16 | 17 | # LDA는 단어 빈도 피처보다 개수 피처가 잘 동작하기 때문에 18 | # CountVectorizer를 사용합니다. 또한 토픽 모델에 도움이 되지 않는 19 | # 단어(stop_words)를 자동으로 제거합니다. 20 | vectorizer = CountVectorizer(stop_words='english', max_features=2000) 21 | term_counts = vectorizer.fit_transform(documents) 22 | vocabulary = vectorizer.get_feature_names() 23 | 24 | # 토픽 모델을 학습합니다. 
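# Note: the n_topics argument was renamed n_components in scikit-learn 0.19 and removed
# in 0.21; on newer versions use LatentDirichletAllocation(n_components=10).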
25 | topic_model = LatentDirichletAllocation(n_topics=10) 26 | topic_model.fit(term_counts) 27 | 28 | # 학습된 토픽들을 하나씩 출력합니다. 29 | topics = topic_model.components_ 30 | for topic_id, weights in enumerate(topics): 31 | print('topic %d' % topic_id, end=': ') 32 | pairs = [] 33 | for term_id, value in enumerate(weights): 34 | pairs.append( (abs(value), vocabulary[term_id]) ) 35 | pairs.sort(key=lambda x: x[0], reverse=True) 36 | for pair in pairs[:10]: 37 | print(pair[1], end=',') 38 | print() 39 | -------------------------------------------------------------------------------- /chapter11/11-7-part-of-speech-tagging.py: -------------------------------------------------------------------------------- 1 | from nltk.tag import StanfordPOSTagger 2 | from nltk.tokenize import word_tokenize 3 | 4 | STANFORD_POS_MODEL_PATH = '압축을 푼 장소/models/english-bidirectional-distsim.tagger' 5 | STANFORD_POS_JAR_PATH = '압축을 푼 장소/stanford-postagger-3.6.0.jar' 6 | 7 | pos_tagger = StanfordPOSTagger(STANFORD_POS_MODEL_PATH, STANFORD_POS_JAR_PATH) 8 | 9 | # 임의로 만들어낸 예제입니다. 이 부분을 원하는 문장으로 바꿔서 실습하세요. 10 | text = 'One day in November 2016, the two authors of this book, Seungyeon and Youngjoo, had a coffee at Red Rock cafe, which is a very popular place in Mountain View.' 11 | 12 | tokens = word_tokenize(text) 13 | print(tokens) # 쪼개진 토큰을 출력합니다. 14 | print() 15 | print(pos_tagger.tag(tokens)) # 품사 태깅을 하고 그 결과를 출력합니다. 16 | 17 | # 동사와 명사만 뽑아봅시다. 18 | noun_and_verbs = [] 19 | for token in pos_tagger.tag(tokens): 20 | if token[1].startswith('V') or token[1].startswith('N'): 21 | noun_and_verbs.append(token[0]) 22 | print(', '.join(noun_and_verbs)) 23 | -------------------------------------------------------------------------------- /chapter11/11-8-named-entity-recognition.py: -------------------------------------------------------------------------------- 1 | from nltk.tag import StanfordNERTagger 2 | from nltk.tokenize import word_tokenize 3 | 4 | STANFORD_NER_CLASSIFER_PATH = '압축을 푼 장소/classifiers/english.muc.7class.distsim.crf.ser.gz' 5 | STANFORD_NER_JAR_PATH = '압축을 푼 장소/stanford-ner-3.6.0.jar' 6 | 7 | ner_tagger = StanfordNERTagger(STANFORD_NER_CLASSIFER_PATH, STANFORD_NER_JAR_PATH) 8 | 9 | # 임의로 만든 예제입니다. 이 부분을 원하는 문장으로 바꿔서 실습하세요. 10 | text = 'One day in November 2016, the two authors of this book, Seungyeon and Youngjoo, had a coffee at Red Rock cafe, which is a very popular place in Mountain View.' 11 | 12 | tokens = word_tokenize(text) 13 | print(ner_tagger.tag(tokens)) 14 | 15 | # 장소에 해당하는 단어만 출력합니다. 16 | all_locations = [] 17 | for token in ner_tagger.tag(tokens): 18 | if token[1] == 'LOCATION': 19 | all_locations.append(token[0]) 20 | print(', '.join(all_locations)) 21 | -------------------------------------------------------------------------------- /chapter11/korean_wiki/README.md: -------------------------------------------------------------------------------- 1 | 한국어 위키 파일입니다. 2 | * **processed_kowiki.zip**: wp2txt로 xml태그를 제거한 한국어 위키 데이터 (95파일). 3 | https://drive.google.com/file/d/0B1b9GHSR8kLyZTQ5Q1VJWlZkWTg/view?usp=sharing 4 | * **pos_tagged_kowiki_noun.zip**: 위의 파일들을 mecab-ko로 품사태깅한 후 명사만 추출한 파일 (95파일). 
5 | https://drive.google.com/file/d/0B1b9GHSR8kLyLVIyc1Qxa2RrWXM/view?usp=sharing
6 | 
--------------------------------------------------------------------------------
/chapter12/12-10-wals.py:
--------------------------------------------------------------------------------
 1 | W = R > 0.0
 2 | W[W == True] = 1
 3 | W[W == False] = 0
 4 | W = W.astype(np.float64, copy=False)
 5 | 
 6 | def compute_wALS(R, W, n_iter, lambda_, k):
 7 |     m, n = R.shape
 8 |     X = np.random.rand(m, k)
 9 |     Y = np.random.rand(k, n)
10 |     weighted_errors = []
11 | 
12 |     # Unlike [Example 12-9], the weight matrix W enters the computation.
13 |     for ii in range(n_iter):
14 |         # Update X and Y using each user's and each movie's weight vector.
15 |         for u, Wu in enumerate(W):
16 |             X[u, :] = np.linalg.solve(np.dot(Y, np.dot(np.diag(Wu), Y.T)) + lambda_ * np.eye(k), np.dot(Y, np.dot(np.diag(Wu), R[u, :].T))).T
17 |         for i, Wi in enumerate(W.T):
18 |             Y[:, i] = np.linalg.solve(np.dot(X.T, np.dot(np.diag(Wi), X)) + lambda_ * np.eye(k), np.dot(X.T, np.dot(np.diag(Wi), R[:, i])))
19 | 
20 |         # Pass the weight matrix to mean_squared_error as sample_weight.
21 |         weighted_errors.append(mean_squared_error(R, np.dot(X, Y), sample_weight=W))
22 |         if ii % 10 == 0:
23 |             print('iteration %d is completed' % (ii))
24 | 
25 |     R_hat = np.dot(X, Y)
26 |     print('Error of rated movies: %.5f' % (mean_squared_error(R, np.dot(X, Y), sample_weight=W)))
27 |     return(R_hat, weighted_errors)
--------------------------------------------------------------------------------
/chapter12/12-11-compute_GD.py:
--------------------------------------------------------------------------------
 1 | def compute_GD(R, n_iter, lambda_, learning_rate, k):
 2 |     m, n = R.shape
 3 |     errors = []
 4 | 
 5 |     X = np.random.rand(m, k)
 6 |     Y = np.random.rand(k, n)
 7 | 
 8 |     # Repeat the updates for the requested number of iterations.
 9 |     for ii in range(n_iter):
10 |         for u in range(m):
11 |             for i in range(n):
12 |                 if R[u, i] > 0:
13 |                     # The new update rule, computed entry by entry for each user and item.
14 |                     e_ui = R[u, i] - np.dot(X[u, :], Y[:, i])
15 | 
16 |                     X[u, :] = X[u, :] + learning_rate * (e_ui * Y[:, i] - lambda_ * X[u, :])
17 |                     Y[:, i] = Y[:, i] + learning_rate * (e_ui * X[u, :] - lambda_ * Y[:, i])
18 | 
19 |         errors.append(mean_squared_error(R, np.dot(X, Y)))
20 | 
21 |         if ii % 10 == 0:
22 |             print('iteration %d is completed' % (ii))
23 | 
24 |     R_hat = np.dot(X, Y)
25 |     print('Error of rated movies: %.5f' % (mean_squared_error(R, R_hat)))
26 | 
27 |     return(R_hat, errors)
28 | 
29 | 
--------------------------------------------------------------------------------
/chapter12/12-12-traintest.py:
--------------------------------------------------------------------------------
 1 | # Example 12-12
 2 | def train_test_split(R, n_test):
 3 |     train = R.copy()
 4 |     # Create a test rating matrix with every entry initialized to zero.
 5 |     test = np.zeros(R.shape)
 6 | 
 7 |     for user in range(R.shape[0]):
 8 |         # For each user, randomly pick n_test nonzero entries
 9 |         # (ratings that user actually gave) and remember their indices.
10 |         test_index = np.random.choice(R[user, :].nonzero()[0], size=n_test, replace=False)
11 | 
12 |         # Zero out the chosen ratings in the training matrix.
13 |         train[user, test_index] = 0
14 | 
15 |         # Copy the user's actual ratings at those indices into the test matrix.
16 |         test[user, test_index] = R[user, test_index]
17 |     return(train, test)
18 | 
19 | # Example 12-13
20 | def get_test_mse(true, pred):
21 |     # Compute the error using only the nonzero entries of the train/test split.
22 |     # Here true is the held-out test matrix and pred is the predicted (reconstructed) matrix.
23 |     # Extract only the scores at the indices where the test matrix is nonzero.
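    # numpy's nonzero() returns the (row, column) index arrays of the nonzero entries;
    # e.g. for true = np.array([[0, 5], [3, 0]]), true[true.nonzero()] is array([5, 3]).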
24 |     pred = pred[true.nonzero()].flatten()
25 |     true = true[true.nonzero()].flatten()
26 |     return mean_squared_error(true, pred)
--------------------------------------------------------------------------------
/chapter12/12-13-compute_als2.py:
--------------------------------------------------------------------------------
 1 | from sklearn.metrics import mean_squared_error
 2 | import numpy as np
 3 | 
 4 | def compute_ALS2(R, test, n_iter, lambda_, k):
 5 |     '''Create a random user-factor matrix X and a random movie-factor matrix Y,
 6 |     approximate the utility matrix R with alternating least squares, then evaluate using the test matrix.
 7 |     R (ndarray): utility matrix
 8 |     test: evaluation matrix
 9 |     lambda_ (float): regularization parameter
10 |     n_iter (int): number of updates of X and Y
11 |     '''
12 |     m, n = R.shape
13 |     X = np.random.rand(m, k)
14 |     Y = np.random.rand(k, n)
15 |     errors = []
16 |     # Store the error computed after every update.
17 |     for i in range(0, n_iter):
18 |         X = np.linalg.solve(np.dot(Y, Y.T) + lambda_ * np.eye(k), np.dot(Y, R.T)).T
19 |         Y = np.linalg.solve(np.dot(X.T, X) + lambda_ * np.eye(k), np.dot(X.T, R))
20 |         errors.append(get_test_mse(test, np.dot(X, Y)))
21 | 
22 |         if i % 10 == 0:
23 |             print('iteration %d is completed' % (i))
24 | 
25 |     R_hat = np.dot(X, Y)
26 |     print('Error of rated movies: %.5f' % (get_test_mse(test, R_hat)))
27 |     return(R_hat, errors)
--------------------------------------------------------------------------------
/chapter12/12-14-error-plot.py:
--------------------------------------------------------------------------------
 1 | from matplotlib import pyplot as plt
 2 | 
 3 | x = range(0, 20)  # x coordinates: one point per iteration (20 iterations)
 4 | plt.xlim(0, 20) # set the displayed x range to 0-20 (20 is the number of iterations)
 5 | plt.ylim(0, 15) # set the displayed y range to 0-15
 6 | plt.xlabel('iteration')
 7 | plt.ylabel('MSE')
 8 | plt.xticks(x, range(0, 20)) # label the x axis with the integers 0 through 19
 9 | 
10 | # Draw the test error as a dashed line
11 | test_plot, = plt.plot(x, test_errors, '--', label='test_error')
12 | # Draw the training error as a solid line
13 | train_plot, = plt.plot(x, train_errors, label='train_error')
14 | 
15 | plt.legend(handles=[train_plot, test_plot]) # create the legend
16 | plt.show()
--------------------------------------------------------------------------------
/chapter12/12-15-recommend-by-lfa.py:
--------------------------------------------------------------------------------
 1 | # Subtract the smallest entry from the whole matrix so the minimum of the approximation becomes 0.
 2 | R_hat -= np.min(R_hat)
 3 | 
 4 | # To make the largest entry of the approximation 5, multiply by 5 divided by the largest prediction (np.max(R_hat)).
 5 | # For example, if the largest prediction is 3, multiplying by 5/3 turns that 3 into a 5.
 6 | # Multiply every entry of the prediction matrix by this factor.
 7 | R_hat *= float(5) / np.max(R_hat)
 8 | 
 9 | def recommend_by_user(user):
10 |     # Take a user ID as input and recommend movies that user has not seen.
11 |     user_index = user - 1
12 |     user_seen_movies = sorted(list(enumerate(R_hat[user_index])),
13 |                               key=lambda x: x[1], reverse=True)
14 |     recommended = 1
15 |     print("-----recommendation for user %d------" % (user))
16 |     for movie_info in user_seen_movies:
17 |         if W[user_index][movie_info[0]] == 0:
18 |             movie_title = movie_info_dic[str(movie_info[0] + 1)]
19 |             movie_score = movie_info[1]
20 |             print("rank %d recommendation:%s(%.3f)" % (recommended, movie_title[0], movie_score))
21 |             recommended += 1
22 |             if recommended == 6:
23 |                 break
--------------------------------------------------------------------------------
/chapter12/12-6-collect-movie-plot.py:
--------------------------------------------------------------------------------
 1 | import requests
 2 | import json
 3 | from requests.exceptions import MissingSchema
 4 | from sklearn.feature_extraction.text import TfidfVectorizer
 5 | result_lines = []
 6 | movie_plot_li = []
 7 | movie_title_li = []
 8 | 
 9 | # Fetch movies with IDs 1 to 100, using movie_info_li, the MovieLens movie info list built in [Example 12-1].
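# In the MovieLens 100K u.item file the leading movie id column is stripped by read_data
# in [Example 12-1], so movie_info[3] below is the IMDb URL field of each row.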
10 | for movie_info in movie_info_li[:100]:
11 |     movie_url = movie_info[3]
12 | 
13 |     if movie_url == '':
14 |         # Exception handling for MovieLens rows without a URL: set the title and plot to empty strings.
15 |         print(movie_info)
16 |         movie_title_li.append('')
17 |         movie_plot_li.append('')
18 | 
19 |     else:
20 |         response = requests.get(movie_url)
21 |         imdb_id = response.url.split('/')[-2]
22 |         # print(imdb_id)
23 |         if imdb_id == 'www.imdb.com':
24 |             print('no imdb id of: %s' % (movie_info[0]))
25 |             # Exception handling for movies without an IMDB ID
26 |             movie_title = ''
27 |             movie_plot = ''
28 | 
29 |         else:
30 |             try:
31 |                 movie_response = requests.get('http://www.omdbapi.com/?i=' + imdb_id + '&plot=full&r=json')
32 | 
33 |             except MissingSchema:
34 |                 # Exception handling for malformed OMDB API URLs
35 |                 print('wrong URL: %s' % (movie_info[0]))
36 |                 movie_title = ''
37 |                 movie_plot = ''
38 | 
39 |             try:
40 |                 movie_title = json.loads(movie_response.text)['Title']
41 |                 movie_plot = json.loads(movie_response.text)['Plot']
42 |                 #print(movie_response.text)
43 |             except KeyError:
44 |                 # Exception handling for incomplete API results
45 |                 print('incomplete json: %s' % (movie_info[0]))
46 |                 movie_title = ''
47 |                 movie_plot = ''
48 | 
49 |         result_lines.append("%s\t%s\n" % (movie_title, movie_plot))
50 |         movie_plot_li.append(movie_plot)
51 |         movie_title_li.append(movie_title)
52 | 
53 | print('download complete: %d movie data downloaded' % (len(movie_title_li)))
54 | # Words that appear in fewer than three documents (min_df=3) are excluded from the TF-IDF computation; the stop word list is 'english'.
55 | vectorizer = TfidfVectorizer(min_df=3, stop_words='english')
56 | X = vectorizer.fit_transform(movie_plot_li)
57 | 
58 | # The list of keywords after the TF-IDF transform.
59 | # The keyword for column 0 of X is feature_names[0].
60 | feature_names = vectorizer.get_feature_names()
61 | 
--------------------------------------------------------------------------------
/chapter12/12-7-create-tokenizer.py:
--------------------------------------------------------------------------------
 1 | from nltk.tokenize import RegexpTokenizer
 2 | from nltk.stem import WordNetLemmatizer
 3 | 
 4 | class LemmaTokenizer(object):
 5 |     def __init__(self):
 6 |         self.tokenizer = RegexpTokenizer(r'(?u)\w\w+')
 7 |         # Pick up keywords the same way TfidfVectorizer does.
 8 |         self.wnl = WordNetLemmatizer()
 9 |     def __call__(self, doc):
10 |         return([self.wnl.lemmatize(t) for t in self.tokenizer.tokenize(doc)])
11 | 
12 | # Pass the tokenizer defined above into scikit-learn.
13 | vectorizer2 = TfidfVectorizer(min_df=1, tokenizer=LemmaTokenizer(), stop_words='english')
14 | X = vectorizer2.fit_transform(movie_plot_li)
15 | feature_names = vectorizer2.get_feature_names()
--------------------------------------------------------------------------------
/chapter12/12-8-recommend-by-contents.py:
--------------------------------------------------------------------------------
 1 | from sklearn.metrics.pairwise import cosine_similarity
 2 | 
 3 | movie_sim = cosine_similarity(X)
 4 | def similar_recommend_by_movie_id(movielens_id):
 5 |     movie_index = movielens_id - 1
 6 |     # enumerate builds a list of (list index, similarity) tuples:
 7 |     # [(index 0, similarity 0), (index 1, similarity 1), ...]. The list is then sorted in descending order by the second item of each tuple, the similarity.
 8 |     # The first item of each tuple in the sorted list is then a movie index.
 9 |     similar_movies = sorted(list(enumerate(movie_sim[movie_index])), key=lambda x: x[1], reverse=True)
10 |     recommended = 1
11 |     print("-----recommendation for movie %d------" % (movielens_id))
12 |     for movie_info in similar_movies[1:7]:
13 |         # The movie most similar to the given movie is the movie itself, so skip it when printing.
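        # similar_movies[0] is the query movie itself (self-similarity 1.0), so the
        # slice [1:7] above starts from the closest *other* movie and yields six titles.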
14 |         movie_title = movie_info_li[movie_info[0]]
15 |         print('rank %d recommendation:%s' % (recommended, movie_title[0]))
16 |         recommended += 1
17 | 
--------------------------------------------------------------------------------
/chapter12/README.md:
--------------------------------------------------------------------------------
1 | **About Example 12-5**
2 | 
3 | On September 24, 2017, we confirmed that the IMDB links in the MovieLens 100K dataset no longer work.
4 | To resolve this, we added a mapping from MovieLens 100K IDs to IMDB IDs built from the MovieLens 20M dataset.
5 | In addition, the OMDb API has become a paid service, so plots can no longer be fetched through the free API.
6 | To resolve this, we have uploaded the plot data for the MovieLens 100K movies.
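For reference, a minimal sketch of loading the uploaded plot file, mirroring the loading cell in chapter12.ipynb (each line of `ml-100k-plot.txt` is `movielens_id|plot`, and movies without a plot get an empty string):

```python
movie_plot_li = []
for line in open('ml-100k-plot.txt'):
    try:
        # Split only at the first '|' so plots containing '|' stay intact.
        ml_id, plot = line.strip().split('|', 1)
        movie_plot_li.append(plot)
    except ValueError:
        # No '|' on this line: keep an empty plot for that movie.
        movie_plot_li.append('')
```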
7 | -------------------------------------------------------------------------------- /chapter12/chapter12.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import codecs\n", 10 | "import numpy as np\n", 11 | "from scipy import stats\n", 12 | "from matplotlib import pyplot as plt\n", 13 | "from sklearn.metrics import mean_squared_error" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "# 예제 12-1\n", 23 | "\n", 24 | "def read_data(fin, delim):\n", 25 | " info_li = []\n", 26 | "\n", 27 | " for line in codecs.open(fin,\"r\",encoding=\"latin-1\"):\n", 28 | " line_items = line.strip().split(delim)\n", 29 | " #print(line_items)\n", 30 | "\n", 31 | " key = int(line_items[0])\n", 32 | " if (len(info_li)+1)!=key:\n", 33 | " print('errors at data_id')\n", 34 | " exit(0)\n", 35 | " info_li.append(line_items[1:])\n", 36 | " \n", 37 | " print('rows in %s: %d'%(fin, len(info_li)))\n", 38 | "\n", 39 | "\n", 40 | " return(info_li)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "fin_user= \"u.user\"\n", 50 | "fin_movie= \"u.item\"\n", 51 | "fin_rating= \"u.data\"" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 4, 57 | "metadata": {}, 58 | "outputs": [ 59 | { 60 | "name": "stdout", 61 | "output_type": "stream", 62 | "text": [ 63 | "rows in u.user: 943\n", 64 | "rows in u.item: 1682\n" 65 | ] 66 | } 67 | ], 68 | "source": [ 69 | "user_info_li = read_data(fin_user,'|')\n", 70 | "movie_info_li = read_data(fin_movie,'|')" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 5, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "# 예제 12-2 무비렌즈의 별점 정보 파일을 이용하여 유틸리티 행렬 만들기\n", 80 | "def read_rating_data(fin):\n", 81 | "\n", 82 | " Q = np.zeros((len(user_info_li), len(movie_info_li)), dtype=np.float64)\n", 83 | "\n", 84 | " for line in open(fin):\n", 85 | " user, movie, rating, date = line.strip().split(\"\\t\")\n", 86 | " user_index = int(user)-1\n", 87 | " movie_index = int(movie)-1\n", 88 | "\n", 89 | " Q[user_index,movie_index]= float(rating)\n", 90 | "\n", 91 | " return(Q)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 6, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "R=read_rating_data(fin_rating)" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 7, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "# 예제 12-3 사용자의 평균 별점 기초 통계량 구하기\n", 110 | "user_mean_li=[]\n", 111 | "for i in range(0,R.shape[0]):\n", 112 | " user_rating = [x for x in R[i] if x>0.0]\n", 113 | " user_mean_li.append(stats.describe(user_rating).mean)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 8, 119 | "metadata": {}, 120 | "outputs": [ 121 | { 122 | "data": { 123 | "text/plain": [ 124 | "DescribeResult(nobs=943, minmax=(1.4919540229885058, 4.8695652173913047), mean=3.5881911071848003, variance=0.19823286521146211, skewness=-0.44416521432016465, kurtosis=0.9286813229088393)" 125 | ] 126 | }, 127 | "execution_count": 8, 128 | "metadata": {}, 129 | "output_type": "execute_result" 130 | } 131 | ], 132 | "source": [ 133 | "stats.describe(user_mean_li)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | 
"execution_count": 9, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stderr", 143 | "output_type": "stream", 144 | "text": [ 145 | "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/numpy/core/fromnumeric.py:3146: RuntimeWarning: Degrees of freedom <= 0 for slice\n", 146 | " **kwargs)\n", 147 | "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/numpy/core/_methods.py:127: RuntimeWarning: invalid value encountered in double_scalars\n", 148 | " ret = ret.dtype.type(ret / rcount)\n" 149 | ] 150 | } 151 | ], 152 | "source": [ 153 | "# 예제 12-4 각 영화의 평균 별점 기초 통계량 구하기\n", 154 | "movie_mean_li=[]\n", 155 | "for i in range(0,R.shape[1]):\n", 156 | " R_T = R.T\n", 157 | " movie_rating = [x for x in R_T[i] if x>0.0]\n", 158 | " movie_mean_li.append(stats.describe(movie_rating).mean)" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 10, 164 | "metadata": {}, 165 | "outputs": [ 166 | { 167 | "data": { 168 | "text/plain": [ 169 | "DescribeResult(nobs=1682, minmax=(1.0, 5.0), mean=3.0760445083251788, variance=0.61099475027975136, skewness=-0.6991282361538707, kurtosis=0.4442028070725441)" 170 | ] 171 | }, 172 | "execution_count": 10, 173 | "metadata": {}, 174 | "output_type": "execute_result" 175 | } 176 | ], 177 | "source": [ 178 | "stats.describe(movie_mean_li)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 11, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "name": "stdout", 188 | "output_type": "stream", 189 | "text": [ 190 | "1682\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "# 추가된 파일. 영화 플롯을 읽어옵니다. 예제 12-5, 12-6을 대체합니다.\n", 196 | "from sklearn.feature_extraction.text import TfidfVectorizer\n", 197 | "\n", 198 | "movie_plot_li =[]\n", 199 | "for line in open('ml-100k-plot.txt'):\n", 200 | " try:\n", 201 | " ml_id, plot =line.strip().split('|',1)\n", 202 | " movie_plot_li.append(plot)\n", 203 | " \n", 204 | " except ValueError:\n", 205 | " movie_plot_li.append('')\n", 206 | "print(len(movie_plot_li))\n", 207 | "\n", 208 | "vectorizer = TfidfVectorizer(min_df=3, stop_words='english')\n", 209 | "X = vectorizer.fit_transform(movie_plot_li[:100])\n", 210 | "# TF-IDF로 변환한 키워드의 리스트\n", 211 | "# X의 0번 열에 해당하는 키워드가 feature_names[0]의 키워드입니다.\n", 212 | "feature_names = vectorizer.get_feature_names()" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": 12, 218 | "metadata": { 219 | "scrolled": true 220 | }, 221 | "outputs": [ 222 | { 223 | "name": "stdout", 224 | "output_type": "stream", 225 | "text": [ 226 | "['27', 'able', 'accident', 'action', 'affair', 'agent', 'alive', 'america', 'american', 'amy', 'army', 'arrival', 'asked', 'away', 'based', 'beautiful', 'befriends', 'begins', 'believes', 'bloody', 'blue', 'bond', 'boss', 'boy', 'breaks', 'bring', 'brother', 'brothers', 'brought', 'bruce', 'called', 'calls', 'car', 'case', 'charles', 'charlie', 'christmas', 'chronicles', 'city', 'class', 'come', 'comedy', 'comes', 'commit', 'company', 'continue', 'convenience', 'convicted', 'country', 'crime', 'criminal', 'cruel', 'dark', 'daughter', 'day', 'days', 'deadly', 'dealing', 'death', 'decide', 'decides', 'desert', 'despite', 'discovers', 'does', 'doesn', 'dr', 'dreams', 'drug', 'earlier', 'edward', 'efforts', 'emotional', 'emotions', 'empire', 'end', 'ends', 'escape', 'especially', 'eve', 'eventually', 'ex', 'extremely', 'face', 'fall', 'falls', 'family', 'famous', 'fate', 'father', 'favorite', 'fbi', 'fellow', 'fight', 'figure', 'filled', 
'film', 'finding', 'finds', 'forces', 'france', 'free', 'friend', 'friends', 'fun', 'future', 'game', 'gets', 'girl', 'goes', 'good', 'group', 'happens', 'hasn', 'having', 'head', 'help', 'high', 'hollywood', 'home', 'hostage', 'house', 'huge', 'human', 'husband', 'identity', 'including', 'information', 'instead', 'intentions', 'investigation', 'jack', 'jackson', 'james', 'job', 'john', 'joins', 'just', 'justice', 'kevin', 'kill', 'killed', 'killer', 'killers', 'killing', 'kills', 'land', 'later', 'law', 'leader', 'leads', 'learn', 'leave', 'life', 'like', 'little', 'lives', 'living', 'local', 'long', 'looks', 'loses', 'lost', 'love', 'lover', 'low', 'major', 'make', 'makes', 'making', 'man', 'marriage', 'married', 'marries', 'master', 'media', 'meet', 'meets', 'men', 'mission', 'money', 'morning', 'mother', 'moves', 'movie', 'movies', 'murder', 'murdered', 'murderer', 'named', 'national', 'need', 'needs', 'new', 'night', 'old', 'older', 'order', 'orders', 'outcast', 'owner', 'parents', 'partner', 'past', 'pay', 'people', 'person', 'peter', 'pick', 'piece', 'place', 'plan', 'plans', 'play', 'police', 'political', 'powerful', 'present', 'president', 'problem', 'problems', 'promising', 'prostitute', 'question', 'quickly', 'real', 'really', 'relationship', 'rescue', 'rest', 'return', 'revolves', 'richard', 'right', 'robert', 'room', 'rules', 'sam', 'samuel', 'school', 'secret', 'sees', 'senator', 'sent', 'serial', 'series', 'set', 'simple', 'sister', 'situation', 'small', 'social', 'solve', 'son', 'soon', 'start', 'stay', 'stolen', 'stop', 'store', 'story', 'strange', 'strong', 'stumbles', 'successful', 'suddenly', 'survive', 'taken', 'takes', 'taking', 'teaches', 'team', 'things', 'thinking', 'time', 'times', 'told', 'town', 'tried', 'tries', 'trip', 'true', 'try', 'trying', 'turns', 'type', 'uncle', 'underground', 'vacation', 'victim', 'violence', 'visit', 'wants', 'war', 'way', 'ways', 'white', 'wife', 'william', 'winning', 'wins', 'woman', 'women', 'work', 'working', 'works', 'world', 'wrong', 'year', 'years', 'york', 'young']\n" 227 | ] 228 | } 229 | ], 230 | "source": [ 231 | "print(feature_names)" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 13, 237 | "metadata": { 238 | "scrolled": false 239 | }, 240 | "outputs": [], 241 | "source": [ 242 | "# 예제 12-7 사용자 정의 토크나이저 만들기\n", 243 | "from nltk.tokenize import RegexpTokenizer\n", 244 | "from nltk.stem import WordNetLemmatizer\n", 245 | "class LemmaTokenizer(object):\n", 246 | " def __init__(self):\n", 247 | " self.tokenizer = RegexpTokenizer('(?u)\\w\\w+')\n", 248 | " # TfidfVectorizer와 같은 방식으로 키워드를 가져옵니다.\n", 249 | " self.wnl = WordNetLemmatizer()\n", 250 | " def __call__(self, doc):\n", 251 | " return([self.wnl.lemmatize(t) for t in self.tokenizer.tokenize(doc)])\n", 252 | "\n", 253 | "# 사이킷런에 위에서 정의한 토크나이저를 입력으로 넣습니다.\n", 254 | "vectorizer2 =TfidfVectorizer(min_df=3,tokenizer=LemmaTokenizer(),stop_words='english')\n", 255 | "X = vectorizer2.fit_transform(movie_plot_li[:100])\n", 256 | "feature_names = vectorizer2.get_feature_names()" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 14, 262 | "metadata": { 263 | "scrolled": true 264 | }, 265 | "outputs": [ 266 | { 267 | "name": "stdout", 268 | "output_type": "stream", 269 | "text": [ 270 | "['27', 'able', 'accident', 'action', 'affair', 'agent', 'alive', 'america', 'american', 'amy', 'army', 'arrival', 'asked', 'attempt', 'away', 'based', 'beautiful', 'befriends', 'begin', 'belief', 'bloody', 'blue', 'body', 'bond', 'bos', 'boy', 
'break', 'bring', 'brother', 'brought', 'bruce', 'called', 'car', 'case', 'cause', 'challenge', 'character', 'charles', 'charlie', 'christmas', 'chronicle', 'city', 'class', 'close', 'come', 'comedy', 'commit', 'company', 'contact', 'continue', 'convenience', 'convicted', 'cop', 'country', 'crime', 'criminal', 'cruel', 'dark', 'daughter', 'day', 'deadly', 'dealing', 'death', 'decide', 'decides', 'desert', 'despite', 'detective', 'director', 'discovers', 'doe', 'doesn', 'dollar', 'dr', 'dream', 'drug', 'earlier', 'edward', 'effort', 'emotion', 'emotional', 'empire', 'encounter', 'end', 'escape', 'especially', 'eve', 'eventually', 'ex', 'extremely', 'face', 'fall', 'family', 'famous', 'fate', 'father', 'favorite', 'fbi', 'fellow', 'fight', 'figure', 'filled', 'film', 'finding', 'force', 'france', 'free', 'friend', 'fun', 'future', 'gain', 'game', 'girl', 'girlfriend', 'good', 'grade', 'group', 'ha', 'hand', 'happens', 'hasn', 'having', 'head', 'help', 'high', 'hollywood', 'home', 'hope', 'hostage', 'house', 'huge', 'human', 'husband', 'identity', 'incident', 'including', 'information', 'instead', 'intention', 'investigation', 'jack', 'jackson', 'james', 'job', 'john', 'join', 'just', 'justice', 'kevin', 'kid', 'kill', 'killed', 'killer', 'killing', 'know', 'land', 'later', 'law', 'lead', 'leader', 'learn', 'leave', 'life', 'like', 'little', 'living', 'local', 'long', 'look', 'loses', 'lost', 'love', 'lover', 'low', 'major', 'make', 'making', 'man', 'marriage', 'married', 'marries', 'master', 'medium', 'meet', 'men', 'mind', 'mission', 'money', 'morning', 'mother', 'movie', 'murder', 'murdered', 'murderer', 'named', 'national', 'need', 'new', 'night', 'offer', 'old', 'older', 'order', 'outcast', 'owner', 'parent', 'partner', 'past', 'pay', 'people', 'perfect', 'person', 'peter', 'pick', 'piece', 'place', 'plan', 'play', 'police', 'political', 'powerful', 'present', 'president', 'problem', 'promising', 'prostitute', 'queen', 'question', 'quickly', 'read', 'real', 'really', 'reason', 'rebel', 'refuse', 'relationship', 'rescue', 'rest', 'return', 'revolves', 'richard', 'right', 'robert', 'role', 'room', 'rule', 'run', 'sam', 'samuel', 'save', 'school', 'secret', 'senator', 'sent', 'serial', 'series', 'set', 'simple', 'sister', 'situation', 'small', 'social', 'solve', 'son', 'soon', 'start', 'state', 'stay', 'stolen', 'stop', 'store', 'story', 'strange', 'strong', 'student', 'stumble', 'successful', 'suddenly', 'survive', 'taken', 'taking', 'teach', 'team', 'term', 'thing', 'think', 'thinking', 'time', 'told', 'town', 'trick', 'tried', 'trip', 'true', 'try', 'trying', 'turn', 'type', 'u', 'uncle', 'underground', 'vacation', 'victim', 'violence', 'visit', 'wa', 'want', 'war', 'way', 'wedding', 'week', 'white', 'wife', 'william', 'win', 'winning', 'woman', 'work', 'working', 'world', 'wrong', 'year', 'york', 'young']\n" 271 | ] 272 | } 273 | ], 274 | "source": [ 275 | "print(feature_names)" 276 | ] 277 | }, 278 | { 279 | "cell_type": "code", 280 | "execution_count": 15, 281 | "metadata": {}, 282 | "outputs": [], 283 | "source": [ 284 | "# 예제 12-8 비슷한 영화 추천하기\n", 285 | "from sklearn.metrics.pairwise import cosine_similarity\n", 286 | "movie_sim = cosine_similarity(X)\n", 287 | "def similar_recommend_by_movie_id(movielens_id):\n", 288 | " movie_index = movielens_id-1\n", 289 | " # enumerate 함수로 [(리스트 인덱스 0, 유사도 0), (리스트 인덱스 1, 유사도 1)...]의\n", 290 | " # 리스트를 만듭니다. 
그 후 각 튜플의 두 번째 항목, 즉 유사도를 이용하여 내림차순 정렬합니다.\n", 291 | " # 이렇게 만든 리스트의 가장 앞 튜플의 첫 번째 항목이 영화 ID가 됩니다.\n", 292 | " similar_movies = sorted(list(enumerate(movie_sim[movie_index])),key=lambda x:x[1], reverse=True)\n", 293 | " recommended=1\n", 294 | " print(\"-----recommendation for movie %d------\"%(movielens_id))\n", 295 | " for movie_info in similar_movies[1:6]:\n", 296 | " # 주어진 영화와 가장 비슷한 영화는 그 영화 자신이므로 출력 시 제외합니다.\n", 297 | " movie_title= movie_info_li[movie_info[0]]\n", 298 | " print('rank %d recommendation:%s'%(recommended,movie_title[0]))\n", 299 | " recommended+=1" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": 16, 305 | "metadata": {}, 306 | "outputs": [ 307 | { 308 | "name": "stdout", 309 | "output_type": "stream", 310 | "text": [ 311 | "-----recommendation for movie 1------\n", 312 | "rank 1 recommendation:Shawshank Redemption, The (1994)\n", 313 | "rank 2 recommendation:Aladdin (1992)\n", 314 | "rank 3 recommendation:What's Eating Gilbert Grape (1993)\n", 315 | "rank 4 recommendation:Searching for Bobby Fischer (1993)\n", 316 | "rank 5 recommendation:Firm, The (1993)\n" 317 | ] 318 | } 319 | ], 320 | "source": [ 321 | "similar_recommend_by_movie_id(1)" 322 | ] 323 | }, 324 | { 325 | "cell_type": "code", 326 | "execution_count": 41, 327 | "metadata": {}, 328 | "outputs": [], 329 | "source": [ 330 | "# 예제 12-9 교대 최소제곱법 구현하기\n", 331 | "\n", 332 | "from sklearn.metrics import mean_squared_error\n", 333 | "import numpy as np\n", 334 | "def compute_ALS(R, n_iter, lambda_, k):\n", 335 | " '''임의의 사용자 요인 행렬 X와 임의의 영화 요인 행렬 Y를 생성한 뒤\n", 336 | " 교대 최소제곱법을 이용하여 유틸리티 행렬 R을 근사합니다.\n", 337 | " R(ndarray) : 유틸리티 행렬\n", 338 | " lambda_(float) : 정규화 파라미터입니다.\n", 339 | " n_iter(fint) : X와 Y의 갱신 횟수입니다.\n", 340 | " '''\n", 341 | " m, n =R.shape\n", 342 | " X = np.random.rand(m, k)\n", 343 | " Y = np.random.rand(k, n)\n", 344 | "\n", 345 | " # 각 갱신 때마다 계산한 에러를 저장합니다.\n", 346 | " errors =[]\n", 347 | " for i in range(0, n_iter):\n", 348 | " # [식 6-4]를 구현했습니다.\n", 349 | " # 넘파이의 eye 함수는 파라미터 a를 받아 a x a 크기의 단위행렬을 만듭니다.\n", 350 | " X = np.linalg.solve(np.dot(Y, Y.T) + lambda_ * np.eye(k), np.dot(Y, R.T)).T\n", 351 | " Y = np.linalg.solve(np.dot(X.T, X) + lambda_ * np.eye(k), np.dot(X.T, R))\n", 352 | " \n", 353 | " errors.append(mean_squared_error(R, np.dot(X, Y)))\n", 354 | " \n", 355 | " if i % 10 == 0:\n", 356 | " print('iteration %d is completed'%(i))\n", 357 | " #print(mean_squared_error(R, np.dot(X, Y)))\n", 358 | " \n", 359 | " R_hat = np.dot(X, Y)\n", 360 | " print('Error of rated movies: %.5f'%(mean_squared_error(R, np.dot(X, Y))))\n", 361 | " return(R_hat, errors)" 362 | ] 363 | }, 364 | { 365 | "cell_type": "code", 366 | "execution_count": 42, 367 | "metadata": {}, 368 | "outputs": [ 369 | { 370 | "name": "stdout", 371 | "output_type": "stream", 372 | "text": [ 373 | "iteration 0 is completed\n", 374 | "iteration 10 is completed\n", 375 | "Error of rated movies: 0.24658\n" 376 | ] 377 | } 378 | ], 379 | "source": [ 380 | "R_hat, errors = compute_ALS(R, 20, 0.1,100)" 381 | ] 382 | }, 383 | { 384 | "cell_type": "code", 385 | "execution_count": 19, 386 | "metadata": {}, 387 | "outputs": [], 388 | "source": [ 389 | "W = R>0.0\n", 390 | "W[W == True] = 1\n", 391 | "W[W == False] = 0\n", 392 | "W = W.astype(np.float64, copy=False)" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": 32, 398 | "metadata": {}, 399 | "outputs": [], 400 | "source": [ 401 | "# 예제 12-10 가중치 교대 최소제곱법 구현하기 \n", 402 | "\n", 403 | "def compute_wALS(R,W, n_iter, lambda_, 
k):\n", 404 | " m,n = R.shape\n", 405 | " X = np.random.rand(m, k)\n", 406 | " Y = np.random.rand(k, n)\n", 407 | " weighted_errors = []\n", 408 | " \n", 409 | " # [예제 12-9]와 달리 가중치 행렬을 넣어서 계산합니다.\n", 410 | " for ii in range(n_iter):\n", 411 | " # 각 사용자와 영화의 가중치 행렬을 이용하여 X와 Y를 갱신합니다.\n", 412 | " for u, Wu in enumerate(W):\n", 413 | " X[u,:] = np.linalg.solve(np.dot(Y, np.dot(np.diag(Wu), Y.T)) +lambda_ * np.eye(k), np.dot(Y, np.dot(np.diag(Wu),R[u,:].T))).T\n", 414 | " for i, Wi in enumerate(W.T):\n", 415 | " Y[:, i] = np.linalg.solve(np.dot(X.T, np.dot(np.diag(Wi), X)) + lambda_ * np.eye(k), np.dot(X.T, np.dot(np.diag(Wi), R[:, i])))\n", 416 | "\n", 417 | " # 가중치 행렬을 mean_squared_error 함수의 인자로 사용합니다.\n", 418 | " weighted_errors.append(mean_squared_error(R, np.dot(X, Y),sample_weight=W))\n", 419 | " if ii % 10 == 0:\n", 420 | " print('iteration %d is completed'%(ii))\n", 421 | " \n", 422 | " R_hat = np.dot(X, Y)\n", 423 | " print('Error of rated movies: %.5f'%(mean_squared_error(R, np.dot(X, Y), sample_weight=W)))\n", 424 | " return(R_hat, errors)" 425 | ] 426 | }, 427 | { 428 | "cell_type": "code", 429 | "execution_count": 33, 430 | "metadata": { 431 | "scrolled": true 432 | }, 433 | "outputs": [ 434 | { 435 | "name": "stdout", 436 | "output_type": "stream", 437 | "text": [ 438 | "iteration 0 is completed\n", 439 | "iteration 10 is completed\n", 440 | "Error of rated movies: 0.00066\n" 441 | ] 442 | } 443 | ], 444 | "source": [ 445 | "R_hat, errors =compute_wALS(R, W, 20, 0.1,100)" 446 | ] 447 | }, 448 | { 449 | "cell_type": "code", 450 | "execution_count": 20, 451 | "metadata": {}, 452 | "outputs": [], 453 | "source": [ 454 | "# 예제 12-11 경사하강법 구현하기\n", 455 | "\n", 456 | "def compute_GD(R,n_iter, lambda_, learning_rate, k):\n", 457 | " m,n =R.shape\n", 458 | " errors=[]\n", 459 | " \n", 460 | " X = np.random.rand(m, k)\n", 461 | " Y = np.random.rand(k, n)\n", 462 | " \n", 463 | " # 입력받은 반복 횟수만큼 갱신을 반복합니다.\n", 464 | " for ii in range(n_iter):\n", 465 | " for u in range(m):\n", 466 | " for i in range(n):\n", 467 | " if R[u,i]>0:\n", 468 | " # 새로 정의된 갱신식. 
 469 | "                    e_ui = R[u,i]-np.dot(X[u, :], Y[:,i])\n", 470 | "\n", 471 | "                    X[u,:] = X[u,:] + learning_rate * (e_ui* Y[:,i] - lambda_ * X[u,:])\n", 472 | "                    Y[:,i] = Y[:,i] + learning_rate * (e_ui * X[u,:] - lambda_ * Y[:,i]) \n", 473 | "    \n", 474 | "        errors.append(mean_squared_error(R, np.dot(X, Y)))\n", 475 | "    \n", 476 | "        if ii % 10 == 0:\n", 477 | "            print('iteration %d is completed'%(ii))\n", 478 | "\n", 479 | "    R_hat = np.dot(X, Y)\n", 480 | "    print('Error of rated movies: %.5f'%(mean_squared_error(R, R_hat)))\n", 481 | "\n", 482 | "    return(R_hat, errors)\n", 483 | "\n" 484 | ] 485 | }, 486 | { 487 | "cell_type": "code", 488 | "execution_count": 21, 489 | "metadata": {}, 490 | "outputs": [ 491 | { 492 | "name": "stdout", 493 | "output_type": "stream", 494 | "text": [ 495 | "iteration 0 is completed\n", 496 | "iteration 10 is completed\n", 497 | "Error of rated movies: 25.38837\n" 498 | ] 499 | } 500 | ], 501 | "source": [ 502 | "R_hat, errors= compute_GD(R,20, 1, 0.001, 100)" 503 | ] 504 | }, 505 | { 506 | "cell_type": "code", 507 | "execution_count": 22, 508 | "metadata": {}, 509 | "outputs": [ 510 | { 511 | "name": "stdout", 512 | "output_type": "stream", 513 | "text": [ 514 | "iteration 0 is completed\n", 515 | "iteration 10 is completed\n", 516 | "iteration 20 is completed\n", 517 | "iteration 30 is completed\n", 518 | "Error of rated movies: 17.67668\n" 519 | ] 520 | } 521 | ], 522 | "source": [ 523 | "R_hat, errors= compute_GD(R,40, 1, 0.001, 100)" 524 | ] 525 | }, 526 | { 527 | "cell_type": "code", 528 | "execution_count": 23, 529 | "metadata": {}, 530 | "outputs": [], 531 | "source": [ 532 | "# Example 12-12: Splitting the data into training and test sets\n", 533 | "def train_test_split(R, n_test):\n", 534 | "    train = R.copy()\n", 535 | "    test = np.zeros(R.shape)\n", 536 | "    for user in range(R.shape[0]):\n", 537 | "        test_index = np.random.choice(R[user, :].nonzero()[0], size=n_test, replace=False)  # assumes every user has at least n_test ratings\n", 538 | "        train[user, test_index] = 0.\n", 539 | "        test[user, test_index] = R[user, test_index]\n", 540 | "\n", 541 | "    return (train, test)" 542 | ] 543 | }, 544 | { 545 | "cell_type": "code", 546 | "execution_count": 24, 547 | "metadata": {}, 548 | "outputs": [], 549 | "source": [ 550 | "# Example 12-13\n", 551 | "def get_test_mse(true,pred):\n", 552 | "    # Compute the error using only the nonzero entries of the train-test data.\n", 553 | "    # true is the evaluation (test) matrix; pred is the predicted matrix.\n", 554 | "    # Extract only the scores at the indices where the evaluation matrix is nonzero.\n", 555 | "    pred = pred[true.nonzero()].flatten()\n", 556 | "    true = true[true.nonzero()].flatten()\n", 557 | "    return mean_squared_error(true,pred)" 558 | ] 559 | }, 560 | { 561 | "cell_type": "code", 562 | "execution_count": 25, 563 | "metadata": {}, 564 | "outputs": [], 565 | "source": [ 566 | "from sklearn.metrics import mean_squared_error\n", 567 | "import numpy as np\n", 568 | "\n", 569 | "def compute_ALS(R, test, n_iter, lambda_, k):\n", 570 | "    '''Generate a random user factor matrix X and a random movie factor matrix Y, then use alternating\n", 571 | "    least squares to approximate the utility matrix R.
 Then evaluate using the test matrix.\n", 572 | "    R(ndarray) : utility matrix\n", 573 | "    test: evaluation matrix\n", 574 | "    lambda_(float) : regularization parameter\n", 575 | "    n_iter(int) : number of times X and Y are updated\n", 576 | "    '''\n", 577 | "    m,n =R.shape\n", 578 | "    X = np.random.rand(m, k)\n", 579 | "    Y = np.random.rand(k, n)\n", 580 | "    errors =[]\n", 581 | "    # Store the error computed at each update.\n", 582 | "    for i in range(0, n_iter):\n", 583 | "        X = np.linalg.solve(np.dot(Y, Y.T) + lambda_ * np.eye(k),np.dot(Y, R.T)).T\n", 584 | "        Y = np.linalg.solve(np.dot(X.T, X) + lambda_ * np.eye(k), np.dot(X.T, R))\n", 585 | "        errors.append(get_test_mse(test,np.dot(X, Y)))\n", 586 | "\n", 587 | "        if i % 10 == 0:\n", 588 | "            print('iteration %d is completed'%(i))\n", 589 | "    \n", 590 | "    R_hat = np.dot(X, Y)\n", 591 | "    print('Error of rated movies: %.5f'%(get_test_mse(test,R_hat)))\n", 592 | "    return(R_hat, errors)" 593 | ] 594 | }, 595 | { 596 | "cell_type": "code", 597 | "execution_count": 26, 598 | "metadata": {}, 599 | "outputs": [], 600 | "source": [ 601 | "train,test = train_test_split(R, 10)" 602 | ] 603 | }, 604 | { 605 | "cell_type": "code", 606 | "execution_count": 27, 607 | "metadata": {}, 608 | "outputs": [ 609 | { 610 | "name": "stdout", 611 | "output_type": "stream", 612 | "text": [ 613 | "iteration 0 is completed\n", 614 | "iteration 10 is completed\n", 615 | "Error of rated movies: 2.39322\n" 616 | ] 617 | } 618 | ], 619 | "source": [ 620 | "R_hat, train_errors = compute_ALS(train, train,20, 0.1,100)" 621 | ] 622 | }, 623 | { 624 | "cell_type": "code", 625 | "execution_count": 28, 626 | "metadata": {}, 627 | "outputs": [ 628 | { 629 | "name": "stdout", 630 | "output_type": "stream", 631 | "text": [ 632 | "iteration 0 is completed\n", 633 | "iteration 10 is completed\n", 634 | "Error of rated movies: 10.01291\n" 635 | ] 636 | } 637 | ], 638 | "source": [ 639 | "R_hat, test_errors = compute_ALS(train, test,20, 0.1,100)" 640 | ] 641 | }, 642 | { 643 | "cell_type": "code", 644 | "execution_count": 29, 645 | "metadata": {}, 646 | "outputs": [ 647 | { 648 | "data": { 649 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmcFPWd//HXZy5mhms4BuUeQgSNqChoRNSgRoPGM0aJ\nxgSP/ZGoMSTrSnCT9dh1f2vWDZtjE1mNSKIEYxQ1h0nwAI2KKCAoAhGIKAPCDESO4ZirP/tH1Qw9\n93Qz3T1Q7+fj0Y+uqm99v/Xpnpr+VH2r+tvm7oiISHRlZToAERHJLCUCEZGIUyIQEYk4JQIRkYhT\nIhARiTglAhGRiFMiEBGJOCUCEZGIUyIQEYm4nEwH0B59+/b1kpKSTIchInJIWbp06TZ3L25rvUMi\nEZSUlLBkyZJMhyEickgxsw/as566hkREIk6JQEQk4pQIREQi7pC4RiAih7bq6mpKS0vZv39/pkM5\nLOXn5zNo0CByc3OTqq9EICIpV1paSvfu3SkpKcHMMh3OYcXd2b59O6WlpQwbNiypNtQ1JCIpt3//\nfvr06aMkkAJmRp8+fQ7qbEuJQETSQkkgdQ72vVUiEBGJOCUCEZGIS1kiMLNZZlZmZiubKbvVzNzM\n+qZq+yIidXbs2MHPfvazhOtdcMEF7NixIwURdS6pPCOYDUxsvNDMBgPnAR+mcNsiIvVaSgQ1NTWt\n1nv22WcpKipKSUyNt91WLImul4iU3T7q7i+bWUkzRf8NTAOeSdW2RaQT++N02PJOx7Z55HFw/r0t\nFk+fPp3169czevRocnNzyc/Pp1evXqxZs4b33nuPSy+9lI0bN7J//36mTp3KlClTgAPjnFVUVHD+\n+edz+umn89prrzFw4ECeeeYZCgoKmt3e+vXrufnmmykvL6ewsJAHH3yQo48+mmuvvZb8/Hzeeust\nxo8fT48ePVi/fj1/+9vfGDJkCA8//DA33ngjS5YsIScnhxkzZnDWWWcxe/Zs5s2bR0VFBbW1tbz0\n0ksd+val9XsEZnYJsMndV+gOAhFJl3vvvZeVK1eyfPlyFi5cyOc//3lWrlxZf9/9rFmz6N27N/v2\n7ePkk0/m8ssvp0+fPg3aWLt2LXPnzuXBBx/kyiuv5Mknn+Saa65pdntTpkxh5syZHHXUUSxevJib\nbrqJF198EQi+U/Haa6+RnZ3NXXfdxapVq3jllVcoKCjgBz/4AWbGO++8w5o1azjvvPN47733AFi2\nbBlvv/02vXv37vD3J22JwMwKgX8m6BZqz/pTgCkAQ4YMSWFkIpJWrRy5p8spp5zS4MtXP/7xj3nq\nqacA2LhxI2vXrm2SCIYNG8bo0aMBGDNmDBs2bGi27YqKCl577TWuuOKK+mWVlZX101dccQXZ2dn1\n8xdffHH9mcUrr7zCLbfcAsDRRx/N0KFD6xPBueeem5IkAOk9IxgODAPqzgYGAcvM7BR339J4ZXd/\nAHgAYOzYsZ7GOEXkMNe1a9f66YULF/L888+zaNEiCgsLmTBhQrNfzurSpUv9dHZ2Nvv27Wu27Vgs\nRlFREcuXL29z283Ntyfmjpa220fd/R137+fuJe5eApQCJzWXBEREOlL37t3ZvXt3s2U7d+6kV69e\nFBYWsmbNGl5//fWD2laPHj0YNmwYv/nNb4BgCIgVK1a0q+4ZZ5zBnDlzAHjvvff48MMPGTly5EHF\n0x6pvH10LrAIGGlmpWZ2Q6q2JSLSmj59+jB+/HhGjRrFbbfd1qBs4sSJ1NTUcMwxxzB9+nROPfXU\ng97enDlzeOihhzjhhBM49thjeeaZ9t0bc9NNNxGLxTjuuOOYNGkSs2fPbnAmkirm3vl7XcaOHev6\nhTKRQ9fq1as55phjMh3GYa2599jMlrr72Lbq6pvFIiIRp2GoRUSSdPPNN/Pqq682WDZ16lSuu+66\nDEWUHCUCEZEk/fSnP810CB1CXUMiIhGnRCAiEnFKBCIiEadEICKHvWSHoQb44Q9/yN69ezs4os5F\niUBEDnudIRHU1tY2mM/ksNON6a4hETnsxQ9Dfe6559KvXz8ef/xxKisrueyyy7j77rvZs2cPV155\nJaWlpdTW1vIv//IvbN26lc2bN3PWWWfRt29fFixY0Gz78+fP584776SyspLhw4fz8MMP061bN0pK\nSpg0aRLPPfcc06ZNY+bMmYwePZpXXnmFq666issvv5zrr7+ebdu2UVxczMMPP8yQIUOaDFc9Y8aM\nlL4/SgQiknaT/ndRk2UXHt+fr4wrYV9VLdc+/EaT8i+OGcQVYwfz9z1V3Pjo0gZlv/7auFa3Fz8M\n9fz583niiSd44403cHcuvvhiXn75ZcrLyxkwYAB/+MMfgGAMop49ezJjxgwWLFhA377N/6Ditm3b\nuOeee3j++efp2rUr3//+95kxYwZ33HEHEAxvsWzZMgBmzpxJVVUVdSMlXHTRRUyePJnJkycza9Ys\nvvnNb/L0008DDYerTjUlAhGJlPnz5zN//nxOPPFEIBg2eu3atZxxxhnceuutfOc73+HCCy/kjDPO\naFd7r7/+OqtWrWL8+PEAVFVVMW7cgcQ0adKkBuvHzy9atIh58+YB8JWvfIVp06bVlzUerjqVlAhE\nJO1aO4IvyMtutbx317w2zwBa4+7cfvvtfO1rX2tStmzZMp599lm+973vcc4559Qf1bfV3rnnnsvc\nuXObLe+Mw043povFInLYix+G+nOf+xyzZs2ioqICgE2bNlFWVsbmzZspLCzkmmuu4bbbbqvvzmlt\nCGuAU089lVdffZV169YBsGfPnvofk2nLaaedxmOPPQYEI5a29yyko+mMQEQOe/HDUJ9//vlcffXV\n9d033bp149FHH2XdunXcdtttZGVlkZuby/333w8EPzs5ceJEBgwY0OzF4uLiYmbPns1VV11V/0tk\n99xzDyNGjGgzrp/85Cdcd9113HffffUXizNBw1CLSMppGOrU0zDUIiKSNHUNiYi006c//ekGP0QP\n8Mgjj3DcccdlKKKOoUQgItJOixcvznQIKaGuIRFJi0PheuSh6mDfWyUCEUm5/Px8tm/frmSQAu7O\n9u3byc/PT7qNlHUNmdks4EKgzN1HhcvuAy4CqoD1wHXuviNVMYhI5zBo0CBKS0spLy/PdCiHpfz8\nfAYNGpR0/VReI5gN/A/wy7hlzwG3u3uNmX0fuB34TgpjEJFOIDc3l2HDhmU6DGlByrqG3P1l4O+N\nls1397oxVV8Hkk9hIiLSITJ5jeB64I8Z3L6IiJChRGBm3wVqgDmtrDPFzJaY2RL1K4qIpE7aE4GZ\nXUtwEfnL3sotBO7+gLuPdfexxcXFaYtPRCRq0vqFMjObCEwDPuPuh/ePgIqIHCJSdkZgZnOBRcBI\nMys1sxsI7iLqDjxnZsvNbGaqti8iIu2TsjMCd7+qmcUPpWp7IiKSHH2zWEQk4pQIREQiTolARCTi\nlAhERCJOiUBEJOKUCEREIk6JQEQk4pQIREQiTolARCTi
lAhERCJOiUBEJOKUCEREIk6JQEQk4pQI\nREQiTolARCTilAhERCJOiUBEJOKUCEREIk6JQEQk4pQIREQiLmWJwMxmmVmZma2MW9bbzJ4zs7Xh\nc69UbV9ERNonlWcEs4GJjZZNB15w96OAF8J5ERHJoJxUNezuL5tZSaPFlwATwulfAAuB76QqBhFp\nnbuHzweWZWUZALUxx92JK8KAnOzg+LGqJobjDeuakZcTlO+vrm1QFrQNXXKyAdhTWRNsO648J8vI\nz83G3dkdlsfLy84iPzebWMzZvb9peZfcoLw25uzeX92kPD83m/zcbGpqY+xqpn5hXlBeXRtj176m\n9bvl59AlJ5vKmlp27Wtav0dBUL6/upZdzWy/qCCPvJws9lXVNhtfr6555GZnsbeqhoLcbMysyTqp\nkLJE0IIj3P2jcHoLcESaty+N7K+upSbm1NTGqK51amIxCnKzKSrMIxZz1mzZTU0sLKuNURNzBhYV\nUNK3K5U1tSxYU0ZtDGpiMWLu1Mbg2AE9OKZ/D3btr2be0lJqYk7MPXiOOWeOKOb4QUWU7drP7Nc2\nUOvB8pgHHz6XnjiQ0YOLeH/bHh54eT21YVksXO/604dx/KAiVm7ayf+8uI5Y+GHlHnwoffvcEYwa\n2JPFf9vOT+rKw/rucM9loxhxRHdeWL2VH7+4Djxov+5D7WdfPomhfbry9FubuH/hepywPNzO3P93\nKkf0yOeRRRt48C/v19er+9B7duoZ9CzI5acL1vHLRRuCMg6UL7r9bHKzs/iPP67m8Tc31pe5O3k5\n2Sz53mcBuH3eO/xuxWbgwLZ7Febx6vSzAbj5V8t4btXWBp+kg3oX8OKtEwD46qw3eHXdtgYf5kcf\n2YM/Tj0DgEt/+irLN+5osD+MGdqLJ288DYCJP3yZtWUVDcrPHFHML68/BYAJ9y1g8879DcovOO5I\nfvblMQB8+v+/wM5GH6ZXjBnEfVecAMAJd8+nJtYwU1x7Wgl3XXwslTUxjr9rfpP99eazhnPb547m\n471VjLnn+Sbl0yaO5KYJn2TTx/s4874FTcr/9ZJj+eq4EtaWVXD+j/7SpPwHV5zA5WMGsWLjDr44\nc1GT8pnXnMTEUf1ZtH471z78ZpPyR244hTOOKuaF1WXc/KtlTcrn3XQaJw3pxe/e3sy0J95uUj7/\n22cy4oju/PrNjUweV0Ka8kDaE0E9d3cz85bKzWwKMAVgyJAhaYsr3apqYuytqmFfdS17q2rZV1VL\nTrZx9JE9AHh+1VbKdleyr7qWfVU1VNbEGNyrkCtPHgzAPb9fxdbdlVTV1FJVE6OqNsaYob35x3NH\nAHDJT19le0VlfVlVTYxLRg/gP75wPACj7vxzi/+MVbUxLvhx03+Wun/GPZW1fP3Rpjv7bZ8byTH9\ne7BzbzV3/W5Vk/Lu+bkcP6iIv++t4sG//A0zI9uMLAuORk8cUsTowUXs2lfNC6vLyDIjO8swC444\nL9sbfLjsr67l/W17MAMzwwiOOKtqY0CQVPZV1wbLLawf1xmam51Fz4LcsPxAG9nhEXGPghxK+hbW\n1zUMLDhqBTiiRz4nDSmqr0e4Tl35J/p2ZcKIfmF8wQpBO4FRA3qy9/ja+mVmB+oCfHpY7/CoMJg3\noLDLgX/Zs0f2Y1CvAixsF6BnQW59+YXH9+f4gT2DuuE2+nbvUl8+6eTBfGZEcVz7Rv+i/PryyaeV\n8PGeqvr6AIN7F9aXf+0zw6kIj9rryj/Rt1t9+dRzjqr/W9QZeUT3+unvTDyauhQVvoMcOyDY73Oy\njO99/hgaGz24CICuXXK448JPNSkfMzS47FjUNZc7L2paPnZobyD429198bFNyk8I2x/Su5B/vaRp\n+TH9g/iOOqI7/3bpqCblw4uD1z9qYA/uaaZ8cK/g/TtpSC/+/bKm5f3Cv8+44X2alKWSeeNzt45s\nPOga+r27jwrn/wpMcPePzKw/sNDdR7bVzqCjRvkPfvUsn+zXnU/269ZgZ8+0bRWVlO+uZNe+anaG\nD3fqP6h/tnAdSzZ8zK591ezeH3zgDyjK57Ep4wC4/P7XWPrBxw3aPGFwEc/cPB6A83/0F1Z/tKu+\nzAzOPKqYX4RHZVfOXETZ7v10yckmLyeLvJwsxn2iD//0ueBtvX3e21RWx+rLcrOzGD24iItOGADA\n/760HjPIycoiN9vIyc5ixBHdGTO0F7GYM3/VVnKyjJxsIzc7i5wsY2CvAgb1KqSmNsZ7WyvIybb6\nD+ucLKNnYS498nOpjTm79lWTFS7PrnuY1Xc/iEjqmNlSdx/b5nppTgT3Advd/V4zmw70dvdpbbWT\n3/8oP3LyD+vn+3Xvwj+dN5IrTx7MvqpaVpTu4JP9utGna95B9am5Ox/vrWbLzv18Kjwyefadj3hx\nTRlluyvZubeKXftrqKqJ1Z+eT33sLZ5ZvrlBO70Kc3nrjvMA+N7T77Dsgx30LMile34OhXnZDCgq\nYNrEowF4ZvkmtlVUUZiXXd8/2bdbl/ojm8079pFlRkFuNvl5WeRlZ6Wt31BEDm3tTQQp6xoys7kE\nF4b7mlkpcCdwL/C4md0AfABc2Z62Rg3sybzbzmJt2W7WlVWwtqyCI3sGp7CrPtrFlx54HQg+gI/q\n153h/box+bShHH1kD2pjTpZBzKF8dyUf7dzHlp37mTCyHwV52fx2xWYeff0Dtuzcz5Zd+6mqCU5l\n37nrPLrn5/Lu5p28snYbR/ToQs/CPIb06UqP/BxiMScry/jquKF87tgj6VmQW//okX/gjOWeS49r\n9bVdMnpgq+UDigra8xaJiCQtpWcEHWXs2LG+ZMmSZst276/mrQ93sLasgnVlFawr283asgoe+MpY\nThnWm2ff+YhbH19BZU0t8V3hdRdlnn5rE79640OO7JFP/575HNkzeJ4wsh/5udlpeoUiIh2vU3QN\ndZTWEkFz6l6TmbFi4w6eXr6Jbl1yOCLuw354cTd90IvIYS3jXUOZFN+HfsLgovo7AUREpCmNNSQi\nEnFKBCIiEadEICIScUoEIiIRp0QgIhJxSgQiIhGnRCAiEnFKBCIiEadEICIScUoEIiIRp0QgIhJx\nSgQiIhGnRCAiEnFKBCIiEadEICIScUoEIiIRp0QgIhJxSgQiIhGXkURgZt82s3fNbKWZzTWz/EzE\nISIiGUgEZjYQ+CYw1t1HAdnAl9Idh4iIBDLVNZQDFJhZDlAIbM5QHCIikddqIjCza+Kmxzcq+0Yy\nG3T3TcB/AR8CHwE73X1+Mm2JiMjBa+uM4B/jpn/SqOz6ZDZoZr2AS4BhwACga3zCiVtvipktMbMl\n5eXlyWxKRETaoa1EYC1MNzffXp8F3nf3cnevBuYBpzVeyd0fcPex7j62uLg4yU2JiEhb2koE3sJ0\nc/Pt9SFwqpkVmpkB5wCrk2xLREQOUk4b5Ueb2dsER//Dw2nC+U8ks0F3X2xmTwDLgBrgLeCBZNoS\nEZGD11YiOCY
VG3X3O4E7U9G2iIgkptVE4O4fxM+bWR/gTOBDd1+aysBERCQ92rp99PdmNiqc7g+s\nJLhb6BEz+1Ya4hMRkRRr62LxMHdfGU5fBzzn7hcBnybJ20dFRKRzaSsRVMdNnwM8C+Duu4FYqoIS\nEZH0aeti8UYzuwUoBU4C/gRgZgVAbopjExGRNGjrjOAG4FjgWmCSu+8Il58KPJzCuEREJE3aumuo\nDPh6M8sXAAtSFZSIiKRPq4nAzH7bWrm7X9yx4YiISLq1dY1gHLARmAssJvnxhUREpJNqKxEcCZwL\nXAVcDfwBmOvu76Y6MBERSY9WLxa7e627/8ndJxNcIF4HLEz2twhERKTzaeuMADPrAnye4KygBPgx\n8FRqwxIRkXRp62LxL4FRBF8kuzvuW8YiInKYaOuM4BpgDzAV+Gbw8wFAcNHY3b1HCmMTEZE0aOt7\nBJn6cXsREUkTfdCLiEScEoGISMQpEYiIRJwSgYhIxCkRiIhEXEYSgZkVmdkTZrbGzFab2bhMxCEi\nIu34ZnGK/Aj4k7t/0czygMIMxSEiEnlpTwRm1hM4k+DHbnD3KqAq3XGIiEggE11Dw4By4GEze8vM\nfm5mXTMQh4iIkJlEkEPw+8f3u/uJBENYTG+8kplNMbMlZrakvLw83TGKiERGJhJBKVDq7ovD+ScI\nEkMD7v6Au49197HFxcVpDVBEJErSngjcfQuw0cxGhovOAValOw4REQlk6q6hW4A54R1DfwOuy1Ac\nIiKRl5FE4O7LgbGZ2LaIiDSkbxaLiEScEoGISMQpEYiIRJwSgYhIxCkRiIhEnBKBiEjEKRGIiESc\nEoGISMQpEYiIRJwSgYhIxCkRiIhEnBKBiEjEKRGIiEScEoGISMQpEYiIRJwSgYhIxCkRiIhEnBKB\niEjEKRGIiEScEoGISMRlLBGYWbaZvWVmv89UDCIiktkzgqnA6gxuX0REyFAiMLNBwOeBn2di+yIi\nckCmzgh+CEwDYhnavoiIhNKeCMzsQqDM3Ze2sd4UM1tiZkvKy8vTFJ2ISPRk4oxgPHCxmW0AHgPO\nNrNHG6/k7g+4+1h3H1tcXJzuGEVEIiPticDdb3f3Qe5eAnwJeNHdr0l3HCIiEjg0vkdQsz/TEYiI\nHLYymgjcfaG7X9jmitveg/UL0hCRiEj0HBpnBNl5MOeLsOyXmY5EROSwc2gkgr5HwbAz4be3wPN3\nQ0x3nYqIdJRDIxFYNlz9OJw0GV6ZAU9eD9X7Mh2ViMhhISfTAbRbdi5c9CPoMxyeuwN2boKr5kLX\nvpmOTETkkHZonBHUMYPxU+GKX8CWt+Hn58C2tZmOSkTkkHZoJYI6x14Kk38PlRXw88/ChlcyHZGI\nyCHr0EwEAINPhn94Hrr1g19eCisey3REIiKHpEM3EQD0HgY3zIchp8JTX4MF/wHumY5KROSQcmgn\nAoCCXnDNPDjhanjp3iAh1FRmOioRkUPGoXPXUGty8uDSn0HvT8CCe2BnKUx6FAp7ZzoyEZFO79A/\nI6hjBp+5Db7wcyh9Ex46F7avz3RUIiKd3uGTCOocfwV89RnYuz1IBh++numIREQ6tcMvEQAMPQ3+\n4QXI7wm/uBhWPpnpiEREOq3D4xpBc/oMhxueh8euhieuh5f+E3oNg14lwd1GvUrC+aGQ0yXT0YqI\nZMzhmwgAuvYJuole/VHwTeS/vw/vvwTVe+NWMugxIC5JlITTw4KEUdAruP4gInKYOrwTAUBuPkz4\nzoF5d9hTHiSFjzfAx+Hz39+Hdc9BxdaG9bv0CBJE0ZCgqymvG+R1hS7dIK973HT4qJvuEpbpbENE\nOrnDPxE0ZhZ8G7lbPxjy6ablVXvg4w8OJIm6hLF9XTCkRdXu4Nlr27e9rNwDySG3MPhthezchs85\nXeKW5bUwHT5n5QSjsWaFD8sOlmWFz5YVN914vbh1LCt4LywrKKtfltVMedwjKxuwhuVwYL5JmbVQ\nprMskc4ieomgLXld4YhPBY+WuAdfWquqgMrdQfKoqjiQKKr2NDNdEXRJ1VZDbVX4qA6XVcUtb1Re\nN33YsrgkEfdcn0SaKcPAwrrQqKy5Nhsta7FO45gSmK5vsx1lcU8tl8cnyvYsiytrz7I2248vbq29\ndi5vd5226nVEeTvqtXig0szy9q7b7HqtrNN3BHxmWgttdywlgmSYBV1OufnpGQbbHWI1QfKJ1YDH\ngudYbXBmUj/d3PJYw3ViNYCHbYZ1mn14o/nG64Zt4A3Xr2u7frrxuo3Wa/U5dmDIkGbXoeGy5tZr\nsqyuDk3Lk55ura1Gf8cm67Yy395lDbbT3mXNtOUJttfu5e2s01a9dpXTcnm7t9vCMDXNDl/T3nVb\ne+9biil9P8ClRHAoMAu7hnIzHYmIHIbS/j0CMxtsZgvMbJWZvWtmU9Mdg4iIHJCJM4Ia4FZ3X2Zm\n3YGlZvacu6/KQCwiIpGX9jMCd//I3ZeF07uB1cDAdMchIiKBjA4xYWYlwInA4kzGISISZRlLBGbW\nDXgS+Ja772qmfIqZLTGzJeXl5ekPUEQkIjKSCMwslyAJzHH3ec2t4+4PuPtYdx9bXFyc3gBFRCIk\nE3cNGfAQsNrdZ6R7+yIi0lAmzgjGA18Bzjaz5eHjggzEISIiZOD2UXd/hWa/py0iIplweP4wjYiI\ntJsSgYhIxCkRiIhEnBKBiEjEKRGIiEScEoGISMQpEYiIRJwSgYhIxCkRiIhEnBKBiEjEKRGIiESc\nEoGISMQpEYiIRJwSgYhIxCkRiIhEnBKBiEjEKRGIiEScEoGISMQpEYiIRJwSgYhIxGUkEZjZRDP7\nq5mtM7PpmYhBREQCaU8EZpYN/BQ4H/gUcJWZfSrdcYiISCATZwSnAOvc/W/uXgU8BlySgThERITM\nJIKBwMa4+dJwmYiIZEBOpgNoiZlNAaaEs5VmtvIgm+wLbMtg/cOpjc4QQ2dpozPE0Fna6AwxdJY2\nOkMMAEPbtZa7p/UBjAP+HDd/O3B7G3WWdMB2D6qNzhBDZ2mjM8TQWdroDDF0ljY6QwydpY3OEEMi\nj0x0Db0JHGVmw8wsD/gS8NsMxCEiImSga8jda8zsG8CfgWxglru/m+44REQkkJFrBO7+LPBsAlUe\n6IDNHmwbnSGGztJGZ4ihs7TRGWLoLG10hhg6SxudIYZ2s7AvSkREIkpDTIiIRFynTgQdMRSFmc0y\ns7Jkbz81s8FmtsDMVpnZu2Y2NYk28s3sDTNbEbZxd5KxZJvZW2b2+yTrbzCzd8xsuZktSbKNIjN7\nwszWmNlqMxuXYP2R4fbrHrvM7FsJtvHt8H1caWZzzSw/sVcBZjY1rP9ue7ff3L5kZr3N7DkzWxs+\n90qijSvCOGJmNjbJOO4L/yZvm9lTZlaUYP1/C+suN7P5ZjYg0Rjiym41Mzezvkm8jrvMbFPc/nFB\nMnGY2S3h+/Gumf1ngjH8Om77G8xseRKvY7SZvV73v2ZmpyTRxglmtij8
n/2dmfVorY2Dkq7bk5K4\ndSobWA98AsgDVgCfSqKdM4GTgJVJxtEfOCmc7g68l2gcgAHdwulcYDFwahKx/CPwK+D3Sb6WDUDf\ng/y7/AL4h3A6Dyg6yL/xFmBoAnUGAu8DBeH848C1CW53FLASKCS4TvY88Mlk9iXgP4Hp4fR04PtJ\ntHEMMBJYCIxNMo7zgJxw+vutxdFC/R5x098EZiYaQ7h8MMGNIB+0ta+1EMddwD8l8Ldsro2zwr9p\nl3C+X6KvI678B8AdScQwHzg/nL4AWJhEG28Cnwmnrwf+LZH9PJFHZz4j6JChKNz9ZeDvyQbh7h+5\n+7JwejewmgS/Ce2BinA2N3wkdHHGzAYBnwd+nki9jmRmPQl22IcA3L3K3XccRJPnAOvd/YME6+UA\nBWaWQ/BhvjnB+scAi919r7vXAC8BX2irUgv70iUEyZHw+dJE23D31e7+13bG3lIb88PXAvA6MCjB\n+rviZrvSxv7Zyv/VfwPT2qrfRhvt1kIbNwL3untluE5ZMjGYmQFXAnOTiMGBuiP4nrSxj7bQxgjg\n5XD6OeDy1to4GJ05EXS6oSjMrAQ4keCIPtG62eEpZhnwnLsn2sYPCf7BYoluO44D881sqQXf3E7U\nMKAceDitbEhUAAAGAUlEQVTsovq5mXU9iHi+RBv/ZI25+ybgv4APgY+Ane4+P8HtrgTOMLM+ZlZI\ncMQ2OME26hzh7h+F01uAI5JspyNdD/wx0Upm9u9mthH4MnBHEvUvATa5+4pE6zbyjbCbalZbXW0t\nGEHw911sZi+Z2clJxnEGsNXd1yZR91vAfeH7+V8EX5xN1LscOPi9guT30TZ15kTQqZhZN+BJ4FuN\njp7axd1r3X00wZHaKWY2KoFtXwiUufvSRLfbyOnufhLByK83m9mZCdbPITh9vd/dTwT2EHSHJMyC\nLxNeDPwmwXq9CP45hgEDgK5mdk0ibbj7aoLuk/nAn4DlQG0ibbTQrpPgmV5HM7PvAjXAnETruvt3\n3X1wWPcbCW63EPhnkkggjdwPDAdGEyT6HyTRRg7QGzgVuA14PDy6T9RVJHigEudG4Nvh+/ltwrPo\nBF0P3GRmSwm6pauSjKVNnTkRbKJhBhwULks7M8slSAJz3H3ewbQVdqUsACYmUG08cLGZbSDoIjvb\nzB5NYtubwucy4CmC7rdElAKlcWczTxAkhmScDyxz960J1vss8L67l7t7NTAPOC3Rjbv7Q+4+xt3P\nBD4muPaTjK1m1h8gfG6xGyLVzOxa4ELgy2FSStYcEu+GGE6QnFeE++kgYJmZHZlII+6+NTxoigEP\nkvg+CsF+Oi/skn2D4Cy61QvXjYXdjl8Afp3E9gEmE+ybEBzsJPw63H2Nu5/n7mMIEtL6JGNpU2dO\nBJ1iKIrwSOIhYLW7z0iyjeK6uzjMrAA4F1jT3vrufru7D3L3EoL34UV3T+go2My6mln3ummCi4sJ\n3Unl7luAjWY2Mlx0DrAqkTbiJHu09SFwqpkVhn+bcwiu2yTEzPqFz0MI/uF/lUQsEOyTk8PpycAz\nSbZzUMxsIkHX4cXuvjeJ+kfFzV5CAvsngLu/4+793L0k3E9LCW6y2JJgHP3jZi8jwX009DTBBWPM\nbATBTQ2JDt72WWCNu5cmsX0Irgl8Jpw+G0i4eyluH80CvgfMTDKWtqXqKnRHPAj6bt8jyITfTbKN\nuQSnmNUEO+cNCdY/neB0/22CLoTlwAUJtnE88FbYxkrauAuhjbYmkMRdQwR3X60IH+8exPs5GlgS\nvpangV5JtNEV2A70TDKGuwk+qFYCjxDeHZJgG38hSGIrgHOS3ZeAPsALBP/ozwO9k2jjsnC6EthK\n3KCMCbSxjuCaWt0+2uJdPy3UfzJ8P98GfgcMTDSGRuUbaPuuoebieAR4J4zjt0D/JNrIAx4NX88y\n4OxEXwcwG/j6QewXpwNLw/1rMTAmiTamEnz+vQfcS/gF4FQ89M1iEZGI68xdQyIikgZKBCIiEadE\nICIScUoEIiIRp0QgIhJxSgQSKWb2WvhcYmZXd3Db/9zctkQ6O90+KpFkZhMIRrm8MIE6OX5gYLfm\nyivcvVtHxCeSTjojkEgxs7pRYO8lGJhsuQW/b5BtwZj+b4YDnn0tXH+Cmf3FzH5L+C1qM3s6HLjv\n3brB+8zsXoIRUZeb2Zz4bVngPgt+/+AdM5sU1/ZCO/D7DnOSHBNH5KBk5DeLRTqB6cSdEYQf6Dvd\n/WQz6wK8amZ1o5qeBIxy9/fD+evd/e/hcCFvmtmT7j7dzL7hwcCCjX2B4BvZJxCMefOmmdUNL3wi\ncCzBkASvEowr9UrHv1yRlumMQCRwHvDVcKjwxQRDR9SNv/NGXBIA+KaZrSAY939w3HotOR2Y68Fg\nalsJfv+gbmjkN9y91INB1pYDJR3yakQSoDMCkYABt7j7nxssDK4l7Gk0/1lgnLvvNbOFQMI/lRmn\nMm66Fv1PSgbojECiajfBGO91/gzcGA45jpmNaOFHd3oCH4dJ4GiCMe/rVNfVb+QvwKTwOkQxwa+8\nvdEhr0KkA+joQ6LqbaA27OKZDfyIoFtmWXjBtpzmf3byT8DXzWw18FeC7qE6DwBvm9kyd/9y3PKn\ngHEEI1E6MM3dt4SJRCTjdPuoiEjEqWtIRCTilAhERCJOiUBEJOKUCEREIk6JQEQk4pQIREQiTolA\nRCTilAhERCLu/wBJpmifKPzW/gAAAABJRU5ErkJggg==\n", 650 | "text/plain": [ 651 | "" 652 | ] 653 | }, 654 | "metadata": {}, 655 | "output_type": "display_data" 656 | } 657 | ], 658 | "source": [ 659 | "# 예제 12-14 반복 횟수에 따른 학습-평가 에러 변화 플롯하기\n", 660 | "from matplotlib import pyplot as plt\n", 661 | "\n", 662 | "x=range(0,20)\n", 663 | "\n", 664 | "plt.xlim(0,20) # x축의 표시 범위를 0-20까지 설정(20은 반복 횟수입니다)\n", 665 | "plt.ylim(0,15) # y축의 표시 범위를 0-15까지 설정\n", 666 | "plt.xlabel('iteration')\n", 667 | "plt.ylabel('MSE')\n", 668 | "plt.xticks(x, range(0,20)) # x축에 표시할 숫자를 0부터 19까지의 정수로 함\n", 669 | "# 평가 에러를 점선으로 표시\n", 670 | "test_plot, = plt.plot(x,test_errors, '--', label='test_error')\n", 671 | "# 학습 에러를 실선으로 표시\n", 672 | "train_plot, = plt.plot(x,train_errors, label='train_error')\n", 673 | 
"plt.legend(handles=[train_plot, test_plot]) # 범례 생성\n", 674 | "plt.show()" 675 | ] 676 | }, 677 | { 678 | "cell_type": "code", 679 | "execution_count": 36, 680 | "metadata": {}, 681 | "outputs": [ 682 | { 683 | "name": "stdout", 684 | "output_type": "stream", 685 | "text": [ 686 | "iteration 0 is completed\n", 687 | "iteration 10 is completed\n", 688 | "Error of rated movies: 5.47677\n", 689 | "iteration 0 is completed\n", 690 | "iteration 10 is completed\n", 691 | "Error of rated movies: 9.05617\n" 692 | ] 693 | } 694 | ], 695 | "source": [ 696 | "R_hat, train_errors = compute_ALS(train, train,20, 50,100)\n", 697 | "R_hat, test_errors = compute_ALS(train, test,20, 50,100)" 698 | ] 699 | }, 700 | { 701 | "cell_type": "code", 702 | "execution_count": 37, 703 | "metadata": {}, 704 | "outputs": [ 705 | { 706 | "data": { 707 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmYFOW59/HvPT37DDsDsoOIiivguCKJe9zXKNG4Jy/R\nGGNyjEYTE5OTnHOZeELMph6NSKKELK5JjiZgBI2CKDsICKIIgywDyjIDzHq/f1TN0LNP9yw9Tf8+\n11VXV9VT9dTdPdVz91PLU+buiIhI6kpLdAAiIpJYSgQiIilOiUBEJMUpEYiIpDglAhGRFKdEICKS\n4pQIRERSnBKBiEiKUyIQEUlx6YkOoDX69u3rw4cPT3QYIiJJZcGCBdvcvaCl5ZIiEQwfPpz58+cn\nOgwRkaRiZh+1ZjkdGhIRSXFKBCIiKU6JQEQkxSXFOQIRSW4VFRUUFRWxb9++RIdyQMrOzmbw4MFk\nZGTEtb4SgYh0uKKiIrp168bw4cMxs0SHc0Bxd7Zv305RUREjRoyIqw4dGhKRDrdv3z769OmjJNAB\nzIw+ffq0qbWlRCAinUJJoOO09bNVIhARSXFKBCIiKa7DEoGZTTGzrWa2vJGyO83MzaxvR21fRKTG\njh07ePjhh2Ne7/zzz2fHjh0dEFHX0pEtgqnAufVnmtkQ4BxgfQduW0SkVlOJoLKystn1XnrpJXr2\n7NkhMdXfdkuxxLpcLDrs8lF3f93MhjdS9HPgbuDFjtq2iHRhL98Dm5e1b50HHQ3nPdBk8T333MPa\ntWsZM2YMGRkZZGdn06tXL1atWsXq1au59NJL2bBhA/v27eOOO+5g0qRJwP5+zkpKSjjvvPM49dRT\nmTNnDoMGDeLFF18kJyen0e2tXbuW2267jeLiYnJzc3n88cc5/PDDufHGG8nOzmbRokWMHz+e7t27\ns3btWj744AOGDh3Kk08+ya233sr8+fNJT09n8uTJnH766UydOpXnnnuOkpISqqqqeO2119r14+vU\n+wjM7BJgo7sv0RUEItJZHnjgAZYvX87ixYuZPXs2F1xwAcuXL6+97n7KlCn07t2bvXv3cvzxx3PF\nFVfQp0+fOnWsWbOG6dOn8/jjj3PVVVfx7LPPcu211za6vUmTJvHoo48yatQo5s2bx1e/+lVeffVV\nILinYs6cOUQiEX7wgx+wYsUK3njjDXJycvjZz36GmbFs2TJWrVrFOeecw+rVqwFYuHAhS5cupXfv\n3u3++XRaIjCzXOA7BIeFWrP8JGASwNChQzswMhHpVM38cu8sJ5xwQp2br375y1/y/PPPA7BhwwbW\nrFnTIBGMGDGCMWPGAHDcccexbt26RusuKSlhzpw5XHnllbXzysrKasevvPJKIpFI7fTFF19c27J4\n4403uP322wE4/PDDGTZsWG0iOPvsszskCUDntghGAiOAmtbAYGChmZ3g7pvrL+zujwGPARQWFnon\nxikiB7i8vLza8dmzZ/PKK68wd+5ccnNzOe200xq9OSsrK6t2PBKJsHfv3kbrrq6upmfPnixevLjF\nbTc23ZqY21unXT7q7svcvZ+7D3f34UARMK6xJCAi0p66devG7t27Gy3buXMnvXr1Ijc3l1WrVvHW\nW2+1aVvdu3dnxIgR/OUvfwGCLiCWLFnSqnUnTJjAtGnTAFi9ejXr16/nsMMOa1M8rdGRl49OB+YC\nh5lZkZl9qaO2JSLSnD59+jB+/HiOOuoo7rrrrjpl5557LpWVlYwePZp77rmHk046qc3bmzZtGk88\n8QTHHnssRx55JC++2LprY7761a9SXV3N0UcfzcSJE5k6dWqdlkhHMfeuf9SlsLDQ9YQykeS1cuVK\nRo8enegwDmiNfcZmtsDdC1taV3cWi4ikOHVDLSISp9tuu40333yzzrw77riDm266KUERxUeJQEQk\nTr/5zW8SHUK70KEhEZEUp0QgIpLilAhERFKcEoGIHPDi7YYa4KGHHmLPnj3tHFHXokQgIge8rpAI\nqqqq6kwnstvp+nTVkIgc8KK7oT777LPp168ff/7znykrK+Oyyy7jhz/8IaWlpVx11VUUFRVRVVXF\n9773PbZs2cLHH3/M6aefTt++fZk1a1aj9c+YMYP777+fsrIyRo4cyZNPPkl+fj7Dhw9n4sSJzJw5\nk7vvvptHH32UMWPG8MYbb3D11VdzxRVXcPPNN7Nt2zYKCgp48sknGTp0aIPuqidPntyhn48SgYh0\nuon/O7fBvAuPGcB1Jw9nb3kVNz75doPyzx83mCsLh/BJaTm3Pr2gTtmfvnJys9uL7oZ6xowZPPPM\nM7z99tu4OxdffDGvv/46xcXFDBw4kP/7v/8Dgj6IevToweTJk5k1axZ9+zb+QMVt27bx4x//mFde\neYW8vDx+8pOfMHnyZL7//e8DQfcWCxcuBODRRx+lvLycmp4SLrroIm644QZuuOEGpkyZwte//nVe\neOEFoG531R1NiUBEUsqMGTOYMWMGY8eOBYJuo9esWcOECRO48847+fa3v82FF17IhAkTWlXfW2+9\nxYoVKxg/fjwA5eXlnHzy/sQ0ceLEOstHT8+dO5fnnnsOgOuuu4677767tqx+d9UdSYlARDpdc7/g\nczIjzZb3zstssQXQHHfn3nvv5Stf+UqDsoULF/LSSy9x3333ceaZZ
9b+qm+pvrPPPpvp06c3Wt4V\nu52uTyeLReSAF90N9ec+9zmmTJlCSUkJABs3bmTr1q18/PHH5Obmcu2113LXXXfVHs5prgtrgJNO\nOok333yT999/H4DS0tLah8m05JRTTuGPf/wjEPRY2tpWSHtTi0BEDnjR3VCfd955XHPNNbWHb/Lz\n83n66ad5//33ueuuu0hLSyMjI4NHHnkECB47ee655zJw4MBGTxYXFBQwdepUrr766tonkf34xz/m\n0EMPbTGuX/3qV9x00008+OCDtSeLE0HdUItIh1M31B1P3VCLiEjcdGhIRKSVTjzxxDoPogd46qmn\nOProoxMUUftQIhARaaV58+YlOoQOoUNDItIpkuF8ZLJq62erRCAiHS47O5vt27crGXQAd2f79u1k\nZ2fHXUeHHRoysynAhcBWdz8qnPcgcBFQDqwFbnL3HR0Vg4h0DYMHD6aoqIji4uJEh3JAys7OZvDg\nwXGv35HnCKYCvwZ+HzVvJnCvu1ea2U+Ae4Fvd2AMItIFZGRkMGLEiESHIU3osEND7v468Em9eTPc\nvaZP1beA+FOYiIi0i0SeI7gZeDmB2xcRERKUCMzsu0AlMK2ZZSaZ2Xwzm6/jiiIiHafTE4GZ3Uhw\nEvmL3swlBO7+mLsXunthQUFBp8UnIpJqOvWGMjM7F7gb+Ky7H9gPARURSRId1iIws+nAXOAwMysy\nsy8RXEXUDZhpZovN7NGO2r6IiLROh7UI3P3qRmY/EU9dG3fsxd0xszZGJSIi9SXFncWflJYze7VO\nGIuIdISkSAQZkTQeemWNbk8XEekASZEI+nXLYsmGHcx+T60CEZH2lhSJoFdeJoN75fDQK6vVKhAR\naWdJ8TwCA7597uFs2bWPqmonPaKTxiIi7SUpEgHARccOTHQIIiIHpKQ4NFSjsqqaP8/fwJy12xId\niojIASOpEoEDv/zXGh54eZXOFYiItJOkSgQZkTRuP+MQlhbt5NVVWxMdjojIASGpEgHA5eMGM6R3\nju4rEBFpJ0mXCDIiadx++iiWbVSrQESkPSRdIgC4bNwgzhrdj+yMSKJDERFJeklz+Wi0jEgav73h\n+ESHISJyQEjKFkGNXfsqePqtj3SuQESkDZI6Ecx8dwv3vbCcV1bqXIGISLySOhFcMmYgw/rkqg8i\nEZE2SOpEkB5J4/YzRvHux7uYuWJLosMREUlKSZ0IAC4dM5DhfXJ1X4GISJySPhHUtAr6dsti197K\nRIcjIpJ0kvLy0fouHzeIK44bnOgwRESSUoe1CMxsipltNbPlUfN6m9lMM1sTvvZqp20BUPTpHpYV\n7WyPKkVEUkZHHhqaCpxbb949wL/cfRTwr3C6Xbg7N099h7ueWUJ1tc4ViIi0VoclAnd/Hfik3uxL\ngN+F478DLm2v7ZkZt3x2JKs272bGis3tVa2IyAGvs08W93f3TeH4ZqB/e1Z+8bEDObhvHg+9skat\nAhGRVkrYVUMeXOvZ5H9rM5tkZvPNbH5xcXGr6kyPpHH7mYewavNu/vmuWgUiIq3R2Ylgi5kNAAhf\nm+wbwt0fc/dCdy8sKCho9QYuPnYQh/bPZ21xSdujFRFJAZ19+ehfgRuAB8LXF9t7A5E04++3TyAz\nPelvkRAR6RQdefnodGAucJiZFZnZlwgSwNlmtgY4K5xudzVJYPnGnTpXICLSgg5rEbj71U0UndlR\n24w25/1tXPPbeTz8xXGcf/SAztikiEhSSo7jJ7s3tbxMPSce3IeRBXn8QlcQiYg0KzkSQek2qCyP\naZVImvH1M0fx3pbdvLxcVxCJiDQlORJBdSW891LMq114zEAO6ZfPL/61Wq0CEZEmJEciiGTCoqdi\nXy1sFaSZsWtfBQBLNuxgdzguIiLJ0vtobm94/1+wYwP0HBLTqhccPYBeuRn0zM2ksqqaiY/Npayy\nmlH98hk3tBdjh/bklJF9GdI7t4OCFxHp2pKjRZDbJ3hdPC3mVSNpxoRR+29Ie/z6Qr5x5qEM7JnD\ny8s38+1nl/Hcwo0A7NpXweSZq5n13lZ27IntnISISLJKjhZBJBNGng6LnobP3AVpkbiqSY+kMWFU\nQW1iqK52PtxeSl5m8DGs3rybX7+6hprTCQcX5DFuaC8mfeZgDu3fjT3lleyrqCY/K103rInIASM5\nEgHAuOvhLzfCB7PhkPa5FSEtzRhZkF87XTi8N0t/8DmWFu1g0fodLFr/Ka+u2sqNpwwH4MXFH3Pv\nc8sAyIykkZcVIS8rnWlfPpFhffKY8e5mXli8kbzMdPKz08nPSicvK51rTxpGflY6a7bsZt32PaSn\nGWlpFryaccKI3kTSjI079vJpaTmRNKsd0tOMYX3ygKDFUlZRjRkYQY+raQY9czMB2FNeSUWVEz6e\nAQPSzMjLCv7M+yqqcKe2vEZ2RpBYyyqD8mhmkJXeuvK95VV4ve6j0sxq6y8tq2zQuVR62v7ymvM4\n0dvIjKSRkxnB3dm5d/+5nZplsjLSyM1Mp7ra2bG3Zv39FeRkRsjNTKeq2vmkNGjlRceYn5VObmY6\nFVXVteXRumdnkJMZoayyav/6UfH1ys0kJzPCvooqtteW71+gb34W2RkR9pRXsr2kYf0F3YLykrJK\ntpeUNSg/qEc2WekRdu6taDS+wb1yyIik8WlpOZ800ood1juX9Ega20vK+HRPzee3P76D++aTlmZs\n3b2PnXsanjsb1b8bAJt27m3wBMBIGhzSLygv+nQPu/fVLc+IWG35+u17KCmrW56ZnsYh/YLv34fb\nStlTXlnns83JjNR+P9/fupt9FdV11s/LSmdE3+C78d7m3ZRX1i3vnpNe+91ZvnEnVfUuGOmVm8nQ\nPsEh4aVFO6h/PUmfvEyG9M6lutpZUrSjwWfTr3s2g3rmUFlVzdKNDZ+DMrBHDgf1yKassorlG3c1\nKB/SK4d+3bPZW17Fik0N1x83tFfts1Y6WvIkgsPOh5zesPD37ZYIGpOflc4pI/tyysi+QN0v9XHD\nenH/RUdQWlZJSVkVpWWVlJZV1v6j3bGngvc276Y0LCsJd+yJhUMgC55ftJGHZ69tsM1VPzqXSFqE\nx1//gKlz1tUpi6QZa//7fAD+828reGZBUZ3yHjkZLLn/HADu/POSBpfKDuqZw5v3nAHA//v9fP69\nZlud8lH98pn5H58F4JrH57Hgo0/rlI8Z0pMXbhsPwCW/fpNVm3fXKT/1kL48/eUTATj7569R9One\nOuWfO7I//3tdIQATfjqrwT+zy8cNYvJVYwAo/PErDb7M1500jB9dehSV1c6Y/5xJfbd8diT3nHc4\nu/ZVMO5HDcu/dc6hfO2MUWzauZdTfzKrQfn9Fx3BTeNH8OG2Us75+esNyn/6+WO4qnAI7368i8sf\nntOg/DfXjOOCYwbwzrpPuO6JtxuUT73peE47rB+vry7mlqcXNih/5paTKRzem38s38y3/rKkQfnL\nd0xg9IDuvLh4I99/8d0G
5f+++3SG9M5l+jvr+ek/3mtQvuC+s+iTn8WUNz/kN7Ma3/ey0yI8PGtt\ng30vPc14P9z3fjZjdbP73n+/tJKXljW979334nJeX12388jofe9bf1nS7L73tT8sanbf+/Lv32HD\nJ03ve9dPebvhvjd2EJMnBvve5x+d2+S+V+XOZY387Wv2vZKyykb3jZp9b1tJOVc80rC8Zt/b8Oke\nrnhkboPyD/77/AY/2jqKJcMD3wsLC33+/Pnwj+/A24/Bnasgr2+iw2qRu7O3ooqcjAhmxpZd+yje\nXUZltVNVXU1VNVRWV3PSiD6kpRnvbd7NR9tLqXYPl3Hc4dKxg4Dgbum1xSU4wa9SdyczPcI1Jw4F\n4F8rt/DhttKo7Qe/mmrK/7704wZfll65GXzhhKD8uYVFbN61r055/27ZtY8B/dM762t/9RrBHjq4\nVw4XHTsQgGnzPqKk3q/C4X3z+NyRBwHw+7nrKKv3q+6Q/vmcflg/AJ5888PaX201v4RGH9SNUw7p\nS3W187u56+qsa8DRg3tw3LDe7Kuo4k/vbNhfFn6BxgzpyTGDe1JSVsnzizbWWRegcHgvDj+oOzv2\nlDf4RwZw4sG9GVmQz7aSMl5ZsaVB/TUXGmzeua/uP7qwfMKovgzokUPRp3t464P6j+eA0w4roG9+\nFh9tL23wjxDgzMP70yM3g7XFJY0+fe/sI/qTF7Y2V2xq+Kvzc0ceRHZGhJWbdrFm6/6OGGve//lH\nDyCSZizfuJN120vrrGsYFxwT3JW/ZMMONu6ou++kpxnnhH/bhes/ZcvOfXX+cWVlRGr/tvPXfcK2\nei2i/Kx0Th0VfI/nfbC9tkVXU0WPnAxOPDg4Pzhn7TZKy6rqrN87L5PjhgUPOXxjzTb2VdQtL+iW\nxbFDegLw+upiKqvr7nv9u2dz5MAeAMx6b2uDvpAH9szhsIO6UV3tvLamYQ/IQ3vnMrIgn4qqat58\nf1uD8hF98xjWJ499FVXM+7Dh335kQR6De+VSWlbZ6N9+wqi+bW4RmNkCdy9scbmkSgRbV8LDJ8E5\n/wWnfC3RYYmIdGmtTQTJdcaz32gYfEJwT0ESJDARkWSQXIkAYNx1ULwKit5JdCQiIgeE5EsER14O\nmfmw8HctLysiIi1KvkSQlQ9HXQ7Ln4ey3S0vLyIizUq+RAAw7gaoKIXlzyU6EhGRpJeciWDQcVAw\nOrinQERE2iQ5E4FZcKfxxvmwpeFNNiIi0nrJmQgAjpkY9EG0MPbuqUVEZL/kTQR5feDwC2HpH6Gy\nYR8tIiLSOglJBGb2TTN718yWm9l0M8uOq6Jx18PeT2HV39s5QhGR1NHpicDMBgFfBwrd/SggAnwh\nrspGfBZ6DtVJYxGRNkjUoaF0IMfM0oFc4OO4aklLg7HXBV1Tf7qu/aITEUkhzSYCM7s2anx8vbK4\nen1z943A/wDrgU3ATnefEU9dAIy5BiwteGiNiIjErKUWwX9Ejf+qXtnN8WzQzHoBlwAjgIFAXnTC\niVpukpnNN7P5xcUNu4Ct1WMwHHIWLJoG1VVNLyciIo1qKRFYE+ONTbfWWcCH7l7s7hXAc8Ap9Rdy\n98fcvdDdCwsKChpUUsfY62D3x8ED7kVEJCYtJQJvYryx6dZaD5xkZrkWPHXhTGBlnHUFDj0X8grU\nEZ2ISBxaelTl4Wa2lODX/8hwnHD64Hg26O7zzOwZYCFQCSwCHounrlrpmXDs1fDWw1CyFfL7tak6\nEZFU0lIiGN0RG3X3+4H727XScdfDnF/Ckukw/o52rVpE5EDW7KEhd/8oegBKgHFA33C66+g7Coae\nHNxToKeXiYi0WkuXj/7dzI4KxwcAywmuFnrKzL7RCfHFZtz1sP19WD830ZGIiCSNlk4Wj3D35eH4\nTcBMd78IOJE4Lx/tUEdcAlnddaexiEgMWkoEFVHjZwIvAbj7bqC6o4KKW2YeHP15ePcF2Lcz0dGI\niCSFlhLBBjO73cwuIzg38A8AM8sBMjo6uLiMvQ4q98KyZxIdiYhIUmgpEXwJOBK4EZjo7jvC+ScB\nT3ZgXPEbOBb6H63DQyIirdTs5aPuvhW4pZH5s4BZHRVUm9Q8vezlu2DTUhhwTKIjEhHp0ppNBGb2\n1+bK3f3i9g2nnRxzJcy4DxY9BQMeTHQ0IiJdWks3lJ0MbACmA/OIv3+hzpXTK7iCaOmf4Oz/hIyc\nREckItJltXSO4CDgO8BRwC+As4Ft7v6au7/W0cG1ybjrgiuHVv4t0ZGIiHRpLd1ZXOXu/3D3GwhO\nEL8PzI73WQSdatip0GuEThqLiLSgxSeUmVmWmV0OPA3cBvwSeL6jA2uztLSgVbDu37B9baKjERHp\nslrqYuL3wFyCewh+6O7Hu/uPwqeMdX3HXgMW0dPLRESa0VKL4FpgFHAHMMfMdoXDbjPb1fHhtVH3\nATDqHFg8DaoqEx2NiEiX1NI5gjR37xYO3aOGbu7evbOCbJNx10PJFnjlfihp5pGXIiIpqsVzBElv\n1Dkw+mKY+2v4+ZHwwm2weVmioxIR6TJauo8g+UXSYeJTULwa3v5fWPwHWPx0cFXRiV+Bwy+AtEii\noxQRSRjzJHiIS2Fhoc+fP799Ktu7I7jjeN5jsHM99BgKJ/y/4BBSTs/22YaISBdgZgvcvbDF5VIu\nEdSoqoT3XoJ5j8JHb0JGHoy5Gk68JXjamYhIkmttIjjwDw01JZIOR1wcDJuWBglh4e/hnd/CIWfB\nibfCyDOC+xFERA5gCfkvZ2Y9zewZM1tlZivN7ORExFFrwDFw6cPwzRVw+neDk8nTroDfnABvPw5l\nJQkNT0SkIyXk0JCZ/Q74t7v/1swygdyoZx000CGHhppTWQ4rXoC3HoGPF0JWDxhzDYyYEDzvoNuA\noLtrEZEurMueIzCzHsBi4GBv5cY7PRHUcIeid4KEsPKvUB3elJbfP0gIA8fCgDFhcujf+fGJiDSj\nK58jGAEUA0+a2bHAAuAOdy9NQCzNM4MhJwRD+R7Yshw+XrR/WP1PIMxl3QbuTw4Dx8LAMZDXN6Hh\ni4i0RiJaBIXAW8B4d59nZr8Adrn79+otNwmYBDB06NDjPvroo06Ns1XKSoLzCdHJYfua/eU9hgQJ\noSY59D8K8gp0WElEOkVXPjR0EPCWuw8PpycA97j7BU2tk7BDQ/HYtws2LYFNi/cnh08+2F8eyYIe\ng6DH4CBRdK8Zjxoy8xIXv4gcMLrsoSF332xmG8zsMHd/DzgTWNHZcXSY7O7BSeURE/bP2/tpkBy2\nroJdRbBzI+wsgrWzYPcmag8v1cjpFSSE7oMbJon8fkF5Vg9d2ioi7SJR9xHcDkwLrxj6ALgpQXF0\njpxecPBpwVBfVUWQDHYWNRx2rIf1c4InrdVnaUG9Ob0gp3fwmts7arpn3ema8cx8HZoSkToSkgjc\nfTHQYnMlJUQyoOfQYGjKvl2wK2xFlBYHLYw9nwSve8PX3Ztg68pguryZ+x7SMiArP
0gImXlRQ7d6\n0/nNjOdCenbUkBW8RlL3/kSRZKZvbjLI7h4M/Ua3bvnK8jBJRCWK6MRRXhqc6C4vCcbLS2HP+rrT\nFXFcxJWWXjcxNPcayYJIZpA80jL2j0cyw+maITOot854ZjCdlhF0GJiW3shr9BDOs8aWiQStK4vo\nUJukLCWCA1F6ZnBfQ1vubaiuDpJBTWKIThLlJVBZBpX7mnltZF7F3iAZVZYFQ3UlVJUHh8eqKqC6\nIpj26vb7LGJlYWJIi4TJIRIcSqsdjy6Pem0wWBPzm1gGqzuvzrS1UF4zXbMc+6cblFkLZeyfjh5v\n9pVm5lGvvJHxprZZO15/uSam6x/yrL9srPU1WKex5VqxbIP5LZWF8vo2fji5AygRSOPS0iCrWzB0\ntuqqqMRQM5Q3Ml0VJJPaoQq8kXkNpmvmVQRJp7o6XK8qmK4zXh1Vb3R51HL4/mW9OrgRsc50/cGj\ntlsdVUc4v7Y+6k3XL496rVm//mvt8s0t01gZLa9X/yIHaV9DT1EikBSWFv76JjvRkUhreb3k0Opx\naDz50Phydaajtt2qZVtappl1GluuVcs2oql16pdldN7+r0QgIm1njRyakaShs2MiIilOiUBEJMUp\nEYiIpDglAhGRFKdEICKS4pQIRERSnBKBiEiKUyIQEUlxSgQiIilOiUBEJMUpEYiIpDglAhGRFKdE\nICKS4pQIRERSnBKBiEiKS1giMLOImS0ys78nKgYREUlsi+AOYGUCty8iIiQoEZjZYOAC4LeJ2L6I\niOyXqBbBQ8DdQHWCti8iIqFOTwRmdiGw1d0XtLDcJDObb2bzi4uLOyk6EZHUk4gWwXjgYjNbB/wR\nOMPMnq6/kLs/5u6F7l5YUFDQ2TGKiKSMTk8E7n6vuw929+HAF4BX3f3azo5DREQCuo9ARCTFpSdy\n4+4+G5idyBhERFKdWgQiIilOiUBEJMUpEYiIpDglAhGRFKdEICKS4pQIRERSnBKBiEiKUyIQEUlx\nSgQiIilOiUBEJMUpEYiIpDglAhGRFKdEICKS4pQIRERSnBKBiEiKUyIQEUlxSgQiIilOiUBEJMUp\nEYiIpDglAhGRFNfpicDMhpjZLDNbYWbvmtkdnR2DiIjsl56AbVYCd7r7QjPrBiwws5nuviIBsYiI\npLxObxG4+yZ3XxiO7wZWAoM6Ow4REQkk9ByBmQ0HxgLzEhmHiEgqS1giMLN84FngG+6+q5HySWY2\n38zmFxcXd36AIiIpIiGJwMwyCJLANHd/rrFl3P0xdy9098KCgoLODVBEJIUk4qohA54AVrr75M7e\nvoiI1JWIFsF44DrgDDNbHA7nJyAOEREhAZePuvsbgHX2dkVEpHG6s1hEJMUpEYiIpDglAhGRFKdE\nICKS4pQIRERSnBKBiEiKUyIQEUlxSgQiIilOiUBEJMUpEYiIpDglAhGRFKdEICKS4pQIRERSnBKB\niEiKUyLIlqF/AAAJ+klEQVQQEUlxSgQiIilOiUBEJMUpEYiIpDglAhGRFKdEICKS4hKSCMzsXDN7\nz8zeN7N7EhGDiIgEOj0RmFkE+A1wHnAEcLWZHdHZcYiISCARLYITgPfd/QN3Lwf+CFySgDhERITE\nJIJBwIao6aJwnoiIJEB6ogNoiplNAiaFk2VmtryNVfYFtiVw/QOpjq4QQ1epoyvE0FXq6AoxdJU6\nukIMAMNatZS7d+oAnAz8M2r6XuDeFtaZ3w7bbVMdXSGGrlJHV4ihq9TRFWLoKnV0hRi6Sh1dIYZY\nhkQcGnoHGGVmI8wsE/gC8NcExCEiIiTg0JC7V5rZ14B/AhFgiru/29lxiIhIICHnCNz9JeClGFZ5\nrB0229Y6ukIMXaWOrhBDV6mjK8TQVeroCjF0lTq6QgytZuGxKBERSVHqYkJEJMV16UTQHl1RmNkU\nM9sa7+WnZjbEzGaZ2Qoze9fM7oijjmwze9vMloR1/DDOWCJmtsjM/h7n+uvMbJmZLTaz+XHW0dPM\nnjGzVWa20sxOjnH9w8Lt1wy7zOwbMdbxzfBzXG5m080sO7Z3AWZ2R7j+u63dfmP7kpn1NrOZZrYm\nfO0VRx1XhnFUm1lhnHE8GP5NlprZ82bWM8b1fxSuu9jMZpjZwFhjiCq708zczPrG8T5+YGYbo/aP\n8+OJw8xuDz+Pd83spzHG8Keo7a8zs8VxvI8xZvZWzXfNzE6Io45jzWxu+J39m5l1b66ONumsy5Pi\nuHQqAqwFDgYygSXAEXHU8xlgHLA8zjgGAOPC8W7A6ljjAAzID8czgHnASXHE8h/AH4C/x/le1gF9\n2/h3+R3w5XA8E+jZxr/xZmBYDOsMAj4EcsLpPwM3xrjdo4DlQC7BebJXgEPi2ZeAnwL3hOP3AD+J\no47RwGHAbKAwzjjOAdLD8Z80F0cT63ePGv868GisMYTzhxBcCPJRS/taE3H8APhWDH/Lxuo4Pfyb\nZoXT/WJ9H1HlPwO+H0cMM4DzwvHzgdlx1PEO8Nlw/GbgR7Hs57EMXblF0C5dUbj768An8Qbh7pvc\nfWE4vhtYSYx3QnugJJzMCIeYTs6Y2WDgAuC3sazXnsysB8EO+wSAu5e7+442VHkmsNbdP4pxvXQg\nx8zSCf6Zfxzj+qOBee6+x90rgdeAy1taqYl96RKC5Ej4emmsdbj7Snd/r5WxN1XHjPC9ALwFDI5x\n/V1Rk3m0sH828736OXB3S+u3UEerNVHHrcAD7l4WLrM1nhjMzICrgOlxxOBAzS/4HrSwjzZRx6HA\n6+H4TOCK5upoi66cCLpcVxRmNhwYS/CLPtZ1I2ETcysw091jreMhgi9YdazbjuLADDNbYMGd27Ea\nARQDT4aHqH5rZnltiOcLtPAlq8/dNwL/A6wHNgE73X1GjNtdDkwwsz5mlkvwi21IjHXU6O/um8Lx\nzUD/OOtpTzcDL8e6kpn9l5ltAL4IfD+O9S8BNrr7kljXredr4WGqKS0damvCoQR/33lm9pqZHR9n\nHBOALe6+Jo51vwE8GH6e/0Nw42ys3mX/j98riX8fbVFXTgRdipnlA88C36j366lV3L3K3ccQ/FI7\nwcyOimHbFwJb3X1BrNut51R3H0fQ8+ttZvaZGNdPJ2i+PuLuY4FSgsMhMbPgZsKLgb/EuF4vgi/H\nCGAgkGdm18ZSh7uvJDh8MgP4B7AYqIqljibqdWJs6bU3M/suUAlMi3Vdd/+uuw8J1/1ajNvNBb5D\nHAmknkeAkcAYgkT/szjqSAd6AycBdwF/Dn/dx+pqYvyhEuVW4Jvh5/lNwlZ0jG4GvmpmCwgOS5fH\nGUuLunIi2EjdDDg4nNfpzCyDIAlMc/fn2lJXeChlFnBuDKuNBy42s3UEh8jOMLOn49j2xvB1K/A8\nweG3WBQBRVGtmWcIEkM8zgMWuvuWGNc7C/jQ3YvdvQJ4Djgl1o27+xPufpy7fwb4lODcTzy2mNkA\ngPC1ycMQHc3MbgQuBL4YJqV4TSP2wxAj
CZLzknA/HQwsNLODYqnE3beEP5qqgceJfR+FYD99Ljwk\n+zZBK7rZE9f1hYcdLwf+FMf2AW4g2Dch+LET8/tw91Xufo67H0eQkNbGGUuLunIi6BJdUYS/JJ4A\nVrr75DjrKKi5isPMcoCzgVWtXd/d73X3we4+nOBzeNXdY/oVbGZ5ZtatZpzg5GJMV1K5+2Zgg5kd\nFs46E1gRSx1R4v21tR44ycxyw7/NmQTnbWJiZv3C16EEX/g/xBELBPvkDeH4DcCLcdbTJmZ2LsGh\nw4vdfU8c64+KmryEGPZPAHdf5u793H14uJ8WEVxksTnGOAZETV5GjPto6AWCE8aY2aEEFzXE2nnb\nWcAqdy+KY/sQnBP4bDh+BhDz4aWofTQNuA94NM5YWtZRZ6HbYyA4druaIBN+N846phM0MSsIds4v\nxbj+qQTN/aUEhxAWA+fHWMcxwKKwjuW0cBVCC3WdRhxXDRFcfbUkHN5tw+c5BpgfvpcXgF5x1JEH\nbAd6xBnDDwn+US0HniK8OiTGOv5NkMSWAGfGuy8BfYB/EXzRXwF6x1HHZeF4GbCFqE4ZY6jjfYJz\najX7aJNX/TSx/rPh57kU+BswKNYY6pWvo+WrhhqL4ylgWRjHX4EBcdSRCTwdvp+FwBmxvg9gKnBL\nG/aLU4EF4f41DzgujjruIPj/txp4gPAG4I4YdGexiEiK68qHhkREpBMoEYiIpDglAhGRFKdEICKS\n4pQIRERSnBKBpBQzmxO+Djeza9q57u80ti2Rrk6Xj0pKMrPTCHq5vDCGddJ9f8dujZWXuHt+e8Qn\n0pnUIpCUYmY1vcA+QNAx2WILnm8QsaBP/3fCDs++Ei5/mpn928z+SngXtZm9EHbc925N531m9gBB\nj6iLzWxa9LYs8KAFzz9YZmYTo+qebfuf7zAtzj5xRNokIc8sFukC7iGqRRD+Q9/p7sebWRbwppnV\n9Go6DjjK3T8Mp29290/C7kLeMbNn3f0eM/uaBx0L1nc5wR3ZxxL0efOOmdV0LzwWOJKgS4I3CfqV\neqP9365I09QiEAmcA1wfdhU+j6DriJr+d96OSgIAXzezJQT9/g+JWq4ppwLTPehMbQvB8w9qukZ+\n292LPOhkbTEwvF3ejUgM1CIQCRhwu7v/s87M4FxCab3ps4CT3X2Pmc0GYn5UZpSyqPEq9J2UBFCL\nQFLVboI+3mv8E7g17HIcMzu0iYfu9AA+DZPA4QR93teoqFm/nn8DE8PzEAUET3l7u13ehUg70K8P\nSVVLgarwEM9U4BcEh2UWhidsi2n8sZP/AG4xs5XAewSHh2o8Biw1s4Xu/sWo+c8DJxP0ROnA3e6+\nOUwkIgmny0dFRFKcDg2JiKQ4JQIRkRSnRCAikuKUCEREUpwSgYhIilMiEBFJcUoEIiIpTolARCTF\n/X+sZWxSH4LOBQAAAABJRU5ErkJggg==\n", 708 | "text/plain": [ 709 | "" 710 | ] 711 | }, 712 | "metadata": {}, 713 | "output_type": "display_data" 714 | } 715 | ], 716 | "source": [ 717 | "plt.clf()\n", 718 | "from matplotlib import pyplot as plt\n", 719 | "\n", 720 | "x=range(0,20)\n", 721 | "\n", 722 | "plt.xlim(0,20) # x축의 표시 범위를 0-20까지 설정(20은 반복 횟수입니다)\n", 723 | "plt.ylim(0,15) # y축의 표시 범위를 0-15까지 설정\n", 724 | "plt.xlabel('iteration')\n", 725 | "plt.ylabel('MSE')\n", 726 | "plt.xticks(x, range(0,20)) # x축에 표시할 숫자를 0부터 19까지의 정수로 함\n", 727 | "# 평가 에러를 점선으로 표시\n", 728 | "test_plot, = plt.plot(x,test_errors, '--', label='test_error')\n", 729 | "# 학습 에러를 실선으로 표시\n", 730 | "train_plot, = plt.plot(x,train_errors, label='train_error')\n", 731 | "plt.legend(handles=[train_plot, test_plot]) # 범례 생성\n", 732 | "plt.show()" 733 | ] 734 | }, 735 | { 736 | "cell_type": "code", 737 | "execution_count": 38, 738 | "metadata": {}, 739 | "outputs": [], 740 | "source": [ 741 | "# 예제 12-15 사용자에게 영화 추천하기\n", 742 | "# X, Y를 임의의 수로 초기화하므로 추천 결과가 그때그때 달라질 수 있습니다.\n", 743 | "\n", 744 | "# 근사 행렬의 가장 작은 값을 0으로 만들기 위해 전체 항의 값에서 작은 값을 뺍니다.\n", 745 | "R_hat -= np.min(R_hat)\n", 746 | "\n", 747 | "# 근사 행렬의 가장 큰 값을 5로 만들고자 5를 가장 큰 예측값(np.max(R_hat))으로 나눈 값을 곱합니다.\n", 748 | "# 예를 들어 가장 큰 예측값이 3일 경우 3을 5로 만들기 위해서는 5/3을 곱하면 됩니다.\n", 749 | "# 위에서 구한 값을 예측 행렬의 모든 항에 곱합니다.\n", 750 | "R_hat *= float(5) / np.max(R_hat)\n", 751 | "\n", 752 | "def recommend_by_user(user):\n", 753 | " # 사용자의 ID를 입력으로 받아 그 사용자가 보지 않은 영화를 추천합니다.\n", 754 | " user_index = user-1\n", 755 | " user_seen_movies = sorted(list(enumerate(R_hat[user_index])),key=lambda x:x[1], reverse=True)\n", 756 | " recommended=1\n", 757 | " print(\"-----recommendation for user %d------\"%(user))\n", 758 | " for movie_info in user_seen_movies:\n", 759 | " if W[user][movie_info[0]]==0:\n", 760 | " movie_title= movie_info_li[int(movie_info[0]+1)]\n", 761 | " movie_score= movie_info[1]\n", 762 | " print(\"rank %d recommendation:%s(%.3f)\"%(recommended,movie_title[0], movie_score))\n", 763 | " recommended+=1\n", 764 | " if recommended==6:\n", 765 | " break" 
766 | ] 767 | }, 768 | { 769 | "cell_type": "code", 770 | "execution_count": 43, 771 | "metadata": {}, 772 | "outputs": [ 773 | { 774 | "name": "stdout", 775 | "output_type": "stream", 776 | "text": [ 777 | "-----recommendation for user 1------\n", 778 | "rank 1 recommendation:So I Married an Axe Murderer (1993)(6.426)\n", 779 | "rank 2 recommendation:Wrong Trousers, The (1993)(6.422)\n", 780 | "rank 3 recommendation:French Twist (Gazon maudit) (1995)(6.259)\n", 781 | "rank 4 recommendation:Nikita (La Femme Nikita) (1990)(6.203)\n", 782 | "rank 5 recommendation:Full Monty, The (1997)(5.728)\n" 783 | ] 784 | } 785 | ], 786 | "source": [ 787 | "recommend_by_user(1)" 788 | ] 789 | }, 790 | { 791 | "cell_type": "code", 792 | "execution_count": null, 793 | "metadata": {}, 794 | "outputs": [], 795 | "source": [] 796 | } 797 | ], 798 | "metadata": { 799 | "kernelspec": { 800 | "display_name": "Python 3", 801 | "language": "python", 802 | "name": "python3" 803 | }, 804 | "language_info": { 805 | "codemirror_mode": { 806 | "name": "ipython", 807 | "version": 3 808 | }, 809 | "file_extension": ".py", 810 | "mimetype": "text/x-python", 811 | "name": "python", 812 | "nbconvert_exporter": "python", 813 | "pygments_lexer": "ipython3", 814 | "version": "3.6.2" 815 | } 816 | }, 817 | "nbformat": 4, 818 | "nbformat_minor": 1 819 | } 820 | -------------------------------------------------------------------------------- /chapter12/ml-100k-plot.txt.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/your-first-ml-book/Examples/42d4de60f27b47fc69f02edf6e02840cb1701386/chapter12/ml-100k-plot.txt.zip -------------------------------------------------------------------------------- /chapter12/ml2imdb.id.final: -------------------------------------------------------------------------------- 1 | 1|0114709 2 | 2|0113189 3 | 3|0113101 4 | 4|0113161 5 | 5|0112722 6 | 6|0115012 7 | 7|0114746 8 | 8|0112431 9 | 9|0112818 10 | 10|0114279 11 | 11|0114369 12 | 12|0114814 13 | 13|0113819 14 | 14|0110877 15 | 15|0113862 16 | 16|0113149 17 | 17|0116367 18 | 18|0112445 19 | 19|0112379 20 | 20|0112365 21 | 21|0117110 22 | 22|0112573 23 | 23|0075314 24 | 24|0113326 25 | 25|0115685 26 | 26|0112585 27 | 27|0112442 28 | 28|0112384 29 | 29|0112462 30 | 30|0061395 31 | 31|0112740 32 | 32|0109508 33 | 33|0112851 34 | 34|0112887 35 | 35|0113114 36 | 36|0113729 37 | 37|0110620 38 | 38|0113957 39 | 39|0114558 40 | 40|0114682 41 | 41|0112508 42 | 42|0109445 43 | 43|0109635 44 | 44|0109642 45 | 45|0111797 46 | 46|0109759 47 | 47|0109707 48 | 48|0110057 49 | 49|0110099 50 | 50|0076759 51 | 51|0110322 52 | 52|0110428 53 | 53|0110632 54 | 54|0114069 55 | 55|0082949 56 | 56|0110912 57 | 57|0110889 58 | 58|0110932 59 | 59|0111495 60 | 60|0108394 61 | 61|0111507 62 | 62|0111282 63 | 63|0111070 64 | 64|0111161 65 | 65|0108550 66 | 66|0114924 67 | 67|0109040 68 | 68|0109506 69 | 69|0109830 70 | 70|0109831 71 | 71|0110357 72 | 72|0110475 73 | 73|0110478 74 | 74|0059170 75 | 75|0109339 76 | 76|0106519 77 | 77|0106918 78 | 78|0106965 79 | 79|0106977 80 | 80|0107144 81 | 81|0110074 82 | 82|0107290 83 | 83|0107616 84 | 84|0111003 85 | 85|0110955 86 | 86|0107943 87 | 87|0108065 88 | 88|0108160 89 | 89|0083658 90 | 90|0108174 91 | 91|0107688 92 | 92|0108399 93 | 93|0114906 94 | 94|0099785 95 | 95|0827990 96 | 96|0103064 97 | 97|0099348 98 | 98|0102926 99 | 99|0029583 100 | 100|0116282 101 | 101|0082509 102 | 102|0065421 103 | 103|0115509 104 | 104|0114658 105 | 105|0117608 106 | 
106|0116095 107 | 107|0117071 108 | 108|0116768 109 | 109|0117128 110 | 110|0114048 111 | 111|0117979 112 | 112|0116322 113 | 113|0113362 114 | 114|0118114 115 | 115|0113270 116 | 116|0112701 117 | 117|0117500 118 | 118|0117998 119 | 119|0110480 120 | 120|0117765 121 | 121|0116629 122 | 122|0115798 123 | 123|0116365 124 | 124|0116905 125 | 125|0117333 126 | 126|0117718 127 | 127|0068646 128 | 128|0104558 129 | 129|0115736 130 | 130|0116745 131 | 131|0054698 132 | 132|0032138 133 | 133|0031381 134 | 134|0033467 135 | 135|0062622 136 | 136|0031679 137 | 137|0115678 138 | 138|0116000 139 | 139|0064603 140 | 140|0107131 141 | 141|0046672 142 | 142|0066817 143 | 143|0059742 144 | 144|0095016 145 | 145|0104692 146 | 146|0118044 147 | 147|0116908 148 | 148|0116409 149 | 149|0116722 150 | 150|0117802 151 | 151|0067992 152 | 152|0070707 153 | 153|0095159 154 | 154|0079470 155 | 155|0092890 156 | 156|0105236 157 | 157|0091763 158 | 158|0098627 159 | 159|0103772 160 | 160|0104348 161 | 161|0092099 162 | 162|0082846 163 | 163|0072081 164 | 164|0096754 165 | 165|0091288 166 | 166|0091480 167 | 167|0081375 168 | 168|0071853 169 | 169|0108598 170 | 170|0095765 171 | 171|0101700 172 | 172|0080684 173 | 173|0093779 174 | 174|0082971 175 | 175|0088846 176 | 176|0090605 177 | 177|0060196 178 | 178|0050083 179 | 179|0066921 180 | 180|0078788 181 | 181|0086190 182 | 182|0099685 183 | 183|0078748 184 | 184|0106308 185 | 185|0054215 186 | 186|0080455 187 | 187|0071562 188 | 188|0093058 189 | 189|0104361 190 | 190|0097499 191 | 191|0086879 192 | 192|0081398 193 | 193|0086197 194 | 194|0070735 195 | 195|0088247 196 | 196|0097165 197 | 197|0061722 198 | 198|0100263 199 | 199|0050212 200 | 200|0081505 201 | 201|0092991 202 | 202|0107048 203 | 203|0105695 204 | 204|0088763 205 | 205|0066206 206 | 206|0094625 207 | 207|0099334 208 | 208|0072431 209 | 209|0088258 210 | 210|0097576 211 | 211|0066026 212 | 212|0096332 213 | 213|0091867 214 | 214|0084503 215 | 215|0097351 216 | 216|0098635 217 | 217|0103874 218 | 218|0101540 219 | 219|0087800 220 | 220|0117057 221 | 221|0115751 222 | 222|0117731 223 | 223|0117666 224 | 224|0117477 225 | 225|0115433 226 | 226|0099423 227 | 227|0102975 228 | 228|0084726 229 | 229|0088170 230 | 230|0092007 231 | 231|0103776 232 | 232|0096487 233 | 233|0105690 234 | 234|0073195 235 | 235|0116996 236 | 236|0115906 237 | 237|0116695 238 | 238|0093822 239 | 239|0105435 240 | 240|0115641 241 | 241|0104691 242 | 242|0116790 243 | 243|0119432 244 | 244|0120152 245 | 245|0118972 246 | 246|0118842 247 | 247|0120389 248 | 248|0119229 249 | 249|0118655 250 | 250|0119116 251 | 251|0117615 252 | 252|0119567 253 | 253|0114134 254 | 254|0118688 255 | 255|0119738 256 | 256|0119654 257 | 257|0119654 258 | 258|0118884 259 | 259|0119190 260 | 260|0119081 261 | 261|0118570 262 | 262|0119361 263 | 263|0120207 264 | 264|0119675 265 | 265|0099810 266 | 266|0119484 267 | 267|0049967 268 | 268|0118842 269 | 269|0119164 270 | 270|0119177 271 | 271|0120201 272 | 272|0119217 273 | 273|0113277 274 | 274|0114319 275 | 275|0114388 276 | 276|0113627 277 | 277|0114272 278 | 278|0115644 279 | 279|0114039 280 | 280|0118055 281 | 281|0110997 282 | 282|0117913 283 | 283|0118308 284 | 284|0117918 285 | 285|0117589 286 | 286|0116209 287 | 287|0116999 288 | 288|0117571 289 | 289|0116250 290 | 290|0119115 291 | 291|0118548 292 | 292|0120036 293 | 293|0119008 294 | 294|0119528 295 | 295|0118771 296 | 296|0117398 297 | 297|0120402 298 | 298|0119094 299 | 299|0119311 300 | 300|0118571 301 | 301|0119360 302 | 302|0119488 303 | 
303|0120402 304 | 304|0116329 305 | 305|0119349 306 | 306|0119280 307 | 307|0118971 308 | 308|0119095 309 | 309|0119527 310 | 310|0119978 311 | 311|0120520 312 | 312|0119668 313 | 313|0120338 314 | 314|0118539 315 | 315|0118636 316 | 316|0119822 317 | 317|0107207 318 | 318|0108052 319 | 319|0116242 320 | 320|0117293 321 | 321|0117091 322 | 322|0119731 323 | 323|0118928 324 | 324|0116922 325 | 325|0115964 326 | 326|0119173 327 | 327|0118887 328 | 328|0118883 329 | 329|0118966 330 | 330|0118531 331 | 331|0119051 332 | 332|0119468 333 | 333|0119174 334 | 334|0120399 335 | 335|0119326 336 | 336|0119906 337 | 337|0119324 338 | 338|0118689 339 | 339|0119592 340 | 340|0118749 341 | 341|0118901 342 | 342|0120483 343 | 343|0118583 344 | 344|0118632 345 | 345|0118954 346 | 346|0119396 347 | 347|0120885 348 | 348|0118966 349 | 349|0120696 350 | 350|0119099 351 | 351|0118643 352 | 352|0120185 353 | 353|0118956 354 | 354|0120888 355 | 355|0120184 356 | 356|0109446 357 | 357|0073486 358 | 358|0120177 359 | 359|0118647 360 | 360|0120529 361 | 361|0119365 362 | 362|0118747 363 | 363|0114576 364 | 364|0112281 365 | 365|0114168 366 | 366|0112792 367 | 367|0112697 368 | 368|0115683 369 | 369|0115697 370 | 370|0117002 371 | 371|0112579 372 | 372|0113464 373 | 373|0113492 374 | 374|0113820 375 | 375|0114436 376 | 376|0110066 377 | 377|0110006 378 | 378|0110527 379 | 379|0114608 380 | 380|0111280 381 | 381|0110598 382 | 382|0109045 383 | 383|0109813 384 | 384|0110622 385 | 385|0111503 386 | 386|0106220 387 | 387|0106226 388 | 388|0109254 389 | 389|0109279 390 | 390|0106880 391 | 391|0107362 392 | 392|0107501 393 | 393|0107614 394 | 394|0110939 395 | 395|0107977 396 | 396|0111127 397 | 397|0108238 398 | 398|0108255 399 | 399|0108333 400 | 400|0110366 401 | 401|0112572 402 | 402|0099653 403 | 403|0096895 404 | 404|0032910 405 | 405|0117060 406 | 406|0117894 407 | 407|0117723 408 | 408|0112691 409 | 409|0116669 410 | 410|0116778 411 | 411|0117218 412 | 412|0118073 413 | 413|0117826 414 | 414|0084370 415 | 415|0072653 416 | 416|0050798 417 | 417|0055277 418 | 418|0042332 419 | 419|0058331 420 | 420|0043274 421 | 421|0117509 422 | 422|0115491 423 | 423|0083866 424 | 424|0115885 425 | 425|0103850 426 | 426|0092106 427 | 427|0056592 428 | 428|0067185 429 | 429|0043456 430 | 430|0023969 431 | 431|0091203 432 | 432|0032455 433 | 433|0097493 434 | 434|0049223 435 | 435|0064115 436 | 436|0082010 437 | 437|0103678 438 | 438|0085159 439 | 439|0106262 440 | 440|0083550 441 | 441|0078767 442 | 442|0099030 443 | 443|0056869 444 | 444|0051418 445 | 445|0037549 446 | 446|0074258 447 | 447|0074285 448 | 448|0075005 449 | 449|0079945 450 | 450|0098382 451 | 451|0077631 452 | 452|0077766 453 | 453|0085750 454 | 454|0115633 455 | 455|0116704 456 | 456|0118708 457 | 457|0119152 458 | 458|0113987 459 | 459|0112749 460 | 460|0112744 461 | 461|0114478 462 | 462|0103994 463 | 463|0111112 464 | 464|0111590 465 | 465|0110213 466 | 466|0105226 467 | 467|0106489 468 | 468|0108002 469 | 469|0108122 470 | 470|0108358 471 | 471|0115956 472 | 472|0116136 473 | 473|0116683 474 | 474|0057012 475 | 475|0117951 476 | 476|0116313 477 | 477|0117008 478 | 478|0032904 479 | 479|0052357 480 | 480|0053125 481 | 481|0053604 482 | 482|0053291 483 | 483|0034583 484 | 484|0033870 485 | 485|0058385 486 | 486|0047437 487 | 487|0046250 488 | 488|0043014 489 | 489|0038787 490 | 490|0048728 491 | 491|0029843 492 | 492|0048028 493 | 493|0025878 494 | 494|0032599 495 | 495|0048960 496 | 496|0038650 497 | 497|0029947 498 | 498|0043265 499 | 499|0051459 500 | 
500|0116329 501 | 501|0033563 502 | 502|0066808 503 | 503|0068334 504 | 504|0061418 505 | 505|0046912 506 | 506|0048545 507 | 507|0044081 508 | 508|0117318 509 | 509|0097937 510 | 510|0054047 511 | 511|0056172 512 | 512|0093191 513 | 513|0041959 514 | 514|0075686 515 | 515|0082096 516 | 516|0085859 517 | 517|0079522 518 | 518|0100150 519 | 519|0040897 520 | 520|0057115 521 | 521|0077416 522 | 522|0090967 523 | 523|0061512 524 | 524|0032553 525 | 525|0038355 526 | 526|0052618 527 | 527|0083987 528 | 528|0087553 529 | 529|0089606 530 | 530|0073341 531 | 531|0117631 532 | 532|0116743 533 | 533|0116041 534 | 534|0120366 535 | 535|0118556 536 | 536|0117359 537 | 537|0102494 538 | 538|0118617 539 | 539|0119715 540 | 540|0113845 541 | 541|0113855 542 | 542|0114148 543 | 543|0113828 544 | 544|0114660 545 | 545|0114825 546 | 546|0115759 547 | 547|0115033 548 | 548|0110647 549 | 549|0114287 550 | 550|0112864 551 | 551|0113690 552 | 552|0114508 553 | 553|0114887 554 | 554|0114898 555 | 555|0114928 556 | 556|0114938 557 | 557|0109771 558 | 558|0110005 559 | 559|0110148 560 | 560|0113538 561 | 561|0109836 562 | 562|0114214 563 | 563|0112040 564 | 564|0114609 565 | 565|0114852 566 | 566|0109444 567 | 567|0111686 568 | 568|0111257 569 | 569|0111742 570 | 570|0111756 571 | 571|0106292 572 | 572|0109303 573 | 573|0106452 574 | 574|0106471 575 | 575|0109439 576 | 576|0106582 577 | 577|0106598 578 | 578|0106697 579 | 579|0106873 580 | 580|0112966 581 | 581|0107302 582 | 582|0107822 583 | 583|0107983 584 | 584|0108071 585 | 585|0108186 586 | 586|0111400 587 | 587|0107146 588 | 588|0101414 589 | 589|0065214 590 | 590|0116514 591 | 591|0117381 592 | 592|0114736 593 | 593|0108211 594 | 594|0113280 595 | 595|0116277 596 | 596|0116583 597 | 597|0116213 598 | 598|0115680 599 | 599|0106544 600 | 600|0117496 601 | 601|0035896 602 | 602|0043278 603 | 603|0047396 604 | 604|0025316 605 | 605|0037059 606 | 606|0042192 607 | 607|0032976 608 | 608|0038109 609 | 609|0042451 610 | 610|0051658 611 | 611|0037008 612 | 612|0029162 613 | 613|0028010 614 | 614|0049261 615 | 615|0026029 616 | 616|0063350 617 | 617|0020697 618 | 618|0048491 619 | 619|0116259 620 | 620|0115862 621 | 621|0047977 622 | 622|0054357 623 | 623|0109127 624 | 624|0038166 625 | 625|0057546 626 | 626|0041890 627 | 627|0102798 628 | 628|0117665 629 | 629|0084865 630 | 630|0059243 631 | 631|0104036 632 | 632|0084707 633 | 633|0029992 634 | 634|0117040 635 | 635|0080749 636 | 636|0082340 637 | 637|0082533 638 | 638|0084589 639 | 639|0078875 640 | 640|0097108 641 | 641|0050825 642 | 642|0099703 643 | 643|0111216 644 | 644|0096257 645 | 645|0100332 646 | 646|0064116 647 | 647|0089881 648 | 648|0045061 649 | 649|0087843 650 | 650|0050976 651 | 651|0097441 652 | 652|0100519 653 | 653|0052311 654 | 654|0071315 655 | 655|0092005 656 | 656|0022100 657 | 657|0056218 658 | 658|0100436 659 | 659|0036613 660 | 660|0101921 661 | 661|0044706 662 | 662|0081534 663 | 663|0078841 664 | 664|0087884 665 | 665|0103644 666 | 666|0071233 667 | 667|0075704 668 | 668|0082083 669 | 669|0101492 670 | 670|0106452 671 | 671|0026138 672 | 672|0103919 673 | 673|0055824 674 | 674|0083722 675 | 675|0013442 676 | 676|0115988 677 | 677|0116310 678 | 678|0120461 679 | 679|0082198 680 | 680|0119484 681 | 681|0120524 682 | 682|0119345 683 | 683|0120029 684 | 684|0107206 685 | 685|0116253 686 | 686|0107808 687 | 687|0119640 688 | 688|0119509 689 | 689|0119395 690 | 690|0120102 691 | 691|0118929 692 | 692|0112346 693 | 693|0112641 694 | 694|0114117 695 | 695|0113537 696 | 696|0115907 697 | 
[ml2imdb.id.final, continued: rows 697-1682 of the MovieLens-100k-to-IMDb ID mapping, one pipe-delimited "movielens_id|imdb_id" pair per row (e.g. 697|0112461); a few rows (1358, 1359, 1420, 1592) carry NA where no IMDb title was matched. The raw rows are omitted here.]
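The mapping above pairs each MovieLens 100k item ID with an IMDb title ID, which is how the chapter12 scripts can join the two datasets. A minimal loader sketch, assuming only the pipe-delimited layout visible in the rows above; the dict name and the skip-NA policy are illustrative choices, not taken from the book's code:

# Illustrative loader for ml2imdb.id.final (not one of the book's examples).
ml2imdb = {}
for line in open('ml2imdb.id.final'):
    ml_id, imdb_id = line.strip().split('|')
    if imdb_id == 'NA':    # a few MovieLens items have no matched IMDb title
        continue
    ml2imdb[int(ml_id)] = imdb_id

# The values are zero-padded IMDb title ids, e.g. ml2imdb[697] == '0112461',
# which corresponds to the IMDb URL https://www.imdb.com/title/tt0112461/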
-------------------------------------------------------------------------------- /chapter13/13-1-kmeans.py: --------------------------------------------------------------------------------
1 | import cv2
2 | import sys
3 | import numpy as np
4 | from matplotlib import pyplot as plt
5 | 
6 | def image_kmeans(fin_img, K, fout_img):
7 | 
8 |     img = cv2.imread(fin_img)
9 |     # Reshape the input image matrix:
10 |     # a (height x width x channels) array becomes (height * width) rows of channel values.
11 |     Z = img.reshape((-1,3))
12 | 
13 |     # Convert the intensities to NumPy float32, the type cv2.kmeans expects as input.
14 |     Z = np.float32(Z)
15 | 
16 |     # Set the criteria parameter: stop updating after at most 10 iterations
17 |     # or once the update error drops below 1.0; run 10 random initializations.
18 |     criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
19 |     ret, label, center = cv2.kmeans(Z, K, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)
20 | 
21 |     # Cast the returned centroids to uint8 so they can be used as pixel intensities.
22 |     center = np.uint8(center)
23 | 
24 |     # Fill each pixel with its centroid's feature values (the per-channel intensities): label is an (N, 1) column of cluster indices, so center[label.flatten()] fancy-indexes an (N, 3) array of quantized colors.
25 |     res = center[label.flatten()]
26 | 
27 |     # Reshape back to the original image shape for output.
28 |     res2 = res.reshape((img.shape))
29 | 
30 |     plt.imshow(cv2.cvtColor(res2, cv2.COLOR_BGR2RGB))
31 |     plt.title('k=%d' % K)    # title() takes one label string; its second positional argument is fontdict, so K must be formatted into the string
32 |     plt.savefig(fout_img)    # save before show(): after show() returns, the canvas may be empty and savefig() would write a blank image
33 |     plt.show()
34 |     plt.close()
35 | 
36 | if __name__ == '__main__':
37 | 
38 |     argv = sys.argv
39 |     image_kmeans(argv[1], int(argv[2]), argv[3])    # K arrives as a string on the command line; cv2.kmeans needs an int
40 | 
41 | 
42 | 
43 | 
44 | 
-------------------------------------------------------------------------------- /chapter13/13-13-deep-mnist.py: --------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from __future__ import division
3 | from __future__ import print_function
4 | 
5 | import argparse
6 | import sys
7 | from tensorflow.examples.tutorials.mnist import input_data
8 | import tensorflow as tf
9 | FLAGS = None
10 | 
11 | def weight_variable(shape):
12 |     initial = tf.truncated_normal(shape, stddev=0.1)
13 |     return tf.Variable(initial, name='weights')
14 | 
15 | def bias_variable(shape):
16 |     initial = tf.constant(0.1, shape=shape)
17 |     return tf.Variable(initial, name='bias')
18 | 
19 | def conv2d(x, W):
20 |     return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME', name='conv')
21 | 
22 | def max_pool_2x2(x):
23 |     return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool')
24 | 
25 | def main(_):
26 |     # Load the MNIST data.
27 |     mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
28 | 
29 |     x = tf.placeholder(tf.float32, [None, 784], name='x')
30 |     y_ = tf.placeholder(tf.float32, [None, 10])
31 | 
32 |     with tf.name_scope('first_conv'):
33 |         W_conv1 = weight_variable([5, 5, 1, 32])
34 |         b_conv1 = bias_variable([32])
35 |         x_image = tf.reshape(x, [-1, 28, 28, 1])
36 |         h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1, name='act')
37 | 
38 |     with tf.name_scope('first_pool'):
39 |         h_pool1 = max_pool_2x2(h_conv1)
40 | 
41 |     with tf.name_scope('second_conv'):
42 |         W_conv2 = weight_variable([5, 5, 32, 64])
43 |         b_conv2 = bias_variable([64])
44 |         h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2, name='act')
45 | 
46 |     with tf.name_scope('second_pool'):
47 |         h_pool2 = max_pool_2x2(h_conv2)
48 | 
49 |     with tf.name_scope('first_fc'):
50 |         W_fc1 = weight_variable([7 * 7 * 64, 1024])    # two 2x2 poolings halve 28x28 twice (28 -> 14 -> 7), leaving 7x7 maps over 64 channels
51 |         b_fc1 = bias_variable([1024])
52 |         h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
53 |         h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1, name='act')
54 | 
55 |     with tf.name_scope('drop_out'):
56 |         keep_prob = tf.placeholder(tf.float32)
57 |         h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
58 | 
59 |     with tf.name_scope('second_fc'):
60 |         W_fc2 = weight_variable([1024, 10])
61 |         b_fc2 = bias_variable([10])
62 |         y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
63 | 
64 |     cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y_conv), name='cross_ent')    # TF 1.x accepts labels/logits only as keyword arguments here
65 |     train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)
66 | 
67 |     correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))
68 |     accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
69 | 
70 | 
71 |     # Directory for recording the graph structure
72 |     logdir = './deep_graph'
73 |     with tf.Session() as sess:
74 |         sess.run(tf.global_variables_initializer())
75 |         # Writer for TensorBoard logging
76 |         summary_writer = tf.summary.FileWriter(logdir, sess.graph)
77 | 
78 |         for i in range(10000):
79 |             # Train on mini-batches of 100 examples.
80 |             batch = mnist.train.next_batch(100)
81 |             if i % 100 == 0:
82 |                 # Evaluation uses the completed model without drop-out,
83 |                 # so keep_prob is set to 1.0.
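                # Note: tf.nn.dropout is "inverted dropout" -- during training it
                # zeroes each activation with probability 1 - keep_prob and scales
                # the survivors by 1/keep_prob, so the expected activation stays
                # the same. Evaluation therefore only needs keep_prob=1.0; no
                # separate rescaling of the trained weights is required.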
84 |                 train_accuracy = accuracy.eval(feed_dict={x: batch[0], y_: batch[1], keep_prob: 1.0})
85 |                 print("step %d, training accuracy %g" % (i, train_accuracy))
86 |             train_step.run(feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5})
87 |         print("test accuracy %g" % accuracy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels, keep_prob: 1.0}))
88 | 
89 | if __name__ == '__main__':
90 |     parser = argparse.ArgumentParser()
91 |     parser.add_argument('--data_dir', type=str, default='/tmp/tensorflow/mnist/input_data', help='Directory for storing input data')
92 |     FLAGS, unparsed = parser.parse_known_args()
93 |     tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
-------------------------------------------------------------------------------- /chapter13/13-2-eigenface.py: --------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 | import sys
4 | from sklearn.model_selection import train_test_split
5 | from sklearn.metrics import classification_report
6 | from sklearn.decomposition import PCA
7 | from sklearn.model_selection import GridSearchCV
8 | from sklearn.svm import SVC
9 | 
10 | def read_data(fin):
11 |     '''Read the image files and return the image data and the face id of each image. The fin file contains the path of each image.
12 |     '''
13 |     target_li = []
14 |     data_li = []
15 |     for line in open(fin):
16 |         image_path, face_id = line.strip().split(';')
17 |         image_data = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)    # imread expects an IMREAD_* flag here, not a cvtColor code such as COLOR_BGR2GRAY; IMREAD_GRAYSCALE loads each image as a 2-D intensity array
18 |         data_li.append(image_data)    # image data
19 |         target_li.append(int(face_id))    # face id of the image
20 |     return(np.array(data_li), np.array(target_li))
21 | 
22 | 
23 | def create_train_test_data(image_data, label_li):
24 | 
25 |     # number of images, image height in pixels, image width in pixels
26 |     n_samples, image_h, image_w = image_data.shape
27 |     # Concatenate the rows of pixel intensities into one vector per image.
28 |     # Its length is image_h x image_w, and this vector becomes the feature vector.
29 |     X = image_data.reshape(n_samples, -1)
30 |     n_features = X.shape[1]    # feature size
31 |     y = label_li    # training labels
32 | 
33 |     n_classes = len(set(y))    # number of classes (people)
34 |     print("Total dataset size:")
35 |     print("n_samples: %d" % n_samples)
36 |     print("n_features: %d" % n_features)
37 |     print("n_classes: %d" % n_classes)
38 |     # Split into training and test sets with scikit-learn's train_test_split.
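    # Note: with only 10 images per person, a purely random 25% hold-out can
    # leave some people under-represented in either split. Passing the labels
    # to stratify (a suggestion, not in the original script) keeps the
    # per-person proportions equal in both sets:
    #   train_test_split(X, y, test_size=0.25, random_state=42, stratify=y)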
39 |     X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)
40 |     return(X_train, X_test, y_train, y_test)
41 | 
42 | def extract_features(X_train, X_test, n_components):
43 |     print("Extracting the top %d eigenfaces from %d faces" % (n_components, X_train.shape[0]))
44 |     pca = PCA(n_components=n_components, svd_solver='randomized', whiten=True).fit(X_train)
45 |     X_train_pca = pca.transform(X_train)
46 |     X_test_pca = pca.transform(X_test)
47 |     return(X_train_pca, X_test_pca)
48 | 
49 | def train_test_classifier(X_train_pca, X_test_pca, y_train, y_test):
50 |     print("Fitting the classifier to the training set")
51 |     param_grid = {'C': [1e3, 5e3, 1e4, 5e4, 1e5], 'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1]}
52 |     clf = GridSearchCV(SVC(kernel='rbf', class_weight='balanced'), param_grid)
53 |     clf = clf.fit(X_train_pca, y_train)
54 |     print("Best estimator found by grid search:")
55 |     print(clf.best_estimator_)
56 | 
57 |     print("Predicting people's names on the test set")
58 |     y_pred = clf.predict(X_test_pca)
59 |     print(classification_report(y_test, y_pred))
60 | 
61 | if __name__ == '__main__':
62 |     argv = sys.argv
63 |     image_data, label = read_data('faces.csv')
64 |     n_eigenface = 10    # number of eigenfaces to extract
65 |     X_train, X_test, y_train, y_test = create_train_test_data(image_data, label)
66 |     X_train_pca, X_test_pca = extract_features(X_train, X_test, n_eigenface)
67 |     train_test_classifier(X_train_pca, X_test_pca, y_train, y_test)
-------------------------------------------------------------------------------- /chapter13/README.md: --------------------------------------------------------------------------------
1 | We have uploaded the file that holds the paths of the images used in example 13-2.
2 | Please replace /Users/youngjoo.chung/Downloads/att_face/ with the matching path for your own environment before use (a sketch of this rewrite follows below).
3 | 
4 | 
5 | The dataset link printed in the book no longer works; please download and use the att_face.zip file instead.
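A minimal sketch of that path rewrite, assuming only the faces.csv layout shown below (one path;face_id record per line); old_prefix, new_prefix, and the output file name are illustrative:

# rewrite_faces_paths.py -- illustrative helper, not part of the repository
old_prefix = '/Users/youngjoo.chung/Downloads/att_face/'
new_prefix = '/path/to/your/att_face/'    # set to wherever att_face.zip was unpacked

with open('faces.csv') as fin, open('faces_local.csv', 'w') as fout:
    for line in fin:
        fout.write(line.replace(old_prefix, new_prefix))

# Then point read_data() in 13-2-eigenface.py at 'faces_local.csv'.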
6 | -------------------------------------------------------------------------------- /chapter13/att_face.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/your-first-ml-book/Examples/42d4de60f27b47fc69f02edf6e02840cb1701386/chapter13/att_face.zip -------------------------------------------------------------------------------- /chapter13/faces.csv: -------------------------------------------------------------------------------- 1 | /Users/youngjoo.chung/Downloads/att_face/s1/1.pgm;0 2 | /Users/youngjoo.chung/Downloads/att_face/s1/10.pgm;0 3 | /Users/youngjoo.chung/Downloads/att_face/s1/2.pgm;0 4 | /Users/youngjoo.chung/Downloads/att_face/s1/3.pgm;0 5 | /Users/youngjoo.chung/Downloads/att_face/s1/4.pgm;0 6 | /Users/youngjoo.chung/Downloads/att_face/s1/5.pgm;0 7 | /Users/youngjoo.chung/Downloads/att_face/s1/6.pgm;0 8 | /Users/youngjoo.chung/Downloads/att_face/s1/7.pgm;0 9 | /Users/youngjoo.chung/Downloads/att_face/s1/8.pgm;0 10 | /Users/youngjoo.chung/Downloads/att_face/s1/9.pgm;0 11 | /Users/youngjoo.chung/Downloads/att_face/s10/1.pgm;9 12 | /Users/youngjoo.chung/Downloads/att_face/s10/10.pgm;9 13 | /Users/youngjoo.chung/Downloads/att_face/s10/2.pgm;9 14 | /Users/youngjoo.chung/Downloads/att_face/s10/3.pgm;9 15 | /Users/youngjoo.chung/Downloads/att_face/s10/4.pgm;9 16 | /Users/youngjoo.chung/Downloads/att_face/s10/5.pgm;9 17 | /Users/youngjoo.chung/Downloads/att_face/s10/6.pgm;9 18 | /Users/youngjoo.chung/Downloads/att_face/s10/7.pgm;9 19 | /Users/youngjoo.chung/Downloads/att_face/s10/8.pgm;9 20 | /Users/youngjoo.chung/Downloads/att_face/s10/9.pgm;9 21 | /Users/youngjoo.chung/Downloads/att_face/s11/1.pgm;10 22 | /Users/youngjoo.chung/Downloads/att_face/s11/10.pgm;10 23 | /Users/youngjoo.chung/Downloads/att_face/s11/2.pgm;10 24 | /Users/youngjoo.chung/Downloads/att_face/s11/3.pgm;10 25 | /Users/youngjoo.chung/Downloads/att_face/s11/4.pgm;10 26 | /Users/youngjoo.chung/Downloads/att_face/s11/5.pgm;10 27 | /Users/youngjoo.chung/Downloads/att_face/s11/6.pgm;10 28 | /Users/youngjoo.chung/Downloads/att_face/s11/7.pgm;10 29 | /Users/youngjoo.chung/Downloads/att_face/s11/8.pgm;10 30 | /Users/youngjoo.chung/Downloads/att_face/s11/9.pgm;10 31 | /Users/youngjoo.chung/Downloads/att_face/s12/1.pgm;11 32 | /Users/youngjoo.chung/Downloads/att_face/s12/10.pgm;11 33 | /Users/youngjoo.chung/Downloads/att_face/s12/2.pgm;11 34 | /Users/youngjoo.chung/Downloads/att_face/s12/3.pgm;11 35 | /Users/youngjoo.chung/Downloads/att_face/s12/4.pgm;11 36 | /Users/youngjoo.chung/Downloads/att_face/s12/5.pgm;11 37 | /Users/youngjoo.chung/Downloads/att_face/s12/6.pgm;11 38 | /Users/youngjoo.chung/Downloads/att_face/s12/7.pgm;11 39 | /Users/youngjoo.chung/Downloads/att_face/s12/8.pgm;11 40 | /Users/youngjoo.chung/Downloads/att_face/s12/9.pgm;11 41 | /Users/youngjoo.chung/Downloads/att_face/s13/1.pgm;12 42 | /Users/youngjoo.chung/Downloads/att_face/s13/10.pgm;12 43 | /Users/youngjoo.chung/Downloads/att_face/s13/2.pgm;12 44 | /Users/youngjoo.chung/Downloads/att_face/s13/3.pgm;12 45 | /Users/youngjoo.chung/Downloads/att_face/s13/4.pgm;12 46 | /Users/youngjoo.chung/Downloads/att_face/s13/5.pgm;12 47 | /Users/youngjoo.chung/Downloads/att_face/s13/6.pgm;12 48 | /Users/youngjoo.chung/Downloads/att_face/s13/7.pgm;12 49 | /Users/youngjoo.chung/Downloads/att_face/s13/8.pgm;12 50 | /Users/youngjoo.chung/Downloads/att_face/s13/9.pgm;12 51 | /Users/youngjoo.chung/Downloads/att_face/s14/1.pgm;13 52 | /Users/youngjoo.chung/Downloads/att_face/s14/10.pgm;13 53 | 
/Users/youngjoo.chung/Downloads/att_face/s14/2.pgm;13 54 | /Users/youngjoo.chung/Downloads/att_face/s14/3.pgm;13 55 | /Users/youngjoo.chung/Downloads/att_face/s14/4.pgm;13 56 | /Users/youngjoo.chung/Downloads/att_face/s14/5.pgm;13 57 | /Users/youngjoo.chung/Downloads/att_face/s14/6.pgm;13 58 | /Users/youngjoo.chung/Downloads/att_face/s14/7.pgm;13 59 | /Users/youngjoo.chung/Downloads/att_face/s14/8.pgm;13 60 | /Users/youngjoo.chung/Downloads/att_face/s14/9.pgm;13 61 | /Users/youngjoo.chung/Downloads/att_face/s15/1.pgm;14 62 | /Users/youngjoo.chung/Downloads/att_face/s15/10.pgm;14 63 | /Users/youngjoo.chung/Downloads/att_face/s15/2.pgm;14 64 | /Users/youngjoo.chung/Downloads/att_face/s15/3.pgm;14 65 | /Users/youngjoo.chung/Downloads/att_face/s15/4.pgm;14 66 | /Users/youngjoo.chung/Downloads/att_face/s15/5.pgm;14 67 | /Users/youngjoo.chung/Downloads/att_face/s15/6.pgm;14 68 | /Users/youngjoo.chung/Downloads/att_face/s15/7.pgm;14 69 | /Users/youngjoo.chung/Downloads/att_face/s15/8.pgm;14 70 | /Users/youngjoo.chung/Downloads/att_face/s15/9.pgm;14 71 | /Users/youngjoo.chung/Downloads/att_face/s16/1.pgm;15 72 | /Users/youngjoo.chung/Downloads/att_face/s16/10.pgm;15 73 | /Users/youngjoo.chung/Downloads/att_face/s16/2.pgm;15 74 | /Users/youngjoo.chung/Downloads/att_face/s16/3.pgm;15 75 | /Users/youngjoo.chung/Downloads/att_face/s16/4.pgm;15 76 | /Users/youngjoo.chung/Downloads/att_face/s16/5.pgm;15 77 | /Users/youngjoo.chung/Downloads/att_face/s16/6.pgm;15 78 | /Users/youngjoo.chung/Downloads/att_face/s16/7.pgm;15 79 | /Users/youngjoo.chung/Downloads/att_face/s16/8.pgm;15 80 | /Users/youngjoo.chung/Downloads/att_face/s16/9.pgm;15 81 | /Users/youngjoo.chung/Downloads/att_face/s17/1.pgm;16 82 | /Users/youngjoo.chung/Downloads/att_face/s17/10.pgm;16 83 | /Users/youngjoo.chung/Downloads/att_face/s17/2.pgm;16 84 | /Users/youngjoo.chung/Downloads/att_face/s17/3.pgm;16 85 | /Users/youngjoo.chung/Downloads/att_face/s17/4.pgm;16 86 | /Users/youngjoo.chung/Downloads/att_face/s17/5.pgm;16 87 | /Users/youngjoo.chung/Downloads/att_face/s17/6.pgm;16 88 | /Users/youngjoo.chung/Downloads/att_face/s17/7.pgm;16 89 | /Users/youngjoo.chung/Downloads/att_face/s17/8.pgm;16 90 | /Users/youngjoo.chung/Downloads/att_face/s17/9.pgm;16 91 | /Users/youngjoo.chung/Downloads/att_face/s18/1.pgm;17 92 | /Users/youngjoo.chung/Downloads/att_face/s18/10.pgm;17 93 | /Users/youngjoo.chung/Downloads/att_face/s18/2.pgm;17 94 | /Users/youngjoo.chung/Downloads/att_face/s18/3.pgm;17 95 | /Users/youngjoo.chung/Downloads/att_face/s18/4.pgm;17 96 | /Users/youngjoo.chung/Downloads/att_face/s18/5.pgm;17 97 | /Users/youngjoo.chung/Downloads/att_face/s18/6.pgm;17 98 | /Users/youngjoo.chung/Downloads/att_face/s18/7.pgm;17 99 | /Users/youngjoo.chung/Downloads/att_face/s18/8.pgm;17 100 | /Users/youngjoo.chung/Downloads/att_face/s18/9.pgm;17 101 | /Users/youngjoo.chung/Downloads/att_face/s19/1.pgm;18 102 | /Users/youngjoo.chung/Downloads/att_face/s19/10.pgm;18 103 | /Users/youngjoo.chung/Downloads/att_face/s19/2.pgm;18 104 | /Users/youngjoo.chung/Downloads/att_face/s19/3.pgm;18 105 | /Users/youngjoo.chung/Downloads/att_face/s19/4.pgm;18 106 | /Users/youngjoo.chung/Downloads/att_face/s19/5.pgm;18 107 | /Users/youngjoo.chung/Downloads/att_face/s19/6.pgm;18 108 | /Users/youngjoo.chung/Downloads/att_face/s19/7.pgm;18 109 | /Users/youngjoo.chung/Downloads/att_face/s19/8.pgm;18 110 | /Users/youngjoo.chung/Downloads/att_face/s19/9.pgm;18 111 | /Users/youngjoo.chung/Downloads/att_face/s2/1.pgm;1 112 | /Users/youngjoo.chung/Downloads/att_face/s2/10.pgm;1 113 
| /Users/youngjoo.chung/Downloads/att_face/s2/2.pgm;1 114 | /Users/youngjoo.chung/Downloads/att_face/s2/3.pgm;1 115 | /Users/youngjoo.chung/Downloads/att_face/s2/4.pgm;1 116 | /Users/youngjoo.chung/Downloads/att_face/s2/5.pgm;1 117 | /Users/youngjoo.chung/Downloads/att_face/s2/6.pgm;1 118 | /Users/youngjoo.chung/Downloads/att_face/s2/7.pgm;1 119 | /Users/youngjoo.chung/Downloads/att_face/s2/8.pgm;1 120 | /Users/youngjoo.chung/Downloads/att_face/s2/9.pgm;1 121 | /Users/youngjoo.chung/Downloads/att_face/s20/1.pgm;19 122 | /Users/youngjoo.chung/Downloads/att_face/s20/10.pgm;19 123 | /Users/youngjoo.chung/Downloads/att_face/s20/2.pgm;19 124 | /Users/youngjoo.chung/Downloads/att_face/s20/3.pgm;19 125 | /Users/youngjoo.chung/Downloads/att_face/s20/4.pgm;19 126 | /Users/youngjoo.chung/Downloads/att_face/s20/5.pgm;19 127 | /Users/youngjoo.chung/Downloads/att_face/s20/6.pgm;19 128 | /Users/youngjoo.chung/Downloads/att_face/s20/7.pgm;19 129 | /Users/youngjoo.chung/Downloads/att_face/s20/8.pgm;19 130 | /Users/youngjoo.chung/Downloads/att_face/s20/9.pgm;19 131 | /Users/youngjoo.chung/Downloads/att_face/s21/1.pgm;20 132 | /Users/youngjoo.chung/Downloads/att_face/s21/10.pgm;20 133 | /Users/youngjoo.chung/Downloads/att_face/s21/2.pgm;20 134 | /Users/youngjoo.chung/Downloads/att_face/s21/3.pgm;20 135 | /Users/youngjoo.chung/Downloads/att_face/s21/4.pgm;20 136 | /Users/youngjoo.chung/Downloads/att_face/s21/5.pgm;20 137 | /Users/youngjoo.chung/Downloads/att_face/s21/6.pgm;20 138 | /Users/youngjoo.chung/Downloads/att_face/s21/7.pgm;20 139 | /Users/youngjoo.chung/Downloads/att_face/s21/8.pgm;20 140 | /Users/youngjoo.chung/Downloads/att_face/s21/9.pgm;20 141 | /Users/youngjoo.chung/Downloads/att_face/s22/1.pgm;21 142 | /Users/youngjoo.chung/Downloads/att_face/s22/10.pgm;21 143 | /Users/youngjoo.chung/Downloads/att_face/s22/2.pgm;21 144 | /Users/youngjoo.chung/Downloads/att_face/s22/3.pgm;21 145 | /Users/youngjoo.chung/Downloads/att_face/s22/4.pgm;21 146 | /Users/youngjoo.chung/Downloads/att_face/s22/5.pgm;21 147 | /Users/youngjoo.chung/Downloads/att_face/s22/6.pgm;21 148 | /Users/youngjoo.chung/Downloads/att_face/s22/7.pgm;21 149 | /Users/youngjoo.chung/Downloads/att_face/s22/8.pgm;21 150 | /Users/youngjoo.chung/Downloads/att_face/s22/9.pgm;21 151 | /Users/youngjoo.chung/Downloads/att_face/s23/1.pgm;22 152 | /Users/youngjoo.chung/Downloads/att_face/s23/10.pgm;22 153 | /Users/youngjoo.chung/Downloads/att_face/s23/2.pgm;22 154 | /Users/youngjoo.chung/Downloads/att_face/s23/3.pgm;22 155 | /Users/youngjoo.chung/Downloads/att_face/s23/4.pgm;22 156 | /Users/youngjoo.chung/Downloads/att_face/s23/5.pgm;22 157 | /Users/youngjoo.chung/Downloads/att_face/s23/6.pgm;22 158 | /Users/youngjoo.chung/Downloads/att_face/s23/7.pgm;22 159 | /Users/youngjoo.chung/Downloads/att_face/s23/8.pgm;22 160 | /Users/youngjoo.chung/Downloads/att_face/s23/9.pgm;22 161 | /Users/youngjoo.chung/Downloads/att_face/s24/1.pgm;23 162 | /Users/youngjoo.chung/Downloads/att_face/s24/10.pgm;23 163 | /Users/youngjoo.chung/Downloads/att_face/s24/2.pgm;23 164 | /Users/youngjoo.chung/Downloads/att_face/s24/3.pgm;23 165 | /Users/youngjoo.chung/Downloads/att_face/s24/4.pgm;23 166 | /Users/youngjoo.chung/Downloads/att_face/s24/5.pgm;23 167 | /Users/youngjoo.chung/Downloads/att_face/s24/6.pgm;23 168 | /Users/youngjoo.chung/Downloads/att_face/s24/7.pgm;23 169 | /Users/youngjoo.chung/Downloads/att_face/s24/8.pgm;23 170 | /Users/youngjoo.chung/Downloads/att_face/s24/9.pgm;23 171 | /Users/youngjoo.chung/Downloads/att_face/s25/1.pgm;24 172 | 
/Users/youngjoo.chung/Downloads/att_face/s25/10.pgm;24 173 | /Users/youngjoo.chung/Downloads/att_face/s25/2.pgm;24 174 | /Users/youngjoo.chung/Downloads/att_face/s25/3.pgm;24 175 | /Users/youngjoo.chung/Downloads/att_face/s25/4.pgm;24 176 | /Users/youngjoo.chung/Downloads/att_face/s25/5.pgm;24 177 | /Users/youngjoo.chung/Downloads/att_face/s25/6.pgm;24 178 | /Users/youngjoo.chung/Downloads/att_face/s25/7.pgm;24 179 | /Users/youngjoo.chung/Downloads/att_face/s25/8.pgm;24 180 | /Users/youngjoo.chung/Downloads/att_face/s25/9.pgm;24 181 | /Users/youngjoo.chung/Downloads/att_face/s26/1.pgm;25 182 | /Users/youngjoo.chung/Downloads/att_face/s26/10.pgm;25 183 | /Users/youngjoo.chung/Downloads/att_face/s26/2.pgm;25 184 | /Users/youngjoo.chung/Downloads/att_face/s26/3.pgm;25 185 | /Users/youngjoo.chung/Downloads/att_face/s26/4.pgm;25 186 | /Users/youngjoo.chung/Downloads/att_face/s26/5.pgm;25 187 | /Users/youngjoo.chung/Downloads/att_face/s26/6.pgm;25 188 | /Users/youngjoo.chung/Downloads/att_face/s26/7.pgm;25 189 | /Users/youngjoo.chung/Downloads/att_face/s26/8.pgm;25 190 | /Users/youngjoo.chung/Downloads/att_face/s26/9.pgm;25 191 | /Users/youngjoo.chung/Downloads/att_face/s27/1.pgm;26 192 | /Users/youngjoo.chung/Downloads/att_face/s27/10.pgm;26 193 | /Users/youngjoo.chung/Downloads/att_face/s27/2.pgm;26 194 | /Users/youngjoo.chung/Downloads/att_face/s27/3.pgm;26 195 | /Users/youngjoo.chung/Downloads/att_face/s27/4.pgm;26 196 | /Users/youngjoo.chung/Downloads/att_face/s27/5.pgm;26 197 | /Users/youngjoo.chung/Downloads/att_face/s27/6.pgm;26 198 | /Users/youngjoo.chung/Downloads/att_face/s27/7.pgm;26 199 | /Users/youngjoo.chung/Downloads/att_face/s27/8.pgm;26 200 | /Users/youngjoo.chung/Downloads/att_face/s27/9.pgm;26 201 | /Users/youngjoo.chung/Downloads/att_face/s28/1.pgm;27 202 | /Users/youngjoo.chung/Downloads/att_face/s28/10.pgm;27 203 | /Users/youngjoo.chung/Downloads/att_face/s28/2.pgm;27 204 | /Users/youngjoo.chung/Downloads/att_face/s28/3.pgm;27 205 | /Users/youngjoo.chung/Downloads/att_face/s28/4.pgm;27 206 | /Users/youngjoo.chung/Downloads/att_face/s28/5.pgm;27 207 | /Users/youngjoo.chung/Downloads/att_face/s28/6.pgm;27 208 | /Users/youngjoo.chung/Downloads/att_face/s28/7.pgm;27 209 | /Users/youngjoo.chung/Downloads/att_face/s28/8.pgm;27 210 | /Users/youngjoo.chung/Downloads/att_face/s28/9.pgm;27 211 | /Users/youngjoo.chung/Downloads/att_face/s29/1.pgm;28 212 | /Users/youngjoo.chung/Downloads/att_face/s29/10.pgm;28 213 | /Users/youngjoo.chung/Downloads/att_face/s29/2.pgm;28 214 | /Users/youngjoo.chung/Downloads/att_face/s29/3.pgm;28 215 | /Users/youngjoo.chung/Downloads/att_face/s29/4.pgm;28 216 | /Users/youngjoo.chung/Downloads/att_face/s29/5.pgm;28 217 | /Users/youngjoo.chung/Downloads/att_face/s29/6.pgm;28 218 | /Users/youngjoo.chung/Downloads/att_face/s29/7.pgm;28 219 | /Users/youngjoo.chung/Downloads/att_face/s29/8.pgm;28 220 | /Users/youngjoo.chung/Downloads/att_face/s29/9.pgm;28 221 | /Users/youngjoo.chung/Downloads/att_face/s3/1.pgm;2 222 | /Users/youngjoo.chung/Downloads/att_face/s3/10.pgm;2 223 | /Users/youngjoo.chung/Downloads/att_face/s3/2.pgm;2 224 | /Users/youngjoo.chung/Downloads/att_face/s3/3.pgm;2 225 | /Users/youngjoo.chung/Downloads/att_face/s3/4.pgm;2 226 | /Users/youngjoo.chung/Downloads/att_face/s3/5.pgm;2 227 | /Users/youngjoo.chung/Downloads/att_face/s3/6.pgm;2 228 | /Users/youngjoo.chung/Downloads/att_face/s3/7.pgm;2 229 | /Users/youngjoo.chung/Downloads/att_face/s3/8.pgm;2 230 | /Users/youngjoo.chung/Downloads/att_face/s3/9.pgm;2 231 | 
/Users/youngjoo.chung/Downloads/att_face/s30/1.pgm;29 232 | /Users/youngjoo.chung/Downloads/att_face/s30/10.pgm;29 233 | /Users/youngjoo.chung/Downloads/att_face/s30/2.pgm;29 234 | /Users/youngjoo.chung/Downloads/att_face/s30/3.pgm;29 235 | /Users/youngjoo.chung/Downloads/att_face/s30/4.pgm;29 236 | /Users/youngjoo.chung/Downloads/att_face/s30/5.pgm;29 237 | /Users/youngjoo.chung/Downloads/att_face/s30/6.pgm;29 238 | /Users/youngjoo.chung/Downloads/att_face/s30/7.pgm;29 239 | /Users/youngjoo.chung/Downloads/att_face/s30/8.pgm;29 240 | /Users/youngjoo.chung/Downloads/att_face/s30/9.pgm;29 241 | /Users/youngjoo.chung/Downloads/att_face/s31/1.pgm;30 242 | /Users/youngjoo.chung/Downloads/att_face/s31/10.pgm;30 243 | /Users/youngjoo.chung/Downloads/att_face/s31/2.pgm;30 244 | /Users/youngjoo.chung/Downloads/att_face/s31/3.pgm;30 245 | /Users/youngjoo.chung/Downloads/att_face/s31/4.pgm;30 246 | /Users/youngjoo.chung/Downloads/att_face/s31/5.pgm;30 247 | /Users/youngjoo.chung/Downloads/att_face/s31/6.pgm;30 248 | /Users/youngjoo.chung/Downloads/att_face/s31/7.pgm;30 249 | /Users/youngjoo.chung/Downloads/att_face/s31/8.pgm;30 250 | /Users/youngjoo.chung/Downloads/att_face/s31/9.pgm;30 251 | /Users/youngjoo.chung/Downloads/att_face/s32/1.pgm;31 252 | /Users/youngjoo.chung/Downloads/att_face/s32/10.pgm;31 253 | /Users/youngjoo.chung/Downloads/att_face/s32/2.pgm;31 254 | /Users/youngjoo.chung/Downloads/att_face/s32/3.pgm;31 255 | /Users/youngjoo.chung/Downloads/att_face/s32/4.pgm;31 256 | /Users/youngjoo.chung/Downloads/att_face/s32/5.pgm;31 257 | /Users/youngjoo.chung/Downloads/att_face/s32/6.pgm;31 258 | /Users/youngjoo.chung/Downloads/att_face/s32/7.pgm;31 259 | /Users/youngjoo.chung/Downloads/att_face/s32/8.pgm;31 260 | /Users/youngjoo.chung/Downloads/att_face/s32/9.pgm;31 261 | /Users/youngjoo.chung/Downloads/att_face/s33/1.pgm;32 262 | /Users/youngjoo.chung/Downloads/att_face/s33/10.pgm;32 263 | /Users/youngjoo.chung/Downloads/att_face/s33/2.pgm;32 264 | /Users/youngjoo.chung/Downloads/att_face/s33/3.pgm;32 265 | /Users/youngjoo.chung/Downloads/att_face/s33/4.pgm;32 266 | /Users/youngjoo.chung/Downloads/att_face/s33/5.pgm;32 267 | /Users/youngjoo.chung/Downloads/att_face/s33/6.pgm;32 268 | /Users/youngjoo.chung/Downloads/att_face/s33/7.pgm;32 269 | /Users/youngjoo.chung/Downloads/att_face/s33/8.pgm;32 270 | /Users/youngjoo.chung/Downloads/att_face/s33/9.pgm;32 271 | /Users/youngjoo.chung/Downloads/att_face/s34/1.pgm;33 272 | /Users/youngjoo.chung/Downloads/att_face/s34/10.pgm;33 273 | /Users/youngjoo.chung/Downloads/att_face/s34/2.pgm;33 274 | /Users/youngjoo.chung/Downloads/att_face/s34/3.pgm;33 275 | /Users/youngjoo.chung/Downloads/att_face/s34/4.pgm;33 276 | /Users/youngjoo.chung/Downloads/att_face/s34/5.pgm;33 277 | /Users/youngjoo.chung/Downloads/att_face/s34/6.pgm;33 278 | /Users/youngjoo.chung/Downloads/att_face/s34/7.pgm;33 279 | /Users/youngjoo.chung/Downloads/att_face/s34/8.pgm;33 280 | /Users/youngjoo.chung/Downloads/att_face/s34/9.pgm;33 281 | /Users/youngjoo.chung/Downloads/att_face/s35/1.pgm;34 282 | /Users/youngjoo.chung/Downloads/att_face/s35/10.pgm;34 283 | /Users/youngjoo.chung/Downloads/att_face/s35/2.pgm;34 284 | /Users/youngjoo.chung/Downloads/att_face/s35/3.pgm;34 285 | /Users/youngjoo.chung/Downloads/att_face/s35/4.pgm;34 286 | /Users/youngjoo.chung/Downloads/att_face/s35/5.pgm;34 287 | /Users/youngjoo.chung/Downloads/att_face/s35/6.pgm;34 288 | /Users/youngjoo.chung/Downloads/att_face/s35/7.pgm;34 289 | /Users/youngjoo.chung/Downloads/att_face/s35/8.pgm;34 290 | 
/Users/youngjoo.chung/Downloads/att_face/s35/9.pgm;34 291 | /Users/youngjoo.chung/Downloads/att_face/s36/1.pgm;35 292 | /Users/youngjoo.chung/Downloads/att_face/s36/10.pgm;35 293 | /Users/youngjoo.chung/Downloads/att_face/s36/2.pgm;35 294 | /Users/youngjoo.chung/Downloads/att_face/s36/3.pgm;35 295 | /Users/youngjoo.chung/Downloads/att_face/s36/4.pgm;35 296 | /Users/youngjoo.chung/Downloads/att_face/s36/5.pgm;35 297 | /Users/youngjoo.chung/Downloads/att_face/s36/6.pgm;35 298 | /Users/youngjoo.chung/Downloads/att_face/s36/7.pgm;35 299 | /Users/youngjoo.chung/Downloads/att_face/s36/8.pgm;35 300 | /Users/youngjoo.chung/Downloads/att_face/s36/9.pgm;35 301 | /Users/youngjoo.chung/Downloads/att_face/s37/1.pgm;36 302 | /Users/youngjoo.chung/Downloads/att_face/s37/10.pgm;36 303 | /Users/youngjoo.chung/Downloads/att_face/s37/2.pgm;36 304 | /Users/youngjoo.chung/Downloads/att_face/s37/3.pgm;36 305 | /Users/youngjoo.chung/Downloads/att_face/s37/4.pgm;36 306 | /Users/youngjoo.chung/Downloads/att_face/s37/5.pgm;36 307 | /Users/youngjoo.chung/Downloads/att_face/s37/6.pgm;36 308 | /Users/youngjoo.chung/Downloads/att_face/s37/7.pgm;36 309 | /Users/youngjoo.chung/Downloads/att_face/s37/8.pgm;36 310 | /Users/youngjoo.chung/Downloads/att_face/s37/9.pgm;36 311 | /Users/youngjoo.chung/Downloads/att_face/s38/1.pgm;37 312 | /Users/youngjoo.chung/Downloads/att_face/s38/10.pgm;37 313 | /Users/youngjoo.chung/Downloads/att_face/s38/2.pgm;37 314 | /Users/youngjoo.chung/Downloads/att_face/s38/3.pgm;37 315 | /Users/youngjoo.chung/Downloads/att_face/s38/4.pgm;37 316 | /Users/youngjoo.chung/Downloads/att_face/s38/5.pgm;37 317 | /Users/youngjoo.chung/Downloads/att_face/s38/6.pgm;37 318 | /Users/youngjoo.chung/Downloads/att_face/s38/7.pgm;37 319 | /Users/youngjoo.chung/Downloads/att_face/s38/8.pgm;37 320 | /Users/youngjoo.chung/Downloads/att_face/s38/9.pgm;37 321 | /Users/youngjoo.chung/Downloads/att_face/s39/1.pgm;38 322 | /Users/youngjoo.chung/Downloads/att_face/s39/10.pgm;38 323 | /Users/youngjoo.chung/Downloads/att_face/s39/2.pgm;38 324 | /Users/youngjoo.chung/Downloads/att_face/s39/3.pgm;38 325 | /Users/youngjoo.chung/Downloads/att_face/s39/4.pgm;38 326 | /Users/youngjoo.chung/Downloads/att_face/s39/5.pgm;38 327 | /Users/youngjoo.chung/Downloads/att_face/s39/6.pgm;38 328 | /Users/youngjoo.chung/Downloads/att_face/s39/7.pgm;38 329 | /Users/youngjoo.chung/Downloads/att_face/s39/8.pgm;38 330 | /Users/youngjoo.chung/Downloads/att_face/s39/9.pgm;38 331 | /Users/youngjoo.chung/Downloads/att_face/s4/1.pgm;3 332 | /Users/youngjoo.chung/Downloads/att_face/s4/10.pgm;3 333 | /Users/youngjoo.chung/Downloads/att_face/s4/2.pgm;3 334 | /Users/youngjoo.chung/Downloads/att_face/s4/3.pgm;3 335 | /Users/youngjoo.chung/Downloads/att_face/s4/4.pgm;3 336 | /Users/youngjoo.chung/Downloads/att_face/s4/5.pgm;3 337 | /Users/youngjoo.chung/Downloads/att_face/s4/6.pgm;3 338 | /Users/youngjoo.chung/Downloads/att_face/s4/7.pgm;3 339 | /Users/youngjoo.chung/Downloads/att_face/s4/8.pgm;3 340 | /Users/youngjoo.chung/Downloads/att_face/s4/9.pgm;3 341 | /Users/youngjoo.chung/Downloads/att_face/s40/1.pgm;39 342 | /Users/youngjoo.chung/Downloads/att_face/s40/10.pgm;39 343 | /Users/youngjoo.chung/Downloads/att_face/s40/2.pgm;39 344 | /Users/youngjoo.chung/Downloads/att_face/s40/3.pgm;39 345 | /Users/youngjoo.chung/Downloads/att_face/s40/4.pgm;39 346 | /Users/youngjoo.chung/Downloads/att_face/s40/5.pgm;39 347 | /Users/youngjoo.chung/Downloads/att_face/s40/6.pgm;39 348 | /Users/youngjoo.chung/Downloads/att_face/s40/7.pgm;39 349 | 
/Users/youngjoo.chung/Downloads/att_face/s40/8.pgm;39 350 | /Users/youngjoo.chung/Downloads/att_face/s40/9.pgm;39 351 | /Users/youngjoo.chung/Downloads/att_face/s5/1.pgm;4 352 | /Users/youngjoo.chung/Downloads/att_face/s5/10.pgm;4 353 | /Users/youngjoo.chung/Downloads/att_face/s5/2.pgm;4 354 | /Users/youngjoo.chung/Downloads/att_face/s5/3.pgm;4 355 | /Users/youngjoo.chung/Downloads/att_face/s5/4.pgm;4 356 | /Users/youngjoo.chung/Downloads/att_face/s5/5.pgm;4 357 | /Users/youngjoo.chung/Downloads/att_face/s5/6.pgm;4 358 | /Users/youngjoo.chung/Downloads/att_face/s5/7.pgm;4 359 | /Users/youngjoo.chung/Downloads/att_face/s5/8.pgm;4 360 | /Users/youngjoo.chung/Downloads/att_face/s5/9.pgm;4 361 | /Users/youngjoo.chung/Downloads/att_face/s6/1.pgm;5 362 | /Users/youngjoo.chung/Downloads/att_face/s6/10.pgm;5 363 | /Users/youngjoo.chung/Downloads/att_face/s6/2.pgm;5 364 | /Users/youngjoo.chung/Downloads/att_face/s6/3.pgm;5 365 | /Users/youngjoo.chung/Downloads/att_face/s6/4.pgm;5 366 | /Users/youngjoo.chung/Downloads/att_face/s6/5.pgm;5 367 | /Users/youngjoo.chung/Downloads/att_face/s6/6.pgm;5 368 | /Users/youngjoo.chung/Downloads/att_face/s6/7.pgm;5 369 | /Users/youngjoo.chung/Downloads/att_face/s6/8.pgm;5 370 | /Users/youngjoo.chung/Downloads/att_face/s6/9.pgm;5 371 | /Users/youngjoo.chung/Downloads/att_face/s7/1.pgm;6 372 | /Users/youngjoo.chung/Downloads/att_face/s7/10.pgm;6 373 | /Users/youngjoo.chung/Downloads/att_face/s7/2.pgm;6 374 | /Users/youngjoo.chung/Downloads/att_face/s7/3.pgm;6 375 | /Users/youngjoo.chung/Downloads/att_face/s7/4.pgm;6 376 | /Users/youngjoo.chung/Downloads/att_face/s7/5.pgm;6 377 | /Users/youngjoo.chung/Downloads/att_face/s7/6.pgm;6 378 | /Users/youngjoo.chung/Downloads/att_face/s7/7.pgm;6 379 | /Users/youngjoo.chung/Downloads/att_face/s7/8.pgm;6 380 | /Users/youngjoo.chung/Downloads/att_face/s7/9.pgm;6 381 | /Users/youngjoo.chung/Downloads/att_face/s8/1.pgm;7 382 | /Users/youngjoo.chung/Downloads/att_face/s8/10.pgm;7 383 | /Users/youngjoo.chung/Downloads/att_face/s8/2.pgm;7 384 | /Users/youngjoo.chung/Downloads/att_face/s8/3.pgm;7 385 | /Users/youngjoo.chung/Downloads/att_face/s8/4.pgm;7 386 | /Users/youngjoo.chung/Downloads/att_face/s8/5.pgm;7 387 | /Users/youngjoo.chung/Downloads/att_face/s8/6.pgm;7 388 | /Users/youngjoo.chung/Downloads/att_face/s8/7.pgm;7 389 | /Users/youngjoo.chung/Downloads/att_face/s8/8.pgm;7 390 | /Users/youngjoo.chung/Downloads/att_face/s8/9.pgm;7 391 | /Users/youngjoo.chung/Downloads/att_face/s9/1.pgm;8 392 | /Users/youngjoo.chung/Downloads/att_face/s9/10.pgm;8 393 | /Users/youngjoo.chung/Downloads/att_face/s9/2.pgm;8 394 | /Users/youngjoo.chung/Downloads/att_face/s9/3.pgm;8 395 | /Users/youngjoo.chung/Downloads/att_face/s9/4.pgm;8 396 | /Users/youngjoo.chung/Downloads/att_face/s9/5.pgm;8 397 | /Users/youngjoo.chung/Downloads/att_face/s9/6.pgm;8 398 | /Users/youngjoo.chung/Downloads/att_face/s9/7.pgm;8 399 | /Users/youngjoo.chung/Downloads/att_face/s9/8.pgm;8 400 | /Users/youngjoo.chung/Downloads/att_face/s9/9.pgm;8 401 | -------------------------------------------------------------------------------- /chapter8/8-1-vector-normalization-naive.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import time 3 | 4 | start_time = time.time() 5 | vector = np.array(range(1000000), dtype=float) 6 | 7 | # 각 원소의 제곱의 합을 구합니다. 8 | sum_value = 0 9 | for element in vector: 10 | sum_value += element * element 11 | sqrt_sum = np.sqrt(sum_value) 12 | 13 | # 구해진 값으로 벡터의 각 원소를 스케일해줍니다. 
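# For comparison: NumPy collapses this scaling loop (and the squared sum above)
# into a single vectorized expression, vector / np.linalg.norm(vector);
# 8-2 below times a vectorized variant against this element-by-element loop.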
14 | for i in range(len(vector)): 15 | vector[i] = vector[i] / sqrt_sum 16 | 17 | end_time = time.time() 18 | print('time spent:', end_time - start_time) 19 | -------------------------------------------------------------------------------- /chapter8/8-2-vector-normalization-numpy.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import time 3 | 4 | start_time = time.time() 5 | vector = np.array(range(1000000), dtype=float) 6 | 7 | # 각 원소의 제곱의 합을 구합니다. 8 | sum_value = np.sum(vector * vector) 9 | sqrt_sum = np.sqrt(sum_value) 10 | 11 | # 구해진 값으로 벡터의 각 원소를 스케일해줍니다. 12 | vector = vector / sqrt_sum 13 | end_time = time.time() 14 | print('time spent:', end_time - start_time) 15 | -------------------------------------------------------------------------------- /chapter9/chapter09_sklearn_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# 예제 9-2" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "import sklearn\n", 19 | "import numpy as np\n", 20 | "from sklearn import datasets # 보스턴 데이터셋\n", 21 | "from sklearn.linear_model import LinearRegression # 사이킷런 선형회귀함수" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "outputs": [ 29 | { 30 | "data": { 31 | "text/plain": [ 32 | "'0.20.2'" 33 | ] 34 | }, 35 | "execution_count": 2, 36 | "metadata": {}, 37 | "output_type": "execute_result" 38 | } 39 | ], 40 | "source": [ 41 | "sklearn.__version__ # 사이킷런의 버전을 확인합니다." 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 3, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "boston = datasets.load_boston() # 보스턴 데이터셋을 로드합니다" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 4, 56 | "metadata": {}, 57 | "outputs": [ 58 | { 59 | "name": "stdout", 60 | "output_type": "stream", 61 | "text": [ 62 | "dict_keys(['data', 'target', 'feature_names', 'DESCR', 'filename'])\n" 63 | ] 64 | } 65 | ], 66 | "source": [ 67 | "print(boston.keys()) \n", 68 | "# 데이터셋의 키값을 출력합니다. \n", 69 | "#data는 피쳐값, feature_name은 피쳐이름, target은 집값, DESCR은 데이터 출처, filename은 파일패스를 나타냅니다. " 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 5, 75 | "metadata": {}, 76 | "outputs": [ 77 | { 78 | "name": "stderr", 79 | "output_type": "stream", 80 | "text": [ 81 | "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/scipy/linalg/basic.py:1018: RuntimeWarning: internal gelsd driver lwork query error, required iwork dimension not returned. This is likely the result of LAPACK bug 0038, fixed in LAPACK 3.2.2 (released July 21, 2010). 
Falling back to 'gelss' driver.\n", 82 | " warnings.warn(mesg, RuntimeWarning)\n" 83 | ] 84 | }, 85 | { 86 | "data": { 87 | "text/plain": [ 88 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None,\n", 89 | " normalize=False)" 90 | ] 91 | }, 92 | "execution_count": 5, 93 | "metadata": {}, 94 | "output_type": "execute_result" 95 | } 96 | ], 97 | "source": [ 98 | "lr = LinearRegression() # 선형회귀모델 객체를 생성합니다 \n", 99 | "lr.fit(boston.data, boston.target) #데이터로 lr모델을 학습시킵니다" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 6, 105 | "metadata": {}, 106 | "outputs": [ 107 | { 108 | "name": "stdout", 109 | "output_type": "stream", 110 | "text": [ 111 | "예측값: [ 30.56759672] 실제값: 34.7\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "predicted_price = lr.predict(boston.data[2].reshape(1,-1)) #3번째 데이터의 집값을 예측합니다. \n", 117 | "print('예측값:', predicted_price, '실제값:',boston.target[2])" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 7, 123 | "metadata": {}, 124 | "outputs": [ 125 | { 126 | "name": "stdout", 127 | "output_type": "stream", 128 | "text": [ 129 | "[('CRIM', -0.10801135783679496), ('ZN', 0.046420458366878317), ('INDUS', 0.020558626367067345), ('CHAS', 2.686733819344878), ('NOX', -17.766611228299929), ('RM', 3.8098652068092265), ('AGE', 0.00069222464034051495), ('DIS', -1.4755668456002586), ('RAD', 0.30604947898517054), ('TAX', -0.012334593916574), ('PTRATIO', -0.9527472317072897), ('B', 0.0093116832737939904), ('LSTAT', -0.52475837785549317)]\n" 130 | ] 131 | } 132 | ], 133 | "source": [ 134 | "print([x for x in zip(boston.feature_names, lr.coef_)]) \n", 135 | "# 피쳐명을 키로, 각 피쳐의 계수값을 밸류로 하는 딕셔너리를 만든 후 출력합니다. \n", 136 | "# 사이킷런 0.20에서 데이터포인트 [445, 0]의 값이 바뀌어서 계수가 책과는 약간 다릅니다.\n", 137 | "# https://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_boston.html" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": null, 143 | "metadata": { 144 | "collapsed": true 145 | }, 146 | "outputs": [], 147 | "source": [] 148 | } 149 | ], 150 | "metadata": { 151 | "kernelspec": { 152 | "display_name": "Python 3", 153 | "language": "python", 154 | "name": "python3" 155 | }, 156 | "language_info": { 157 | "codemirror_mode": { 158 | "name": "ipython", 159 | "version": 3 160 | }, 161 | "file_extension": ".py", 162 | "mimetype": "text/x-python", 163 | "name": "python", 164 | "nbconvert_exporter": "python", 165 | "pygments_lexer": "ipython3", 166 | "version": "3.6.1" 167 | } 168 | }, 169 | "nbformat": 4, 170 | "nbformat_minor": 2 171 | } 172 | -------------------------------------------------------------------------------- /chapter9/chapter09_tf_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. 
In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
13 | " from ._conv import register_converters as _register_converters\n"
14 | ]
15 | }
16 | ],
17 | "source": [
18 | "import tensorflow as tf\n",
19 | "import numpy as np\n",
20 | "from matplotlib import pyplot as plt # 학습 결과 플롯용 패키지\n",
21 | "from sklearn import datasets"
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 2,
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "data": {
31 | "text/plain": [
32 | "'1.9.0'"
33 | ]
34 | },
35 | "execution_count": 2,
36 | "metadata": {},
37 | "output_type": "execute_result"
38 | }
39 | ],
40 | "source": [
41 | "tf.__version__"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 3,
47 | "metadata": {
48 | "collapsed": true
49 | },
50 | "outputs": [],
51 | "source": [
52 | "boston = datasets.load_boston()\n",
53 | "boston_slice = [x[5] for x in boston.data] #6번째 피처만 사용\n",
54 | "\n",
55 | "# 텐서플로에서 사용할 크기로 조정 \n",
56 | "# reshape 함수로 데이터를 열이 1인 배열형으로 변환\n",
57 | "data_x=np.array(boston_slice).reshape(-1,1)\n",
58 | "data_y=boston.target.reshape(-1,1) "
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 4,
64 | "metadata": {},
65 | "outputs": [
66 | {
67 | "name": "stdout",
68 | "output_type": "stream",
69 | "text": [
70 | "(506, 1) (506, 1)\n"
71 | ]
72 | }
73 | ],
74 | "source": [
75 | "print(data_x.shape, data_y.shape)"
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": 5,
81 | "metadata": {
82 | "collapsed": true
83 | },
84 | "outputs": [],
85 | "source": [
86 | "# 입력데이터의 샘플 수를 얻습니다\n",
87 | "n_sample = data_x.shape[0] \n",
88 | "# 샘플수x피처수의 플레이스홀더. 피처를 하나만 사용하므로 피처수는 1이 됩니다.\n",
89 | "X = tf.placeholder(tf.float32, shape=(n_sample,1), name='X') \n",
90 | "# 샘플수x타깃수의 플레이스홀더. 타깃수가 집값 하나이므로 1이 됩니다.\n",
91 | "y = tf.placeholder(tf.float32, shape=(n_sample,1), name='y')\n",
92 | "# 기울기. 피처수x타깃수의 크기를 가집니다. 피처 수와 타깃수가 1이므로 기울기는 1x1로 정의됩니다. \n",
93 | "W = tf.Variable(tf.zeros((1,1)),name='weights')\n",
94 | "# 편향. 타깃수x타깃수의 크기를 가집니다. 타깃수가 1이므로 편향은 1x1로 정의됩니다. \n",
95 | "b = tf.Variable(tf.zeros((1,1)), name='bias')"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 6,
101 | "metadata": {
102 | "collapsed": true
103 | },
104 | "outputs": [],
105 | "source": [
106 | "y_pred = tf.matmul(X,W) + b # 모델\n",
107 | "loss = tf.reduce_mean(tf.square(y_pred - y)) # 손실 함수\n",
108 | "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) #최적화 클래스\n",
109 | "train_op = optimizer.minimize(loss) # 최적화 함수. 손실 함수의 최솟값을 찾습니다.\n",
110 | "summary_op = tf.summary.scalar('loss',loss) # 시각화를 위한 서머리 함수. 
손실 함수의 변화를 기록합니다.\n"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": 7,
116 | "metadata": {},
117 | "outputs": [
118 | {
119 | "data": {
120 | "image/png": "[base64 PNG payload omitted: the cell's recorded matplotlib figure output; the dump is truncated partway through this payload]"
pjR+7wYih256FmRR\nZWkNYfcNG1Euded9yyXBf7nxA7j3xk1YNdx9xmaeg3m1UvY8F9TaCOSl2VrAwy+8lpmfH5lL4/du\nIGbowGCfepSlNQS/FMG+gycLs+B5dPf1WOdxwtC+m69u/7/zWrglldxaAnANKNvS+L0bmIA+yLK2\nhuD15lqUIGV90qi5vPYlEXx2/9GuNzS3bokllz4vg7QGlEdp/N4NRMpl0JmUE/ZjaqaOrZOHsH7i\nALZOHgq11d96DJOlzwtXlvy/KUXlkmD3DRsBOL/2QHvW3dsewe3ndNu1V8T686N4xP1754Qz9AEQ\nZyVEFAs/QQ6eqJRLmDuX3S3/JRHsu/nqpefe+9o7dVW08qpWz3Knn9PYlatZyZIzaVQg+R5wISJX\nAPgrAJeivfj+gKp+VURWA9gPYB2AUwBuUdWzXo/FAy6Kx+tQhV3bNxj9Y3Z7DIt1ko9b6iFpIu0F\nT6fDNKwTiNye8/qJA46fQgTAK5M74h045ZbpARcmKZd5AJ9X1fcD2ALg0yLyfgATAL6rqr8F4Lud\nr2nAuAVia6Zu0nnRL29uBcAsBHOgvQlq5ovX4/5bN3dVTt10TQ1PHKl7PuesHo5NxeAb0FX1DVX9\nYefPvwTwEoAagE8AeLDzbQ8CGI9rkJRNUzN1z5PrTUu28hbMrAXP8dEanp/Yhlcmd2DX9g1G5YVp\n5FVpcARaFBWRdQBGAbwA4FJVfaPzVz9DOyVDA2TfwZNGi5h29tm4tRBqHZCcF2/Ntbpm3dYagEl5\n4aDviaB4GS+Kisi7ATwB4G5V/Uex7ZhQVRURx3/NInIXgLsAYO3atf2NljIlTImhNRvvXQhVYNmp\n91m1iHaTMSsI73n6uOeCbu8nkEHeE0HxMpqhi0gZ7WD+kKo+2bn55yJyWefvLwPwptN9VfUBVR1T\n1bGRkZEoxkwZETRVYk8tOO2iy0Mwt1hNxqZm6q4NxwCmUyhZvgFd2lPxbwB4SVX/1PZXTwO4s/Pn\nOwE8Ff3wKMvcaqyd9KYWirCBaOvkIdy9/6jr35t2TSSKiknKZSuAPwDwoohY/3r/M4BJAI+KyKcA\nnAZwSzxDpKyy19n6ne1p1Vhb3HbR5Ynf+G+79goGc0qUb0BX1e8DrmtWH412OJQ39nzw5r3POaYf\nag6pmeuuGsE3f/Bq7ONL0xNH6hi7cjWDOiWGW/8zLIot9Unas9P5hHqnHPLhE7NJDSs17IhISePW\n/4zKQg/zoM35g2x1LkIO3USenycPVs8fBvSM8uqlnMQvVdg3FJOSvKmZOkTcj50rkrxtmrJkYUJB\nwTHlklFp9zCPqzm/FSjydKJc2E1PTummvKTRsnQoCpljQM+otHt+xPWG4hQokhQmON++Za1xeaal\nWikvK1m03sxM+tukLe0JBYXDgJ5Raff8iOsNJWxAGC4PRdIHXdEOtqZq1Qq+PL4J9964CSWv8+Qc\n9KYm8jTrTXtCQeEwoGdUmj0/pmbqmDs3v+z2KN5QwgaEudYi3o6oD/q5ebPHsT/f8dEavnLL1cYz\n9Uazhc17n+tKrbi9mdUbzcylX9KeUFA4vv3Qo8R+6OkyqVpwO2yiWiljz86Nfb+hBDnMIg21aqXr\n9QG6q3bC1s9XyiVcUB7yPC+1Ui5lamcpq1yyw7QfOgP6gHAKpE4BxOvAit7dnv2MxWR3adJ6n6Pb\naxb2zahaKeOd+UXP+0f5OlNxRHnABRWAaf42ysUwt4oOq4/4qYhO6LFy26YpbhEsSycI2rtX7dxe\ns6C5dMtbzdZSGs0NFx2pHwzoA8IrfxvHiTomFR1/NPVioMd0cseWtUu5bdMPm7dfuxY3XVPrqnhR\ntLfq28fn9pqFPTlpTbWy9GbmFtS56Ej9YEAfEF6Bwh5oo1oM8/tEMDVTx0N99HIpieCOLWvx5fFN\nxqWQ9vscPjG7rF1v7ycWt9esVq0snVpkynoNvQ714KIj9YsBfUB4tbq1B7Koqmu8zhoFwp12ZBEA\nP7739/Dl8U0A/NMUq4bLqFUrWFTF4ROzmJqp+44PaL9m5aHl6ZXXG02cnWsZ17RbryGApU8twPlD\nPYD2m431c0ir2iUvm57IHbf+F5y9UuGiStl1Jtt7TFq/1QwlEcfUhJV/7idXfFFPHblXK95ySfCr\nX88vVZdYqR+v1gNTM/Xzz98haqvt/36nLAnOtw7eOnnI8VAPwfk0Tlpb7LnVvxg4Qy+w3jx2o+k+\nq4wid2uf4bnlma3b+7leo9nqmkG6ffpYNVzGhStXoNXTZ6DZWvDMt1spqH0HT6K14P05QgHPRVL7\n83R7E/NL/SQhT5ueyB0DeoEFOeatt8IjqN43DzfWYmCQ046c2BdZndJE99+6Gbtv2Oh5PJwbK5CZ\nforwWiSdOze/9MYT5E3s9c5idVIpEG71LwamXAzlcZNFkDrvfvuTmyxM9u68nD59xnWTjsmB0fbu\nk71pIusNJqx6o4laBKcqnZ1rLY1j1/YN2PX4Md9ZPwBUh8uJpkDc0lasuskXztAN5KmpkmVqph6o\nEVW/MzGThUlrcXVqpo7Ne5/z3HFp2nPF6bpTM3V8/tFjfe9Gve6qkb4+RVi6Uhc9sXwI7Ty/nVWC\nmWQKhFv9i4EB3UAe84tBq0j6nYn53X945YqlYH7Pky/6pkJq1QqO7r4e99+62XMjjgJL6QjrjeLu\n/UdD14rbHT4x67sRyNTrjSb2PH18WT5/EcCFK1d0lUH+urXg+vrElQJJs3cQRYcpFwN5zC96ja13\n+3qQmZhb6mnX9g3Y9dixZQGrdzwmqZlySfD2O/NYP3Ggq6eKWw+YeqOJXY8dwyKAhQgbrb/eaC6l\nctx60JSGxOia1eGyax+XRrPVNTv2erQ4UyBRVDdRujhDN5DHVqJem2LCzsS8Uk/jozW8+wL3+YE1\nHpM3wYUFRaPZ6roGAM/ZcmtRIw3mQPdrOD5aW7a7FDB7A/HbxWrVoJs8DlMg5IUB3UAe84teY7a2\nn78yuQPPT2wznpX5pZ4aHp0ErdfK5E1wsefrZmsBe585vjTuMJ1UgvZfESxva+u0u9SP9Yb5lkeK\nySQ9xBQImWBAN5DH/GIcY/ZLPbkFa2tx023Lu4mzc61Q5X+W2669IlBQt0Ks/VNI0BSbtalofLTm\nOmZrF6sXqwNjlv+9UTawfS4Zc2utWxLBV265GsDyPHelXMJN19TwxJF61+0mZYm9rMAWpqd6zbCX\nudu4rKAbpIzR3grXq30x4L4+kLUe6ZQOts+NWB77XEQ9ZrfNQAuqy/Lc9k8Fh0/MOm5wqlUrgSpI\nrBmy9ekjiHqjicMnZrH1N1YvzdRLItj6G6u7xuv2JvN6oxloM1RvSs7rE5P976xxAfn4JEjZwhm6\nAdPDIbIkrjFbNd5Oed9Vw2UMr1yxrAJm/cQB10BZrZTx9rl5o802vYc/uH1i8OL2GvgdumH/dHD3\n/qOe1xC0D5a2mocR9Ysz9AjltQ4
9jjGPj9aw6DIJODvXcqyA8cp5N5otQNtvBtbM9Y4ta40WoZ1m\nzOUhWbZRx87pNbBX7zjp3eHqR9H/zluiMFiHbqBIdehRjNmru6GdFTz9try3FhXDK1dg5ovXL902\nduVq31YL1te932fd5jbG3tfAqza+5nDtVR415W7XIEoCA7qBPPa5iHPMu7ZvMF6UtDbn7Hn6uOfu\n0N4A6LfJpXeD0323bu76/vHRmmtKpvc18Hpzcjrfc/cNG317slSHy9g6eShXvX8o/5hyMZBmHXrY\nhc04x+y0wOfWd8UKnl512Pbv82Pf3u/XW8f0NXA4w8Lz9vHRGvbdfPXSImbvt1k92PPU+4eKgQHd\nQFp16P00BUt6zB+/+jLP4OkVsE3faLz6wDjlxk1fA7fNnl6bQO0HXd/X6TdjXcOtB3vUay55rLyi\nePlWuYjIXwD4OIA3VfW3O7etBrAfwDoApwDcoqpn/S6W1yqXtLilDHqrPXrF3erXrYLmpmtqOHxi\n1vG6brXjq4bL2H3DRqPx+VW1CIBXJncEfj7rJg64/t2pEI/nVtUTdnxO8lh5ReGZVrmY5ND/EsB/\nB/BXttsmAHxXVSdFZKLz9RfCDJTchVnYTOIoMbcKmsMnZl3faNwWMIOMyW+hMez6QLVSds3vdx1H\nZyiJNRevKiYG9MHlG9BV9W9FZF3PzZ8A8C86f34QwPfAgB65MIHB9Be9n1l82Aqafrv5eVXXCBB6\nfWDPzo2uteUmr5v1fdZt1101smxnbNRrLnmsvKL4hc2hX6qqb3T+/DMAl7p9o4jcJSLTIjI9O8va\n3CDCLGyDtEJOAAAIUElEQVSa/KL3e2BHWt0nvZ63IvwnEK/7+b1uux47hl2PH+u67Ykjddx0TS3W\n9Ys8dgCl+PW9KKrtJLxrIl5VH1DVMVUdGxnp79zKQRNmYdPkF73fTUdpVf2Mj9Zcq2n6PYTC7f5+\nr1trUZeVL9rTT0E7WprKYwdQil/YOvSfi8hlqvqGiFwG4M0oB1UUUSxOBk1TONWI9/6i9/txPYp8\neFh7dm70fH5hX/N+Xjcncac+0vwZUHaFDehPA7gTwGTn/09FNqKCSGJx0onJL3oUi3Zh8uFhgq3T\nfe69cZPj4wR9zXsf26tKBzDfIWt9b9x4whD1MilbfBjtBdBLAPwcwG4AUwAeBbAWwGm0yxbP+F1s\nkMoWw5YcJiHKkjfTIB3mmib3sV8fcG992/uaRzWe8pAAgq60S1RN0Dj7JktkZYuqepvLX3008KgG\nSJarEJxm8dddNYJ9B0/is/uPBpo9m86Iw5TZ+d3HtC+602seZjx+vWOiCr5pfbqj/GMvl5hkvf+L\n/eN62AASJCiGeYPzu4/JgdOA82sedelllIGWNeYUFrf+xyRPVQhhq16CBMUwZXZ+9zH9tOP0mme1\n7G9qpm7cJZKoF2foMclqFYJTbjZIYLbff0jE8aALp6BoUkUS9D4mi5SrhsuOr3nQ8bjltKPMdVuf\nlNyk/WZD2ccTi1KW5OKX20LgBeUhx/7evYuJJjnr8pDg3ResQGOu5djPJYoqF7/+MPbn5rfI2c+C\nrtNZqf0siHr1qmGflsFmuijKgJ6ipBssuQWMaqWMd+YXfcfhdUj0oioucjhOLu5AZA/KF1XKEIHj\nm0k/vJ630yeUsJVMXkf13d/T750GS5TNuSgmSS9+uaVW3mq2cN+tm31nq273X1TFK5M7sHXy0LIm\nV3Ev5iVRi+32vJ2Cudf3+3FLIdWqFQZzMsKAnqKkSxu9Km9MAqNf5U6SzyfJVJXb83aboYfNdYdZ\nZyCyY5VLipKutOi38sbv/kk9n36biwXl9rxvu/aKSCuZ0jpIhYqDM/QUJT0j67fyxu/+ST2fpFNV\nXs/b5DDroNdiAKewuCiasqJt8U7i+ZicCFS015UGGxdFc6JoM7Ikno9fLp9b52lQMYdOsQlziLHJ\nffxy+f32eyfKK87QKRZhZsmm9/HL5We5MRpRnBjQKRZxdFe080rtZL0xGlFcmHKhWMTRXdFUnhqj\nEUWJAZ1iEUd3RVOs56ZBxZQLxSKO7opBFK16iMgEAzrFIswmpqy2HCbKC24sIiLKONONRcyhExEV\nBAM6EVFBMKATERUEAzoRUUEwoBMRFQQDOhFRQTCgExEVBAM6EVFBMKATERUEAzoRUUEwoBMRFURf\nAV1E/pWInBSRl0VkIqpBERFRcKEDuoiUAPwZgI8BeD+A20Tk/VENjIiIgulnhv4hAC+r6k9U9RyA\nRwB8IpphERFRUP30Q68BeM329U8BXNvfcNzd8fUX8P2XfxHXwxMRxebDv3kJvvlvYwuPS2JfFBWR\nu0RkWkSmZ2dn474cEdHA6meGXgdwhe3ryzu3dVHVBwA8ALQPuAh7sSTe3YiI8qyfGfrfAfgtEVkv\nIisBfBLA09EMi4iIggo9Q1fVeRH5DwAOAigB+AtVPR7ZyIiIKJC+DolW1b8G8NcRjYWIiPrAnaJE\nRAXBgE5EVBAM6EREBcGATkRUEAzoREQFIaqh9/oEv5jILIDTiV0wnEsADEKPAT7PYhmU5wkMznO1\nP88rVXXE7w6JBvQ8EJFpVR1Lexxx4/MslkF5nsDgPNcwz5MpFyKigmBAJyIqCAb05R5IewAJ4fMs\nlkF5nsDgPNfAz5M5dCKiguAMnYioIBjQbUSkJCIzIvJs2mOJk4icEpEXReSoiEynPZ64iEhVRB4X\nkRMi8pKI/E7aY4qaiGzo/Byt//5RRO5Oe1xxEJHPishxEfl7EXlYRC5Ie0xxEJHPdJ7j8aA/y766\nLRbQZwC8BOC9aQ8kAdepatFreb8K4DuqenOnZ/9w2gOKmqqeBLAZWDq4vQ7g26kOKgYiUgPwnwC8\nX1WbIvIo2mcw/GWqA4uYiPw2gH+H9pnN5wB8R0SeVdWXTe7PGXqHiFwOYAeAr6c9FuqfiFwE4CMA\nvgEAqnpOVRvpjip2HwXwY1XN+ua9sFYAqIjICrTfnF9PeTxx+KcAXlDVOVWdB/C/ANxoemcG9PPu\nB/CHABbTHkgCFMBzInJERO5KezAxWQ9gFsD/6KTRvi4iF6Y9qJh9EsDDaQ8iDqpaB/DfALwK4A0A\nb6nqc+mOKhZ/D+Cfi8jFIjIM4PfQfdSnJwZ0ACLycQBvquqRtMeSkA+r6gcBfAzAp0XkI2kPKAYr\nAHwQwNdUdRTA2wAm0h1SfDoppZ0AHkt7LHEQkVUAPoH2G/UaABeKyB3pjip6qvoSgD8B8ByA7wA4\nCmDB9P4M6G1bAewUkVMAHgGwTUS+me6Q4tOZ7UBV30Q73/qhdEcUi58C+KmqvtD5+nG0A3xRfQzA\nD1X152kPJCb/EsArqjqrqi0ATwL4ZymPKRaq+g1VvUZVPwLgLID/Z3pfBnQAqnqPql6uquvQ/th6\nSFUL9+4PACJyoYi8x/ozgOvR/phXKKr6MwCviciGzk0fBfB/UxxS3G5DQdMtHa8C2CIiwyIi
aP88\nX0p5TLEQkX/S+f9atPPn3zK9L6tcBs+lAL7d/p3ACgDfUtXvpDuk2PxHAA910hE/AfBvUh5PLDpv\nzL8L4N+nPZa4qOoLIvI4gB8CmAcwg+LuGH1CRC4G0ALw6SCL+dwpSkRUEEy5EBEVBAM6EVFBMKAT\nERUEAzoRUUEwoBMRFQQDOhFRQTCgExEVBAM6EVFB/H9PjYeZuGKbyAAAAABJRU5ErkJggg==\n", 121 | "text/plain": [ 122 | "" 123 | ] 124 | }, 125 | "metadata": {}, 126 | "output_type": "display_data" 127 | }, 128 | { 129 | "name": "stdout", 130 | "output_type": "stream", 131 | "text": [ 132 | "loss = 592.1469\n", 133 | "loss = 155.3169\n", 134 | "loss = 76.3167\n", 135 | "loss = 62.0246\n", 136 | "loss = 59.4341\n", 137 | "loss = 58.9596\n", 138 | "loss = 58.8678\n", 139 | "loss = 58.8452\n", 140 | "loss = 58.8351\n", 141 | "loss = 58.8273\n" 142 | ] 143 | }, 144 | { 145 | "data": { 146 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXuUVPW157+7iwKq0dCAHaINCEbTnRgEtK+SkLguOAER\nxR7jM49lcjNx1hqTGxPTsc0yETMkdEIy6sxyknGSuTE3BlE07SuKSSCJ8UZMY0MIAYxGHhaoKBQq\nXUB19Z4/qk5Rfeo8fufUedf+rMXqpurUOfuc0/X97bN/e+8fMTMEQRCE+NMUtgGCIAiCN4igC4Ig\nJAQRdEEQhIQggi4IgpAQRNAFQRASggi6IAhCQhBBFwRBSAgi6IIgCAlBBF0QBCEhjAryYCeddBJP\nnz49yEMKgiDEno0bN77BzK122wUq6NOnT0d/f3+QhxQEQYg9RLRLZTsJuQiCICQEEXRBEISEIIIu\nCIKQEETQBUEQEoIIuiAIQkJQynIhop0A3gZQBDDEzJ1ENBHAagDTAewEcCUzH/THTKFe+gayWLl2\nB/bm8jilJYPuRe3omtMWtll14fU5BXmNvDxW0PfWq+Np+8nm8kgRochc+dmcbkJ+aBjMQIoI15w3\nFcu7ZtZ9fP1n53e0Yv32/Z7eh+rzaQv4u0YqKxaVBb2Tmd+oeu17AA4wcy8R9QCYwMw3We2ns7OT\nJW0xePoGsrj5oS3IF4qV1zLpFFZcNjO2ou71OQV5jbw8VtD31qvjGe3Hjk/NnYbOUye6Pr7KMb28\nD/Xusxoi2sjMnXbb1RNyuRTAPeXf7wHQVce+BB9ZuXZHzR9avlDEyrU7QrKofrw+pyCvkZfHCvre\nenU8o/3YsWrDnrqOr3JML+9Dvft0g6qgM4CniGgjEV1Xfm0yM+8r//4qgMlGHySi64ion4j69+/f\nX6e5ghv25vKOXo8DXp9TkNfIy2MFfW+9Op4b+4rMdR1f9Zhe3od69ukGVUH/CDOfDWAxgOuJ6Pzq\nN7kUtzGM3TDz3czcycydra22lauCD5zSknH0ehzw+pyCvEZeHivoe+vV8dzYlyKq6/iqx/TyPtSz\nTzcoCTozZ8s/XwfwSwDnAniNiE4GgPLP1/0yUqiP7kXtyKRTI17LpFPoXtQekkX14/U5BXmNvDxW\n0PfWq+MZ7ceOa86bWtfxVY7p5X2od59usM1yIaJxAJqY+e3y7wsBfAvAIwCuBdBb/vmwn4YK7tEm\nY5KU5eL1OQV5jbw8VtD31qvjVe/HaZaL2+Mb2a5luWg2VMe7nZyT2flELsuFiE5DySsHSgPAL5j5\n20Q0CcD9AKYB2IVS2uIBq31JlosgCEaEmVYbhyww1SwXWw+dmf8BYJbB628CuMCdeYIgCCX0gprN\n5XHzQ1sAOPOSrfZvNVhYZc5ERdBVkUpRQRBCxc/US22wyObyYBwfLPoGspVtkpQFJoIuCEKo+Cmo\nKoNFkrLARNAFQQgVPwVVZbBIUhaYCLogCKHip6CqDBZdc9qw4rKZaGvJgAC0tWQiNSHqhECXoBME\nQdDjZ+pl96J2wwwW/WDRNactlgKuRwRdEITQ8UtQk1iDYYUIuiAIvhGFts1J8b5VEEEXBMEX/M4v\nF2qRSVFBEHwhiW2bo44IuiAIntM3kEU2QQU7cUEEXRAET9FCLWbEsWAnLkgMXRAET7FavUefMhiF\nSdMkIYIuCIKnWIVUqgt2ZNLUeyTkIgiCp5iFVNpaMspdDgV3iKALguApqqX8QXY57BvIYl7vOszo\neRzzeteN6LaYJCTkIgiCp6hWZ57SkjHMhPF60rSRQjsi6IIgeI5KdaZqn5V6SdICFnaIoAuCEApB\n9VlJ0gIWdoigC4KghB8phkH0WQkqtBMFZFJUEARbVJZyc7tfvycrk7SAhR0i6IIg2GIWh172yFbX\n+/RrkNCTpAUs7JCQiyAItpjFm3P5AvoGsq7EMcjJykZpoSseuiAItljFm29YvclVuKSRJiuDQgRd\nEARb7OLNbsIlfi4OradRCotE0AVBsKVrThsmNKctt3Fath/UZGVQsfooIIIuCIISt15yZo0A68nm\n8sqecFCTlY3UM0YmRQVBUKK6EMhs8QoCKu+plNgHMVnZSLF68dAFQVCma04bnulZgDuuml3jrRMA\n1m0fBU84yFh92IigC4LgGKNwiV7MNcL2hI1i9ekU4fDRocRNkkrIRRBiRlRW+dGHS+b1rotkib2+\nZ0xLcxrvHBlCLl8AkKzui+KhC0KMiHLGRpRL7LVQ0cu9S9A8ehQKwyOfJ6IQGvICEXRBiBFRztiI\nS4l9kidJJeQiCDEi6mIUhxL7JHdfFA9dEGJEI2Vs+EWUQ0P1IoIuCDEiyWIUFHEJDblBOeRCRCkA\n/QCyzHwxEc0AcB+ASQA2Avg0Mx/zx0xBEIDgVvlJOnEIDbnBSQz9SwC2AXhX+f/fBXA7M99HRD8C\n8DkAP/TYPkEQdMRdjKKSdplElEIuRDQFwBIAPy7/nwAsALCmvMk9ALr8MFAQhOQQ5bTLJKAaQ78D\nwNcADJf/PwlAjpmHyv9/BYDhEEtE1xFRPxH179+/vy5jBUGIN1FOu0wCtoJORBcDeJ2ZN7o5ADPf\nzcydzNzZ2trqZheCICSEqKddxh2VGPo8AEuJ6CIAY1GKod8JoIWIRpW99CkA5JlJEARLkpwDHgVs\nPXRmvpmZpzDzdABXA1jHzJ8EsB7A5eXNrgXwsG9WCoKQCLxMu2yUVYicUE8e+k0AvkJEL6
IUU/+J\nNyYJgpBUvMoBl8lVY4jZrOml93R2dnJ/f39gxxMEIZmYdXZsa8ngmZ4FIVjkL0S0kZk77baTXi6C\nkHCSmPctk6vGSOm/ICSYpIYmpKeNMSLogpBgkpj33TeQxeCxoZrXpaeNhFwEIbaohFKSFprQnjj0\ng1RLJo1lS8+MfSipXkTQBSGG3NK3Bfc+u7uyjqfZMmrjM+nKUmvVjM+kgzDTc4yeOABg3JhRDS/m\ngIRcBCF29A1kR4i5hlEohch4H2avR52kPXF4jQi6IMSMlWt31Ii5hl7YcoO13rnV61FHJkOtEUEX\nhJhh5Y3qhc1M6BjAnG89FbtsF1ngwxoRdKFhiWvpuJlIE1AjbN2L2pFOGcdXDg4WcMPqTbES9iSv\nNuQFMikqNCT6bAmzScUo0r2ovSbTgwB8cu40Y9ttisEPDhYq5w5EfzWkuC/w4Sci6EJDYpWfHXWx\ncLIM3cq1O1AYtm/vkS8UcdujW3GkMBzLQU4oISEXoSGJe7ZE15w2dC9qxyktGezN5bFy7Q7DsImT\n8zk4WEhcEVKjIR660JDEvS+3asjI7DydUO8gl8ReMlFFPHShIYl6toTdhK1qSb/ReTqlnkEuqb1k\noooIutCQRDlbQkUEzbzmbC4/YgCoPk831DvIJbGXTJSRkIvQsEQ1W0JlwralOY2DJsVB+vBL15w2\n9O86gJ8/u9v22Jl0EyaOG+NZeCTucxVxQwRdECKGigjarUujHwBWbdijdOyhYfY0xh33uYq4ISEX\nQYgYKuXthwwabumpHgCKiiuTFYrsaTgk6nMVSUMEXRAihooIqni41dukHHTj8jIcEuW5iiQiIRdB\niBgqhUNG1aLV6AeAa86bqhRDB7wPh0R1riKJiKALQgSxE0Htvdse3VqZHCWUqvzbDAaA5V0z8cvn\nszh8zHgA0DAKh0geeXwQQReECKIiov27Doxog8s4LshGgjtoI+bH9zLSju41m1Eoll7P5vLoXrMZ\ngLQDiCIi6IIQMVSqQFUWuVi5dgeyuTxSRCgyV35akS8Mo/uB44J926NbK2KuUSgybnt0qwh6BJFJ\nUUGIGCrFOFaLXGgDgJYuqIm4cqbL8PFMF7Ncd7PXhXARD10QfMJt7FklD90qEyVFZDpZChyPtbux\nQYg2IuiC4ANO+61Xi3+TSWikOvvErGCHYO+Jq/jp2rFaTBaZbonpItNJR0IuguADTnqY6Hu3mAny\n4aNDlR4tRrnq2iIX9YptuokqmS7Llp6JdBPVvL9s6Zl1HUPwB/HQBcEHnPQwMRJ/I3L5Qo2XbxTS\nefwv+0z3kUmnMDbdZBoDb8mksWzpmZX9O1lMQwgfEXRB8AEnPUycxKure7SY5arnLCYs84Uixoxq\nQjpFI7JXMumUYQWn5KDHCxF0QfCB+R2tNWmFZj1MnC5CYTcA2O0vly8g3USY0JxGbrBQI9SaiGdz\n+RETqLIknTqH8gX86Pcv4Ye/ewkAcMdVswO5ZiLoguAxfQNZPLgxO0LMCcDHzzH2qO3K+PWMt4mR\nq+yvMMxoHj0KA99cWGN79WfN8txF0EcyVBzGZ3/6Zzz99zcM3w+qu6QIuiB4jFFMnAGs377fcHuj\nMn4rDh8rTY6aiao+7m2W1aL39PsGsrjx/s22WTKS0lji3/+0E994eKvp+0tnnYJbLn4/3n3i2MBs\nEkEXBI+xW03IKA6txcPnfOspW1HXWtzqQyT6OLf2/rzedbbxfM0zVyk+atRe5tv2vYXFdz5tuc2y\nSz6Az8ybEZBFtYigC4LHWMWwzcr47bxpPdqgoZLvbhSC0cfzVTNtGqmX+eCxIVzyv/6Il/YfNt1m\n3umT8NPPnot0KhoZ4LaCTkRjAfwBwJjy9muY+VYimgHgPgCTAGwE8GlmPuansYIQB+xi2NVxaL0g\nq6J5yXb57tpAMT6Txth0k+EkKGAdRrHq4pg0Vq7djrvWv2S5zdNfm4+pE5sDssgZKh76UQALmPkd\nIkoD+CMRPQHgKwBuZ+b7iOhHAD4H4Ic+2ioIsaA6hm3lqWvbOBXzai/ZKrxTPVDk8gVk0incbpJt\nYfZUkSLCD66clVgR/9NLb+Ka//us5TZ3feJsLDnr5IAsqg9bQWdmBvBO+b/p8j8GsADAJ8qv3wNg\nGUTQBQHAcVE3m2QklMIlTtIVAWDc6BS+/Z9nVrx7szYBRv1crDJUzMIySVtd6MDhY5jXu85yEL3i\nnCn43uVngRys8hQVlGLoRJRCKaxyOoC7ALwEIMfMQ+VNXgGQnLsuNAR+Fs3YTTIySt65SkvbarSe\n5rf0bTFsnwuUhNhMsMw8+qRWhDIzbrx/Mx4qt0ww4oQxo/CHr83HxHGjA7TMH5QEnZmLAGYTUQuA\nXwLoUD0AEV0H4DoAmDZtmhsbBcFznDbPcsptj261DaU4mQTVYADLHtmKQ/mC4WdTRFhx2UzTcI9V\nhkpSlop77C978YVfDFhuc991czH3tEkBWRQcjrJcmDlHROsBfAhACxGNKnvpUwAYDoHMfDeAuwGg\ns7PT6d+vIPiC1WSiG1Gr9vZbmtNK+eSauJrFrs08d6PuhxpFZnx59Sa0NKeRbiIUhkeW9ycxQ2XP\ngUF89HvrLbf54oLTcePC5J27HpUsl1YAhbKYZwB8DMB3AawHcDlKmS7XAnjYT0MFwUucNM+yQ+/t\nq4h5OlXqaNi/64Dh4s1zT5uA/3jpgGMPHih58QcHC0inCC2ZNA7ljTNb4sqRQhEd33jScpv3TT4B\nD1//EWRGpyy3SxoqHvrJAO4px9GbANzPzI8R0d8A3EdEywEMAPiJj3YKgqc4aZ5lRnXPE6cUimxZ\nlbnzzTw+OXeaYT8Yq26J+mO8fWTINLMlTnzpvgE8vGmv5TZP3vBRdLznXQFZFE1Uslz+AmCOwev/\nAHCuH0YJgt+oFNtY4TZ/vBqrydC9uTyWd80EAKzasKeyJujHz2lD56kTlY9dZI5lQ63f7Xgdn/m3\nP1tuc8U5U7DyilkBWRQPpFJUaEjcZnXU45U74ZSWDPoGslj93J4Ra4Kufm4POk+dWJn41GwfPDZk\n6rXHoaFWbvAYZn/r17bbvbB8MUaPikZVZhQhdpAyVS+dnZ3c398f2PGE6BOnfttOvPJ0ijBu9CjL\nCUyrz668fBaWPbLVdPm3Tbdad0nUQwBe7l3i2BY/WfI/n8bWvW9ZbiNhlBJEtJGZO+22Ew9dCA2/\nUwe9RrWqU18iP73ncWcHKvtYZoOB0et2hUxRaKi16rndlftrRveidlw///SALEoesRD0OHlxgjpe\npw76jV0GjFllZZvDBSwKw2y49qgd2nHrmRvwkt1vDuL8ldbphK0njsFzX78gllWZUSTygh43L05Q\nx8vUwSCw6qJo1biqe1E7bli9ydGxsrk8mtNNGCwMG75v1g89zIrP4jDj/d94EseKxjZrPHvzBXjP\n+OB6hDcSkRf0uHlxgjpepA7Wi9HTH2AsiE76n
ej3m0k3IW8izkakiDAmnTIVdKu//yArPr/75PbK\nMmtm3Hn1bFw6W76rQRB5QY+bFyeoU2/qYL0YPf11P7AZIFQWUDZ6IrTzfo32m06RYeWmWUy+yGy5\n2HNYf/+b9+Rw6V3PWG7zodMmYdV1cwOySKgm8oIeBS9O8IewG0IZPf1VC65G9ROhivdruN8iY9zo\nFIYLwyNyytdv328axiECzJLQiI5PtrZk0li29ExfrptKVSYAbFm2ECeOtV7rVPCfyAt62F6c4B9h\nT3Y78XJVtrXLUT987PjfcJEZD27M4uPntOHBjVlDT91gbDF8L5cvlJ4s4M280n+7dyN+teVVy21W\nfX4uPvTe5DW3ijuRF/SwvTjBH6Iw2W01yWm0rRVuKkfzhSIe27wPY9NNlp+r9tS11YP0aJkxbq7d\nb/72Gv7Lz6zrQz41d1qlclWILpEXdCA5bT2F40Rhstvo6S/dRCNi6IDaE6GblYcA686JFRjYWS4K\nmmGR0676xPHqoSOYu+K3ttv9/duLI7NWpqBGLARdSB5RmOw2e/ozes1ukLGyu60lg8NHh1xVjQIj\nnw6snirMniKYGTNu/pXtcX795fNxxuQTXdkoRAMRdCEUojLZbfb05/Qpwex82loyeKZngetmXlqb\nXY3uRe3oXrN5xBMEUHqyqN5u0e1/wI7X3rbc9zXnTsOKyySMkiRE0IVQiMpkt35idn5HK9Zv3+94\nvsbufIyeBqwaagHAhOY0br1kZPaK9vttj26tfLYlk0bXnDbcsHqTbQHTyysukqrMBCOCLoRCWJPd\n+pWF3jkyVElVzObyIxabMJuo7RvI1gjqsqVnYsVlM0e8PsamK+CSs06uyXDRJj2tKk+75rRh8cz3\noP2WUjphLl/AT/9jp+ExvrrwffjCgjOsL4qQGKTbopAorFIh3YY9UkQYZq548Kue24OiQU5hc7oJ\nhSKPyGXXBHqCbvAASh58dS66PoNFq0IFoNyyt4lGpjSaVbIK8UK126IIupAI9F6zRrWgzetd53sf\nc6doMfZ6bNMyYMz2oR1DiC/SPldoGKw873yhiGWPbEX/rgORE3Pg+ALRTrN7jEQ6CplDQrhIkqkQ\ne+xywHP5guFCzFFhes/jjheDNhJpswwhaZPROIiHLsSWoJaDCwurBaG1Jer0GTr6SVZpk9FYiKAL\nnhJUfxYvFmmOMlrmDGC8YMX8jtaa1gk/f3Y3MukmTGhOIzdYkDYZDYgIuuAZQfZncVtqHxeODpX6\noJuld5qdf6nnOuH2q2aLkDcgDSPoYXf2awRU+7OY3Qsn9yjpE312LXu/bFFAJAvANC4NIehR6OzX\nCKhkWZjdi/5dB0bEf+3ukZNOiXHFatCyO/+kD3iCMQ2R5WLlOQreoZJlYXYvVm3Y4+geze9ordPa\n6GOVndK9qB2ZdMrVZ4Xk0hAeuuTnBoNKfxYzr7JoUuCm3aO+gSyWPbLVdcfCuGGXnWLU00X1s0Jy\naQgPXfJzg6FrThtWXDYTbS0ZEErFL/qy85TDxlBael73A5sbRsxTRErl+l1z2jDwzYW446rZltdc\naBwaovTfKMVNelyEw3SLBRr0iyZr9yjJueZmEOB6wlhIHlL6X4UsYxedLJ82i77hWjpeNpdHigj5\nQtEwpNAIMNxPGAvRIejvXUN46I1OlJ5Q7GyJQsFQighF5spPt9xx1WzceP9m032kUwQwRnRgtLJH\njzTdijZefu9UPfSGiKE3OlHK8rGLs0ehYEgTz3rEHCidq9U+Vl4+CyuvmFW5Fnb26JFJ/WgTxveu\nIUIujU7UsnysFv1OikhNaE4DsA4xaddA+2nW/tbMQ5dJ/WgTxvdOPPQGwO8sn76BLOb1rsOMnscx\nr3cd+gayrvcRXADQP9Ipwq2XlPqwGOWLp1OEw0eHaq6X0baZdArXnDfV8HVJTYw2YWTXiaA3AGZC\n4YUgaHHCbC4/YiLPiahX7yPupIiw8vJZI7zv6hDThOY0wKWWvvrrZRaOWt410zYdVIgefn7vzJBJ\n0QbBr9l2L1bJieJKQm5QmfCSVYUaC6++d56lLRLRVAA/AzAZpWyqu5n5TiKaCGA1gOkAdgK4kpkP\nOrZUCASruHU9WMUJVf+YVcR8+qRmFIqMveUngTAhAk4Zn6mkVxaZK2mXQEm0zc45avMZgr/49b0z\nQ2VSdAjAjcz8PBGdCGAjEf0awGcA/JaZe4moB0APgJv8M1WIIi3NacM88ZbmtGlDtEP5Am59ZKvt\nvjWxbCJg55uD3hpeB8ww9KZVmsCZNdWSCU7BC2xj6My8j5mfL//+NoBtANoAXArgnvJm9wDo8stI\nIZr0DWTxzpEhw/dy+YJhytYNqzcpiTlwPF3PJk07cNoMxLdvIIsb799sm6YWRlxVaBwcpS0S0XQA\ncwBsADCZmfeV33oVpZCM0ECsXLvDtChGZWrmrk+cjSVnnQxgZKyxqc6CHr+ZPmmkoGueuUq+uFQt\nC36iLOhEdAKABwHcwMxvUVWTJWZmIjL8ayai6wBcBwDTpk2rz1ohUriJ+1ZP/mmpitXCBgA3WCze\nEAWeeekAbunbguVdMwHYF0PpwylBx1WFxkEpbZGI0iiJ+b3M/FD55deI6OTy+ycDeN3os8x8NzN3\nMnNna2vye1g3CvsOOZ+crA4tGKU7dq/ZjK9EXMw1Vm3YU/ndalJXwilCkKhkuRCAnwDYxsz/o+qt\nRwBcC6C3/PNhXywUIgEz4+z//mvXjbLadKEFI6+2UIxumEWPFl7pG8iCAMPBTbUNriB4hUrIZR6A\nTwPYQkSa+/R1lIT8fiL6HIBdAK70x0QhLO5a/6Jt34mr/2kqnv77G7aph/qskLin6TWRdf48AfjB\nlbNEzIVAsRV0Zv4jYNo76AJvzRHCZNu+t7D4zqctt5k1ZTwe/sJHDN/7wDeewGBhuOZ1ra9JNbFf\nE5StQy3xedYQkoQ052pgjg4V0X7Lk7bbbf7mQow3EGU937nsLHSv2TwidFLd16Sa+R2t+Pmzu50Z\n7DOpJkLRJGtHH1apHbZqkZ7lQtCIoEcYP8r1b35oC1Y9Zy2kP/uXc3H++5xPYDtJyVu/fb/j/ftJ\nc7rJ8OmiHrQcdBF0IShE0COKStWhCiphlI+fPQU/uHKWoQ1OBxTVlLyoxdDtxNxtCCVq5+mEqKxy\nJagjgh5RrJrjW32pjg0N4xcbdmHFE9txdMhcpHYsvxBjRqVM3/dqQDHbd9SLh7wiriX9ft5/wT9E\n0COKkyZOz7z4BlY8sQ1/zb5lur8/3jQfUyY0Kx/f7YBih11VZRQxS0u0wygHPS5er1/3X/AXEfSI\nYtXEac+BQXz/qR14eNNew88u/MBkdC9qxxmTT3R9fL+6ArpdYi7dRDhh7CgcHCygidz3d3Ejzp+c\nO23EIs0q6PPugXh5vdIVMp6IoEeU7kXthoslZ3N5fPR760e8dtpJ49CzuAMf+8BkVLdkqAe/ugK6\nEQQC
cNW5U9F56kSsXLujrnRHBtCSSSOXVyuQamvJYHnXTHSeOtFywWe9vfM7WmtEOk5er3SFjCci\n6BGEmTE23YTm0SlTr/BrF7bjsx+egcxo8zh4PRgNKF6UsbvJP2cAj23eh9V/3hNoNWn1+WqCazTI\n6mEAP392Nx7bvA+H8oVKaCVOXq9f91/wF1mxKCLsePVtfO/J7fjtdsOWOLjs7DZ85WPvcxQHd4sW\n5zVawMGLCVEVUQyLtpaMZXy7byDrqnlYJp3C2HSTaesEr66vl8Ql3t8IeLZikeAPhwYL+N+/fxH/\n5/f/MHz/rCnj0bO4Ax9+70meHVPlC6oX3CJzxTPz4stcnavuR6WoFiOfYLLwhhVGy8AZXTM35AtF\njBnVhEza+KkrivF06QoZP8RDD4jiMOPB519B7xPbceDwsZr3x41Ooeei9+Pqf5qKdMr7tbuNPGOj\nNTCDXvNyes/jde+j+ilifkcr1m/fb7tUnV5Yja6F2TUDGHkXRUgE4ParZlsOZrK2qGCEeOgR4Pnd\nB9H7xHY89/IBw/ev/dCp+OIFZ+CkE8b4bovqhJyXcV6VJwInE5RGtGTS2HTrQvQNZLHska1K7QRa\nMmksW3omlj2ytXLssenaQdTsmk1oTrsS9FNaMhWvd0bP44YDThTj6UJ8EEH3kNfeOoI7fvN309L6\nj5x+Em66sAMzp4wP2DJ1ofYqu0ElRe+Wvi11iXm6ibBs6ZmO4vLaZwCMKLw6OFiosc/smuVctBDW\nTyhKFongByLodXB0qIh7n92N3ie241ix1mOb/K4xuHnx+7F01iloavImndAtZgLSRIS+gWxFxLzK\nbrB7IugbyOLeOppzVU8izutdZyvmBIx4SjD6jP6JxU507eYAtHh+ta3VE876nHjJIhHqRQTdIX94\nYT++86tt2P7q24bvf3HB6fj8+afhXWPtuxMGiVlee5F5hGfq1ZqXZmKnvb5y7Q7X/VEII/ur24Up\njOLSdvYBpWvW/cDmmnVTtfi8XZGSJubVS+5V34PqfaSIRiwoHcZkpGS1xB8RdBt2vXkY33/qBTy6\n2bgq88Iz34OvLmrH6e8+IWDLnKF9MY2KY/SeqRfZDSmTXi2pcuFTPbHi8ZmRg6VVbnsmncL8jtaa\ntUvt7Ktg8GDFVT/tRL36PI2eWrR9aLaEle0SpypWwRwRdB2Hjw7hx0+/jNt/84Lh+6e/+wR8/aIO\nzG9/t2dVmX6i97pUVqb3ArPjaK/Xs8BFLl/AvN51FQ/S7OljQnMaS846eUTZfjaXt8wjLzJXQlAr\n1+6wLWTSvHBt33qqY+Jm11h/hDCqR+NUxSqY0/CCzsz41ZZXseKJbXjloPEX7ubFHbj2w9MxNu1P\nVaZfGHmkTVr/AAAOHUlEQVRdZh6lF5Nx1YOHmQesiZ+ZCKti5EEa5YurlutXo+1XdZDThNwuJu5k\nEAs62yVOVayCOQ0p6Nv2vYXvPrkdv9thvMjC5edMwVc+9r7YZxyYPeIbMb/D+YIW1RgVJOkxKqW/\n7dGtNQVAqg20qj1IfZionq6O2n6dPkXYxcSNBjGrATbImLZk3SSDhhD0g4eP4a71L+LHf3zZ8P05\n01rQc2EHzjttkuk+4jhh5ESM6l1ByK6Lollp+1v5oZptVWLTGmYepNuujhrZXB53XDXb8VOEXUy8\nf9cBrNqwB0VmpIgw97QJeH73oZqsovkdrYHGtKV3SzJIpKAPFYfx4POvYMUT2w1zhk8cMwo9F3Xg\nyk61qsw4Thj1DWQdtYqt99Ha6vP61gFaEZBVDroWm85ahG8AYw+ybyDrWVuBFZfNdNymwCwmDgAP\nbsxWzqXIjOd3H8LHz2mrVLdqzkLQMW2vspuEcElM6X//zgNY8cR2bNx10PD9f5k3A9fPfy8muajK\nDLoc3gvMbDaj3nOxO562f9UiIL09Rp/T53kDsB0onGCVclh9fBUIpewcI9vaqkTcbgAjAC/3LnF2\nIkLsSXzp/6uHjuCO37yA+/68x/D9j55Rqsr8YFv9VZlxnDCy85jdPlqbhZ7Mcrb19qiEQtIpwuGj\nQ5jR83iNp2hUlJPN5dH9wGYMo9Qzxwyni1tUX0OjmP/YdBOGhlmppW+LRbMw7YnPav5BQ2LaghWx\nEfSjQ0X8+5924Tu/2ma4Wk1bSwY9izuwZObJnldlxnHCyMzmam/Q6aO1XejJaIKz2h7AfhCksupq\nnqz+GFqVp/7czAaSkTuHI0U3ur9Hqnq4qPZzyaRTsHoQ1iZQVfYjMW3BilgI+ltHCjhr2VM1r//r\nBWfg8x+dgRN9rsqM44SRlc1uC4fs4rpWPU60a2WXOUKoFed8oYjbHt1q22PFijaHGSuE0mBSne/u\nZqJVG0C/bJP7rrofiWkLVsRC0E8YPQpXdU7F20cL+OrCdpzWGmxVZhwnjPyw2S70ZCbWLZm0Za+Y\naswc7YODhUrBj5uipPkdrZXsEhWqwzlO89I1qlsUmE2sTmhOo3n0KKX5B0GwIzGTooL/2E0OW/Vc\nB44PLi3NaTADh/IFNFlMADo5jspn53e02rbXNYvKWFWD2tkLWPejB8yXtjPq0y40HqqTot6vpCAk\nlu5F7eUFHkYyeGyo4j2vuGwm2loyIJQErVqwsuWmVgcHCzg6NIzbr5qNYQcOheYha8dxwt5cHsu7\nZuJTc6dV+rWkiDDvvRNH2Gtmzd5c3vT8jdCH5MyujRb+0t7T7IJuG0FQQTx0ReJYWOSHzWY55Fae\npFVKo1WKnh596MFNaqbZNahua2t1bJU1RbUFNKL+9yHEB/HQPUR7XNY8TC2u2jeQDds0U/yyuWtO\nG8aNqZ160SYu5/Wuw4yexzGvd13lWFaxZyMxT6cIaV2mktEktJHHnG4ipFPGWU5m16D6Whmhb1lQ\n05FRR/XCGYIQJCLoClhld0QVP202E+iDgwXDAUQlvTNFVAlFrLx8FlZeMcswPFGNURhj5RWzsPLy\nWZXwhR6ja2CVvWJ0bLsniqj/bQjJJRZZLmGTpMIiL2xWzTLRhK17UTu612y2LMAZZq6pgFQJWZil\nYDpZt9PsmugX0tBQSYGM8t+GkFzEQ1fAzMOMemGRk9ed4GRycG8uXwrTjLb2HdysWWoU3lHZp/51\np9dK5fxbmtO29gmC14igK2D0BY5DYZFfNhuFOloyxsVdmigesuiv4tQu1fkB1WswfZKxcJu9rs9K\n0UfU0ynCO0eGYjXnIiQDCbkoEGZhkdtMlaBtvnjWyJWBgJHiaRamSRE5Ss3rG8gqLaMHqF+DZ/9h\n3NDN7HVt39XdI6uPcfjoUE0WkB+dEuOYeSX4i23aIhH9PwAXA3idmT9Yfm0igNUApgPYCeBKZjb/\n6y8T57TFMLAqRrH64vr9RTezy6gNbLXouTkXu+NW47YT4fSex03f2+lif2axey87JXpxPYX44GXa\n4k8BXKh7rQfAb5n5DAC/Lf9f8Bg3mSpBpFia2bV++34807MAL
/cuwTM9C2q8ZbPCmnqOW43b+QGr\nNEQ31y2IOZc4Zl4J/mMbcmHmPxDRdN3LlwL45/Lv9wD4HYCbPLRLgLtMFdWFEerx4t1m0LhtCqay\nfwJczw9cc95U05YAbsIkQTRzi2PmleA/bmPok5l5X/n3VwFM9sgeoQo3bXtVvuj1rsAUVjthq3RJ\nhvvVo5Z3zTQVdP31NBoIgdo4vbbSkV9hrzi2dBb8p+4sFy4F4U0D8UR0HRH1E1H//v31rVvZaLjJ\nVFF53K/3cT2srJ/uRe01GSUaZoVEqph9vvq6GYWzuh/YjO41m2tCXABMw09eEMfMK8F/3Ar6a0R0\nMgCUf75utiEz383Mnczc2dpa38ryjYabuLPKF73ex3Uv4uFu6JrThk/OnVYj6l4Imcp1MxoICwYr\nFgURyw7rHgjRxm3I5REA1wLoLf982DOLEoQX2SZO484qqXpePK7XGw93y/Kumeg8daLp+fmZ5ukk\nPh1ELDuseyBEF1tBJ6JVKE2AnkRErwC4FSUhv5+IPgdgF4Ar/TQyjtQbp64Huy96WCswuRFbs8+Y\ndUx0cs2N9m21kISThTUkli2EgUqWyzUmb13gsS2JQjXbJAy8LDpSFWk3A5zKZ6qPD9RO5phdczf2\nGA2E6SYCCCPCLl4MjlI0JLhBKkV9IuppZXovV+uN4tR7VhVFNwOc3WdUVy4yuuZu7DEbCI1eq0d8\nw3y6E+KNCLpPxCmtzK2AOBFFNwOc3WdUF202uuZe59J7KbRRfroToo005/KJOKWVuU1jdCKKbqon\n7T6j+rRjdM2j3EEz6k93QnQRQfeJqKaVGbWddSsgTkTRzQBn9xkV8Z3QnDa85lEdcPsGsmgyaUUQ\nhcFGiDYScvGRqKWVmYVWWprTODhY297WSECqJ+vGZ9JIp0hpQtDNRKzdZ4wmKavJpFO49ZIzXe3b\n6ryrt/Vy8lK7P0YrIkVhsBGijywSHTJBZjOYLarckknj6NCwbec+o0nIdBPhhLGjkBss4JSWDOZ3\ntJp2XPQD/QBDhIotXh3bqrukUctgt09iZvcnRYQfXDkrUs6BECyq3RbFQw+RoLMZzEIoh/IF3H7V\nbNuBxaxSsnn0KAx8c2Eo2RlBPAWZzTGs2rBHqS+7Kmb3Z5hZxFxQQgQ9RILOZrDKvFERRjdZJ0nI\nzjA7b7PFot1OXsYpM0qIJjIpGiJBZzPUOxHoNuvEj/NRWVPUK8zO26yPulsBjupErRAfRNBDJOjU\nuXozb9xmnXh9PkEs4lGN2Xlfc95UTwU4qplRQnyQkEuIhNFTpZ6Ys5usEz/OJ+jQjtV5WzUKc3ss\nEXDBLZLlEjJJ69kRxPmorNmZtOsqNDaS5RITkuaRBXE+dpOH0gtFaFQkhi7EDrtYviygLDQq4qEL\nvuFl//Nq7GL50gtFaFRE0AVf8Kv/uYZVaEfyuYVGRUIugi+4CXt4FSqRfG6hUREPXfAFP/qfq+Ll\nikyCECdE0AVfcBP28DJUkrTsIUFQQUIugi/40f9cEARrxEMXfMGP/ueCIFgjlaKCIAgRR7VSVEIu\ngiAICUEEXRAEISGIoAuCICQEEXRBEISEIIIuCIKQEALNciGi/QB2BXZAd5wE4I2wjQgAOc9k0Sjn\nCTTOuVaf56nM3Gr3gUAFPQ4QUb9KelDckfNMFo1ynkDjnKub85SQiyAIQkIQQRcEQUgIIui13B22\nAQEh55ksGuU8gcY5V8fnKTF0QRCEhCAeuiAIQkIQQa+CiFJENEBEj4Vti58Q0U4i2kJEm4gosd3S\niKiFiNYQ0XYi2kZEHwrbJq8hovbyfdT+vUVEN4Rtlx8Q0ZeJaCsR/ZWIVhHR2LBt8gMi+lL5HLc6\nvZfSPnckXwKwDcC7wjYkAOYzc9Jzee8E8CQzX05EowE0h22Q1zDzDgCzgZJDAiAL4JehGuUDRNQG\n4F8BfICZ80R0P4CrAfw0VMM8hog+CODzAM4FcAzAk0T0GDO/qPJ58dDLENEUAEsA/DhsW4T6IaLx\nAM4H8BMAYOZjzJwL1yrfuQDAS8wc9eI9t4wCkCGiUSgNzntDtscP3g9gAzMPMvMQgN8DuEz1wyLo\nx7kDwNcADIdtSAAwgKeIaCMRXRe2MT4xA8B+AP9WDqP9mIjGhW2Uz1wNYFXYRvgBM2cBfB/AbgD7\nABxi5qfCtcoX/grgo0Q0iYiaAVwEYKrqh0XQARDRxQBeZ+aNYdsSEB9h5rMBLAZwPRGdH7ZBPjAK\nwNkAfsjMcwAcBtATrkn+UQ4pLQXwQNi2+AERTQBwKUoD9SkAxhHRp8K1ynuYeRuA7wJ4CsCTADYB\nKKp+XgS9xDwAS4loJ4D7ACwgop+Ha5J/lL0dMPPrKMVbzw3XIl94BcArzLyh/P81KAl8UlkM4Hlm\nfi1sQ3ziPwF4mZn3M3MBwEMAPhyyTb7AzD9h5nOY+XwABwG8oPpZEXQAzHwzM09h5ukoPbauY+bE\njf4AQETjiOhE7XcAC1F6zEsUzPwqgD1EpK0wfQGAv4Vokt9cg4SGW8rsBjCXiJqJiFC6n9tCtskX\niOjd5Z/TUIqf/0L1s5Ll0nhMBvDL0ncCowD8gpmfDNck3/gigHvL4Yh/APhsyPb4Qnlg/hiA/xq2\nLX7BzBuIaA2A5wEMARhAcitGHySiSQAKAK53MpkvlaKCIAgJQUIugiAICUEEXRAEISGIoAuCICQE\nEXRBEISEIIIuCIKQEETQBUEQEoIIuiAIQkIQQRcEQUgI/x9rzGr3MfYIswAAAABJRU5ErkJggg==\n", 147 | "text/plain": [ 148 | "" 149 | ] 150 | }, 151 | "metadata": {}, 152 | "output_type": "display_data" 153 | } 154 | ], 155 | "source": [ 156 | "def plot_graph(y, fout):\n", 157 | " \"\"\"데이터 플롯을 위한 함수. 
157 |     "    \"\"\"Plotting helper: plots the input (feature) values against the output (house price) values.\"\"\"\n",
158 |     "\n",
159 |     "    plt.scatter(data_x.reshape(1, -1)[0], boston.target.reshape(1, -1)[0])\n",
160 |     "    plt.plot(data_x.reshape(1, -1)[0], y.reshape(1, -1)[0])\n",
161 |     "    plt.savefig(fout)  # save before show(): with the inline backend, show() clears the active figure\n",
162 |     "    plt.show()  # display the figure in the Jupyter notebook\n",
163 |     "    plt.clf()\n",
164 |     "\n",
165 |     "with tf.Session() as sess:\n",
166 |     "    # The session is defined inside the with block.\n",
167 |     "    sess.run(tf.global_variables_initializer())  # initialize the variables\n",
168 |     "\n",
169 |     "    # Create a summary writer for TensorBoard, using the directory given above and the session graph.\n",
170 |     "    summary_writer = tf.summary.FileWriter('./graphs', sess.graph)\n",
171 |     "\n",
172 |     "    # Predictions from the slope before any training.\n",
173 |     "    y_pred_before = sess.run(y_pred, {X: data_x})\n",
174 |     "\n",
175 |     "    plot_graph(y_pred_before, 'before.png')\n",
176 |     "\n",
177 |     "    # Update the slope 100 times with the optimizer.\n",
178 |     "    for i in range(100):\n",
179 |     "        # Run the loss, summary_op, and train_op operations.\n",
180 |     "        # The result of loss goes into loss_t and the result of summary_op into summary.\n",
181 |     "        loss_t, summary, _ = sess.run([loss, summary_op, train_op], feed_dict={X: data_x, y: data_y})\n",
182 |     "\n",
183 |     "        # Write the summary produced at each update to the summary writer.\n",
184 |     "        summary_writer.add_summary(summary, i)\n",
185 |     "\n",
186 |     "        if i % 10 == 0:\n",
187 |     "            print('loss = % 4.4f' % loss_t.mean())  # mean loss over the data, printed every 10 updates\n",
188 |     "\n",
189 |     "    y_pred_after = sess.run(y_pred, {X: data_x})\n",
190 |     "    plot_graph(y_pred_after, 'after.png')\n"
191 |    ]
192 |   },
193 |   {
194 |    "cell_type": "code",
195 |    "execution_count": null,
196 |    "metadata": {},
197 |    "outputs": [],
198 |    "source": []
199 |   },
200 |   {
201 |    "cell_type": "code",
202 |    "execution_count": null,
203 |    "metadata": {
204 |     "collapsed": true
205 |    },
206 |    "outputs": [],
207 |    "source": []
208 |   }
209 |  ],
210 |  "metadata": {
211 |   "kernelspec": {
212 |    "display_name": "tensorflow",
213 |    "language": "python",
214 |    "name": "tensorflow"
215 |   },
216 |   "language_info": {
217 |    "codemirror_mode": {
218 |     "name": "ipython",
219 |     "version": 3
220 |    },
221 |    "file_extension": ".py",
222 |    "mimetype": "text/x-python",
223 |    "name": "python",
224 |    "nbconvert_exporter": "python",
225 |    "pygments_lexer": "ipython3",
226 |    "version": "3.6.1"
227 |   }
228 |  },
229 |  "nbformat": 4,
230 |  "nbformat_minor": 2
231 | }
232 | 
--------------------------------------------------------------------------------
/chapter9/readme.md:
--------------------------------------------------------------------------------
1 | 
2 | 
--------------------------------------------------------------------------------
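
Note: the training cell above refers to data_x, data_y, boston, X, y, y_pred, loss, train_op, and summary_op, all of which are defined in an earlier cell of chapter09_tf_example.ipynb that falls outside this excerpt. The sketch below shows what that setup plausibly looks like, assuming the Boston housing dataset, a single input feature, and the TensorFlow 1.x API; the variable names are taken from the cell above, but the feature choice, initial values, and learning rate are illustrative assumptions rather than the book's exact code.

import matplotlib.pyplot as plt
import tensorflow as tf                          # TensorFlow 1.x API
from sklearn.datasets import load_boston

boston = load_boston()
data_x = boston.data[:, 5].reshape(-1, 1)        # assumption: the RM feature (average rooms per dwelling)
data_y = boston.target.reshape(-1, 1)            # house prices

# Placeholders for the input feature and the target value.
X = tf.placeholder(tf.float32, shape=(None, 1), name='X')
y = tf.placeholder(tf.float32, shape=(None, 1), name='y')

# Linear model: a single weight (slope) and a bias.
w = tf.Variable(tf.random_normal([1, 1]), name='w')
b = tf.Variable(tf.zeros([1]), name='b')
y_pred = tf.matmul(X, w) + b

# Per-sample squared error, so that loss_t.mean() in the cell above averages it.
loss = tf.square(y - y_pred)
mean_loss = tf.reduce_mean(loss)
train_op = tf.train.GradientDescentOptimizer(learning_rate=0.001).minimize(mean_loss)

# Scalar summary of the mean loss; the FileWriter above records it under ./graphs.
summary_op = tf.summary.scalar('loss', mean_loss)

After the notebook has run, the recorded summaries can be inspected with `tensorboard --logdir ./graphs`, which plots the loss curve written by the summary writer at each of the 100 updates.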