├── .gitignore
├── 00_setup_installations
├── 00_setup_installations.txt
├── 01_virtual_env_and_tensorflow-1.14_installation.txt
├── 02_git_for_windows.txt
└── 03_pip_paket_yukleme.txt
├── 01_python_basic
├── 00_python_giris.ipynb
├── 01_degiskenler_(variables).ipynb
├── 01_python_basic_answers.ipynb
├── 02_tirnak_kullanimi_ve_kacis_(quotes_escape).ipynb
├── 03_kullanicidan_bilgi_almak_(input).ipynb
├── 04_cikti_formatlama_(print_format).ipynb
├── 05_kosullu_durumlar_(conditionals_if_else).ipynb
├── 06_islecler_(operators).ipynb
├── 07_donguler_(loops).ipynb
├── 08_dosya_islemleri_(file_operations).ipynb
├── 09_listeler_(lists).ipynb
├── 10_liste_metotlari_(list_methods).ipynb
├── 11_demetler_(tuples).ipynb
├── 12_sozlukler_(dictionaries).ipynb
├── 13_kumeler_(sets).ipynb
├── 14_fonksiyonlar_(functions).ipynb
├── 15_lambda_fonksiyonlar_(lambda_functions).ipynb
├── 16_moduller_(moduls).ipynb
├── 17_nesneye_yonelik_programlama_(object_oriented_programming_OOP).ipynb
├── 18_ yield.ipynb
├── img
│ └── Guido_van_Rossum.png
├── merhaba.py
├── my_module
│ ├── __pycache__
│ │ ├── alan_hesapla.cpython-36.pyc
│ │ └── my_geo.cpython-36.pyc
│ └── my_geo.py
├── notlar.txt
└── yeni_dosya.txt
├── 02_pandas
├── 00_pandas_basics.ipynb
├── 01_read_data_name_columns.ipynb
├── 02_drop_columns_and_row.ipynb
├── 03_filter_and_sort.ipynb
├── 04_read_excel_files_write_dataframe_to_disk.ipynb
├── 05_null_check_type_casting.ipynb
├── 06_string_ops.ipynb
├── 07_group_by_and_aggregations.ipynb
├── 08_date_time_index_parse_date.ipynb
├── 09_map_and_select_dtypes.ipynb
├── 10_join_union.ipynb
├── 11_postgresql_db_connection.ipynb
├── 12_sql_like_operations.ipynb
├── Docker-Toolbox ve PostgreSQL-Kurulumu.txt
├── Pandas_Kaynaklar.txt
└── simple_data.csv
├── 03_numpy
├── 01_numpy_giris_matris_olusturma.ipynb
├── 02_numpy_index.ipynb
├── 03_numpy_islemler.ipynb
└── 04_numpy_filter_and_query.ipynb
├── 04_matplotlib
├── 01.matplotlib giris.ipynb
├── 02.matplotlib scatter plot.ipynb
├── 03.matplotlib histogram.ipynb
├── 03_Veri_Gorsellestirme_Odev_Cevaplari.ipynb
├── 04.matplotlib boxplot.ipynb
├── 05. matplotlib barplot.ipynb
├── 06.matplotlib piechart.ipynb
├── boxplot_01.png
└── boxplot_normal_dagilim.png
├── 05_seaborn
├── 01.seaborn_scatterplot_giris.ipynb
├── 02.seaborn_line_chart_avakado.ipynb
├── 02.seaborn_line_chart_giris.ipynb
├── 03. seaborn bar chart.ipynb
├── 04. seaborn lm plot.ipynb
├── 05.seaborn_line_chart_with_data.ipynb
├── 06_seaborn_pairplot.ipynb
└── 07_seaborn_mixed.ipynb
├── 06_plotly
├── 01_plotly_line_plot.ipynb
├── 02_plotly_scatter_plot.ipynb
├── 03_plotly_bar_plot.ipynb
├── 04_plotly_bar_plot_2.ipynb
└── 05_plotly_animation.ipynb
├── 07_data_preprocessing_cleaning
├── 00_github_ornek_veri_okuma.ipynb
├── 01_veri_on_isleme_temizlik_ilk_safha_simple_data.ipynb
├── 02_veri_on_isleme_temizlik_ml_safhasi_simple_data.ipynb
├── 04_Preprosessing_Odev_Cevaplari.ipynb
├── ileri_seviye_ornekler
│ ├── 01._ileri_seviye_hazirlik_housing_data_part1.ipynb
│ ├── 02.ileri_seviye_hazirlik_housing_data_part2.ipynb
│ └── 03_iris_quartiles.ipynb
├── label_one_hot.png
└── scikit_learn_ornekler
│ ├── 01_general_concepts_of_preparation.txt
│ ├── 5.3.1_Standardization.ipynb
│ ├── 5.3.2_Non-linear_transformation.ipynb
│ ├── 5.3.3_Normalization.ipynb
│ └── 5.3.4_Encoding_categorical_features.ipynb
├── 08_linear_regression
├── 01_simple_lineer_regression_advertising.ipynb
├── 02_multiple_linear_regression_advertising_01.ipynb
├── 03_multiple_linear_regression_advertising_backward_02.ipynb
├── 04_multiple_linear_regression_housing_mixed_attr.ipynb
├── 05_Regresyon_Odev_Cevaplari.ipynb
├── 05_multiple_linear_regression_housing_mixed_attr_backward_02.ipynb
├── hyperplane.png
└── other_examples
│ ├── 02_simple_linear_regression_with_cv.ipynb
│ ├── 04_multiple_linear_regression_mixed_attributes_housing_data_part1.ipynb
│ └── 05_multiple_linear_regression_mixed_attributes_housing_data_part2.ipynb
├── 09_nonlinear_regression
├── 01_polynomial_regression.ipynb
└── 02_polinomial_regression_housing.ipynb
├── 10_classification
├── 01_knn_with_iris_dataset.ipynb
├── 02_logistic_regression_with_adult_mixed_attr.ipynb
├── 03_model_selection_tuning_with_SosyalMedyaReklamKampanyası_data.ipynb
├── 04_xgboost_SosyalMedyaReklamKampanyası_data.ipynb
├── 06_Classification_Odev_Cevaplari.ipynb
├── hata_matrisi_precision_recall.xlsx
├── other_examples
│ ├── 03_logistic_regression_with_iris.ipynb
│ ├── 04_logistic_regression_with_adult.ipynb
│ └── 05_model_selection_tuning_with_iris_data.ipynb
├── saved_models
│ ├── 01.knn_with_iris_dataset.pkl
│ ├── 05.Model_selection_tuning_with_iris_data.pkl
│ ├── 06.model_selection_tuning_with_SosyalMedyaReklamKampanyası_data.pkl
│ └── 07.xgboost_SosyalMedyaReklamKampanyası_data.pkl
├── submission_for_kaggle.csv
└── xgboost.png
├── 11_clustering
├── 01_kmeans_clustering_with_Mall_Customers.ipynb
├── 02_kmeans_clustering_with_Mall_Customers_mixed_attr.ipynb
├── 03_pca_wine_data.ipynb
├── 04_lda_wine_data.ipynb
├── 05_kernel_pca_wine_data.ipynb
└── 07_Clustering_Odev_Cevaplari.ipynb
├── 12_association_rules
├── .ipynb_checkpoints
│ └── 01_association_rule-checkpoint.ipynb
├── 01_association_rule.ipynb
├── __pycache__
│ └── apyori.cpython-36.pyc
├── apriori.py
└── apyori.py
├── 13_deep_learning
├── .ipynb_checkpoints
│ ├── 01_introduction_to_tensorflow-checkpoint.ipynb
│ ├── 02_ann_picture_classification_fashionmnist-checkpoint.ipynb
│ ├── 03_cnn_classification_of_cifar-10_objects-checkpoint.ipynb
│ ├── 04_rnn_text_classification_of_imdb_reviews-checkpoint.ipynb
│ └── 05_transfer_learning_and_fine_tuning-checkpoint.ipynb
├── makale_gazi_unv_derin_ogrenme.pdf
├── tensorflow-1.14
│ ├── .ipynb_checkpoints
│ │ ├── 00_introduction_to_tensorflow-checkpoint.ipynb
│ │ └── 01_ann_churn_prediction-checkpoint.ipynb
│ ├── venv_tensorflow-1
│ │ └── dev
│ │ │ ├── .ipynb_checkpoints
│ │ │ ├── 01_tf_introduction_to_tensorflow-checkpoint.ipynb
│ │ │ ├── 02_tf_graph-checkpoint.ipynb
│ │ │ ├── 03_tf_variables_placeholders-checkpoint.ipynb
│ │ │ ├── 04_tf_build_a_neural_network_with_tensorflow-checkpoint.ipynb
│ │ │ ├── 05_tf_simple_linear_regression-checkpoint.ipynb
│ │ │ ├── 51_keras_ann_churn_prediction-checkpoint.ipynb
│ │ │ └── 52_keras_cnn_cat_dog_classification-checkpoint.ipynb
│ │ │ ├── 01_tf_introduction_to_tensorflow.ipynb
│ │ │ ├── 02_tf_graph.ipynb
│ │ │ ├── 03_tf_variables_placeholders.ipynb
│ │ │ ├── 04_tf_build_a_neural_network_with_tensorflow.ipynb
│ │ │ ├── 05_tf_simple_linear_regression.ipynb
│ │ │ ├── 51_keras_ann_churn_prediction.ipynb
│ │ │ ├── 52_keras_cnn_cat_dog_classification.ipynb
│ │ │ ├── cnn_animation.gif
│ │ │ ├── cnn_architecture.png
│ │ │ ├── cnn_max_pooling.gif
│ │ │ ├── cnn_phases.png
│ │ │ └── simple_ann_schema.png
│ └── virtual_env_and_tensorflow-1.14_installation.txt
└── tensorflow-2.0
│ ├── .ipynb_checkpoints
│ └── 03_cnn_classification_of_cifar-10_objects-checkpoint.ipynb
│ ├── jupyter_noteboos_tf_2
│ ├── .ipynb_checkpoints
│ │ ├── 02_ann_picture_classification_fashionmnist-checkpoint.ipynb
│ │ └── 03_cnn_classification_of_cifar-10_objects-checkpoint.ipynb
│ ├── 01_introduction_to_tensorflow.ipynb
│ ├── 02_ann_picture_classification_fashionmnist.ipynb
│ ├── 03_cnn_classification_of_cifar-10_objects.ipynb
│ ├── 04_rnn_text_classification_of_imdb_reviews.ipynb
│ └── 05_transfer_learning_and_fine_tuning.ipynb
│ └── virtual_env_and_tensorflow-2_installation.txt
├── LICENSE
├── README.md
├── final_project_titanic
├── .ipynb_checkpoints
│ ├── 07_classification_with_titanic_train_data-checkpoint.ipynb
│ └── 08.classification_with_titanic_test_data_kaggle-checkpoint.ipynb
├── 07.classification_with_titanic_train_data.pkl
├── 07_classification_with_titanic_train_data.ipynb
├── 08.classification_with_titanic_test_data_kaggle.ipynb
├── 08.classification_with_titanic_train_data_tuning.pkl
├── 09.classification_with_titanic_train_data_tuning.ipynb
└── submission_for_kaggle.csv
└── opening_remarks.pptx
/.gitignore:
--------------------------------------------------------------------------------
1 | ignored_files/
2 | .ipynb_checkpoints
3 | presentations/
--------------------------------------------------------------------------------
/00_setup_installations/00_setup_installations.txt:
--------------------------------------------------------------------------------
1 |
2 | PYTHON VE JUPYTER KURULUMU, TEMEL PAKETLERİN YÜKLENMESİ
3 | =======================================================================
4 | https://www.veribilimiokulu.com/python-ile-veri-bilimi-calisma-ortami-kurmak-jupyter-ve-paket-kurulumu/
5 | adresindeki blog yazısından faydalanılabilir.
6 |
7 | 1. Anaconda kurulu ise kaldıralım gereksiz kaynak tüketimine sebep oluyor.
8 | Neye ihtiyacımız varsa elle tek tek yükleyelim.
9 | Paket yüklemelerini daha rahat yapabileceğimiz oturmuş bir sürüm olan
10 | Python 3.6.8 kullanacağız.
11 |
12 |
13 | 2.
14 | https://www.python.org/downloads/release/python-368/
15 | adresinden
16 | "Windows x86-64 executable installer"
17 | indirip kuruluma başlayalım.
18 | Kurulum esnasında PATH'e ekle seçeneğini seçelim.
19 |
20 | 3. Kurulum sonrası python ve pip kontrol:
21 | where python
22 |
23 | Çıktı:
24 | C:\Python\Python36\python.exe
25 |
26 | python -V
27 |
28 | Çıktı:
29 | Python 3.6.8
30 |
31 | pip -V
32 |
33 | Çıktı:
34 | pip 9.0.3 from c:\python\python36\lib\site-packages (python 3.6)
35 |
36 | 4. Pip Upgrade
37 | python -m pip install --upgrade pip
38 |
39 | 5. Temel paketlerin kurulumu (virtualenv kullanılmayacak ise)
40 | python -m pip install pandas sklearn matplotlib seaborn plotly jupyter
41 |
42 | 6. Jupyter çalıştırma
43 | Windows cmd açılır (D dizini içinde jupyter kullanmak istiyor isek
44 | d:
45 | ile d sürücüsü seçilir)
46 |
47 | jupyter notebook
48 | veya
49 | python -m notebook
50 |
51 | komutları ile jupyter çalıştırılır.
52 |
53 |
--------------------------------------------------------------------------------
/00_setup_installations/01_virtual_env_and_tensorflow-1.14_installation.txt:
--------------------------------------------------------------------------------
1 | ========================================================================================
2 | NOT: Python 3.6.8, "00_setup_installations.txt" dokümanına göre kurulu olmalıdır.
3 | ========================================================================================
4 |
5 | 1. cmd komut satırını yönetici olarak açınız.
6 |
7 |
8 | 2. Çalışma dizini ve sanal ortamın yaratılması:
9 | mkdir python_egitim
10 | cd python_egitim
11 | python -m venv tensorflow-1
12 |
13 | Son komut sanal ortam oluşturur ve biraz zaman alabilir.
14 |
15 | 2. kontrol:
16 | dir
17 |
18 | Çıktı:
19 | Volume in drive C has no label.
20 | Volume Serial Number is 55F6-35C4
21 |
22 | Directory of C:\Users\user\python_egitim\tensorflow-1\dev
23 |
24 | 28.07.2019 15:02
.
25 | 28.07.2019 15:02 ..
26 | 28.07.2019 15:00 .ipynb_checkpoints
27 | 28.07.2019 15:02 977 Untitled.ipynb
28 | 1 File(s) 977 bytes
29 | 3 Dir(s) 832.709.644.288 bytes free
30 |
31 |
32 |
33 | 3. Oluşturulan sanal ortamı aktif hale getirme:
34 | cd tensorflow-1\Scripts
35 | activate
36 |
37 | bu komuttan sonra
38 | prompt başında (tensorflow-1) gelecektir.
39 |
40 | cd ..
41 | mkdir dev
42 | cd dev
43 |
44 | Yukarıdaki komutlarla development dizini oluşturuyoruz ve o dizine geçiyoruz.
45 |
46 | 4. Kontrol:
47 | python -V
48 |
49 | Çıktısı
50 | Python 3.6.5
51 |
52 | python -m pip install --upgrade pip
53 |
54 | PermissionError: [WinError 5] Access is denied:
55 | hatası verebilir aldırış etmeden komutu tekrarlayınız.
56 | Requirement already up-to-date: pip in c:\users\user\python_egitim\tensorflow-1\lib\site-packages (19.2.1)
57 |
58 | pip -V
59 | Çıktısı:
60 | pip 19.1.1 from c:\users\user\envs\tensorflow-1\lib\site-packages\pip (python 3.6)
61 |
62 | cd ..
63 |
64 | 5. tensorflow-1.14, keras yüklemek ve diğer sık kullanılan paketleri yüklemek
65 |
66 | python -m pip install tensorflow keras pandas matplotlib sklearn pillow jupyter
67 | Niçin sanal ortama da jupyter kurmalıyız:
68 | https://stackoverflow.com/questions/38221181/no-module-named-tensorflow-in-jupyter
69 |
70 | 6. Kontrol:
71 | python
72 |
73 | python shell giriş >>> gelecektir.
74 |
75 |
76 | >>>import tensorflow as tf
77 | >>>tf.__version__
78 |
79 | Çıktısı:
80 | '1.14.0'
81 | Bazı uyarılar verebilir onlar hata değil kulak asmayın
82 |
83 | exit()
84 |
85 | python shell çıkış.
86 |
87 | 7. Sanal ortamı deactive etmek:
88 | deactivate
89 |
90 |
91 | Tekrar aktif hale getirmek için Scripts içinden
92 | cd Scripts
93 | activate
94 | cd ..
95 | cd dev
96 |
97 |
98 | 8. virtualenv'dan jupyter kullanmak için
99 | ipython kernel install --user --name=tensorflow-1
100 | Installed kernelspec tensorflow-1 in C:\Users\user\AppData\Roaming\jupyter\kernels\tensorflow-1
101 | Önemli not: Bu ortama pip ile yeni paket yüklerseniz jupyter'in bunu görmesi için yukarıdaki komutu yeniden çalıştırınız.
102 | Aksi halde paketi yüklemenize rağmen module bulunamadı hatası verecektir.
103 |
104 |
105 | 9. jupyter notebook çalıştırdığımızda
106 | new butonuna tıkladığımızda artık Pyton 3 ile birlikte tensorflow-1'i de görüyor olacağız.
107 |
108 |
109 | 10. Jupyter içinde tensorflow kontrol
110 | import warnings
111 | warnings.filterwarnings('ignore')
112 | import tensorflow as tf
113 | tf.__version__
114 |
115 | Çıktı:
116 | '1.14.0'
117 |
118 |
119 |
120 | 11. Jupyter notebook kapatıldıktan sonra
121 | deactivate
122 | komutu ile sanal ortam kapatılır.
123 | Ortamı tekrar aktif etmek sadece Scripts klasörü içinden mümkündür.
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 | Olası hatalar ve çözüm önerileri
137 | -----------------------------------------------------
138 | 1.
139 | Sorun: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.
140 |
141 | Çözüm : Activate root
142 |
143 | 2.
144 | Could not install packages due to an EnvironmentError: [WinError 5] Erişim engellendi: 'c:\\programdata\\anaconda3\\lib\\site-packages\\__pycache__\\easy_install.cpython-37.pyc'
145 | Consider using the `--user` option or check the permissions.
146 |
147 | Çözüm: cmd'yi yönetici olarak başlatmak
148 |
149 |
150 | 3. ssl bağlantı hatası
151 |
152 | Çözüm: Anaconda Library bin dizinini ortam değişkenleri path'e eklemek
153 |
154 |
155 |
--------------------------------------------------------------------------------
/00_setup_installations/02_git_for_windows.txt:
--------------------------------------------------------------------------------
1 |
2 | 1.
3 | https://git-scm.com/download
4 |
5 | 2.
6 | Tüm varsayılan seçenekleri değiştirmeden kurulum yapalım.
7 |
8 |
9 | 3.
10 | git config --global user.name "Adınız Soyadınız"
11 | git config --global user.email "github account email"
12 |
13 |
14 | 4.
15 | çalışma dizini oluştur
16 | mkdir egitim/dizini
17 | cd egitim/dizini
18 |
19 |
20 | 5.
21 | git clone github
22 |
23 | 6.
24 | Github sayfasına gidin ve repoyu fork edin
25 |
26 | 7.
27 | Tekrar komut satırında
28 | git remote set-url
--------------------------------------------------------------------------------
/00_setup_installations/03_pip_paket_yukleme.txt:
--------------------------------------------------------------------------------
1 | //////////////// pip ile paket yüklemek /////////////////
2 |
3 | 1. Standart komut
4 | pip install numpy
5 |
6 | numpy'ın son versiyonunu yükler
7 |
8 | 2. Requirements
9 | bazen belli versiyonları yüklemek gerekebilir
10 | bunun için requirements.txt dosyası oluşturulup içine:
11 |
12 | numpy==1.16.0
13 | pandas>0.23 <0.25
14 |
15 | şeklinde ilgili paketler ve versiyonları yazılır
16 |
17 | daha sonra
18 |
19 | pip install -r requirements.txt
20 |
21 | komutu çalıştırılır.
22 | Bu durumda dosya içinde gereksinimlere uymayan paketler kaldırılır ve ilgili versiyonlar yüklenir.
23 |
24 |
25 |
26 | //////////////// pip ile paket kaldırmak /////////////////
27 |
28 | 1.
29 | pip uninstall pandas
30 |
31 |
--------------------------------------------------------------------------------
/01_python_basic/00_python_giris.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Python Nedir?"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "Python (C, C++, Perl, Ruby ve benzerleri gibi) bir programlama dilidir.\n",
15 | "\n",
16 | "Bu programlama dili Guido Van Rossum adlı Hollandalı bir programcı tarafından 90’lı yılların başında geliştirilmeye başlanmıştır. Çoğu insan, isminin Python olmasına aldanarak, bu programlama dilinin, adını piton yılanından aldığını düşünür. Ancak zannedildiğinin aksine bu programlama dilinin adı piton yılanından gelmez. Guido Van Rossum bu programlama dilini, The Monty Python adlı bir İngiliz komedi grubunun, Monty Python’s Flying Circus adlı gösterisinden esinlenerek adlandırmıştır. Ancak her ne kadar gerçek böyle olsa da, Python programlama dilinin pek çok yerde bir yılan figürü ile temsil edilmesi neredeyse bir gelenek halini almıştır.\n",
17 | "\n",
18 | "Python bir programlama dilidir. Öğrenmesi kolay bir programlama dilidir. "
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | ""
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | "# Telaffuz"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | " Önerilen telaffuz: [paytın] "
40 | ]
41 | },
42 | {
43 | "cell_type": "markdown",
44 | "metadata": {},
45 | "source": [
46 | "# Platform Desteği"
47 | ]
48 | },
49 | {
50 | "cell_type": "markdown",
51 | "metadata": {},
52 | "source": [
53 | "GNU/Linux, Windows, Mac OS X, AS/400, BeOS, MorphOS, MS-DOS, OS/2, OS/390, z/OS, RiscOS, S60, Solaris, VMS, Windows CE, HP-UX, iOS ve Android gibi, belki adını dahi duymadığınız pek çok ortamda Python uygulamaları geliştirebilirsiniz. Ayrıca herhangi bir ortamda yazdığınız bir Python programı, üzerinde hiçbir değişiklik yapılmadan veya ufak değişikliklerle başka ortamlarda da çalıştırılabilir."
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "# Farklı Python Sürümleri"
61 | ]
62 | },
63 | {
64 | "cell_type": "markdown",
65 | "metadata": {},
66 | "source": [
67 | "Python programlama dili 1990 yılından bu yana geliştirilen bir dil. Bu süre içinde pek çok Python programı yazıldı ve insanların kullanımına sunuldu. Şu anda piyasada Python’ın 2.x serisinden bir sürümle yazılmış pek çok program bulunuyor. 3.x serisi ise ancak son yıllarda yaygınlık kazanmaya başladı."
68 | ]
69 | },
70 | {
71 | "cell_type": "markdown",
72 | "metadata": {},
73 | "source": [
74 | "# Hangi Python Sürümünü Kullanıyorum?"
75 | ]
76 | },
77 | {
78 | "cell_type": "markdown",
79 | "metadata": {},
80 | "source": [
81 | " C:\\Users\\user>python --version\n",
82 | " Python 3.7.1"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": 1,
88 | "metadata": {},
89 | "outputs": [
90 | {
91 | "data": {
92 | "text/plain": [
93 | "7"
94 | ]
95 | },
96 | "execution_count": 1,
97 | "metadata": {},
98 | "output_type": "execute_result"
99 | }
100 | ],
101 | "source": [
102 | "3+4"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 2,
108 | "metadata": {},
109 | "outputs": [
110 | {
111 | "data": {
112 | "text/plain": [
113 | "'3.6.8 (tags/v3.6.8:3c6b436a57, Dec 24 2018, 00:16:47) [MSC v.1916 64 bit (AMD64)]'"
114 | ]
115 | },
116 | "execution_count": 2,
117 | "metadata": {},
118 | "output_type": "execute_result"
119 | }
120 | ],
121 | "source": [
122 | "import sys\n",
123 | "sys.version"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "# Çalışma Dizinim Neresi?"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": 3,
136 | "metadata": {},
137 | "outputs": [
138 | {
139 | "data": {
140 | "text/plain": [
141 | "'D:\\\\egitim\\\\verilen\\\\machine-learning-with-python\\\\01_python_basic'"
142 | ]
143 | },
144 | "execution_count": 3,
145 | "metadata": {},
146 | "output_type": "execute_result"
147 | }
148 | ],
149 | "source": [
150 | "import os\n",
151 | "os.getcwd()"
152 | ]
153 | },
154 | {
155 | "cell_type": "markdown",
156 | "metadata": {},
157 | "source": [
158 | " merhaba.py adında .py uzantılı bir dosya oluşturup içine \n",
159 | " print(\"Merhaba Dünya!\") yazıp aşağıdaki gibi çalıştırıp sonucu görünüz."
160 | ]
161 | },
162 | {
163 | "cell_type": "markdown",
164 | "metadata": {},
165 | "source": [
166 | " D:\\egitim\\verilen\\face-to-face\\PythonProgramming>python merhaba.py\n",
167 | " Merhaba Dünya"
168 | ]
169 | },
170 | {
171 | "cell_type": "markdown",
172 | "metadata": {},
173 | "source": [
174 | "# Python shell Kullanımı"
175 | ]
176 | },
177 | {
178 | "cell_type": "markdown",
179 | "metadata": {},
180 | "source": [
181 | "komut satırına python yazınız."
182 | ]
183 | },
184 | {
185 | "cell_type": "markdown",
186 | "metadata": {},
187 | "source": [
188 | " D:\\egitim\\verilen\\face-to-face\\PythonProgramming>python\n",
189 | " Python 3.7.1 (default, Dec 10 2018, 22:54:23) [MSC v.1915 64 bit (AMD64)] :: Anaconda, Inc. on win32\n",
190 | " Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n",
191 | " >>>"
192 | ]
193 | },
194 | {
195 | "cell_type": "markdown",
196 | "metadata": {},
197 | "source": [
198 | " >>> print(\"Merhaba Dünya\")\n",
199 | " Merhaba Dünya"
200 | ]
201 | },
202 | {
203 | "cell_type": "markdown",
204 | "metadata": {},
205 | "source": [
206 | " exit() \n",
207 | " ile shell den çıkınız"
208 | ]
209 | },
210 | {
211 | "cell_type": "markdown",
212 | "metadata": {},
213 | "source": [
214 | "# Python Girintileri (Indentations)"
215 | ]
216 | },
217 | {
218 | "cell_type": "code",
219 | "execution_count": 4,
220 | "metadata": {},
221 | "outputs": [
222 | {
223 | "name": "stdout",
224 | "output_type": "stream",
225 | "text": [
226 | "Doğru\n"
227 | ]
228 | }
229 | ],
230 | "source": [
231 | "if 2 > 1:\n",
232 | " print(\"Doğru\")"
233 | ]
234 | },
235 | {
236 | "cell_type": "code",
237 | "execution_count": 7,
238 | "metadata": {},
239 | "outputs": [
240 | {
241 | "ename": "IndentationError",
242 | "evalue": "expected an indented block (, line 2)",
243 | "output_type": "error",
244 | "traceback": [
245 | "\u001b[1;36m File \u001b[1;32m\"\"\u001b[1;36m, line \u001b[1;32m2\u001b[0m\n\u001b[1;33m print(\"Yanlış\")\u001b[0m\n\u001b[1;37m ^\u001b[0m\n\u001b[1;31mIndentationError\u001b[0m\u001b[1;31m:\u001b[0m expected an indented block\n"
246 | ]
247 | }
248 | ],
249 | "source": [
250 | "if 2 < 1:\n",
251 | "print(\"Yanlış\")"
252 | ]
253 | },
254 | {
255 | "cell_type": "markdown",
256 | "metadata": {},
257 | "source": [
258 | "# Python Yorum Satırı"
259 | ]
260 | },
261 | {
262 | "cell_type": "code",
263 | "execution_count": 10,
264 | "metadata": {},
265 | "outputs": [
266 | {
267 | "name": "stdout",
268 | "output_type": "stream",
269 | "text": [
270 | "10\n"
271 | ]
272 | }
273 | ],
274 | "source": [
275 | "# Bu yorum satırı\n",
276 | "print(2 * 5)"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": 8,
282 | "metadata": {},
283 | "outputs": [
284 | {
285 | "name": "stdout",
286 | "output_type": "stream",
287 | "text": [
288 | "10\n"
289 | ]
290 | }
291 | ],
292 | "source": [
293 | "\"\"\"\n",
294 | "Bu yorum bloğu\n",
295 | "\"\"\"\n",
296 | "print(2 * 5)"
297 | ]
298 | },
299 | {
300 | "cell_type": "code",
301 | "execution_count": 9,
302 | "metadata": {},
303 | "outputs": [
304 | {
305 | "name": "stdout",
306 | "output_type": "stream",
307 | "text": [
308 | "10\n"
309 | ]
310 | }
311 | ],
312 | "source": [
313 | "'''\n",
314 | "Bu yorum bloğu\n",
315 | "'''\n",
316 | "print(2 * 5)"
317 | ]
318 | },
319 | {
320 | "cell_type": "code",
321 | "execution_count": null,
322 | "metadata": {},
323 | "outputs": [],
324 | "source": []
325 | }
326 | ],
327 | "metadata": {
328 | "kernelspec": {
329 | "display_name": "Python 3",
330 | "language": "python",
331 | "name": "python3"
332 | },
333 | "language_info": {
334 | "codemirror_mode": {
335 | "name": "ipython",
336 | "version": 3
337 | },
338 | "file_extension": ".py",
339 | "mimetype": "text/x-python",
340 | "name": "python",
341 | "nbconvert_exporter": "python",
342 | "pygments_lexer": "ipython3",
343 | "version": "3.6.8"
344 | }
345 | },
346 | "nbformat": 4,
347 | "nbformat_minor": 2
348 | }
349 |
--------------------------------------------------------------------------------
/01_python_basic/01_degiskenler_(variables).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | " Diğer programlama dillerinin aksine, Python'un bir değişken bildirme komutu yoktur.\n",
8 | "\n",
9 | " İlk olarak bir değer atarsanız, bir değişken yaratılır."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "# Basit veri tipleri(data types) ilk olarak numbers ve string olarak 2'ye ayrılır.\n",
19 | "Numbers: int\tlong\tfloat\tcomplex\n",
20 | "String : str"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 1,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "name": "stdout",
30 | "output_type": "stream",
31 | "text": [
32 | "5\n",
33 | "Ankara\n"
34 | ]
35 | }
36 | ],
37 | "source": [
38 | "x = 5\n",
39 | "y = \"Ankara\"\n",
40 | "print(x)\n",
41 | "print(y)"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 2,
47 | "metadata": {},
48 | "outputs": [
49 | {
50 | "data": {
51 | "text/plain": [
52 | "int"
53 | ]
54 | },
55 | "execution_count": 2,
56 | "metadata": {},
57 | "output_type": "execute_result"
58 | }
59 | ],
60 | "source": [
61 | "type(x)"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 3,
67 | "metadata": {},
68 | "outputs": [
69 | {
70 | "data": {
71 | "text/plain": [
72 | "str"
73 | ]
74 | },
75 | "execution_count": 3,
76 | "metadata": {},
77 | "output_type": "execute_result"
78 | }
79 | ],
80 | "source": [
81 | "type(y)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 4,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "x = 4 # int türünde\n",
91 | "x = \"Yasemin\" # x şimdi string oldu"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": 5,
97 | "metadata": {},
98 | "outputs": [
99 | {
100 | "data": {
101 | "text/plain": [
102 | "str"
103 | ]
104 | },
105 | "execution_count": 5,
106 | "metadata": {},
107 | "output_type": "execute_result"
108 | }
109 | ],
110 | "source": [
111 | "type(x)"
112 | ]
113 | },
114 | {
115 | "cell_type": "markdown",
116 | "metadata": {},
117 | "source": [
118 | "# Değişken İsimlendirme Kuralları"
119 | ]
120 | },
121 | {
122 | "cell_type": "markdown",
123 | "metadata": {},
124 | "source": [
125 | " 1. Bir değişken ismi bir harf veya alt çizgi karakteriyle başlamalıdır.\n",
126 | " 2. Değişken ismi bir sayı ile başlayamaz, + gibi aritmetik işleç ile başlayamaz. \n",
127 | " 3. Bir değişken ismi yalnızca alfa sayısal karakterler ve alt çizgiler içerebilir (A-z, 0-9 ve _).\n",
128 | " 4. Değişken isimleri büyük / küçük harf duyarlıdır (yas, Yas ve AGE üç farklı değişkendir)\n",
129 | " 5. Değişken ismi boşluk içeremez örn: isim soyisim Doğrusu: isim_soyisim"
130 | ]
131 | },
132 | {
133 | "cell_type": "code",
134 | "execution_count": 4,
135 | "metadata": {},
136 | "outputs": [],
137 | "source": [
138 | "# degisken_ismi"
139 | ]
140 | },
141 | {
142 | "cell_type": "markdown",
143 | "metadata": {},
144 | "source": [
145 | "# Rezerve Edilmiş kelimeler"
146 | ]
147 | },
148 | {
149 | "cell_type": "markdown",
150 | "metadata": {},
151 | "source": [
152 | "['False', 'None', 'True', 'and', 'as', 'assert', 'async', 'await', 'break', 'class', 'continue', 'def', 'del',\n",
153 | "'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'nonlocal',\n",
154 | "'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield']"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 7,
160 | "metadata": {},
161 | "outputs": [
162 | {
163 | "data": {
164 | "text/plain": [
165 | "['False',\n",
166 | " 'None',\n",
167 | " 'True',\n",
168 | " 'and',\n",
169 | " 'as',\n",
170 | " 'assert',\n",
171 | " 'async',\n",
172 | " 'await',\n",
173 | " 'break',\n",
174 | " 'class',\n",
175 | " 'continue',\n",
176 | " 'def',\n",
177 | " 'del',\n",
178 | " 'elif',\n",
179 | " 'else',\n",
180 | " 'except',\n",
181 | " 'finally',\n",
182 | " 'for',\n",
183 | " 'from',\n",
184 | " 'global',\n",
185 | " 'if',\n",
186 | " 'import',\n",
187 | " 'in',\n",
188 | " 'is',\n",
189 | " 'lambda',\n",
190 | " 'nonlocal',\n",
191 | " 'not',\n",
192 | " 'or',\n",
193 | " 'pass',\n",
194 | " 'raise',\n",
195 | " 'return',\n",
196 | " 'try',\n",
197 | " 'while',\n",
198 | " 'with',\n",
199 | " 'yield']"
200 | ]
201 | },
202 | "execution_count": 7,
203 | "metadata": {},
204 | "output_type": "execute_result"
205 | }
206 | ],
207 | "source": [
208 | "# Ezberlemek zorsa\n",
209 | "import keyword\n",
210 | "keyword.kwlist"
211 | ]
212 | },
213 | {
214 | "cell_type": "markdown",
215 | "metadata": {},
216 | "source": [
217 | "# Aynı değere sahip değişkenlere aynı anda değer atama"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 6,
223 | "metadata": {},
224 | "outputs": [],
225 | "source": [
226 | "a = b = 4"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 7,
232 | "metadata": {},
233 | "outputs": [
234 | {
235 | "name": "stdout",
236 | "output_type": "stream",
237 | "text": [
238 | "4\n",
239 | "4\n"
240 | ]
241 | }
242 | ],
243 | "source": [
244 | "print(a)\n",
245 | "print(b)"
246 | ]
247 | },
248 | {
249 | "cell_type": "markdown",
250 | "metadata": {},
251 | "source": [
252 | "# Değişkenlere liste içinde sırayla değer atama"
253 | ]
254 | },
255 | {
256 | "cell_type": "code",
257 | "execution_count": 6,
258 | "metadata": {},
259 | "outputs": [],
260 | "source": [
261 | "[a,b,c] = [1,6,\"Malatya\"]"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": 7,
267 | "metadata": {},
268 | "outputs": [
269 | {
270 | "name": "stdout",
271 | "output_type": "stream",
272 | "text": [
273 | "1\n",
274 | "6\n",
275 | "Malatya\n"
276 | ]
277 | }
278 | ],
279 | "source": [
280 | "print(a)\n",
281 | "print(b)\n",
282 | "print(c)"
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {},
288 | "source": [
289 | "# Değişkenlere liste dışında sırayla değer atama"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 8,
295 | "metadata": {},
296 | "outputs": [],
297 | "source": [
298 | "a, b, c = 12, 13 , \"Konya\""
299 | ]
300 | },
301 | {
302 | "cell_type": "code",
303 | "execution_count": 9,
304 | "metadata": {},
305 | "outputs": [
306 | {
307 | "name": "stdout",
308 | "output_type": "stream",
309 | "text": [
310 | "12\n",
311 | "13\n",
312 | "Konya\n"
313 | ]
314 | }
315 | ],
316 | "source": [
317 | "print(a)\n",
318 | "print(b)\n",
319 | "print(c)"
320 | ]
321 | },
322 | {
323 | "cell_type": "markdown",
324 | "metadata": {},
325 | "source": [
326 | "# Etkileşimli Kabuğun (Interactive Shell) Hafızası"
327 | ]
328 | },
329 | {
330 | "cell_type": "code",
331 | "execution_count": 10,
332 | "metadata": {},
333 | "outputs": [
334 | {
335 | "data": {
336 | "text/plain": [
337 | "13"
338 | ]
339 | },
340 | "execution_count": 10,
341 | "metadata": {},
342 | "output_type": "execute_result"
343 | }
344 | ],
345 | "source": [
346 | "5 + 8"
347 | ]
348 | },
349 | {
350 | "cell_type": "code",
351 | "execution_count": 11,
352 | "metadata": {},
353 | "outputs": [
354 | {
355 | "data": {
356 | "text/plain": [
357 | "13"
358 | ]
359 | },
360 | "execution_count": 11,
361 | "metadata": {},
362 | "output_type": "execute_result"
363 | }
364 | ],
365 | "source": [
366 | "_"
367 | ]
368 | },
369 | {
370 | "cell_type": "code",
371 | "execution_count": null,
372 | "metadata": {},
373 | "outputs": [],
374 | "source": []
375 | }
376 | ],
377 | "metadata": {
378 | "kernelspec": {
379 | "display_name": "Python 3",
380 | "language": "python",
381 | "name": "python3"
382 | },
383 | "language_info": {
384 | "codemirror_mode": {
385 | "name": "ipython",
386 | "version": 3
387 | },
388 | "file_extension": ".py",
389 | "mimetype": "text/x-python",
390 | "name": "python",
391 | "nbconvert_exporter": "python",
392 | "pygments_lexer": "ipython3",
393 | "version": "3.6.8"
394 | }
395 | },
396 | "nbformat": 4,
397 | "nbformat_minor": 2
398 | }
399 |
--------------------------------------------------------------------------------
/01_python_basic/03_kullanicidan_bilgi_almak_(input).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stdout",
10 | "output_type": "stream",
11 | "text": [
12 | "İsminiz nedir? Erkan\n"
13 | ]
14 | }
15 | ],
16 | "source": [
17 | "isim = input(\"İsminiz nedir? \")"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 2,
23 | "metadata": {},
24 | "outputs": [
25 | {
26 | "name": "stdout",
27 | "output_type": "stream",
28 | "text": [
29 | "Merhaba Erkan\n"
30 | ]
31 | }
32 | ],
33 | "source": [
34 | "print(\"Merhaba \", isim)"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 3,
40 | "metadata": {},
41 | "outputs": [],
42 | "source": [
43 | "yas = 0"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 4,
49 | "metadata": {},
50 | "outputs": [
51 | {
52 | "name": "stdout",
53 | "output_type": "stream",
54 | "text": [
55 | "Yaşınız: 33\n"
56 | ]
57 | }
58 | ],
59 | "source": [
60 | "yas = input(\"Yaşınız: \")"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": 5,
66 | "metadata": {},
67 | "outputs": [
68 | {
69 | "name": "stdout",
70 | "output_type": "stream",
71 | "text": [
72 | "Merhaba Erkan yaşınız: 33\n"
73 | ]
74 | }
75 | ],
76 | "source": [
77 | "print(\"Merhaba\", isim, \" yaşınız: \", yas)"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": 6,
83 | "metadata": {},
84 | "outputs": [
85 | {
86 | "ename": "TypeError",
87 | "evalue": "unsupported operand type(s) for -: 'int' and 'str'",
88 | "output_type": "error",
89 | "traceback": [
90 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
91 | "\u001b[1;31mTypeError\u001b[0m Traceback (most recent call last)",
92 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Merhaba\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0misim\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m2019\u001b[0m \u001b[1;33m-\u001b[0m \u001b[0myas\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\" yılında doğmuş olmalısınız.\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
93 | "\u001b[1;31mTypeError\u001b[0m: unsupported operand type(s) for -: 'int' and 'str'"
94 | ]
95 | }
96 | ],
97 | "source": [
98 | "print(\"Merhaba\", isim, 2019 - yas, \" yılında doğmuş olmalısınız.\")"
99 | ]
100 | },
101 | {
102 | "cell_type": "markdown",
103 | "metadata": {},
104 | "source": [
105 | " Dikkat!!!! Alınan girdiler string türündedir."
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": 7,
111 | "metadata": {},
112 | "outputs": [
113 | {
114 | "name": "stdout",
115 | "output_type": "stream",
116 | "text": [
117 | "Merhaba Erkan 1986 yılında doğmuş olmalısınız.\n"
118 | ]
119 | }
120 | ],
121 | "source": [
122 | "print(\"Merhaba\", isim, 2019 - int(yas), \" yılında doğmuş olmalısınız.\")"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": 10,
128 | "metadata": {},
129 | "outputs": [
130 | {
131 | "ename": "ValueError",
132 | "evalue": "invalid literal for int() with base 10: 'kirk'",
133 | "output_type": "error",
134 | "traceback": [
135 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
136 | "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
137 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mkırkbir\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"kirk\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2\u001b[0m \u001b[0mtype\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mkırkbir\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
138 | "\u001b[1;31mValueError\u001b[0m: invalid literal for int() with base 10: 'kirk'"
139 | ]
140 | }
141 | ],
142 | "source": [
143 | "kırkbir = int(\"kirk\")\n",
144 | "type(kırkbir)"
145 | ]
146 | },
147 | {
148 | "cell_type": "markdown",
149 | "metadata": {},
150 | "source": [
151 | " 01_python_basic: alıştırma - 1"
152 | ]
153 | },
154 | {
155 | "cell_type": "markdown",
156 | "metadata": {},
157 | "source": [
158 | " Kullanıcıdan 2 rakam alan ve toplamını ekrana yazan bir python programı yazınız."
159 | ]
160 | },
161 | {
162 | "cell_type": "code",
163 | "execution_count": null,
164 | "metadata": {},
165 | "outputs": [],
166 | "source": []
167 | },
168 | {
169 | "cell_type": "markdown",
170 | "metadata": {},
171 | "source": [
172 | " Opsiyonel Alıştırma"
173 | ]
174 | },
175 | {
176 | "cell_type": "markdown",
177 | "metadata": {},
178 | "source": [
179 | "## Tek input ile birden fazla rakam alarak listeye aktarma"
180 | ]
181 | },
182 | {
183 | "cell_type": "code",
184 | "execution_count": 1,
185 | "metadata": {},
186 | "outputs": [
187 | {
188 | "name": "stdout",
189 | "output_type": "stream",
190 | "text": [
191 | "Boşluk bırakarak giriniz: 2 4 3 12\n"
192 | ]
193 | }
194 | ],
195 | "source": [
196 | "import re\n",
197 | "a = input(\"Boşluk bırakarak giriniz: \")\n",
198 | "a = list(re.findall(r\"[\\w']+\", a)) # Birden fazla çeşitli paternlere göre ayırma yapar\n",
199 | "indeks = 0\n",
200 | "for i in a:\n",
201 | " a[indeks] = int(i)\n",
202 | " indeks +=1"
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 2,
208 | "metadata": {},
209 | "outputs": [
210 | {
211 | "data": {
212 | "text/plain": [
213 | "[2, 4, 3, 12]"
214 | ]
215 | },
216 | "execution_count": 2,
217 | "metadata": {},
218 | "output_type": "execute_result"
219 | }
220 | ],
221 | "source": [
222 | "a"
223 | ]
224 | },
225 | {
226 | "cell_type": "code",
227 | "execution_count": null,
228 | "metadata": {},
229 | "outputs": [],
230 | "source": []
231 | }
232 | ],
233 | "metadata": {
234 | "kernelspec": {
235 | "display_name": "Python 3",
236 | "language": "python",
237 | "name": "python3"
238 | },
239 | "language_info": {
240 | "codemirror_mode": {
241 | "name": "ipython",
242 | "version": 3
243 | },
244 | "file_extension": ".py",
245 | "mimetype": "text/x-python",
246 | "name": "python",
247 | "nbconvert_exporter": "python",
248 | "pygments_lexer": "ipython3",
249 | "version": "3.6.8"
250 | }
251 | },
252 | "nbformat": 4,
253 | "nbformat_minor": 2
254 | }
255 |
--------------------------------------------------------------------------------
/01_python_basic/04_cikti_formatlama_(print_format).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# karakter dizisi birleştirme"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "alan_adı = \"datalonga\"\n",
17 | "uzantı = \"com\""
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 5,
23 | "metadata": {},
24 | "outputs": [
25 | {
26 | "name": "stdout",
27 | "output_type": "stream",
28 | "text": [
29 | "datalonga.com\n"
30 | ]
31 | }
32 | ],
33 | "source": [
34 | "print(\"{}.{}\".format(alan_adı, uzantı))"
35 | ]
36 | },
37 | {
38 | "cell_type": "markdown",
39 | "metadata": {},
40 | "source": [
41 | "# format()"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 6,
47 | "metadata": {},
48 | "outputs": [
49 | {
50 | "name": "stdout",
51 | "output_type": "stream",
52 | "text": [
53 | "Satın aldığınız 'datalonga.com' alan adı bir yıl geçerlidir.\n"
54 | ]
55 | }
56 | ],
57 | "source": [
58 | "print(\"Satın aldığınız '{}.{}' alan adı bir yıl geçerlidir.\"\n",
59 | " .format(alan_adı,uzantı))"
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": 7,
65 | "metadata": {},
66 | "outputs": [
67 | {
68 | "name": "stdout",
69 | "output_type": "stream",
70 | "text": [
71 | "13 yaşında Ahmet adında bir çocuğum var.\n"
72 | ]
73 | }
74 | ],
75 | "source": [
76 | "print(\"{} yaşında {} adında bir çocuğum var.\".format(13, \"Ahmet\"))"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": 8,
82 | "metadata": {},
83 | "outputs": [
84 | {
85 | "name": "stdout",
86 | "output_type": "stream",
87 | "text": [
88 | "Matematik'den 89, Fizik'den 87 aldınız\n"
89 | ]
90 | }
91 | ],
92 | "source": [
93 | "print(\"{}'den {}, {}'den {} aldınız\".format(\"Matematik\",89, \"Fizik\", 87))"
94 | ]
95 | },
96 | {
97 | "cell_type": "markdown",
98 | "metadata": {},
99 | "source": [
100 | "# Format için farklı bir yöntem"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": 9,
106 | "metadata": {},
107 | "outputs": [
108 | {
109 | "name": "stdout",
110 | "output_type": "stream",
111 | "text": [
112 | "Satın aldığınız 'datalonga.com' alan adı bir yıl geçerlidir.\n"
113 | ]
114 | }
115 | ],
116 | "source": [
117 | "print(\"Satın aldığınız '%s.%s' alan adı bir yıl geçerlidir.\" %(alan_adı,uzantı))"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": 10,
123 | "metadata": {},
124 | "outputs": [
125 | {
126 | "name": "stdout",
127 | "output_type": "stream",
128 | "text": [
129 | "Satın aldığınız '5.com' alan adı bir yıl geçerlidir.\n"
130 | ]
131 | }
132 | ],
133 | "source": [
134 | "print(\"Satın aldığınız '%d.%s' alan adı bir yıl geçerlidir.\" %(5,uzantı))"
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": null,
140 | "metadata": {},
141 | "outputs": [],
142 | "source": []
143 | }
144 | ],
145 | "metadata": {
146 | "kernelspec": {
147 | "display_name": "Python 3",
148 | "language": "python",
149 | "name": "python3"
150 | },
151 | "language_info": {
152 | "codemirror_mode": {
153 | "name": "ipython",
154 | "version": 3
155 | },
156 | "file_extension": ".py",
157 | "mimetype": "text/x-python",
158 | "name": "python",
159 | "nbconvert_exporter": "python",
160 | "pygments_lexer": "ipython3",
161 | "version": "3.6.8"
162 | }
163 | },
164 | "nbformat": 4,
165 | "nbformat_minor": 2
166 | }
167 |
--------------------------------------------------------------------------------
/01_python_basic/05_kosullu_durumlar_(conditionals_if_else).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# if else"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | " if sınama:\n",
15 | " sınama geçerliyse yapılacak iş\n",
16 | " else:\n",
17 | " sınama geçerli değilse yapılacak iş"
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "# if elif else"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | " if sınama1:\n",
32 | " sınama1 geçerliyse yapılacak iş.\n",
33 | " elif sınama2:\n",
34 | " sınama2 geçerliyse yapılacak iş.\n",
35 | " else:\n",
36 | " Her iki sınama da geçerli değilse yapılacak iş"
37 | ]
38 | },
39 | {
40 | "cell_type": "markdown",
41 | "metadata": {},
42 | "source": [
43 | "# Örnek"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 4,
49 | "metadata": {},
50 | "outputs": [
51 | {
52 | "name": "stdout",
53 | "output_type": "stream",
54 | "text": [
55 | "Bir sayı giriniz:0\n"
56 | ]
57 | }
58 | ],
59 | "source": [
60 | "sayı = input(\"Bir sayı giriniz:\")\n",
61 | "sayı = int(sayı)"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 5,
67 | "metadata": {},
68 | "outputs": [
69 | {
70 | "name": "stdout",
71 | "output_type": "stream",
72 | "text": [
73 | "0 negatiftir.\n"
74 | ]
75 | }
76 | ],
77 | "source": [
78 | "if sayı > 0:\n",
79 | " print(sayı, \"poztiftir.\")\n",
80 | "else:\n",
81 | " print(sayı, \"negatiftir.\")"
82 | ]
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "metadata": {},
87 | "source": [
88 | " Lütfen \"girinti\" ve \":\" ye dikkat!!!!!!!!!"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 6,
94 | "metadata": {},
95 | "outputs": [
96 | {
97 | "name": "stdout",
98 | "output_type": "stream",
99 | "text": [
100 | "0 sıfırdır.\n"
101 | ]
102 | }
103 | ],
104 | "source": [
105 | "if sayı > 0:\n",
106 | " print(sayı, \"poztiftir.\")\n",
107 | "elif sayı == 0:\n",
108 | " print(sayı, \"sıfırdır.\")\n",
109 | "else:\n",
110 | " print(sayı, \"negatiftir.\")\n",
111 | " "
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": null,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": []
120 | }
121 | ],
122 | "metadata": {
123 | "kernelspec": {
124 | "display_name": "Python 3",
125 | "language": "python",
126 | "name": "python3"
127 | },
128 | "language_info": {
129 | "codemirror_mode": {
130 | "name": "ipython",
131 | "version": 3
132 | },
133 | "file_extension": ".py",
134 | "mimetype": "text/x-python",
135 | "name": "python",
136 | "nbconvert_exporter": "python",
137 | "pygments_lexer": "ipython3",
138 | "version": "3.6.8"
139 | }
140 | },
141 | "nbformat": 4,
142 | "nbformat_minor": 2
143 | }
144 |
--------------------------------------------------------------------------------
/01_python_basic/15_lambda_fonksiyonlar_(lambda_functions).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | " Lambda tıpkı def ifadesi gibi fonksiyon tanımlamamıza yardım eder. Fonksiyon tanımlayıp kullanmayı çok daha hızlandırır. lambda ifadesi ile oluşturulan fonksiyonlara lambda fonksiyon denir."
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "# Fonksiyon tanımı\n",
17 | "toplama = lambda p1, p2: p1+p2"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 2,
23 | "metadata": {},
24 | "outputs": [
25 | {
26 | "data": {
27 | "text/plain": [
28 | "function"
29 | ]
30 | },
31 | "execution_count": 2,
32 | "metadata": {},
33 | "output_type": "execute_result"
34 | }
35 | ],
36 | "source": [
37 | "type(toplama)"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": 3,
43 | "metadata": {},
44 | "outputs": [
45 | {
46 | "name": "stdout",
47 | "output_type": "stream",
48 | "text": [
49 | "8\n"
50 | ]
51 | }
52 | ],
53 | "source": [
54 | "print(toplama(3,5))"
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": 4,
60 | "metadata": {},
61 | "outputs": [
62 | {
63 | "data": {
64 | "text/plain": [
65 | "8"
66 | ]
67 | },
68 | "execution_count": 4,
69 | "metadata": {},
70 | "output_type": "execute_result"
71 | }
72 | ],
73 | "source": [
74 | "(lambda p1, p2: p1+p2)(3,5)"
75 | ]
76 | },
77 | {
78 | "cell_type": "markdown",
79 | "metadata": {},
80 | "source": [
81 | " Lambda fonksiyonlarını, bir fonksiyonun işlevselliğine ihtiyaç duyduğumuz, ama konum olarak bir fonksiyon tanımlayamayacağımız veya fonksiyon tanımlamanın zor ya da meşakkatli olduğu durumlarda kullanabiliriz."
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 5,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "tek_mi = lambda x: x % 2 == 1"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": 7,
96 | "metadata": {},
97 | "outputs": [
98 | {
99 | "name": "stdout",
100 | "output_type": "stream",
101 | "text": [
102 | "False\n"
103 | ]
104 | }
105 | ],
106 | "source": [
107 | "print(tek_mi(10))"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": 8,
113 | "metadata": {},
114 | "outputs": [
115 | {
116 | "name": "stdout",
117 | "output_type": "stream",
118 | "text": [
119 | "Çift\n"
120 | ]
121 | }
122 | ],
123 | "source": [
124 | "tek_mi = lambda x: \"Tek\" if x % 2 == 1 else \"Çift\"\n",
125 | "print(tek_mi(10))"
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": 9,
131 | "metadata": {},
132 | "outputs": [
133 | {
134 | "data": {
135 | "text/plain": [
136 | "16"
137 | ]
138 | },
139 | "execution_count": 9,
140 | "metadata": {},
141 | "output_type": "execute_result"
142 | }
143 | ],
144 | "source": [
145 | "karesi = lambda x: x**2\n",
146 | "karesi(4)"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "metadata": {},
153 | "outputs": [],
154 | "source": []
155 | }
156 | ],
157 | "metadata": {
158 | "kernelspec": {
159 | "display_name": "Python 3",
160 | "language": "python",
161 | "name": "python3"
162 | },
163 | "language_info": {
164 | "codemirror_mode": {
165 | "name": "ipython",
166 | "version": 3
167 | },
168 | "file_extension": ".py",
169 | "mimetype": "text/x-python",
170 | "name": "python",
171 | "nbconvert_exporter": "python",
172 | "pygments_lexer": "ipython3",
173 | "version": "3.6.8"
174 | }
175 | },
176 | "nbformat": 4,
177 | "nbformat_minor": 2
178 | }
179 |
--------------------------------------------------------------------------------
/01_python_basic/16_moduller_(moduls).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | " Modül, bazı işlevleri kolaylıkla yerine getirmemizi sağlayan birtakım fonksiyonları ve nitelikleri içinde barındıran araçlardır.\n",
8 | " \n",
9 | " Şimdiye kadar fonksiyonların hayatımızı nasıl kolaylaştırdığını anladık. Modüller ise bunun daha kapsamlısını yapar.\n",
10 | " \n",
11 | " Modüller: \n",
12 | " Daha az kod,\n",
13 | " Kodları tekrar tekrar kullanma,\n",
14 | " Daha düzenli, daha derli toplu bir şekilde çalıştırma kolaylığı sağlar."
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 | " Modülleri import etmeden önce mutlaka onları bilgisayarımızda ilgili yerlere koymamız gerekir. Birçok modül arasında bağımlılık söz konusudur. Dolayısıyla paket yönetimi için conda veya pip gibi araçlar kullanılır. Bu araçlar paket yüklerken bağımlılıkları da kontrol ederler ve gerekirse bağımlı paketleri de yüklerler.\n",
22 | " \n",
23 | " Yüklenmemiş bir modülü import edemeyiz. Çünkü çalışacak kod parçası bilgisayarımızda yoktur."
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "# Modüllerin İçe Aktarılması (import)"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": 1,
36 | "metadata": {},
37 | "outputs": [],
38 | "source": [
39 | "import sys"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 2,
45 | "metadata": {},
46 | "outputs": [
47 | {
48 | "data": {
49 | "text/plain": [
50 | "'Copyright (c) 2001-2018 Python Software Foundation.\\nAll Rights Reserved.\\n\\nCopyright (c) 2000 BeOpen.com.\\nAll Rights Reserved.\\n\\nCopyright (c) 1995-2001 Corporation for National Research Initiatives.\\nAll Rights Reserved.\\n\\nCopyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam.\\nAll Rights Reserved.'"
51 | ]
52 | },
53 | "execution_count": 2,
54 | "metadata": {},
55 | "output_type": "execute_result"
56 | }
57 | ],
58 | "source": [
59 | "sys.copyright"
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": 3,
65 | "metadata": {},
66 | "outputs": [
67 | {
68 | "data": {
69 | "text/plain": [
70 | "sys.getwindowsversion(major=10, minor=0, build=17763, platform=2, service_pack='')"
71 | ]
72 | },
73 | "execution_count": 3,
74 | "metadata": {},
75 | "output_type": "execute_result"
76 | }
77 | ],
78 | "source": [
79 | "sys.getwindowsversion()"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": 13,
85 | "metadata": {},
86 | "outputs": [],
87 | "source": [
88 | "import os"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 5,
94 | "metadata": {},
95 | "outputs": [
96 | {
97 | "name": "stdout",
98 | "output_type": "stream",
99 | "text": [
100 | "nt\n"
101 | ]
102 | }
103 | ],
104 | "source": [
105 | "print(os.name)"
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": 6,
111 | "metadata": {},
112 | "outputs": [
113 | {
114 | "name": "stdout",
115 | "output_type": "stream",
116 | "text": [
117 | "Windows kullanıyorsunuz\n"
118 | ]
119 | }
120 | ],
121 | "source": [
122 | "if os.name == \"nt\":\n",
123 | " print(\"Windows kullanıyorsunuz\")\n",
124 | "else: print(\"Başka bir OS\")"
125 | ]
126 | },
127 | {
128 | "cell_type": "markdown",
129 | "metadata": {},
130 | "source": [
131 | "# import alias"
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "execution_count": 28,
137 | "metadata": {},
138 | "outputs": [],
139 | "source": [
140 | "# import modül as farklı_isim"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": 7,
146 | "metadata": {},
147 | "outputs": [],
148 | "source": [
149 | "import os as isletim_sistemi"
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": 8,
155 | "metadata": {},
156 | "outputs": [
157 | {
158 | "name": "stdout",
159 | "output_type": "stream",
160 | "text": [
161 | "12\n"
162 | ]
163 | }
164 | ],
165 | "source": [
166 | "print(isletim_sistemi.cpu_count())"
167 | ]
168 | },
169 | {
170 | "cell_type": "markdown",
171 | "metadata": {},
172 | "source": [
173 | "# import başka bir yöntem"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 29,
179 | "metadata": {},
180 | "outputs": [],
181 | "source": [
182 | "# from modül_adı import fonksiyon, nitelik, sınıf"
183 | ]
184 | },
185 | {
186 | "cell_type": "code",
187 | "execution_count": 9,
188 | "metadata": {},
189 | "outputs": [],
190 | "source": [
191 | "from os import name"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 10,
197 | "metadata": {},
198 | "outputs": [
199 | {
200 | "name": "stdout",
201 | "output_type": "stream",
202 | "text": [
203 | "nt\n"
204 | ]
205 | }
206 | ],
207 | "source": [
208 | "print(name)"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": 11,
214 | "metadata": {},
215 | "outputs": [],
216 | "source": [
217 | "from sklearn.preprocessing import OneHotEncoder"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 12,
223 | "metadata": {},
224 | "outputs": [],
225 | "source": [
226 | "encoder = OneHotEncoder()"
227 | ]
228 | },
229 | {
230 | "cell_type": "markdown",
231 | "metadata": {},
232 | "source": [
233 | "# hepsini birden içe aktar "
234 | ]
235 | },
236 | {
237 | "cell_type": "code",
238 | "execution_count": 2,
239 | "metadata": {},
240 | "outputs": [],
241 | "source": [
242 | "from os import *"
243 | ]
244 | },
245 | {
246 | "cell_type": "code",
247 | "execution_count": 3,
248 | "metadata": {},
249 | "outputs": [
250 | {
251 | "data": {
252 | "text/plain": [
253 | "'D:\\\\egitim\\\\verilen\\\\machine-learning-with-python\\\\01_python_basic'"
254 | ]
255 | },
256 | "execution_count": 3,
257 | "metadata": {},
258 | "output_type": "execute_result"
259 | }
260 | ],
261 | "source": [
262 | "getcwd()"
263 | ]
264 | },
265 | {
266 | "cell_type": "markdown",
267 | "metadata": {},
268 | "source": [
269 | "# Modül Tanımlamak"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": 13,
275 | "metadata": {},
276 | "outputs": [],
277 | "source": [
278 | "# Kendi modülümüzü oluşturabiliriz.\n",
279 | "# Bunun için bulunduğumuz dizinde \"my_module\" adında bir klasör oluşturalım\n",
280 | "# Bu klasör içinde \"my_geo.py\" adında bir dosya oluşturalım\n",
281 | "# bu dosyanın içine basit bir alan hesaplama fonksiyonu yazalım\n",
282 | "# Aşağıdaki şekilde kullanalım"
283 | ]
284 | },
285 | {
286 | "cell_type": "code",
287 | "execution_count": 14,
288 | "metadata": {},
289 | "outputs": [],
290 | "source": [
291 | "import my_module.my_geo as g"
292 | ]
293 | },
294 | {
295 | "cell_type": "code",
296 | "execution_count": 15,
297 | "metadata": {},
298 | "outputs": [
299 | {
300 | "data": {
301 | "text/plain": [
302 | "6"
303 | ]
304 | },
305 | "execution_count": 15,
306 | "metadata": {},
307 | "output_type": "execute_result"
308 | }
309 | ],
310 | "source": [
311 | "g.alan_hesapla(2,3)"
312 | ]
313 | },
314 | {
315 | "cell_type": "code",
316 | "execution_count": null,
317 | "metadata": {},
318 | "outputs": [],
319 | "source": []
320 | }
321 | ],
322 | "metadata": {
323 | "kernelspec": {
324 | "display_name": "Python 3",
325 | "language": "python",
326 | "name": "python3"
327 | },
328 | "language_info": {
329 | "codemirror_mode": {
330 | "name": "ipython",
331 | "version": 3
332 | },
333 | "file_extension": ".py",
334 | "mimetype": "text/x-python",
335 | "name": "python",
336 | "nbconvert_exporter": "python",
337 | "pygments_lexer": "ipython3",
338 | "version": "3.6.8"
339 | }
340 | },
341 | "nbformat": 4,
342 | "nbformat_minor": 2
343 | }
344 |
--------------------------------------------------------------------------------
/01_python_basic/17_nesneye_yonelik_programlama_(object_oriented_programming_OOP).ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "\"\"\" \n",
10 | " Nesne tabanlı programlama, pek çok yazılım geliştirme yönteminden yalnızca biridir. \n",
11 | "Siz bu yöntemi, yazdığınız programlarda kullanmak zorunda değilsiniz. \n",
12 | "Nesne tabanlı programlamadan hiç yararlanmadan da faydalı ve iyi programlar yazabilirsiniz elbette.\n",
13 | "Python sizi bu yöntemi kullanmaya asla zorlamaz. \n",
14 | "Ancak nesne tabanlı programlama yaklaşımı program geliştirme alanında oldukça yaygın kullanılan \n",
15 | "bir yöntemdir. Dolayısıyla, etrafta nesne tabanlı programlama yaklaşımından yararlanılarak \n",
16 | "yazılmış pek çok kodla karşılaşacaksınız. \n",
17 | "Hiç değilse karşılaştığınız bu kodları anlayabilmek için nesne tabanlı programlamayı \n",
18 | "biliyor ve tanıyor olmanız lazım. \n",
19 | "Aksi halde, bu yöntem kullanılarak geliştirilmiş programları anlayamazsınız.\n",
20 | "\"\"\""
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "# Sınıflar"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | " OOP'nin temel kavramıdır. Nesneleri tanımlamamızı sağlar. Sınıfları nesne üreten şablon veya kalıp olarak da düşünebiliriz. Örneğin oyuncak araba üreten bir makine kalıbı tasarlandığında aynı kalıptan binlerce oyuncak araba üretilebilir."
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 1,
40 | "metadata": {},
41 | "outputs": [],
42 | "source": [
43 | "class Araba:\n",
44 | " def __init__(self, marka=\"TOFAŞ\", model=\"Hacı Murat\", uretim_yili=1974, \n",
45 | " yakit_cinsi=\"benzin\", yakit_depo_kapasite=45, mevcut_yakit=10):\n",
46 | " self.marka = marka\n",
47 | " self.model = model\n",
48 | " self.uretim_yili = uretim_yili\n",
49 | " self.yakit_cinsi = yakit_cinsi\n",
50 | " self.yakit_depo_kapasite = yakit_depo_kapasite\n",
51 | " self.mevcut_yakit = mevcut_yakit\n",
52 | " \n",
53 | " \n",
54 | " def ozellik_yaz(self):\n",
55 | " print(\"Marka:\", self.marka)\n",
56 | " print(\"Model:\", self.model)\n",
57 | " print(\"Üretim Yılı:\", self.uretim_yili)\n",
58 | " print(\"Yakıt cinsi:\", self.yakit_cinsi)\n",
59 | " print(\"Depo kapasite:\", self.yakit_depo_kapasite)\n",
60 | " \n",
61 | " def yakit_al(self, miktar):\n",
62 | " if miktar + self.mevcut_yakit <= self.yakit_depo_kapasite:\n",
63 | " self.mevcut_yakit += miktar\n",
64 | " else:\n",
65 | " print(\"Bu kadar yakıt alamam\")\n",
66 | " bos = self.yakit_depo_kapasite - self.mevcut_yakit\n",
67 | " print(\"En fazla {} litre daha {} doldurabilirsin.\".format(bos,self.yakit_cinsi))\n",
68 | " \n",
69 | " def ne_kadar_yakit_var(self):\n",
70 | " return self.mevcut_yakit\n",
71 | " \n",
72 | " def kalan_yakiti_yazdir(self):\n",
73 | " print(self.mevcut_yakit,\"litre yakıt var.\")\n",
74 | " \n",
75 | " "
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": 2,
81 | "metadata": {},
82 | "outputs": [],
83 | "source": [
84 | "# Varsayılan özelliklerle araba yaratmak\n",
85 | "hacı_murat_01 = Araba()"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": 3,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "hacı_murat_02 = Araba()"
95 | ]
96 | },
97 | {
98 | "cell_type": "code",
99 | "execution_count": 4,
100 | "metadata": {},
101 | "outputs": [
102 | {
103 | "data": {
104 | "text/plain": [
105 | "__main__.Araba"
106 | ]
107 | },
108 | "execution_count": 4,
109 | "metadata": {},
110 | "output_type": "execute_result"
111 | }
112 | ],
113 | "source": [
114 | "type(hacı_murat_01)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 5,
120 | "metadata": {},
121 | "outputs": [
122 | {
123 | "name": "stdout",
124 | "output_type": "stream",
125 | "text": [
126 | "Marka: TOFAŞ\n",
127 | "Model: Hacı Murat\n",
128 | "Üretim Yılı: 1974\n",
129 | "Yakıt cinsi: benzin\n",
130 | "Depo kapasite: 45\n"
131 | ]
132 | }
133 | ],
134 | "source": [
135 | "hacı_murat_01.ozellik_yaz()"
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": 6,
141 | "metadata": {},
142 | "outputs": [],
143 | "source": [
144 | "# Nesne yaratırken elle parametre göndermek\n",
145 | "mercedes_01 = Araba(marka=\"Mercedes\", model=\"C200\", \n",
146 | " uretim_yili=1979, yakit_cinsi=\"benzin\", \n",
147 | " yakit_depo_kapasite=45.0, mevcut_yakit=25.0)"
148 | ]
149 | },
150 | {
151 | "cell_type": "code",
152 | "execution_count": 7,
153 | "metadata": {},
154 | "outputs": [
155 | {
156 | "name": "stdout",
157 | "output_type": "stream",
158 | "text": [
159 | "Marka: Mercedes\n",
160 | "Model: C200\n",
161 | "Üretim Yılı: 1979\n",
162 | "Yakıt cinsi: benzin\n",
163 | "Depo kapasite: 45.0\n"
164 | ]
165 | }
166 | ],
167 | "source": [
168 | "mercedes_01.ozellik_yaz()"
169 | ]
170 | },
171 | {
172 | "cell_type": "code",
173 | "execution_count": 6,
174 | "metadata": {},
175 | "outputs": [
176 | {
177 | "data": {
178 | "text/plain": [
179 | "10"
180 | ]
181 | },
182 | "execution_count": 6,
183 | "metadata": {},
184 | "output_type": "execute_result"
185 | }
186 | ],
187 | "source": [
188 | "hacı_murat_01.ne_kadar_yakit_var()"
189 | ]
190 | },
191 | {
192 | "cell_type": "code",
193 | "execution_count": 7,
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "hacı_murat_01.yakit_al(20.0)"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 8,
203 | "metadata": {},
204 | "outputs": [
205 | {
206 | "data": {
207 | "text/plain": [
208 | "30.0"
209 | ]
210 | },
211 | "execution_count": 8,
212 | "metadata": {},
213 | "output_type": "execute_result"
214 | }
215 | ],
216 | "source": [
217 | "hacı_murat_01.ne_kadar_yakit_var()"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 9,
223 | "metadata": {},
224 | "outputs": [
225 | {
226 | "name": "stdout",
227 | "output_type": "stream",
228 | "text": [
229 | "Marka: TOFAŞ\n",
230 | "Model: Hacı Murat\n",
231 | "Üretim Yılı: 1974\n",
232 | "Yakıt cinsi: benzin\n",
233 | "Depo kapasite: 45\n"
234 | ]
235 | }
236 | ],
237 | "source": [
238 | "hacı_murat_01.ozellik_yaz()"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": 10,
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "hacı_murat_01.yakit_al(10.0)"
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": 11,
253 | "metadata": {},
254 | "outputs": [
255 | {
256 | "name": "stdout",
257 | "output_type": "stream",
258 | "text": [
259 | "40.0 litre yakıt var.\n"
260 | ]
261 | }
262 | ],
263 | "source": [
264 | "hacı_murat_01.kalan_yakiti_yazdir()"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": 12,
270 | "metadata": {},
271 | "outputs": [],
272 | "source": [
273 | "murat_131 = Araba(marka=\"Murat\", model=131, \n",
274 | " uretim_yili=1979, yakit_cinsi=\"benzin\", \n",
275 | " yakit_depo_kapasite=45.0, mevcut_yakit=25.0)"
276 | ]
277 | },
278 | {
279 | "cell_type": "code",
280 | "execution_count": 13,
281 | "metadata": {},
282 | "outputs": [
283 | {
284 | "name": "stdout",
285 | "output_type": "stream",
286 | "text": [
287 | "Marka: Murat\n",
288 | "Model: 131\n",
289 | "Üretim Yılı: 1979\n",
290 | "Yakıt cinsi: benzin\n",
291 | "Depo kapasite: 45.0\n"
292 | ]
293 | }
294 | ],
295 | "source": [
296 | "murat_131.ozellik_yaz()"
297 | ]
298 | },
299 | {
300 | "cell_type": "code",
301 | "execution_count": 67,
302 | "metadata": {},
303 | "outputs": [
304 | {
305 | "name": "stdout",
306 | "output_type": "stream",
307 | "text": [
308 | "Bu kadar yakıt alamam\n",
309 | "En fazla 20.0 litre daha benzin doldurabilirsin\n"
310 | ]
311 | }
312 | ],
313 | "source": [
314 | "murat_131.yakit_al(50)"
315 | ]
316 | },
317 | {
318 | "cell_type": "markdown",
319 | "metadata": {},
320 | "source": [
321 | " 01_python_basic: alıştırma - 10"
322 | ]
323 | },
324 | {
325 | "cell_type": "code",
326 | "execution_count": 14,
327 | "metadata": {},
328 | "outputs": [],
329 | "source": [
330 | "# toplam, ortalama, standart_sapma metodlarına sahip bir sınıf yazınız.\n",
331 | "# Metodlar girdi olarak rakam listesini almalı ve sonuç döndürmelidir."
332 | ]
333 | }
334 | ],
335 | "metadata": {
336 | "kernelspec": {
337 | "display_name": "Python 3",
338 | "language": "python",
339 | "name": "python3"
340 | },
341 | "language_info": {
342 | "codemirror_mode": {
343 | "name": "ipython",
344 | "version": 3
345 | },
346 | "file_extension": ".py",
347 | "mimetype": "text/x-python",
348 | "name": "python",
349 | "nbconvert_exporter": "python",
350 | "pygments_lexer": "ipython3",
351 | "version": "3.6.8"
352 | }
353 | },
354 | "nbformat": 4,
355 | "nbformat_minor": 2
356 | }
357 |
--------------------------------------------------------------------------------
/01_python_basic/18_ yield.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | " yield deyimi, return deyimi gibi fonksiyonlarda kullanılır, ancak, fonksiyon bir generator döndürür. "
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "def sonuclari_yiginla(liste):\n",
17 | " for x in liste:\n",
18 | " yield x * x"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 2,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "sonuclar = sonuclari_yiginla([1,2,3,4,5])"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 3,
33 | "metadata": {},
34 | "outputs": [
35 | {
36 | "name": "stdout",
37 | "output_type": "stream",
38 | "text": [
39 | "1\n",
40 | "4\n",
41 | "9\n",
42 | "16\n",
43 | "25\n"
44 | ]
45 | }
46 | ],
47 | "source": [
48 | "for k in sonuclar:\n",
49 | " print(k)"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": null,
55 | "metadata": {},
56 | "outputs": [],
57 | "source": []
58 | }
59 | ],
60 | "metadata": {
61 | "kernelspec": {
62 | "display_name": "Python 3",
63 | "language": "python",
64 | "name": "python3"
65 | },
66 | "language_info": {
67 | "codemirror_mode": {
68 | "name": "ipython",
69 | "version": 3
70 | },
71 | "file_extension": ".py",
72 | "mimetype": "text/x-python",
73 | "name": "python",
74 | "nbconvert_exporter": "python",
75 | "pygments_lexer": "ipython3",
76 | "version": "3.6.5"
77 | }
78 | },
79 | "nbformat": 4,
80 | "nbformat_minor": 2
81 | }
82 |
--------------------------------------------------------------------------------
/01_python_basic/img/Guido_van_Rossum.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/01_python_basic/img/Guido_van_Rossum.png
--------------------------------------------------------------------------------
/01_python_basic/merhaba.py:
--------------------------------------------------------------------------------
1 | print("Merhaba Dünya")
--------------------------------------------------------------------------------
/01_python_basic/my_module/__pycache__/alan_hesapla.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/01_python_basic/my_module/__pycache__/alan_hesapla.cpython-36.pyc
--------------------------------------------------------------------------------
/01_python_basic/my_module/__pycache__/my_geo.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/01_python_basic/my_module/__pycache__/my_geo.cpython-36.pyc
--------------------------------------------------------------------------------
/01_python_basic/my_module/my_geo.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | def alan_hesapla(en=1.0, boy=1.0):
4 | return en * boy
5 |
--------------------------------------------------------------------------------
/01_python_basic/notlar.txt:
--------------------------------------------------------------------------------
1 | ogrenci diploma etkinlikler
2 | Ahmet 3.84 0.12 0.22
3 | Mehmet 3.22
4 | Esra 2.88 0.18 0.22
--------------------------------------------------------------------------------
/01_python_basic/yeni_dosya.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/01_python_basic/yeni_dosya.txt
--------------------------------------------------------------------------------
/02_pandas/04_read_excel_files_write_dataframe_to_disk.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 2,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "data_path = \"D:\\\\Datasets\\\\\"\n",
19 | "data_set = \"simple_data.xlsx\""
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 4,
25 | "metadata": {
26 | "scrolled": true
27 | },
28 | "outputs": [],
29 | "source": [
30 | "df = pd.read_excel(data_path + data_set)\n",
31 | "# df = pd.read_excel(\"simple_data.xlsx\")"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 6,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "# xlrd >= 1.0.0 hatası alınması durumunda komut satırından aşağıdaki paket yüklenir ve yeniden denenir.\n",
 41 | "# pip install \"xlrd>=1.0.0\""
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 7,
47 | "metadata": {},
48 | "outputs": [],
49 | "source": [
 50 | "# Not: Excel'den okumak csv'ye göre çok yavaştır. \n",
51 | "# Ancak veri türü hakkındaki çıkarımlar daha isabetlidir."
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 5,
57 | "metadata": {},
58 | "outputs": [
59 | {
60 | "data": {
61 | "text/html": [
62 | "\n",
63 | "\n",
76 | "
\n",
77 | " \n",
78 | " \n",
79 | " | \n",
80 | " sirano | \n",
81 | " isim | \n",
82 | " yas | \n",
83 | " meslek | \n",
84 | " sehir | \n",
85 | " aylik_gelir | \n",
86 | "
\n",
87 | " \n",
88 | " \n",
89 | " \n",
90 | " 0 | \n",
91 | " 1 | \n",
92 | " Cemal | \n",
93 | " 35 | \n",
94 | " Isci | \n",
95 | " Ankara | \n",
96 | " 3500 | \n",
97 | "
\n",
98 | " \n",
99 | " 1 | \n",
100 | " 2 | \n",
101 | " Ceyda | \n",
102 | " 42 | \n",
103 | " Memur | \n",
104 | " Kayseri | \n",
105 | " 4200 | \n",
106 | "
\n",
107 | " \n",
108 | " 2 | \n",
109 | " 3 | \n",
110 | " Timur | \n",
111 | " 30 | \n",
112 | " Müzisyen | \n",
113 | " Istanbul | \n",
114 | " 9000 | \n",
115 | "
\n",
116 | " \n",
117 | " 3 | \n",
118 | " 4 | \n",
119 | " Burcu | \n",
120 | " 29 | \n",
121 | " Pazarlamaci | \n",
122 | " Ankara | \n",
123 | " 4200 | \n",
124 | "
\n",
125 | " \n",
126 | " 4 | \n",
127 | " 5 | \n",
128 | " Yasemin | \n",
129 | " 23 | \n",
130 | " Pazarlamaci | \n",
131 | " Bursa | \n",
132 | " 4800 | \n",
133 | "
\n",
134 | " \n",
135 | "
\n",
136 | "
"
137 | ],
138 | "text/plain": [
139 | " sirano isim yas meslek sehir aylik_gelir\n",
140 | "0 1 Cemal 35 Isci Ankara 3500\n",
141 | "1 2 Ceyda 42 Memur Kayseri 4200\n",
142 | "2 3 Timur 30 Müzisyen Istanbul 9000\n",
143 | "3 4 Burcu 29 Pazarlamaci Ankara 4200\n",
144 | "4 5 Yasemin 23 Pazarlamaci Bursa 4800"
145 | ]
146 | },
147 | "execution_count": 5,
148 | "metadata": {},
149 | "output_type": "execute_result"
150 | }
151 | ],
152 | "source": [
153 | "df.head()"
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": 6,
159 | "metadata": {},
160 | "outputs": [
161 | {
162 | "name": "stdout",
163 | "output_type": "stream",
164 | "text": [
165 | "\n",
166 | "RangeIndex: 15 entries, 0 to 14\n",
167 | "Data columns (total 6 columns):\n",
168 | "sirano 15 non-null int64\n",
169 | "isim 15 non-null object\n",
170 | "yas 15 non-null int64\n",
171 | "meslek 15 non-null object\n",
172 | "sehir 15 non-null object\n",
173 | "aylik_gelir 15 non-null int64\n",
174 | "dtypes: int64(3), object(3)\n",
175 | "memory usage: 848.0+ bytes\n"
176 | ]
177 | }
178 | ],
179 | "source": [
180 | "df.info()"
181 | ]
182 | },
183 | {
184 | "cell_type": "markdown",
185 | "metadata": {},
186 | "source": [
187 | "# Pandas Dataframe'i diske yazma"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": 7,
193 | "metadata": {},
194 | "outputs": [],
195 | "source": [
196 | "df.to_csv(path_or_buf=\"D:\\\\Datasets\\\\simple_data_pandas_writedisk.csv\", \n",
197 | " encoding='utf-8',\n",
198 | " sep=\",\", index=False, header=True, \n",
199 | " columns=['sirano', 'isim', 'yas', 'meslek', 'sehir', 'aylik_gelir'])\n",
200 | "\n",
201 | "# Daha fazla bilgi: https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_csv.html"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": 8,
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
210 | "df_ff = pd.read_csv(\"D:\\\\Datasets\\\\simple_data_pandas_writedisk.csv\")"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 9,
216 | "metadata": {},
217 | "outputs": [
218 | {
219 | "data": {
220 | "text/html": [
221 | "\n",
222 | "\n",
235 | "
\n",
236 | " \n",
237 | " \n",
238 | " | \n",
239 | " sirano | \n",
240 | " isim | \n",
241 | " yas | \n",
242 | " meslek | \n",
243 | " sehir | \n",
244 | " aylik_gelir | \n",
245 | "
\n",
246 | " \n",
247 | " \n",
248 | " \n",
249 | " 0 | \n",
250 | " 1 | \n",
251 | " Cemal | \n",
252 | " 35 | \n",
253 | " Isci | \n",
254 | " Ankara | \n",
255 | " 3500 | \n",
256 | "
\n",
257 | " \n",
258 | " 1 | \n",
259 | " 2 | \n",
260 | " Ceyda | \n",
261 | " 42 | \n",
262 | " Memur | \n",
263 | " Kayseri | \n",
264 | " 4200 | \n",
265 | "
\n",
266 | " \n",
267 | " 2 | \n",
268 | " 3 | \n",
269 | " Timur | \n",
270 | " 30 | \n",
271 | " Müzisyen | \n",
272 | " Istanbul | \n",
273 | " 9000 | \n",
274 | "
\n",
275 | " \n",
276 | " 3 | \n",
277 | " 4 | \n",
278 | " Burcu | \n",
279 | " 29 | \n",
280 | " Pazarlamaci | \n",
281 | " Ankara | \n",
282 | " 4200 | \n",
283 | "
\n",
284 | " \n",
285 | " 4 | \n",
286 | " 5 | \n",
287 | " Yasemin | \n",
288 | " 23 | \n",
289 | " Pazarlamaci | \n",
290 | " Bursa | \n",
291 | " 4800 | \n",
292 | "
\n",
293 | " \n",
294 | "
\n",
295 | "
"
296 | ],
297 | "text/plain": [
298 | " sirano isim yas meslek sehir aylik_gelir\n",
299 | "0 1 Cemal 35 Isci Ankara 3500\n",
300 | "1 2 Ceyda 42 Memur Kayseri 4200\n",
301 | "2 3 Timur 30 Müzisyen Istanbul 9000\n",
302 | "3 4 Burcu 29 Pazarlamaci Ankara 4200\n",
303 | "4 5 Yasemin 23 Pazarlamaci Bursa 4800"
304 | ]
305 | },
306 | "execution_count": 9,
307 | "metadata": {},
308 | "output_type": "execute_result"
309 | }
310 | ],
311 | "source": [
312 | "df_ff.head()"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "metadata": {},
319 | "outputs": [],
320 | "source": []
321 | }
322 | ],
323 | "metadata": {
324 | "kernelspec": {
325 | "display_name": "Python 3",
326 | "language": "python",
327 | "name": "python3"
328 | },
329 | "language_info": {
330 | "codemirror_mode": {
331 | "name": "ipython",
332 | "version": 3
333 | },
334 | "file_extension": ".py",
335 | "mimetype": "text/x-python",
336 | "name": "python",
337 | "nbconvert_exporter": "python",
338 | "pygments_lexer": "ipython3",
339 | "version": "3.6.8"
340 | }
341 | },
342 | "nbformat": 4,
343 | "nbformat_minor": 2
344 | }
345 |
--------------------------------------------------------------------------------
/02_pandas/09_map_and_select_dtypes.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 4,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 5,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "df = pd.read_csv(\"D:/Datasets/simple_data.csv\")"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 6,
25 | "metadata": {},
26 | "outputs": [
27 | {
28 | "data": {
29 | "text/html": [
30 | "\n",
31 | "\n",
44 | "
\n",
45 | " \n",
46 | " \n",
47 | " | \n",
48 | " sirano | \n",
49 | " isim | \n",
50 | " yas | \n",
51 | " meslek | \n",
52 | " sehir | \n",
53 | " aylik_gelir | \n",
54 | "
\n",
55 | " \n",
56 | " \n",
57 | " \n",
58 | " 0 | \n",
59 | " 1 | \n",
60 | " Cemal | \n",
61 | " 35 | \n",
62 | " Isci | \n",
63 | " Ankara | \n",
64 | " 3500 | \n",
65 | "
\n",
66 | " \n",
67 | " 1 | \n",
68 | " 2 | \n",
69 | " Ceyda | \n",
70 | " 42 | \n",
71 | " Memur | \n",
72 | " Kayseri | \n",
73 | " 4200 | \n",
74 | "
\n",
75 | " \n",
76 | " 2 | \n",
77 | " 3 | \n",
78 | " Timur | \n",
79 | " 30 | \n",
80 | " Müzisyen | \n",
81 | " Istanbul | \n",
82 | " 9000 | \n",
83 | "
\n",
84 | " \n",
85 | " 3 | \n",
86 | " 4 | \n",
87 | " Burcu | \n",
88 | " 29 | \n",
89 | " Pazarlamaci | \n",
90 | " Ankara | \n",
91 | " 4200 | \n",
92 | "
\n",
93 | " \n",
94 | " 4 | \n",
95 | " 5 | \n",
96 | " Yasemin | \n",
97 | " 23 | \n",
98 | " NaN | \n",
99 | " Bursa | \n",
100 | " 4800 | \n",
101 | "
\n",
102 | " \n",
103 | "
\n",
104 | "
"
105 | ],
106 | "text/plain": [
107 | " sirano isim yas meslek sehir aylik_gelir\n",
108 | "0 1 Cemal 35 Isci Ankara 3500\n",
109 | "1 2 Ceyda 42 Memur Kayseri 4200\n",
110 | "2 3 Timur 30 Müzisyen Istanbul 9000\n",
111 | "3 4 Burcu 29 Pazarlamaci Ankara 4200\n",
112 | "4 5 Yasemin 23 NaN Bursa 4800"
113 | ]
114 | },
115 | "execution_count": 6,
116 | "metadata": {},
117 | "output_type": "execute_result"
118 | }
119 | ],
120 | "source": [
121 | "df.head()"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": 7,
127 | "metadata": {},
128 | "outputs": [
129 | {
130 | "data": {
131 | "text/plain": [
132 | "array(['Isci', 'Memur', 'Müzisyen', 'Pazarlamaci', nan, 'Doktor',\n",
133 | " 'Berber', 'Tuhafiyeci', 'Tornacı'], dtype=object)"
134 | ]
135 | },
136 | "execution_count": 7,
137 | "metadata": {},
138 | "output_type": "execute_result"
139 | }
140 | ],
141 | "source": [
142 | "df['meslek'].unique()"
143 | ]
144 | },
145 | {
146 | "cell_type": "markdown",
147 | "metadata": {},
148 | "source": [
149 | "## nan değeri Bilinmiyor ile dolduralım"
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": 8,
155 | "metadata": {},
156 | "outputs": [],
157 | "source": [
158 | "df['meslek'] = df['meslek'].map({np.nan: 'Bilinmiyor'})"
159 | ]
160 | },
161 | {
162 | "cell_type": "code",
163 | "execution_count": 9,
164 | "metadata": {},
165 | "outputs": [
166 | {
167 | "data": {
168 | "text/plain": [
169 | "array([nan, 'Bilinmiyor'], dtype=object)"
170 | ]
171 | },
172 | "execution_count": 9,
173 | "metadata": {},
174 | "output_type": "execute_result"
175 | }
176 | ],
177 | "source": [
178 | "df['meslek'].unique()"
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "execution_count": 10,
184 | "metadata": {},
185 | "outputs": [
186 | {
187 | "data": {
188 | "text/html": [
189 | "\n",
190 | "\n",
203 | "
\n",
204 | " \n",
205 | " \n",
206 | " | \n",
207 | " sirano | \n",
208 | " isim | \n",
209 | " yas | \n",
210 | " meslek | \n",
211 | " sehir | \n",
212 | " aylik_gelir | \n",
213 | "
\n",
214 | " \n",
215 | " \n",
216 | " \n",
217 | " 0 | \n",
218 | " 1 | \n",
219 | " Cemal | \n",
220 | " 35 | \n",
221 | " NaN | \n",
222 | " Ankara | \n",
223 | " 3500 | \n",
224 | "
\n",
225 | " \n",
226 | " 1 | \n",
227 | " 2 | \n",
228 | " Ceyda | \n",
229 | " 42 | \n",
230 | " NaN | \n",
231 | " Kayseri | \n",
232 | " 4200 | \n",
233 | "
\n",
234 | " \n",
235 | " 2 | \n",
236 | " 3 | \n",
237 | " Timur | \n",
238 | " 30 | \n",
239 | " NaN | \n",
240 | " Istanbul | \n",
241 | " 9000 | \n",
242 | "
\n",
243 | " \n",
244 | " 3 | \n",
245 | " 4 | \n",
246 | " Burcu | \n",
247 | " 29 | \n",
248 | " NaN | \n",
249 | " Ankara | \n",
250 | " 4200 | \n",
251 | "
\n",
252 | " \n",
253 | " 4 | \n",
254 | " 5 | \n",
255 | " Yasemin | \n",
256 | " 23 | \n",
257 | " Bilinmiyor | \n",
258 | " Bursa | \n",
259 | " 4800 | \n",
260 | "
\n",
261 | " \n",
262 | "
\n",
263 | "
"
264 | ],
265 | "text/plain": [
266 | " sirano isim yas meslek sehir aylik_gelir\n",
267 | "0 1 Cemal 35 NaN Ankara 3500\n",
268 | "1 2 Ceyda 42 NaN Kayseri 4200\n",
269 | "2 3 Timur 30 NaN Istanbul 9000\n",
270 | "3 4 Burcu 29 NaN Ankara 4200\n",
271 | "4 5 Yasemin 23 Bilinmiyor Bursa 4800"
272 | ]
273 | },
274 | "execution_count": 10,
275 | "metadata": {},
276 | "output_type": "execute_result"
277 | }
278 | ],
279 | "source": [
280 | "df.head()"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": 11,
286 | "metadata": {},
287 | "outputs": [],
288 | "source": [
289 | "# Bütün olası kategoriler için eşlemek gerektiğinden daha az sayıda kategoride\n",
290 | "# veya binary kategorilerde kullanmak daha uygun"
291 | ]
292 | },
293 | {
294 | "cell_type": "markdown",
295 | "metadata": {},
296 | "source": [
297 | "# Kategorik nitelikleri seçmek"
298 | ]
299 | },
300 | {
301 | "cell_type": "code",
302 | "execution_count": 12,
303 | "metadata": {},
304 | "outputs": [
305 | {
306 | "data": {
307 | "text/plain": [
308 | "Index(['isim', 'meslek', 'sehir'], dtype='object')"
309 | ]
310 | },
311 | "execution_count": 12,
312 | "metadata": {},
313 | "output_type": "execute_result"
314 | }
315 | ],
316 | "source": [
317 | "df.select_dtypes(['object']).columns"
318 | ]
319 | },
320 | {
321 | "cell_type": "code",
322 | "execution_count": null,
323 | "metadata": {},
324 | "outputs": [],
325 | "source": []
326 | }
327 | ],
328 | "metadata": {
329 | "kernelspec": {
330 | "display_name": "Python 3",
331 | "language": "python",
332 | "name": "python3"
333 | },
334 | "language_info": {
335 | "codemirror_mode": {
336 | "name": "ipython",
337 | "version": 3
338 | },
339 | "file_extension": ".py",
340 | "mimetype": "text/x-python",
341 | "name": "python",
342 | "nbconvert_exporter": "python",
343 | "pygments_lexer": "ipython3",
344 | "version": "3.6.8"
345 | }
346 | },
347 | "nbformat": 4,
348 | "nbformat_minor": 2
349 | }
350 |
--------------------------------------------------------------------------------
/02_pandas/11_postgresql_db_connection.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# PostgreSQL"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 44,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "# Uygulama Docker üzerinde çalışan bir postgresql veri tabanı ile yapılmıştır\n",
17 | "# Kurulum dosyası bu dizinde Docker-Toolbox ve PostgreSQL-Kurulumu.txt içindedir."
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 1,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "# cmd üzerinden pip install psycopg2 ile paket yüklenir.\n",
27 | "import psycopg2"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 2,
33 | "metadata": {},
34 | "outputs": [],
35 | "source": [
36 | "# ip numarasını docker-machine ls ile öğrenebiliriz\n",
37 | "conn = psycopg2.connect(host=\"192.168.99.107\",database=\"spark\", user=\"postgres\", password=\"postgres\")"
38 | ]
39 | },
40 | {
41 | "cell_type": "markdown",
42 | "metadata": {},
43 | "source": [
44 | "# fetchall()"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": 3,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "cur = conn.cursor()"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": 4,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "cur.execute(\"SELECT * FROM advertising limit 20\")"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": 5,
68 | "metadata": {},
69 | "outputs": [
70 | {
71 | "data": {
72 | "text/plain": [
73 | "20"
74 | ]
75 | },
76 | "execution_count": 5,
77 | "metadata": {},
78 | "output_type": "execute_result"
79 | }
80 | ],
81 | "source": [
82 | "cur.rowcount"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": 6,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "records = cur.fetchall()"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": 7,
97 | "metadata": {},
98 | "outputs": [
99 | {
100 | "data": {
101 | "text/plain": [
102 | "list"
103 | ]
104 | },
105 | "execution_count": 7,
106 | "metadata": {},
107 | "output_type": "execute_result"
108 | }
109 | ],
110 | "source": [
111 | "type(records)"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 8,
117 | "metadata": {},
118 | "outputs": [
119 | {
120 | "data": {
121 | "text/plain": [
122 | "[(1, 230.1, 37.8, 69.2, 22.1),\n",
123 | " (2, 44.5, 39.3, 45.1, 10.4),\n",
124 | " (3, 17.2, 45.9, 69.3, 9.3),\n",
125 | " (4, 151.5, 41.3, 58.5, 18.5),\n",
126 | " (5, 180.8, 10.8, 58.4, 12.9),\n",
127 | " (6, 8.7, 48.9, 75.0, 7.2),\n",
128 | " (7, 57.5, 32.8, 23.5, 11.8),\n",
129 | " (8, 120.2, 19.6, 11.6, 13.2),\n",
130 | " (9, 8.6, 2.1, 1.0, 4.8),\n",
131 | " (10, 199.8, 2.6, 21.2, 10.6),\n",
132 | " (11, 66.1, 5.8, 24.2, 8.6),\n",
133 | " (12, 214.7, 24.0, 4.0, 17.4),\n",
134 | " (13, 23.8, 35.1, 65.9, 9.2),\n",
135 | " (14, 97.5, 7.6, 7.2, 9.7),\n",
136 | " (15, 204.1, 32.9, 46.0, 19.0),\n",
137 | " (16, 195.4, 47.7, 52.9, 22.4),\n",
138 | " (17, 67.8, 36.6, 114.0, 12.5),\n",
139 | " (18, 281.4, 39.6, 55.8, 24.4),\n",
140 | " (19, 69.2, 20.5, 18.3, 11.3),\n",
141 | " (20, 147.3, 23.9, 19.1, 14.6)]"
142 | ]
143 | },
144 | "execution_count": 8,
145 | "metadata": {},
146 | "output_type": "execute_result"
147 | }
148 | ],
149 | "source": [
150 | "records"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 9,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "import pandas as pd"
160 | ]
161 | },
162 | {
163 | "cell_type": "code",
164 | "execution_count": 10,
165 | "metadata": {},
166 | "outputs": [
167 | {
168 | "data": {
169 | "text/plain": [
170 | "Index(['ID', 'TV', 'Radio', 'Newspaper', 'Sales'], dtype='object')"
171 | ]
172 | },
173 | "execution_count": 10,
174 | "metadata": {},
175 | "output_type": "execute_result"
176 | }
177 | ],
178 | "source": [
 179 | "# Sütun isimlerini öğrenip veri tabanından okunan veride kullanmak için\n",
180 | "df_csv = pd.read_csv(\"D:/Datasets/Advertising.csv\")\n",
181 | "df_csv.columns"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": 11,
187 | "metadata": {},
188 | "outputs": [],
189 | "source": [
190 | "df = pd.DataFrame(records, columns=df_csv.columns)"
191 | ]
192 | },
193 | {
194 | "cell_type": "code",
195 | "execution_count": 12,
196 | "metadata": {},
197 | "outputs": [
198 | {
199 | "data": {
200 | "text/html": [
201 | "\n",
202 | "\n",
215 | "
\n",
216 | " \n",
217 | " \n",
218 | " | \n",
219 | " ID | \n",
220 | " TV | \n",
221 | " Radio | \n",
222 | " Newspaper | \n",
223 | " Sales | \n",
224 | "
\n",
225 | " \n",
226 | " \n",
227 | " \n",
228 | " 0 | \n",
229 | " 1 | \n",
230 | " 230.1 | \n",
231 | " 37.8 | \n",
232 | " 69.2 | \n",
233 | " 22.1 | \n",
234 | "
\n",
235 | " \n",
236 | " 1 | \n",
237 | " 2 | \n",
238 | " 44.5 | \n",
239 | " 39.3 | \n",
240 | " 45.1 | \n",
241 | " 10.4 | \n",
242 | "
\n",
243 | " \n",
244 | " 2 | \n",
245 | " 3 | \n",
246 | " 17.2 | \n",
247 | " 45.9 | \n",
248 | " 69.3 | \n",
249 | " 9.3 | \n",
250 | "
\n",
251 | " \n",
252 | " 3 | \n",
253 | " 4 | \n",
254 | " 151.5 | \n",
255 | " 41.3 | \n",
256 | " 58.5 | \n",
257 | " 18.5 | \n",
258 | "
\n",
259 | " \n",
260 | " 4 | \n",
261 | " 5 | \n",
262 | " 180.8 | \n",
263 | " 10.8 | \n",
264 | " 58.4 | \n",
265 | " 12.9 | \n",
266 | "
\n",
267 | " \n",
268 | "
\n",
269 | "
"
270 | ],
271 | "text/plain": [
272 | " ID TV Radio Newspaper Sales\n",
273 | "0 1 230.1 37.8 69.2 22.1\n",
274 | "1 2 44.5 39.3 45.1 10.4\n",
275 | "2 3 17.2 45.9 69.3 9.3\n",
276 | "3 4 151.5 41.3 58.5 18.5\n",
277 | "4 5 180.8 10.8 58.4 12.9"
278 | ]
279 | },
280 | "execution_count": 12,
281 | "metadata": {},
282 | "output_type": "execute_result"
283 | }
284 | ],
285 | "source": [
286 | "df.head()"
287 | ]
288 | },
289 | {
290 | "cell_type": "code",
291 | "execution_count": 24,
292 | "metadata": {},
293 | "outputs": [
294 | {
295 | "data": {
296 | "text/plain": [
297 | "Index(['ID', 'TV', 'Radio', 'Newspaper', 'Sales'], dtype='object')"
298 | ]
299 | },
300 | "execution_count": 24,
301 | "metadata": {},
302 | "output_type": "execute_result"
303 | }
304 | ],
305 | "source": [
306 | "df.columns"
307 | ]
308 | },
309 | {
310 | "cell_type": "code",
311 | "execution_count": 26,
312 | "metadata": {},
313 | "outputs": [],
314 | "source": [
315 | "cur.close()\n",
316 | "conn.close()"
317 | ]
318 | },
319 | {
320 | "cell_type": "code",
321 | "execution_count": null,
322 | "metadata": {},
323 | "outputs": [],
324 | "source": []
325 | },
326 | {
327 | "cell_type": "code",
328 | "execution_count": null,
329 | "metadata": {},
330 | "outputs": [],
331 | "source": []
332 | }
333 | ],
334 | "metadata": {
335 | "kernelspec": {
336 | "display_name": "Python 3",
337 | "language": "python",
338 | "name": "python3"
339 | },
340 | "language_info": {
341 | "codemirror_mode": {
342 | "name": "ipython",
343 | "version": 3
344 | },
345 | "file_extension": ".py",
346 | "mimetype": "text/x-python",
347 | "name": "python",
348 | "nbconvert_exporter": "python",
349 | "pygments_lexer": "ipython3",
350 | "version": "3.6.8"
351 | }
352 | },
353 | "nbformat": 4,
354 | "nbformat_minor": 2
355 | }
356 |
--------------------------------------------------------------------------------
/02_pandas/Docker-Toolbox ve PostgreSQL-Kurulumu.txt:
--------------------------------------------------------------------------------
1 |
2 | Docker Toolbox Kurulum videosu: https://www.youtube.com/watch?v=QqR10kxTPoM
3 |
4 | 1. Windows 7 ve üzeri kullanıyor olmalısınız. İşletim sistemi 64 bit olmalıdır.
5 |
6 | 2. Windows 10 Pro, Enterprise ve Education kullanmıyorsanız ve bu sürümleri kullandığınız halde Oracle Virtualbox kullanmaya devam etmek istiyorsanız
7 | DockerToolbox kurmanız gerekir.
8 |
9 | 3. Ctrl+Alt+delete tuşlarına aynı anda basarak Taskmanager -> Performance sekmesine ulaşın
10 | sağ altta Virtualization Enabled olmalıdır.
11 |
12 | 4. https://github.com/docker/toolbox/releases
13 | adresinden Assets bölümünden DockerToolbox-18.09.3.exe
14 | dosyasını indirin (217 MB)
15 |
16 | 5. DockerToolbox'ı kurun.
17 | Docker-compose hariç diğer seçenekleri kaldırın.
18 | (Virtualbox kurulu değilse onu seçebilirsiniz)
19 | Docker Quickstart Terminal Masaüstünde Mavi renkte bulunacaktır.
20 | Onu Çift tıklayarak docker-machine oluşturalım
21 |
22 | 6. Terminali açın (Örneğin gitbash)
23 |
 24 | 7. Terminalde docker-machine ls komutunu yazın
25 | Herhangi bir running docker-machine yok ise
26 | aşağıdaki komutu çalıştırın.
27 |
28 | 8. Terminalde
29 |
30 | docker-machine create default
31 |
32 | komutunu çalıştırın
33 |
34 | Beklenen ekran çıktıları:
35 | Running pre-create checks...
36 | Creating machine...
37 | (default) Copying C:\Users\user\.docker\machine\cache\boot2docker.iso to C:\Users\user\.docker\machine\machines\default\boot2docker.iso...
38 | (default) Creating VirtualBox VM...
39 | (default) Creating SSH key...
40 | (default) Starting the VM...
41 | (default) Check network to re-create if needed...
42 | (default) Windows might ask for the permission to configure a dhcp server. Sometimes, such confirmation window is minimized in the taskbar.
43 | (default) Waiting for an IP...
44 | Waiting for machine to be running, this may take a few minutes...
45 | Detecting operating system of created instance...
46 | Waiting for SSH to be available...
47 | Detecting the provisioner...
48 | Provisioning with boot2docker...
49 | Copying certs to the local machine directory...
50 | Copying certs to the remote machine...
51 | Setting Docker configuration on the remote daemon...
52 | Checking connection to Docker...
53 | Docker is up and running!
54 | To see how to connect your Docker Client to the Docker Engine running on this virtual machine, run: D:\Program Files\Docker Toolbox\docker-machine.exe env default
55 |
56 | 9. Şu iki komutu çalıştırın
57 |
58 | $ docker-machine env
59 |
60 | $ eval $("C:\Program Files\Docker Toolbox\docker-machine.exe" env)
61 | Not: Yukarıdaki komut sizin ekranınızda en alt satırda bulunur. Onu kopyalayıp yapıştırın.
62 |
63 | 10. docker ps
64 | komutu ile mevcut container'ları listeleyin (Yeni yüklediğimiz için sonuç boş olmalıdır)
65 |
66 | 11. POSTGRESQL Docker Kurulumu
67 | docker run -p 5432:5432 -d \
68 | -e POSTGRES_PASSWORD=postgres \
69 | -e POSTGRES_USER=postgres \
70 | -e POSTGRES_DB=spark \
71 | -v pgdata:/var/lib/postgresql/data \
72 | postgres
73 |
74 |
75 | 12. docker ps ile kontrol edelim
76 | $ docker ps
77 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
78 | d25235bc5eb2 postgres "docker-entrypoint.s…" 14 seconds ago Up 14 seconds 0.0.0.0:5432->5432/tcp nervous_leavitt
79 |
80 | 13. postgresql makinesine bağlanma
81 | $ winpty docker.exe exec -it d25235bc5eb2 psql -U postgres spark
82 |
83 | Beklenen çıktı:
84 | psql (11.2 (Debian 11.2-1.pgdg90+1))
85 | Type "help" for help.
86 |
87 | spark=#
88 |
89 | 14. \q
90 | komutu ile postgres shell'den çıkalım.
91 |
92 | CSV DOSYASINDAN POSTGRESQL VERİ TABANINA VERİ YAZMAK
93 | ====================================================
94 |
95 | 1. Docker çalıştırma
96 | docker-machine start default
97 | docker-machine env
98 | eval $("C:\Program Files\Docker Toolbox\docker-machine.exe" env)
99 |
100 | 2. Postgre Sql container çalıştırma
101 | docker container start ceae3953f0a7
102 |
103 | container id öğrenmek için
104 | docker container ls -a
105 |
106 | 3. pip install psycopg2
107 |
108 |
109 | 4. postgres kullanıcısı ile postgres shelle bağlanma
110 | winpty docker.exe exec -it cea psql -U postgres
111 |
112 | 5. Veri tabanlarını listeleme
113 | postgres-# \l
114 | spark veri tabanını seç
115 | postgres=# \c spark
116 | spark veri tabanı tablolarını listele
117 | spark=# \dt
118 | List of relations
119 | Schema | Name | Type | Owner
120 | --------+-------------+-------+----------
121 | public | simple_data | table | postgres
122 | (1 row)
123 |
124 |
125 | 6. Advertising verisine uygun tablo yaratma
126 | CREATE TABLE public.advertising (
127 | id int4 NULL,
128 | tv float8 NULL,
129 | radio float8 NULL,
130 | newspaper float8 NULL,
131 | sales float8 NULL
132 | );
133 |
134 |
135 | 7. Tablo oluşmuş mu?
136 | spark=# \dt
137 | List of relations
138 | Schema | Name | Type | Owner
139 | --------+-------------+-------+----------
140 | public | advertising | table | postgres
141 | public | simple_data | table | postgres
142 | (2 rows)
143 |
144 | 8. Ana makineden postgres container içine csv dosyası kopyalama
145 | Ana makineden postgresql'e dosya kopyalama
146 | user@DESKTOP-RL2HHBV MINGW64
147 | $ docker cp Advertising.csv ceae3953f0a7:/Advertising.csv
148 |
149 |
150 | 9. container içindeki csv dosyayı spark veri tabanına aktarma
151 |
152 | spark=# COPY advertising FROM '/Advertising.csv' DELIMITERS ',' CSV HEADER;
153 |
154 | sonuç:
155 | COPY 200
156 |
157 | 10. Buradan sonra Jupyter Notebook ile devam edebilirsiniz.
--------------------------------------------------------------------------------
/02_pandas/Pandas_Kaynaklar.txt:
--------------------------------------------------------------------------------
1 | 1.
2 | Data School - top 25 pandas tricks
3 | https://www.youtube.com/watch?v=RlIiVeig3hc
4 |
5 | 2.
--------------------------------------------------------------------------------
/02_pandas/simple_data.csv:
--------------------------------------------------------------------------------
1 | sirano;isim;yas;meslek;sehir;aylik_gelir
2 | 1;Cemal;35;Isci;Ankara;3500
3 | 2;Ceyda;42;Memur;Kayseri;4200
4 | 3;Timur;30;Müzisyen;Istanbul;9000
5 | 4;Burcu;29;Pazarlamaci;Ankara;4200
6 | 5;Yasemin;23;;Bursa;4800
7 | 6;Ali;33;Memur;Ankara;4250
8 | 7;Dilek;29;Pazarlamaci;Istanbul;7300
9 | 8;Murat;31;Müzisyen;Istanbul;12000
10 | 9;Ahmet;33;Doktor;Ankara;18000
11 | 10;Muhittin;46;Berber;Istanbul;12000
12 | 11;Hicaziye;47;Tuhafiyeci;Ankara;4800
13 | 12;Harun;43;Tornacı;Ankara;4200
14 | 13;Hakkı;33;Memur;Çorum;3750
15 | 14;Gülizar;37;Doktor;İzmir;14250
16 | 15;Şehmuz;41;;Ankara;8700
17 | 16;Gençay;46;Berber;Ankara;8800
18 | 16;Gençay;46;Berber;Ankara;8800
--------------------------------------------------------------------------------
/03_numpy/04_numpy_filter_and_query.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np"
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "# Tek boyutlu array içindeki max değer"
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": 2,
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "arr = np.array([11, 12, 13, 14, 15, 16, 17, 15, 11, 12, 14, 15, 16, 17])"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 3,
31 | "metadata": {},
32 | "outputs": [
33 | {
34 | "data": {
35 | "text/plain": [
36 | "17"
37 | ]
38 | },
39 | "execution_count": 3,
40 | "metadata": {},
41 | "output_type": "execute_result"
42 | }
43 | ],
44 | "source": [
45 | "max(arr)"
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": 4,
51 | "metadata": {},
52 | "outputs": [
53 | {
54 | "data": {
55 | "text/plain": [
56 | "17"
57 | ]
58 | },
59 | "execution_count": 4,
60 | "metadata": {},
61 | "output_type": "execute_result"
62 | }
63 | ],
64 | "source": [
65 | "np.amax(arr)"
66 | ]
67 | },
68 | {
69 | "cell_type": "markdown",
70 | "metadata": {},
71 | "source": [
72 | "# Max değere ait indeks"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": 5,
78 | "metadata": {},
79 | "outputs": [
80 | {
81 | "data": {
82 | "text/plain": [
83 | "(array([ 6, 13], dtype=int64),)"
84 | ]
85 | },
86 | "execution_count": 5,
87 | "metadata": {},
88 | "output_type": "execute_result"
89 | }
90 | ],
91 | "source": [
92 | "# maksimum değere sahip elemanların indeksleri\n",
93 | "result = np.where(arr == np.amax(arr))\n",
94 | "result"
95 | ]
96 | },
97 | {
98 | "cell_type": "code",
99 | "execution_count": 6,
100 | "metadata": {},
101 | "outputs": [
102 | {
103 | "data": {
104 | "text/plain": [
105 | "[6, 13]"
106 | ]
107 | },
108 | "execution_count": 6,
109 | "metadata": {},
110 | "output_type": "execute_result"
111 | }
112 | ],
113 | "source": [
114 | "# Bu indekslerin python listesi hali\n",
115 | "result_list = list(result[0])\n",
116 | "result_list"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 7,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "data": {
126 | "text/plain": [
127 | "6"
128 | ]
129 | },
130 | "execution_count": 7,
131 | "metadata": {},
132 | "output_type": "execute_result"
133 | }
134 | ],
135 | "source": [
136 |     "# Dönen indekslerin ilki\n",
137 | "result_list[0]"
138 | ]
139 | },
140 | {
141 | "cell_type": "markdown",
142 | "metadata": {},
143 | "source": [
144 | "# İki boyutlu array içindeki max değer ve indeksi"
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": 8,
150 | "metadata": {},
151 | "outputs": [],
152 | "source": [
153 | "arr2D = np.array([[11, 12, 13],\n",
154 | " [14, 15, 16],\n",
155 | " [17, 15, 11],\n",
156 | " [12, 14, 15]])"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": 9,
162 | "metadata": {},
163 | "outputs": [
164 | {
165 | "data": {
166 | "text/plain": [
167 | "17"
168 | ]
169 | },
170 | "execution_count": 9,
171 | "metadata": {},
172 | "output_type": "execute_result"
173 | }
174 | ],
175 | "source": [
176 | "max_value = np.amax(arr2D)\n",
177 | "max_value"
178 | ]
179 | },
180 | {
181 | "cell_type": "markdown",
182 | "metadata": {},
183 | "source": [
184 | "# her bir sütun için max değer"
185 | ]
186 | },
187 | {
188 | "cell_type": "code",
189 | "execution_count": 10,
190 | "metadata": {},
191 | "outputs": [
192 | {
193 | "name": "stdout",
194 | "output_type": "stream",
195 | "text": [
196 | "Max value of every column: [17 15 16]\n"
197 | ]
198 | }
199 | ],
200 | "source": [
201 | "maxInColumns = np.amax(arr2D, axis=0)\n",
202 | " \n",
203 | "print('Max value of every column: ', maxInColumns)"
204 | ]
205 | },
206 | {
207 | "cell_type": "markdown",
208 | "metadata": {},
209 | "source": [
210 | "# her bir satır için max değer"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 11,
216 | "metadata": {},
217 | "outputs": [
218 | {
219 | "name": "stdout",
220 | "output_type": "stream",
221 | "text": [
222 | "Max value of every Row: [13 16 17 15]\n"
223 | ]
224 | }
225 | ],
226 | "source": [
227 | "maxInRows = np.amax(arr2D, axis=1)\n",
228 | " \n",
229 | "print('Max value of every Row: ', maxInRows)"
230 | ]
231 | },
232 | {
233 | "cell_type": "markdown",
234 | "metadata": {},
235 | "source": [
236 | "# max değerlerin indeksleri"
237 | ]
238 | },
239 | {
240 | "cell_type": "code",
241 | "execution_count": 12,
242 | "metadata": {},
243 | "outputs": [
244 | {
245 | "name": "stdout",
246 | "output_type": "stream",
247 | "text": [
248 | "Tuple of arrays returned : (array([2], dtype=int64), array([0], dtype=int64))\n"
249 | ]
250 | }
251 | ],
252 | "source": [
253 | "# Find index of maximum value from 2D numpy array\n",
254 | "result2D = np.where(arr2D == np.amax(arr2D))\n",
255 | " \n",
256 | "print('Tuple of arrays returned : ', result2D)"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 13,
262 | "metadata": {},
263 | "outputs": [
264 | {
265 | "name": "stdout",
266 | "output_type": "stream",
267 | "text": [
268 | "List of coordinates of maximum value in Numpy array : \n",
269 | "(2, 0)\n"
270 | ]
271 | }
272 | ],
273 | "source": [
274 | "print('List of coordinates of maximum value in Numpy array : ')\n",
275 | "\n",
276 | "# zip the 2 arrays to get the exact coordinates\n",
277 | "listOfCordinates = list(zip(result2D[0], result2D[1]))\n",
278 |     "# traverse over the list of coordinates\n",
279 | "for cord in listOfCordinates:\n",
280 | " print(cord)"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": null,
286 | "metadata": {},
287 | "outputs": [],
288 | "source": [
289 | "# En büyük değer 2. satır, 0. sütunda "
290 | ]
291 | }
292 | ],
293 | "metadata": {
294 | "kernelspec": {
295 | "display_name": "Python 3",
296 | "language": "python",
297 | "name": "python3"
298 | },
299 | "language_info": {
300 | "codemirror_mode": {
301 | "name": "ipython",
302 | "version": 3
303 | },
304 | "file_extension": ".py",
305 | "mimetype": "text/x-python",
306 | "name": "python",
307 | "nbconvert_exporter": "python",
308 | "pygments_lexer": "ipython3",
309 | "version": "3.6.8"
310 | }
311 | },
312 | "nbformat": 4,
313 | "nbformat_minor": 2
314 | }
315 |
--------------------------------------------------------------------------------
/04_matplotlib/04.matplotlib boxplot.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# BOXPLOT"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 |     " Kutu grafiği (kutu ve bıyık grafiği olarak da adlandırılır), beş sayılık özete dayalı olarak değerlerin dağılımını göstermenin bir yoludur: minimum, ilk çeyrek, medyan, üçüncü çeyrek ve maksimum."
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 |     "![boxplot](boxplot_01.png)"
22 | ]
23 | },
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {},
27 | "source": [
28 |     "![boxplot normal dağılım](boxplot_normal_dagilim.png)"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 1,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "import numpy as np\n",
38 | "import matplotlib.pyplot as plt\n",
39 | "%matplotlib inline"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 2,
45 | "metadata": {},
46 | "outputs": [
47 | {
48 | "data": {
49 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAe4AAAHSCAYAAAAqryiAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAVHElEQVR4nO3df6zd913f8dcb2yi0tJA2twwoXtjUZUYWv3aH2jX8SEunjHZlMCbVG9Bt1qxJm0f3QxvMYh1CloZA0yr4g1k4pNvayyqgGutUaMRMM6O26KYU5uJCxY90AYbNEiilKjjpe3/4tnWc68Rtc873vu3HQ7LuOd/zzfm8FcV65vvjnFvdHQBghs9YegAA4PoJNwAMItwAMIhwA8Agwg0Agwg3AAyyf+kBrsdtt93Wt99++9JjAMBaPPDAA7/f3Ru7vTYi3Lfffnu2t7eXHgMA1qKqHrzWa06VA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3MATbG1t5fDhw9m3b18OHz6cra2tpUcCduxfegBgb9na2sqJEydy+vTp3HnnnTl79myOHj2aJDly5MjC0wHV3UvP8JQ2Nzd7e3t76THgpnD48OH84A/+YO66666Pbztz5kyOHz+ec+fOLTgZ3Dyq6oHu3tz1NeEGrrRv37585CMfyYEDBz6+7dKlS7nlllvy2GOPLTgZ3DyeLNyucQOPc+jQoZw9e/Zx286ePZtDhw4tNBFwJeEGHufEiRM5evRozpw5k0uXLuXMmTM5evRoTpw4sfRoQNycBlzlYzegHT9+POfPn8+hQ4dy8uRJN6bBHuEaNwDsMa5xA8ANQrgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGGRl4a6qe6rqQlWdu2Lb91bVL1fVe6rqbVX1BataHwBuRKs84r43yd1Xbfv+7v7S7v7yJG9J8m9WuD4A3HBWFu7uvj/Jw1dt++AVT5+ZpFe1PgDciPave8GqOpnk25P8YZK71r0+AEy29pvTuvtEd39Rkjck+cfX2q+qjlXVdlVtX7x4cX0DAsAetuRd5W9M8jev9WJ3n+ruze7e3NjYWONYALB3rTXcVfWCK56+Msn71rk+AEy3smvcVbWV5OuS3FZVDyV5bZJvqKo7knw0yYNJ/uGq1geAG9HKwt3dR3bZfHpV6wHAzcA3pwHAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDrCzcVXVPVV2oqnNXbPv+qnpfVf1yVb25qj53VesDwI1olUfc9ya5+6pt9yU53N1fmuTXknzXCtcHgBvOysLd3fcnefiqbW/r7kd3nr4zyfNXtT4A3IiWvMb995O8dcH1AWCcRcJdVSeSPJrkDU+yz7Gq2q6q7YsXL65vOADYw9Ye7qp6dZJXJPk73d3X2q+7T3X3ZndvbmxsrG9AANjD9q9zsaq6O8m/SvK13f3hda4NADeCVX4cbCvJO5LcUVUPVdXRJD+U5FlJ7quq91TVD69qfQC4Ea3siLu7j+yy+fSq1gOAm4FvTgOAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEFWFu6quqeqLlTVuSu2/a2qem9VfbSqNle1NgDcqFZ5xH1vkruv2nYuyTcnuX+F6wLADWv/qt64u++vqtuv2nY+SapqVcsCwA3NNW4AG
GRlR9yfrqo6luRYkhw8eHDhaWCevXRmq7uXHgFuGHs23N19KsmpJNnc3PS3Hj5JT0csq0p0YY9xqhwABlnlx8G2krwjyR1V9VBVHa2qb6qqh5K8KMn/qKqfWdX6AHAjWuVd5Ueu8dKbV7UmANzonCoHgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABllZuKvqnqq6UFXnrtj2nKq6r6rev/Pz1lWtDwA3olUecd+b5O6rtn1nkp/t7hck+dmd5wDAdVpZuLv7/iQPX7X5G5O8fufx65P8jVWtDwA3onVf4/687v7dJNn5+bxr7VhVx6pqu6q2L168uLYBAWAv27M3p3X3qe7e7O7NjY2NpccBgD1h3eH+var6/CTZ+XlhzesDwGjrDvdPJXn1zuNXJ/lva14fAEZb5cfBtpK8I8kdVfVQVR1N8u+SvKyq3p/kZTvPAYDrtH9Vb9zdR67x0ktXtSYA3Oj27M1pAMATCTcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0AgywS7qr6jqo6V1XvrarXLDEDAEy09nBX1eEk/yDJVyX5siSvqKoXrHsOAJhoiSPuQ0ne2d0f7u5Hk7w9yTctMAcAjLNEuM8l+Zqqem5VPSPJNyT5ogXmAIBx9q97we4+X1Xfl+S+JB9K8ktJHr16v6o6luRYkhw8eHCtMwLAXrXIzWndfbq7v7K7vybJw0nev8s+p7p7s7s3NzY21j8kAOxBaz/iTpKqel53X6iqg0m+OcmLlpgDAKZZJNxJfqKqnpvkUpJ/1N2PLDQHAIyySLi7+6uXWBcApvPNaQAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMMgi4a6qf1pV762qc1W1VVW3LDEHAEyz9nBX1Rcm+SdJNrv7cJJ9SV617jkAYKKlTpXvT/JZVbU/yTOS/M5CcwDAKGsPd3f/dpIfSPKBJL+b5A+7+23rngMAJlriVPmtSb4xyRcn+YIkz6yqb91lv2NVtV1V2xcvXlz3mACwJy1xqvzrk/xmd1/s7ktJfjLJX7l6p+4+1d2b3b25sbGx9iEBYC9aItwfSPLCqnpGVVWSlyY5v8AcADDOEte435Xkx5O8O8n/3pnh1LrnAICJnjLcVfV5VXW6qt668/xLqurop7Nod7+2u/9idx/u7m/r7j/5dN4PAG4W13PEfW+Sn8nlG8mS5NeSvGZVAwEA13Y94b6tu9+U5KNJ0t2PJnlspVMBALu6nnD/cVU9N0knSVW9MMkfrnQqAGBX+69jn3+W5KeS/Pmq+vkkG0m+ZaVTAQC7espwd/e7q+prk9yRpJL86s7nrwGANXvKcFfVt1+16SurKt39n1Y0EwBwDddzqvwvX/H4llz+wpR3JxFuAFiz6zlVfvzK51X1OUn+88omAgCu6VP55rQPJ3nB0z0IAPDUruca93/PzkfBcjn0X5LkTascCgDY3fVc4/6BKx4/muTB7n5oRfMAO57znOfkkUceWXqMXP5dQMu69dZb8/DDDy89BuwJ13ON++3rGAR4vEceeSTd/dQ73gT2wv88wF5xzXBX1R/lE6fIH/dSku7uZ69sKgBgV9cMd3c/a52DAABP7XqucSdJqup5ufw57iRJd39gJRMBANd0Pb+P+5VV9f4kv5nk7Ul+K8lbVzwXALCL6/kc9/cmeWGSX+vuL87lb077+ZVOBQDs6nrCfam7/1+Sz6iqz+juM0m+fMVzAQC7uJ5r3H9QVZ+d5H8leUNVXcjlz3MDA
Gt2zSPuqvqhqnpxkm/M5a85fU2Sn07y60n++nrGAwCu9GRH3O/P5W9N+/wk/zXJVne/fi1TAQC7uuYRd3e/rrtflORrkzyc5Eer6nxVfXdV/YW1TQgAfNxT3pzW3Q929/d191ck+dtJvjnJ+U91waq6o6rec8WfD1bVaz7V9wOAm8n1/HawA0nuTvKqXP4o2NuTfM+numB3/2p27kqvqn1JfjvJmz/V9wOAm8mTfVf5y5IcSfLyJL+Q5MeSHOvuP34a139pkl/v7gefxvcEgBvWkx1x/+skb0zyL7p7Vb9P71VJtlb03gBww3myXzJy1yoXrqrPTPLKJN91jdePJTmWJAcPHlzlKAAwxvV8c9qq/LUk7+7u39vtxe4+1d2b3b25sbGx5tEAYG9aMtxH4jQ5AHxSFgl3VT0jycuS/OQS6wPAVNf9+7ifTt394STPXWJtAJhsyVPlAMAnSbgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGGSRcFfV51bVj1fV+6rqfFW9aIk5AGCa/Qut+7okP93d31JVn5nkGQvNAQCjrD3cVfXsJF+T5O8mSXf/aZI/XfccADDREqfK/1ySi0l+tKp+sap+pKqeucAcADDOEqfK9yf5yiTHu/tdVfW6JN+Z5Luv3KmqjiU5liQHDx5c+5CwtH7ts5N/+zlLj7En9GufvfQIsGdUd693wao/k+Sd3X37zvOvTvKd3f3ya/0zm5ubvb29vaYJYW+oqqz77+de5d8FN5uqeqC7N3d7be2nyrv7/yb5P1V1x86mlyb5lXXPAQATLXVX+fEkb9i5o/w3kvy9heYAgFEWCXd3vyfJrqcAAIBr881pADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCD7l1i0qn4ryR8leSzJo929ucQcADDNIuHecVd3//6C6wPAOE6VA8AgSx1xd5K3VVUn+Y/dferqHarqWJJjSXLw4ME1jwd7Q1UtPcKecOutty49AuwZS4X7xd39O1X1vCT3VdX7uvv+K3fYifmpJNnc3OwlhoQldS//n31V7Yk5gE9Y5FR5d//Ozs8LSd6c5KuWmAMApll7uKvqmVX1rI89TvJXk5xb9xwAMNESp8o/L8mbd67d7U/yxu7+6QXmAIBx1h7u7v6NJF+27nUB4Ebg42AAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADDIYuGuqn1V9YtV9ZalZgCAaZY84v6OJOcXXB8Axlkk3FX1/CQvT/IjS6wPAFMtdcT9H5L8yyQfXWh9ABhp7eGuqlckudDdDzzFfseqaruqti9evLim6QBgb1viiPvFSV5ZVb+V5MeSvKSq/svVO3X3qe7e7O7NjY2Ndc8IAHvS2sPd3d/V3c/v7tuTvCrJ/+zub133HAAwkc9xA8Ag+5dcvLt/LsnPLTkDAEziiBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAZZe7ir6paq+oWq+qWqem9Vfc+6ZwCAqfYvsOafJHlJd3+oqg4kOVtVb+3udy4wCwCMsvZwd3cn+dDO0wM7f3rdcwDARItc466qfVX1niQXktzX3e9aYg4AmGaRcHf3Y9395Umen+Srqurw1ftU1bGq2q6q7YsXL65/SADYgxa9q7y7/yDJzyW5e5fXTnX3ZndvbmxsrH02ANiLl
rirfKOqPnfn8Wcl+fok71v3HAAw0RJ3lX9+ktdX1b5c/h+HN3X3WxaYAwDGWeKu8l9O8hXrXhcAbgS+OQ0ABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYZO3hrqovqqozVXW+qt5bVd+x7hkAYKr9C6z5aJJ/3t3vrqpnJXmgqu7r7l9ZYBYAGGXtR9zd/bvd/e6dx3+U5HySL1z3HAAw0aLXuKvq9iRfkeRdS84BAFMscao8SVJVn53kJ5K8prs/uMvrx5IcS5KDBw+ueTqYr6r2zPt099MwCZAsFO6qOpDL0X5Dd//kbvt096kkp5Jkc3PT33r4JIkl3JiWuKu8kpxOcr67//261weAyZa4xv3iJN+W5CVV9Z6dP9+wwBwAMM7aT5V399kkT8/FNwC4yfjmNAAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWAQ4QaAQYQbAAYRbgAYRLgBYBDhBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGGSRcFfVPVV1oarOLbE+AEy11BH3vUnuXmht4ClsbW3l8OHD2bdvXw4fPpytra2lRwJ27F9i0e6+v6puX2Jt4MltbW3lxIkTOX36dO68886cPXs2R48eTZIcOXJk4ekA17iBxzl58mROnz6du+66KwcOHMhdd92V06dP5+TJk0uPBiSp7l5m4ctH3G/p7sPXeP1YkmNJcvDgwb/04IMPrm84uInt27cvH/nIR3LgwIGPb7t06VJuueWWPPbYYwtOBjePqnqguzd3e23PHnF396nu3uzuzY2NjaXHgZvGoUOHcvbs2cdtO3v2bA4dOrTQRMCV9my4gWWcOHEiR48ezZkzZ3Lp0qWcOXMmR48ezYkTJ5YeDchCN6dV1VaSr0tyW1U9lOS13X16iVmAx/vYDWjHjx/P+fPnc+jQoZw8edKNabBHLHaN+5OxubnZ29vbS48BAGsx8ho3APBEwg0Agwg3AAwi3AAwiHADwCDCDQCDCDcADCLcADCIcAPAIMINAIMINwAMItwAMIhwA8Agwg0Agwg3AAwy4vdxV9XFJA8uPQfchG5L8vtLDwE3oT/b3Ru7vTAi3MAyqmq7uzeXngP4BKfKAWAQ4QaAQYQbeDKnlh4AeDzXuAFgEEfcADCIcANPUFX3VNWFqjq39CzA4wk3sJt7k9y99BDAEwk38ATdfX+Sh5eeA3gi4QaAQYQbAAYRbgAYRLgBYBDhBp6gqraSvCPJHVX1UFUdXXom4DLfnAYAgzjiBoBBhBsABhFuABhEuAFgEOEGgEGEGwAGEW4AGES4AWCQ/w9OJELwvCYpGAAAAABJRU5ErkJggg==\n",
50 | "text/plain": [
51 | ""
52 | ]
53 | },
54 | "metadata": {
55 | "needs_background": "light"
56 | },
57 | "output_type": "display_data"
58 | }
59 | ],
60 | "source": [
61 | "values = [1,2,2,2,2 ,2, 5, 6, 6, 7, 7,4,5,6,6,6,7,7,7,7,6,6,6, 8, 8, 8, 10,11,11,10,14]\n",
62 | "\n",
63 | "plt.figure(figsize=(8,8))\n",
64 | "plt.boxplot(values)\n",
65 | "plt.yticks(range(min(values), max(values)))\n",
66 | "plt.ylabel(\"Value\")\n",
67 | "plt.show()"
68 | ]
69 | },
70 | {
71 | "cell_type": "markdown",
72 | "metadata": {},
73 | "source": [
74 | "# 04_matplotlib: alıştırma - 3"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": 1,
80 | "metadata": {},
81 | "outputs": [],
82 | "source": [
83 | "# tips.csv veri setinde tip niteliğine ait boxplot çiziniz ve grafiği yorumlayınız."
84 | ]
85 | }
86 | ],
87 | "metadata": {
88 | "kernelspec": {
89 | "display_name": "Python 3",
90 | "language": "python",
91 | "name": "python3"
92 | },
93 | "language_info": {
94 | "codemirror_mode": {
95 | "name": "ipython",
96 | "version": 3
97 | },
98 | "file_extension": ".py",
99 | "mimetype": "text/x-python",
100 | "name": "python",
101 | "nbconvert_exporter": "python",
102 | "pygments_lexer": "ipython3",
103 | "version": "3.6.8"
104 | }
105 | },
106 | "nbformat": 4,
107 | "nbformat_minor": 2
108 | }
109 |
--------------------------------------------------------------------------------
/04_matplotlib/boxplot_01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/04_matplotlib/boxplot_01.png
--------------------------------------------------------------------------------
/04_matplotlib/boxplot_normal_dagilim.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/04_matplotlib/boxplot_normal_dagilim.png
--------------------------------------------------------------------------------
/07_data_preprocessing_cleaning/00_github_ornek_veri_okuma.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import os\n",
10 | "os.environ['HTTPS_PROXY'] = 'https://172.16.64.74:8080'"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 1,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "import pandas as pd"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 2,
25 | "metadata": {},
26 | "outputs": [],
27 | "source": [
28 | "df = pd.read_csv(\"https://raw.githubusercontent.com/erkansirin78/datasets/master/simple_data.csv\")"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 3,
34 | "metadata": {},
35 | "outputs": [
36 | {
37 | "data": {
38 | "text/html": [
39 | "\n",
40 | "\n",
53 | "
\n",
54 | " \n",
55 | " \n",
56 | " | \n",
57 | " sirano | \n",
58 | " isim | \n",
59 | " yas | \n",
60 | " meslek | \n",
61 | " sehir | \n",
62 | " aylik_gelir | \n",
63 | "
\n",
64 | " \n",
65 | " \n",
66 | " \n",
67 | " 0 | \n",
68 | " 1 | \n",
69 | " Cemal | \n",
70 | " 35 | \n",
71 | " Isci | \n",
72 | " Ankara | \n",
73 | " 3500 | \n",
74 | "
\n",
75 | " \n",
76 | " 1 | \n",
77 | " 2 | \n",
78 | " Ceyda | \n",
79 | " 42 | \n",
80 | " Memur | \n",
81 | " Kayseri | \n",
82 | " 4200 | \n",
83 | "
\n",
84 | " \n",
85 | " 2 | \n",
86 | " 3 | \n",
87 | " Timur | \n",
88 | " 30 | \n",
89 | " Müzisyen | \n",
90 | " Istanbul | \n",
91 | " 9000 | \n",
92 | "
\n",
93 | " \n",
94 | " 3 | \n",
95 | " 4 | \n",
96 | " Burcu | \n",
97 | " 29 | \n",
98 | " Pazarlamaci | \n",
99 | " Ankara | \n",
100 | " 4200 | \n",
101 | "
\n",
102 | " \n",
103 | " 4 | \n",
104 | " 5 | \n",
105 | " Yasemin | \n",
106 | " 23 | \n",
107 | " Pazarlamaci | \n",
108 | " Bursa | \n",
109 | " 4800 | \n",
110 | "
\n",
111 | " \n",
112 | "
\n",
113 | "
"
114 | ],
115 | "text/plain": [
116 | " sirano isim yas meslek sehir aylik_gelir\n",
117 | "0 1 Cemal 35 Isci Ankara 3500\n",
118 | "1 2 Ceyda 42 Memur Kayseri 4200\n",
119 | "2 3 Timur 30 Müzisyen Istanbul 9000\n",
120 | "3 4 Burcu 29 Pazarlamaci Ankara 4200\n",
121 | "4 5 Yasemin 23 Pazarlamaci Bursa 4800"
122 | ]
123 | },
124 | "execution_count": 3,
125 | "metadata": {},
126 | "output_type": "execute_result"
127 | }
128 | ],
129 | "source": [
130 | "df.head()"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {},
137 | "outputs": [],
138 | "source": []
139 | }
140 | ],
141 | "metadata": {
142 | "kernelspec": {
143 | "display_name": "Python 3",
144 | "language": "python",
145 | "name": "python3"
146 | },
147 | "language_info": {
148 | "codemirror_mode": {
149 | "name": "ipython",
150 | "version": 3
151 | },
152 | "file_extension": ".py",
153 | "mimetype": "text/x-python",
154 | "name": "python",
155 | "nbconvert_exporter": "python",
156 | "pygments_lexer": "ipython3",
157 | "version": "3.6.5"
158 | }
159 | },
160 | "nbformat": 4,
161 | "nbformat_minor": 2
162 | }
163 |
--------------------------------------------------------------------------------
/07_data_preprocessing_cleaning/ileri_seviye_ornekler/03_iris_quartiles.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [
18 | {
19 | "data": {
20 | "text/html": [
21 | "\n",
22 | "\n",
35 | "
\n",
36 | " \n",
37 | " \n",
38 | " | \n",
39 | " SepalLengthCm | \n",
40 | " SepalWidthCm | \n",
41 | " PetalLengthCm | \n",
42 | " PetalWidthCm | \n",
43 | " Species | \n",
44 | "
\n",
45 | " \n",
46 | " \n",
47 | " \n",
48 | " 0 | \n",
49 | " 5.1 | \n",
50 | " 3.5 | \n",
51 | " 1.4 | \n",
52 | " 0.2 | \n",
53 | " Iris-setosa | \n",
54 | "
\n",
55 | " \n",
56 | " 1 | \n",
57 | " 4.9 | \n",
58 | " 3.0 | \n",
59 | " 1.4 | \n",
60 | " 0.2 | \n",
61 | " Iris-setosa | \n",
62 | "
\n",
63 | " \n",
64 | " 2 | \n",
65 | " 4.7 | \n",
66 | " 3.2 | \n",
67 | " 1.3 | \n",
68 | " 0.2 | \n",
69 | " Iris-setosa | \n",
70 | "
\n",
71 | " \n",
72 | " 3 | \n",
73 | " 4.6 | \n",
74 | " 3.1 | \n",
75 | " 1.5 | \n",
76 | " 0.2 | \n",
77 | " Iris-setosa | \n",
78 | "
\n",
79 | " \n",
80 | " 4 | \n",
81 | " 5.0 | \n",
82 | " 3.6 | \n",
83 | " 1.4 | \n",
84 | " 0.2 | \n",
85 | " Iris-setosa | \n",
86 | "
\n",
87 | " \n",
88 | "
\n",
89 | "
"
90 | ],
91 | "text/plain": [
92 | " SepalLengthCm SepalWidthCm PetalLengthCm PetalWidthCm Species\n",
93 | "0 5.1 3.5 1.4 0.2 Iris-setosa\n",
94 | "1 4.9 3.0 1.4 0.2 Iris-setosa\n",
95 | "2 4.7 3.2 1.3 0.2 Iris-setosa\n",
96 | "3 4.6 3.1 1.5 0.2 Iris-setosa\n",
97 | "4 5.0 3.6 1.4 0.2 Iris-setosa"
98 | ]
99 | },
100 | "execution_count": 2,
101 | "metadata": {},
102 | "output_type": "execute_result"
103 | }
104 | ],
105 | "source": [
106 | "df = pd.read_csv(\"D:/Datasets/iris.csv\")\n",
107 | "df.head()"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": null,
113 | "metadata": {},
114 | "outputs": [],
115 | "source": [
116 | "# Her bir çiçek türüne ait quartileden 0.02 ile 0.98 dışında olanları 1 olarak işaretlemek"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 4,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "name": "stderr",
126 | "output_type": "stream",
127 | "text": [
128 | "c:\\python\\python36\\lib\\site-packages\\ipykernel_launcher.py:7: SettingWithCopyWarning: \n",
129 | "A value is trying to be set on a copy of a slice from a DataFrame.\n",
130 | "Try using .loc[row_indexer,col_indexer] = value instead\n",
131 | "\n",
132 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
133 | " import sys\n"
134 | ]
135 | },
136 | {
137 | "data": {
138 | "text/html": [
139 | "\n",
140 | "\n",
153 | "
\n",
154 | " \n",
155 | " \n",
156 | " | \n",
157 | " SepalLengthCm | \n",
158 | " SepalWidthCm | \n",
159 | " PetalLengthCm | \n",
160 | " PetalWidthCm | \n",
161 | " Species | \n",
162 | " outlier | \n",
163 | "
\n",
164 | " \n",
165 | " \n",
166 | " \n",
167 | " 14 | \n",
168 | " 5.8 | \n",
169 | " 4.0 | \n",
170 | " 1.2 | \n",
171 | " 0.2 | \n",
172 | " Iris-setosa | \n",
173 | " 1 | \n",
174 | "
\n",
175 | " \n",
176 | " 50 | \n",
177 | " 7.0 | \n",
178 | " 3.2 | \n",
179 | " 4.7 | \n",
180 | " 1.4 | \n",
181 | " Iris-versicolor | \n",
182 | " 1 | \n",
183 | "
\n",
184 | " \n",
185 | " 51 | \n",
186 | " 6.4 | \n",
187 | " 3.2 | \n",
188 | " 4.5 | \n",
189 | " 1.5 | \n",
190 | " Iris-versicolor | \n",
191 | " 0 | \n",
192 | "
\n",
193 | " \n",
194 | " 52 | \n",
195 | " 6.9 | \n",
196 | " 3.1 | \n",
197 | " 4.9 | \n",
198 | " 1.5 | \n",
199 | " Iris-versicolor | \n",
200 | " 0 | \n",
201 | "
\n",
202 | " \n",
203 | " 54 | \n",
204 | " 6.5 | \n",
205 | " 2.8 | \n",
206 | " 4.6 | \n",
207 | " 1.5 | \n",
208 | " Iris-versicolor | \n",
209 | " 0 | \n",
210 | "
\n",
211 | " \n",
212 | "
\n",
213 | "
"
214 | ],
215 | "text/plain": [
216 | " SepalLengthCm SepalWidthCm PetalLengthCm PetalWidthCm Species \\\n",
217 | "14 5.8 4.0 1.2 0.2 Iris-setosa \n",
218 | "50 7.0 3.2 4.7 1.4 Iris-versicolor \n",
219 | "51 6.4 3.2 4.5 1.5 Iris-versicolor \n",
220 | "52 6.9 3.1 4.9 1.5 Iris-versicolor \n",
221 | "54 6.5 2.8 4.6 1.5 Iris-versicolor \n",
222 | "\n",
223 | " outlier \n",
224 | "14 1 \n",
225 | "50 1 \n",
226 | "51 0 \n",
227 | "52 0 \n",
228 | "54 0 "
229 | ]
230 | },
231 | "execution_count": 4,
232 | "metadata": {},
233 | "output_type": "execute_result"
234 | }
235 | ],
236 | "source": [
237 | "quartiles_dict = {}\n",
238 | "outlier_df = pd.DataFrame()\n",
239 | "for tur in list(df.Species.unique()):\n",
240 | " quartiles_dict[tur+\"002\"] = df[df['Species'] == tur]['SepalLengthCm'].quantile(0.02)\n",
241 | " quartiles_dict[tur+\"098\"] = df[df['Species'] == tur]['SepalLengthCm'].quantile(0.98)\n",
242 | " df2 = df[df['Species'] == tur]\n",
243 | " df2['outlier'] = np.where((df2['SepalLengthCm'] < quartiles_dict[tur+\"002\"] ) | (df2['SepalLengthCm'] > quartiles_dict[tur+\"098\"]), 1, 0)\n",
244 | " outlier_df = outlier_df.append(df2, ignore_index=True)\n",
245 | " \n",
246 | "outlier_df[outlier_df.SepalLengthCm > 5.7].head()"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 5,
252 | "metadata": {},
253 | "outputs": [
254 | {
255 | "data": {
256 | "text/plain": [
257 | "{'Iris-setosa002': 4.398000000000001,\n",
258 | " 'Iris-setosa098': 5.701999999999999,\n",
259 | " 'Iris-versicolor002': 4.998,\n",
260 | " 'Iris-versicolor098': 6.902,\n",
261 | " 'Iris-virginica002': 5.585999999999999,\n",
262 | " 'Iris-virginica098': 7.703999999999999}"
263 | ]
264 | },
265 | "execution_count": 5,
266 | "metadata": {},
267 | "output_type": "execute_result"
268 | }
269 | ],
270 | "source": [
271 | "quartiles_dict"
272 | ]
273 | },
274 | {
275 | "cell_type": "code",
276 | "execution_count": null,
277 | "metadata": {},
278 | "outputs": [],
279 | "source": []
280 | }
281 | ],
282 | "metadata": {
283 | "kernelspec": {
284 | "display_name": "myenv",
285 | "language": "python",
286 | "name": "myenv"
287 | },
288 | "language_info": {
289 | "codemirror_mode": {
290 | "name": "ipython",
291 | "version": 3
292 | },
293 | "file_extension": ".py",
294 | "mimetype": "text/x-python",
295 | "name": "python",
296 | "nbconvert_exporter": "python",
297 | "pygments_lexer": "ipython3",
298 | "version": "3.6.5"
299 | }
300 | },
301 | "nbformat": 4,
302 | "nbformat_minor": 2
303 | }
304 |
--------------------------------------------------------------------------------
/07_data_preprocessing_cleaning/label_one_hot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/07_data_preprocessing_cleaning/label_one_hot.png
--------------------------------------------------------------------------------
/07_data_preprocessing_cleaning/scikit_learn_ornekler/01_general_concepts_of_preparation.txt:
--------------------------------------------------------------------------------
1 |
2 | Scikit Learn Kütüphanesinde Bulunan Veri Ön İşleme Kavramları
3 | ===============================================================
4 | 5.3. Preprocessing data
5 | 5.3.1. Standardization, or mean removal and variance scaling
6 | 5.3.1.1. Scaling features to a range
7 | 5.3.1.2. Scaling sparse data
8 | 5.3.1.3. Scaling data with outliers
9 | 5.3.1.4. Centering kernel matrices
10 | 5.3.2. Non-linear transformation
11 | 5.3.2.1. Mapping to a Uniform distribution
12 | 5.3.2.2. Mapping to a Gaussian distribution
13 | 5.3.3. Normalization
14 | 5.3.4. Encoding categorical features
15 | 5.3.5. Discretization
16 | 5.3.5.1. K-bins discretization
17 | 5.3.5.2. Feature binarization
18 | 5.3.6. Imputation of missing values
19 | 5.3.7. Generating polynomial features
20 | 5.3.8. Custom transformers
21 |
--------------------------------------------------------------------------------
/08_linear_regression/hyperplane.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/08_linear_regression/hyperplane.png
--------------------------------------------------------------------------------
/08_linear_regression/other_examples/02_simple_linear_regression_with_cv.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 3,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "df = pd.read_csv(\"D:/Datasets/Advertising.csv\")"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 4,
24 | "metadata": {},
25 | "outputs": [
26 | {
27 | "data": {
28 | "text/html": [
29 | "\n",
30 | "\n",
43 | "
\n",
44 | " \n",
45 | " \n",
46 | " | \n",
47 | " ID | \n",
48 | " TV | \n",
49 | " Radio | \n",
50 | " Newspaper | \n",
51 | " Sales | \n",
52 | "
\n",
53 | " \n",
54 | " \n",
55 | " \n",
56 | " 0 | \n",
57 | " 1 | \n",
58 | " 230.1 | \n",
59 | " 37.8 | \n",
60 | " 69.2 | \n",
61 | " 22.1 | \n",
62 | "
\n",
63 | " \n",
64 | " 1 | \n",
65 | " 2 | \n",
66 | " 44.5 | \n",
67 | " 39.3 | \n",
68 | " 45.1 | \n",
69 | " 10.4 | \n",
70 | "
\n",
71 | " \n",
72 | " 2 | \n",
73 | " 3 | \n",
74 | " 17.2 | \n",
75 | " 45.9 | \n",
76 | " 69.3 | \n",
77 | " 9.3 | \n",
78 | "
\n",
79 | " \n",
80 | " 3 | \n",
81 | " 4 | \n",
82 | " 151.5 | \n",
83 | " 41.3 | \n",
84 | " 58.5 | \n",
85 | " 18.5 | \n",
86 | "
\n",
87 | " \n",
88 | " 4 | \n",
89 | " 5 | \n",
90 | " 180.8 | \n",
91 | " 10.8 | \n",
92 | " 58.4 | \n",
93 | " 12.9 | \n",
94 | "
\n",
95 | " \n",
96 | "
\n",
97 | "
"
98 | ],
99 | "text/plain": [
100 | " ID TV Radio Newspaper Sales\n",
101 | "0 1 230.1 37.8 69.2 22.1\n",
102 | "1 2 44.5 39.3 45.1 10.4\n",
103 | "2 3 17.2 45.9 69.3 9.3\n",
104 | "3 4 151.5 41.3 58.5 18.5\n",
105 | "4 5 180.8 10.8 58.4 12.9"
106 | ]
107 | },
108 | "execution_count": 4,
109 | "metadata": {},
110 | "output_type": "execute_result"
111 | }
112 | ],
113 | "source": [
114 | "df.head()"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 5,
120 | "metadata": {},
121 | "outputs": [],
122 | "source": [
123 | "# y değerim Sales\n",
124 | "# X değerim TV+Radio+Newspaper"
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": 6,
130 | "metadata": {},
131 | "outputs": [
132 | {
133 | "data": {
134 | "text/html": [
135 | "\n",
136 | "\n",
149 | "
\n",
150 | " \n",
151 | " \n",
152 | " | \n",
153 | " ID | \n",
154 | " TV | \n",
155 | " Radio | \n",
156 | " Newspaper | \n",
157 | " Sales | \n",
158 | " Advertising_Budget | \n",
159 | "
\n",
160 | " \n",
161 | " \n",
162 | " \n",
163 | " 0 | \n",
164 | " 1 | \n",
165 | " 230.1 | \n",
166 | " 37.8 | \n",
167 | " 69.2 | \n",
168 | " 22.1 | \n",
169 | " 337.1 | \n",
170 | "
\n",
171 | " \n",
172 | " 1 | \n",
173 | " 2 | \n",
174 | " 44.5 | \n",
175 | " 39.3 | \n",
176 | " 45.1 | \n",
177 | " 10.4 | \n",
178 | " 128.9 | \n",
179 | "
\n",
180 | " \n",
181 | " 2 | \n",
182 | " 3 | \n",
183 | " 17.2 | \n",
184 | " 45.9 | \n",
185 | " 69.3 | \n",
186 | " 9.3 | \n",
187 | " 132.4 | \n",
188 | "
\n",
189 | " \n",
190 | " 3 | \n",
191 | " 4 | \n",
192 | " 151.5 | \n",
193 | " 41.3 | \n",
194 | " 58.5 | \n",
195 | " 18.5 | \n",
196 | " 251.3 | \n",
197 | "
\n",
198 | " \n",
199 | " 4 | \n",
200 | " 5 | \n",
201 | " 180.8 | \n",
202 | " 10.8 | \n",
203 | " 58.4 | \n",
204 | " 12.9 | \n",
205 | " 250.0 | \n",
206 | "
\n",
207 | " \n",
208 | "
\n",
209 | "
"
210 | ],
211 | "text/plain": [
212 | " ID TV Radio Newspaper Sales Advertising_Budget\n",
213 | "0 1 230.1 37.8 69.2 22.1 337.1\n",
214 | "1 2 44.5 39.3 45.1 10.4 128.9\n",
215 | "2 3 17.2 45.9 69.3 9.3 132.4\n",
216 | "3 4 151.5 41.3 58.5 18.5 251.3\n",
217 | "4 5 180.8 10.8 58.4 12.9 250.0"
218 | ]
219 | },
220 | "execution_count": 6,
221 | "metadata": {},
222 | "output_type": "execute_result"
223 | }
224 | ],
225 | "source": [
226 | "df['Advertising_Budget'] = df['TV'] + df['Newspaper'] + df['Radio']\n",
227 | "df.head()"
228 | ]
229 | },
230 | {
231 | "cell_type": "code",
232 | "execution_count": 7,
233 | "metadata": {},
234 | "outputs": [
235 | {
236 | "data": {
237 | "text/plain": [
238 | "(200, 1)"
239 | ]
240 | },
241 | "execution_count": 7,
242 | "metadata": {},
243 | "output_type": "execute_result"
244 | }
245 | ],
246 | "source": [
247 | "X = df['Advertising_Budget'].values.reshape(-1,1)\n",
248 | "X.shape"
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": 8,
254 | "metadata": {},
255 | "outputs": [
256 | {
257 | "data": {
258 | "text/plain": [
259 | "(200,)"
260 | ]
261 | },
262 | "execution_count": 8,
263 | "metadata": {},
264 | "output_type": "execute_result"
265 | }
266 | ],
267 | "source": [
268 | "y = df['Sales'].values\n",
269 | "y.shape"
270 | ]
271 | },
272 | {
273 | "cell_type": "markdown",
274 | "metadata": {},
275 | "source": [
276 | "# Using Cros-Validation"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": 9,
282 | "metadata": {},
283 | "outputs": [],
284 | "source": [
285 | "from sklearn.linear_model import LinearRegression\n",
286 | "from sklearn.model_selection import cross_val_score\n",
287 | "r2_scores = cross_val_score(LinearRegression(), X, y, cv=5, scoring='r2')"
288 | ]
289 | },
290 | {
291 | "cell_type": "code",
292 | "execution_count": 10,
293 | "metadata": {},
294 | "outputs": [
295 | {
296 | "data": {
297 | "text/plain": [
298 | "array([0.74964192, 0.79455226, 0.76417134, 0.74872042, 0.65980565])"
299 | ]
300 | },
301 | "execution_count": 10,
302 | "metadata": {},
303 | "output_type": "execute_result"
304 | }
305 | ],
306 | "source": [
307 | "r2_scores"
308 | ]
309 | },
310 | {
311 | "cell_type": "code",
312 | "execution_count": 11,
313 | "metadata": {},
314 | "outputs": [
315 | {
316 | "data": {
317 | "text/plain": [
318 | "0.743378317855542"
319 | ]
320 | },
321 | "execution_count": 11,
322 | "metadata": {},
323 | "output_type": "execute_result"
324 | }
325 | ],
326 | "source": [
327 | "r2_scores.mean()"
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": null,
333 | "metadata": {},
334 | "outputs": [],
335 | "source": []
336 | }
337 | ],
338 | "metadata": {
339 | "kernelspec": {
340 | "display_name": "Python 3",
341 | "language": "python",
342 | "name": "python3"
343 | },
344 | "language_info": {
345 | "codemirror_mode": {
346 | "name": "ipython",
347 | "version": 3
348 | },
349 | "file_extension": ".py",
350 | "mimetype": "text/x-python",
351 | "name": "python",
352 | "nbconvert_exporter": "python",
353 | "pygments_lexer": "ipython3",
354 | "version": "3.6.5"
355 | }
356 | },
357 | "nbformat": 4,
358 | "nbformat_minor": 2
359 | }
360 |
--------------------------------------------------------------------------------
/10_classification/hata_matrisi_precision_recall.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/hata_matrisi_precision_recall.xlsx
--------------------------------------------------------------------------------
/10_classification/saved_models/01.knn_with_iris_dataset.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/saved_models/01.knn_with_iris_dataset.pkl
--------------------------------------------------------------------------------
/10_classification/saved_models/05.Model_selection_tuning_with_iris_data.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/saved_models/05.Model_selection_tuning_with_iris_data.pkl
--------------------------------------------------------------------------------
/10_classification/saved_models/06.model_selection_tuning_with_SosyalMedyaReklamKampanyası_data.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/saved_models/06.model_selection_tuning_with_SosyalMedyaReklamKampanyası_data.pkl
--------------------------------------------------------------------------------
/10_classification/saved_models/07.xgboost_SosyalMedyaReklamKampanyası_data.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/saved_models/07.xgboost_SosyalMedyaReklamKampanyası_data.pkl
--------------------------------------------------------------------------------
/10_classification/submission_for_kaggle.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,0
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,1
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,0
35 | 925,0
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,0
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,1
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,1
76 | 966,1
77 | 967,1
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,1
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,0
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,0
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,0
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,1
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,0
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,1
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,0
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,1
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/10_classification/xgboost.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/10_classification/xgboost.png
--------------------------------------------------------------------------------
/12_association_rules/.ipynb_checkpoints/01_association_rule-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/12_association_rules/__pycache__/apyori.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/12_association_rules/__pycache__/apyori.cpython-36.pyc
--------------------------------------------------------------------------------
/12_association_rules/apriori.py:
--------------------------------------------------------------------------------
1 | # Apriori
2 |
3 | # Importing the libraries
4 | import numpy as np
5 | import matplotlib.pyplot as plt
6 | import pandas as pd
7 |
8 | # Data Preprocessing
9 | dataset = pd.read_csv('Market_Basket_Optimisation.csv', header = None)
10 | transactions = []
11 | for i in range(0, 7501):
12 | transactions.append([str(dataset.values[i,j]) for j in range(0, 20)])
13 |
14 | # Training Apriori on the dataset
15 | from apyori import apriori
16 | rules = apriori(transactions, min_support = 0.003, min_confidence = 0.2, min_lift = 3, min_length = 2)
17 |
18 | # Visualising the results
19 | results = list(rules)
--------------------------------------------------------------------------------
/13_deep_learning/.ipynb_checkpoints/01_introduction_to_tensorflow-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "# Tensorflow, nümerik operasyonlar için güçlü bir açık kaynak kütüphanesidir.\n"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import tensorflow as tf\n",
19 | "import numpy as np"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "# 1. Sabit tanımlama (define a constant)"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 2,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "tensor_20 = tf.constant([[23, 4], [32, 51]])"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 3,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "data": {
45 | "text/plain": [
46 | ""
49 | ]
50 | },
51 | "execution_count": 3,
52 | "metadata": {},
53 | "output_type": "execute_result"
54 | }
55 | ],
56 | "source": [
57 | "tensor_20"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": 4,
63 | "metadata": {},
64 | "outputs": [
65 | {
66 | "data": {
67 | "text/plain": [
68 | "TensorShape([2, 2])"
69 | ]
70 | },
71 | "execution_count": 4,
72 | "metadata": {},
73 | "output_type": "execute_result"
74 | }
75 | ],
76 | "source": [
77 | "tensor_20.shape"
78 | ]
79 | },
80 | {
81 | "cell_type": "markdown",
82 | "metadata": {},
83 | "source": [
84 | "## 1.1. Sabitin değerlerine ulaşma"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": 5,
90 | "metadata": {},
91 | "outputs": [
92 | {
93 | "data": {
94 | "text/plain": [
95 | "array([[23, 4],\n",
96 | " [32, 51]])"
97 | ]
98 | },
99 | "execution_count": 5,
100 | "metadata": {},
101 | "output_type": "execute_result"
102 | }
103 | ],
104 | "source": [
105 | "# session gerektirmeden numpy ile değerlere doğrudan ulaşmak\n",
106 | "tensor_20.numpy()"
107 | ]
108 | },
109 | {
110 | "cell_type": "code",
111 | "execution_count": 6,
112 | "metadata": {},
113 | "outputs": [],
114 | "source": [
115 | "# bu numpy array tekrar Tensorflow'a çevrilebilir\n",
116 | "numpy_tensor = np.array([[23, 4], [32, 51]])"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 7,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "data": {
126 | "text/plain": [
127 | ""
130 | ]
131 | },
132 | "execution_count": 7,
133 | "metadata": {},
134 | "output_type": "execute_result"
135 | }
136 | ],
137 | "source": [
138 | "tensor_from_numpy = tf.constant(numpy_tensor)\n",
139 | "tensor_from_numpy"
140 | ]
141 | },
142 | {
143 | "cell_type": "markdown",
144 | "metadata": {},
145 | "source": [
146 | "# 2. Değişkenler (variables)"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": 8,
152 | "metadata": {},
153 | "outputs": [],
154 | "source": [
155 | "tf2_variable = tf.Variable(3)"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": 9,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": [
164 | "tensor = tf.constant([[1, 2], [3, 4]])"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": 10,
170 | "metadata": {},
171 | "outputs": [
172 | {
173 | "data": {
174 | "text/plain": [
175 | ""
178 | ]
179 | },
180 | "execution_count": 10,
181 | "metadata": {},
182 | "output_type": "execute_result"
183 | }
184 | ],
185 | "source": [
186 | "tensor"
187 | ]
188 | },
189 | {
190 | "cell_type": "code",
191 | "execution_count": 11,
192 | "metadata": {},
193 | "outputs": [
194 | {
195 | "data": {
196 | "text/plain": [
197 | ""
200 | ]
201 | },
202 | "execution_count": 11,
203 | "metadata": {},
204 | "output_type": "execute_result"
205 | }
206 | ],
207 | "source": [
208 | "tensor + 2"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": 12,
214 | "metadata": {},
215 | "outputs": [
216 | {
217 | "data": {
218 | "text/plain": [
219 | ""
222 | ]
223 | },
224 | "execution_count": 12,
225 | "metadata": {},
226 | "output_type": "execute_result"
227 | }
228 | ],
229 | "source": [
230 | "tensor * 5"
231 | ]
232 | },
233 | {
234 | "cell_type": "code",
235 | "execution_count": 13,
236 | "metadata": {},
237 | "outputs": [
238 | {
239 | "data": {
240 | "text/plain": [
241 | "array([[ 1, 4],\n",
242 | " [ 9, 16]], dtype=int32)"
243 | ]
244 | },
245 | "execution_count": 13,
246 | "metadata": {},
247 | "output_type": "execute_result"
248 | }
249 | ],
250 | "source": [
251 | "np.square(tensor)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": 14,
257 | "metadata": {},
258 | "outputs": [
259 | {
260 | "data": {
261 | "text/plain": [
262 | "array([[1. , 1.41421356],\n",
263 | " [1.73205081, 2. ]])"
264 | ]
265 | },
266 | "execution_count": 14,
267 | "metadata": {},
268 | "output_type": "execute_result"
269 | }
270 | ],
271 | "source": [
272 | "np.sqrt(tensor)"
273 | ]
274 | },
275 | {
276 | "cell_type": "markdown",
277 | "metadata": {},
278 | "source": [
279 | "# 3. Computation Graph Oluşturma"
280 | ]
281 | },
282 | {
283 | "cell_type": "code",
284 | "execution_count": 15,
285 | "metadata": {},
286 | "outputs": [],
287 | "source": [
288 | "x = tf.Variable(3, name=\"x\")\n",
289 | "y = tf.Variable(4, name=\"y\")"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 16,
295 | "metadata": {},
296 | "outputs": [
297 | {
298 | "data": {
299 | "text/plain": [
300 | "tensorflow.python.ops.resource_variable_ops.ResourceVariable"
301 | ]
302 | },
303 | "execution_count": 16,
304 | "metadata": {},
305 | "output_type": "execute_result"
306 | }
307 | ],
308 | "source": [
309 | "type(x)"
310 | ]
311 | },
312 | {
313 | "cell_type": "code",
314 | "execution_count": 25,
315 | "metadata": {},
316 | "outputs": [],
317 | "source": [
318 | "def my_f(x,y):\n",
319 | " return x*x*y + y + 2"
320 | ]
321 | },
322 | {
323 | "cell_type": "code",
324 | "execution_count": 26,
325 | "metadata": {},
326 | "outputs": [
327 | {
328 | "data": {
329 | "text/plain": [
330 | "function"
331 | ]
332 | },
333 | "execution_count": 26,
334 | "metadata": {},
335 | "output_type": "execute_result"
336 | }
337 | ],
338 | "source": [
339 | "type(my_f)"
340 | ]
341 | },
342 | {
343 | "cell_type": "code",
344 | "execution_count": 27,
345 | "metadata": {},
346 | "outputs": [],
347 | "source": [
348 | "out_a = my_f(x,y)"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": 29,
354 | "metadata": {},
355 | "outputs": [
356 | {
357 | "data": {
358 | "text/plain": [
359 | "tensorflow.python.framework.ops.EagerTensor"
360 | ]
361 | },
362 | "execution_count": 29,
363 | "metadata": {},
364 | "output_type": "execute_result"
365 | }
366 | ],
367 | "source": [
368 | "type(out_a)"
369 | ]
370 | },
371 | {
372 | "cell_type": "code",
373 | "execution_count": 28,
374 | "metadata": {},
375 | "outputs": [
376 | {
377 | "name": "stdout",
378 | "output_type": "stream",
379 | "text": [
380 | "tf.Tensor(42, shape=(), dtype=int32)\n"
381 | ]
382 | }
383 | ],
384 | "source": [
385 | "print(out_a)"
386 | ]
387 | }
388 | ],
389 | "metadata": {
390 | "kernelspec": {
391 | "display_name": "myenv",
392 | "language": "python",
393 | "name": "myenv"
394 | },
395 | "language_info": {
396 | "codemirror_mode": {
397 | "name": "ipython",
398 | "version": 3
399 | },
400 | "file_extension": ".py",
401 | "mimetype": "text/x-python",
402 | "name": "python",
403 | "nbconvert_exporter": "python",
404 | "pygments_lexer": "ipython3",
405 | "version": "3.6.5"
406 | }
407 | },
408 | "nbformat": 4,
409 | "nbformat_minor": 2
410 | }
411 |
--------------------------------------------------------------------------------
/13_deep_learning/.ipynb_checkpoints/05_transfer_learning_and_fine_tuning-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/13_deep_learning/makale_gazi_unv_derin_ogrenme.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/makale_gazi_unv_derin_ogrenme.pdf
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/.ipynb_checkpoints/00_introduction_to_tensorflow-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/.ipynb_checkpoints/02_tf_graph-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 3,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 4,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "data": {
19 | "text/plain": [
20 | "'1.14.0'"
21 | ]
22 | },
23 | "execution_count": 4,
24 | "metadata": {},
25 | "output_type": "execute_result"
26 | }
27 | ],
28 | "source": [
29 | "tf.__version__"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "## 1. Graph\n",
37 | "\n",
38 | " Global variable connecting variables and placeholders to operations.\n",
39 | " The graph is composed of two types of objects:\n",
40 | " tf.Operation: Nodes\n",
41 | " tf.Tensor: Edges\n",
42 | " \n",
43 | " In TensorFlow each node is an edge with possible inputs that can supply some output."
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 5,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "n1 = tf.constant(1)"
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": 6,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "n2 = tf.constant(2)"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 7,
67 | "metadata": {},
68 | "outputs": [],
69 | "source": [
70 | "n3 = n1 + n2"
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "execution_count": 8,
76 | "metadata": {},
77 | "outputs": [],
78 | "source": [
79 | "with tf.Session() as sess:\n",
80 | " result = sess.run(n3)"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 9,
86 | "metadata": {},
87 | "outputs": [
88 | {
89 | "name": "stdout",
90 | "output_type": "stream",
91 | "text": [
92 | "3\n"
93 | ]
94 | }
95 | ],
96 | "source": [
97 | "print(result)"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": 10,
103 | "metadata": {},
104 | "outputs": [
105 | {
106 | "name": "stdout",
107 | "output_type": "stream",
108 | "text": [
109 | "Tensor(\"add:0\", shape=(), dtype=int32)\n"
110 | ]
111 | }
112 | ],
113 | "source": [
114 | "print(n3)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 11,
120 | "metadata": {},
121 | "outputs": [],
122 | "source": [
123 | "# Tensorflow'u başlattığımızda varsayılan graph otomatik olarak oluşur.\n",
124 | "# İstersek ilave graph yaratabiliriz."
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": 12,
130 | "metadata": {},
131 | "outputs": [
132 | {
133 | "name": "stdout",
134 | "output_type": "stream",
135 | "text": [
136 | "\n"
137 | ]
138 | }
139 | ],
140 | "source": [
141 | "# Varsayılan Graph\n",
142 | "print(tf.get_default_graph())"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": 13,
148 | "metadata": {},
149 | "outputs": [],
150 | "source": [
151 | "# Yeni bir Graph\n",
152 | "g = tf.Graph()"
153 | ]
154 | },
155 | {
156 | "cell_type": "code",
157 | "execution_count": 14,
158 | "metadata": {},
159 | "outputs": [
160 | {
161 | "name": "stdout",
162 | "output_type": "stream",
163 | "text": [
164 | "\n"
165 | ]
166 | }
167 | ],
168 | "source": [
169 | "print(g)"
170 | ]
171 | },
172 | {
173 | "cell_type": "code",
174 | "execution_count": 15,
175 | "metadata": {},
176 | "outputs": [
177 | {
178 | "name": "stdout",
179 | "output_type": "stream",
180 | "text": [
181 | "\n"
182 | ]
183 | }
184 | ],
185 | "source": [
186 | "# Mevcut varsayılan graph'ı başka bir isimle referans göstermek\n",
187 | "g1 = tf.get_default_graph()\n",
188 | "print(g1)"
189 | ]
190 | },
191 | {
192 | "cell_type": "code",
193 | "execution_count": 16,
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "# Yeni bir graph daha\n",
198 | "g3 = tf.Graph()"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 17,
204 | "metadata": {},
205 | "outputs": [
206 | {
207 | "name": "stdout",
208 | "output_type": "stream",
209 | "text": [
210 | "\n"
211 | ]
212 | }
213 | ],
214 | "source": [
215 | "print(g3)"
216 | ]
217 | },
218 | {
219 | "cell_type": "code",
220 | "execution_count": 19,
221 | "metadata": {},
222 | "outputs": [
223 | {
224 | "name": "stdout",
225 | "output_type": "stream",
226 | "text": [
227 | "True\n"
228 | ]
229 | }
230 | ],
231 | "source": [
232 | "# Farklı bir graph'ı varsayılan olarak belirleme\n",
233 | "# Aşağıdaki kod bloğunda g3 varsayılan graph olacak\n",
234 | "with g3.as_default():\n",
235 | " print(g3 is tf.get_default_graph())"
236 | ]
237 | },
238 | {
239 | "cell_type": "code",
240 | "execution_count": 21,
241 | "metadata": {},
242 | "outputs": [
243 | {
244 | "name": "stdout",
245 | "output_type": "stream",
246 | "text": [
247 | "False\n"
248 | ]
249 | }
250 | ],
251 | "source": [
252 | "# with bloğu bittiği için artık burada değil\n",
253 | "print(g3 is tf.get_default_graph())"
254 | ]
255 | },
256 | {
257 | "cell_type": "code",
258 | "execution_count": null,
259 | "metadata": {},
260 | "outputs": [],
261 | "source": []
262 | }
263 | ],
264 | "metadata": {
265 | "kernelspec": {
266 | "display_name": "tensorflow-1",
267 | "language": "python",
268 | "name": "tensorflow-1"
269 | },
270 | "language_info": {
271 | "codemirror_mode": {
272 | "name": "ipython",
273 | "version": 3
274 | },
275 | "file_extension": ".py",
276 | "mimetype": "text/x-python",
277 | "name": "python",
278 | "nbconvert_exporter": "python",
279 | "pygments_lexer": "ipython3",
280 | "version": "3.6.5"
281 | }
282 | },
283 | "nbformat": 4,
284 | "nbformat_minor": 2
285 | }
286 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/.ipynb_checkpoints/04_tf_build_a_neural_network_with_tensorflow-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "np.random.seed(101)\n",
20 | "tf.set_random_seed(101)"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 3,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "data": {
30 | "text/plain": [
31 | "'1.14.0'"
32 | ]
33 | },
34 | "execution_count": 3,
35 | "metadata": {},
36 | "output_type": "execute_result"
37 | }
38 | ],
39 | "source": [
40 | "tf.__version__"
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "\n",
48 | "Görsel: https://www.udemy.com/complete-guide-to-tensorflow-for-deep-learning-with-python"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "## Jupyter'e özgü interaktif session"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 4,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "sess = tf.InteractiveSession()"
65 | ]
66 | },
67 | {
68 | "cell_type": "markdown",
69 | "metadata": {},
70 | "source": [
71 | "## Boyutlar"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 5,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "n_features = 10\n",
81 | "n_dense_neurons = 3"
82 | ]
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "metadata": {},
87 | "source": [
88 | "## input olarak x"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 6,
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "# x'lerimiz ne olacak bilmiyoruz o yüzden yer tutucu kullanıyoruz.\n",
98 | "# shape kısmında satır sayısına None dedik çünkü batch size şu an bilmiyoruz\n",
99 | "# Batch size ağı besleme esnasındaki bir arada girilen satır sayısı\n",
100 | "x = tf.placeholder(tf.float32, (None, n_features))"
101 | ]
102 | },
103 | {
104 | "cell_type": "markdown",
105 | "metadata": {},
106 | "source": [
107 | "## Ağırlık/katsayı/weight olarak W"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": 7,
113 | "metadata": {},
114 | "outputs": [],
115 | "source": [
116 | "# Ağırlığımız yani x'lerin katsayıları. Genelde başlangıç değeri olarak 0-1\n",
117 | "# arası bir değer verilir.\n",
118 | "W = tf.Variable(tf.random_normal( [n_features, n_dense_neurons] ))"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 8,
124 | "metadata": {},
125 | "outputs": [
126 | {
127 | "data": {
128 | "text/plain": [
129 | "array([[-0.33280614, 1.2702985 , -0.47111124],\n",
130 | " [ 0.64046067, 0.22630073, 1.6514024 ],\n",
131 | " [-0.50997734, -0.03110447, -0.8991391 ],\n",
132 | " [ 0.06735504, -0.02050027, 0.9054657 ],\n",
133 | " [ 0.13053663, -0.6236509 , 1.740842 ],\n",
134 | " [-0.12082776, -0.70383835, 1.1355529 ],\n",
135 | " [-0.49338904, -0.3851979 , -0.91806746],\n",
136 | " [ 2.0413523 , 0.8281394 , 0.12083016],\n",
137 | " [-1.0672375 , -0.08883611, -0.47264633],\n",
138 | " [ 0.41959324, -0.6774816 , 1.1672206 ]], dtype=float32)"
139 | ]
140 | },
141 | "execution_count": 8,
142 | "metadata": {},
143 | "output_type": "execute_result"
144 | }
145 | ],
146 | "source": [
147 | "sess.run(tf.random_normal( [n_features, n_dense_neurons]))"
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "## Bias olarak b"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 9,
160 | "metadata": {},
161 | "outputs": [],
162 | "source": [
163 | "# bias terimi olarak her bir neuron başına bir sabit verdik\n",
164 | "b = tf.Variable( tf.ones([n_dense_neurons]))"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": 10,
170 | "metadata": {},
171 | "outputs": [
172 | {
173 | "data": {
174 | "text/plain": [
175 | "array([1., 1., 1.], dtype=float32)"
176 | ]
177 | },
178 | "execution_count": 10,
179 | "metadata": {},
180 | "output_type": "execute_result"
181 | }
182 | ],
183 | "source": [
184 | "sess.run(b.initializer)\n",
185 | "sess.run(b)"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | "## Operasyon: Ağırlıklarla niteliklerin çarpımı xW"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 11,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "# x'in sütun sayısı 10, W'nun satır sayısı 10 olduğuna göre matris çarpımı olabilir.\n",
202 | "# matris çarpımında temel kural çarpılacak matris şekilleri (a,b) (b,c) olmalıdır\n",
203 | "xW = tf.matmul(x,W)"
204 | ]
205 | },
206 | {
207 | "cell_type": "markdown",
208 | "metadata": {},
209 | "source": [
210 | "## Operasyon: Bias ekleme"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 12,
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "z = tf.add(xW, b)"
220 | ]
221 | },
222 | {
223 | "cell_type": "markdown",
224 | "metadata": {},
225 | "source": [
226 | "## Aktivasyon Fonksiyonu"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 13,
232 | "metadata": {},
233 | "outputs": [],
234 | "source": [
235 | "# a = tf.tanh\n",
236 | "# a = tf.nn.relu\n",
237 | "a = tf.sigmoid(z)"
238 | ]
239 | },
240 | {
241 | "cell_type": "markdown",
242 | "metadata": {},
243 | "source": [
244 | "## Değişkenleri ilklendirme (initialize)"
245 | ]
246 | },
247 | {
248 | "cell_type": "code",
249 | "execution_count": 14,
250 | "metadata": {},
251 | "outputs": [],
252 | "source": [
253 | "init = tf.global_variables_initializer()\n",
254 | "sess.run(init)"
255 | ]
256 | },
257 | {
258 | "cell_type": "markdown",
259 | "metadata": {},
260 | "source": [
261 | "## Katmanı (layer) çalıştırma ve sonuçları görme"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": 15,
267 | "metadata": {},
268 | "outputs": [],
269 | "source": [
270 | "layer_out = sess.run(a, feed_dict={x:np.random.random( [1, n_features] )})"
271 | ]
272 | },
273 | {
274 | "cell_type": "code",
275 | "execution_count": 16,
276 | "metadata": {},
277 | "outputs": [
278 | {
279 | "name": "stdout",
280 | "output_type": "stream",
281 | "text": [
282 | "[[0.725475 0.43900672 0.30847842]]\n"
283 | ]
284 | }
285 | ],
286 | "source": [
287 | "print(layer_out)"
288 | ]
289 | }
290 | ],
291 | "metadata": {
292 | "kernelspec": {
293 | "display_name": "tensorflow-1",
294 | "language": "python",
295 | "name": "tensorflow-1"
296 | },
297 | "language_info": {
298 | "codemirror_mode": {
299 | "name": "ipython",
300 | "version": 3
301 | },
302 | "file_extension": ".py",
303 | "mimetype": "text/x-python",
304 | "name": "python",
305 | "nbconvert_exporter": "python",
306 | "pygments_lexer": "ipython3",
307 | "version": "3.6.5"
308 | }
309 | },
310 | "nbformat": 4,
311 | "nbformat_minor": 2
312 | }
313 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/.ipynb_checkpoints/05_tf_simple_linear_regression-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/02_tf_graph.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 3,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 4,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "data": {
19 | "text/plain": [
20 | "'1.14.0'"
21 | ]
22 | },
23 | "execution_count": 4,
24 | "metadata": {},
25 | "output_type": "execute_result"
26 | }
27 | ],
28 | "source": [
29 | "tf.__version__"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "## 1. Graph\n",
37 | "\n",
38 | " Global variable connecting variables and placeholders to operations.\n",
39 | " The graph is composed of two types of objects:\n",
40 | " tf.Operation: Nodes\n",
41 | " tf.Tensor: Edges\n",
42 | " \n",
43 | "    In TensorFlow each node is an operation with possible inputs that can supply some output."
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 5,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "n1 = tf.constant(1)"
53 | ]
54 | },
55 | {
56 | "cell_type": "code",
57 | "execution_count": 6,
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "n2 = tf.constant(2)"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 7,
67 | "metadata": {},
68 | "outputs": [],
69 | "source": [
70 | "n3 = n1 + n2"
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "execution_count": 8,
76 | "metadata": {},
77 | "outputs": [],
78 | "source": [
79 | "with tf.Session() as sess:\n",
80 | " result = sess.run(n3)"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 9,
86 | "metadata": {},
87 | "outputs": [
88 | {
89 | "name": "stdout",
90 | "output_type": "stream",
91 | "text": [
92 | "3\n"
93 | ]
94 | }
95 | ],
96 | "source": [
97 | "print(result)"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": 10,
103 | "metadata": {},
104 | "outputs": [
105 | {
106 | "name": "stdout",
107 | "output_type": "stream",
108 | "text": [
109 | "Tensor(\"add:0\", shape=(), dtype=int32)\n"
110 | ]
111 | }
112 | ],
113 | "source": [
114 | "print(n3)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 11,
120 | "metadata": {},
121 | "outputs": [],
122 | "source": [
123 | "# Tensorflow'u başlattığımızda varsayılan graph otomatik olarak oluşur.\n",
124 | "# İstersek ilave graph yaratabiliriz."
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": 12,
130 | "metadata": {},
131 | "outputs": [
132 | {
133 | "name": "stdout",
134 | "output_type": "stream",
135 | "text": [
136 | "\n"
137 | ]
138 | }
139 | ],
140 | "source": [
141 | "# Varsayılan Graph\n",
142 | "print(tf.get_default_graph())"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": 13,
148 | "metadata": {},
149 | "outputs": [],
150 | "source": [
151 | "# Yeni bir Graph\n",
152 | "g = tf.Graph()"
153 | ]
154 | },
155 | {
156 | "cell_type": "code",
157 | "execution_count": 14,
158 | "metadata": {},
159 | "outputs": [
160 | {
161 | "name": "stdout",
162 | "output_type": "stream",
163 | "text": [
164 | "\n"
165 | ]
166 | }
167 | ],
168 | "source": [
169 | "print(g)"
170 | ]
171 | },
172 | {
173 | "cell_type": "code",
174 | "execution_count": 15,
175 | "metadata": {},
176 | "outputs": [
177 | {
178 | "name": "stdout",
179 | "output_type": "stream",
180 | "text": [
181 | "\n"
182 | ]
183 | }
184 | ],
185 | "source": [
186 | "# Mevcut varsayılan graph'ı başka bir isimle referans göstermek\n",
187 | "g1 = tf.get_default_graph()\n",
188 | "print(g1)"
189 | ]
190 | },
191 | {
192 | "cell_type": "code",
193 | "execution_count": 16,
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "# Yeni bir graph daha\n",
198 | "g3 = tf.Graph()"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 17,
204 | "metadata": {},
205 | "outputs": [
206 | {
207 | "name": "stdout",
208 | "output_type": "stream",
209 | "text": [
210 | "\n"
211 | ]
212 | }
213 | ],
214 | "source": [
215 | "print(g3)"
216 | ]
217 | },
218 | {
219 | "cell_type": "code",
220 | "execution_count": 19,
221 | "metadata": {},
222 | "outputs": [
223 | {
224 | "name": "stdout",
225 | "output_type": "stream",
226 | "text": [
227 | "True\n"
228 | ]
229 | }
230 | ],
231 | "source": [
232 | "# Farklı bir graph'ı varsayılan olarak belirleme\n",
233 | "# Aşağıdaki kod bloğunda g3 varsayılan graph olacak\n",
234 | "with g3.as_default():\n",
235 | " print(g3 is tf.get_default_graph())"
236 | ]
237 | },
238 | {
239 | "cell_type": "code",
240 | "execution_count": 21,
241 | "metadata": {},
242 | "outputs": [
243 | {
244 | "name": "stdout",
245 | "output_type": "stream",
246 | "text": [
247 | "False\n"
248 | ]
249 | }
250 | ],
251 | "source": [
252 | "# with bloğu bittiği için artık burada değil\n",
253 | "print(g3 is tf.get_default_graph())"
254 | ]
255 | },
256 | {
257 | "cell_type": "code",
258 | "execution_count": null,
259 | "metadata": {},
260 | "outputs": [],
261 | "source": []
262 | }
263 | ],
264 | "metadata": {
265 | "kernelspec": {
266 | "display_name": "tensorflow-1",
267 | "language": "python",
268 | "name": "tensorflow-1"
269 | },
270 | "language_info": {
271 | "codemirror_mode": {
272 | "name": "ipython",
273 | "version": 3
274 | },
275 | "file_extension": ".py",
276 | "mimetype": "text/x-python",
277 | "name": "python",
278 | "nbconvert_exporter": "python",
279 | "pygments_lexer": "ipython3",
280 | "version": "3.6.5"
281 | }
282 | },
283 | "nbformat": 4,
284 | "nbformat_minor": 2
285 | }
286 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/04_tf_build_a_neural_network_with_tensorflow.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "np.random.seed(101)\n",
20 | "tf.set_random_seed(101)"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 3,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "data": {
30 | "text/plain": [
31 | "'1.14.0'"
32 | ]
33 | },
34 | "execution_count": 3,
35 | "metadata": {},
36 | "output_type": "execute_result"
37 | }
38 | ],
39 | "source": [
40 | "tf.__version__"
41 | ]
42 | },
43 | {
44 | "cell_type": "markdown",
45 | "metadata": {},
46 | "source": [
47 | "\n",
48 | "Görsel: https://www.udemy.com/complete-guide-to-tensorflow-for-deep-learning-with-python"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "## Jupyter'e özgü interaktif session"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 4,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "sess = tf.InteractiveSession()"
65 | ]
66 | },
67 | {
68 | "cell_type": "markdown",
69 | "metadata": {},
70 | "source": [
71 | "## Boyutlar"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 5,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "n_features = 10\n",
81 | "n_dense_neurons = 3"
82 | ]
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "metadata": {},
87 | "source": [
88 | "## input olarak x"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 6,
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "# x'lerimiz ne olacak bilmiyoruz o yüzden yer tutucu kullanıyoruz.\n",
98 | "# shape kısmında satır sayısına None dedik çünkü batch size şu an bilmiyoruz\n",
99 | "# Batch size ağı besleme esnasındaki bir arada girilen satır sayısı\n",
100 | "x = tf.placeholder(tf.float32, (None, n_features))"
101 | ]
102 | },
103 | {
104 | "cell_type": "markdown",
105 | "metadata": {},
106 | "source": [
107 | "## Ağırlık/katsayı/weight olarak W"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": 7,
113 | "metadata": {},
114 | "outputs": [],
115 | "source": [
116 | "# Ağırlığımız yani x'lerin katsayıları. Genelde başlangıç değeri olarak 0-1\n",
117 | "# arası bir değer verilir.\n",
118 | "W = tf.Variable(tf.random_normal( [n_features, n_dense_neurons] ))"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 8,
124 | "metadata": {},
125 | "outputs": [
126 | {
127 | "data": {
128 | "text/plain": [
129 | "array([[-0.33280614, 1.2702985 , -0.47111124],\n",
130 | " [ 0.64046067, 0.22630073, 1.6514024 ],\n",
131 | " [-0.50997734, -0.03110447, -0.8991391 ],\n",
132 | " [ 0.06735504, -0.02050027, 0.9054657 ],\n",
133 | " [ 0.13053663, -0.6236509 , 1.740842 ],\n",
134 | " [-0.12082776, -0.70383835, 1.1355529 ],\n",
135 | " [-0.49338904, -0.3851979 , -0.91806746],\n",
136 | " [ 2.0413523 , 0.8281394 , 0.12083016],\n",
137 | " [-1.0672375 , -0.08883611, -0.47264633],\n",
138 | " [ 0.41959324, -0.6774816 , 1.1672206 ]], dtype=float32)"
139 | ]
140 | },
141 | "execution_count": 8,
142 | "metadata": {},
143 | "output_type": "execute_result"
144 | }
145 | ],
146 | "source": [
147 | "sess.run(tf.random_normal( [n_features, n_dense_neurons]))"
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "## Bias olarak b"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 9,
160 | "metadata": {},
161 | "outputs": [],
162 | "source": [
163 | "# bias terimi olarak her bir neuron başına bir sabit verdik\n",
164 | "b = tf.Variable( tf.ones([n_dense_neurons]))"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": 10,
170 | "metadata": {},
171 | "outputs": [
172 | {
173 | "data": {
174 | "text/plain": [
175 | "array([1., 1., 1.], dtype=float32)"
176 | ]
177 | },
178 | "execution_count": 10,
179 | "metadata": {},
180 | "output_type": "execute_result"
181 | }
182 | ],
183 | "source": [
184 | "sess.run(b.initializer)\n",
185 | "sess.run(b)"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | "## Operasyon: Ağırlıklarla niteliklerin çarpımı xW"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 11,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "# x'in sütun sayısı 10, W'nun satır sayısı 10 olduğuna göre matris çarpımı olabilir.\n",
202 | "# matris çarpımında temel kural çarpılacak matris şekilleri (a,b) (b,c) olmalıdır\n",
203 | "xW = tf.matmul(x,W)"
204 | ]
205 | },
206 | {
207 | "cell_type": "markdown",
208 | "metadata": {},
209 | "source": [
210 | "## Operasyon: Bias ekleme"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 12,
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "z = tf.add(xW, b)"
220 | ]
221 | },
222 | {
223 | "cell_type": "markdown",
224 | "metadata": {},
225 | "source": [
226 | "## Aktivasyon Fonksiyonu"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 13,
232 | "metadata": {},
233 | "outputs": [],
234 | "source": [
235 | "# a = tf.tanh\n",
236 | "# a = tf.nn.relu\n",
237 | "a = tf.sigmoid(z)"
238 | ]
239 | },
240 | {
241 | "cell_type": "markdown",
242 | "metadata": {},
243 | "source": [
244 | "## Değişkenleri ilklendirme (initialize)"
245 | ]
246 | },
247 | {
248 | "cell_type": "code",
249 | "execution_count": 14,
250 | "metadata": {},
251 | "outputs": [],
252 | "source": [
253 | "init = tf.global_variables_initializer()\n",
254 | "sess.run(init)"
255 | ]
256 | },
257 | {
258 | "cell_type": "markdown",
259 | "metadata": {},
260 | "source": [
261 | "## Katmanı (layer) çalıştırma ve sonuçları görme"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": 15,
267 | "metadata": {},
268 | "outputs": [],
269 | "source": [
270 | "layer_out = sess.run(a, feed_dict={x:np.random.random( [1, n_features] )})"
271 | ]
272 | },
273 | {
274 | "cell_type": "code",
275 | "execution_count": 16,
276 | "metadata": {},
277 | "outputs": [
278 | {
279 | "name": "stdout",
280 | "output_type": "stream",
281 | "text": [
282 | "[[0.725475 0.43900672 0.30847842]]\n"
283 | ]
284 | }
285 | ],
286 | "source": [
287 | "print(layer_out)"
288 | ]
289 | }
290 | ],
291 | "metadata": {
292 | "kernelspec": {
293 | "display_name": "tensorflow-1",
294 | "language": "python",
295 | "name": "tensorflow-1"
296 | },
297 | "language_info": {
298 | "codemirror_mode": {
299 | "name": "ipython",
300 | "version": 3
301 | },
302 | "file_extension": ".py",
303 | "mimetype": "text/x-python",
304 | "name": "python",
305 | "nbconvert_exporter": "python",
306 | "pygments_lexer": "ipython3",
307 | "version": "3.6.5"
308 | }
309 | },
310 | "nbformat": 4,
311 | "nbformat_minor": 2
312 | }
313 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_animation.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_animation.gif
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_architecture.png
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_max_pooling.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_max_pooling.gif
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_phases.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/cnn_phases.png
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/simple_ann_schema.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/13_deep_learning/tensorflow-1.14/venv_tensorflow-1/dev/simple_ann_schema.png
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-1.14/virtual_env_and_tensorflow-1.14_installation.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 1. cmd komut satırını yönetici olarak açınız.
5 |
6 |
7 | 2. Çalışma dizini ve sanal ortamın yaratılması:
8 | mkdir python_egitim
9 | cd python_egitim
10 | python -m venv tensorflow-1
11 |
12 | Son komut sanal ortam oluşturur ve biraz zaman alabilir.
13 |
14 | 2. kontrol:
15 | dir
16 |
17 | Çıktı:
18 | Volume in drive C has no label.
19 | Volume Serial Number is 55F6-35C4
20 |
21 | Directory of C:\Users\user\python_egitim\tensorflow-1\dev
22 |
23 | 28.07.2019 15:02 .
24 | 28.07.2019 15:02 ..
25 | 28.07.2019 15:00 .ipynb_checkpoints
26 | 28.07.2019 15:02 977 Untitled.ipynb
27 | 1 File(s) 977 bytes
28 | 3 Dir(s) 832.709.644.288 bytes free
29 |
30 |
31 |
32 | 3. Oluşturulan sanal ortamı aktif hale getirme:
33 | cd tensorflow-1\Scripts
34 | activate
35 |
36 | bu komuttan sonra
37 | prompt başında (tensorflow-1) gelecektir.
38 |
39 | cd ..
40 | mkdir dev
41 | cd dev
42 |
43 | Yukarıdaki komutlarla development dizini oluşturuyoruz ve o dizine geçiyoruz.
44 |
45 | 4. Kontrol:
46 | python -V
47 |
48 | Çıktısı
49 | Python 3.6.5
50 |
51 | pip install --upgrade pip
52 |
53 | PermissionError: [WinError 5] Access is denied:
54 | hatası verebilir aldırış etmeden komutu tekrarlayınız.
55 | Requirement already up-to-date: pip in c:\users\user\python_egitim\tensorflow-1\lib\site-packages (19.2.1)
56 |
57 | pip -V
58 | Çıktısı:
59 | pip 19.1.1 from c:\users\user\envs\tensorflow-1\lib\site-packages\pip (python 3.6)
60 |
61 | cd ..
62 |
63 | 5. tensorflow-1.14, keras yüklemek ve diğer sık kullanılan paketleri yüklemek
64 |
65 | pip install tensorflow keras pandas matplotlib scikit-learn pillow jupyter
66 | Niçin sanal ortama da jupyter kurmalıyız:
67 | https://stackoverflow.com/questions/38221181/no-module-named-tensorflow-in-jupyter
68 |
69 | 6. Kontrol:
70 | python
71 |
72 | python shell giriş >>> gelecektir.
73 |
74 |
75 | >>>import tensorflow as tf
76 | >>>tf.__version__
77 |
78 | Çıktısı:
79 | '1.14.0'
80 | Bazı uyarılar verebilir onlar hata değil kulak asmayın
81 |
82 | exit()
83 |
84 | python shell çıkış.
85 |
86 | 7. Sanal ortamı deactive etmek:
87 | deactivate
88 |
89 |
90 | Tekrar aktif hale getirmek için Scripts içinden
91 | cd Scripts
92 | activate
93 | cd ..
94 | cd dev
95 |
96 |
97 | 8. virtualenv'dan jupyter kullanmak için
98 | ipython kernel install --user --name=tensorflow-1
99 | Installed kernelspec tensorflow-1 in C:\Users\user\AppData\Roaming\jupyter\kernels\tensorflow-1
100 |
101 |
102 | 9. jupyter notebook çalıştırdığımızda
103 | new butonuna tıkladığımızda artık Pyton 3 ile birlikte tensorflow-1'i de görüyor olacağız.
104 |
105 |
106 | 10. Jupyter içinde tensorflow kontrol
107 | import warnings
108 | warnings.filterwarnings('ignore')
109 | import tensorflow as tf
110 | tf.__version__
111 |
112 | Çıktı:
113 | '1.14.0'
114 |
115 |
116 |
117 | 11. Jupyter notebook kapatıldıktan sonra
118 | deactivate
119 | komutu ile sanal ortam kapatılır.
120 | Ortamı tekrar aktif etmek sadece Scripts klasörü içinden mümkündür.
121 |
122 |
123 | 12. Eğer jupyter içinden herhangi bir kernel'i kaldırmak istersek
124 | jupyter kernelspec uninstall unwanted-kernel
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 | Olası hatalar ve çözüm önerileri
135 | -----------------------------------------------------
136 | 1.
137 | Sorun: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.
138 |
139 | Çözüm : Activate root
140 |
141 | 2.
142 | Could not install packages due to an EnvironmentError: [WinError 5] Erişim engellendi: 'c:\\programdata\\anaconda3\\lib\\site-packages\\__pycache__\\easy_install.cpython-37.pyc'
143 | Consider using the `--user` option or check the permissions.
144 |
145 | Çözüm: cmd'yi yönetici olarak başlatmak
146 |
147 |
148 | 3. ssl bağlantı hatası
149 |
150 | Çözüm: Anaconda Library bin dizinini ortam değişkenleri path'e eklemek
151 |
152 |
153 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-2.0/jupyter_noteboos_tf_2/01_introduction_to_tensorflow.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "# Tensorflow, nümerik operasyonlar için güçlü bir açık kaynak kütüphanesidir.\n"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import tensorflow as tf\n",
19 | "import numpy as np"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "# 1. Sabit tanımlama (define a constant)"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 2,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "tensor_20 = tf.constant([[23, 4], [32, 51]])"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 3,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "data": {
45 | "text/plain": [
46 | ""
49 | ]
50 | },
51 | "execution_count": 3,
52 | "metadata": {},
53 | "output_type": "execute_result"
54 | }
55 | ],
56 | "source": [
57 | "tensor_20"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": 4,
63 | "metadata": {},
64 | "outputs": [
65 | {
66 | "data": {
67 | "text/plain": [
68 | "TensorShape([2, 2])"
69 | ]
70 | },
71 | "execution_count": 4,
72 | "metadata": {},
73 | "output_type": "execute_result"
74 | }
75 | ],
76 | "source": [
77 | "tensor_20.shape"
78 | ]
79 | },
80 | {
81 | "cell_type": "markdown",
82 | "metadata": {},
83 | "source": [
84 | "## 1.1. Sabitin değerlerine ulaşma"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": 5,
90 | "metadata": {},
91 | "outputs": [
92 | {
93 | "data": {
94 | "text/plain": [
95 | "array([[23, 4],\n",
96 | " [32, 51]])"
97 | ]
98 | },
99 | "execution_count": 5,
100 | "metadata": {},
101 | "output_type": "execute_result"
102 | }
103 | ],
104 | "source": [
105 | "# session gerektirmeden numpy ile değerlere doğrudan ulaşmak\n",
106 | "tensor_20.numpy()"
107 | ]
108 | },
109 | {
110 | "cell_type": "code",
111 | "execution_count": 6,
112 | "metadata": {},
113 | "outputs": [],
114 | "source": [
115 | "# bu numpy array tekrar Tensorflow'a çevrilebilir\n",
116 | "numpy_tensor = np.array([[23, 4], [32, 51]])"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 7,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "data": {
126 | "text/plain": [
127 | ""
130 | ]
131 | },
132 | "execution_count": 7,
133 | "metadata": {},
134 | "output_type": "execute_result"
135 | }
136 | ],
137 | "source": [
138 | "tensor_from_numpy = tf.constant(numpy_tensor)\n",
139 | "tensor_from_numpy"
140 | ]
141 | },
142 | {
143 | "cell_type": "markdown",
144 | "metadata": {},
145 | "source": [
146 | "# 2. Değişkenler (variables)"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": 8,
152 | "metadata": {},
153 | "outputs": [],
154 | "source": [
155 | "tf2_variable = tf.Variable(3)"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": 9,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": [
164 | "tensor = tf.constant([[1, 2], [3, 4]])"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": 10,
170 | "metadata": {},
171 | "outputs": [
172 | {
173 | "data": {
174 | "text/plain": [
175 | ""
178 | ]
179 | },
180 | "execution_count": 10,
181 | "metadata": {},
182 | "output_type": "execute_result"
183 | }
184 | ],
185 | "source": [
186 | "tensor"
187 | ]
188 | },
189 | {
190 | "cell_type": "code",
191 | "execution_count": 11,
192 | "metadata": {},
193 | "outputs": [
194 | {
195 | "data": {
196 | "text/plain": [
197 | ""
200 | ]
201 | },
202 | "execution_count": 11,
203 | "metadata": {},
204 | "output_type": "execute_result"
205 | }
206 | ],
207 | "source": [
208 | "tensor + 2"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": 12,
214 | "metadata": {},
215 | "outputs": [
216 | {
217 | "data": {
218 | "text/plain": [
219 | ""
222 | ]
223 | },
224 | "execution_count": 12,
225 | "metadata": {},
226 | "output_type": "execute_result"
227 | }
228 | ],
229 | "source": [
230 | "tensor * 5"
231 | ]
232 | },
233 | {
234 | "cell_type": "code",
235 | "execution_count": 13,
236 | "metadata": {},
237 | "outputs": [
238 | {
239 | "data": {
240 | "text/plain": [
241 | "array([[ 1, 4],\n",
242 | " [ 9, 16]], dtype=int32)"
243 | ]
244 | },
245 | "execution_count": 13,
246 | "metadata": {},
247 | "output_type": "execute_result"
248 | }
249 | ],
250 | "source": [
251 | "np.square(tensor)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": 14,
257 | "metadata": {},
258 | "outputs": [
259 | {
260 | "data": {
261 | "text/plain": [
262 | "array([[1. , 1.41421356],\n",
263 | " [1.73205081, 2. ]])"
264 | ]
265 | },
266 | "execution_count": 14,
267 | "metadata": {},
268 | "output_type": "execute_result"
269 | }
270 | ],
271 | "source": [
272 | "np.sqrt(tensor)"
273 | ]
274 | },
275 | {
276 | "cell_type": "markdown",
277 | "metadata": {},
278 | "source": [
279 | "# 3. Computation Graph Oluşturma"
280 | ]
281 | },
282 | {
283 | "cell_type": "code",
284 | "execution_count": 15,
285 | "metadata": {},
286 | "outputs": [],
287 | "source": [
288 | "x = tf.Variable(3, name=\"x\")\n",
289 | "y = tf.Variable(4, name=\"y\")"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 16,
295 | "metadata": {},
296 | "outputs": [
297 | {
298 | "data": {
299 | "text/plain": [
300 | "tensorflow.python.ops.resource_variable_ops.ResourceVariable"
301 | ]
302 | },
303 | "execution_count": 16,
304 | "metadata": {},
305 | "output_type": "execute_result"
306 | }
307 | ],
308 | "source": [
309 | "type(x)"
310 | ]
311 | },
312 | {
313 | "cell_type": "code",
314 | "execution_count": 25,
315 | "metadata": {},
316 | "outputs": [],
317 | "source": [
318 | "def my_f(x,y):\n",
319 | " return x*x*y + y + 2"
320 | ]
321 | },
322 | {
323 | "cell_type": "code",
324 | "execution_count": 26,
325 | "metadata": {},
326 | "outputs": [
327 | {
328 | "data": {
329 | "text/plain": [
330 | "function"
331 | ]
332 | },
333 | "execution_count": 26,
334 | "metadata": {},
335 | "output_type": "execute_result"
336 | }
337 | ],
338 | "source": [
339 | "type(my_f)"
340 | ]
341 | },
342 | {
343 | "cell_type": "code",
344 | "execution_count": 27,
345 | "metadata": {},
346 | "outputs": [],
347 | "source": [
348 | "out_a = my_f(x,y)"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": 29,
354 | "metadata": {},
355 | "outputs": [
356 | {
357 | "data": {
358 | "text/plain": [
359 | "tensorflow.python.framework.ops.EagerTensor"
360 | ]
361 | },
362 | "execution_count": 29,
363 | "metadata": {},
364 | "output_type": "execute_result"
365 | }
366 | ],
367 | "source": [
368 | "type(out_a)"
369 | ]
370 | },
371 | {
372 | "cell_type": "code",
373 | "execution_count": 28,
374 | "metadata": {},
375 | "outputs": [
376 | {
377 | "name": "stdout",
378 | "output_type": "stream",
379 | "text": [
380 | "tf.Tensor(42, shape=(), dtype=int32)\n"
381 | ]
382 | }
383 | ],
384 | "source": [
385 | "print(out_a)"
386 | ]
387 | }
388 | ],
389 | "metadata": {
390 | "kernelspec": {
391 | "display_name": "myenv",
392 | "language": "python",
393 | "name": "myenv"
394 | },
395 | "language_info": {
396 | "codemirror_mode": {
397 | "name": "ipython",
398 | "version": 3
399 | },
400 | "file_extension": ".py",
401 | "mimetype": "text/x-python",
402 | "name": "python",
403 | "nbconvert_exporter": "python",
404 | "pygments_lexer": "ipython3",
405 | "version": "3.6.5"
406 | }
407 | },
408 | "nbformat": 4,
409 | "nbformat_minor": 2
410 | }
411 |
--------------------------------------------------------------------------------
/13_deep_learning/tensorflow-2.0/virtual_env_and_tensorflow-2_installation.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 1. cmd komut satırını yönetici olarak açınız.
5 |
6 |
7 | 2. Çalışma dizini ve sanal ortamın yaratılması:
8 | mkdir python_egitim
9 | cd python_egitim
10 | python -m venv tensorflow-2
11 |
12 | Son komut sanal ortam oluşturur ve biraz zaman alabilir.
13 |
14 | 2. kontrol:
15 | dir
16 |
17 | Volume in drive C has no label.
18 | Volume Serial Number is 55F6-35C4
19 |
20 | Directory of C:\Users\user\python_egitim
21 |
22 | 28.07.2019 15:10 .
23 | 28.07.2019 15:10 ..
24 | 28.07.2019 15:10 tensorflow-2
25 | 0 File(s) 0 bytes
26 | 4 Dir(s) 832.660.901.888 bytes free
27 |
28 |
29 |
30 | 3. Oluşturulan sanal ortamı aktif hale getirme:
31 | cd tensorflow-2\Scripts
32 | activate
33 |
34 | bu komuttan sonra
35 | prompt başında (tensorflow-2) gelecektir.
36 |
37 | cd ..
38 | mkdir dev
39 | cd dev
40 |
41 | Yukarıdaki komutlarla development dizini oluşturuyoruz ve o dizine geçiyoruz.
42 |
43 | 4. Kontrol:
44 | python -V
45 |
46 | Çıktısı
47 | Python 3.6.5
48 |
49 | pip install --upgrade pip
50 |
51 | PermissionError: [WinError 5] Access is denied:
52 | hatası verebilir aldırış etmeden komutu tekrarlayınız.
53 | Çıktı:
54 | Requirement already up-to-date: pip in c:\users\user\python_egitim\tensorflow-2\lib\site-packages (19.2.1)
55 |
56 | pip -V
57 | Çıktısı:
58 | pip 19.2.1 from c:\users\user\python_egitim\tensorflow-2\lib\site-packages\pip (python 3.6)
59 |
60 | cd ..
61 |
62 | 5. pip install tensorflow, keras yüklemek ve diğer sık kullanılan paketleri yüklemek
63 |
64 | pip install tensorflow==2.0.0-alpha0 keras pandas matplotlib scikit-learn pillow jupyter
65 | Niçin sanal ortama da jupyter kurmalıyız:
66 | https://stackoverflow.com/questions/38221181/no-module-named-tensorflow-in-jupyter
67 |
68 | 6. Kontrol:
69 | python
70 |
71 | python shell giriş >>> gelecektir.
72 | >>>import tensorflow as tf
73 | >>>tf.__version__
74 |
75 | Çıktısı:
76 | '2.0.0-alpha0'
77 | Bazı uyarılar verebilir onlar hata değil kulak asmayın
78 |
79 |
80 | exit()
81 |
82 | python shell çıkış.
83 | 7. Sanal ortamı deactive etmek:
84 | deactivate
85 |
86 |
87 | Tekrar aktif hale getirmek için Scripts içinden
88 | cd Scripts
89 | activate
90 | cd ..
91 | cd dev
92 |
93 | 8. virtualenv'dan jupyter kullanmak için
94 | ipython kernel install --user --name=tensorflow-2
95 | Installed kernelspec tensorflow-2 in C:\Users\user\AppData\Roaming\jupyter\kernels\tensorflow-2
96 |
97 |
98 | 9. jupyter notebook çalıştırdığımızda
99 | new butonuna tıkladığımızda artık Python3 ile birlikte tensorflow-2'yi de görüyor olacağız.
100 |
101 |
102 | 10. Jupyter içinde tensorflow kontrol
103 | import warnings
104 | warnings.filterwarnings('ignore')
105 | import tensorflow as tf
106 | tf.__version__
107 |
108 | Çıktı:
109 | '2.0.0-alpha0'
110 |
111 |
112 |
113 | 11. Jupyter kapatma:
114 | komut satırında
115 | Ctrl+C
116 |
117 | Jupyter notebook kapatıldıktan sonra
118 | deactivate
119 | komutu ile sanal ortam kapatılır.
120 | Ortamı tekrar aktif etmek sadece Scripts klasörü içinden mümkündür.
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 | Olası hatalar ve çözüm önerileri
134 | -----------------------------------------------------
135 | 1.
136 | Sorun: pip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.
137 |
138 | Çözüm : Activate root
139 |
140 | 2.
141 | Could not install packages due to an EnvironmentError: [WinError 5] Erişim engellendi: 'c:\\programdata\\anaconda3\\lib\\site-packages\\__pycache__\\easy_install.cpython-37.pyc'
142 | Consider using the `--user` option or check the permissions.
143 |
144 | Çözüm: cmd'yi yönetici olarak başlatmak
145 |
146 |
147 | 3. ssl bağlantı hatası
148 |
149 | Çözüm: Anaconda Library bin dizinini ortam değişkenleri path'e eklemek
150 |
151 |
152 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # machine-learning-with-python
2 | Bu repo 5 gün süreyle düzenlenen Python ile Makine Öğrenmesi Eğitimleri için oluşturulmuştur.
3 |
--------------------------------------------------------------------------------
/final_project_titanic/07.classification_with_titanic_train_data.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/final_project_titanic/07.classification_with_titanic_train_data.pkl
--------------------------------------------------------------------------------
/final_project_titanic/08.classification_with_titanic_train_data_tuning.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/final_project_titanic/08.classification_with_titanic_train_data_tuning.pkl
--------------------------------------------------------------------------------
/final_project_titanic/submission_for_kaggle.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,0
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,1
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,0
35 | 925,0
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,0
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,1
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,1
76 | 966,1
77 | 967,1
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,1
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,0
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,0
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,0
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,1
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,0
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,1
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,0
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,1
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/opening_remarks.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/erkansirin78/machine-learning-with-python/a6212a3ef40a705c35202e32fa8d99d280d52aa3/opening_remarks.pptx
--------------------------------------------------------------------------------