├── .gitattributes
├── dict_names.csv
├── pickle_gb.pkl
├── rink_image.PNG
├── rink_image_base.png
├── rink_image_bw.png
├── rink_image_test.png
├── rink_image_faded.png
├── rink_image_faded_bw.png
├── twitter_credentials_sample.py
├── dict_names.py
├── files_parse.py
├── run_schedule.py
├── parse_parameters.py
├── run_fetch.py
├── bind_20062007_pbp.py
├── flush_files.py
├── LICENSE
├── parameters_sample.py
├── flush_charts.py
├── run_tweets.py
├── environment.yaml
├── dict_team_colors.py
├── run_parse.py
├── .gitignore
├── dict_teams.py
├── tweet_players_gamescore.py
├── tweet_teams_shots_scatter.py
├── tweet_teams_shots_scatter_period.py
├── tweet_teams_shots_scatter_situation.py
├── schedule_fetch.py
├── tweet_teams_gameflow.py
├── tweet_units_onice.py
├── README.md
├── tweet_players_onice.py
├── tweet_players_individual.py
├── tweet_units_lines_matchups_lines.py
├── tweet_units_lines_matchups_pairings.py
├── tweet_units_lines_teammates_pairings.py
├── environment.txt
├── run_stats.py
├── files_parse_rosters.py
├── files_fetch.py
├── chart_units_pk_onice_xg.py
├── chart_units_pp_onice_xg.py
├── chart_units_lines_onice_xg.py
├── chart_units_pairings_onice_xg.py
├── chart_units_pk_onice_shots.py
├── chart_units_pp_onice_shots.py
└── chart_units_lines_onice_shots.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/dict_names.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/dict_names.csv
--------------------------------------------------------------------------------
/pickle_gb.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/pickle_gb.pkl
--------------------------------------------------------------------------------
/rink_image.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image.PNG
--------------------------------------------------------------------------------
/rink_image_base.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image_base.png
--------------------------------------------------------------------------------
/rink_image_bw.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image_bw.png
--------------------------------------------------------------------------------
/rink_image_test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image_test.png
--------------------------------------------------------------------------------
/rink_image_faded.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image_faded.png
--------------------------------------------------------------------------------
/rink_image_faded_bw.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mikegallimore/NHL_Single/HEAD/rink_image_faded_bw.png
--------------------------------------------------------------------------------
/twitter_credentials_sample.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | APP_KEY = "YOUR_APP_KEY_HERE"
6 | APP_SECRET = "YOUR_APP_SECRET_HERE"
7 | OAUTH_TOKEN = "YOUR_OAUTH_TOKEN_HERE"
8 | OAUTH_TOKEN_SECRET = "YOUR_OAUTH_TOKEN_SECRET_HERE"
--------------------------------------------------------------------------------
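
Note: the tweet_* modules expect a local twitter_credentials.py (kept out of version control by .gitignore) that defines these four constants. A minimal sketch of how the sample is consumed, mirroring the setup block in tweet_players_gamescore.py (copy this sample to twitter_credentials.py and fill in real keys first):

    from twython import Twython
    import twitter_credentials

    ### build an authenticated Twython client from the stored credentials
    twitter = Twython(twitter_credentials.APP_KEY,
                      twitter_credentials.APP_SECRET,
                      twitter_credentials.OAUTH_TOKEN,
                      twitter_credentials.OAUTH_TOKEN_SECRET)
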
/dict_names.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import csv
6 |
7 | NAMES = {}
8 |
9 | with open('dict_names.csv', 'r') as Roster_List:
10 | Roster_Reader = list(csv.reader(Roster_List))
11 | for row in Roster_Reader:
12 | NAMES[row[0]] = row[1]
--------------------------------------------------------------------------------
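
Note: a usage sketch for the NAMES lookup, assuming the first CSV column is a player name as it appears in the NHL reports and the second is the preferred spelling (an assumption based on the file and variable names):

    import dict_names

    ### hypothetical raw name; fall back to the original string when no mapping exists
    raw_name = 'ALEXANDER.OVECHKIN'
    display_name = dict_names.NAMES.get(raw_name, raw_name)
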
/files_parse.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | ### import and run parsing modules
6 | import files_parse_rosters
7 | files_parse_rosters
8 |
9 | import files_parse_shifts
10 | files_parse_shifts
11 |
12 | import files_parse_pbp
13 | files_parse_pbp
14 |
15 | import files_parse_TOI
16 | files_parse_TOI
17 |
18 | import files_parse_merge_pbpTOI
19 | files_parse_merge_pbpTOI
--------------------------------------------------------------------------------
/run_schedule.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | import argparse
7 |
8 | parser = argparse.ArgumentParser()
9 |
10 | ### create the command line argument to pass through to schedule_fetch
11 | parser.add_argument('season_id', help='Set to [8-digit season number] (e.g. 20182019)')
12 |
13 | args = parser.parse_args()
14 |
15 | import schedule_fetch
16 | schedule_fetch.parse_ids(args.season_id)
17 | schedule_fetch
--------------------------------------------------------------------------------
/parse_parameters.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import parameters
6 | import argparse
7 |
8 | parser = argparse.ArgumentParser()
9 |
10 | ### create command line arguments to pass through to parameters.py
11 | parser.add_argument("--season_id", dest="season_id", required=False)
12 | parser.add_argument("--game_id", dest="game_id", required=False)
13 |
14 | args = parser.parse_args()
15 |
16 | ### passes the arguments through to parameters.py
17 | parameters.parse_parameters(args.season_id, args.game_id)
--------------------------------------------------------------------------------
/run_fetch.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import argparse
6 |
7 | parser = argparse.ArgumentParser()
8 |
9 |
10 | ###
11 | ### COMMAND LINE ARGUMENTS
12 | ###
13 |
14 | parser.add_argument('season_id', help='Set to [8-digit season number] (e.g. 20182019)')
15 | parser.add_argument('game_id', help='Set to [5-digit game number] (e.g. 20001)')
16 |
17 | parser.add_argument('--fetch', dest='fetch', help='Can set to [true]', required=False)
18 |
19 |
20 | args = parser.parse_args()
21 |
22 |
23 | ###
24 | ### FETCH FILES
25 | ###
26 |
27 | import files_fetch
28 | files_fetch.parse_ids(args.season_id, args.game_id)
29 | files_fetch
--------------------------------------------------------------------------------
/bind_20062007_pbp.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import parameters
6 | from pathlib import Path
7 |
8 | ### pull common variables from the parameters file
9 | files_root = parameters.files_root
10 | files_20062007 = parameters.files_20062007
11 |
12 | pbp_20062007 = Path(files_20062007).glob('*_pbp.csv')
13 |
14 | header_saved = False
15 | with open(files_root + '20062007_pbp_master.csv', 'w', newline='') as fileout:
16 | for filename in pbp_20062007:
17 | with open(str(filename)) as filein:
18 | header = next(filein)
19 | if not header_saved:
20 | fileout.write(header)
21 | header_saved = True
22 | for line in filein:
23 | fileout.write(line)
24 |
25 |
26 | print('Finished binding the 20062007 play-by-play copies.')
--------------------------------------------------------------------------------
/flush_files.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import os
6 | import glob
7 | from pathlib import Path
8 | import parameters
9 |
10 | def parse_ids(season_id, game_id):
11 |
12 | ### pull common variables from the parameters file
13 | files_root = parameters.files_root
14 |
15 | ### make path
16 | files_path = Path(files_root)
17 |
18 | ### create lists within the files folder
19 | files_list = files_path.glob('*')
20 | schedules_list = files_path.glob('*schedule*')
21 | schedules = [i for i in schedules_list]
22 |
23 | if int(game_id) < 30000:
24 | for i in files_list:
25 | if i not in schedules:
26 | try:
27 | os.remove(i)
28 | except:
29 | continue
30 |
31 | elif int(game_id) > 30000:
32 | for i in files_list:
33 | try:
34 | os.remove(i)
35 | except:
36 | continue
37 |
38 | print('Flushed any preexisting (excluding schedule) files.')
--------------------------------------------------------------------------------
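
Note: a minimal usage sketch for flush_files; the regular-season/playoff split follows the branch on game_id above:

    import flush_files

    ### regular-season game ids (< 30000) keep the cached schedule files;
    ### playoff ids (> 30000) clear everything in files_root
    flush_files.parse_ids('20182019', '20001')
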
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 mikegallimore
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/parameters_sample.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import os
6 |
7 | ### set subfolder paths; check to see if they already exist and, if they don't, create them
8 | files_root = '/yourpath/Files/'
9 | charts_root = '/yourpath/Charts/'
10 |
11 | charts_players = charts_root + 'Players/'
12 |
13 | charts_teams = charts_root + 'Teams/'
14 | charts_teams_period = charts_teams + 'Period/'
15 | charts_teams_situation = charts_teams + 'Situation/'
16 |
17 | charts_units = charts_root + 'Units/'
18 |
19 | if not os.path.exists(files_root):
20 | os.makedirs(files_root)
21 | print('Created subfolder ' + files_root)
22 | if not os.path.exists(charts_root):
23 | os.makedirs(charts_root)
24 | print('Created subfolder ' + charts_root)
25 |
26 | if not os.path.exists(charts_players):
27 | os.makedirs(charts_players)
28 | print('Created subfolder ' + charts_players)
29 |
30 | if not os.path.exists(charts_players):
31 | os.makedirs(charts_players)
32 | print('Created subfolder ' + charts_players)
33 |
34 | if not os.path.exists(charts_teams):
35 | os.makedirs(charts_teams)
36 | print('Created subfolder ' + charts_teams)
37 | if not os.path.exists(charts_teams_period):
38 | os.makedirs(charts_teams_period)
39 | print('Created subfolder ' + charts_teams_period)
40 | if not os.path.exists(charts_teams_situation):
41 | os.makedirs(charts_teams_situation)
42 | print('Created subfolder ' + charts_teams_situation)
43 |
44 | if not os.path.exists(charts_units):
45 | os.makedirs(charts_units)
46 | print('Created subfolder ' + charts_units)
--------------------------------------------------------------------------------
/flush_charts.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import os
6 | import glob
7 | from pathlib import Path
8 | import parameters
9 |
10 | ### pull common variables from the parameters file
11 | charts_root = parameters.charts_root
12 |
13 | charts_players = parameters.charts_players
14 |
15 | charts_teams = parameters.charts_teams
16 | charts_teams_period = parameters.charts_teams_period
17 | charts_teams_situation = parameters.charts_teams_situation
18 |
19 | charts_units = parameters.charts_units
20 |
21 | ### make paths
22 | players_path = Path(charts_players)
23 |
24 | teams_path = Path(charts_teams)
25 | teams_period_path = Path(charts_teams_period)
26 | teams_situation_path = Path(charts_teams_situation)
27 | teams_path = Path(charts_teams)
28 |
29 | units_path = Path(charts_units)
30 |
31 | ### create lists of images in each charts folder
32 | players_list = players_path.glob('*.PNG')
33 |
34 | teams_list = teams_path.glob('*.PNG')
35 | teams_period_list = teams_period_path.glob('*.PNG')
36 | teams_situation_list = teams_situation_path.glob('*.PNG')
37 |
38 | units_list = units_path.glob('*.PNG')
39 |
40 | ### iterate over each list in order to remove all images
41 | for i in players_list:
42 | try:
43 | os.remove(i)
44 | except:
45 | print('Error while deleting file : ', i)
46 |
47 | for i in teams_list:
48 | try:
49 | os.remove(i)
50 |
51 | except:
52 | print('Error while deleting file : ', i)
53 |
54 | for i in teams_period_list:
55 | try:
56 | os.remove(i)
57 | except:
58 | print('Error while deleting file : ', i)
59 |
60 | for i in teams_situation_list:
61 | try:
62 | os.remove(i)
63 | except:
64 | print('Error while deleting file : ', i)
65 |
66 | for i in units_list:
67 | try:
68 | os.remove(i)
69 | except:
70 | print('Error while deleting file : ', i)
71 |
72 | print('Flushed any preexisting charts.')
--------------------------------------------------------------------------------
/run_tweets.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import argparse
6 |
7 | parser = argparse.ArgumentParser()
8 |
9 |
10 | ###
11 | ### COMMAND LINE ARGUMENTS
12 | ###
13 |
14 | parser.add_argument('season_id', help='Set to [8-digit season number] (e.g. 20182019)')
15 | parser.add_argument('game_id', help='Set to [5-digit game number] (e.g. 20001)')
16 |
17 | parser.add_argument('--teams', dest='teams', help='Set to [no] to bypass tweeting team charts', required=False)
18 | parser.add_argument('--players', dest='players', help='Set to [no] to bypass tweeting all players charts', required=False)
19 | parser.add_argument('--units', dest='units', help='Set to [no] to bypass tweeting all units charts', required=False)
20 |
21 | parser.add_argument('--scope', dest='scope', help='Set to [more] to run additional scripts for teams, players and units', required=False)
22 |
23 | args = parser.parse_args()
24 |
25 |
26 | ###
27 | ### Teams
28 | ###
29 |
30 | if args.teams != 'no':
31 | import tweet_teams_gameflow
32 | tweet_teams_gameflow.parse_ids(args.season_id, args.game_id)
33 | tweet_teams_gameflow
34 |
35 | import tweet_teams_shotmaps
36 | tweet_teams_shotmaps.parse_ids(args.season_id, args.game_id)
37 | tweet_teams_shotmaps
38 |
39 |
40 | ###
41 | ### Players
42 | ###
43 |
44 | if args.players != 'no':
45 | import tweet_players_gamescore
46 | tweet_players_gamescore.parse_ids(args.season_id, args.game_id)
47 | tweet_players_gamescore
48 |
49 | import tweet_players_individual
50 | tweet_players_individual.parse_ids(args.season_id, args.game_id)
51 | tweet_players_individual
52 |
53 | import tweet_players_onice
54 | tweet_players_onice.parse_ids(args.season_id, args.game_id)
55 | tweet_players_onice
56 |
57 |
58 | ###
59 | ### Units
60 | ###
61 |
62 | if args.units != 'no':
63 | import tweet_units_onice
64 | tweet_units_onice.parse_ids(args.season_id, args.game_id)
65 | tweet_units_onice
66 |
67 | if args.units != 'no' and args.scope == 'more':
68 | import tweet_units_lines_matchups_lines
69 | tweet_units_lines_matchups_lines.parse_ids(args.season_id, args.game_id)
70 | tweet_units_lines_matchups_lines
71 |
72 | import tweet_units_lines_matchups_pairings
73 | tweet_units_lines_matchups_pairings.parse_ids(args.season_id, args.game_id)
74 | tweet_units_lines_matchups_pairings
75 |
76 | import tweet_units_lines_teammates_pairings
77 | tweet_units_lines_teammates_pairings.parse_ids(args.season_id, args.game_id)
78 | tweet_units_lines_teammates_pairings
--------------------------------------------------------------------------------
/environment.yaml:
--------------------------------------------------------------------------------
1 | name: NHL_Single
2 | channels:
3 | - defaults
4 | dependencies:
5 | - alabaster=0.7.12=py36_0
6 | - asn1crypto=0.24.0=py36_0
7 | - babel=2.6.0=py36_0
8 | - beautifulsoup4=4.6.3=py36_0
9 | - blas=1.0=mkl
10 | - blinker=1.4=py36_0
11 | - ca-certificates=2019.11.27=0
12 | - certifi=2019.11.28=py36_0
13 | - cffi=1.12.3=py36h7a1dbc1_0
14 | - chardet=3.0.4=py36_1
15 | - cryptography=2.6.1=py36h7a1dbc1_0
16 | - cycler=0.10.0=py36h009560c_0
17 | - docutils=0.14=py36h6012d8f_0
18 | - freetype=2.9.1=ha9979f8_1
19 | - icc_rt=2019.0.0=h0cc432a_1
20 | - icu=58.2=ha66f8fd_1
21 | - idna=2.8=py36_0
22 | - imagesize=1.1.0=py36_0
23 | - intel-openmp=2019.3=203
24 | - jinja2=2.10.1=py36_0
25 | - jpeg=9b=hb83a4c4_2
26 | - kiwisolver=1.0.1=py36h6538335_0
27 | - libpng=1.6.37=h2a8f88b_0
28 | - markupsafe=1.1.1=py36he774522_0
29 | - matplotlib=3.0.1=py36hc8f65d3_0
30 | - mkl=2018.0.3=1
31 | - mkl_fft=1.0.6=py36hdbbee80_0
32 | - mkl_random=1.0.1=py36h77b88f5_1
33 | - numpy=1.15.0=py36h9fa60d3_0
34 | - numpy-base=1.15.0=py36h4a99626_0
35 | - numpydoc=0.8.0=py36_0
36 | - oauthlib=2.1.0=py36_0
37 | - openssl=1.1.1c=he774522_1
38 | - packaging=19.0=py36_0
39 | - pandas=0.23.4=py36h830ac7b_0
40 | - pandoc=2.2.1=h1a437c5_0
41 | - pandocfilters=1.4.2=py36_1
42 | - patsy=0.5.1=py36_0
43 | - pip=10.0.1=py36_0
44 | - pycparser=2.19=py36_0
45 | - pygments=2.3.1=py36_0
46 | - pyjwt=1.7.1=py36_0
47 | - pyopenssl=19.0.0=py36_0
48 | - pyparsing=2.4.0=py_0
49 | - pyqt=5.9.2=py36h6538335_2
50 | - pysocks=1.6.8=py36_0
51 | - python=3.6.8=h9f7ef89_7
52 | - python-dateutil=2.8.0=py36_0
53 | - pytz=2019.1=py_0
54 | - qt=5.9.7=vc14h73c81de_0
55 | - requests=2.21.0=py36_0
56 | - requests-oauthlib=1.2.0=py_0
57 | - scikit-learn=0.20.1=py36hb854c30_0
58 | - scipy=1.1.0=py36hc28095f_0
59 | - seaborn=0.9.0=py36_0
60 | - selenium=3.141.0=py36he774522_0
61 | - setuptools=41.0.0=py36_0
62 | - sip=4.19.8=py36h6538335_0
63 | - six=1.12.0=py36_0
64 | - snowballstemmer=1.2.1=py36h763602f_0
65 | - sphinx=2.0.1=py_0
66 | - sphinxcontrib-applehelp=1.0.1=py_0
67 | - sphinxcontrib-devhelp=1.0.1=py_0
68 | - sphinxcontrib-htmlhelp=1.0.2=py_0
69 | - sphinxcontrib-jsmath=1.0.1=py_0
70 | - sphinxcontrib-qthelp=1.0.2=py_0
71 | - sphinxcontrib-serializinghtml=1.1.3=py_0
72 | - sqlite=3.28.0=he774522_0
73 | - statsmodels=0.9.0=py36h452e1ab_0
74 | - tbb=2019.4=h74a9793_0
75 | - tbb4py=2019.4=py36h74a9793_0
76 | - tornado=6.0.2=py36he774522_0
77 | - twython=3.7.0=py36_0
78 | - urllib3=1.24.2=py36_0
79 | - vc=14.1=h0510ff6_4
80 | - vs2015_runtime=14.15.26706=h3a45250_0
81 | - wheel=0.33.1=py36_0
82 | - win_inet_pton=1.1.0=py36_0
83 | - wincertstore=0.2=py36h7fe50ca_0
84 | - zlib=1.2.11=h62dcd97_3
85 | prefix: C:\Users\yourprofile\Miniconda3\envs\NHL_Single
86 |
87 |
--------------------------------------------------------------------------------
/dict_team_colors.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | team_color_1st = {
7 | 'ANA': '#F47A38',
8 | 'ARI': '#8C2633',
9 | 'BOS': '#FFB81C',
10 | 'BUF': '#002654',
11 | 'CAR': '#CC0000',
12 | 'CBJ': '#002654',
13 | 'CGY': '#C8102E',
14 | 'CHI': '#CF0A2C',
15 | 'COL': '#6F263D',
16 | 'DAL': '#006847',
17 | 'DET': '#CE1126',
18 | 'EDM': '#041E42',
19 | 'FLA': '#041E42',
20 | 'LAK': '#111111',
21 | 'MIN': '#154734',
22 | 'MTL': '#AF1E2D',
23 | 'NJD': '#CE1126',
24 | 'NSH': '#FFB81C',
25 | 'NYI': '#00539B',
26 | 'NYR': '#0038A8',
27 | 'OTT': '#C52032',
28 | 'PHI': '#F74902',
29 | 'PIT': '#FCB514',
30 | 'SJS': '#006D75',
31 | 'STL': '#002F87',
32 | 'TBL': '#002868',
33 | 'TOR': '#00205B',
34 | 'VAN': '#00205B',
35 | 'VGK': '#B4975A',
36 | 'WSH': '#C8102E',
37 | 'WPG': '#004C97'
38 | }
39 |
40 | team_color_2nd = {
41 | 'ANA': '#B9975B',
42 | 'ARI': '#E2D6B5',
43 | 'BOS': '#000000',
44 | 'BUF': '#FCB514',
45 | 'CAR': '#000000',
46 | 'CBJ': '#CE1126',
47 | 'CGY': '#F1BE48',
48 | 'CHI': '#000000',
49 | 'COL': '#236192',
50 | 'DAL': '#8F8F8C',
51 | 'EDM': '#FF4C00',
52 | 'FLA': '#C8102E',
53 | 'LAK': '#A2AAAD',
54 | 'MIN': '#A6192E',
55 | 'MTL': '#192168',
56 | 'NJD': '#000000',
57 | 'NSH': '#041E42',
58 | 'NYI': '#F47D30',
59 | 'NYR': '#CE1126',
60 | 'OTT': '#000000',
61 | 'PHI': '#000000',
62 | 'PIT': '#000000',
63 | 'SJS': '#EA7200',
64 | 'STL': '#FCB514',
65 | 'TBL': '#000000',
66 | 'VAN': '#00843D',
67 | 'VGK': '#333F42',
68 | 'WSH': '#041E42',
69 | 'WPG': '#041E42'
70 | }
71 |
72 | team_color_3rd = {
73 | 'ANA': '#00685E',
74 | 'ARI': '#111111',
75 | 'BUF': '#ADAFAA',
76 | 'CAR': '#A2AAAD',
77 | 'CBJ': '#A4A9AD',
78 | 'CGY': '#111111',
79 | 'CHI': '#00833E',
80 | 'COL': '#A2AAAD',
81 | 'DAL': '#111111',
82 | 'FLA': '#B9975B',
83 | 'LAK': '#572A84',
84 | 'MIN': '#DDCBA4',
85 | 'OTT': '#C2912C',
86 | 'PHI': '#000000',
87 | 'PIT': '#CFC493',
88 | 'SJS': '#000000',
89 | 'VAN': '#99999A',
90 | 'WPG': '#7B303E'
91 | }
92 |
93 | team_color_4th = {
94 | 'ANA': '#532A44',
95 | }
--------------------------------------------------------------------------------
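
Note: an illustrative sketch of picking a team's plotting colors with fallbacks, since only some teams define 2nd, 3rd and 4th colors (the lookup pattern is hypothetical, not taken from a specific chart module):

    import dict_team_colors

    team = 'DET'
    primary = dict_team_colors.team_color_1st[team]
    ### DET has no entry in the secondary or tertiary dicts, so fall back gracefully
    secondary = dict_team_colors.team_color_2nd.get(team, primary)
    tertiary = dict_team_colors.team_color_3rd.get(team, secondary)
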
/run_parse.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import argparse
6 |
7 | parser = argparse.ArgumentParser()
8 |
9 |
10 | ###
11 | ### COMMAND LINE ARGUMENTS
12 | ###
13 |
14 | parser.add_argument('season_id', help='Set to [8-digit season number] (e.g. 20182019)')
15 | parser.add_argument('game_id', help='Set to [5-digit game number] (e.g. 20001)')
16 |
17 | parser.add_argument('--rosters', dest='rosters', help='Can set to [true]', required=False)
18 | parser.add_argument('--shifts', dest='shifts', help='Can set to [true]', required=False)
19 | parser.add_argument('--pbp', dest='pbp', help='Can set to [true]', required=False)
20 | parser.add_argument('--toi', dest='toi', help='Can set to [true]', required=False)
21 | parser.add_argument('--merge_pbp', dest='merge_pbp', help='Can set to [true]', required=False)
22 | parser.add_argument('--xg', dest='xg', help='Can set to [true]', required=False)
23 |
24 | # for use with the 20062007 season only
25 | parser.add_argument('--load_pbp', dest='load_pbp', help='Setting to [true] will load a stored play-by-play file', required=False)
26 |
27 | # for instances where players are recorded in the lineup at one position but actually playing another (e.g. Luke Witkowski listed as a D while playing F)
28 | parser.add_argument('--switch_F2D', dest='switch_F2D', help='Set to [player name] (e.g. Luke_Witkowski)', required=False)
29 | parser.add_argument('--switch_D2F', dest='switch_D2F', help='Set to [player name] (e.g. Luke_Witkowski)', required=False)
30 |
31 | args = parser.parse_args()
32 |
33 |
34 | ###
35 | ### PARSE FILES
36 | ###
37 |
38 | ##
39 | ## Rosters
40 | ##
41 |
42 | if (args.rosters == 'true'
43 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
44 |
45 | import files_parse_rosters
46 | files_parse_rosters.parse_ids(args.season_id, args.game_id, args.switch_F2D, args.switch_D2F)
47 | files_parse_rosters
48 |
49 | ##
50 | ## Shifts
51 | ##
52 |
53 | if (args.shifts == 'true'
54 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
55 |
56 | import files_parse_shifts
57 | files_parse_shifts.parse_ids(args.season_id, args.game_id)
58 | files_parse_shifts
59 |
60 | ##
61 | ## Play-by-Play
62 | ##
63 |
64 | if (args.pbp == 'true'
65 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
66 |
67 | import files_parse_pbp
68 | files_parse_pbp.parse_ids(args.season_id, args.game_id, args.load_pbp)
69 | files_parse_pbp
70 |
71 | ##
72 | ## Time on Ice
73 | ##
74 |
75 | if (args.toi == 'true'
76 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
77 |
78 | import files_parse_toi
79 | files_parse_toi.parse_ids(args.season_id, args.game_id, args.load_pbp)
80 | files_parse_toi
81 |
82 | ##
83 | ## Merge
84 | ##
85 |
86 | if (args.merge_pbp == 'true'
87 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
88 | import files_parse_merge_pbp
89 | files_parse_merge_pbp.parse_ids(args.season_id, args.game_id, args.load_pbp)
90 | files_parse_merge_pbp
91 |
92 | ##
93 | ## Expected Goals
94 | ##
95 |
96 | if (args.xg == 'true'
97 | or args.rosters is None and args.shifts is None and args.pbp is None and args.toi is None and args.merge_pbp is None and args.xg is None):
98 |
99 | import files_parse_xg
100 | files_parse_xg.parse_ids(args.season_id, args.game_id)
101 | files_parse_xg
--------------------------------------------------------------------------------
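
Note: the long repeated condition in run_parse.py encodes one rule: a step runs when its own flag is set to 'true', or when no step flags were given at all (in which case every step runs). A hedged sketch of an equivalent helper, assuming the same argparse namespace; this is a reformulation, not code from the repo:

    def run_everything(args):
        ### True when none of the per-step flags were supplied on the command line
        flags = (args.rosters, args.shifts, args.pbp, args.toi, args.merge_pbp, args.xg)
        return all(flag is None for flag in flags)

    ### each step's guard then reads: if args.rosters == 'true' or run_everything(args): ...
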
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # Parameters
7 | parameters.py
8 |
9 | # Twitter Credentials
10 | twitter_credentials.py
11 |
12 | # Folders
13 | Charts/
14 | Files/
15 |
16 | # Images (Players)
17 | Charts/Players/skaters_5v5_onice_shots_away.png
18 | Charts/Players/skaters_5v5_onice_shots_home.png
19 |
20 | # Images (Teams)
21 | Charts/Teams/shots_density.png
22 | Charts/Teams/shots_density_5v5.png
23 | Charts/Teams/shots_density_pp_away.png
24 | Charts/Teams/shots_density_pp_home.png
25 | Charts/Teams/shots_gameflow.png
26 | Charts/Teams/shots_gameflow_5v5.png
27 | Charts/Teams/shots_scatter.png
28 | Charts/Teams/shots_scatter_5v5.png
29 | Charts/Teams/shots_scatter_pp_away.png
30 | Charts/Teams/shots_scatter_pp_home.png
31 |
32 | # Images (Teams: Period)
33 | Charts/Teams/Period/shots_scatter_1st.png
34 | Charts/Teams/Period/shots_scatter_2nd.png
35 | Charts/Teams/Period/shots_scatter_3rd.png
36 | Charts/Teams/Period/shots_scatter_5v5_1st.png
37 | Charts/Teams/Period/shots_scatter_5v5_2nd.png
38 | Charts/Teams/Period/shots_scatter_5v5_3rd.png
39 | Charts/Teams/Period/shots_scatter_pp_away_1st.png
40 | Charts/Teams/Period/shots_scatter_pp_away_2nd.png
41 | Charts/Teams/Period/shots_scatter_pp_away_3rd.png
42 | Charts/Teams/Period/shots_scatter_pp_home_1st.png
43 | Charts/Teams/Period/shots_scatter_pp_home_2nd.png
44 | Charts/Teams/Period/shots_scatter_pp_home_3rd.png
45 |
46 | # Images (Teams: Situation)
47 | Charts/Teams/Situation/shots_scatter_leading.png
48 | Charts/Teams/Situation/shots_scatter_tied.png
49 | Charts/Teams/Situation/shots_scatter_trailing.png
50 | Charts/Teams/Situation/shots_scatter_5v5_leading.png
51 | Charts/Teams/Situation/shots_scatter_5v5_tied.png
52 | Charts/Teams/Situation/shots_scatter_5v5_trailing.png
53 | Charts/Teams/Situation/shots_scatter_pp_away_leading.png
54 | Charts/Teams/Situation/shots_scatter_pp_away_tied.png
55 | Charts/Teams/Situation/shots_scatter_pp_away_trailing.png
56 | Charts/Teams/Situation/shots_scatter_pp_home_leading.png
57 | Charts/Teams/Situation/shots_scatter_pp_home_tied.png
58 | Charts/Teams/Situation/shots_scatter_pp_home_trailing.png
59 |
60 | # Images (Units)
61 | Charts/Units/onice_shots_away.png
62 | Charts/Units/onice_shots_home.png
63 |
64 | # C extensions
65 | *.so
66 |
67 | # Distribution / packaging
68 | .Python
69 | build/
70 | develop-eggs/
71 | dist/
72 | downloads/
73 | eggs/
74 | .eggs/
75 | lib/
76 | lib64/
77 | parts/
78 | sdist/
79 | var/
80 | wheels/
81 | *.egg-info/
82 | .installed.cfg
83 | *.egg
84 | MANIFEST
85 |
86 | # PyInstaller
87 | # Usually these files are written by a python script from a template
88 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
89 | *.manifest
90 | *.spec
91 |
92 | # Installer logs
93 | pip-log.txt
94 | pip-delete-this-directory.txt
95 |
96 | # Unit test / coverage reports
97 | htmlcov/
98 | .tox/
99 | .nox/
100 | .coverage
101 | .coverage.*
102 | .cache
103 | nosetests.xml
104 | coverage.xml
105 | *.cover
106 | .hypothesis/
107 | .pytest_cache/
108 |
109 | # Translations
110 | *.mo
111 | *.pot
112 |
113 | # Django stuff:
114 | *.log
115 | local_settings.py
116 | db.sqlite3
117 |
118 | # Flask stuff:
119 | instance/
120 | .webassets-cache
121 |
122 | # Scrapy stuff:
123 | .scrapy
124 |
125 | # Sphinx documentation
126 | docs/_build/
127 |
128 | # PyBuilder
129 | target/
130 |
131 | # Jupyter Notebook
132 | .ipynb_checkpoints
133 |
134 | # IPython
135 | profile_default/
136 | ipython_config.py
137 |
138 | # pyenv
139 | .python-version
140 |
141 | # celery beat schedule file
142 | celerybeat-schedule
143 |
144 | # SageMath parsed files
145 | *.sage.py
146 |
147 | # Environments
148 | .env
149 | .venv
150 | env/
151 | venv/
152 | ENV/
153 | env.bak/
154 | venv.bak/
155 |
156 | # Spyder project settings
157 | .spyderproject
158 | .spyproject
159 |
160 | # Rope project settings
161 | .ropeproject
162 |
163 | # mkdocs documentation
164 | /site
165 |
166 | # mypy
167 | .mypy_cache/
168 | .dmypy.json
169 | dmypy.json
170 |
171 | # Pyre type checker
172 | .pyre/
173 |
--------------------------------------------------------------------------------
/dict_teams.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | NHL = {'ANAHEIM DUCKS': 'ANA',
7 | 'ARIZONA COYOTES': 'ARI',
8 | 'ATLANTA THRASHERS': 'ATL',
9 | 'BOSTON BRUINS': 'BOS',
10 | 'BUFFALO SABRES': 'BUF',
11 | 'CALGARY FLAMES': 'CGY',
12 | 'CAROLINA HURRICANES': 'CAR',
13 | 'CHICAGO BLACKHAWKS': 'CHI',
14 | 'COLORADO AVALANCHE': 'COL',
15 | 'COLUMBUS BLUE JACKETS': 'CBJ',
16 | 'DALLAS STARS': 'DAL',
17 | 'DETROIT RED WINGS': 'DET',
18 | 'EDMONTON OILERS': 'EDM',
19 | 'FLORIDA PANTHERS': 'FLA',
20 | 'LOS ANGELES KINGS': 'LAK',
21 | 'MINNESOTA WILD': 'MIN',
22 | 'CANADIENS MONTREAL': 'MTL',
23 | 'CANADIENS MONTRÉAL': 'MTL',
24 | 'MONTRÉAL CANADIENS': 'MTL',
25 | 'MONTREAL CANADIENS': 'MTL',
26 | 'NASHVILLE PREDATORS': 'NSH',
27 | 'NEW JERSEY DEVILS': 'NJD',
28 | 'NEW YORK RANGERS': 'NYR',
29 | 'NEW YORK ISLANDERS': 'NYI',
30 | 'OTTAWA SENATORS': 'OTT',
31 | 'PHILADELPHIA FLYERS': 'PHI',
32 | 'PHOENIX COYOTES': 'PHX',
33 | 'PITTSBURGH PENGUINS': 'PIT',
34 | 'SAN JOSE SHARKS': 'SJS',
35 | 'ST. LOUIS BLUES': 'STL',
36 | 'TAMPA BAY LIGHTNING': 'TBL',
37 | 'TORONTO MAPLE LEAFS': 'TOR',
38 | 'VANCOUVER CANUCKS': 'VAN',
39 | 'VEGAS GOLDEN KNIGHTS': 'VGK',
40 | 'WASHINGTON CAPITALS': 'WSH',
41 | 'WINNIPEG JETS': 'WPG'}
42 |
43 | MONIKERS_DICT = {'Ducks': 'ANA',
44 | 'Coyotes': 'ARI',
45 | 'Thrashers': 'ATL',
46 | 'Bruins': 'BOS',
47 | 'Sabres': 'BUF',
48 | 'Flames': 'CGY',
49 | 'Hurricanes': 'CAR',
50 | 'Blackhawks': 'CHI',
51 | 'Avalanche': 'COL',
52 | 'Blue Jackets': 'CBJ',
53 | 'Stars': 'DAL',
54 | 'Red Wings': 'DET',
55 | 'Oilers': 'EDM',
56 | 'Panthers': 'FLA',
57 | 'Kings': 'LAK',
58 | 'Wild': 'MIN',
59 | 'Canadiens': 'MTL',
60 | 'Predators': 'NSH',
61 | 'Devils': 'NJD',
62 | 'Rangers': 'NYR',
63 | 'Islanders': 'NYI',
64 | 'Senators': 'OTT',
65 | 'Flyers': 'PHI',
66 | 'Penguins': 'PIT',
67 | 'Sharks': 'SJS',
68 | 'Blues': 'STL',
69 | 'Lightning': 'TBL',
70 | 'Maple Leafs': 'TOR',
71 | 'Canucks': 'VAN',
72 | 'Golden Knights': 'VGK',
73 | 'Capitals': 'WSH',
74 | 'Jets': 'WPG'}
75 |
76 | MONIKERS_DICT_PHX = {'Ducks': 'ANA',
77 | 'Coyotes': 'PHX',
78 | 'Thrashers': 'ATL',
79 | 'Bruins': 'BOS',
80 | 'Sabres': 'BUF',
81 | 'Flames': 'CGY',
82 | 'Hurricanes': 'CAR',
83 | 'Blackhawks': 'CHI',
84 | 'Avalanche': 'COL',
85 | 'Blue Jackets': 'CBJ',
86 | 'Stars': 'DAL',
87 | 'Red Wings': 'DET',
88 | 'Oilers': 'EDM',
89 | 'Panthers': 'FLA',
90 | 'Kings': 'LAK',
91 | 'Wild': 'MIN',
92 | 'Canadiens': 'MTL',
93 | 'Predators': 'NSH',
94 | 'Devils': 'NJD',
95 | 'Rangers': 'NYR',
96 | 'Islanders': 'NYI',
97 | 'Senators': 'OTT',
98 | 'Flyers': 'PHI',
99 | 'Penguins': 'PIT',
100 | 'Sharks': 'SJS',
101 | 'Blues': 'STL',
102 | 'Lightning': 'TBL',
103 | 'Maple Leafs': 'TOR',
104 | 'Canucks': 'VAN',
105 | 'Golden Knights': 'VGK',
106 | 'Capitals': 'WSH',
107 | 'Jets': 'WPG'}
108 |
109 | MONIKERS_DICT_WPG2ATL = {'Ducks': 'ANA',
110 | 'Coyotes': 'PHX',
111 | 'Thrashers': 'ATL',
112 | 'Bruins': 'BOS',
113 | 'Sabres': 'BUF',
114 | 'Flames': 'CGY',
115 | 'Hurricanes': 'CAR',
116 | 'Blackhawks': 'CHI',
117 | 'Avalanche': 'COL',
118 | 'Blue Jackets': 'CBJ',
119 | 'Stars': 'DAL',
120 | 'Red Wings': 'DET',
121 | 'Oilers': 'EDM',
122 | 'Panthers': 'FLA',
123 | 'Kings': 'LAK',
124 | 'Wild': 'MIN',
125 | 'Canadiens': 'MTL',
126 | 'Predators': 'NSH',
127 | 'Devils': 'NJD',
128 | 'Rangers': 'NYR',
129 | 'Islanders': 'NYI',
130 | 'Senators': 'OTT',
131 | 'Flyers': 'PHI',
132 | 'Penguins': 'PIT',
133 | 'Sharks': 'SJS',
134 | 'Blues': 'STL',
135 | 'Lightning': 'TBL',
136 | 'Maple Leafs': 'TOR',
137 | 'Canucks': 'VAN',
138 | 'Golden Knights': 'VGK',
139 | 'Capitals': 'WSH',
140 | 'Jets': 'ATL'}
141 |
142 | TRICODES = {'BUF': 'BUF',
143 | 'L.A': 'LAK',
144 | 'N.J': 'NJD',
145 | 'S.J': 'SJS',
146 | 'T.B': 'TBL'}
--------------------------------------------------------------------------------
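
Note: these dictionaries normalize the various team-name spellings in the NHL feeds to tricodes; schedule_fetch.py, for example, applies dict_teams.NHL across the HOME and AWAY columns. A small sketch:

    import dict_teams

    dict_teams.NHL['MONTRÉAL CANADIENS']      # 'MTL'
    dict_teams.MONIKERS_DICT['Maple Leafs']   # 'TOR'
    dict_teams.TRICODES['L.A']                # report-style 'L.A' becomes 'LAK'
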
/tweet_players_gamescore.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_players = parameters.charts_players
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | players_gamescores_away = open(charts_players + 'gamescores_away.png', 'rb')
38 | players_gamescores_home = open(charts_players + 'gamescores_home.png', 'rb')
39 |
40 | with open(livefeed_file) as livefeed_json:
41 | livefeed_data = json.load(livefeed_json)
42 |
43 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
44 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
45 | minutes_gone = int()
46 | seconds_gone = int()
47 | regulation_time_gone = str()
48 | ot_time_gone = str()
49 |
50 |     try:
51 |         time_left_split = status.split(':')
52 |         minutes_left = int(time_left_split[0])
53 |         seconds_left = int(time_left_split[1])
54 |         ### convert time remaining in the period into time elapsed
55 |         if seconds_left == 0:
56 |             regulation_minutes_gone, ot_minutes_gone, seconds_gone = 20 - minutes_left, 5 - minutes_left, '00'
57 |         else:
58 |             regulation_minutes_gone, ot_minutes_gone = 19 - minutes_left, 4 - minutes_left
59 |             seconds_gone = str(60 - seconds_left).zfill(2)
60 |         regulation_time_gone = str(regulation_minutes_gone) + ':' + seconds_gone
61 |         ot_time_gone = str(ot_minutes_gone) + ':' + seconds_gone
62 |     except:
63 |         pass
64 |
65 | images = [players_gamescores_away, players_gamescores_home]
66 |
67 | media_ids = []
68 |
69 | for i in images:
70 | response = twitter.upload_media(media=i)
71 | media_ids.append(response['media_id_string'])
72 |
73 | if period == 1 and status != 'END':
74 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) GameScores:', media_ids=media_ids)
75 | elif period == 1 and status == 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) GameScores:', media_ids=media_ids)
77 |
78 | if period == 2 and status != 'END':
79 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) GameScores:', media_ids=media_ids)
80 | elif period == 2 and status == 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) GameScores:', media_ids=media_ids)
82 |
83 | if period == 3 and status != 'END' and status != 'Final':
84 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) GameScores:', media_ids=media_ids)
85 | elif period == 3 and status == 'END':
86 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) GameScores:', media_ids=media_ids)
87 | elif period == 3 and status == 'Final':
88 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameScores:', media_ids=media_ids)
89 |
90 | if period == 4 and status != 'END' and status != 'Final':
91 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) GameScores:', media_ids=media_ids)
92 | elif period == 4 and status == 'END':
93 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) GameScores:', media_ids=media_ids)
94 | elif period == 4 and status == 'Final':
95 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameScores:', media_ids=media_ids)
96 |
97 | if period == 5 and status != 'Final':
98 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) GameScores:', media_ids=media_ids)
99 | elif period == 5 and status == 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameScores:', media_ids=media_ids)
101 |
102 |
103 | print('Tweeted player GameScores.')
--------------------------------------------------------------------------------
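
Note: run_tweets.py drives this module (and the other tweet_* modules) with the same call; a minimal standalone invocation, assuming the game's charts and livefeed.json have already been generated:

    import tweet_players_gamescore

    ### expects gamescores_away.png and gamescores_home.png in charts_players,
    ### plus livefeed.json in files_root
    tweet_players_gamescore.parse_ids('20182019', '20001')
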
/tweet_teams_shots_scatter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import parameters
7 | import pandas as pd
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_teams = parameters.charts_teams
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | shots_all = open(charts_teams + 'shots_scatter.png', 'rb')
38 | shots_5v5 = open(charts_teams + 'shots_scatter_5v5.png', 'rb')
39 | shots_PP_away = open(charts_teams + 'shots_scatter_pp_away.png', 'rb')
40 | shots_PP_home = open(charts_teams + 'shots_scatter_pp_home.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 |     try:
53 |         time_left_split = status.split(':')
54 |         minutes_left = int(time_left_split[0])
55 |         seconds_left = int(time_left_split[1])
56 |         ### convert time remaining in the period into time elapsed
57 |         if seconds_left == 0:
58 |             regulation_minutes_gone, ot_minutes_gone, seconds_gone = 20 - minutes_left, 5 - minutes_left, '00'
59 |         else:
60 |             regulation_minutes_gone, ot_minutes_gone = 19 - minutes_left, 4 - minutes_left
61 |             seconds_gone = str(60 - seconds_left).zfill(2)
62 |         regulation_time_gone = str(regulation_minutes_gone) + ':' + seconds_gone
63 |         ot_time_gone = str(ot_minutes_gone) + ':' + seconds_gone
64 |     except:
65 |         pass
66 |
67 | images = [shots_all, shots_5v5, shots_PP_away, shots_PP_home]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) shot maps:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) shot maps:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) shot maps:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) shot maps:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) shot maps:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) shot maps:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) shot maps:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted the scatter plot shot maps.')
--------------------------------------------------------------------------------
/tweet_teams_shots_scatter_period.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import parameters
7 | import pandas as pd
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_teams = parameters.charts_teams
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | shots_all = open(charts_teams + 'shots_scatter.png', 'rb')
38 | shots_5v5 = open(charts_teams + 'shots_scatter_5v5.png', 'rb')
39 | shots_PP_away = open(charts_teams + 'shots_scatter_pp_away.png', 'rb')
40 | shots_PP_home = open(charts_teams + 'shots_scatter_pp_home.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 |     try:
53 |         time_left_split = status.split(':')
54 |         minutes_left = int(time_left_split[0])
55 |         seconds_left = int(time_left_split[1])
56 |         ### convert time remaining in the period into time elapsed
57 |         if seconds_left == 0:
58 |             regulation_minutes_gone, ot_minutes_gone, seconds_gone = 20 - minutes_left, 5 - minutes_left, '00'
59 |         else:
60 |             regulation_minutes_gone, ot_minutes_gone = 19 - minutes_left, 4 - minutes_left
61 |             seconds_gone = str(60 - seconds_left).zfill(2)
62 |         regulation_time_gone = str(regulation_minutes_gone) + ':' + seconds_gone
63 |         ot_time_gone = str(ot_minutes_gone) + ':' + seconds_gone
64 |     except:
65 |         pass
66 |
67 | images = [shots_all, shots_5v5, shots_PP_away, shots_PP_home]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) shot maps:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) shot maps:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) shot maps:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) shot maps:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) shot maps:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) shot maps:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) shot maps:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted the scatter plot shot maps.')
--------------------------------------------------------------------------------
/tweet_teams_shots_scatter_situation.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import parameters
7 | import pandas as pd
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_teams = parameters.charts_teams
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | shots_all = open(charts_teams + 'shots_scatter.png', 'rb')
38 | shots_5v5 = open(charts_teams + 'shots_scatter_5v5.png', 'rb')
39 | shots_PP_away = open(charts_teams + 'shots_scatter_pp_away.png', 'rb')
40 | shots_PP_home = open(charts_teams + 'shots_scatter_pp_home.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 |     try:
53 |         time_left_split = status.split(':')
54 |         minutes_left = int(time_left_split[0])
55 |         seconds_left = int(time_left_split[1])
56 |         ### convert time remaining in the period into time elapsed
57 |         if seconds_left == 0:
58 |             regulation_minutes_gone, ot_minutes_gone, seconds_gone = 20 - minutes_left, 5 - minutes_left, '00'
59 |         else:
60 |             regulation_minutes_gone, ot_minutes_gone = 19 - minutes_left, 4 - minutes_left
61 |             seconds_gone = str(60 - seconds_left).zfill(2)
62 |         regulation_time_gone = str(regulation_minutes_gone) + ':' + seconds_gone
63 |         ot_time_gone = str(ot_minutes_gone) + ':' + seconds_gone
64 |     except:
65 |         pass
66 |
67 | images = [shots_all, shots_5v5, shots_PP_away, shots_PP_home]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) shot maps:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) shot maps:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) shot maps:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) shot maps:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) shot maps:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) shot maps:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) shot maps:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) shot maps:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) shot maps:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted the scatter plot shot maps.')
--------------------------------------------------------------------------------
/schedule_fetch.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | import requests
7 | import json
8 | import csv
9 | import pandas as pd
10 | import parameters
11 | from pathlib import Path
12 | import dict_teams
13 |
14 | def parse_ids(season_id):
15 |
16 | ### pull common variables from the parameters file
17 | files_root = parameters.files_root
18 |
19 |     ### create variables that point to the JSON and .csv schedule files
20 | JSON_schedule = files_root + season_id + "_schedule.json"
21 | schedule_csv = files_root + season_id + "_schedule.csv"
22 |
23 | ### retrieve the JSON schedule information
24 | try:
25 | year_start = season_id[0:4]
26 | year_end = season_id[4:8]
27 |
28 | if int(season_id) != 20192020:
29 | JSON_schedule_url = 'https://statsapi.web.nhl.com/api/v1/schedule?startDate=' + year_start + '-08-30&endDate=' + year_end + '-06-30'
30 | if int(season_id) == 20192020:
31 | JSON_schedule_url = 'https://statsapi.web.nhl.com/api/v1/schedule?startDate=' + year_start + '-08-30&endDate=' + year_end + '-10-30'
32 |
33 | JSON_schedule_request = requests.get(JSON_schedule_url, timeout=5).text
34 |
35 | f = open(files_root + season_id + '_schedule.json', 'w+')
36 | f.write(JSON_schedule_request)
37 | f.close()
38 | print('Retrieved NHL schedule (JSON) for ' + season_id)
39 | except:
40 |         print('ERROR: Could not retrieve the season schedule (JSON) for ' + season_id)
41 |
42 | ###
43 | ### SCHEDULE (JSON)
44 | ###
45 |
46 | with open(JSON_schedule) as JSON_schedule_in:
47 | JSON_schedule_parsed = json.load(JSON_schedule_in)
48 |
49 | JSON_game_dates = JSON_schedule_parsed['dates']
50 |
51 | ### begin the portion of the script that handles the csv generation
52 | with open(schedule_csv, 'w', newline='') as schedule_out:
53 | JSON_csvWriter = csv.writer(schedule_out)
54 |
55 | JSON_csvWriter.writerow(['SEASON', 'GAME_ID', 'DATE', 'HOME', 'AWAY'])
56 |
57 | for JSON_allgames in JSON_game_dates:
58 | JSON_dates = JSON_allgames["date"]
59 |
60 | JSON_games = JSON_allgames['games']
61 |
62 | for JSON_game in JSON_games:
63 |
64 | JSON_seasonid = JSON_game["season"]
65 |
66 | JSON_game_id = str(JSON_game["gamePk"])[5:]
67 | JSON_game_id = int(JSON_game_id)
68 |
69 | if JSON_game_id > 39999:
70 | continue
71 |
72 | JSON_date = JSON_dates
73 | JSON_date_split = JSON_date.split('-')
74 | JSON_year = JSON_date_split[0]
75 | JSON_month = JSON_date_split[1]
76 | JSON_day = JSON_date_split[2]
77 | JSON_date = JSON_month + '/' + JSON_day + '/' + JSON_year
78 |
79 | JSON_home = JSON_game["teams"]["home"]["team"]["name"].upper()
80 | JSON_away = JSON_game["teams"]["away"]["team"]["name"].upper()
81 |
82 | JSON_game_data = (JSON_seasonid, JSON_game_id, JSON_date, JSON_home, JSON_away)
83 |
84 |                 ### write the row of game data to the csv file
85 | JSON_csvWriter.writerows([JSON_game_data])
86 |
87 | try:
88 | ### reload the newly minted csv file to replace the team names with their tricodes
89 | schedule_df = pd.read_csv(schedule_csv)
90 |
91 | schedule_df = schedule_df[(schedule_df.GAME_ID < 40000)].sort_values('GAME_ID')
92 |
93 | schedule_df['AWAY'] = schedule_df['AWAY'].replace(dict_teams.NHL)
94 | schedule_df['HOME'] = schedule_df['HOME'].replace(dict_teams.NHL)
95 |
96 | schedule_df.to_csv(schedule_csv, index = False)
97 |
98 | except:
99 | ### reload the newly minted csv file to replace the team names with their tricodes
100 | schedule_df = pd.read_csv(schedule_csv, encoding='latin-1')
101 |
102 | schedule_df = schedule_df[(schedule_df.GAME_ID < 40000)].sort_values('GAME_ID')
103 |
104 | schedule_df['AWAY'] = schedule_df['AWAY'].replace(dict_teams.NHL)
105 | schedule_df['HOME'] = schedule_df['HOME'].replace(dict_teams.NHL)
106 |
107 | schedule_df.to_csv(schedule_csv, index = False)
108 |
109 |
110 | print('Finished parsing the NHL schedule for ' + season_id)
111 |
112 |
113 | schedules_path = Path(files_root).glob('*_schedule.csv')
114 |
115 | header_saved = False
116 | with open('schedule.csv', 'w', newline='') as fileout:
117 | for filename in schedules_path:
118 | with open(str(filename)) as filein:
119 | header = next(filein)
120 | if not header_saved:
121 | fileout.write(header)
122 | header_saved = True
123 | for line in filein:
124 | fileout.write(line)
125 |
126 | print('Finished binding the schedules since 20062007.')
--------------------------------------------------------------------------------
/tweet_teams_gameflow.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import parameters
7 | import pandas as pd
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_teams = parameters.charts_teams
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | shots_gameflow = open(charts_teams + 'shots_gameflow.png', 'rb')
38 | shots_gameflow_5v5 = open(charts_teams + 'shots_gameflow_5v5.png', 'rb')
39 | shots_gameflow_xg = open(charts_teams + 'shots_gameflow_xg.png', 'rb')
40 | shots_gameflow_xg_5v5 = open(charts_teams + 'shots_gameflow_xg_5v5.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 | try:
53 | time_left_split = status.split(':')
54 | regulation_minutes_gone = 20 - int(time_left_split[0])
55 | ot_minutes_gone = 5 - int(time_left_split[0])
56 |         if int(time_left_split[1]) != 0:
57 |             seconds_gone = 60 - int(time_left_split[1])
58 |             regulation_minutes_gone = regulation_minutes_gone - 1
59 |             ot_minutes_gone = ot_minutes_gone - 1
60 |         if seconds_gone < 10:
61 |             seconds_gone = '0' + str(seconds_gone)
62 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
63 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
64 | except:
65 | pass
66 |
67 | images = [shots_gameflow, shots_gameflow_5v5, shots_gameflow_xg, shots_gameflow_xg_5v5]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) GameFlow for shots, xG:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) GameFlow for shots, xG:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) GameFlow for shots, xG:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) GameFlow for shots, xG:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) GameFlow for shots, xG:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) GameFlow for shots, xG:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameFlow for shots, xG:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) GameFlow for shots, xG:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) GameFlow for shots, xG:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameFlow for shots, xG:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) GameFlow for shots, xG:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) GameFlow for shots, xG:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted GameFlow charts.')
--------------------------------------------------------------------------------
/tweet_units_onice.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_units = parameters.charts_units
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | units_onice_shots_away = open(charts_units + 'onice_shots_away.png', 'rb')
38 | units_onice_shots_home = open(charts_units + 'onice_shots_home.png', 'rb')
39 |
40 | units_onice_xg_away = open(charts_units + 'onice_xg_away.png', 'rb')
41 | units_onice_xg_home = open(charts_units + 'onice_xg_home.png', 'rb')
42 |
43 | with open(livefeed_file) as livefeed_json:
44 | livefeed_data = json.load(livefeed_json)
45 |
46 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
47 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
48 | minutes_gone = int()
49 | seconds_gone = int()
50 | regulation_time_gone = str()
51 | ot_time_gone = str()
52 |
53 | try:
54 | time_left_split = status.split(':')
55 | regulation_minutes_gone = 20 - int(time_left_split[0])
56 | ot_minutes_gone = 5 - int(time_left_split[0])
57 |         if int(time_left_split[1]) != 0:
58 |             seconds_gone = 60 - int(time_left_split[1])
59 |             regulation_minutes_gone = regulation_minutes_gone - 1
60 |             ot_minutes_gone = ot_minutes_gone - 1
61 |         if seconds_gone < 10:
62 |             seconds_gone = '0' + str(seconds_gone)
63 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
64 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
65 | except:
66 | pass
67 |
68 | images = [units_onice_shots_away, units_onice_shots_home, units_onice_xg_away, units_onice_xg_home]
69 |
70 | media_ids = []
71 |
72 | for i in images:
73 | response = twitter.upload_media(media=i)
74 | media_ids.append(response['media_id_string'])
75 |
76 | if period == 1 and status != 'END':
77 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) on-ice shots, xG for units:', media_ids=media_ids)
78 | elif period == 1 and status == 'END':
79 |         twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) on-ice shots, xG for units:', media_ids=media_ids)
80 |
81 | if period == 2 and status != 'END':
82 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) on-ice shots, xG for units:', media_ids=media_ids)
83 | elif period == 2 and status == 'END':
84 |         twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) on-ice shots, xG for units:', media_ids=media_ids)
85 |
86 | if period == 3 and status != 'END' and status != 'Final':
87 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) on-ice shots, xG for units:', media_ids=media_ids)
88 | elif period == 3 and status == 'END':
89 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) on-ice shots, xG for units:', media_ids=media_ids)
90 | elif period == 3 and status == 'Final':
91 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for units:', media_ids=media_ids)
92 |
93 | if period == 4 and status != 'END' and status != 'Final':
94 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) on-ice shots, xG for units:', media_ids=media_ids)
95 | elif period == 4 and status == 'END':
96 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) on-ice shots, xG for units:', media_ids=media_ids)
97 | elif period == 4 and status == 'Final':
98 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for units:', media_ids=media_ids)
99 |
100 | if period == 5 and status != 'Final':
101 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) on-ice shots, xG for units:', media_ids=media_ids)
102 | elif period == 5 and status == 'Final':
103 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for units:', media_ids=media_ids)
104 |
105 |
106 | print('Tweeted unit on-ice shots, xG.')
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## NHL_Single
2 | A tool for scraping and parsing the available source files for any NHL game, beginning with the 20062007 season, in order to generate tabular and visualized data.
3 |
4 | ### Prerequisites
5 | At minimum, a vanilla installation of Python3 (code written with 3.6.5; tested with 3.6.8). If you don't already have Python on your machine, the simplest remedy is to install the Anaconda Distribution or, alternatively, Miniconda.
6 |
7 | ### Setup
8 | 1. Clone/Download the NHL_Single files
9 | 2. Move the folder containing the NHL_Single files wherever you want them stored on your machine
10 | 3. Using your machine's command-line interface (for Anaconda or Miniconda, this is Anaconda Prompt):
11 | a. Change the working directory to the NHL_Single folder
12 | b. Create a new environment
13 | c. Install dependencies
14 |
15 | ##### Anaconda or Miniconda as package and environment manager
16 | conda env create -n environment_name -f environment.yaml
17 |
18 | ##### PIP as package manager; Pipenv as environment manager
19 | pipenv install
20 |
21 | ##### PIP as package manager; venv as environment manager
22 | py -m venv env
23 | pip install -r requirements.txt
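
With venv, remember to activate the environment before installing the requirements or running anything; on Windows this is a command along these lines (the exact form depends on your shell):

    env\Scripts\activate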
24 |
25 | 4. If you intend on using the script's Twitter functionality:
26 | a. Create a Twitter application and request its Access Token
27 | b. Rename twitter_credentials_sample.py to twitter_credentials.py
28 | c. Input your own APP_KEY, APP_SECRET, OAUTH_TOKEN and OAUTH_TOKEN_SECRET
29 |
30 | 5. Rename parameters_sample.py to parameters.py and update the files_root and charts_root objects with the path to your NHL_Single folder
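
Those two objects are plain path strings that the scripts join directly with file names, so keep the trailing slash; a minimal sketch of parameters.py with hypothetical paths:

    files_root = 'C:/NHL_Single/files/'
    charts_root = 'C:/NHL_Single/charts/'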
31 |
32 | ### Usage
33 | You must be in your machine's command-line interface and have activated the environment you created.
34 |
35 | ##### Game
36 | At minimum, you must indicate two positional arguments, which are the particular season and the 5-digit game number:
37 | python run_game.py 20182019 20001
38 |
39 | There are optional arguments available for you to tack onto the basic structure as well:
40 | --fetch set to 'skip'
41 | --images set to 'show'
42 | --load_pbp set to 'true' (pertains to 20062007 only, as explained in the 'Notes' section below)
43 | --parse set to 'skip'
44 | --players set to 'skip'
45 | --scope set to 'full'
46 | --teams set to 'skip'
47 | --tweet set to 'no'
48 | --units set to 'skip'
49 |
50 | To re-run a game without fetching and parsing the game files again, and without tweeting any generated charts:
51 | python run_game.py 20182019 20001 --fetch skip --parse skip --tweet no
52 |
53 | To generate all of the team, player and unit tables and charts possible, as well as have the charts display on your screen as they're made:
54 | python run_game.py 20182019 20001 --scope full --images show
55 |
56 | Reference the required and optional arguments in the shell by typing the following:
57 | python run_game.py -h
58 |
59 | ##### Schedule
60 | To fetch a season's schedule manually, indicate the particular season as demonstrated below:
61 | python run_schedule.py 20182019
62 |
63 | ### Notes
64 |
65 | ##### What's Next?
66 | NHL_Single, as its name implies, only processes one game at a time. With the release of v2.0--which extended compatibility back to the 20062007 season--the next major change will be to enable fetching and parsing a range of games within a season.
67 |
68 | ##### Compatibility
69 | This tool was developed and tested within Windows. It may (or may not!) play nice with other operating systems, for which testing and feedback are most welcome!
70 |
71 | ##### The Optional 'load_pbp' Command-Line Argument
72 | An inelegant solution to a persistent problem exclusive to the 20062007 season, arising from the complexity of parsing that season's unique play-by-play formatting: NUL bytes somehow wind up in the play-by-play data, halting any further processing of the rows that follow them. If you care for further elaboration, hit me up on Twitter; the result, simply put, is the loss of some events in the play-by-play output.
73 |
74 | After much trial and all error, NHL_Single lacks a means for automating the removal of the NUL bytes. Thus the necessity, at present, for manually doing so--which is easy enough in Excel with a macro but obviously tedious--in order to get a complete play-by-play file as output.
75 |
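If you would rather script that cleanup than run an Excel macro, stripping the NUL bytes takes only a few lines of plain Python; a minimal sketch, with a hypothetical path standing in for wherever the affected play-by-play file is saved:

    pbp_path = 'C:/NHL_Single/20062007_20001_pbp.HTM'  # hypothetical path

    # read the file as raw bytes, drop the NUL bytes and write the result back out
    with open(pbp_path, 'rb') as infile:
        cleaned = infile.read().replace(b'\x00', b'')
    with open(pbp_path, 'wb') as outfile:
        outfile.write(cleaned)
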
76 | To spare any potential user the drudgery, this Dropbox folder contains purged 20062007 play-by-play files that users should download before using the optional 'load_pbp' command-line argument, as shown below:
77 | python run_game.py 20062007 20001 --load_pbp true
78 |
79 | ### Acknowledgements
80 | A lot of people have, in varying ways (be it by patiently answering questions, making their own code available for consultation, offering suggestions or simply encouragement), helped me learn enough to put this thing together. I am grateful for all of the feedback received and resources made available.
81 |
--------------------------------------------------------------------------------
/tweet_players_onice.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_players_onice = parameters.charts_players_onice
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | skaters_onice_shots_away = open(charts_players_onice + 'skaters_onice_shots_away.png', 'rb')
38 | skaters_onice_shots_home = open(charts_players_onice + 'skaters_onice_shots_home.png', 'rb')
39 |
40 | skaters_onice_xg_away = open(charts_players_onice + 'skaters_onice_xg_away.png', 'rb')
41 | skaters_onice_xg_home = open(charts_players_onice + 'skaters_onice_xg_home.png', 'rb')
42 |
43 | with open(livefeed_file) as livefeed_json:
44 | livefeed_data = json.load(livefeed_json)
45 |
46 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
47 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
48 | minutes_gone = int()
49 | seconds_gone = int()
50 | regulation_time_gone = str()
51 | ot_time_gone = str()
52 |
53 | try:
54 | time_left_split = status.split(':')
55 | regulation_minutes_gone = 20 - int(time_left_split[0])
56 | ot_minutes_gone = 5 - int(time_left_split[0])
57 |         if int(time_left_split[1]) != 0:
58 |             seconds_gone = 60 - int(time_left_split[1])
59 |             regulation_minutes_gone = regulation_minutes_gone - 1
60 |             ot_minutes_gone = ot_minutes_gone - 1
61 |         if seconds_gone < 10:
62 |             seconds_gone = '0' + str(seconds_gone)
63 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
64 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
65 | except:
66 | pass
67 |
68 | images = [skaters_onice_shots_away, skaters_onice_shots_home, skaters_onice_xg_away, skaters_onice_xg_home]
69 |
70 | media_ids = []
71 |
72 | for i in images:
73 | response = twitter.upload_media(media=i)
74 | media_ids.append(response['media_id_string'])
75 |
76 | if period == 1 and status != 'END':
77 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) on-ice shots, xG for skaters:', media_ids=media_ids)
78 | elif period == 1 and status == 'END':
79 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) on-ice shots, xG for skaters:', media_ids=media_ids)
80 |
81 | if period == 2 and status != 'END':
82 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) on-ice shots, xG for skaters:', media_ids=media_ids)
83 | elif period == 2 and status == 'END':
84 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) on-ice shots, xG for skaters:', media_ids=media_ids)
85 |
86 | if period == 3 and status != 'END' and status != 'Final':
87 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) on-ice shots, xG for skaters:', media_ids=media_ids)
88 | elif period == 3 and status == 'END':
89 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) on-ice shots, xG for skaters:', media_ids=media_ids)
90 | elif period == 3 and status == 'Final':
91 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for skaters:', media_ids=media_ids)
92 |
93 | if period == 4 and status != 'END' and status != 'Final':
94 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) on-ice shots, xG for skaters:', media_ids=media_ids)
95 | elif period == 4 and status == 'END':
96 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) on-ice shots, xG for skaters:', media_ids=media_ids)
97 | elif period == 4 and status == 'Final':
98 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for skaters:', media_ids=media_ids)
99 |
100 | if period == 5 and status != 'Final':
101 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) on-ice shots, xG for skaters:', media_ids=media_ids)
102 | elif period == 5 and status == 'Final':
103 | twitter.update_status(status= away + ' @ ' + home + ' (Final) on-ice shots, xG for skaters:', media_ids=media_ids)
104 |
105 |
106 | print('Tweeted on-ice shots, xG for skaters.')
--------------------------------------------------------------------------------
/tweet_players_individual.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_players_individual = parameters.charts_players_individual
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | skaters_individual_shots_away = open(charts_players_individual + 'skaters_individual_shots_away.png', 'rb')
38 | skaters_individual_shots_home = open(charts_players_individual + 'skaters_individual_shots_home.png', 'rb')
39 |
40 | skaters_individual_xg_away = open(charts_players_individual + 'skaters_individual_xg_away.png', 'rb')
41 | skaters_individual_xg_home = open(charts_players_individual + 'skaters_individual_xg_home.png', 'rb')
42 |
43 | with open(livefeed_file) as livefeed_json:
44 | livefeed_data = json.load(livefeed_json)
45 |
46 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
47 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
48 | minutes_gone = int()
49 | seconds_gone = int()
50 | regulation_time_gone = str()
51 | ot_time_gone = str()
52 |
53 | try:
54 | time_left_split = status.split(':')
55 | regulation_minutes_gone = 20 - int(time_left_split[0])
56 | ot_minutes_gone = 5 - int(time_left_split[0])
57 |         if int(time_left_split[1]) != 0:
58 |             seconds_gone = 60 - int(time_left_split[1])
59 |             regulation_minutes_gone = regulation_minutes_gone - 1
60 |             ot_minutes_gone = ot_minutes_gone - 1
61 |         if seconds_gone < 10:
62 |             seconds_gone = '0' + str(seconds_gone)
63 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
64 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
65 | except:
66 | pass
67 |
68 | images = [skaters_individual_shots_away, skaters_individual_shots_home, skaters_individual_xg_away, skaters_individual_xg_home]
69 |
70 | media_ids = []
71 |
72 | for i in images:
73 | response = twitter.upload_media(media=i)
74 | media_ids.append(response['media_id_string'])
75 |
76 | if period == 1 and status != 'END':
77 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) individual shots, xG for skaters:', media_ids=media_ids)
78 | elif period == 1 and status == 'END':
79 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) individual shots, xG for skaters:', media_ids=media_ids)
80 |
81 | if period == 2 and status != 'END':
82 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) individual shots, xG for skaters:', media_ids=media_ids)
83 | elif period == 2 and status == 'END':
84 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) individual shots, xG for skaters:', media_ids=media_ids)
85 |
86 | if period == 3 and status != 'END' and status != 'Final':
87 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) individual shots, xG for skaters:', media_ids=media_ids)
88 | elif period == 3 and status == 'END':
89 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) individual shots, xG for skaters:', media_ids=media_ids)
90 | elif period == 3 and status == 'Final':
91 | twitter.update_status(status= away + ' @ ' + home + ' (Final) individual shots, xG for skaters:', media_ids=media_ids)
92 |
93 | if period == 4 and status != 'END' and status != 'Final':
94 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) individual shots, xG for skaters:', media_ids=media_ids)
95 | elif period == 4 and status == 'END':
96 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) individual shots, xG for skaters:', media_ids=media_ids)
97 | elif period == 4 and status == 'Final':
98 | twitter.update_status(status= away + ' @ ' + home + ' (Final) individual shots, xG for skaters:', media_ids=media_ids)
99 |
100 | if period == 5 and status != 'Final':
101 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) individual shots, xG for skaters:', media_ids=media_ids)
102 | elif period == 5 and status == 'Final':
103 | twitter.update_status(status= away + ' @ ' + home + ' (Final) individual shots, xG for skaters:', media_ids=media_ids)
104 |
105 |
106 | print('Tweeted individual shots, xG for skaters.')
--------------------------------------------------------------------------------
/tweet_units_lines_matchups_lines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_units_lines = parameters.charts_units_lines
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | units_onice_shots_lines_matchups_lines_away = open(charts_units_lines + 'onice_shots_away_lines_matchups_lines.png', 'rb')
38 | units_onice_shots_lines_matchups_lines_home = open(charts_units_lines + 'onice_shots_home_lines_matchups_lines.png', 'rb')
39 | units_onice_xg_lines_matchups_lines_away = open(charts_units_lines + 'onice_xg_away_lines_matchups_lines.png', 'rb')
40 | units_onice_xg_lines_matchups_lines_home = open(charts_units_lines + 'onice_xg_home_lines_matchups_lines.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 | try:
53 | time_left_split = status.split(':')
54 | regulation_minutes_gone = 20 - int(time_left_split[0])
55 | ot_minutes_gone = 5 - int(time_left_split[0])
56 |         if int(time_left_split[1]) != 0:
57 |             seconds_gone = 60 - int(time_left_split[1])
58 |             regulation_minutes_gone = regulation_minutes_gone - 1
59 |             ot_minutes_gone = ot_minutes_gone - 1
60 |         if seconds_gone < 10:
61 |             seconds_gone = '0' + str(seconds_gone)
62 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
63 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
64 | except:
65 | pass
66 |
67 | images = [units_onice_shots_lines_matchups_lines_away, units_onice_shots_lines_matchups_lines_home, units_onice_xg_lines_matchups_lines_away, units_onice_xg_lines_matchups_lines_home]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) line vs. line on-ice shots, xG:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. line on-ice shots, xG:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) line vs. line on-ice shots, xG:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line vs. line on-ice shots, xG:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. line on-ice shots, xG:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line vs. line on-ice shots, xG:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. line on-ice shots, xG:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted unit line vs. line matchups.')
--------------------------------------------------------------------------------
/tweet_units_lines_matchups_pairings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_units_lines = parameters.charts_units_lines
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | units_onice_shots_lines_matchups_pairings_away = open(charts_units_lines + 'onice_shots_away_lines_matchups_pairings.png', 'rb')
38 | units_onice_shots_lines_matchups_pairings_home = open(charts_units_lines + 'onice_shots_home_lines_matchups_pairings.png', 'rb')
39 | units_onice_xg_lines_matchups_pairings_away = open(charts_units_lines + 'onice_xg_away_lines_matchups_pairings.png', 'rb')
40 | units_onice_xg_lines_matchups_pairings_home = open(charts_units_lines + 'onice_xg_home_lines_matchups_pairings.png', 'rb')
41 |
42 | with open(livefeed_file) as livefeed_json:
43 | livefeed_data = json.load(livefeed_json)
44 |
45 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
46 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
47 | minutes_gone = int()
48 | seconds_gone = int()
49 | regulation_time_gone = str()
50 | ot_time_gone = str()
51 |
52 | try:
53 | time_left_split = status.split(':')
54 | regulation_minutes_gone = 20 - int(time_left_split[0])
55 | ot_minutes_gone = 5 - int(time_left_split[0])
56 |         if int(time_left_split[1]) != 0:
57 |             seconds_gone = 60 - int(time_left_split[1])
58 |             regulation_minutes_gone = regulation_minutes_gone - 1
59 |             ot_minutes_gone = ot_minutes_gone - 1
60 |         if seconds_gone < 10:
61 |             seconds_gone = '0' + str(seconds_gone)
62 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
63 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
64 | except:
65 | pass
66 |
67 | images = [units_onice_shots_lines_matchups_pairings_away, units_onice_shots_lines_matchups_pairings_home, units_onice_xg_lines_matchups_pairings_away, units_onice_xg_lines_matchups_pairings_home]
68 |
69 | media_ids = []
70 |
71 | for i in images:
72 | response = twitter.upload_media(media=i)
73 | media_ids.append(response['media_id_string'])
74 |
75 | if period == 1 and status != 'END':
76 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
77 | elif period == 1 and status == 'END':
78 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
79 |
80 | if period == 2 and status != 'END':
81 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
82 | elif period == 2 and status == 'END':
83 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
84 |
85 | if period == 3 and status != 'END' and status != 'Final':
86 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
87 | elif period == 3 and status == 'END':
88 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
89 | elif period == 3 and status == 'Final':
90 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
91 |
92 | if period == 4 and status != 'END' and status != 'Final':
93 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
94 | elif period == 4 and status == 'END':
95 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
96 | elif period == 4 and status == 'Final':
97 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
98 |
99 | if period == 5 and status != 'Final':
100 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
101 | elif period == 5 and status == 'Final':
102 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line vs. pairing on-ice shots, xG:', media_ids=media_ids)
103 |
104 |
105 | print('Tweeted unit line vs. pairing matchups.')
--------------------------------------------------------------------------------
/tweet_units_lines_teammates_pairings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | from twython import Twython
6 | import pandas as pd
7 | import parameters
8 | import json
9 | import twitter_credentials
10 |
11 | def parse_ids(season_id, game_id):
12 |
13 | ### pull common variables from the parameters file
14 | charts_units_lines = parameters.charts_units_lines
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | home = schedule_date['HOME'].item()
24 | away = schedule_date['AWAY'].item()
25 |
26 | ### establish common filepaths
27 | livefeed_file = files_root + 'livefeed.json'
28 |
29 | ### post charts to Twitter
30 | APP_KEY = twitter_credentials.APP_KEY
31 | APP_SECRET = twitter_credentials.APP_SECRET
32 | OAUTH_TOKEN = twitter_credentials.OAUTH_TOKEN
33 | OAUTH_TOKEN_SECRET = twitter_credentials.OAUTH_TOKEN_SECRET
34 |
35 | twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
36 |
37 | units_onice_shots_lines_teammates_pairings_away = open(charts_units_lines + 'onice_shots_away_lines_teammates_pairings.png', 'rb')
38 | units_onice_shots_lines_teammates_pairings_home = open(charts_units_lines + 'onice_shots_home_lines_teammates_pairings.png', 'rb')
39 | units_onice_xg_lines_teammates_pairings_away = open(charts_units_lines + 'onice_xg_away_lines_teammates_pairings.png', 'rb')
40 | units_onice_xg_lines_teammates_pairings_home = open(charts_units_lines + 'onice_xg_home_lines_teammates_pairings.png', 'rb')
41 |
42 |
43 | with open(livefeed_file) as livefeed_json:
44 | livefeed_data = json.load(livefeed_json)
45 |
46 | period = livefeed_data["liveData"]["linescore"]["currentPeriod"]
47 | status = livefeed_data["liveData"]["linescore"]["currentPeriodTimeRemaining"]
48 | minutes_gone = int()
49 | seconds_gone = int()
50 | regulation_time_gone = str()
51 | ot_time_gone = str()
52 |
53 | try:
54 | time_left_split = status.split(':')
55 | regulation_minutes_gone = 20 - int(time_left_split[0])
56 | ot_minutes_gone = 5 - int(time_left_split[0])
57 |         if int(time_left_split[1]) != 0:
58 |             seconds_gone = 60 - int(time_left_split[1])
59 |             regulation_minutes_gone = regulation_minutes_gone - 1
60 |             ot_minutes_gone = ot_minutes_gone - 1
61 |         if seconds_gone < 10:
62 |             seconds_gone = '0' + str(seconds_gone)
63 | regulation_time_gone = str(regulation_minutes_gone) + ':' + str(seconds_gone)
64 | ot_time_gone = str(ot_minutes_gone) + ':' + str(seconds_gone)
65 | except:
66 | pass
67 |
68 | images = [units_onice_shots_lines_teammates_pairings_away, units_onice_shots_lines_teammates_pairings_home, units_onice_xg_lines_teammates_pairings_away, units_onice_xg_lines_teammates_pairings_home]
69 |
70 | media_ids = []
71 |
72 | for i in images:
73 | response = twitter.upload_media(media=i)
74 | media_ids.append(response['media_id_string'])
75 |
76 | if period == 1 and status != 'END':
77 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 1st Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
78 | elif period == 1 and status == 'END':
79 | twitter.update_status(status= away + ' @ ' + home + ' (End of 1st Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
80 |
81 | if period == 2 and status != 'END':
82 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 2nd Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
83 | elif period == 2 and status == 'END':
84 | twitter.update_status(status= away + ' @ ' + home + ' (End of 2nd Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
85 |
86 | if period == 3 and status != 'END' and status != 'Final':
87 | twitter.update_status(status= away + ' @ ' + home + ' (' + regulation_time_gone + ' into the 3rd Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
88 | elif period == 3 and status == 'END':
89 | twitter.update_status(status= away + ' @ ' + home + ' (End of 3rd Period) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
90 | elif period == 3 and status == 'Final':
91 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
92 |
93 | if period == 4 and status != 'END' and status != 'Final':
94 | twitter.update_status(status= away + ' @ ' + home + ' (' + ot_time_gone + ' into Overtime) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
95 | elif period == 4 and status == 'END':
96 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
97 | elif period == 4 and status == 'Final':
98 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
99 |
100 | if period == 5 and status != 'Final':
101 | twitter.update_status(status= away + ' @ ' + home + ' (End of Overtime) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
102 | elif period == 5 and status == 'Final':
103 | twitter.update_status(status= away + ' @ ' + home + ' (Final) line w/ pairing on-ice shots, xG:', media_ids=media_ids)
104 |
105 |
106 | print('Tweeted unit line with pairing teammates.')
--------------------------------------------------------------------------------
/environment.txt:
--------------------------------------------------------------------------------
1 | # This file may be used to create an environment using:
2 | # $ conda create --name --file
3 | # platform: win-64
4 | @EXPLICIT
5 | https://repo.anaconda.com/pkgs/main/win-64/pandoc-2.2.1-h1a437c5_0.tar.bz2
6 | https://repo.anaconda.com/pkgs/main/win-64/blas-1.0-mkl.tar.bz2
7 | https://repo.anaconda.com/pkgs/main/win-64/ca-certificates-2019.11.27-0.tar.bz2
8 | https://repo.anaconda.com/pkgs/main/win-64/icc_rt-2019.0.0-h0cc432a_1.tar.bz2
9 | https://repo.anaconda.com/pkgs/main/win-64/intel-openmp-2019.3-203.tar.bz2
10 | https://repo.anaconda.com/pkgs/main/win-64/vs2015_runtime-14.15.26706-h3a45250_0.tar.bz2
11 | https://repo.anaconda.com/pkgs/main/win-64/mkl-2018.0.3-1.tar.bz2
12 | https://repo.anaconda.com/pkgs/main/win-64/vc-14.1-h0510ff6_4.tar.bz2
13 | https://repo.anaconda.com/pkgs/main/win-64/icu-58.2-ha66f8fd_1.tar.bz2
14 | https://repo.anaconda.com/pkgs/main/win-64/jpeg-9b-hb83a4c4_2.tar.bz2
15 | https://repo.anaconda.com/pkgs/main/win-64/openssl-1.1.1c-he774522_1.tar.bz2
16 | https://repo.anaconda.com/pkgs/main/win-64/sqlite-3.28.0-he774522_0.tar.bz2
17 | https://repo.anaconda.com/pkgs/main/win-64/tbb-2019.4-h74a9793_0.conda
18 | https://repo.anaconda.com/pkgs/main/win-64/zlib-1.2.11-h62dcd97_3.tar.bz2
19 | https://repo.anaconda.com/pkgs/main/win-64/libpng-1.6.37-h2a8f88b_0.tar.bz2
20 | https://repo.anaconda.com/pkgs/main/win-64/python-3.6.8-h9f7ef89_7.tar.bz2
21 | https://repo.anaconda.com/pkgs/main/win-64/alabaster-0.7.12-py36_0.tar.bz2
22 | https://repo.anaconda.com/pkgs/main/win-64/asn1crypto-0.24.0-py36_0.tar.bz2
23 | https://repo.anaconda.com/pkgs/main/win-64/beautifulsoup4-4.6.3-py36_0.tar.bz2
24 | https://repo.anaconda.com/pkgs/main/win-64/blinker-1.4-py36_0.tar.bz2
25 | https://repo.anaconda.com/pkgs/main/win-64/certifi-2019.11.28-py36_0.tar.bz2
26 | https://repo.anaconda.com/pkgs/main/win-64/chardet-3.0.4-py36_1.tar.bz2
27 | https://repo.anaconda.com/pkgs/main/win-64/docutils-0.14-py36h6012d8f_0.tar.bz2
28 | https://repo.anaconda.com/pkgs/main/win-64/freetype-2.9.1-ha9979f8_1.tar.bz2
29 | https://repo.anaconda.com/pkgs/main/win-64/idna-2.8-py36_0.tar.bz2
30 | https://repo.anaconda.com/pkgs/main/win-64/imagesize-1.1.0-py36_0.tar.bz2
31 | https://repo.anaconda.com/pkgs/main/win-64/kiwisolver-1.0.1-py36h6538335_0.tar.bz2
32 | https://repo.anaconda.com/pkgs/main/win-64/markupsafe-1.1.1-py36he774522_0.tar.bz2
33 | https://repo.anaconda.com/pkgs/main/win-64/numpy-base-1.15.0-py36h4a99626_0.tar.bz2
34 | https://repo.anaconda.com/pkgs/main/win-64/pandocfilters-1.4.2-py36_1.tar.bz2
35 | https://repo.anaconda.com/pkgs/main/win-64/pycparser-2.19-py36_0.tar.bz2
36 | https://repo.anaconda.com/pkgs/main/noarch/pyparsing-2.4.0-py_0.tar.bz2
37 | https://repo.anaconda.com/pkgs/main/noarch/pytz-2019.1-py_0.tar.bz2
38 | https://repo.anaconda.com/pkgs/main/win-64/qt-5.9.7-vc14h73c81de_0.tar.bz2
39 | https://repo.anaconda.com/pkgs/main/win-64/sip-4.19.8-py36h6538335_0.tar.bz2
40 | https://repo.anaconda.com/pkgs/main/win-64/six-1.12.0-py36_0.tar.bz2
41 | https://repo.anaconda.com/pkgs/main/win-64/snowballstemmer-1.2.1-py36h763602f_0.tar.bz2
42 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-applehelp-1.0.1-py_0.tar.bz2
43 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-devhelp-1.0.1-py_0.tar.bz2
44 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-htmlhelp-1.0.2-py_0.tar.bz2
45 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2
46 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-qthelp-1.0.2-py_0.tar.bz2
47 | https://repo.anaconda.com/pkgs/main/noarch/sphinxcontrib-serializinghtml-1.1.3-py_0.tar.bz2
48 | https://repo.anaconda.com/pkgs/main/win-64/tbb4py-2019.4-py36h74a9793_0.conda
49 | https://repo.anaconda.com/pkgs/main/win-64/tornado-6.0.2-py36he774522_0.tar.bz2
50 | https://repo.anaconda.com/pkgs/main/win-64/win_inet_pton-1.1.0-py36_0.tar.bz2
51 | https://repo.anaconda.com/pkgs/main/win-64/wincertstore-0.2-py36h7fe50ca_0.tar.bz2
52 | https://repo.anaconda.com/pkgs/main/win-64/babel-2.6.0-py36_0.tar.bz2
53 | https://repo.anaconda.com/pkgs/main/win-64/cffi-1.12.3-py36h7a1dbc1_0.tar.bz2
54 | https://repo.anaconda.com/pkgs/main/win-64/cycler-0.10.0-py36h009560c_0.tar.bz2
55 | https://repo.anaconda.com/pkgs/main/win-64/packaging-19.0-py36_0.tar.bz2
56 | https://repo.anaconda.com/pkgs/main/win-64/pyqt-5.9.2-py36h6538335_2.tar.bz2
57 | https://repo.anaconda.com/pkgs/main/win-64/pysocks-1.6.8-py36_0.tar.bz2
58 | https://repo.anaconda.com/pkgs/main/win-64/python-dateutil-2.8.0-py36_0.tar.bz2
59 | https://repo.anaconda.com/pkgs/main/win-64/setuptools-41.0.0-py36_0.tar.bz2
60 | https://repo.anaconda.com/pkgs/main/win-64/cryptography-2.6.1-py36h7a1dbc1_0.tar.bz2
61 | https://repo.anaconda.com/pkgs/main/win-64/jinja2-2.10.1-py36_0.tar.bz2
62 | https://repo.anaconda.com/pkgs/main/win-64/pygments-2.3.1-py36_0.tar.bz2
63 | https://repo.anaconda.com/pkgs/main/win-64/wheel-0.33.1-py36_0.tar.bz2
64 | https://repo.anaconda.com/pkgs/main/win-64/pip-10.0.1-py36_0.tar.bz2
65 | https://repo.anaconda.com/pkgs/main/win-64/pyjwt-1.7.1-py36_0.tar.bz2
66 | https://repo.anaconda.com/pkgs/main/win-64/pyopenssl-19.0.0-py36_0.tar.bz2
67 | https://repo.anaconda.com/pkgs/main/win-64/oauthlib-2.1.0-py36_0.tar.bz2
68 | https://repo.anaconda.com/pkgs/main/win-64/urllib3-1.24.2-py36_0.tar.bz2
69 | https://repo.anaconda.com/pkgs/main/win-64/requests-2.21.0-py36_0.tar.bz2
70 | https://repo.anaconda.com/pkgs/main/win-64/selenium-3.141.0-py36he774522_0.tar.bz2
71 | https://repo.anaconda.com/pkgs/main/noarch/requests-oauthlib-1.2.0-py_0.tar.bz2
72 | https://repo.anaconda.com/pkgs/main/noarch/sphinx-2.0.1-py_0.tar.bz2
73 | https://repo.anaconda.com/pkgs/main/win-64/numpydoc-0.8.0-py36_0.tar.bz2
74 | https://repo.anaconda.com/pkgs/main/win-64/twython-3.7.0-py36_0.tar.bz2
75 | https://repo.anaconda.com/pkgs/main/win-64/matplotlib-3.0.1-py36hc8f65d3_0.tar.bz2
76 | https://repo.anaconda.com/pkgs/main/win-64/mkl_fft-1.0.6-py36hdbbee80_0.tar.bz2
77 | https://repo.anaconda.com/pkgs/main/win-64/mkl_random-1.0.1-py36h77b88f5_1.tar.bz2
78 | https://repo.anaconda.com/pkgs/main/win-64/numpy-1.15.0-py36h9fa60d3_0.tar.bz2
79 | https://repo.anaconda.com/pkgs/main/win-64/pandas-0.23.4-py36h830ac7b_0.tar.bz2
80 | https://repo.anaconda.com/pkgs/main/win-64/scipy-1.1.0-py36hc28095f_0.tar.bz2
81 | https://repo.anaconda.com/pkgs/main/win-64/patsy-0.5.1-py36_0.tar.bz2
82 | https://repo.anaconda.com/pkgs/main/win-64/scikit-learn-0.20.1-py36hb854c30_0.conda
83 | https://repo.anaconda.com/pkgs/main/win-64/statsmodels-0.9.0-py36h452e1ab_0.tar.bz2
84 | https://repo.anaconda.com/pkgs/main/win-64/seaborn-0.9.0-py36_0.tar.bz2
85 |
--------------------------------------------------------------------------------
/run_stats.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | import argparse
6 |
7 | parser = argparse.ArgumentParser()
8 |
9 | ### creates arguments to make use of in functions
10 | parser.add_argument('season_id', help='Set to [8-digit season number] (e.g. 20182019)')
11 | parser.add_argument('game_id', help='Set to [5-digit game number] (e.g. 20001)')
12 |
13 | parser.add_argument('--focus', dest='focus', help='Can set to [teams, players, units, lines, pairings, pp, pk]', required=False)
14 | parser.add_argument('--detail', dest='detail', help='Can set to [basic, period, situation] for teams, [basic, period, situation, opponents, teammates] for players, [basic, matchups, matchups_lines, matchups_pairings, teammates] for units', required=False)
15 |
16 | args = parser.parse_args()
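### example: python run_stats.py 20182019 20001 --focus teams --detail period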
17 |
18 |
19 | ###
20 | ### TEAM STATS
21 | ###
22 |
23 | if args.focus == 'teams' and args.detail is None or args.focus == 'teams' and args.detail == 'basic':
24 | import stats_teams
25 | stats_teams.parse_ids(args.season_id, args.game_id)
26 | stats_teams
27 |
28 | if args.focus == 'teams' and args.detail is None or args.focus == 'teams' and args.detail == 'period':
29 | import stats_teams_period
30 | stats_teams_period.parse_ids(args.season_id, args.game_id)
31 | stats_teams_period
32 |
33 | if args.focus == 'teams' and args.detail is None or args.focus == 'teams' and args.detail == 'situation':
34 | import stats_teams_situation
35 | stats_teams_situation.parse_ids(args.season_id, args.game_id)
36 | stats_teams_situation
37 |
38 |
39 | ###
40 | ### PLAYER STATS
41 | ###
42 |
43 | if args.focus == 'players' and args.detail is None or args.focus == 'players' and args.detail == 'basic':
44 | import stats_players
45 | stats_players.parse_ids(args.season_id, args.game_id)
46 | stats_players
47 |
48 | if args.focus == 'players' and args.detail is None or args.focus == 'players' and args.detail == 'period':
49 | import stats_players_period
50 | stats_players_period.parse_ids(args.season_id, args.game_id)
51 | stats_players_period
52 |
53 | if args.focus == 'players' and args.detail is None or args.focus == 'players' and args.detail == 'situation':
54 | import stats_players_situation
55 | stats_players_situation.parse_ids(args.season_id, args.game_id)
56 | stats_players_situation
57 |
58 | if args.focus == 'players' and args.detail is None or args.focus == 'players' and args.detail == 'opponents':
59 | import stats_players_opponents
60 | stats_players_opponents.parse_ids(args.season_id, args.game_id)
61 | stats_players_opponents
62 |
63 | if args.focus == 'players' and args.detail is None or args.focus == 'players' and args.detail == 'teammates':
64 | import stats_players_teammates
65 | stats_players_teammates.parse_ids(args.season_id, args.game_id)
66 | stats_players_teammates
67 |
68 |
69 | ###
70 | ### UNIT STATS
71 | ###
72 |
73 | ##
74 | ## Lines
75 | ##
76 |
77 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'basic' or args.focus == 'lines' and args.detail is None or args.focus == 'lines' and args.detail == 'basic':
78 | import stats_units_lines
79 | stats_units_lines.parse_ids(args.season_id, args.game_id)
80 | stats_units_lines
81 |
82 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'matchups' or args.focus == 'units' and args.detail == 'matchups_lines' or args.focus == 'lines' and args.detail is None or args.focus == 'lines' and args.detail == 'matchups' or args.focus == 'lines' and args.detail == 'matchups_lines':
83 | import stats_units_lines_matchups_lines
84 | stats_units_lines_matchups_lines.parse_ids(args.season_id, args.game_id)
85 | stats_units_lines_matchups_lines
86 |
87 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'matchups' or args.focus == 'units' and args.detail == 'matchups_pairings' or args.focus == 'lines' and args.detail is None or args.focus == 'lines' and args.detail == 'matchups' or args.focus == 'lines' and args.detail == 'matchups_pairings':
88 | import stats_units_lines_matchups_pairings
89 | stats_units_lines_matchups_pairings.parse_ids(args.season_id, args.game_id)
90 | stats_units_lines_matchups_pairings
91 |
92 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'teammates' or args.focus == 'lines' and args.detail is None or args.focus == 'lines' and args.detail == 'teammates':
93 | import stats_units_lines_teammates_pairings
94 | stats_units_lines_teammates_pairings.parse_ids(args.season_id, args.game_id)
95 | stats_units_lines_teammates_pairings
96 |
97 | ##
98 | ## Pairings
99 | ##
100 |
101 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'basic' or args.focus == 'pairings' and args.detail is None or args.focus == 'pairings' and args.detail == 'basic':
102 | import stats_units_pairings
103 | stats_units_pairings.parse_ids(args.season_id, args.game_id)
104 | stats_units_pairings
105 |
106 | if args.focus == 'units' and args.detail is None or args.focus == 'units' and args.detail == 'matchups' or args.focus == 'units' and args.detail == 'matchups_lines' or args.focus == 'pairings' and args.detail is None or args.focus == 'pairings' and args.detail == 'matchups' or args.focus == 'pairings' and args.detail == 'matchups_lines':
107 | import stats_units_pairings_matchups_lines
108 | stats_units_pairings_matchups_lines.parse_ids(args.season_id, args.game_id)
109 | stats_units_pairings_matchups_lines
110 |
111 | if args.focus in ('units', 'pairings') and args.detail in (None, 'matchups', 'matchups_pairings'):
112 | import stats_units_pairings_matchups_pairings
113 | stats_units_pairings_matchups_pairings.parse_ids(args.season_id, args.game_id)
114 | stats_units_pairings_matchups_pairings
115 |
116 | if args.focus in ('units', 'pairings') and args.detail in (None, 'teammates'):
117 | import stats_units_pairings_teammates_lines
118 | stats_units_pairings_teammates_lines.parse_ids(args.season_id, args.game_id)
119 | stats_units_pairings_teammates_lines
120 |
121 | ##
122 | ## Power Play
123 | ##
124 |
125 | if args.focus in ('units', 'pp') and args.detail in (None, 'basic'):
126 | import stats_units_pp
127 | stats_units_pp.parse_ids(args.season_id, args.game_id)
128 | stats_units_pp
129 |
130 | ##
131 | ## Penalty Kill
132 | ##
133 |
134 | if args.focus in ('units', 'pk') and args.detail in (None, 'basic'):
135 | import stats_units_pk
136 | stats_units_pk.parse_ids(args.season_id, args.game_id)
137 | stats_units_pk
--------------------------------------------------------------------------------
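The run_stats.py dispatch above repeats one compound condition per stats module. A minimal sketch of the same idea as a lookup table, assuming each module exposes the parse_ids(season_id, game_id) entry point used above; the registry contents shown are illustrative, not the repo's actual module list:

import importlib

# hypothetical registry mapping stats modules to the (focus, detail) pairs that trigger them
REGISTRY = {
    'stats_players_period':    [('players', None), ('players', 'period')],
    'stats_players_situation': [('players', None), ('players', 'situation')],
}

def dispatch(focus, detail, season_id, game_id):
    # import and run every module whose triggers match the parsed command-line arguments
    for module_name, triggers in REGISTRY.items():
        if (focus, detail) in triggers:
            importlib.import_module(module_name).parse_ids(season_id, game_id)
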
/files_parse_rosters.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | from bs4 import BeautifulSoup
7 | import csv
8 | import pandas as pd
9 | import parameters
10 | import dict_names
11 |
12 | def parse_ids(season_id, game_id, switch_F2D, switch_D2F):
13 |
14 | ### pull common variables from the parameters file
15 | files_root = parameters.files_root
16 |
17 | ### generate date and team information
18 | schedule_csv = files_root + season_id + "_schedule.csv"
19 |
20 | schedule_df = pd.read_csv(schedule_csv)
21 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
22 |
23 | date = schedule_date['DATE'].item()
24 | home = schedule_date['HOME'].item()
25 | away = schedule_date['AWAY'].item()
26 |
27 | ### establish file locations and destinations
28 | rosters_in = files_root + 'rosters.HTM'
29 | rosters_out = files_root + 'rosters.csv'
30 |
31 | ###
32 | ### ROSTERS (HTM)
33 | ###
34 |
35 | if int(season_id) >= 20072008:
36 | ### load the rosters (HTM) file and convert it to a BeautifulSoup object
37 | with open(rosters_in, 'r') as infile:
38 | roster_soup = BeautifulSoup(infile, 'html.parser')
39 |
40 | ### get full rosters for home and away teams and create a tuple (#, Pos, Name)
41 | home_roster = roster_soup.select('.border')[3].find_all('td')[3:]
42 | away_roster = roster_soup.select('.border')[2].find_all('td')[3:]
43 |
44 | home_roster = [i.string.strip() for i in home_roster]
45 | away_roster = [i.string.strip() for i in away_roster]
46 |
47 | home_roster = [list(h) for h in zip(home_roster[0::3], home_roster[1::3], home_roster[2::3])]
48 | away_roster = [list(a) for a in zip(away_roster[0::3], away_roster[1::3], away_roster[2::3])]
49 |
50 | ### remove (C) or (A) from the end of the names of captains and alternates
51 |         for i in range(len(home_roster)):
52 | try:
53 | if (home_roster[i][2][-1] == ')'):
54 | home_roster[i][2] = home_roster[i][2][:-5]
55 | if (away_roster[i][2][-1] == ')'):
56 | away_roster[i][2] = away_roster[i][2][:-5]
57 | except:
58 | pass
59 |
60 | ### begin writing both rosters to one .csv; write column titles to a header row
61 | csvRows = [('SEASON', 'GAME_ID', 'DATE', 'LOCATION', 'TEAM', 'PLAYER_NO', 'PLAYER_NAME', 'PLAYER_POS')]
62 |
63 | ### loop through the away players
64 | for l in away_roster:
65 | try:
66 | away_player = l[2].upper().replace(' ', '.')
67 | away_player = dict_names.NAMES[away_player]
68 | except:
69 | pass
70 | csvRows += [(season_id, game_id, date, 'Away', away, l[0], away_player, l[1])]
71 |
72 | ### loop through the home players
73 | for l in home_roster:
74 | try:
75 | home_player = l[2].upper().replace(' ', '.')
76 | home_player = dict_names.NAMES[home_player]
77 | except:
78 | pass
79 | csvRows += [(season_id, game_id, date, 'Home', home, l[0], home_player, l[1])]
80 |
81 | ### trigger and write to the outfile
82 | with open(rosters_out, 'w', newline='') as outfile:
83 | writer = csv.writer(outfile)
84 | writer.writerows(csvRows)
85 |
86 | if int(season_id) == 20062007:
87 | ### load the rosters (HTM) file and convert it to a BeautifulSoup object
88 | with open(rosters_in, 'r') as infile, open(rosters_out, 'w+', newline='') as outfile:
89 | writer = csv.writer(outfile)
90 |
91 | ### write the header row
92 | writer.writerow(['SEASON', 'GAME_ID', 'DATE', 'LOCATION', 'TEAM', 'PLAYER_NO', 'PLAYER_NAME', 'PLAYER_POS'])
93 |
94 | roster_soup = BeautifulSoup(infile, 'html.parser')
95 |
96 | ### get full rosters for home and away teams
97 | roster_table = roster_soup.select('table')[1].find_all('tr')[2:]
98 |
99 | ### loop through the table with only the columns containing away player info, writing to file
100 | for row in roster_table:
101 | away_tds = row.select('td')[0:3]
102 | away_no = away_tds[0].string
103 | away_name = away_tds[2].string
104 | away_pos = away_tds[1].string
105 |
106 | writer.writerow([season_id, game_id, date, 'Away', away, away_no, away_name, away_pos])
107 |
108 |             ### loop through the table with only the columns containing home player info, writing to file
109 | for row in roster_table:
110 | home_tds = row.select('td')[4:7]
111 | home_no = home_tds[0].string
112 | home_name = home_tds[2].string
113 | home_name = home_name.replace(' ', '.')
114 | home_pos = home_tds[1].string
115 |
116 | writer.writerow([season_id, game_id, date, 'Home', home, home_no, home_name, home_pos])
117 |
118 | ### create a dataframe using the newly-created outfile
119 | rosters_df = pd.read_csv(rosters_out)
120 |
121 | if int(season_id) == 20062007:
122 | try:
123 | rosters_df['PLAYER_NAME'] = rosters_df['PLAYER_NAME'].str.replace(' ', '.')
124 | except:
125 | pass
126 | try:
127 |             rosters_df['PLAYER_NAME'] = rosters_df['PLAYER_NAME'].replace(dict_names.NAMES)
128 | except:
129 | pass
130 |
131 | ### replace names for special name cases
132 | try:
133 | rosters_df.loc[(rosters_df.PLAYER_NAME == 'SEBASTIAN.AHO') & (rosters_df.TEAM == 'CAR'),['PLAYER_NAME']] = 'SEBASTIAN.A.AHO'; rosters_df
134 | rosters_df.loc[(rosters_df.PLAYER_NAME == 'SEBASTIAN.AHO') & (rosters_df.TEAM == 'NYI'),['PLAYER_NAME']] = 'SEBASTIAN.J.AHO'; rosters_df
135 | except:
136 | pass
137 |
138 | ### search for and change known instances of players whose positions are incorrectly classified
139 | try:
140 | if switch_F2D == 'Luke_Witkowski':
141 | rosters_df.loc[(rosters_df.PLAYER_NAME == 'LUKE.WITKOWSKI') & (rosters_df.PLAYER_POS != 'D'),['PLAYER_POS']] = 'D'; rosters_df
142 | except:
143 | pass
144 |
145 | try:
146 | if switch_D2F == 'Luke_Witkowski':
147 | rosters_df.loc[(rosters_df.PLAYER_NAME == 'LUKE.WITKOWSKI') & (rosters_df.PLAYER_POS == 'D'),['PLAYER_POS']] = 'F'; rosters_df
148 | except:
149 | pass
150 |
151 | ### add a new column to the dataframe that duplicates the player position values
152 | rosters_df['PLAYER_POS_DETAIL'] = rosters_df['PLAYER_POS']
153 |
154 | ### change the 'C', 'R' or 'L' designations in the original player position column to 'F'
155 | rosters_df.loc[(rosters_df.PLAYER_POS != 'D') & (rosters_df.PLAYER_POS != 'G'),['PLAYER_POS']] = 'F'; rosters_df
156 | rosters_df.to_csv(rosters_out, index=False)
157 |
158 |
159 | print('Finished parsing NHL rosters from .HTM for ' + season_id + ' ' + game_id)
--------------------------------------------------------------------------------
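In files_parse_rosters.py the roster cells come out of BeautifulSoup as one flat list of strings (number, position, name, repeated), so the stride-3 zip regroups them into per-player triples before the captaincy suffix is stripped. A small self-contained illustration of that regrouping, with made-up roster values and a simplified suffix strip:

# flat list of <td> strings as pulled from the HTM roster table (illustrative values)
cells = ['19', 'C', 'JONATHAN TOEWS (C)', '88', 'R', 'PATRICK KANE (A)', '2', 'D', 'DUNCAN KEITH']

# zip the list against itself at offsets 0, 1 and 2, each with a stride of 3,
# so element 0 pairs with 1 and 2, element 3 with 4 and 5, and so on
players = [list(p) for p in zip(cells[0::3], cells[1::3], cells[2::3])]

# drop a trailing ' (C)' / ' (A)' captaincy marker, mirroring the parser above
for player in players:
    if player[2].endswith(')'):
        player[2] = player[2][:-4]

print(players)
# [['19', 'C', 'JONATHAN TOEWS'], ['88', 'R', 'PATRICK KANE'], ['2', 'D', 'DUNCAN KEITH']]
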
/files_fetch.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | import requests
7 | import os
8 | import csv
9 | import json
10 | import pandas as pd
11 | import dict_teams
12 | import parameters
13 |
14 | def parse_ids(season_id, game_id):
15 |
16 | ### pull common variables from the parameters file
17 | files_root = parameters.files_root
18 |
19 | ### retrieve schedule
20 | schedule_csv = files_root + season_id + "_schedule.csv"
21 | schedule_exists = os.path.isfile(schedule_csv)
22 |
23 | if schedule_exists:
24 | print(season_id + ' schedule already exists')
25 |
26 | else:
27 | ### create variables that point to the .csv processed stats files for players
28 | JSON_schedule = files_root + season_id + "_schedule.json"
29 | schedule_csv = files_root + season_id + "_schedule.csv"
30 |
31 | ### find the .json schedule source and save to file
32 | try:
33 | year_start = season_id[0:4]
34 | year_end = season_id[4:8]
35 |
36 | if int(season_id) != 20192020:
37 | JSON_schedule_url = 'https://statsapi.web.nhl.com/api/v1/schedule?startDate=' + year_start + '-08-30&endDate=' + year_end + '-06-30'
38 | if int(season_id) == 20192020:
39 | JSON_schedule_url = 'https://statsapi.web.nhl.com/api/v1/schedule?startDate=' + year_start + '-08-30&endDate=' + year_end + '-10-30'
40 |
41 | JSON_schedule_request = requests.get(JSON_schedule_url, timeout=5).text
42 |
43 | f = open(files_root + season_id + '_schedule.json', 'w+')
44 | f.write(JSON_schedule_request)
45 | f.close()
46 | print('Retrieved NHL schedule (JSON) for ' + season_id)
47 | except:
48 |             print('ERROR: Could not retrieve the season schedule (JSON) for ' + season_id)
49 |
50 | ### pull and parse the .json schedule file as .csv
51 | with open(JSON_schedule) as JSON_schedule_in:
52 | JSON_schedule_parsed = json.load(JSON_schedule_in)
53 |
54 | JSON_game_dates = JSON_schedule_parsed['dates']
55 |
56 | ### begin the portion of the script that handles the csv generation
57 | with open(schedule_csv, 'w', newline='') as schedule_out:
58 | JSON_csvWriter = csv.writer(schedule_out)
59 |
60 | JSON_csvWriter.writerow(['SEASON', 'GAME_ID', 'DATE', 'HOME', 'AWAY'])
61 |
62 | for JSON_allgames in JSON_game_dates:
63 | JSON_dates = JSON_allgames["date"]
64 |
65 | JSON_games = JSON_allgames['games']
66 |
67 | for JSON_game in JSON_games:
68 |
69 | JSON_seasonid = JSON_game["season"]
70 |
71 | JSON_game_id = str(JSON_game["gamePk"])[5:]
72 | JSON_game_id = int(JSON_game_id)
73 |
74 | if JSON_game_id > 39999:
75 | continue
76 |
77 | JSON_date = JSON_dates
78 | JSON_date_split = JSON_date.split('-')
79 | JSON_year = JSON_date_split[0]
80 | JSON_month = JSON_date_split[1]
81 | JSON_day = JSON_date_split[2]
82 | JSON_date = JSON_month + '/' + JSON_day + '/' + JSON_year
83 |
84 | JSON_home = JSON_game["teams"]["home"]["team"]["name"].upper()
85 | JSON_away = JSON_game["teams"]["away"]["team"]["name"].upper()
86 |
87 | JSON_game_data = (JSON_seasonid, JSON_game_id, JSON_date, JSON_home, JSON_away)
88 |
89 | ### write the rows of shifts to the csv file
90 | JSON_csvWriter.writerows([JSON_game_data])
91 |
92 | try:
93 | ### reload the newly minted .csv file to replace the team names with their tricodes
94 | schedule_df = pd.read_csv(schedule_csv)
95 |
96 | schedule_df = schedule_df[(schedule_df.GAME_ID < 40000)].sort_values('GAME_ID')
97 |
98 | schedule_df['AWAY'] = schedule_df['AWAY'].replace(dict_teams.NHL)
99 | schedule_df['HOME'] = schedule_df['HOME'].replace(dict_teams.NHL)
100 |
101 | schedule_df.to_csv(schedule_csv, index = False)
102 |
103 | except:
104 | ### reload the newly minted .csv file to replace the team names with their tricodes
105 | schedule_df = pd.read_csv(schedule_csv, encoding='latin-1')
106 |
107 | schedule_df = schedule_df[(schedule_df.GAME_ID < 40000)].sort_values('GAME_ID')
108 |
109 | schedule_df['AWAY'] = schedule_df['AWAY'].replace(dict_teams.NHL)
110 | schedule_df['HOME'] = schedule_df['HOME'].replace(dict_teams.NHL)
111 |
112 | schedule_df.to_csv(schedule_csv, index = False)
113 |
114 | print('Finished parsing the NHL schedule for ' + season_id)
115 |
116 |
117 | if int(season_id) >= 20062007:
118 | ### retrieve HTM rosters
119 | try:
120 | ROS_content = requests.get('http://www.nhl.com/scores/htmlreports/' + season_id + '/RO0' + game_id + '.HTM', timeout=5).text
121 | if(len(ROS_content) < 10000):
122 | raise Exception
123 | f = open(files_root + 'rosters.HTM', 'w+')
124 | f.write(ROS_content)
125 | f.close()
126 | print('Retrieved NHL rosters (HTM) for ' + season_id + ' ' + game_id)
127 | except:
128 | print('ERROR: Could not retrieve NHL rosters (HTM) for ' + season_id + ' ' + game_id)
129 |
130 | ### retrieve HTM play-by-play
131 | try:
132 | PBP_content = requests.get('http://www.nhl.com/scores/htmlreports/' + season_id + '/PL0' + game_id + '.HTM', timeout=5).text
133 |             if(len(PBP_content) < 10000):
134 | raise Exception
135 | f = open(files_root + 'pbp.HTM', 'w+')
136 | f.write(PBP_content)
137 | f.close()
138 | print('Retrieved NHL play-by-play (HTM) for ' + season_id + ' ' + game_id)
139 | except:
140 | print('ERROR: Could not retrieve NHL play-by-play (HTM) for ' + season_id + ' ' + game_id)
141 |
142 | ### retrieve HTM home shift charts
143 | try:
144 | TH0_content = requests.get('http://www.nhl.com/scores/htmlreports/' + season_id + '/TH0' + game_id + '.HTM', timeout=5).text
145 | if(len(TH0_content) < 10000):
146 | raise Exception
147 | f = open(files_root + 'shifts_home.HTM', 'w+')
148 | f.write(TH0_content)
149 | f.close()
150 |             print('Retrieved NHL shifts (TH0, HTM) for ' + season_id + ' ' + game_id)
151 | except:
152 |             print('ERROR: Could not retrieve NHL shifts (TH0, HTM) for ' + season_id + ' ' + game_id)
153 |
154 | ### retrieve HTM visitor shift charts
155 | try:
156 | TV0_content = requests.get('http://www.nhl.com/scores/htmlreports/' + season_id + '/TV0' + game_id + '.HTM', timeout=5).text
157 | if(len(TV0_content) < 10000):
158 | raise Exception
159 | f = open(files_root +'shifts_away.HTM', 'w+')
160 | f.write(TV0_content)
161 | f.close()
162 |             print('Retrieved NHL shifts (TV0, HTM) for ' + season_id + ' ' + game_id)
163 | except:
164 |             print('ERROR: Could not retrieve NHL shifts (TV0, HTM) for ' + season_id + ' ' + game_id)
165 |
166 | ### retrieve JSON livefeed
167 | try:
168 | JSON_content = requests.get('http://statsapi.web.nhl.com/api/v1/game/' + season_id[0:4] + '0' + game_id + '/feed/live', timeout=5).text
169 | if(len(JSON_content) < 1000):
170 | raise Exception
171 | f = open(files_root + 'livefeed.json', 'w+')
172 | f.write(JSON_content)
173 | f.close()
174 | print('Retrieved NHL livefeed (JSON) for ' + season_id + ' ' + game_id)
175 | except:
176 |             print('ERROR: Could not retrieve NHL livefeed (JSON) for ' + season_id + ' ' + game_id)
177 |
178 | ### retrieve JSON shifts
179 | if int(season_id) >= 20102011:
180 | try:
181 | JSON_content = requests.get('http://www.nhl.com/stats/rest/shiftcharts?cayenneExp=gameId=' + season_id[0:4] + '0' + game_id, timeout=5).text
182 | if(len(JSON_content) < 1000):
183 | raise Exception
184 | f = open(files_root + 'shifts.json', 'w+')
185 | f.write(JSON_content)
186 | f.close()
187 | print('Retrieved NHL shifts (JSON) for ' + season_id + ' ' + game_id)
188 | except:
189 | print('ERROR: Could not retrieve NHL shifts (JSON) for ' + season_id + ' ' + game_id)
--------------------------------------------------------------------------------
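The schedule loop in files_fetch.py keeps only the trailing digits of each gamePk and skips ids above 39999. A short sketch of that decomposition, assuming the legacy statsapi convention the fetch URLs rely on (4-digit season start year, 2-digit game type where 01 is preseason, 02 regular season and 03 playoffs, then a 4-digit game number); the gamePk value here is illustrative:

# a gamePk such as 2019020418 splits into season start year, game type and game number
game_pk = '2019020418'

season_start = game_pk[0:4]   # '2019'
game_type = game_pk[4:6]      # '02' -> regular season
game_id = game_pk[5:]         # '20418', the 5-digit form used throughout this repo

# the > 39999 filter above therefore keeps preseason, regular season and playoff games
# and drops anything beyond the playoffs (e.g. all-star exhibitions)
keep = int(game_id) <= 39999
print(season_start, game_type, game_id, keep)
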
/chart_units_pk_onice_xg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import parameters
10 | import matplotlib.colors as clr
11 | import dict_team_colors
12 | import mod_switch_colors
13 |
14 | def parse_ids(season_id, game_id, images):
15 |
16 | # pull common variables from the parameters file
17 | charts_units_pk = parameters.charts_units_pk
18 | files_root = parameters.files_root
19 |
20 | # generate date and team information
21 | schedule_csv = files_root + season_id + "_schedule.csv"
22 |
23 | schedule_df = pd.read_csv(schedule_csv)
24 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
25 |
26 | date = schedule_date['DATE'].item()
27 | home = schedule_date['HOME'].item()
28 | away = schedule_date['AWAY'].item()
29 | teams = [away, home]
30 |
31 | # create variables that point to the .csv processed stats files for lines
32 | pk_file = files_root + 'stats_units_pk_onice.csv'
33 |
34 | # create dataframe objects that read in info from the .csv files
35 | pk_df = pd.read_csv(pk_file)
36 |
37 | max_toi = pk_df['TOI'].max()
38 |
39 | # choose colors for each team; set them in a list; generate a custom colormap for each team
40 | away_color = dict_team_colors.team_color_1st[away]
41 | home_color = dict_team_colors.team_color_1st[home]
42 |
43 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
44 | try:
45 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
46 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
47 | except:
48 | pass
49 |
50 | team_colors = [away_color, home_color]
51 |
52 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
53 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
54 |
55 | ###
56 |     ### PK
57 | ###
58 |
59 | # loop through each team
60 | for team in teams:
61 |
62 | if team == away:
63 | team_color = team_colors[0]
64 | opponent_color = team_colors[1]
65 | team_color_map = plt.cm.get_cmap(away_cmap)
66 | opponent_color_map = plt.cm.get_cmap(home_cmap)
67 |
68 | if team == home:
69 | team_color = team_colors[1]
70 | opponent_color = team_colors[0]
71 | team_color_map = plt.cm.get_cmap(home_cmap)
72 | opponent_color_map = plt.cm.get_cmap(away_cmap)
73 |
74 | # create a lines dataframe; filter for team; sort by time on ice; keep the lines with the 8 highest totals; rank and then invert the rankings
75 | team_pk_df = pk_df.copy()
76 | team_pk_df = team_pk_df[(team_pk_df['TEAM'] == team)]
77 | team_pk_df = team_pk_df.sort_values(by=['TOI'], ascending = True)
78 | team_pk_df = team_pk_df.iloc[-8:]
79 | team_pk_df['RANK'] = team_pk_df['TOI'].rank(method='first')
80 | team_pk_df = team_pk_df.sort_values(by=['RANK'], ascending = True)
81 | team_pk_df['RANK'] -= 1
82 |
83 | team_pk_df['UNIT'] = team_pk_df['UNIT'] + ' (' + team_pk_df['STRENGTH'] + ')'
84 |
85 | # remove zeros from the differential column
86 | team_pk_df['xGD'] = team_pk_df['xGD'].replace(0, np.NaN)
87 |
88 | # make expected goals against negative values
89 | team_pk_df['xGA'] *= -1
90 |
91 | # create another lines dataframe with just the time on ice column; set a max value; scale each line's time on ice relative to the max value
92 | pk_toi = team_pk_df['TOI']
93 | team_max_pk_toi = pk_toi.max()
94 |
95 | pk_toi_color = pk_toi / float(team_max_pk_toi)
96 |
97 | # connect team and opponent color map colors to each line's scaled time on ice
98 | pk_toi_color_map_for = team_color_map(pk_toi_color)
99 | pk_toi_color_map_against = opponent_color_map(pk_toi_color)
100 |
101 | # create a figure with two subplots sharing the y-axis
102 | fig = plt.figure(figsize=(8,8))
103 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
104 |
105 | ax_pk_xg = fig.add_subplot(grid[0, 0:-2])
106 | ax_pk_toi = fig.add_subplot(grid[0, -1])
107 |
108 | # set the plot title
109 | fig.suptitle(date + ' Penalty Kill Units On-Ice Expected Goals\n\n')
110 |
111 |         ax_pk_xg.set_title('PK xG', fontsize=10)
112 |         ax_pk_toi.set_title('PK TOI', fontsize=10)
113 |
114 | # create bars for expected goals for and against as well as line markers (to note the expected goals differential) for each line
115 | try:
116 | pk_xGF_plot = team_pk_df.plot.barh(x='UNIT', y='xGF', stacked=True, color=pk_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_pk_xg);
117 | except:
118 | pass
119 | try:
120 | pk_xGA_plot = team_pk_df.plot.barh(x='UNIT', y='xGA', stacked=True, color=pk_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_pk_xg);
121 | except:
122 | pass
123 | try:
124 | pk_xGD_plot = team_pk_df.plot(x='xGD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_pk_xg);
125 | except:
126 | pass
127 |
128 | # plot the bars for time on ice
129 | try:
130 | toi_pk = team_pk_df.plot.barh(x='UNIT', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_pk_toi);
131 | except:
132 | pass
133 |
134 | # remove the labels for each subplot
135 | ax_pk_xg.set_xlabel('')
136 | ax_pk_xg.set_ylabel('')
137 |
138 | ax_pk_toi.set_xlabel('')
139 | ax_pk_toi.set_ylabel('')
140 |
141 | # set vertical indicators for break-even expected goals differential
142 | ax_pk_xg.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
143 |
144 | # change the tick parameters
145 | ax_pk_xg.tick_params(
146 | axis='both',
147 | which='both',
148 | bottom=False,
149 | top=False,
150 | left=False,
151 | labelleft=True, # labels along the left edge are on
152 | labelbottom=True)
153 |
154 | ax_pk_toi.tick_params(
155 | axis='both',
156 | which='both',
157 | bottom=False,
158 | top=False,
159 | left=False,
160 | labelleft=False, # labels along the left edge are off
161 | labelbottom=True)
162 |
163 | # change the y-axis label colors
164 | ax_pk_xg.tick_params(
165 | axis='y',
166 | which='both',
167 | labelcolor=team_color)
168 |
169 | # create a list of x-axis tick values contingent on the max values for expected goals for and against
170 | xGF_max = pk_df['xGF']
171 | xGF_max = xGF_max.max()
172 |
173 | xGA_max = pk_df['xGA']
174 | xGA_max = xGA_max.max()
175 |
176 | xG_tickmax = int()
177 |         if xGF_max >= xGA_max:
178 | xG_tickmax = xGF_max
179 | if xGF_max < xGA_max:
180 | xG_tickmax = xGA_max
181 |
182 | xG_ticklabels = []
183 | if xG_tickmax > 0 and xG_tickmax <= 0.5:
184 | xG_ticklabels = [-0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5]
185 | if xG_tickmax > 0.5 and xG_tickmax <= 1:
186 | xG_ticklabels = [-1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
187 | if xG_tickmax > 1 and xG_tickmax <= 1.5:
188 | xG_ticklabels = [-1.5, -1.2, -0.9, -0.6, -0.3, 0.0, 0.3, 0.6, 0.9, 1.2, 1.5]
189 | if xG_tickmax > 1.5 and xG_tickmax <= 2:
190 | xG_ticklabels = [-2.0, -1.6, -1.2, -0.8, -0.4, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0]
191 | if xG_tickmax > 2 and xG_tickmax <= 2.5:
192 | xG_ticklabels = [-2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5]
193 | if xG_tickmax > 2.5 and xG_tickmax <= 3:
194 | xG_ticklabels = [-3.0, -2.4, -1.8, -1.2, -0.6, 0.0, 0.6, 1.2, 1.8, 2.4, 3.0]
195 | if xG_tickmax > 3 and xG_tickmax <= 3.5:
196 | xG_ticklabels = [-3.5, -2.8, -2.1, -1.4, -0.7, 0.0, 0.7, 1.4, 2.1, 2.8, 3.5]
197 | if xG_tickmax > 3.5 and xG_tickmax <= 4:
198 | xG_ticklabels = [-4.0, -3.2, -2.4, -1.6, -0.8, 0.0, 0.8, 1.6, 2.4, 3.2, 4.0]
199 |
200 | toi_tickmax = max_toi
201 |
202 | toi_ticklabels = []
203 | if toi_tickmax <= 2:
204 | toi_ticklabels = [0, 2]
205 | if toi_tickmax > 2 and toi_tickmax <= 4:
206 | toi_ticklabels = [0, 4]
207 | if toi_tickmax > 4 and toi_tickmax <= 6:
208 | toi_ticklabels = [0, 6]
209 | if toi_tickmax > 6 and toi_tickmax <= 8:
210 | toi_ticklabels = [0, 8]
211 | if toi_tickmax > 8 and toi_tickmax <= 10:
212 | toi_ticklabels = [0, 10]
213 | if toi_tickmax > 10 and toi_tickmax <= 12:
214 | toi_ticklabels = [0, 12]
215 | if toi_tickmax > 12 and toi_tickmax <= 14:
216 | toi_ticklabels = [0, 14]
217 | if toi_tickmax > 14 and toi_tickmax <= 16:
218 | toi_ticklabels = [0, 16]
219 | if toi_tickmax > 16 and toi_tickmax <= 18:
220 | toi_ticklabels = [0, 18]
221 | if toi_tickmax > 18 and toi_tickmax <= 20:
222 | toi_ticklabels = [0, 20]
223 |
224 | # set vertical indicator for midpoint of time on ice max
225 | ax_pk_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
226 | ax_pk_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
227 |
228 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
229 | ax_pk_xg.set_xticks(xG_ticklabels, minor=False)
230 | ax_pk_toi.set_xticks(toi_ticklabels, minor=False)
231 |
232 | # remove the borders to each subplot
233 | ax_pk_xg.spines["top"].set_visible(False)
234 | ax_pk_xg.spines["bottom"].set_visible(False)
235 | ax_pk_xg.spines["right"].set_visible(False)
236 | ax_pk_xg.spines["left"].set_visible(False)
237 |
238 | ax_pk_toi.spines["top"].set_visible(False)
239 | ax_pk_toi.spines["bottom"].set_visible(False)
240 | ax_pk_toi.spines["right"].set_visible(False)
241 | ax_pk_toi.spines["left"].set_visible(False)
242 |
243 | # add a legend for the shot type markers
244 | from matplotlib.lines import Line2D
245 | elements = [Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
246 | ax_pk_xg.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
247 |
248 | # add text boxes with team names in white and with the team's color in the background
249 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
250 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
251 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
252 |
253 |
254 | ###
255 | ### SAVE TO FILE
256 | ###
257 |
258 | if team == away:
259 | plt.savefig(charts_units_pk + 'onice_xg_away_pk.png', bbox_inches='tight', pad_inches=0.2)
260 | elif team == home:
261 | plt.savefig(charts_units_pk + 'onice_xg_home_pk.png', bbox_inches='tight', pad_inches=0.2)
262 |
263 | # exercise a command-line option to show the current figure
264 | if images == 'show':
265 | plt.show()
266 |
267 |
268 | ###
269 | ### CLOSE
270 | ###
271 |
272 | plt.close(fig)
273 |
274 | # status update
275 |         print('Plotting ' + team + ' penalty kill units on-ice xG.')
276 |
277 | # status update
278 |     print('Finished plotting on-ice xG for penalty kill units.')
--------------------------------------------------------------------------------
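Each unit's bar color in the chart above is the team's white-to-color ramp sampled at that unit's share of the team's highest TOI, so the busiest unit is fully saturated. A stand-alone sketch of just that shading step, with a placeholder hex color and placeholder TOI values:

import numpy as np
import matplotlib.colors as clr

# white-to-team-color ramp, built the same way as the per-team colormaps above (placeholder hex)
team_cmap = clr.LinearSegmentedColormap.from_list('custom team', [(0, '#ffffff'), (1, '#cc0000')], N=256)

# time on ice per unit in minutes (placeholder values); scale to the team's maximum
toi = np.array([1.2, 2.5, 4.8, 6.0])
toi_scaled = toi / toi.max()

# sampling the colormap with the scaled values yields one RGBA color per unit's bar
bar_colors = team_cmap(toi_scaled)
print(bar_colors.shape)  # (4, 4): four units, one RGBA color each
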
/chart_units_pp_onice_xg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import matplotlib as mpl
10 | import parameters
11 | import matplotlib.colors as clr
12 | import dict_team_colors
13 | import mod_switch_colors
14 |
15 | def parse_ids(season_id, game_id, images):
16 |
17 | # pull common variables from the parameters file
18 | charts_units_pp = parameters.charts_units_pp
19 | files_root = parameters.files_root
20 |
21 | # generate date and team information
22 | schedule_csv = files_root + season_id + "_schedule.csv"
23 |
24 | schedule_df = pd.read_csv(schedule_csv)
25 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
26 |
27 | date = schedule_date['DATE'].item()
28 | home = schedule_date['HOME'].item()
29 | away = schedule_date['AWAY'].item()
30 | teams = [away, home]
31 |
32 | # create variables that point to the .csv processed stats files for lines
33 | pp_file = files_root + 'stats_units_pp_onice.csv'
34 |
35 | # create dataframe objects that read in info from the .csv files
36 | pp_df = pd.read_csv(pp_file)
37 |
38 | max_toi = pp_df['TOI'].max()
39 |
40 | # choose colors for each team; set them in a list; generate a custom colormap for each team
41 | away_color = dict_team_colors.team_color_1st[away]
42 | home_color = dict_team_colors.team_color_1st[home]
43 |
44 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
45 | try:
46 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
47 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
48 | except:
49 | pass
50 |
51 | team_colors = [away_color, home_color]
52 |
53 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
54 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
55 |
56 | ###
57 | ### PP
58 | ###
59 |
60 | # loop through each team
61 | for team in teams:
62 |
63 | if team == away:
64 | team_color = team_colors[0]
65 | opponent_color = team_colors[1]
66 | team_color_map = plt.cm.get_cmap(away_cmap)
67 | opponent_color_map = plt.cm.get_cmap(home_cmap)
68 |
69 | if team == home:
70 | team_color = team_colors[1]
71 | opponent_color = team_colors[0]
72 | team_color_map = plt.cm.get_cmap(home_cmap)
73 | opponent_color_map = plt.cm.get_cmap(away_cmap)
74 |
75 | # create a lines dataframe; filter for team; sort by time on ice; keep the lines with the 8 highest totals; rank and then invert the rankings
76 | team_pp_df = pp_df.copy()
77 | team_pp_df = team_pp_df[(team_pp_df['TEAM'] == team)]
78 | team_pp_df = team_pp_df.sort_values(by=['TOI'], ascending = True)
79 | team_pp_df = team_pp_df.iloc[-8:]
80 | team_pp_df['RANK'] = team_pp_df['TOI'].rank(method='first')
81 | team_pp_df = team_pp_df.sort_values(by=['RANK'], ascending = True)
82 | team_pp_df['RANK'] -= 1
83 |
84 | team_pp_df['UNIT'] = team_pp_df['UNIT'] + ' (' + team_pp_df['STRENGTH'] + ')'
85 |
86 | # remove zeros from the differential column
87 | team_pp_df['xGD'] = team_pp_df['xGD'].replace(0, np.NaN)
88 |
89 | # make expected goals against negative values
90 | team_pp_df['xGA'] *= -1
91 |
92 | # create another lines dataframe with just the time on ice column; set a max value; scale each line's time on ice relative to the max value
93 | pp_toi = team_pp_df['TOI']
94 | team_max_pp_toi = pp_toi.max()
95 |
96 | pp_toi_color = pp_toi / float(team_max_pp_toi)
97 |
98 | # connect team and opponent color map colors to each line's scaled time on ice
99 | pp_toi_color_map_for = team_color_map(pp_toi_color)
100 | pp_toi_color_map_against = opponent_color_map(pp_toi_color)
101 |
102 | # create a figure with two subplots sharing the y-axis
103 | fig = plt.figure(figsize=(8,8))
104 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
105 |
106 | ax_pp_xg = fig.add_subplot(grid[0, 0:-2])
107 | ax_pp_toi = fig.add_subplot(grid[0, -1])
108 |
109 | # set the plot title
110 | fig.suptitle(date + ' Power Play Units On-Ice Expected Goals\n\n')
111 |
112 |         ax_pp_xg.set_title('PP xG', fontsize=10)
113 |         ax_pp_toi.set_title('PP TOI', fontsize=10)
114 |
115 | # create bars for expected goals for and against as well as line markers (to note the expected goals differential) for each line
116 | try:
117 | pp_xGF_plot = team_pp_df.plot.barh(x='UNIT', y='xGF', stacked=True, color=pp_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_pp_xg);
118 | except:
119 | pass
120 | try:
121 | pp_xGA_plot = team_pp_df.plot.barh(x='UNIT', y='xGA', stacked=True, color=pp_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_pp_xg);
122 | except:
123 | pass
124 | try:
125 | pp_xGD_plot = team_pp_df.plot(x='xGD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_pp_xg);
126 | except:
127 | pass
128 |
129 | # plot the bars for time on ice
130 | try:
131 | toi_pp = team_pp_df.plot.barh(x='UNIT', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_pp_toi);
132 | except:
133 | pass
134 |
135 | # remove the labels for each subplot
136 | ax_pp_xg.set_xlabel('')
137 | ax_pp_xg.set_ylabel('')
138 |
139 | ax_pp_toi.set_xlabel('')
140 | ax_pp_toi.set_ylabel('')
141 |
142 | # set vertical indicators for break-even expected goals differential
143 | ax_pp_xg.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
144 |
145 | # change the tick parameters
146 | ax_pp_xg.tick_params(
147 | axis='both',
148 | which='both',
149 | bottom=False,
150 | top=False,
151 | left=False,
152 | labelleft=True, # labels along the left edge are on
153 | labelbottom=True)
154 |
155 | ax_pp_toi.tick_params(
156 | axis='both',
157 | which='both',
158 | bottom=False,
159 | top=False,
160 | left=False,
161 | labelleft=False, # labels along the left edge are off
162 | labelbottom=True)
163 |
164 | # change the y-axis label colors
165 | ax_pp_xg.tick_params(
166 | axis='y',
167 | which='both',
168 | labelcolor=team_color)
169 |
170 | # create a list of x-axis tick values contingent on the max values for expected goals for and against
171 | xGF_max = pp_df['xGF']
172 | xGF_max = xGF_max.max()
173 |
174 | xGA_max = pp_df['xGA']
175 | xGA_max = xGA_max.max()
176 |
177 | xG_tickmax = int()
178 |         if xGF_max >= xGA_max:
179 | xG_tickmax = xGF_max
180 | if xGF_max < xGA_max:
181 | xG_tickmax = xGA_max
182 |
183 | xG_ticklabels = []
184 | if xG_tickmax > 0 and xG_tickmax <= 0.5:
185 | xG_ticklabels = [-0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5]
186 | if xG_tickmax > 0.5 and xG_tickmax <= 1:
187 | xG_ticklabels = [-1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
188 | if xG_tickmax > 1 and xG_tickmax <= 1.5:
189 | xG_ticklabels = [-1.5, -1.2, -0.9, -0.6, -0.3, 0.0, 0.3, 0.6, 0.9, 1.2, 1.5]
190 | if xG_tickmax > 1.5 and xG_tickmax <= 2:
191 | xG_ticklabels = [-2.0, -1.6, -1.2, -0.8, -0.4, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0]
192 | if xG_tickmax > 2 and xG_tickmax <= 2.5:
193 | xG_ticklabels = [-2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5]
194 | if xG_tickmax > 2.5 and xG_tickmax <= 3:
195 | xG_ticklabels = [-3.0, -2.4, -1.8, -1.2, -0.6, 0.0, 0.6, 1.2, 1.8, 2.4, 3.0]
196 | if xG_tickmax > 3 and xG_tickmax <= 3.5:
197 | xG_ticklabels = [-3.5, -2.8, -2.1, -1.4, -0.7, 0.0, 0.7, 1.4, 2.1, 2.8, 3.5]
198 | if xG_tickmax > 3.5 and xG_tickmax <= 4:
199 | xG_ticklabels = [-4.0, -3.2, -2.4, -1.6, -0.8, 0.0, 0.8, 1.6, 2.4, 3.2, 4.0]
200 |
201 | toi_tickmax = max_toi
202 |
203 | toi_ticklabels = []
204 | if toi_tickmax <= 2:
205 | toi_ticklabels = [0, 2]
206 | if toi_tickmax > 2 and toi_tickmax <= 4:
207 | toi_ticklabels = [0, 4]
208 | if toi_tickmax > 4 and toi_tickmax <= 6:
209 | toi_ticklabels = [0, 6]
210 | if toi_tickmax > 6 and toi_tickmax <= 8:
211 | toi_ticklabels = [0, 8]
212 | if toi_tickmax > 8 and toi_tickmax <= 10:
213 | toi_ticklabels = [0, 10]
214 | if toi_tickmax > 10 and toi_tickmax <= 12:
215 | toi_ticklabels = [0, 12]
216 | if toi_tickmax > 12 and toi_tickmax <= 14:
217 | toi_ticklabels = [0, 14]
218 | if toi_tickmax > 14 and toi_tickmax <= 16:
219 | toi_ticklabels = [0, 16]
220 | if toi_tickmax > 16 and toi_tickmax <= 18:
221 | toi_ticklabels = [0, 18]
222 | if toi_tickmax > 18 and toi_tickmax <= 20:
223 | toi_ticklabels = [0, 20]
224 |
225 | # set vertical indicator for midpoint of time on ice max
226 | ax_pp_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
227 | ax_pp_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
228 |
229 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
230 | ax_pp_xg.set_xticks(xG_ticklabels, minor=False)
231 | ax_pp_toi.set_xticks(toi_ticklabels, minor=False)
232 |
233 | # remove the borders to each subplot
234 | ax_pp_xg.spines["top"].set_visible(False)
235 | ax_pp_xg.spines["bottom"].set_visible(False)
236 | ax_pp_xg.spines["right"].set_visible(False)
237 | ax_pp_xg.spines["left"].set_visible(False)
238 |
239 | ax_pp_toi.spines["top"].set_visible(False)
240 | ax_pp_toi.spines["bottom"].set_visible(False)
241 | ax_pp_toi.spines["right"].set_visible(False)
242 | ax_pp_toi.spines["left"].set_visible(False)
243 |
244 | # add a legend for the shot type markers
245 | from matplotlib.lines import Line2D
246 | elements = [Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
247 | ax_pp_xg.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
248 |
249 | # add text boxes with team names in white and with the team's color in the background
250 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
251 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
252 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
253 |
254 |
255 | ###
256 | ### SAVE TO FILE
257 | ###
258 |
259 | if team == away:
260 | plt.savefig(charts_units_pp + 'onice_xg_away_pp.png', bbox_inches='tight', pad_inches=0.2)
261 | elif team == home:
262 | plt.savefig(charts_units_pp + 'onice_xg_home_pp.png', bbox_inches='tight', pad_inches=0.2)
263 |
264 | # exercise a command-line option to show the current figure
265 | if images == 'show':
266 | plt.show()
267 |
268 |
269 | ###
270 | ### CLOSE
271 | ###
272 |
273 | plt.close(fig)
274 |
275 | # status update
276 | print('Plotting ' + team + ' power play units on-ice xG.')
277 |
278 | # status update
279 | print('Finished plotting on-ice xG for power play units.')
--------------------------------------------------------------------------------
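The ladder of if-statements that picks xG tick labels in these chart scripts always produces eleven symmetric ticks whose bound is the larger of the two xG maxima rounded up to the next half goal, capped at 4.0. A compact equivalent sketch of that selection, assuming the same half-goal granularity and cap:

import numpy as np

def xg_ticks(xgf_max, xga_max, step=0.5, cap=4.0):
    # round the larger maximum up to the next half goal, capped at the same 4.0 ceiling as above
    bound = min(np.ceil(max(xgf_max, xga_max) / step) * step, cap)
    # eleven evenly spaced ticks from -bound to +bound, matching the hand-written lists
    return np.linspace(-bound, bound, 11).round(2).tolist()

print(xg_ticks(1.37, 0.84))  # [-1.5, -1.2, -0.9, ..., 0.9, 1.2, 1.5]
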
/chart_units_lines_onice_xg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import matplotlib as mpl
10 | import parameters
11 | import matplotlib.colors as clr
12 | import dict_team_colors
13 | import mod_switch_colors
14 |
15 | def parse_ids(season_id, game_id, images):
16 |
17 | # pull common variables from the parameters file
18 | charts_units_lines = parameters.charts_units_lines
19 | files_root = parameters.files_root
20 |
21 | # generate date and team information
22 | schedule_csv = files_root + season_id + "_schedule.csv"
23 |
24 | schedule_df = pd.read_csv(schedule_csv)
25 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
26 |
27 | date = schedule_date['DATE'].item()
28 | home = schedule_date['HOME'].item()
29 | away = schedule_date['AWAY'].item()
30 | teams = [away, home]
31 |
32 | # create variables that point to the .csv processed stats files for lines
33 | lines_file = files_root + 'stats_units_lines_onice.csv'
34 |
35 | # create dataframe objects that read in info from the .csv files
36 | lines_df = pd.read_csv(lines_file)
37 |
38 | max_toi = lines_df['TOI'].max()
39 |
40 | # choose colors for each team; set them in a list; generate a custom colormap for each team
41 | away_color = dict_team_colors.team_color_1st[away]
42 | home_color = dict_team_colors.team_color_1st[home]
43 |
44 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
45 | try:
46 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
47 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
48 | except:
49 | pass
50 |
51 | team_colors = [away_color, home_color]
52 |
53 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
54 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
55 |
56 | ###
57 | ### 5v5
58 | ###
59 |
60 | # loop through each team
61 | for team in teams:
62 |
63 | if team == away:
64 | team_color = team_colors[0]
65 | opponent_color = team_colors[1]
66 | team_color_map = plt.cm.get_cmap(away_cmap)
67 | opponent_color_map = plt.cm.get_cmap(home_cmap)
68 |
69 | if team == home:
70 | team_color = team_colors[1]
71 | opponent_color = team_colors[0]
72 | team_color_map = plt.cm.get_cmap(home_cmap)
73 | opponent_color_map = plt.cm.get_cmap(away_cmap)
74 |
75 | # create a lines dataframe; filter for team; sort by time on ice; keep the lines with the 8 highest totals; rank and then invert the rankings
76 | team_lines_df = lines_df.copy()
77 | team_lines_df = team_lines_df[(team_lines_df['TEAM'] == team)]
78 | team_lines_df = team_lines_df.sort_values(by=['TOI'], ascending = True)
79 | team_lines_df = team_lines_df.iloc[-8:]
80 | team_lines_df['RANK'] = team_lines_df['TOI'].rank(method='first')
81 | team_lines_df = team_lines_df.sort_values(by=['RANK'], ascending = True)
82 | team_lines_df['RANK'] -= 1
83 |
84 | # remove zeros from the differential column
85 | team_lines_df['xGD'] = team_lines_df['xGD'].replace(0, np.NaN)
86 |
87 | # make expected goals against negative values
88 | team_lines_df['xGA'] *= -1
89 |
90 | # create another lines dataframe with just the time on ice column; set a max value; scale each line's time on ice relative to the max value
91 | lines_toi = team_lines_df['TOI']
92 |
93 | max_lines_toi = lines_toi.max()
94 |
95 | lines_toi_color = lines_toi / float(max_lines_toi)
96 |
97 | # connect team and opponent color map colors to each line's scaled time on ice
98 | lines_toi_color_map_for = team_color_map(lines_toi_color)
99 | lines_toi_color_map_against = opponent_color_map(lines_toi_color)
100 |
101 | # create a figure with two subplots sharing the y-axis
102 | fig = plt.figure(figsize=(8,8))
103 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
104 |
105 | ax_lines_xg = fig.add_subplot(grid[0, 0:-2])
106 | ax_lines_toi = fig.add_subplot(grid[0, -1])
107 |
108 | # set the plot title
109 | fig.suptitle(date + ' Forward Lines On-Ice Expected Goals\n\n')
110 |
111 | ax_lines_xg.set_title('5v5 xG', fontsize=10)
112 | ax_lines_toi.set_title('5v5 TOI', fontsize=10)
113 |
114 | # create bars for expected goals for and against as well as line markers (to note the expected goals differential) for each line
115 | try:
116 | lines_xGF_plot = team_lines_df.plot.barh(x='LINE', y='xGF', stacked=True, color=lines_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_lines_xg);
117 | except:
118 | pass
119 | try:
120 | lines_xGA_plot = team_lines_df.plot.barh(x='LINE', y='xGA', stacked=True, color=lines_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_lines_xg);
121 | except:
122 | pass
123 | try:
124 | lines_xGD_plot = team_lines_df.plot(x='xGD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_lines_xg);
125 | except:
126 | pass
127 |
128 | # plot the bars for time on ice
129 | try:
130 | toi_lines = team_lines_df.plot.barh(x='LINE', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_lines_toi);
131 | except:
132 | pass
133 |
134 | # remove the labels for each subplot
135 | ax_lines_xg.set_xlabel('')
136 | ax_lines_xg.set_ylabel('')
137 |
138 | ax_lines_toi.set_xlabel('')
139 | ax_lines_toi.set_ylabel('')
140 |
141 | # set vertical indicators for break-even expected goals differential
142 | ax_lines_xg.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
143 |
144 | # change the tick parameters
145 | ax_lines_xg.tick_params(
146 | axis='both',
147 | which='both',
148 | bottom=False,
149 | top=False,
150 | left=False,
151 | labelleft=True, # labels along the left edge are on
152 | labelbottom=True)
153 |
154 | ax_lines_toi.tick_params(
155 | axis='both',
156 | which='both',
157 | bottom=False,
158 | top=False,
159 | left=False,
160 | labelleft=False, # labels along the left edge are off
161 | labelbottom=True)
162 |
163 | # change the y-axis label colors
164 | ax_lines_xg.tick_params(
165 | axis='y',
166 | which='both',
167 | labelcolor=team_color)
168 |
169 | # create a list of x-axis tick values contingent on the max values for expected goals for and against
170 | xGF_max = lines_df['xGF']
171 | xGF_max = xGF_max.max()
172 |
173 | xGA_max = lines_df['xGA']
174 | xGA_max = xGA_max.max()
175 |
176 | xG_tickmax = int()
177 |         if xGF_max >= xGA_max:
178 | xG_tickmax = xGF_max
179 | if xGF_max < xGA_max:
180 | xG_tickmax = xGA_max
181 |
182 | xG_ticklabels = []
183 | if xG_tickmax > 0 and xG_tickmax <= 0.5:
184 | xG_ticklabels = [-0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5]
185 | if xG_tickmax > 0.5 and xG_tickmax <= 1:
186 | xG_ticklabels = [-1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
187 | if xG_tickmax > 1 and xG_tickmax <= 1.5:
188 | xG_ticklabels = [-1.5, -1.2, -0.9, -0.6, -0.3, 0.0, 0.3, 0.6, 0.9, 1.2, 1.5]
189 | if xG_tickmax > 1.5 and xG_tickmax <= 2:
190 | xG_ticklabels = [-2.0, -1.6, -1.2, -0.8, -0.4, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0]
191 | if xG_tickmax > 2 and xG_tickmax <= 2.5:
192 | xG_ticklabels = [-2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5]
193 | if xG_tickmax > 2.5 and xG_tickmax <= 3:
194 | xG_ticklabels = [-3.0, -2.4, -1.8, -1.2, -0.6, 0.0, 0.6, 1.2, 1.8, 2.4, 3.0]
195 | if xG_tickmax > 3 and xG_tickmax <= 3.5:
196 | xG_ticklabels = [-3.5, -2.8, -2.1, -1.4, -0.7, 0.0, 0.7, 1.4, 2.1, 2.8, 3.5]
197 | if xG_tickmax > 3.5 and xG_tickmax <= 4:
198 | xG_ticklabels = [-4.0, -3.2, -2.4, -1.6, -0.8, 0.0, 0.8, 1.6, 2.4, 3.2, 4.0]
199 |
200 | toi_tickmax = max_toi
201 |
202 | toi_ticklabels = []
203 | if toi_tickmax <= 2:
204 | toi_ticklabels = [0, 2]
205 | if toi_tickmax > 2 and toi_tickmax <= 4:
206 | toi_ticklabels = [0, 4]
207 | if toi_tickmax > 4 and toi_tickmax <= 6:
208 | toi_ticklabels = [0, 6]
209 | if toi_tickmax > 6 and toi_tickmax <= 8:
210 | toi_ticklabels = [0, 8]
211 | if toi_tickmax > 8 and toi_tickmax <= 10:
212 | toi_ticklabels = [0, 10]
213 | if toi_tickmax > 10 and toi_tickmax <= 12:
214 | toi_ticklabels = [0, 12]
215 | if toi_tickmax > 12 and toi_tickmax <= 14:
216 | toi_ticklabels = [0, 14]
217 | if toi_tickmax > 14 and toi_tickmax <= 16:
218 | toi_ticklabels = [0, 16]
219 | if toi_tickmax > 16 and toi_tickmax <= 18:
220 | toi_ticklabels = [0, 18]
221 | if toi_tickmax > 18 and toi_tickmax <= 20:
222 | toi_ticklabels = [0, 20]
223 |
224 | # set vertical indicator for midpoint of time on ice max
225 | ax_lines_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
226 | ax_lines_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
227 |
228 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
229 | ax_lines_xg.set_xticks(xG_ticklabels, minor=False)
230 | ax_lines_toi.set_xticks(toi_ticklabels, minor=False)
231 |
232 | # remove the borders to each subplot
233 | ax_lines_xg.spines["top"].set_visible(False)
234 | ax_lines_xg.spines["bottom"].set_visible(False)
235 | ax_lines_xg.spines["right"].set_visible(False)
236 | ax_lines_xg.spines["left"].set_visible(False)
237 |
238 | ax_lines_toi.spines["top"].set_visible(False)
239 | ax_lines_toi.spines["bottom"].set_visible(False)
240 | ax_lines_toi.spines["right"].set_visible(False)
241 | ax_lines_toi.spines["left"].set_visible(False)
242 |
243 | # add a legend for the shot type markers
244 | from matplotlib.lines import Line2D
245 | elements = [Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
246 | ax_lines_xg.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
247 |
248 | # add text boxes with team names in white and with the team's color in the background
249 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
250 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
251 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
252 |
253 |
254 | ###
255 | ### SAVE TO FILE
256 | ###
257 |
258 | if team == away:
259 | plt.savefig(charts_units_lines + 'onice_xg_away_lines.png', bbox_inches='tight', pad_inches=0.2)
260 | elif team == home:
261 | plt.savefig(charts_units_lines + 'onice_xg_home_lines.png', bbox_inches='tight', pad_inches=0.2)
262 |
263 | # exercise a command-line option to show the current figure
264 | if images == 'show':
265 | plt.show()
266 |
267 |
268 | ###
269 | ### CLOSE
270 | ###
271 |
272 | plt.close(fig)
273 |
274 | # status update
275 | print('Plotting ' + team + ' lines 5v5 on-ice xG.')
276 |
277 | # status update
278 | print('Finished plotting 5v5 on-ice xG for lines.')
--------------------------------------------------------------------------------
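In these on-ice xG scripts the RANK column exists only so the horizontal bars draw from the least-used combination at the bottom to the most-used at the top, and so the differential markers can be placed against a zero-based y position. A tiny sketch of that ranking step with placeholder line data:

import pandas as pd

# placeholder 5v5 lines and their time on ice in minutes
lines = pd.DataFrame({'LINE': ['A-B-C', 'D-E-F', 'G-H-I'], 'TOI': [9.4, 12.1, 6.7]})

# keep the busiest combinations, then rank by TOI so barh draws them lowest-to-highest bottom-up
lines = lines.sort_values('TOI').iloc[-8:]
lines['RANK'] = lines['TOI'].rank(method='first') - 1  # zero-based marker y-position

# G-H-I (6.7) gets RANK 0, A-B-C (9.4) gets 1, D-E-F (12.1) gets 2
print(lines)
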
/chart_units_pairings_onice_xg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 |
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import parameters
10 | import matplotlib.colors as clr
11 | import dict_team_colors
12 | import mod_switch_colors
13 |
14 | def parse_ids(season_id, game_id, images):
15 |
16 | # pull common variables from the parameters file
17 | charts_units_pairings = parameters.charts_units_pairings
18 | files_root = parameters.files_root
19 |
20 | # generate date and team information
21 | schedule_csv = files_root + season_id + "_schedule.csv"
22 |
23 | schedule_df = pd.read_csv(schedule_csv)
24 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
25 |
26 | date = schedule_date['DATE'].item()
27 | home = schedule_date['HOME'].item()
28 | away = schedule_date['AWAY'].item()
29 | teams = [away, home]
30 |
31 | # create variables that point to the .csv processed stats file for pairings
32 | pairings_file = files_root + 'stats_units_pairings_onice.csv'
33 |
34 | # create dataframe objects that read in info from the .csv files
35 | pairings_df = pd.read_csv(pairings_file)
36 |
37 | max_toi = pairings_df['TOI'].max()
38 |
39 | # choose colors for each team; set them in a list; generate a custom colormap for each team
40 | away_color = dict_team_colors.team_color_1st[away]
41 | home_color = dict_team_colors.team_color_1st[home]
42 |
43 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
44 | try:
45 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
46 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
47 | except:
48 | pass
49 |
50 | team_colors = [away_color, home_color]
51 |
52 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
53 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
54 |
55 |
56 | ###
57 | ### 5v5
58 | ###
59 |
60 | # loop through each team
61 | for team in teams:
62 |
63 | if team == away:
64 | team_color = team_colors[0]
65 | opponent_color = team_colors[1]
66 | team_color_map = plt.cm.get_cmap(away_cmap)
67 | opponent_color_map = plt.cm.get_cmap(home_cmap)
68 |
69 | if team == home:
70 | team_color = team_colors[1]
71 | opponent_color = team_colors[0]
72 | team_color_map = plt.cm.get_cmap(home_cmap)
73 | opponent_color_map = plt.cm.get_cmap(away_cmap)
74 |
75 |         # create a pairings dataframe; filter for team; sort by time on ice; keep the pairs with the 6 highest totals; rank and then invert the rankings
76 | team_pairings_df = pairings_df.copy()
77 | team_pairings_df = team_pairings_df[(team_pairings_df['TEAM'] == team)]
78 | team_pairings_df = team_pairings_df.sort_values(by=['TOI'], ascending = True)
79 | team_pairings_df = team_pairings_df.iloc[-6:]
80 | team_pairings_df['RANK'] = team_pairings_df['TOI'].rank(method='first')
81 | team_pairings_df['RANK'] -= 1
82 |
83 | # remove zeros from the differential column
84 | team_pairings_df['xGD'] = team_pairings_df['xGD'].replace(0, np.NaN)
85 |
86 | # make expected goals against negative values
87 | team_pairings_df['xGA'] *= -1
88 |
89 | # create another pairings dataframe with just the time on ice column; set a max value; scale each pair's time on ice relative to the max
90 | pairings_toi = team_pairings_df['TOI']
91 |
92 | max_pairings_toi = pairings_toi.max()
93 |
94 | pairings_toi_color = pairings_toi / float(max_pairings_toi)
95 |
96 | # connect team and opponent color map colors to each line's scaled time on ice
97 | pairings_toi_color_map_for = team_color_map(pairings_toi_color)
98 | pairings_toi_color_map_against = opponent_color_map(pairings_toi_color)
99 |
100 | # create a figure with two subplots sharing the y-axis
101 | fig = plt.figure(figsize=(8,8))
102 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
103 |
104 | ax_pairings_xg = fig.add_subplot(grid[0, 0:-2])
105 | ax_pairings_toi = fig.add_subplot(grid[0, -1])
106 |
107 | # set the plot title
108 | fig.suptitle(date + ' Pairings On-Ice Expected Goals\n\n')
109 |
110 | ax_pairings_xg.set_title('5v5 xG', fontsize=10)
111 | ax_pairings_toi.set_title('5v5 TOI', fontsize=10)
112 |
113 | # create bars for expected goals for and against as well as line markers (to note the expected goals differential) for each pair
114 | try:
115 | pairings_xGF_plot = team_pairings_df.plot.barh(x='PAIRING', y='xGF', stacked=True, color=pairings_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_pairings_xg);
116 | except:
117 | pass
118 | try:
119 | pairings_xGA_plot = team_pairings_df.plot.barh(x='PAIRING', y='xGA', stacked=True, color=pairings_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_pairings_xg);
120 | except:
121 | pass
122 | try:
123 | pairings_xGD_plot = team_pairings_df.plot(x='xGD', y='RANK', marker='|', markersize=19, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_pairings_xg);
124 | except:
125 | pass
126 |
127 | # plot the bars for time on ice
128 | try:
129 | toi_pairings = team_pairings_df.plot.barh(x='PAIRING', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_pairings_toi);
130 | except:
131 | pass
132 |
133 | # remove the labels for each subplot
134 | ax_pairings_xg.set_xlabel('')
135 | ax_pairings_xg.set_ylabel('')
136 |
137 | ax_pairings_toi.set_xlabel('')
138 | ax_pairings_toi.set_ylabel('')
139 |
140 | # set vertical indicators for break-even expected goals differential
141 | ax_pairings_xg.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
142 |
143 | # change the tick parameters
144 | ax_pairings_xg.tick_params(
145 | axis='both',
146 | which='both',
147 | bottom=False,
148 | top=False,
149 | left=False,
150 | labelleft=True, # labels along the left edge are on
151 | labelbottom=True)
152 |
153 | ax_pairings_toi.tick_params(
154 | axis='both',
155 | which='both',
156 | bottom=False,
157 | top=False,
158 | left=False,
159 | labelleft=False, # labels along the left edge are off
160 | labelbottom=True)
161 |
162 | # change the y-axis label colors
163 | ax_pairings_xg.tick_params(
164 | axis='y',
165 | which='both',
166 |             labelcolor=team_color)
167 |
168 | # create a list of x-axis tick values contingent on the max values for expected goals for and against
169 | xGF_max = pairings_df['xGF']
170 | xGF_max = xGF_max.max()
171 |
172 | xGA_max = pairings_df['xGA']
173 | xGA_max = xGA_max.max()
174 |
175 | xG_tickmax = int()
176 | if xGF_max >= xGA_max:
177 | xG_tickmax = xGF_max
178 | if xGF_max < xGA_max:
179 | xG_tickmax = xGA_max
180 |
181 | xG_ticklabels = []
182 | if xG_tickmax > 0 and xG_tickmax <= 0.5:
183 | xG_ticklabels = [-0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5]
184 | if xG_tickmax > 0.5 and xG_tickmax <= 1:
185 | xG_ticklabels = [-1.0, -0.8, -0.6, -0.4, -0.2, 0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
186 | if xG_tickmax > 1 and xG_tickmax <= 1.5:
187 | xG_ticklabels = [-1.5, -1.2, -0.9, -0.6, -0.3, 0.0, 0.3, 0.6, 0.9, 1.2, 1.5]
188 | if xG_tickmax > 1.5 and xG_tickmax <= 2:
189 | xG_ticklabels = [-2.0, -1.6, -1.2, -0.8, -0.4, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0]
190 | if xG_tickmax > 2 and xG_tickmax <= 2.5:
191 | xG_ticklabels = [-2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5]
192 | if xG_tickmax > 2.5 and xG_tickmax <= 3:
193 | xG_ticklabels = [-3.0, -2.4, -1.8, -1.2, -0.6, 0.0, 0.6, 1.2, 1.8, 2.4, 3.0]
194 | if xG_tickmax > 3 and xG_tickmax <= 3.5:
195 | xG_ticklabels = [-3.5, -2.8, -2.1, -1.4, -0.7, 0.0, 0.7, 1.4, 2.1, 2.8, 3.5]
196 | if xG_tickmax > 3.5 and xG_tickmax <= 4:
197 | xG_ticklabels = [-4.0, -3.2, -2.4, -1.6, -0.8, 0.0, 0.8, 1.6, 2.4, 3.2, 4.0]
198 |
199 | toi_tickmax = max_toi
200 |
201 | toi_ticklabels = []
202 | if toi_tickmax <= 2:
203 | toi_ticklabels = [0, 2]
204 | if toi_tickmax > 2 and toi_tickmax <= 4:
205 | toi_ticklabels = [0, 4]
206 | if toi_tickmax > 4 and toi_tickmax <= 6:
207 | toi_ticklabels = [0, 6]
208 | if toi_tickmax > 6 and toi_tickmax <= 8:
209 | toi_ticklabels = [0, 8]
210 | if toi_tickmax > 8 and toi_tickmax <= 10:
211 | toi_ticklabels = [0, 10]
212 | if toi_tickmax > 10 and toi_tickmax <= 12:
213 | toi_ticklabels = [0, 12]
214 | if toi_tickmax > 12 and toi_tickmax <= 14:
215 | toi_ticklabels = [0, 14]
216 | if toi_tickmax > 14 and toi_tickmax <= 16:
217 | toi_ticklabels = [0, 16]
218 | if toi_tickmax > 16 and toi_tickmax <= 18:
219 | toi_ticklabels = [0, 18]
220 | if toi_tickmax > 18 and toi_tickmax <= 20:
221 | toi_ticklabels = [0, 20]
222 |
223 |         # set vertical indicators for midpoint and max of time on ice
224 | ax_pairings_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
225 | ax_pairings_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
226 |
227 |             # use the newly-minted x-ticklabels to ensure the x-axis ticks always display at the chosen fixed intervals
228 | ax_pairings_xg.set_xticks(xG_ticklabels, minor=False)
229 | ax_pairings_toi.set_xticks(toi_ticklabels, minor=False)
230 |
231 | # remove the borders to each subplot
232 | ax_pairings_xg.spines["top"].set_visible(False)
233 | ax_pairings_xg.spines["bottom"].set_visible(False)
234 | ax_pairings_xg.spines["right"].set_visible(False)
235 | ax_pairings_xg.spines["left"].set_visible(False)
236 |
237 | ax_pairings_toi.spines["top"].set_visible(False)
238 | ax_pairings_toi.spines["bottom"].set_visible(False)
239 | ax_pairings_toi.spines["right"].set_visible(False)
240 | ax_pairings_toi.spines["left"].set_visible(False)
241 |
242 |         # add a legend for the differential marker
243 | from matplotlib.lines import Line2D
244 | elements = [Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
245 | ax_pairings_xg.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
246 |
247 | # add text boxes with team names in white and with the team's color in the background
248 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
249 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
250 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
251 |
252 |
253 | ###
254 | ### SAVE TO FILE
255 | ###
256 |
257 | if team == away:
258 | plt.savefig(charts_units_pairings + 'onice_xg_away_pairings.png', bbox_inches='tight', pad_inches=0.2)
259 | elif team == home:
260 | plt.savefig(charts_units_pairings + 'onice_xg_home_pairings.png', bbox_inches='tight', pad_inches=0.2)
261 |
262 | # exercise a command-line option to show the current figure
263 | if images == 'show':
264 | plt.show()
265 |
266 |
267 | ###
268 | ### CLOSE
269 | ###
270 |
271 | plt.close(fig)
272 |
273 | # status update
274 | print('Plotting ' + team + ' pairings 5v5 on-ice xG.')
275 |
276 | # status update
277 | print('Finished plotting 5v5 on-ice xG for pairings.')
--------------------------------------------------------------------------------
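
A note on the tick selection in the pairings expected-goals chart above: the chained if-statements bucket the observed xG and TOI maxima into fixed tick ranges. The same buckets can be computed arithmetically; the sketch below only illustrates that equivalence, with xg_max and toi_max standing in for the maxima already computed in the script (it is not code from this repository).

import numpy as np

def xg_ticks(xg_max):
    # round the larger absolute xG total up to the next 0.5 and build 11 symmetric ticks
    upper = max(0.5, np.ceil(xg_max * 2) / 2)
    return list(np.round(np.linspace(-upper, upper, 11), 2))

def toi_ticks(toi_max):
    # round the max time on ice up to the next even number of minutes
    return [0, max(2, int(np.ceil(toi_max / 2)) * 2)]
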
/chart_units_pk_onice_shots.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import parameters
10 | import matplotlib.colors as clr
11 | import dict_team_colors
12 | import mod_switch_colors
13 |
14 | def parse_ids(season_id, game_id, images):
15 |
16 | # pull common variables from the parameters file
17 | charts_units_pk = parameters.charts_units_pk
18 | files_root = parameters.files_root
19 |
20 | # generate date and team information
21 | schedule_csv = files_root + season_id + "_schedule.csv"
22 |
23 | schedule_df = pd.read_csv(schedule_csv)
24 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
25 |
26 | date = schedule_date['DATE'].item()
27 | home = schedule_date['HOME'].item()
28 | away = schedule_date['AWAY'].item()
29 | teams = [away, home]
30 |
31 |     # create variables that point to the .csv processed stats files for penalty kill units
32 | pk_file = files_root + 'stats_units_pk_onice.csv'
33 |
34 | # create dataframe objects that read in info from the .csv files
35 | pk_df = pd.read_csv(pk_file)
36 |
37 | max_pk_toi = pk_df['TOI'].max()
38 |
39 | # choose colors for each team; set them in a list; generate a custom colormap for each team
40 | away_color = dict_team_colors.team_color_1st[away]
41 | home_color = dict_team_colors.team_color_1st[home]
42 |
43 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
44 | try:
45 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
46 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
47 | except:
48 | pass
49 |
50 | team_colors = [away_color, home_color]
51 |
52 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
53 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
54 |
55 |
56 | ###
57 |     ### PK
58 | ###
59 |
60 | # loop through each team
61 | for team in teams:
62 |
63 | if team == away:
64 | team_color = team_colors[0]
65 | opponent_color = team_colors[1]
66 | team_color_map = plt.cm.get_cmap(away_cmap)
67 | opponent_color_map = plt.cm.get_cmap(home_cmap)
68 |
69 | if team == home:
70 | team_color = team_colors[1]
71 | opponent_color = team_colors[0]
72 | team_color_map = plt.cm.get_cmap(home_cmap)
73 | opponent_color_map = plt.cm.get_cmap(away_cmap)
74 |
75 |         # create a penalty kill units dataframe; filter for team; sort by time on ice; keep the units with the 8 highest totals; rank and then invert the rankings
76 | team_pk_df = pk_df.copy()
77 | team_pk_df = team_pk_df[(team_pk_df['TEAM'] == team)]
78 | team_pk_df = team_pk_df.sort_values(by=['TOI'], ascending = True)
79 | team_pk_df = team_pk_df.iloc[-8:]
80 | team_pk_df['RANK'] = team_pk_df['TOI'].rank(method='first')
81 | team_pk_df = team_pk_df.sort_values(by=['RANK'], ascending = True)
82 | team_pk_df['RANK'] -= 1
83 |
84 | team_pk_df['UNIT'] = team_pk_df['UNIT'] + ' (' + team_pk_df['STRENGTH'] + ')'
85 |
86 | # remove zeros from the goals for and against columns
87 | team_pk_df['GF'] = team_pk_df['GF'].replace(0, np.NaN)
88 | team_pk_df['GA'] = team_pk_df['GA'].replace(0, np.NaN)
89 |
90 | # remove zeros from the differential column
91 | team_pk_df['SD'] = team_pk_df['SD'].replace(0, np.NaN)
92 |
93 | # make goals and shots against negative values
94 | team_pk_df['GA'] *= -1
95 | team_pk_df['SA'] *= -1
96 |
97 |         # create another units dataframe with just the time on ice column; set a max value; scale each unit's time on ice relative to the max value
98 | pk_toi = team_pk_df['TOI']
99 | team_max_pk_toi = pk_toi.max()
100 |
101 | pk_toi_color = pk_toi / float(team_max_pk_toi)
102 |
103 |         # connect team and opponent color map colors to each unit's scaled time on ice
104 | pk_toi_color_map_for = team_color_map(pk_toi_color)
105 | pk_toi_color_map_against = opponent_color_map(pk_toi_color)
106 |
107 | # create a figure with two subplots sharing the y-axis
108 | fig = plt.figure(figsize=(8,8))
109 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
110 |
111 | ax_pk_shots = fig.add_subplot(grid[0, 0:-2])
112 | ax_pk_toi = fig.add_subplot(grid[0, -1])
113 |
114 | # set the plot title
115 | fig.suptitle(date + ' Penalty Kill Units On-Ice Shots\n\n')
116 |
117 | # set the axes titles
118 | ax_pk_shots.set_title('SH S', fontsize=10)
119 | ax_pk_toi.set_title('SH TOI', fontsize=10)
120 |
121 |         # create bars for shots for and against as well as markers (to note the shot differential) for each unit
122 | try:
123 | pk_SF_plot = team_pk_df.plot.barh(x='UNIT', y='SF', stacked=True, color=pk_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_pk_shots);
124 | except:
125 | pass
126 | try:
127 | pk_SA_plot = team_pk_df.plot.barh(x='UNIT', y='SA', stacked=True, color=pk_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_pk_shots);
128 | except:
129 | pass
130 | try:
131 | pk_GF_marker = team_pk_df.plot(x='GF', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_pk_shots);
132 | except:
133 | pass
134 | try:
135 | pk_GA_marker = team_pk_df.plot(x='GA', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_pk_shots);
136 | except:
137 | pass
138 | try:
139 | pk_SD_plot = team_pk_df.plot(x='SD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_pk_shots);
140 | except:
141 | pass
142 |
143 | # plot the bars for time on ice
144 | try:
145 | toi_pk = team_pk_df.plot.barh(x='UNIT', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_pk_toi);
146 | except:
147 | pass
148 |
149 | # remove the labels for each subplot
150 | ax_pk_shots.set_xlabel('')
151 | ax_pk_shots.set_ylabel('')
152 |
153 | ax_pk_toi.set_xlabel('')
154 | ax_pk_toi.set_ylabel('')
155 |
156 | # set vertical indicator for break-even shot differential
157 | ax_pk_shots.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
158 |
159 | # change the tick parameters
160 | ax_pk_shots.tick_params(
161 | axis='both',
162 | which='both',
163 | bottom=False,
164 | top=False,
165 | left=False,
166 | labelleft=True, # labels along the left edge are on
167 | labelbottom=True)
168 |
169 | ax_pk_toi.tick_params(
170 | axis='both',
171 | which='both',
172 | bottom=False,
173 | top=False,
174 | left=False,
175 | labelleft=False, # labels along the left edge are off
176 | labelbottom=True)
177 |
178 | # change the y-axis label colors
179 | ax_pk_shots.tick_params(
180 | axis='y',
181 | which='both',
182 | labelcolor=team_color)
183 |
184 | # create a list of x-axis tick values contingent on the max values for shots for and against
185 | SF_max = pk_df['SF']
186 | SF_max = SF_max.max()
187 |
188 | SA_max = pk_df['SA']
189 | SA_max = SA_max.max()
190 |
191 | S_tickmax = int()
192 | if SF_max >= SA_max:
193 | S_tickmax = SF_max
194 | if SF_max < SA_max:
195 | S_tickmax = SA_max
196 |
197 | S_ticklabels = []
198 | if S_tickmax <= 5:
199 | S_ticklabels = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
200 | if S_tickmax > 5 and S_tickmax <= 10:
201 | S_ticklabels = [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]
202 | if S_tickmax > 10 and S_tickmax <= 15:
203 | S_ticklabels = [-15, -12, -9, -6, -3, 0, 3, 6, 9, 12, 15]
204 | if S_tickmax > 15 and S_tickmax <= 20:
205 | S_ticklabels = [-20, -16, -12, -8, -4, 0, 4, 8, 12, 16, 20]
206 | if S_tickmax > 20 and S_tickmax <= 25:
207 | S_ticklabels = [-25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25]
208 |
209 | toi_tickmax = max_pk_toi
210 |
211 | toi_ticklabels = []
212 | if toi_tickmax <= 2:
213 | toi_ticklabels = [0, 2]
214 | if toi_tickmax > 2 and toi_tickmax <= 4:
215 | toi_ticklabels = [0, 4]
216 | if toi_tickmax > 4 and toi_tickmax <= 6:
217 | toi_ticklabels = [0, 6]
218 | if toi_tickmax > 6 and toi_tickmax <= 8:
219 | toi_ticklabels = [0, 8]
220 | if toi_tickmax > 8 and toi_tickmax <= 10:
221 | toi_ticklabels = [0, 10]
222 | if toi_tickmax > 10 and toi_tickmax <= 12:
223 | toi_ticklabels = [0, 12]
224 | if toi_tickmax > 12 and toi_tickmax <= 14:
225 | toi_ticklabels = [0, 14]
226 | if toi_tickmax > 14 and toi_tickmax <= 16:
227 | toi_ticklabels = [0, 16]
228 | if toi_tickmax > 16 and toi_tickmax <= 18:
229 | toi_ticklabels = [0, 18]
230 | if toi_tickmax > 18 and toi_tickmax <= 20:
231 | toi_ticklabels = [0, 20]
232 |
233 |         # set vertical indicators for midpoint and max of time on ice
234 | ax_pk_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
235 | ax_pk_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
236 |
237 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
238 | ax_pk_shots.set_xticks(S_ticklabels, minor=False)
239 | ax_pk_toi.set_xticks(toi_ticklabels, minor=False)
240 |
241 | # remove the borders to each subplot
242 | ax_pk_shots.spines["top"].set_visible(False)
243 | ax_pk_shots.spines["bottom"].set_visible(False)
244 | ax_pk_shots.spines["right"].set_visible(False)
245 | ax_pk_shots.spines["left"].set_visible(False)
246 |
247 | ax_pk_toi.spines["top"].set_visible(False)
248 | ax_pk_toi.spines["bottom"].set_visible(False)
249 | ax_pk_toi.spines["right"].set_visible(False)
250 | ax_pk_toi.spines["left"].set_visible(False)
251 |
252 | # add a legend for the shot type markers
253 | from matplotlib.lines import Line2D
254 | elements = [Line2D([0], [0], marker='D', markersize=5, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Scored'), Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
255 | ax_pk_shots.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
256 |
257 | # add text boxes with team names in white and with the team's color in the background
258 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
259 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
260 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
261 |
262 |
263 | ###
264 | ### SAVE TO FILE
265 | ###
266 |
267 | if team == away:
268 | plt.savefig(charts_units_pk + 'onice_shots_away_pk.png', bbox_inches='tight', pad_inches=0.2)
269 | elif team == home:
270 | plt.savefig(charts_units_pk + 'onice_shots_home_pk.png', bbox_inches='tight', pad_inches=0.2)
271 |
272 | # exercise a command-line option to show the current figure
273 | if images == 'show':
274 | plt.show()
275 |
276 |
277 | ###
278 | ### CLOSE
279 | ###
280 |
281 | plt.close(fig)
282 |
283 | # status update
284 | print('Plotting ' + team + ' penalty kill units on-ice shots.')
285 |
286 | # status update
287 | print('Finished plotting on-ice shots for penalty kill units.')
--------------------------------------------------------------------------------
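
parse_ids is the lone entry point of the penalty kill script above. A hypothetical invocation is sketched below; the season and game ids are illustrative NHL-style values rather than values taken from this repository, and the schedule and stats .csv files the script reads are assumed to exist already (produced by the fetch, parse and stats steps).

import chart_units_pk_onice_shots

season_id = '20182019'    # illustrative season id
game_id = '2018020001'    # illustrative game id; compared as int(game_id) against the schedule's GAME_ID column
images = 'show'           # any other value saves the .png files without displaying them

chart_units_pk_onice_shots.parse_ids(season_id, game_id, images)
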
/chart_units_pp_onice_shots.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import matplotlib as mpl
10 | import parameters
11 | import matplotlib.colors as clr
12 | import dict_team_colors
13 | import mod_switch_colors
14 |
15 | def parse_ids(season_id, game_id, images):
16 |
17 | # pull common variables from the parameters file
18 | charts_units_pp = parameters.charts_units_pp
19 | files_root = parameters.files_root
20 |
21 | # generate date and team information
22 | schedule_csv = files_root + season_id + "_schedule.csv"
23 |
24 | schedule_df = pd.read_csv(schedule_csv)
25 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
26 |
27 | date = schedule_date['DATE'].item()
28 | home = schedule_date['HOME'].item()
29 | away = schedule_date['AWAY'].item()
30 | teams = [away, home]
31 |
32 |     # create variables that point to the .csv processed stats files for power play units
33 | pp_file = files_root + 'stats_units_pp_onice.csv'
34 |
35 | # create dataframe objects that read in info from the .csv files
36 | pp_df = pd.read_csv(pp_file)
37 |
38 | max_pp_toi = pp_df['TOI'].max()
39 |
40 | # choose colors for each team; set them in a list; generate a custom colormap for each team
41 | away_color = dict_team_colors.team_color_1st[away]
42 | home_color = dict_team_colors.team_color_1st[home]
43 |
44 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
45 | try:
46 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
47 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
48 | except:
49 | pass
50 |
51 | team_colors = [away_color, home_color]
52 |
53 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
54 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
55 |
56 |
57 | ###
58 | ### PP
59 | ###
60 |
61 | # loop through each team
62 | for team in teams:
63 |
64 | if team == away:
65 | team_color = team_colors[0]
66 | opponent_color = team_colors[1]
67 | team_color_map = plt.cm.get_cmap(away_cmap)
68 | opponent_color_map = plt.cm.get_cmap(home_cmap)
69 |
70 | if team == home:
71 | team_color = team_colors[1]
72 | opponent_color = team_colors[0]
73 | team_color_map = plt.cm.get_cmap(home_cmap)
74 | opponent_color_map = plt.cm.get_cmap(away_cmap)
75 |
76 |         # create a power play units dataframe; filter for team; sort by time on ice; keep the units with the 8 highest totals; rank and then invert the rankings
77 | team_pp_df = pp_df.copy()
78 | team_pp_df = team_pp_df[(team_pp_df['TEAM'] == team)]
79 | team_pp_df = team_pp_df.sort_values(by=['TOI'], ascending = True)
80 | team_pp_df = team_pp_df.iloc[-8:]
81 | team_pp_df['RANK'] = team_pp_df['TOI'].rank(method='first')
82 | team_pp_df = team_pp_df.sort_values(by=['RANK'], ascending = True)
83 | team_pp_df['RANK'] -= 1
84 |
85 | team_pp_df['UNIT'] = team_pp_df['UNIT'] + ' (' + team_pp_df['STRENGTH'] + ')'
86 |
87 | # remove zeros from the goals for and against columns
88 | team_pp_df['GF'] = team_pp_df['GF'].replace(0, np.NaN)
89 | team_pp_df['GA'] = team_pp_df['GA'].replace(0, np.NaN)
90 |
91 | # remove zeros from the differential column
92 | team_pp_df['SD'] = team_pp_df['SD'].replace(0, np.NaN)
93 |
94 | # make goals and shots against negative values
95 | team_pp_df['GA'] *= -1
96 | team_pp_df['SA'] *= -1
97 |
98 |         # create another units dataframe with just the time on ice column; set a max value; scale each unit's time on ice relative to the max value
99 | pp_toi = team_pp_df['TOI']
100 | team_max_pp_toi = pp_toi.max()
101 |
102 | pp_toi_color = pp_toi / float(team_max_pp_toi)
103 |
104 |         # connect team and opponent color map colors to each unit's scaled time on ice
105 | pp_toi_color_map_for = team_color_map(pp_toi_color)
106 | pp_toi_color_map_against = opponent_color_map(pp_toi_color)
107 |
108 | # create a figure with two subplots sharing the y-axis
109 | fig = plt.figure(figsize=(8,8))
110 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
111 |
112 | ax_pp_shots = fig.add_subplot(grid[0, 0:-2])
113 | ax_pp_toi = fig.add_subplot(grid[0, -1])
114 |
115 | # set the plot title
116 | fig.suptitle(date + ' Power Play Units On-Ice Shots\n\n')
117 |
118 | # set the axes titles
119 | ax_pp_shots.set_title('PP S', fontsize=10)
120 | ax_pp_toi.set_title('PP TOI', fontsize=10)
121 |
122 |         # create bars for shots for and against as well as markers (to note the shot differential) for each unit
123 | try:
124 | pp_SF_plot = team_pp_df.plot.barh(x='UNIT', y='SF', stacked=True, color=pp_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_pp_shots);
125 | except:
126 | pass
127 | try:
128 | pp_SA_plot = team_pp_df.plot.barh(x='UNIT', y='SA', stacked=True, color=pp_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_pp_shots);
129 | except:
130 | pass
131 | try:
132 | pp_GF_marker = team_pp_df.plot(x='GF', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_pp_shots);
133 | except:
134 | pass
135 | try:
136 | pp_GA_marker = team_pp_df.plot(x='GA', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_pp_shots);
137 | except:
138 | pass
139 | try:
140 | pp_SD_plot = team_pp_df.plot(x='SD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_pp_shots);
141 | except:
142 | pass
143 |
144 | # plot the bars for time on ice
145 | try:
146 | toi_pp = team_pp_df.plot.barh(x='UNIT', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_pp_toi);
147 | except:
148 | pass
149 |
150 | # remove the labels for each subplot
151 | ax_pp_shots.set_xlabel('')
152 | ax_pp_shots.set_ylabel('')
153 |
154 | ax_pp_toi.set_xlabel('')
155 | ax_pp_toi.set_ylabel('')
156 |
157 | # set vertical indicator for break-even shot differential
158 | ax_pp_shots.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
159 |
160 | # change the tick parameters
161 | ax_pp_shots.tick_params(
162 | axis='both',
163 | which='both',
164 | bottom=False,
165 | top=False,
166 | left=False,
167 | labelleft=True, # labels along the left edge are on
168 | labelbottom=True)
169 |
170 | ax_pp_toi.tick_params(
171 | axis='both',
172 | which='both',
173 | bottom=False,
174 | top=False,
175 | left=False,
176 | labelleft=False, # labels along the left edge are off
177 | labelbottom=True)
178 |
179 | # change the y-axis label colors
180 | ax_pp_shots.tick_params(
181 | axis='y',
182 | which='both',
183 | labelcolor=team_color)
184 |
185 | # create a list of x-axis tick values contingent on the max values for shots for and against
186 | SF_max = pp_df['SF']
187 | SF_max = SF_max.max()
188 |
189 | SA_max = pp_df['SA']
190 | SA_max = SA_max.max()
191 |
192 | S_tickmax = int()
193 | if SF_max >= SA_max:
194 | S_tickmax = SF_max
195 | if SF_max < SA_max:
196 | S_tickmax = SA_max
197 |
198 | S_ticklabels = []
199 | if S_tickmax <= 5:
200 | S_ticklabels = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
201 | if S_tickmax > 5 and S_tickmax <= 10:
202 | S_ticklabels = [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]
203 | if S_tickmax > 10 and S_tickmax <= 15:
204 | S_ticklabels = [-15, -12, -9, -6, -3, 0, 3, 6, 9, 12, 15]
205 | if S_tickmax > 15 and S_tickmax <= 20:
206 | S_ticklabels = [-20, -16, -12, -8, -4, 0, 4, 8, 12, 16, 20]
207 | if S_tickmax > 20 and S_tickmax <= 25:
208 | S_ticklabels = [-25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25]
209 |
210 | toi_tickmax = max_pp_toi
211 |
212 | toi_ticklabels = []
213 | if toi_tickmax <= 2:
214 | toi_ticklabels = [0, 2]
215 | if toi_tickmax > 2 and toi_tickmax <= 4:
216 | toi_ticklabels = [0, 4]
217 | if toi_tickmax > 4 and toi_tickmax <= 6:
218 | toi_ticklabels = [0, 6]
219 | if toi_tickmax > 6 and toi_tickmax <= 8:
220 | toi_ticklabels = [0, 8]
221 | if toi_tickmax > 8 and toi_tickmax <= 10:
222 | toi_ticklabels = [0, 10]
223 | if toi_tickmax > 10 and toi_tickmax <= 12:
224 | toi_ticklabels = [0, 12]
225 | if toi_tickmax > 12 and toi_tickmax <= 14:
226 | toi_ticklabels = [0, 14]
227 | if toi_tickmax > 14 and toi_tickmax <= 16:
228 | toi_ticklabels = [0, 16]
229 | if toi_tickmax > 16 and toi_tickmax <= 18:
230 | toi_ticklabels = [0, 18]
231 | if toi_tickmax > 18 and toi_tickmax <= 20:
232 | toi_ticklabels = [0, 20]
233 |
234 |         # set vertical indicators for midpoint and max of time on ice
235 | ax_pp_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
236 | ax_pp_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
237 |
238 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
239 | ax_pp_shots.set_xticks(S_ticklabels, minor=False)
240 | ax_pp_toi.set_xticks(toi_ticklabels, minor=False)
241 |
242 | # remove the borders to each subplot
243 | ax_pp_shots.spines["top"].set_visible(False)
244 | ax_pp_shots.spines["bottom"].set_visible(False)
245 | ax_pp_shots.spines["right"].set_visible(False)
246 | ax_pp_shots.spines["left"].set_visible(False)
247 |
248 | ax_pp_toi.spines["top"].set_visible(False)
249 | ax_pp_toi.spines["bottom"].set_visible(False)
250 | ax_pp_toi.spines["right"].set_visible(False)
251 | ax_pp_toi.spines["left"].set_visible(False)
252 |
253 | # add a legend for the shot type markers
254 | from matplotlib.lines import Line2D
255 | elements = [Line2D([0], [0], marker='D', markersize=5, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Scored'), Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
256 | ax_pp_shots.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
257 |
258 | # add text boxes with team names in white and with the team's color in the background
259 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
260 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
261 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
262 |
263 |
264 | ###
265 | ### SAVE TO FILE
266 | ###
267 |
268 | if team == away:
269 | plt.savefig(charts_units_pp + 'onice_shots_away_pp.png', bbox_inches='tight', pad_inches=0.2)
270 | elif team == home:
271 | plt.savefig(charts_units_pp + 'onice_shots_home_pp.png', bbox_inches='tight', pad_inches=0.2)
272 |
273 | # exercise a command-line option to show the current figure
274 | if images == 'show':
275 | plt.show()
276 |
277 |
278 | ###
279 | ### CLOSE
280 | ###
281 |
282 | plt.close(fig)
283 |
284 | # status update
285 | print('Plotting ' + team + ' power play units on-ice shots.')
286 |
287 | # status update
288 | print('Finished plotting on-ice shots for power play units.')
--------------------------------------------------------------------------------
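
The bar shading used throughout these scripts follows one pattern: each team gets a white-to-team-color gradient, and every unit's bars are shaded by that unit's share of the team's maximum time on ice. The standalone sketch below shows just that mapping; the hex color and TOI values are made-up examples, not values from dict_team_colors or the stats files.

import pandas as pd
import matplotlib.colors as clr

team_color = '#041e42'   # example color only
cmap = clr.LinearSegmentedColormap.from_list('custom team', [(0, '#ffffff'), (1, team_color)], N=256)

toi = pd.Series([1.2, 2.5, 4.0])           # example TOI values in minutes
bar_colors = cmap(toi / float(toi.max()))  # RGBA rows; the unit with the most TOI gets the full team color
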
/chart_units_lines_onice_shots.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | @author: @mikegallimore
4 | """
5 | #import json
6 | import pandas as pd
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 | import matplotlib as mpl
10 | import parameters
11 | import matplotlib.colors as clr
12 | import dict_team_colors
13 | import mod_switch_colors
14 |
15 | def parse_ids(season_id, game_id, images):
16 |
17 | # pull common variables from the parameters file
18 | charts_units_lines = parameters.charts_units_lines
19 | files_root = parameters.files_root
20 |
21 | # generate date and team information
22 | schedule_csv = files_root + season_id + "_schedule.csv"
23 |
24 | schedule_df = pd.read_csv(schedule_csv)
25 | schedule_date = schedule_df[(schedule_df['GAME_ID'] == int(game_id))]
26 |
27 | date = schedule_date['DATE'].item()
28 | home = schedule_date['HOME'].item()
29 | away = schedule_date['AWAY'].item()
30 | teams = [away, home]
31 |
32 | # create variables that point to the .csv processed stats files for lines
33 | lines_file = files_root + 'stats_units_lines_onice.csv'
34 |
35 | # create dataframe objects that read in info from the .csv files
36 | lines_df = pd.read_csv(lines_file)
37 |
38 | max_toi = lines_df['TOI'].max()
39 |
40 | # choose colors for each team; set them in a list; generate a custom colormap for each team
41 | away_color = dict_team_colors.team_color_1st[away]
42 | home_color = dict_team_colors.team_color_1st[home]
43 |
44 | # change one team's color from its primary option to, depending on the opponent, either a second, third or fourth option
45 | try:
46 | away_color = mod_switch_colors.switch_team_colors(away, home)[0]
47 | home_color = mod_switch_colors.switch_team_colors(away, home)[1]
48 | except:
49 | pass
50 |
51 | team_colors = [away_color, home_color]
52 |
53 | away_cmap = clr.LinearSegmentedColormap.from_list('custom away', [(0, '#ffffff'), (1, away_color)], N=256)
54 | home_cmap = clr.LinearSegmentedColormap.from_list('custom home', [(0, '#ffffff'), (1, home_color)], N=256)
55 |
56 |
57 | ###
58 | ### 5v5
59 | ###
60 |
61 | # loop through each team
62 | for team in teams:
63 |
64 | if team == away:
65 | team_color = team_colors[0]
66 | opponent_color = team_colors[1]
67 | team_color_map = plt.cm.get_cmap(away_cmap)
68 | opponent_color_map = plt.cm.get_cmap(home_cmap)
69 |
70 | if team == home:
71 | team_color = team_colors[1]
72 | opponent_color = team_colors[0]
73 | team_color_map = plt.cm.get_cmap(home_cmap)
74 | opponent_color_map = plt.cm.get_cmap(away_cmap)
75 |
76 | # create a lines dataframe; filter for team; sort by time on ice; keep the lines with the 8 highest totals; rank and then invert the rankings
77 | team_lines_df = lines_df.copy()
78 | team_lines_df = team_lines_df[(team_lines_df['TEAM'] == team)]
79 | team_lines_df = team_lines_df.sort_values(by=['TOI'], ascending = True)
80 | team_lines_df = team_lines_df.iloc[-8:]
81 | team_lines_df['RANK'] = team_lines_df['TOI'].rank(method='first')
82 | team_lines_df = team_lines_df.sort_values(by=['RANK'], ascending = True)
83 | team_lines_df['RANK'] -= 1
84 |
85 | # remove zeros from the goals for and against columns
86 | team_lines_df['GF'] = team_lines_df['GF'].replace(0, np.NaN)
87 | team_lines_df['GA'] = team_lines_df['GA'].replace(0, np.NaN)
88 |
89 | # remove zeros from the differential column
90 | team_lines_df['SD'] = team_lines_df['SD'].replace(0, np.NaN)
91 |
92 |         # make goals and shots against negative values
93 | team_lines_df['GA'] *= -1
94 | team_lines_df['SA'] *= -1
95 |
96 | # create another lines dataframe with just the time on ice column; set a max value; scale each line's time on ice relative to the max value
97 | lines_toi = team_lines_df['TOI']
98 | max_lines_toi = lines_toi.max()
99 |
100 | lines_toi_color = lines_toi / float(max_lines_toi)
101 |
102 | # connect team and opponent color map colors to each line's scaled time on ice
103 | lines_toi_color_map_for = team_color_map(lines_toi_color)
104 | lines_toi_color_map_against = opponent_color_map(lines_toi_color)
105 |
106 | # create a figure with two subplots sharing the y-axis
107 | fig = plt.figure(figsize=(8,8))
108 | grid = plt.GridSpec(1, 8, hspace=0.75, wspace=0.50)
109 |
110 | ax_lines_shots = fig.add_subplot(grid[0, 0:-2])
111 | ax_lines_toi = fig.add_subplot(grid[0, -1])
112 |
113 | # set the plot title
114 | fig.suptitle(date + ' Forward Lines On-Ice Shots\n\n')
115 |
116 | # set the axes titles
117 | ax_lines_shots.set_title('5v5 S', fontsize=10)
118 | ax_lines_toi.set_title('5v5 TOI', fontsize=10)
119 |
120 | # create bars for shots for and against as well as markers (to note the shot differential) for each line
121 | try:
122 | lines_SF_plot = team_lines_df.plot.barh(x='LINE', y='SF', stacked=True, color=lines_toi_color_map_for, width=0.25, legend=None, label='', ax=ax_lines_shots);
123 | except:
124 | pass
125 | try:
126 | lines_SA_plot = team_lines_df.plot.barh(x='LINE', y='SA', stacked=True, color=lines_toi_color_map_against, width=0.25, legend=None, label='', ax=ax_lines_shots);
127 | except:
128 | pass
129 | try:
130 | lines_GF_marker = team_lines_df.plot(x='GF', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_lines_shots);
131 | except:
132 | pass
133 | try:
134 | lines_GA_marker = team_lines_df.plot(x='GA', y='RANK', marker='D', markersize=5, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend='', label='', ax=ax_lines_shots);
135 | except:
136 | pass
137 | try:
138 | lines_SD_plot = team_lines_df.plot(x='SD', y='RANK', marker='|', markersize=15, markerfacecolor='None', markeredgecolor='white', linewidth=0, alpha=1, legend=None, label='', ax=ax_lines_shots);
139 | except:
140 | pass
141 |
142 | # plot the bars for time on ice
143 | try:
144 | toi_lines = team_lines_df.plot.barh(x='LINE', y='TOI', color='white', edgecolor=team_color, width=0.25, legend=None, label='', ax=ax_lines_toi);
145 | except:
146 | pass
147 |
148 | # remove the labels for each subplot
149 | ax_lines_shots.set_xlabel('')
150 | ax_lines_shots.set_ylabel('')
151 |
152 | ax_lines_toi.set_xlabel('')
153 | ax_lines_toi.set_ylabel('')
154 |
155 | # set vertical indicator for break-even shot differential
156 | ax_lines_shots.axvspan(0, 0, ymin=0, ymax=1, alpha=.25, linestyle=':', color='black')
157 |
158 | # change the tick parameters
159 | ax_lines_shots.tick_params(
160 | axis='both',
161 | which='both',
162 | bottom=False,
163 | top=False,
164 | left=False,
165 | labelleft=True, # labels along the left edge are on
166 | labelbottom=True)
167 |
168 | ax_lines_toi.tick_params(
169 | axis='both',
170 | which='both',
171 | bottom=False,
172 | top=False,
173 | left=False,
174 | labelleft=False, # labels along the left edge are off
175 | labelbottom=True)
176 |
177 | # change the y-axis label colors
178 | ax_lines_shots.tick_params(
179 | axis='y',
180 | which='both',
181 | labelcolor=team_color)
182 |
183 | # create a list of x-axis tick values contingent on the max values for shots for and against
184 | SF_max = lines_df['SF']
185 | SF_max = SF_max.max()
186 |
187 | SA_max = lines_df['SA']
188 | SA_max = SA_max.max()
189 |
190 | S_tickmax = int()
191 | if SF_max >= SA_max:
192 | S_tickmax = SF_max
193 | if SF_max < SA_max:
194 | S_tickmax = SA_max
195 |
196 | S_ticklabels = []
197 | if S_tickmax <= 5:
198 | S_ticklabels = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
199 | if S_tickmax > 5 and S_tickmax <= 10:
200 | S_ticklabels = [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]
201 | if S_tickmax > 10 and S_tickmax <= 15:
202 | S_ticklabels = [-15, -12, -9, -6, -3, 0, 3, 6, 9, 12, 15]
203 | if S_tickmax > 15 and S_tickmax <= 20:
204 | S_ticklabels = [-20, -16, -12, -8, -4, 0, 4, 8, 12, 16, 20]
205 | if S_tickmax > 20 and S_tickmax <= 25:
206 | S_ticklabels = [-25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25]
207 |
208 | toi_tickmax = max_toi
209 |
210 | toi_ticklabels = []
211 | if toi_tickmax <= 2:
212 | toi_ticklabels = [0, 2]
213 | if toi_tickmax > 2 and toi_tickmax <= 4:
214 | toi_ticklabels = [0, 4]
215 | if toi_tickmax > 4 and toi_tickmax <= 6:
216 | toi_ticklabels = [0, 6]
217 | if toi_tickmax > 6 and toi_tickmax <= 8:
218 | toi_ticklabels = [0, 8]
219 | if toi_tickmax > 8 and toi_tickmax <= 10:
220 | toi_ticklabels = [0, 10]
221 | if toi_tickmax > 10 and toi_tickmax <= 12:
222 | toi_ticklabels = [0, 12]
223 | if toi_tickmax > 12 and toi_tickmax <= 14:
224 | toi_ticklabels = [0, 14]
225 | if toi_tickmax > 14 and toi_tickmax <= 16:
226 | toi_ticklabels = [0, 16]
227 | if toi_tickmax > 16 and toi_tickmax <= 18:
228 | toi_ticklabels = [0, 18]
229 | if toi_tickmax > 18 and toi_tickmax <= 20:
230 | toi_ticklabels = [0, 20]
231 |
232 |         # set vertical indicators for midpoint and max of time on ice
233 | ax_lines_toi.axvspan(toi_ticklabels[1] / 2, toi_ticklabels[1] / 2, ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
234 | ax_lines_toi.axvspan(toi_ticklabels[1], toi_ticklabels[1], ymin=0, ymax=1, zorder=0, alpha=0.25, linestyle=':', color='black')
235 |
236 | # use the newly-minted x-ticklabels to ensure the x-axis labels will always display as integers
237 | ax_lines_shots.set_xticks(S_ticklabels, minor=False)
238 | ax_lines_toi.set_xticks(toi_ticklabels, minor=False)
239 |
240 | # remove the borders to each subplot
241 | ax_lines_shots.spines["top"].set_visible(False)
242 | ax_lines_shots.spines["bottom"].set_visible(False)
243 | ax_lines_shots.spines["right"].set_visible(False)
244 | ax_lines_shots.spines["left"].set_visible(False)
245 |
246 | ax_lines_toi.spines["top"].set_visible(False)
247 | ax_lines_toi.spines["bottom"].set_visible(False)
248 | ax_lines_toi.spines["right"].set_visible(False)
249 | ax_lines_toi.spines["left"].set_visible(False)
250 |
251 | # add a legend for the shot type markers
252 | from matplotlib.lines import Line2D
253 | elements = [Line2D([0], [0], marker='D', markersize=5, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Scored'), Line2D([0], [0], marker='|', markersize=13, markerfacecolor='None', markeredgecolor='black', linewidth=0, alpha=1, label='Differential')]
254 | ax_lines_shots.legend(handles=elements, loc='center', bbox_to_anchor=(.5, -.1), ncol=2).get_frame().set_linewidth(0.0)
255 |
256 | # add text boxes with team names in white and with the team's color in the background
257 | fig.text(.425, 0.936, ' ' + away + ' ', color='white', fontsize='12', bbox=dict(facecolor=away_color, edgecolor='None'))
258 | fig.text(.525, 0.936, ' ' + home + ' ', fontsize='12', color='white', bbox=dict(facecolor=home_color, edgecolor='None'))
259 | fig.text(.490, 0.936, '@', color='black', fontsize='12', bbox=dict(facecolor='white', edgecolor='None'))
260 |
261 |
262 | ###
263 | ### SAVE TO FILE
264 | ###
265 |
266 | if team == away:
267 | plt.savefig(charts_units_lines + 'onice_shots_away_lines.png', bbox_inches='tight', pad_inches=0.2)
268 | elif team == home:
269 | plt.savefig(charts_units_lines + 'onice_shots_home_lines.png', bbox_inches='tight', pad_inches=0.2)
270 |
271 | # exercise a command-line option to show the current figure
272 | if images == 'show':
273 | plt.show()
274 |
275 |
276 | ###
277 | ### CLOSE
278 | ###
279 |
280 | plt.close(fig)
281 |
282 | # status update
283 | print('Plotting ' + team + ' lines 5v5 on-ice shots.')
284 |
285 | # status update
286 | print('Finished plotting 5v5 on-ice shots for lines.')
--------------------------------------------------------------------------------
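
One assumption all of these chart scripts share: every input and output path is built by plain string concatenation (files_root + ..., charts_units_lines + ..., and so on), so the directory variables imported from parameters need to end with a path separator. The stub below only illustrates the expected shape of those values; the real, user-specific paths belong in a local parameters.py (see parameters_sample.py).

# hypothetical directory values; adjust to the local machine
files_root = '/home/user/NHL_Single/files/'
charts_units_lines = '/home/user/NHL_Single/charts/units/lines/'
charts_units_pairings = '/home/user/NHL_Single/charts/units/pairings/'
charts_units_pk = '/home/user/NHL_Single/charts/units/pk/'
charts_units_pp = '/home/user/NHL_Single/charts/units/pp/'
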