├── .gitignore
├── Jenkinsfile
├── Jenkinsfile_BF6064
├── ReadMe.md
├── advance_settings.yaml
├── bf_sde.py
├── common.py
├── constants.py
├── drivers.py
├── irq_debug.tgz
├── sal.py
├── settings.yaml
└── test
└── AOT_Test.py
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | venv/
3 | logs/
4 | .vscode/
5 | __pycache__/
6 | irq_debug/
7 | autom4te.cache/
8 | .project
9 | .history/
10 | .pydevproject
11 | release
12 | bf-sde*
13 | test/BF2556X-1T_BSP_9.0.0-master
14 | test/*.log
15 | *.log
16 | test/sal_logs
17 | test/zlog-cfg-cur
18 | report
19 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | pipeline {
2 | agent { label 'BF2556' }
3 | stages {
4 | stage('Clone AOT') {
5 | steps {
6 | git credentialsId: 'Jenkins_priv_ssh', url: 'https://github.com/stordis/APS-One-touch.git'
7 | }
8 | }
9 | stage('Clone SAL') {
10 | steps {
11 | dir('sal'){
12 | git branch: 'SAL_Light', credentialsId: 'BF2556_ssh_key', url: "${sal_src}"
13 | }
14 | }
15 | }
16 |
17 | stage('Test AOT'){
18 | steps{
19 | sh 'PYTHONPATH=$PYTHONPATH:AOT python3 test/AOT_Test.py ~/jenkins_ci/settings.yaml ~/jenkins_ci/advance_settings.yaml'
20 | }
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Jenkinsfile_BF6064:
--------------------------------------------------------------------------------
1 | pipeline {
2 | agent { label 'BF6064' }
3 | stages {
4 | stage('Clone AOT') {
5 | steps {
6 | git credentialsId: 'Jenkins_priv_ssh', url: 'https://github.com/stordis/APS-One-touch.git'
7 | }
8 | }
9 | stage('Clone SAL') {
10 | steps {
11 | dir('sal'){
12 | git branch: 'SAL_Light', credentialsId: 'BF6064_ssh_key', url: "${sal_src}"
13 | }
14 | }
15 | }
16 | stage('Test AOT'){
17 | steps{
18 | sh 'PYTHONPATH=$PYTHONPATH:AOT python3 test/AOT_Test.py ~/jenkins_ci/settings.yaml ~/jenkins_ci/advance_settings.yaml'
19 | }
20 | }
21 | }
22 |
23 | }
24 |
--------------------------------------------------------------------------------
/ReadMe.md:
--------------------------------------------------------------------------------
1 | # APS One Touch (AOT)
2 |
 3 | ⚠️ **APS One Touch is now deprecated.** Intel-compatible BSP packages for the switches can be found on our support portal,
 4 | as can Debian installers for other software dependencies such as the SAL.
5 |
 6 | AOT is a tool for easily setting up APSN's switches. It configures the BF2556X_1T and BF6064X_T with the BF SDE, BSP, and SAL (Switch Abstraction Layer).
 7 | To get the files required by the installer, refer to the [Required Software](#required-software) section below.
8 |
 9 | While executing AOT, the default input appears in square brackets and is used if the user provides no input and just hits 'Enter',
10 | e.g. '[n]' or '[do_nothing]'.
11 |
12 | ## Required Software
13 | The following software is required to set up APSN switches.
14 | Please note: users can get all of the software below (except the SDE) in a single archive with compatible releases from the 'All in one package' section of the APSN support portal.
15 |
16 | |SW|Details|Where to get it|
17 | |---|---|---|
18 | |bf-reference-bsp-<Release>-<APSN switch model>_<APSN ver>.zip|APSN BSP|APSN's Support Portal|
19 | |sal_<Release>|Switch Abstraction Layer for APSN Switches|APSN's Support Portal|
20 | |bf-sde-<Release>|Intel's Tofino SDE|Intel's Support Portal|
21 |
22 | ## Release Compatibility
23 | The following is the compatibility mapping among the various software components. Although these can be downloaded individually from Intel's and APSN's support portals,
24 | as mentioned above users can get all of the software below (except the SDE and OS) in a single archive with compatible releases from the 'All in one package' section of the APSN support portal.
25 |
26 | |Device|AOT|SAL|APSN BSP|SDE|Ref-BSP|OS (Recommended)|Kernel|
27 | |---|---|---|---|---|---|---|---|
28 | |BF2556X_1T <br> BF6064X_T|v1.6.1|sal_1.3.5|bf-reference-bsp-9.7.0-BF2556_1.0.0.zip <br> bf-reference-bsp-9.7.0-BF6064_1.0.0.zip|BF_SDE_9.7.0|-|Ubuntu Server 20.04.x LTS|-|
29 | |BF2556X_1T <br> BF6064X_T|v1.5.3|sal_1.3.4|bf-reference-bsp-9.4.0-BF2556_1.0.4.zip <br> bf-reference-bsp-9.4.0-BF6064_1.0.1.zip|BF_SDE_9.5.0|bf-reference-bsp-9.5.0|Ubuntu Server 18.04.4 LTS|5.4.x|
30 | |BF2556X_1T <br> BF6064X_T|v1.5.2|sal_1.3.3|bf-reference-bsp-9.4.0-BF2556_1.0.3.zip <br> bf-reference-bsp-9.4.0-BF6064_1.0.1.zip|BF_SDE_9.5.0|bf-reference-bsp-9.5.0|Ubuntu Server 18.04.4 LTS|5.4.x|
31 | |BF2556X_1T <br> BF6064X_T|v1.5.1|sal_1.3.1|bf-reference-bsp-9.4.0-BF2556_1.0.2.zip <br> bf-reference-bsp-9.4.0-BF6064_1.0.1.zip|BF_SDE_9.4.0|bf-reference-bsp-9.4.0|Ubuntu Server 18.04.4 LTS|5.4.x|
32 | |BF2556X_1T <br> BF6064X_T|v1.5.0|sal_1.3.0|bf-reference-bsp-9.4.0-BF2556_1.0.2.zip <br> bf-reference-bsp-9.4.0-BF6064_1.0.1.zip|BF_SDE_9.4.0|bf-reference-bsp-9.4.0|Ubuntu Server 18.04.4 LTS|5.4.x|
33 | |BF2556X_1T <br> BF6064X_T|v1.4.2|sal_1.2.0|bf-reference-bsp-9.4.0-BF2556_1.0.1.zip <br> bf-reference-bsp-9.4.0-BF6064_1.0.1.zip|BF_SDE_9.4.0|bf-reference-bsp-9.4.0|Ubuntu Server 18.04.4 LTS|5.4.x|
34 | |BF2556X_1T <br> BF6064X_T|v1.4.1|sal_1.1.1|bf-reference-bsp-9.3.0-BF2556_1c5723d.zip <br> bf-reference-bsp-9.3.0-BF6064_f536cae.zip|BF_SDE_9.3.0|bf-reference-bsp-9.3.0|Ubuntu Server 18.04.4 LTS|5.4.x|
35 | |BF2556X_1T <br> BF6064X_T|v1.3.0|sal_1.1.0|bf-reference-bsp-9.2.0-BF2556_5189449.zip <br> bf-reference-bsp-9.2.0-BF6064_0ee36ac.zip|BF_SDE_9.2.0|bf-reference-bsp-9.2.0|Ubuntu Server 18.04.4 LTS|4.15.x|
36 | |BF2556X_1T <br> BF6064X_T|v1.2.0|sal_1.1.0|BF2556X-1T_BSP_9.0.0 (master HEAD) <br> BF6064X_BSP_9.0.0 (master HEAD)|BF_SDE_9.1 <br> BF_SDE_9.2|NA|Ubuntu Server 18.04.4 LTS|4.15.x|
37 |
38 | ## Quick start
39 | For a quick start, the user should at least check the following node values in settings.yaml (there is no need to update settings.yaml when using the 'All in one package', as it comes with preset configs matching its content):
40 | - BF SDE
41 |   - sde_pkg
42 | - BSP
43 |   - aps_bsp_pkg
44 | - SAL
45 |   - sal_home
46 |
47 | By default, all package paths are resolved relative to the user's home directory, or relative to `PATH_PREFIX` when it is provided; see the example below.
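For illustration only, a trimmed settings.yaml for a BF2556X_1T (values taken from the settings.yaml shipped in this repository; adjust the paths to wherever your packages actually live) might look like this:

```yaml
PATH_PREFIX:   # empty, so the paths below are relative to the user's home directory

BF SDE:
  sde_pkg: /BF/bf-sde-9.7.0.tgz   # SDE tarball from Intel's portal
  sde_home: /BF/bf-sde-9.7.0      # created by AOT while building the SDE

BSP:
  aps_bsp_pkg: /bsp/bf-reference-bsp-9.7.0-BF2556_1.0.0   # APSN BSP

SAL:
  sal_home: /APS-One-touch/release/sal_1.3.3   # directory holding the SAL artifacts
```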
48 |
49 | ## settings.yaml
50 | This is the configuration input file for the AOT tool. It contains all the config parameters needed to install the APSN switches, so that the user has to provide minimal input during installation.
51 |
52 | By default, settings.yaml is picked up from the project directory, i.e. APS-One-touch/settings.yaml,
53 | but the user can also provide the path to a custom settings file as a CLI argument when executing the python scripts.
54 | This way different settings can be saved in different files, so the same file need not be modified every time AOT is launched with different settings;
55 | the user can quickly switch between different installations/configs just by changing the CLI argument.
56 | For example, a user may have two or more settings files with different config params based on SDE/BSP/SAL versions or config options, e.g.:
57 | - settings_SDE_9.2.yaml for Installing/Building/Running SDE_9.2 version
58 | - settings_SDE_9.3.yaml for Installing/Building/Running SDE_9.3 version
59 | - settings_SDE_9.3_p4runtime.yaml for Installing/Building/Running SDE_9.3 with p4runtime profile.
60 | - settings_SDE_9.3_switch_profile_sal_1.3.0.yaml for Installing/Building/Running SDE_9.3 with switch profile and SAL_1.3.0.
61 |
62 | And then just pass the desired settings file as a CLI arg, either to run SAL - `python3 APS-One-touch/sal.py ~/settings_9.2.yaml` - or to run just the SDE - `python3 APS-One-touch/bf_sde.py ~/settings_SDE_9.3_p4runtime.yaml`.
63 |
64 | When used without any CLI arg, e.g. `python3 APS-One-touch/bf_sde.py` or `python3 APS-One-touch/sal.py`, the default APS-One-touch/settings.yaml is picked.
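The second CLI argument works the same way and selects the advance settings file (see `read_advance_settings()` in common.py); for example, the Jenkins pipelines in this repository invoke `python3 test/AOT_Test.py ~/jenkins_ci/settings.yaml ~/jenkins_ci/advance_settings.yaml`.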
65 |
66 | ## advance_settings.yaml
67 | advance_settings.yaml is intended for development use only, e.g. for building SAL from source.
68 | For most end users, configuring settings.yaml is enough.
69 |
70 | ## How to run
71 |
72 | - Install dependency packages :
73 | - `sudo apt install python3` (if not already installed)
74 | - `sudo apt install python` (python2.7 if not already installed, used by p4studio_build.py)
75 | - `sudo apt-get install libusb-1.0-0-dev` (required for BSP compilation)
76 | - `sudo apt-get install libcurl4-openssl-dev` (required for BSP compilation)
77 | - `sudo apt install i2c-tools`
78 | - `sudo apt install gcc-8 g++-8` (SAL is compiled using version 8 compiler)
79 | - Start installation :
80 |   - To install and run the SDE - `python3 bf_sde.py`
81 |   - To run SAL - `python3 sal.py`
82 |     Please note the prerequisites for running SAL: the SDE must be installed beforehand, and
83 |     if not using the 'All in one package', get SAL from APSN support and unzip it to a directory of your choice,
84 |     e.g. `unzip sal_<release>.zip -d <sal_home>`. Also check that this path matches the one configured in your settings file at SAL->sal_home,
85 |     since when running SAL, AOT picks the sal executable as configured in the settings file under SAL->sal_home.
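Note - AOT detects the switch model with `sudo dmidecode -s system-product-name` (see `get_switch_model()` in common.py); if that fails, set the `SWITCH_MODEL` environment variable before running, e.g. `export SWITCH_MODEL=BF2556X_1T` or `export SWITCH_MODEL=BF6064X_T`.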
86 |
87 | ## Support
88 | Raise issues in the GitHub repo (https://github.com/stordis/APS-One-touch) or in APSN's support portal.
90 |
--------------------------------------------------------------------------------
/advance_settings.yaml:
--------------------------------------------------------------------------------
1 | %YAML 1.2
2 | ---
3 |
4 | #In general following settings are not used by end users but by APS Networks internal development.
5 |
 6 | PATH_PREFIX: #Various paths used in this settings file are evaluated relative to this; default is the user's home directory
7 |
8 | BSP:
9 | bsp_repo: /bsp/bf-reference-bsp-9.7.0-BF2556 # Local Repository path to complete BSP code, used while creating release package.
10 |
11 | SAL:
12 | sal_repo: sal # Local sal repo path.
13 |
14 | GB: #The following parameters are useful when you have the SAL source and are building it; otherwise they can be ignored.
15 | gb_src: /gearbox/Px-4.2_2020_03_018/work/cpss/
16 | gb_lib: /gearbox/Px-4.2_2020_03_018/work/cpss/compilation_root/cpss_4.2/ia64_PX/
--------------------------------------------------------------------------------
/bf_sde.py:
--------------------------------------------------------------------------------
1 | import getpass
2 | import os
3 | import shutil
4 | import tarfile
5 | import zipfile
6 | from pathlib import Path
7 |
8 | import constants
9 | from common import create_symlinks, delete_files, execute_cmd_n_get_output, get_env_var, \
10 | get_from_setting_dict, \
11 | get_sde_dir_name_in_tar, get_sde_home_absolute, get_sde_pkg_abs_path, \
12 | set_env_var, validate_path_existence, \
13 | append_to_env_var, \
14 | dname, get_switch_model, execute_cmd, get_ref_bsp_abs_path, \
15 | get_aps_bsp_pkg_abs_path, execute_cmd_n_get_output_2, get_abs_path, \
16 | get_from_advance_setting_dict, create_release, get_p4_prog_name, do_basic_path_validation
17 | from constants import stratum_profile, p4_prog_env_var_name
18 | from drivers import load_drivers
19 |
20 |
21 | def get_sde_build_flags():
22 | return get_from_setting_dict(constants.sde_details_node, constants.sde_build_flags_node)
23 |
24 |
25 | def get_p4_studio_build_profile_name():
26 | return get_from_setting_dict('BF SDE', 'p4studio_build_profile')
27 |
28 |
29 | def install_sde_deps():
30 | os.system('sudo apt -y install python')
31 |
32 |
33 | def build_sde():
34 | #install_sde_deps()
35 | sde_tar = tarfile.open(get_sde_pkg_abs_path())
36 | sde_home_absolute = get_sde_home_absolute()
37 | sde_build_flags = get_sde_build_flags()
38 |
 39 |     # Deletion is required, otherwise moving the directories
 40 |     # in further steps might create issues.
 41 |     # Delete only when the user has opted not to resume the build:
42 | # if sde_build_flags is not None and '-rb' not in sde_build_flags \
43 | # and '-bm' not in sde_build_flags:
44 | try:
45 | print("Deleting previous installation at {}.".format(
46 | sde_home_absolute))
47 | os.system('sudo rm -rf {}'.format(sde_home_absolute))
48 | except FileNotFoundError:
49 | print('{} already deleted.'.format(sde_home_absolute))
50 |
51 | # Extract tar here i.e. in APS one touch directory
52 | sde_tar.extractall()
 53 |     # In case SDE home is configured in the yaml to some other directory;
 54 |     # if both paths are the same, the following move is skipped.
55 | if not os.path.exists(sde_home_absolute):
56 | shutil.move(get_sde_dir_name_in_tar(), sde_home_absolute)
57 | sde_tar.close()
58 | os.chdir(sde_home_absolute)
59 | p4studio_build_profile = get_abs_path(get_p4_studio_build_profile_name())
60 |
61 | sde_install_cmd = "{0}/p4studio/p4studio profile apply {1}".format(
62 | sde_home_absolute,
63 | p4studio_build_profile)
64 |
65 | os.environ[
66 | constants.path_env_var_name] += os.pathsep + sde_home_absolute + '/install/bin/'
67 | print('Building sde with command {}'.format(sde_install_cmd))
68 | os.system(sde_install_cmd)
69 | return True
70 |
71 |
72 | def start_bf_switchd():
73 | # os.chdir(common.dname)
74 | print('Starting BF switchd.')
75 | set_sde_env_n_load_drivers()
76 |
77 | p4_prog_name = get_env_var(p4_prog_env_var_name)
78 |
79 | # LD_LIBRARY_PATH is set for ONLPv2 case, libs in install/lib folder are
80 | # not found there but this does not cause any harm for Ubuntu case either.
81 | # set_env('LD_LIBRARY_PATH', "/{0}/install/lib".format(get_env_var('SDE')))
82 | # print("LD_LIBRARY_PATH : ")
83 | # os.system("echo $LD_LIBRARY_PATH")
84 |
85 | if not p4_prog_name:
86 | print("Starting switchd without p4 program")
87 | start_switchd_cmd = "sudo -E {0}/run_switchd.sh -c {0}/pkgsrc/p4-examples/tofino/tofino_skip_p4.conf.in " \
88 | "--skip-p4".format(get_env_var('SDE'))
89 | else:
90 | print("Starting switchd with P4 prog:{}".format(p4_prog_name))
91 | start_switchd_cmd = 'sudo -E {0}/run_switchd.sh -p {1}'.format(
92 | get_env_var('SDE'),
93 | p4_prog_name.replace(".p4", ""))
94 | username = getpass.getuser()
95 |
96 | if username == "root":
97 | start_switchd_cmd = start_switchd_cmd.replace("sudo -E", "")
98 | alloc_dma()
99 | print("Starting switchd with command : {}".format(start_switchd_cmd))
100 | os.system(start_switchd_cmd)
101 |
102 |
103 | def alloc_dma():
104 | output = execute_cmd_n_get_output('cat /etc/sysctl.conf')
105 | if 'vm.nr_hugepages = 128' not in output:
106 | print('Setting up huge pages...')
107 | dma_alloc_cmd = 'sudo /{}/pkgsrc/ptf-modules/ptf-utils/dma_setup.sh'.format(
108 | get_env_var('SDE'))
109 | os.system(dma_alloc_cmd)
110 |
111 |
112 | def ask_user_for_building_sde():
113 | install_sde = input("SDE : build y/[n]?")
114 | if not install_sde:
115 | install_sde = "n"
116 | if install_sde == "y":
117 | create_symlinks()
118 | build_sde()
119 |
120 |
121 | def get_diff_file_name():
122 | return '{}.diff'.format(get_switch_model()).lower()
123 |
124 |
125 | def get_bsp_repo_abs_path():
126 | path_from_adv_setting = get_from_advance_setting_dict(
127 | constants.BSP_node, constants.bsp_repo_node_name)
128 | if path_from_adv_setting is None:
129 | return get_default_bsp_repo_path()
130 | else:
131 | return get_abs_path(path_from_adv_setting)
132 |
133 |
134 | def prepare_bsp_pkg():
135 | bsp_repo_abs = get_bsp_repo_abs_path()
136 | earliest_commit_hash = execute_cmd_n_get_output_2(
137 | 'git --git-dir {0}/.git rev-list --max-parents=0 HEAD'.format(
138 | bsp_repo_abs))
139 | latest_commit_hash = execute_cmd_n_get_output_2(
140 | 'git --git-dir {0}/.git rev-parse HEAD'.format(bsp_repo_abs))
141 | os.chdir(bsp_repo_abs)
142 | execute_cmd_n_get_output_2(
143 | 'git --git-dir {0}/.git diff {1} {2} -- '
144 | '\':!./platforms/apsn/\' '
145 | '\':!.idea/\' '
146 | '\':!.gitignore\' '
147 | '\':!autom4te.cache\' '
148 | '\':!*Makefile.in\' '
149 | '> {3}'.format(bsp_repo_abs, earliest_commit_hash, latest_commit_hash,
150 | bsp_repo_abs + '/' + get_diff_file_name()))
151 | create_release(bsp_repo_abs, [[bsp_repo_abs, '/platforms/apsn/'],
152 | [bsp_repo_abs, '/platforms/common/'],
153 | [bsp_repo_abs, '/platforms/include/'],
154 | [bsp_repo_abs, '/CMakeLists.txt'],
155 | [bsp_repo_abs, '/drivers']])
156 |
157 |
158 | def ask_user_for_building_bsp():
159 | in_put = input("BSP : build y/c(clean)/[n]? ")
160 | # "OR developer's option- packaging(p)?")
161 | if not in_put:
162 | in_put = "n"
163 |     # Order is important: if both 'p' and 'y' options are given,
164 |     # then package first, then build.
165 |
166 | for i in in_put:
167 | if i == "p":
168 | prepare_bsp_pkg()
169 | elif i == "y":
170 | install_switch_bsp()
171 | elif i == "c":
172 | clean_bsp()
173 |         elif i == "n":
174 | pass
175 | else:
176 | print("Unknown option for BSP build {}:".format(i))
177 |
178 |
179 | def ask_user_for_starting_sde():
180 | start_sde = input("SDE : start y/[n]?")
181 | if not start_sde:
182 | start_sde = "n"
183 | if start_sde == "y":
184 | start_bf_switchd()
185 | else:
186 | print("You selected not to start SDE.")
187 |
188 |
189 | def load_bf_sde_profile():
190 | ask_user_for_building_sde()
191 | ask_user_for_building_bsp()
192 | prepare_sde_release()
193 | ask_user_for_starting_sde()
194 |
195 |
196 | def prepare_sde_release():
197 | # TODO prepare precompiled binaries from SDE, to avoid the need for
198 | # building SDE.
199 | pass
200 |
201 |
202 | def set_sde_env():
203 | print("Setting environment for BF_SDE.")
204 | sde_home_absolute = get_sde_home_absolute()
205 | if validate_path_existence(sde_home_absolute, 'SDE'):
206 | set_env_var(constants.sde_env_var_name, sde_home_absolute)
207 | set_env_var(constants.sde_install_env_var_name,
208 | get_env_var(constants.sde_env_var_name) + '/install/')
209 | os.environ[constants.p4_prog_env_var_name] = get_p4_prog_name()
210 | append_to_env_var(constants.path_env_var_name, get_env_var(
211 | constants.sde_install_env_var_name) + '/bin/')
212 | print(
213 | 'Environment variables set: \n SDE: {0} \n SDE_INSTALL: {1} \n PATH: {2} \n P4_PROG: {3}'.format(
214 | get_env_var(constants.sde_env_var_name),
215 | get_env_var(constants.sde_install_env_var_name),
216 | get_env_var(constants.path_env_var_name),
217 | get_env_var(constants.p4_prog_env_var_name)))
218 | return True
219 | else:
220 |         print('ERROR: SDE directory could not be found, exiting.')
221 | exit(0)
222 |
223 | def set_sde_env_n_load_drivers():
224 | set_sde_env()
225 | load_drivers()
226 | return True
227 |
228 |
229 | def install_bsp_deps():
230 | os.system('sudo apt -y install libusb-1.0-0-dev libcurl4-openssl-dev')
231 |
232 |
233 | def clean_bsp():
234 | print('Cleaning BSP...')
235 | to_delete = [get_aps_bsp_pkg_abs_path() + f for f in
236 | ['/CMakeCache.txt',
237 | '/Makefile',
238 | '/CMakeFiles', '/cmake-build-debug']]
239 | execute_cmd(
240 | 'make -C {} clean'.
241 | format(get_aps_bsp_pkg_abs_path()))
242 |
243 | for file in to_delete:
244 |         print('Deleting {}'.format(file))
245 | delete_files(file)
246 | return True
247 |
248 |
249 | def install_switch_bsp():
250 | set_sde_env()
251 | aps_bsp_installation_file = get_aps_bsp_pkg_abs_path()
252 | if (get_switch_model() == constants.bf6064x_t and (
253 | 'BF2556' in aps_bsp_installation_file or 'bf2556' in aps_bsp_installation_file)) \
254 | or (get_switch_model() == constants.bf2556x_1t and (
255 | 'BF6064' in aps_bsp_installation_file or 'bf6064' in aps_bsp_installation_file)):
256 | print("ERROR: Incompatible BSP provided in settings.yaml,"
257 | " Switch model is {model} but BSP is {bsp}".
258 | format(model=get_switch_model(),
259 | bsp=aps_bsp_installation_file))
260 | exit(0)
261 |
262 | print("Installing {}".format(aps_bsp_installation_file))
263 |
264 | os.environ['BSP_INSTALL'] = get_env_var('SDE_INSTALL')
265 | print(
266 | "BSP_INSTALL directory set to {}".format(
267 | os.environ['BSP_INSTALL']))
268 |
269 | install_bsp_deps()
270 | cmake_cmd = 'cmake -DCMAKE_INSTALL_PREFIX={}'.format(get_env_var('SDE_INSTALL'))
271 | cmake_cmd += ' -B ' + aps_bsp_installation_file
272 | cmake_cmd += ' -S ' + aps_bsp_installation_file
273 | execute_cmd(cmake_cmd)
274 | os.system("make -C {0}".format(aps_bsp_installation_file))
275 | os.system("make -C {0} install".format(aps_bsp_installation_file))
276 |
277 | return True
278 |
279 |
280 | def just_load_sde():
281 | ask_user_for_building_sde()
282 | ask_user_for_building_bsp()
283 | prepare_sde_release()
284 | ask_user_for_starting_sde()
285 |
286 |
287 | def get_default_bsp_repo_path():
288 |     if get_switch_model() == constants.bf2556x_1t:
289 |         return get_abs_path('/bsp/bf-reference-bsp-9.2.0-BF2556')
290 |     elif get_switch_model() == constants.bf6064x_t:
291 |         return get_abs_path('/bsp/bf-reference-bsp-9.2.0-BF6064')
292 |     else:
293 |         print('Development BSP can\'t be retrieved for switch model {}'.
294 |               format(get_switch_model()))
295 |
296 |
297 | # Keep the entry point at the bottom so that every helper above
298 | # (including get_default_bsp_repo_path) is defined before just_load_sde() runs.
299 | if __name__ == '__main__':
300 |     do_basic_path_validation()
301 |     just_load_sde()
302 |
--------------------------------------------------------------------------------
/common.py:
--------------------------------------------------------------------------------
1 | import os
2 | import platform
3 | import sys
4 | import tarfile
5 | import zipfile
6 | from pathlib import Path
7 | import subprocess
8 | import yaml
9 |
10 | import constants
11 | from constants import BSP_node, aps_bsp_pkg_node, ref_bsp_node, \
12 | switch_model_env_var_name, bf2556x_1t, bf6064x_t, p4_prog_node_name
13 | import shutil
14 |
15 | abspath = os.path.abspath(__file__)
16 | # Absolute directory name containing this file
17 | dname = os.path.dirname(abspath)
18 |
19 |
20 | def read_settings():
21 | try:
22 | # Custom path for settings file can be given as CLI arg.
23 | settings_file = sys.argv[1]
24 | except IndexError:
25 | # If no settings file provided as CLI arg default one from the
26 | # project path will be picked.
27 | settings_file = "{}/settings.yaml".format(dname)
28 |
29 | if settings_file is None:
30 | print('Invalid settings file for AOT {}'.format(settings_file))
31 | exit(0)
32 | else:
33 | print('Reading settings from file {}'.format(settings_file))
34 | with open(settings_file, 'r') as stream:
35 | try:
36 | return yaml.safe_load(stream)
37 | except yaml.YAMLError as exc:
38 | print(exc)
39 | print("Error occurred while reading settings file {}".format(settings_file))
40 | exit(0)
41 |
42 |
43 | settings_dict = read_settings()
44 |
45 |
46 | def read_advance_settings():
47 | """
48 | Settings used for development.
49 | """
50 | try:
51 | # Custom path for settings file can be given as CLI arg.
52 | advance_settings_file = sys.argv[2]
53 | except IndexError:
54 | # If no settings file provided as CLI arg default one from the
55 | # project path will be picked.
56 | advance_settings_file = "{}/advance_settings.yaml".format(dname)
57 |
58 | if advance_settings_file is None:
59 | print('Invalid settings file for AOT {}'.format(advance_settings_file))
60 | exit(0)
61 | else:
62 | print('Reading settings from file {}'.format(advance_settings_file))
63 | with open(advance_settings_file, 'r') as stream:
64 | try:
65 | return yaml.safe_load(stream)
66 | except yaml.YAMLError as exc:
67 | print(exc)
68 | print("Error occurred while reading settings file {}".
69 | format(advance_settings_file))
70 | exit(0)
71 |
72 |
73 | advance_settings_dict = read_advance_settings()
74 |
75 |
76 | def delete_files(file):
77 | try:
78 | shutil.rmtree(file)
79 | except FileNotFoundError:
80 | print('{} already deleted'.format(file))
81 | except PermissionError:
82 | i = input('Alert! deleting file {}, y/n ?'.format(file))
83 | if i == 'y':
84 | os.system('sudo rm -rf {}'.format(file))
85 | except NotADirectoryError:
86 | os.system('rm {}'.format(file))
87 |
88 |
89 | aot_release_dir = dname + '/release'
90 | if not os.path.exists(aot_release_dir):
91 | os.mkdir(aot_release_dir)
92 |
93 |
94 | def get_latest_git_tag(local_git_repo):
95 | return execute_cmd_n_get_output_2(
96 | 'git --git-dir {0}/.git describe --abbrev=0 --tags'.
97 | format(local_git_repo)).strip()
98 |
99 |
100 | def get_git_tag_hash(local_git_repo, git_tag):
101 | return execute_cmd_n_get_output_2(
102 | 'git --git-dir {0}/.git rev-list -n 1 {1}'.
103 | format(local_git_repo, git_tag))
104 |
105 |
106 | def get_latest_git_hash(local_git_repo):
107 | return execute_cmd_n_get_output_2(
108 | 'git --git-dir {0}/.git rev-parse HEAD'.
109 | format(local_git_repo))
110 |
111 |
112 | def get_2nd_latest_git_tag(local_git_repo):
113 | # If only one tag exists then second last release tag refers to previous
114 | # commit hash to latest release tag.
115 | return execute_cmd_n_get_output_2(
116 | 'git --git-dir {0}/.git describe --abbrev=0 --tags `git --git-dir {0}/.git rev-list --tags --skip=1 '
117 | '--max-count=1` --always'.
118 | format(local_git_repo)).strip()
119 |
120 |
121 | def create_nested_dir(destination_location, dir_path):
122 | """
123 | Creates nested path given in @dir_path string inside @destination_location,
124 | the outermost directory is ignored and that can be copied by calling function when
125 | all parent directory structure is present.
126 | Args:
127 | destination_location:
128 | dir_path:
129 | Returns:
130 | """
131 | nested_path_list = dir_path.split('/')
132 | # clear up empty strings in the path
133 | nested_path_list = list(filter(None, nested_path_list))
134 | # Slice the last dir
135 | nested_path_list = nested_path_list[:-1]
136 | for d in nested_path_list:
137 | destination_location += '/' + d + '/'
138 | if not os.path.exists(destination_location):
139 | os.mkdir(destination_location)
140 |
141 |
142 | def create_release(local_git_repo, files_to_release):
143 | """
144 |
145 | Args:
146 | local_git_repo: Absolute path to local git repository
147 | files_to_release: File paths relative to local_git_repo to be part of release.
148 | Returns:
149 |
150 | """
151 | rel_tag_latest = get_latest_git_tag(local_git_repo)
152 | release_tag_2ndlast = get_2nd_latest_git_tag(local_git_repo)
153 | hash_rel_tag_latest = get_git_tag_hash(local_git_repo, rel_tag_latest)
154 | hash_latest = get_latest_git_hash(local_git_repo)
155 |
156 | if hash_latest == hash_rel_tag_latest:
157 | print('Preparing main release {}'.format(rel_tag_latest))
158 | print('Preparing release notes since release tag {}'.format(
159 | release_tag_2ndlast))
160 | start_hash_for_rn = release_tag_2ndlast
161 | end_hash_for_rn = rel_tag_latest
162 | arch_name = aot_release_dir + '/{0}_{1}'. \
163 | format(os.path.basename(local_git_repo), rel_tag_latest)
164 | else:
165 | print('Preparing development release.')
166 | start_hash_for_rn = rel_tag_latest
167 | end_hash_for_rn = hash_latest
168 | suffix = execute_cmd_n_get_output_2(
169 | 'git --git-dir {0}/.git describe --tags'.
170 | format(local_git_repo)).strip()
171 | arch_name = aot_release_dir + '/{0}_{1}'. \
172 | format(os.path.basename(local_git_repo), suffix)
173 |
174 | try:
175 | os.mkdir(arch_name)
176 | print('Release directory {} created.'.format(arch_name))
177 | except FileExistsError:
178 | print('Release directory {} already exists, recreated.'.format(
179 | arch_name))
180 | delete_files(arch_name)
181 | os.mkdir(arch_name)
182 |
183 | rel_notes_file = 'RelNotes_{}.txt'.format(os.path.basename(os.path.normpath(arch_name)))
184 | make_rel_notes(local_git_repo, rel_notes_file, start_hash_for_rn, end_hash_for_rn)
185 | for arr in files_to_release:
186 | abs_file_path = arr[0] + '/' + arr[1]
187 | create_nested_dir(arch_name, arr[1])
188 | if os.path.isdir(abs_file_path):
189 | shutil.copytree(abs_file_path, arch_name + '/' + arr[1])
190 | else:
191 | shutil.copyfile(abs_file_path, arch_name + '/' + arr[1])
192 | shutil.copyfile(local_git_repo + '/' + rel_notes_file, arch_name + '/' + rel_notes_file)
193 | shutil.make_archive(arch_name, 'zip', arch_name)
194 | print('Release is available at {}'.format(aot_release_dir))
195 |
196 |
197 | def make_rel_notes(local_git_repo, rel_notes_file, start_hash_for_rn, end_hash_for_rn):
198 | cmd = 'git --git-dir {0}/.git log --pretty=format:%s {2}..{3} > {0}/{1}'. \
199 | format(local_git_repo, rel_notes_file, start_hash_for_rn, end_hash_for_rn)
200 |
201 | print('Executing command : {}'.format(cmd))
202 | os.system(cmd)
203 |
204 |
205 | def check_path(some_path, path_for):
206 | if not os.path.exists(some_path):
207 | print(
208 | "ERROR: Invalid {0} path {1}.".format(path_for,
209 | some_path))
210 | exit(0)
211 | return True
212 |
213 |
214 | def validate_path_existence(some_path, path_for):
215 |     if ':' in some_path:
216 |         # PATH-like value: validate every component, not just the first one.
217 |         for pth in some_path.split(':'):
218 |             check_path(pth, path_for)
219 |         return True
220 |     return check_path(some_path, path_for)
221 |
222 |
223 | def get_kernel_major_version():
224 | rel = platform.release()
225 | rel_list = rel.split(".")
226 | return rel_list.pop(0) + "." + rel_list.pop(0)
227 |
228 |
229 | def is_onl():
230 | if 'OpenNetworkLinux' in platform.release():
231 | print('Platform info {}'.format(
232 | platform.version()))
233 | return True
234 | return False
235 |
236 |
237 | def is_ubuntu():
238 | if 'Ubuntu' in platform.version():
239 | print('Platform info {}'.format(
240 | platform.version()))
241 | return True
242 | return False
243 |
244 |
245 | def get_path_prefix():
246 | p = get_from_setting_dict(constants.path_prefix_node)
247 | if not p:
248 | return str(Path.home())
249 | return p
250 |
251 |
252 | def get_abs_path(pth):
253 | return get_path_prefix() + '/' + pth
254 |
255 |
256 | def get_env_var(var_name):
257 | try:
258 | return os.environ[var_name]
259 | except KeyError:
260 | print('INFO: env_var {} is not set.'.format(var_name))
261 |
262 |
263 | def append_to_env_var(src_env_var_name, new_val_to_append):
264 | if get_env_var(src_env_var_name) is None:
265 | os.environ[src_env_var_name] = new_val_to_append
266 | else:
267 | os.environ[src_env_var_name] += os.pathsep + new_val_to_append
268 |
269 |
270 | def set_env_var(var_name, var_val):
271 | """
272 | Sets env_var to var_val.
273 | """
274 | if validate_path_existence(var_val, var_name):
275 | os.environ[var_name] = var_val
276 | return True
277 | return False
278 |
279 |
280 | def execute_cmd_n_get_output(cmd):
281 | """
282 | Returns console output of the command
283 | """
284 | print('Executing sys cmd : {}'.format(cmd))
285 |
286 | cmd_output = subprocess.run(cmd.split(' '), stdout=subprocess.PIPE,
287 | stderr=subprocess.STDOUT)
288 | return cmd_output.stdout.decode('UTF-8')
289 |
290 |
291 | def execute_cmd_n_get_output_2(cmd):
292 | return subprocess.check_output(cmd, shell=True).decode('UTF-8').strip()
293 |
294 |
295 | def execute_cmd(cmd):
296 | print('Executing cmd : {}'.format(cmd))
297 | os.system(cmd)
298 |
299 |
300 | def create_symlinks():
301 | # #currently following symlinks are necessary only in case of ONL.
302 | if is_onl():
303 | src = '/usr/share/onl/packages/amd64/onl-kernel-{}-lts-x86-64-all/mbuilds/'.format(
304 | get_kernel_major_version())
305 | # Needed to build sde.
306 | sde_symlink = '/lib/modules/{}/build'.format(platform.release())
307 | # needed to build irq.
308 | irq_symlink = '/usr/src/linux-headers-{}'.format(platform.release())
309 | if os.path.islink(sde_symlink):
310 | print('Removing symlink {}'.format(sde_symlink))
311 | os.unlink(sde_symlink)
312 | print('Creating symlink {}'.format(sde_symlink))
313 | os.symlink(src, sde_symlink)
314 |
315 | if os.path.islink(irq_symlink):
316 |             print('Removing symlink {}'.format(irq_symlink))
317 | os.unlink(irq_symlink)
318 | print('Creating symlink {}'.format(irq_symlink))
319 | os.symlink(src, irq_symlink)
320 | return True
321 |
322 |
323 | def get_from_setting_dict(*keys):
324 |     # keys are nested node names, ordered from the outermost node to the innermost
325 | val = settings_dict.copy()
326 | for key in keys:
327 | val = val.get(key)
328 | return val
329 |
330 |
331 | def get_from_advance_setting_dict(*keys):
332 |     # keys are nested node names, ordered from the outermost node to the innermost
333 | val = advance_settings_dict.copy()
334 | for key in keys:
335 | val = val.get(key)
336 | return val
337 |
338 |
339 | def get_sde_pkg_abs_path():
340 | sde_pkg = get_abs_path(
341 | get_from_setting_dict('BF SDE', 'sde_pkg'))
342 | if not tarfile.is_tarfile(sde_pkg):
343 | print("Invalid tofino SDE tar file {} can not build.".format(sde_pkg))
344 | exit(0)
345 | return sde_pkg
346 |
347 |
348 | def get_aps_bsp_pkg_abs_path():
349 | bsp_pkg = get_abs_path(
350 | get_from_setting_dict(BSP_node, aps_bsp_pkg_node))
351 | return bsp_pkg
352 |
353 |
354 | def get_ref_bsp_abs_path():
355 | bsp_pkg = get_abs_path(
356 | get_from_setting_dict(BSP_node, ref_bsp_node))
357 | if not tarfile.is_tarfile(bsp_pkg):
358 | print("Invalid Reference BSP tar file {} can not build.".format(bsp_pkg))
359 | exit(0)
360 | return bsp_pkg
361 |
362 |
363 | def get_sde_dir_name_in_tar():
364 | sde_tar = tarfile.open(get_sde_pkg_abs_path())
365 | sde_dir_name = sde_tar.getnames()[0]
366 | sde_tar.close()
367 | return sde_dir_name
368 |
369 |
370 | def get_sde_home_absolute():
371 | sde_home_in_config = get_from_setting_dict('BF SDE', 'sde_home')
372 | if sde_home_in_config:
373 | # return absolute path as configured in yaml
374 | return get_abs_path(sde_home_in_config)
375 | # If not given in yaml, return sde_home relative to APS one touch
376 | return dname + '/' + get_sde_dir_name_in_tar()
377 |
378 |
379 | def get_sde_install_dir_absolute():
380 | return get_sde_home_absolute() + '/install'
381 |
382 |
383 | def get_gb_src_home_from_config():
384 | return advance_settings_dict.get('GB').get('gb_src')
385 |
386 |
387 | def get_gb_src_home_absolute():
388 | return get_abs_path(get_gb_src_home_from_config())
389 |
390 |
391 | def get_gb_lib_home_from_config():
392 | return advance_settings_dict.get('GB').get('gb_lib')
393 |
394 |
395 | def get_gb_lib_home_absolute():
396 | return get_abs_path(get_gb_lib_home_from_config())
397 |
398 |
399 | def get_switch_model_from_env():
400 | model_name = get_env_var(switch_model_env_var_name)
401 | if model_name is None or model_name not in [bf2556x_1t, bf6064x_t]:
402 | print('Please set env_var SWITCH_MODEL with values either {0} or {1}, '
403 | 'e.g.- export SWITCH_MODEL={0}'.
404 | format(bf2556x_1t, bf6064x_t))
405 | exit(0)
406 | return model_name
407 |
408 |
409 | def get_switch_model():
410 | output = execute_cmd_n_get_output_2('sudo dmidecode -s system-product-name')
411 | if 'BF2556' in output:
412 | switch_model = bf2556x_1t
413 | elif 'BF6064' in output:
414 | switch_model = bf6064x_t
415 | else:
416 |         print('Switch model couldn\'t be retrieved from the system; checking environment for {}.'.
417 |               format(switch_model_env_var_name))
418 | switch_model = get_switch_model_from_env()
419 | return switch_model
420 |
421 |
422 | print("Switch model is", get_switch_model())
423 |
424 |
425 | def get_p4_prog_name():
426 | p4_prog_name = get_from_setting_dict('BF SDE', p4_prog_node_name)
427 | if p4_prog_name is None:
428 | p4_prog_name = ''
429 | return p4_prog_name
430 |
431 |
432 | def do_basic_path_validation():
433 | # Do basic path verification.
434 | validate_path_existence(get_sde_pkg_abs_path(), 'Barefoot SDE')
435 |
--------------------------------------------------------------------------------
/constants.py:
--------------------------------------------------------------------------------
1 | # Switch Types
2 | bf2556x_1t = 'BF2556X_1T'
3 | bf6064x_t = 'BF6064X_T'
4 | # p4 studio profile names
5 | stratum_profile = 'stratum_profile'
6 |
7 | # Env var names
8 | switch_model_env_var_name = 'SWITCH_MODEL'
9 | sal_home_env_var_name = 'SAL_HOME'
10 | tp_install_env_var_name = 'TP_INSTALL'
11 | pythonpath_env_var_name = 'PYTHONPATH'
12 | gb_src_home_env_var_name = 'GB_SRC_HOME'
13 | gb_lib_home_env_var_name = 'GB_LIB_HOME'
14 | sde_env_var_name = 'SDE'
15 | sde_install_env_var_name = 'SDE_INSTALL'
16 | p4_prog_env_var_name = 'P4_PROG'
17 | sde_include_env_var_name = 'SDE_INCLUDE'
18 | ld_lib_path_env_var_name = 'LD_LIBRARY_PATH'
19 | pi_install_env_var_name = 'PI_INSTALL'
20 | bf_sde_install_env_var_name = 'BF_SDE_INSTALL'
21 | path_env_var_name = 'PATH'
22 |
23 | # Yaml settings nodes
24 | BSP_node = 'BSP'
25 | ref_bsp_node = 'ref_bsp'
26 | aps_bsp_pkg_node = 'aps_bsp_pkg'
27 | bsp_repo_node_name = 'bsp_repo'
28 | sal_repo_node_name = 'sal_repo'
29 | switch_model_node = 'SWITCH Model'
30 | sal_home_node = 'sal_home'
31 | sal_sw_attr_node = 'SAL'
32 | name_node = 'name'
33 | selected_node = 'selected'
34 | path_prefix_node = 'PATH_PREFIX'
35 | # SAL related nodes
36 | details_node = 'details'
37 | tp_install_node_name = 'tp_install'
38 | dut_ips_node_name = 'dut_ips'
39 | # SDE related nodes
40 | sde_build_flags_node = 'build_flags'
41 | sde_modules_node = 'modules'
42 | sde_details_node = 'BF SDE'
43 | p4_prog_node_name = 'p4_prog'
44 | # Node value strings
45 | sde_module_bf_kdrv_string_value = 'bf_kdrv'
46 | sde_module_bf_kpkt_string_value = 'bf_kpkt'
47 |
--------------------------------------------------------------------------------
/drivers.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tarfile
3 |
4 | import constants
5 | from common import execute_cmd_n_get_output, get_env_var, dname, \
6 | create_symlinks, \
7 | is_ubuntu, get_switch_model, get_from_setting_dict
8 | from constants import sde_module_bf_kdrv_string_value, \
9 | sde_module_bf_kpkt_string_value
10 |
11 | installation_files = {
12 | "irq_debug_tgz": "./irq_debug.tgz"
13 | }
14 |
15 |
16 | def get_sde_modules():
17 | return get_from_setting_dict(constants.sde_details_node, constants.sde_modules_node)
18 |
19 |
20 | def load_and_verify_kernel_modules():
21 | output = execute_cmd_n_get_output('lsmod')
22 | bf_mod = True
23 |
24 | sde_module_names = get_sde_modules()
25 | if sde_module_names is not None:
26 | for module_name in sde_module_names:
27 | if module_name == sde_module_bf_kdrv_string_value:
28 | if module_name not in output:
29 | load_bf_kdrv()
30 | else:
31 | print('Module {} already loaded'.format(module_name))
32 | elif module_name == sde_module_bf_kpkt_string_value:
33 | if module_name not in output:
34 | load_bf_kpkt()
35 | else:
36 | print('Module {} already loaded'.format(module_name))
37 | else:
38 | print('Invalid module to load - {}.'.format(module_name))
39 | exit(0)
40 | else:
 41 |         print('Select at least one SDE module to load in settings.yaml')
42 | exit(0)
43 |
44 | output = execute_cmd_n_get_output('lsmod')
45 |
46 | if not any(mod in output for mod in [sde_module_bf_kdrv_string_value,
47 | sde_module_bf_kpkt_string_value]):
48 | bf_mod = False
49 | print("ERROR: Neither of {0}/{1} module loaded.".
50 | format(sde_module_bf_kdrv_string_value,
51 | sde_module_bf_kpkt_string_value))
52 |
53 | # Load switch specific kernel modules
54 | if get_switch_model() == constants.bf2556x_1t:
55 | return bf_mod and load_and_verify_kernel_modules_bf2556()
56 | else:
57 | return bf_mod and load_and_verify_kernel_modules_bf6064()
58 |
59 |
60 | def load_drivers():
61 | print('Loading kernel modules.')
62 | if not load_and_verify_kernel_modules():
63 | print("ERROR:Some kernel modules are not loaded.")
64 | exit(0)
65 |
66 |
 67 | def load_and_verify_kernel_modules_bf6064():
 68 |     # Each entry below is an independent i2cset command; run them one at a
 69 |     # time instead of passing everything as arguments to the first i2cset.
 70 |     i2c_cmds = ['sudo i2cset -y 0 0x70 0x20',
 71 |                 'sudo i2cset -y 0 0x32 0xE 0x0',
 72 |                 'sudo i2cset -y 0 0x32 0xF 0x0',
 73 |                 'sudo i2cset -y 0 0x34 0x2 0x0',
 74 |                 'sudo i2cset -y 0 0x34 0x3 0x0',
 75 |                 'sudo i2cset -y 0 0x34 0x4 0x0',
 76 |                 'sudo i2cset -y 0 0x35 0x2 0x0',
 77 |                 'sudo i2cset -y 0 0x35 0x3 0x0',
 78 |                 'sudo i2cset -y 0 0x35 0x4 0x0',
 79 |                 'sudo i2cset -y 0 0x70 0x20',
 80 |                 'sudo i2cset -y 0 0x32 0x14 0xff',
 81 |                 'sudo i2cset -y 0 0x32 0x15 0xff',
 82 |                 'sudo i2cset -y 0 0x34 0xB 0xff',
 83 |                 'sudo i2cset -y 0 0x34 0xC 0xff',
 84 |                 'sudo i2cset -y 0 0x34 0xD 0xff',
 85 |                 'sudo i2cset -y 0 0x35 0xB 0xff',
 86 |                 'sudo i2cset -y 0 0x35 0xC 0xff',
 87 |                 'sudo i2cset -y 0 0x35 0xD 0xff']
 88 |     for cmd in i2c_cmds:
 89 |         execute_cmd_n_get_output(cmd)
 90 |     return True
87 |
88 |
89 | sde_folder_path = ""
90 |
91 |
92 | def load_and_verify_kernel_modules_bf2556():
93 | output = execute_cmd_n_get_output('lsmod')
94 | irq_debug = True
95 |
96 | if 'irq_debug' not in output:
97 | install_irq_debug()
98 |
99 | # Verify that modules are loaded.
100 | output = execute_cmd_n_get_output('lsmod')
101 |
102 | if 'irq_debug' not in output:
103 | irq_debug = False
104 | print("ERROR:irq_debug is not loaded.")
105 |
106 | return irq_debug
107 |
108 |
109 | def install_irq_debug():
110 | print("Installing irq_debug...")
111 | os.chdir(dname)
112 | print("Working dir :{}".format(dname))
113 | irq = installation_files["irq_debug_tgz"]
114 | print("Installing irq debug drivers.")
115 | create_symlinks()
116 | tar = tarfile.open(irq)
117 | irq_folder_name = tar.getnames()[0]
118 | tar.extractall()
119 | tar.close()
120 | print(irq_folder_name)
121 | os.chdir(irq_folder_name)
122 | os.system("make clean")
123 | os.system("make")
124 |
125 | print("Installing module irq_debug.")
126 | os.system("sudo insmod ./irq_debug.ko")
127 |
128 |
129 | def load_bf_kdrv():
130 | print("Loading bf_kdrv....")
131 | print("Using SDE {} for loading bf_kdrv.".format(get_env_var('SDE')))
132 | os.system(
133 | "sudo {0}/bin/bf_kdrv_mod_load {0}".format(
134 | get_env_var('SDE_INSTALL')))
135 |
136 |
137 | def load_bf_kpkt():
138 | print("Loading bf_kpkt....")
139 | print("Using SDE {} for loading bf_kpkt.".format(get_env_var('SDE')))
140 | os.system(
141 | "sudo {0}/bin/bf_kpkt_mod_load {0}".format(
142 | get_env_var('SDE_INSTALL')))
143 |
--------------------------------------------------------------------------------
/irq_debug.tgz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/APS-Networks/APS-One-touch/2e7869b40c4a7ceed3a6e3b035ccd7340cab42ce/irq_debug.tgz
--------------------------------------------------------------------------------
/sal.py:
--------------------------------------------------------------------------------
1 | import ipaddress
2 | import os
3 | from threading import Thread
4 |
5 | import common
6 | import constants
7 | from bf_sde import set_sde_env_n_load_drivers, load_bf_sde_profile
8 | from common import delete_files, get_env_var, get_gb_lib_home_absolute, \
9 | execute_cmd, set_env_var, get_gb_src_home_absolute, \
10 | get_abs_path, \
11 | append_to_env_var, create_release, get_from_setting_dict, get_p4_prog_name
12 | from constants import path_env_var_name, pythonpath_env_var_name
13 | from drivers import load_drivers
14 |
15 |
16 | def set_sal_runtime_env():
17 | print("Setting environment for SAL runtime.")
18 | if not set_sde_env_n_load_drivers():
19 | return False
20 | set_env_var(constants.sal_home_env_var_name, get_sal_home_absolute())
21 | set_env_var(constants.tp_install_env_var_name, get_tp_install_path_absolute())
22 | os.environ[constants.p4_prog_env_var_name] = get_p4_prog_name()
23 | print('SAL_HOME: {}'.format(get_env_var(constants.sal_home_env_var_name)))
24 | print('TP_INSTALL: {}'.format(get_env_var(constants.tp_install_env_var_name)))
25 | print('P4_PROG: {}'.format(get_env_var(constants.p4_prog_env_var_name)))
26 | return True
27 |
28 |
29 | def set_sal_env():
30 | print("Setting environment for SAL.")
31 | if not set_sde_env_n_load_drivers():
32 | return False
33 | rc = set_env_var(constants.sal_home_env_var_name, get_sal_repo_absolute())
34 | print('SAL_HOME: {}'.format(get_env_var(constants.sal_home_env_var_name)))
35 | rc &= set_env_var(constants.sde_include_env_var_name,
36 | get_env_var(
37 | constants.sde_install_env_var_name) + '/include')
38 | rc &= set_env_var(constants.gb_src_home_env_var_name,
39 | get_gb_src_home_absolute())
40 | rc &= set_env_var(constants.gb_lib_home_env_var_name,
41 | get_gb_lib_home_absolute())
42 | rc &= set_env_var(constants.tp_install_env_var_name,
43 | get_tp_install_path_absolute())
44 | print('SAL_HOME: %s \
45 | \n SDE: %s \
46 | \n SDE_INSTALL: %s \
47 | \n SDE_INCLUDE: %s \
48 | \n GB_SRC_HOME: %s \
49 | \n GB_LIB_HOME: %s \
50 | \n TP_INSTALL: %s' %
51 | (get_env_var(constants.sal_home_env_var_name),
52 | get_env_var(constants.sde_env_var_name),
53 | get_env_var(constants.sde_install_env_var_name),
54 | get_env_var(constants.sde_include_env_var_name),
55 | get_env_var(constants.gb_src_home_env_var_name),
56 | get_env_var(constants.gb_lib_home_env_var_name),
57 | get_env_var(constants.tp_install_env_var_name)))
58 | return rc
59 |
60 |
61 | def get_sal_home_absolute():
62 | return get_abs_path(get_sal_home_from_config())
63 |
64 |
65 | def get_sal_repo_absolute():
66 | return get_abs_path(get_sal_repo_from_config())
67 |
68 |
69 | def get_tp_install_path_from_settings():
70 | if not get_from_setting_dict(constants.sal_sw_attr_node,
71 | constants.tp_install_node_name):
72 | return get_sal_home_from_config() + '/sal_tp_install/'
73 | else:
74 | return get_from_setting_dict(constants.sal_sw_attr_node,
75 | constants.tp_install_node_name)
76 |
77 |
78 | def get_tp_install_path_absolute():
79 | return get_abs_path(get_tp_install_path_from_settings())
80 |
81 |
82 | def get_sal_home_from_config():
83 | return common.settings_dict. \
84 | get(constants.sal_sw_attr_node).get(constants.sal_home_node)
85 |
86 |
87 | def get_sal_repo_from_config():
88 | return common.advance_settings_dict. \
89 | get(constants.sal_sw_attr_node).get(constants.sal_repo_node_name)
90 |
91 |
92 | def install_sal_build_deps():
93 | os.system('python3 -m pip install grpcio-tools')
94 | os.system('sudo apt install g++-8 gcc-8')
95 | return True
96 |
97 |
98 | def build_sal():
99 | print('Building SAL...')
100 |
101 | cmake_cmd = 'cmake '
102 | cmake_cmd += ' -B ' + get_env_var(constants.sal_home_env_var_name)
103 | cmake_cmd += ' -S ' + get_env_var(constants.sal_home_env_var_name)
104 |
105 | print('Executing cmake command {}.'.format(cmake_cmd))
106 |
107 | execute_cmd(cmake_cmd)
108 | execute_cmd(
109 | 'LD_LIBRARY_PATH={0}/lib:$LD_LIBRARY_PATH make -C {1}'.format(
110 | get_env_var(constants.tp_install_env_var_name),
111 | get_env_var(constants.sal_home_env_var_name)))
112 |
113 | return True
114 |
115 |
116 | def prepare_sal_release():
117 | files_to_copy = [
118 | [get_sal_repo_absolute(), '/include/'],
119 | [get_sal_repo_absolute(), '/src/include/'],
120 | [get_sal_repo_absolute(), '/build'],
121 | [get_sal_repo_absolute(), '/lib'],
122 | [get_sal_repo_absolute(), '/scripts'],
123 | [get_sal_repo_absolute(), '/config'],
124 | [get_sal_repo_absolute(), '/proto'],
125 | [get_sal_repo_absolute(), '/README.md'],
126 | [get_sal_repo_absolute(), '/test/sal_service_test_BF6064.py'],
127 | [get_sal_repo_absolute(), '/test/sal_service_test_BF2556.py'],
128 | [get_sal_repo_absolute(), '/test/TestUtil.py'],
129 | [get_sal_repo_absolute(), '/sal_services_pb2.py'],
130 | [get_sal_repo_absolute(), '/sal_services_pb2_grpc.py'],
131 | [get_sal_repo_absolute(), '/sal_services.grpc.pb.cc'],
132 | [get_sal_repo_absolute(), '/sal_services.grpc.pb.h'],
133 | [get_sal_repo_absolute(), '/sal_services.pb.cc'],
134 | [get_sal_repo_absolute(), '/sal_services.pb.h']
135 | ]
136 |
137 | package_input = input("Package 3rdParty [y]/n:")
138 | if not package_input:
139 | package_input = 'y'
140 | if package_input == 'y':
141 | files_to_copy.append([get_sal_repo_absolute(), '/{}/lib'.
142 | format(sal_3rdparty_build_dir)])
143 | files_to_copy.append([get_sal_repo_absolute(), '/{}/include'.
144 | format(sal_3rdparty_build_dir)])
145 | files_to_copy.append([get_sal_repo_absolute(), '/{}/bin'.
146 | format(sal_3rdparty_build_dir)])
147 | files_to_copy.append([get_sal_repo_absolute(), '/{}/share'.
148 | format(sal_3rdparty_build_dir)])
149 | create_release(get_sal_repo_absolute(), files_to_copy)
150 |
151 | return True
152 |
153 |
154 | def clean_sal():
155 | print('Cleaning SAL...')
156 | to_delete = [get_env_var(constants.sal_home_env_var_name) + f for f in
157 | ['/lib', '/bin', '/build', '/logs/', '/CMakeCache.txt',
158 | '/Makefile',
159 | '/CMakeFiles', '/cmake-build-debug']]
160 | execute_cmd(
161 | 'make -C {} clean'.
162 | format(get_env_var(constants.sal_home_env_var_name)))
163 |
164 | for file in to_delete:
165 |         print('Deleting {}'.format(file))
166 | delete_files(file)
167 | return True
168 |
169 |
170 | def run_sal(debug):
171 | print('Starting SAL reference application...')
172 | load_drivers()
173 | sal_home = get_env_var(constants.sal_home_env_var_name)
174 | sal_executable = sal_home + '/build/salRefApp'
175 | sal_run_cmd = 'sudo -E LD_LIBRARY_PATH={0}:{1}:{2}:{3}:{4} {6} {5}'.format(
176 | sal_home + '/build',
177 | sal_home + '/lib',
178 | get_env_var(constants.tp_install_env_var_name) + '/lib',
179 | get_env_var(constants.sal_home_env_var_name) + '/install/lib',
180 | get_env_var(constants.sde_install_env_var_name) + '/lib', sal_executable,
181 | 'gdb' if debug else '')
182 | print('Running SAL with command: {}'.format(sal_run_cmd))
183 | execute_cmd(sal_run_cmd)
184 | return True
185 |
186 |
187 | def get_dut_ips():
188 | return get_from_setting_dict(constants.sal_sw_attr_node, constants.dut_ips_node_name)
189 |
190 |
191 | def is_valid_ip(ipaddr):
192 | try:
193 | ipaddress.ip_address(ipaddr)
194 | return True
195 | except ValueError:
196 | print('address/netmask is invalid : %s' % ipaddr)
197 | return False
198 |
199 |
200 | def set_sal_test_env():
201 | set_env_var(constants.sal_home_env_var_name, get_sal_home_absolute())
202 | append_to_env_var(pythonpath_env_var_name, get_env_var(constants.sal_home_env_var_name))
203 | append_to_env_var(pythonpath_env_var_name, get_env_var(constants.sal_home_env_var_name) + "/test")
204 | print("%s = %s" % (pythonpath_env_var_name, get_env_var(pythonpath_env_var_name)))
205 |
206 |
207 | def execute_test_cmd(ip, sal_grpc_port):
208 | test_cmd = 'python3 %s/test/SAL_Tests.py %s %s' % \
209 | (get_env_var(constants.sal_home_env_var_name), ip, sal_grpc_port)
210 | os.system(test_cmd)
211 |
212 |
213 | def install_sal_test_deps():
214 | os.system('pip3 install --upgrade pip')
215 | os.system('pip3 install future paramiko grpcio-tools html-testRunner setuptools_rust')
216 | return True
217 |
218 |
219 | def get_sal_ip_port_dict():
220 | ip_port_dict = {}
221 | dut_ips = get_dut_ips()
222 | if dut_ips is not None:
223 | for ip in dut_ips:
224 | ip_port = ip.split(':')
225 | try:
226 | dev_ip = ip_port[0]
227 | sal_grpc_port = ip_port[1]
228 |                 if not is_valid_ip(dev_ip) or not sal_grpc_port:
229 | raise ValueError
230 | elif dev_ip not in ip_port_dict:
231 | ip_port_dict[dev_ip] = sal_grpc_port
232 | except (IndexError, ValueError) as e:
233 | print("ERROR: Invalid DUT_IP or gRPC Port provided for connecting to SAL. : %s" % e)
234 | print("Devices to be tested %s" % ip_port_dict)
235 | return ip_port_dict
236 |
237 |
238 | def execute_sal_tests():
239 | print("Executing tests from %s." % get_env_var(constants.sal_home_env_var_name))
240 |
241 | for dev_ip, sal_grpc_port in get_sal_ip_port_dict().items():
242 | t = Thread(target=execute_test_cmd, name='SAL Tests thread for device %s ' % dev_ip,
243 | args=(dev_ip, sal_grpc_port,))
244 | print("Starting %s" % t.name)
245 | t.start()
246 | return True
247 |
248 |
249 | # Dependencies are built inside the local repository at the following fixed path.
250 | # To run SAL, the 3rdParty path 'tp_install' from settings.yaml is used,
251 | # which may or may not be the same as this 3rdParty build path.
252 | sal_3rdparty_build_dir = '/sal_tp_install'
253 | sal_3rdparty_build_path = get_sal_repo_absolute() + sal_3rdparty_build_dir
254 |
255 |
256 | def install_sal_thirdparty_deps():
257 | print('Installing SAL 3rdparty dependencies.')
258 |
259 | if not os.path.exists(sal_3rdparty_build_path):
260 | os.makedirs(sal_3rdparty_build_path)
261 |
262 | i = input('Install boost y/[n] ?')
263 | if not i or i not in ['y', 'n']:
264 | i = 'n'
265 | if i == 'y' and not install_boost():
266 | return False
267 |
268 | i = input('Install protobuf y/[n] ?')
269 | if not i or i not in ['y', 'n']:
270 | i = 'n'
271 | if i == 'y' and not install_protobuf():
272 | return False
273 |
274 | append_to_env_var(constants.path_env_var_name,
275 | sal_3rdparty_build_path + '/bin/')
276 | print(get_env_var(path_env_var_name))
277 |
278 | i = input('Install gRPC y/[n] ?')
279 | if not i or i not in ['y', 'n']:
280 | i = 'n'
281 | if i == 'y' and not install_grpc():
282 | return False
283 | return True
284 |
285 |
286 | def install_protobuf():
287 | print('Installing protobuf.')
288 | protobuf_ver = 'v3.6.1'
289 | protobuf_dir = '{0}/protobuf{1}/'.format(sal_3rdparty_build_path, protobuf_ver)
290 | if os.path.exists(protobuf_dir):
291 | print('{0} already exists, will rebuild.'.format(protobuf_dir))
292 | else:
293 | os.system(
294 | 'git clone https://github.com/protocolbuffers/protobuf.git {}'.format(
295 | protobuf_dir))
296 | os.chdir(protobuf_dir)
297 | os.system('git checkout tags/{}'.format(protobuf_ver))
298 |
299 | os.chdir(protobuf_dir)
300 | os.system('./autogen.sh')
301 | rc = os.system('./configure -q --prefix={}'.format(sal_3rdparty_build_path))
302 | if rc != 0:
303 | return False
304 | rc = os.system('make -s')
305 | if rc != 0:
306 | return False
307 | # os.system('make check')
308 | rc = os.system('make -s -j install')
309 | if rc != 0:
310 | return False
311 | rc = os.system('sudo ldconfig')
312 | if rc != 0:
313 | return False
314 | return True
315 |
316 |
317 | def install_grpc():
318 | print('Installing gRPC.')
319 | grpc_ver = 'v1.17.0'
320 | grpc_dir = '{0}/grpc{1}/'.format(sal_3rdparty_build_path, grpc_ver)
321 | if os.path.exists(grpc_dir):
322 | print('{0} already exists, will rebuild.'.format(grpc_dir))
323 | else:
324 | os.system(
325 | 'git clone https://github.com/google/grpc.git {}'.format(grpc_dir))
326 | os.chdir(grpc_dir)
327 | os.system('git checkout tags/{}'.format(grpc_ver))
328 | os.system('git submodule update --init --recursive')
329 |
330 | os.chdir(grpc_dir)
331 | make_cmd = 'make clean && LD_LIBRARY_PATH={0}/lib/ PKG_CONFIG_PATH={0}/lib/pkgconfig/:$PKG_CONFIG_PATH \
332 | make -s -I{0} LDFLAGS=-L{0}/lib prefix={0}'.format(sal_3rdparty_build_path, 'include/')
333 | print('Executing CMD: {}'.format(make_cmd))
334 | rc = os.system(make_cmd)
335 | if rc != 0:
336 | print('{} Failed with return code {}'.format(make_cmd, rc))
337 | return False
338 |
339 | make_install_cmd = 'make -s -j install prefix={0}'.format(sal_3rdparty_build_path)
340 | rc = os.system(make_install_cmd)
341 | if rc != 0:
342 | print('{} Failed with return code {}'.format(make_install_cmd, rc))
343 | return False
344 | return True
345 |
346 |
347 | def install_boost():
348 | print('Installing Boost.')
349 | boost_ver = '1_67_0'
350 | boost_dir = '{0}/boost_{1}/'.format(sal_3rdparty_build_path, boost_ver)
351 | boost_arch_name = 'boost_{}.tar.bz2'.format(boost_ver)
352 |
353 | if os.path.exists(boost_dir):
354 | print('{0} already exists, will rebuild.'.format(boost_dir))
355 | else:
356 | os.system('wget http://downloads.sourceforge.net/project/boost/boost/{0}/{1} -P {2}'.
357 | format(boost_ver.replace('_', '.'), boost_arch_name, sal_3rdparty_build_path))
358 |
359 |     rc = os.system('tar -xvf {0} -C {1}'.
360 |                    format(sal_3rdparty_build_path + '/' + boost_arch_name, sal_3rdparty_build_path))
361 |     os.chdir(boost_dir)
362 |     print('./bootstrap.sh --prefix={}'.format(sal_3rdparty_build_path))
363 |     rc |= os.system('./bootstrap.sh --prefix={}'.format(sal_3rdparty_build_path))  # |= keeps any failure
364 |     rc |= os.system('./b2 -j')
365 |     rc |= os.system('./b2 --with-system --with-log --with-program_options install')
366 |     rc |= os.system('sudo ldconfig')
367 |     rc |= os.system('chmod -R a+rwx {}'.format(boost_dir))
368 | if rc != 0:
369 | print('Boost build failed !')
370 | return False
371 |
372 | return True
373 |
374 |
375 | def make_executable(path):
376 | mode = os.stat(path).st_mode
377 | mode |= (mode & 0o444) >> 2 # copy R bits to X
378 | os.chmod(path, mode)
379 |
380 |
381 | def execute_user_action(sal_input):
382 | rc = True
383 |
384 | if 'c' in sal_input:
385 | set_env_var(constants.sal_home_env_var_name, get_sal_home_absolute())
386 | rc &= clean_sal()
387 | if 'i' in sal_input:
388 | rc &= install_sal_thirdparty_deps()
389 | if 'b' in sal_input:
390 | rc &= install_sal_build_deps()
391 | rc &= set_sal_env()
392 | rc &= build_sal()
393 | if 'p' in sal_input:
394 | rc &= prepare_sal_release()
395 | if 'r' in sal_input or 'd' in sal_input:
396 | set_sal_runtime_env()
397 | rc &= run_sal('d' in sal_input)
398 | if 't' in sal_input:
399 | set_sal_test_env()
400 | install_sal_test_deps()
401 | rc &= execute_sal_tests()
402 | return rc
403 |
404 |
405 | def take_user_input():
406 | sal_input = input(
407 | "SAL : run(r), execute_tests(t), [do_nothing(n)]? ")
408 | # "OR developer's options - "
409 | # "build(b), clean(c), debug(d), "
410 | # "install 3rdParty SWs(i), "
411 | # "prepare rel(p) ? ")
412 |
413 | if 'n' in sal_input or not sal_input:
414 |         # If the input contains 'n' (even mixed input like 'cbrn') or is empty,
415 |         # the user meant do nothing.
416 | return
417 |
418 | execute_user_action(sal_input)
419 |
420 |
421 | def load_sal_profile():
422 | load_bf_sde_profile()
423 | take_user_input()
424 |
425 |
426 | def just_load_sal():
427 | """
428 |     When the dependencies of SAL are already taken care of, execute this file directly.
429 | :return:
430 | """
431 | take_user_input()
432 |
433 |
434 | if __name__ == '__main__':
435 | just_load_sal()
436 |
--------------------------------------------------------------------------------
/settings.yaml:
--------------------------------------------------------------------------------
1 | %YAML 1.2
2 | ---
3 |
 4 | #For a quick start go to the bottom and type in the profile of choice under 'selected'.
5 |
 6 | PATH_PREFIX: #Various paths used in this settings file are evaluated relative to this; default is the user's home directory
7 |
8 | BSP:
9 | aps_bsp_pkg: /bsp/bf-reference-bsp-9.7.0-BF2556_1.0.0 #APSN BSP
10 |
11 | BF SDE:
12 | sde_pkg: /BF/bf-sde-9.7.0.tgz
13 | sde_home: /BF/bf-sde-9.7.0 #Path will be automatically created by AOT, this is SDE installation dir path (relative to PATH_PREFIX as every other path in this file). If left blank default is APS-One-touch/.
14 | p4studio_build_profile: /BF/bf-sde-9.7.0/p4studio/profiles/p4-runtime-tofino.yaml
15 | p4_prog: #Leave it blank to start SDE without a P4 program or give p4 program name which is already built in SDE.
16 | modules: #Following barefoot SDE modules will be loaded before starting SDE.
17 | - bf_kdrv
18 | #- bf_kpkt
19 |
20 | SAL :
21 | sal_home: /APS-One-touch/release/sal_1.3.3 #Path to directory where SAL artifacts are present
22 |   tp_install: #3rdParty libs path used to run the SAL, defaults to <sal_home>/sal_tp_install
23 | # If executing SAL tests configure
24 |   dut_ips: #One or more device addresses to execute SAL tests against;
25 | #SAL should be running on following device address(es) before running any tests.
26 | - 127.0.0.1:50054
27 | - 10.10.192.218:50054
28 | - 10.10.192.219:50054
29 |
30 |
31 |
--------------------------------------------------------------------------------
/test/AOT_Test.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import sys
4 | import unittest
5 |
6 | import HtmlTestRunner
7 |
8 | import bf_sde
9 | import sal
10 | from threading import Thread
11 | import socket
12 | import time
13 | import os
14 |
15 | from sal import get_sal_ip_port_dict
16 |
17 |
18 | def is_port_up(host, port, retries=10, timeout=2):
19 | result = 0
20 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
21 | for i in range(retries):
22 |         result = sock.connect_ex((host, int(port)))  # port may arrive as a string
23 | if result != 0:
24 | time.sleep(timeout)
25 | else:
26 | break
27 | sock.close()
28 | return not bool(result)
29 |
30 |
31 | def kill_process(process_name):
32 | os.system("sudo killall -9 {}".format(process_name))
33 |
34 |
35 | class AOTTests(unittest.TestCase):
36 |
37 | @classmethod
38 | def setUpClass(cls):
39 | bf_sde.create_symlinks()
40 | bf_sde.build_sde()
41 |
42 | def setUp(self):
43 | pass
44 |
45 | def tearDown(self):
46 | pass
47 |
48 | def test_SAL_Build(self):
49 | self.assertTrue(sal.execute_user_action('c'))
50 | self.assertTrue(sal.execute_user_action('b'))
51 | self.assertTrue(sal.execute_user_action('c'))
52 |
53 | def test_SAL_Run(self):
54 | self.assertTrue(sal.execute_user_action('c'))
55 | self.assertTrue(sal.execute_user_action('b'))
56 | t = Thread(target=sal.execute_user_action, name='Run SAL', args=('r',))
57 | t.daemon = True
58 | t.start()
59 | time.sleep(5)
60 | for dev_ip, sal_grpc_port in get_sal_ip_port_dict().items():
61 | self.assertTrue(is_port_up(dev_ip, sal_grpc_port))
62 | self.assertTrue(sal.execute_user_action('c'))
63 | kill_process('salRefApp')
64 |
65 | def test_switchd_run(self):
66 | t = Thread(target=bf_sde.start_bf_switchd, name='bf_switchd')
67 | t.daemon = True
68 | t.start()
69 | self.assertTrue(
70 | is_port_up('127.0.0.1', 9999, retries=30, timeout=2))
71 | kill_process('bf_switchd')
72 |
73 | def test_build_bsp(self):
74 | self.assertTrue(bf_sde.install_switch_bsp())
75 | t = Thread(target=bf_sde.start_bf_switchd, name='bf_switchd')
76 | t.daemon = True
77 | t.start()
78 | self.assertTrue(
79 | is_port_up('127.0.0.1', 9999, retries=30, timeout=2))
80 | kill_process('bf_switchd')
81 |
82 |
83 | if __name__ == '__main__':
84 |     # Drop the settings-file CLI args (already consumed by common.py at import time) so unittest ignores them.
85 |     del sys.argv[1:]
86 | unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(output='report'))
87 |
--------------------------------------------------------------------------------