├── .gitignore ├── LICENSE ├── README.md ├── entry_2021.py ├── requirements.txt ├── score_2021.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 SEU Wearable Heart-Sleep-Emotion Intelligent Monitoring Lab 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Python example code for the 4th China Physiological Signal Challenge 2021 2 | 3 | ## What's in this repository? 4 | 5 | We implemented a threshold-based classifier that uses the coefficient of sample entropy (cosEn) of the ECG lead signals as features. This simple example illustrates how to format your Python entry for the Challenge. However, it is not designed to score well (or, more accurately, designed not to do well), so you should not use it as a baseline for your model's performance. 6 | 7 | The code uses two main scripts, as described below, to run and test your algorithm for the 2021 Challenge. 8 | 9 | ## How do I run these scripts? 10 | 11 | You can run this baseline method by installing the requirements 12 | 13 | pip install requirements.txt 14 | 15 | and running 16 | 17 | python entry_2021.py 18 | 19 | where is the folder path of the test set, is the folder path of your detection results. 20 | 21 | ## How do I run my code and save my results? 22 | 23 | Please edit entry_2021.py to implement your algorithm. You should save the results as ‘.json’ files by record. The format is as {‘predict_endpoints’: [[s0, e0], [s1, e1], …, [sm-1, em-2]] }. The name of the result file should be the same as the corresponding record file. 24 | 25 | After obtaining the test results, you can evaluate the scores of your method by running 26 | 27 | python score_2021.py 28 | 29 | where is the folder save the answers, which is the same path as while the data and annotations are stored with 'wfdb' format. is the folder path of your detection results. 30 | 31 | ## Useful links 32 | 33 | - [MATLAB example code for The China Physiological Signal Challenge (CPSC2021)](https://github.com/CPSC-Committee/cpsc2021-matlab-entry) -------------------------------------------------------------------------------- /entry_2021.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import numpy as np 4 | import os 5 | import sys 6 | 7 | import wfdb 8 | from utils import qrs_detect, comp_cosEn, save_dict 9 | 10 | """ 11 | Written by: Xingyao Wang, Chengyu Liu 12 | School of Instrument Science and Engineering 13 | Southeast University, China 14 | chengyu@seu.edu.cn 15 | 16 | Save answers to '.json' files, the format is as {‘predict_endpoints’: [[s0, e0], [s1, e1], …, [sm-1, em-2]]}. 17 | """ 18 | 19 | def load_data(sample_path): 20 | sig, fields = wfdb.rdsamp(sample_path) 21 | length = len(sig) 22 | fs = fields['fs'] 23 | 24 | return sig, length, fs 25 | 26 | def ngrams_rr(data, length): 27 | grams = [] 28 | for i in range(0, length-12, 12): 29 | grams.append(data[i: i+12]) 30 | return grams 31 | 32 | def challenge_entry(sample_path): 33 | """ 34 | This is a baseline method. 35 | """ 36 | 37 | sig, _, fs = load_data(sample_path) 38 | sig = sig[:, 1] 39 | end_points = [] 40 | 41 | r_peaks = qrs_detect(sig, fs=200) 42 | print(r_peaks) 43 | rr_seq = np.diff(r_peaks) / fs 44 | len_rr = len(rr_seq) 45 | 46 | rr_seq_slice = ngrams_rr(rr_seq, len_rr) 47 | is_af = [] 48 | for rr_period in rr_seq_slice: 49 | cos_en, _ = comp_cosEn(rr_period) 50 | if cos_en <= -1.4: 51 | is_af.append(0) 52 | else: 53 | is_af.append(1) 54 | is_af = np.array([[j] * 12 for j in is_af]).flatten() 55 | rr_seq_last = rr_seq[-12: ] 56 | cos_en, _ = comp_cosEn(rr_seq_last) 57 | if cos_en <= -1.4: 58 | is_af_last = 0 59 | else: 60 | is_af_last = 1 61 | 62 | len_rr_remain = len_rr - int(12*len(rr_seq_slice)) 63 | is_af = np.concatenate((is_af, np.array([is_af_last] * len_rr_remain).flatten()), axis=0) 64 | 65 | if np.sum(is_af) == len(is_af): 66 | end_points.append([0, len(sig)-1]) 67 | elif np.sum(is_af) != 0: 68 | state_diff = np.diff(is_af) 69 | start_r = np.where(state_diff==1)[0] + 1 70 | end_r = np.where(state_diff==-1)[0] + 1 71 | 72 | if is_af[0] == 1: 73 | start_r = np.insert(start_r, 0, 0) 74 | if is_af[-1] == 1: 75 | end_r = np.insert(end_r, len(end_r), len(is_af)-1) 76 | start_r = np.expand_dims(start_r, -1) 77 | end_r = np.expand_dims(end_r, -1) 78 | start_end = np.concatenate((r_peaks[start_r], r_peaks[end_r]), axis=-1).tolist() 79 | end_points.extend(start_end) 80 | 81 | pred_dcit = {'predict_endpoints': end_points} 82 | 83 | return pred_dcit 84 | 85 | 86 | if __name__ == '__main__': 87 | DATA_PATH = sys.argv[1] 88 | RESULT_PATH = sys.argv[2] 89 | if not os.path.exists(RESULT_PATH): 90 | os.makedirs(RESULT_PATH) 91 | 92 | test_set = open(os.path.join(DATA_PATH, 'RECORDS'), 'r').read().splitlines() 93 | for i, sample in enumerate(test_set): 94 | print(sample) 95 | sample_path = os.path.join(DATA_PATH, sample) 96 | pred_dict = challenge_entry(sample_path) 97 | 98 | save_dict(os.path.join(RESULT_PATH, sample+'.json'), pred_dict) 99 | 100 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.16.2 2 | scipy==1.2.1 3 | wfdb==2.2.1 4 | matplotlib==3.0.3 5 | pandas==0.24.2 6 | peakutils==1.3.3 7 | scikit_learn==0.24.1 8 | -------------------------------------------------------------------------------- /score_2021.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import numpy as np 4 | import json 5 | import os 6 | import sys 7 | 8 | import scipy.io as sio 9 | import wfdb 10 | 11 | """ 12 | Written by: Xingyao Wang, Chengyu Liu 13 | School of Instrument Science and Engineering 14 | Southeast University, China 15 | chengyu@seu.edu.cn 16 | """ 17 | 18 | R = np.array([[1, -1, -.5], [-2, 1, 0], [-1, 0, 1]]) 19 | 20 | class RefInfo(): 21 | def __init__(self, sample_path): 22 | self.sample_path = sample_path 23 | self.fs, self.len_sig, self.beat_loc, self.af_starts, self.af_ends, self.class_true = self._load_ref() 24 | self.endpoints_true = np.dstack((self.af_starts, self.af_ends))[0, :, :] 25 | # self.endpoints_true = np.concatenate((self.af_starts, self.af_ends), axis=-1) 26 | 27 | if self.class_true == 1 or self.class_true == 2: 28 | self.onset_score_range, self.offset_score_range = self._gen_endpoint_score_range() 29 | else: 30 | self.onset_score_range, self.offset_score_range = None, None 31 | 32 | def _load_ref(self): 33 | sig, fields = wfdb.rdsamp(self.sample_path) 34 | ann_ref = wfdb.rdann(self.sample_path, 'atr') 35 | 36 | fs = fields['fs'] 37 | length = len(sig) 38 | sample_descrip = fields['comments'] 39 | 40 | beat_loc = np.array(ann_ref.sample) # r-peak locations 41 | ann_note = np.array(ann_ref.aux_note) # rhythm change flag 42 | 43 | af_start_scripts = np.where((ann_note=='(AFIB') | (ann_note=='(AFL'))[0] 44 | af_end_scripts = np.where(ann_note=='(N')[0] 45 | 46 | if 'non atrial fibrillation' in sample_descrip: 47 | class_true = 0 48 | elif 'persistent atrial fibrillation' in sample_descrip: 49 | class_true = 1 50 | elif 'paroxysmal atrial fibrillation' in sample_descrip: 51 | class_true = 2 52 | else: 53 | print('Error: the recording is out of range!') 54 | 55 | return -1 56 | 57 | return fs, length, beat_loc, af_start_scripts, af_end_scripts, class_true 58 | 59 | def _gen_endpoint_score_range(self): 60 | """ 61 | 62 | """ 63 | onset_range = np.zeros((self.len_sig, ),dtype=np.float) 64 | offset_range = np.zeros((self.len_sig, ),dtype=np.float) 65 | for i, af_start in enumerate(self.af_starts): 66 | if self.class_true == 2: 67 | if max(af_start-1, 0) == 0: 68 | onset_range[: self.beat_loc[af_start+2]] += 1 69 | elif max(af_start-2, 0) == 0: 70 | onset_range[self.beat_loc[af_start-1]: self.beat_loc[af_start+2]] += 1 71 | onset_range[: self.beat_loc[af_start-1]] += .5 72 | else: 73 | onset_range[self.beat_loc[af_start-1]: self.beat_loc[af_start+2]] += 1 74 | onset_range[self.beat_loc[af_start-2]: self.beat_loc[af_start-1]] += .5 75 | onset_range[self.beat_loc[af_start+2]: self.beat_loc[af_start+3]] += .5 76 | elif self.class_true == 1: 77 | onset_range[: self.beat_loc[af_start+2]] += 1 78 | onset_range[self.beat_loc[af_start+2]: self.beat_loc[af_start+3]] += .5 79 | for i, af_end in enumerate(self.af_ends): 80 | if self.class_true == 2: 81 | if min(af_end+1, len(self.beat_loc)-1) == len(self.beat_loc)-1: 82 | offset_range[self.beat_loc[af_end-2]: ] += 1 83 | elif min(af_end+2, len(self.beat_loc)-1) == len(self.beat_loc)-1: 84 | offset_range[self.beat_loc[af_end-2]: self.beat_loc[af_end+1]] += 1 85 | offset_range[self.beat_loc[af_end+1]: ] += 0.5 86 | else: 87 | offset_range[self.beat_loc[af_end-2]: self.beat_loc[af_end+1]] += 1 88 | offset_range[self.beat_loc[af_end+1]: min(self.beat_loc[af_end+2], self.len_sig-1)] += .5 89 | offset_range[self.beat_loc[af_end-3]: self.beat_loc[af_end-2]] += .5 90 | elif self.class_true == 1: 91 | offset_range[self.beat_loc[af_end-2]: ] += 1 92 | offset_range[self.beat_loc[af_end-3]: self.beat_loc[af_end-2]] += .5 93 | 94 | return onset_range, offset_range 95 | 96 | def load_ans(ans_file): 97 | endpoints_pred = [] 98 | if ans_file.endswith('.json'): 99 | json_file = open(ans_file, "r") 100 | ans_dic = json.load(json_file) 101 | endpoints_pred = np.array(ans_dic['predict_endpoints']) 102 | 103 | elif ans_file.endswith('.mat'): 104 | ans_struct = sio.loadmat(ans_file) 105 | endpoints_pred = ans_struct['predict_endpoints']-1 106 | 107 | return endpoints_pred 108 | 109 | def ue_calculate(endpoints_pred, endpoints_true, onset_score_range, offset_score_range): 110 | score = 0 111 | ma = len(endpoints_true) 112 | mr = len(endpoints_pred) 113 | 114 | if mr == 0: 115 | score = 0 116 | 117 | else: 118 | for [start, end] in endpoints_pred: 119 | score += onset_score_range[int(start)] 120 | score += offset_score_range[int(end)] 121 | 122 | score *= (ma / max(ma, mr)) 123 | 124 | return score 125 | 126 | def ur_calculate(class_true, class_pred): 127 | score = R[int(class_true), int(class_pred)] 128 | 129 | return score 130 | 131 | def score(data_path, ans_path): 132 | # AF burden estimation 133 | SCORE = [] 134 | 135 | def is_mat_or_json(file): 136 | return (file.endswith('.json')) + (file.endswith('.mat')) 137 | ans_set = filter(is_mat_or_json, os.listdir(ans_path)) 138 | # test_set = open(os.path.join(data_path, 'RECORDS'), 'r').read().splitlines() 139 | for i, ans_sample in enumerate(ans_set): 140 | sample_nam = ans_sample.split('.')[0] 141 | sample_path = os.path.join(data_path, sample_nam) 142 | 143 | endpoints_pred = load_ans(os.path.join(ans_path, ans_sample)) 144 | TrueRef = RefInfo(sample_path) 145 | 146 | if len(endpoints_pred) == 0: 147 | class_pred = 0 148 | elif len(endpoints_pred) == 1 and np.diff(endpoints_pred)[-1] == TrueRef.len_sig - 1: 149 | class_pred = 1 150 | else: 151 | class_pred = 2 152 | 153 | ur_score = ur_calculate(TrueRef.class_true, class_pred) 154 | 155 | if TrueRef.class_true == 1 or TrueRef.class_true == 2: 156 | ue_score = ue_calculate(endpoints_pred, TrueRef.endpoints_true, TrueRef.onset_score_range, TrueRef.offset_score_range) 157 | else: 158 | ue_score = 0 159 | 160 | u = ur_score + ue_score 161 | SCORE.append(u) 162 | 163 | score_avg = np.mean(SCORE) 164 | 165 | return score_avg 166 | 167 | if __name__ == '__main__': 168 | TESTSET_PATH = sys.argv[1] 169 | RESULT_PATH = sys.argv[2] 170 | score_avg = score(TESTSET_PATH, RESULT_PATH) 171 | print('AF Endpoints Detection Performance: %0.4f' %score_avg) 172 | 173 | with open(os.path.join(RESULT_PATH, 'score.txt'), 'w') as score_file: 174 | print('AF Endpoints Detection Performance: %0.4f' %score_avg, file=score_file) 175 | 176 | score_file.close() 177 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import numpy as np 3 | import sys 4 | 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | import peakutils 8 | from sklearn import preprocessing 9 | from scipy import signal 10 | 11 | """ 12 | Written by: Xingyao Wang, Chengyu Liu 13 | School of Instrument Science and Engineering 14 | Southeast University, China 15 | chengyu@seu.edu.cn 16 | """ 17 | 18 | def p_t_qrs(ecg_original, fs=1000, gr=1): 19 | delay = 0 20 | skip = 0 21 | m_selected_RR = 0 22 | mean_RR = 0 23 | ser_back = 0 24 | 25 | if (fs == 200): 26 | # Low pass and High pass 27 | # Low pass 28 | wn = 12 * 2 / fs 29 | N = 3 30 | a, b = signal.butter(N, wn, 'low') 31 | ecg_l = signal.filtfilt(a, b, ecg_original) 32 | ecg_l = ecg_l / max(abs(ecg_l)) 33 | ecg_l = np.around(ecg_l, decimals=4) 34 | 35 | # High pass 36 | wn = 5 * 2 / fs 37 | N = 3 38 | a, b = signal.butter(N, wn, 'high') 39 | ecg_h = signal.filtfilt(a, b, ecg_original) 40 | ecg_h = ecg_h / max(abs(ecg_h)) 41 | 42 | else: 43 | # Bandpass 44 | f1 = 5 45 | f2 = 15 46 | wn = [] 47 | wn.append(f1 * 2 / fs) 48 | wn.append(f2 * 2 / fs) 49 | N = 3 50 | a, b = signal.butter(N, wn, 'bandpass') 51 | ecg_h = signal.filtfilt(a, b, ecg_original) 52 | ecg_h = ecg_h / max(abs(ecg_h)) 53 | 54 | # Derivative 55 | int_c = (5 - 1) / (fs * 1 / 40) 56 | x = np.arange(1,6) 57 | xp = np.dot(np.array([1, 2, 0, -2, -1]), (1 / 8) * fs) 58 | fp = np.arange(1,5+int_c,int_c) 59 | b = np.interp(fp, x, xp) 60 | ecg_d = signal.filtfilt(b, 1, ecg_h) 61 | ecg_d = ecg_d / max(ecg_d) 62 | 63 | # Squaring and Moving average 64 | ecg_s = np.power(ecg_d, 2) 65 | ecg_m = np.convolve(ecg_s ,np.ones(int(np.around(0.150*fs)))/np.around(0.150*fs)) 66 | delay = delay + np.around(0.150*fs) / 2 67 | # Fiducial Marks 68 | locs = peakutils.indexes(ecg_m, thres=0, min_dist=np.around(0.2 * fs)) 69 | pks = ecg_m[locs[:]] 70 | 71 | # Init other parameters 72 | LLp = len(pks) 73 | qrs_c = np.zeros(LLp) 74 | qrs_i = np.zeros(LLp) 75 | qrs_i_raw = np.zeros(LLp) 76 | qrs_amp_raw= np.zeros(LLp) 77 | nois_c = np.zeros(LLp) 78 | nois_i = np.zeros(LLp) 79 | SIGL_buf = np.zeros(LLp) 80 | NOISL_buf = np.zeros(LLp) 81 | SIGL_buf1 = np.zeros(LLp) 82 | NOISL_buf1 = np.zeros(LLp) 83 | THRS_buf1 = np.zeros(LLp) 84 | THRS_buf = np.zeros(LLp) 85 | 86 | # Init training phase 87 | THR_SIG = max(ecg_m[0:2*fs])*1/3 88 | THR_NOISE = np.mean(ecg_m[0:2*fs])*1/2 89 | SIG_LEV= THR_SIG 90 | NOISE_LEV = THR_NOISE 91 | 92 | 93 | # Init bandpath filter threshold 94 | THR_SIG1 = max(ecg_h[0:2*fs])*1/3 95 | THR_NOISE1 = np.mean(ecg_h[0:2*fs])*1/2 96 | SIG_LEV1 = THR_SIG1 97 | NOISE_LEV1 = THR_NOISE1 98 | 99 | # Thresholding and desicion rule 100 | Beat_C = -1 101 | Beat_C1 = -1 102 | Noise_Count = 0 103 | 104 | for i in range(LLp): 105 | if ((locs[i] - np.around(0.150*fs)) >= 1 and (locs[i] <= len(ecg_h))): 106 | _start = locs[i] - np.around(0.15*fs).astype(int) 107 | _ = ecg_h[_start:locs[i]] 108 | y_i = max(_) 109 | x_i = np.argmax(_) 110 | else: 111 | if i == 0: 112 | y_i = max(ecg_h[0:locs[i]]) 113 | x_i = np.argmax(ecg_h[0:locs[i]]) 114 | ser_back = 1 115 | elif (locs[i] >= len(ecg_h)): 116 | _ = ecg_h[locs[i] - np.around(0.150*fs).astype(int):] 117 | y_i = max(_) 118 | x_i = np.argmax(_) 119 | 120 | # Update the heart_rate 121 | if (Beat_C >= 9): 122 | diffRR = np.diff(qrs_i[Beat_C-8:Beat_C]) 123 | mean_RR = np.mean(diffRR) 124 | comp = qrs_i[Beat_C] - qrs_i[Beat_C-1] 125 | if ((comp <= 0.92*mean_RR) or (comp >= 1.16*mean_RR)): 126 | THR_SIG = 0.5*(THR_SIG) 127 | THR_SIG1 = 0.5*(THR_SIG1) 128 | else: 129 | m_selected_RR = mean_RR 130 | 131 | # Calculate the mean last 8 R waves to ensure that QRS is not 132 | if m_selected_RR: 133 | test_m = m_selected_RR 134 | elif (mean_RR and m_selected_RR == 0): 135 | test_m = mean_RR 136 | else: 137 | test_m = 0 138 | 139 | if test_m: 140 | if ((locs[i] - qrs_i[Beat_C]) >= np.around(1.66*test_m)): 141 | _start = int(qrs_i[Beat_C] + np.around(0.20*fs)) 142 | _end = int(locs[i] - np.around(0.20*fs)) 143 | pks_temp = max(ecg_m[_start:_end+1]) 144 | locs_temp = np.argmax(ecg_m[_start:_end+1]) 145 | locs_temp = qrs_i[Beat_C] + np.around(0.20*fs) + locs_temp - 1 146 | 147 | if (pks_temp > THR_NOISE): 148 | Beat_C += 1 149 | qrs_c[Beat_C] = pks_temp 150 | qrs_i[Beat_C] = locs_temp 151 | 152 | if (locs_temp <= len(ecg_h)): 153 | _start = int(locs_temp - np.around(0.150*fs)) 154 | _end = int(locs_temp + 1) 155 | y_i_t = max(ecg_h[_start:_end]) 156 | x_i_t = np.argmax(ecg_h[_start:_end]) 157 | else: 158 | _ = locs_temp - np.around(0.150*fs) 159 | y_i_t = max(ecg_h[_:]) 160 | x_i_t = np.argmax(ecg_h[_:]) 161 | 162 | if (y_i_t > THR_NOISE1): 163 | Beat_C1 += 1 164 | qrs_i_raw[Beat_C1] = locs_temp - np.around(0.150*fs) + (x_i_t - 1) 165 | qrs_amp_raw[Beat_C1] = y_i_t 166 | SIG_LEV1 = 0.25*y_i_t + 0.75*SIG_LEV1 167 | 168 | not_nois = 1 169 | SIG_LEV = 0.25*pks_temp + 0.75*SIG_LEV 170 | else: 171 | not_nois = 0 172 | 173 | # Find noise and QRS peaks 174 | if (pks[i] >= THR_SIG): 175 | if (Beat_C >= 3): 176 | if ((locs[i] - qrs_i[Beat_C]) <= np.around(0.3600*fs)): 177 | _start = locs[i] - np.around(0.075*fs).astype('int') 178 | Slope1 = np.mean(np.diff(ecg_m[_start:locs[i]])) 179 | _start = int(qrs_i[Beat_C] - np.around(0.075*fs)) 180 | _end = int(qrs_i[Beat_C]) 181 | Slope2 = np.mean(np.diff(ecg_m[_start:_end])) 182 | if abs(Slope1) <= abs(0.5*(Slope2)): 183 | nois_c[Noise_Count] = pks[i] 184 | nois_i[Noise_Count] = locs[i] 185 | Noise_Count += 1 186 | skip = 1 187 | NOISE_LEV1 = 0.125*y_i + 0.875*NOISE_LEV1 188 | NOISE_LEV = 0.125*pks[i] + 0.875*NOISE_LEV 189 | else: 190 | skip = 0 191 | 192 | if (skip == 0): 193 | Beat_C += 1 194 | qrs_c[Beat_C] = pks[i] 195 | qrs_i[Beat_C] = locs[i] 196 | 197 | if (y_i >= THR_SIG1): 198 | Beat_C1 += 1 199 | if ser_back: 200 | qrs_i_raw[Beat_C1] = x_i 201 | else: 202 | qrs_i_raw[Beat_C1] = locs[i] - np.around(0.150*fs) + (x_i - 1) 203 | 204 | qrs_amp_raw[Beat_C1] = y_i 205 | SIG_LEV1 = 0.125*y_i + 0.875*SIG_LEV1 206 | 207 | SIG_LEV = 0.125*pks[i] + 0.875*SIG_LEV 208 | 209 | elif ((THR_NOISE <= pks[i]) and (pks[i] < THR_SIG)): 210 | NOISE_LEV1 = 0.125*y_i + 0.875*NOISE_LEV1 211 | NOISE_LEV = 0.125*pks[i] + 0.875*NOISE_LEV 212 | elif (pks[i] < THR_NOISE): 213 | nois_c[Noise_Count] = pks[i] 214 | nois_i[Noise_Count] = locs[i] 215 | NOISE_LEV1 = 0.125*y_i + 0.875*NOISE_LEV1 216 | NOISE_LEV = 0.125*pks[i] + 0.875*NOISE_LEV 217 | Noise_Count += 1 218 | 219 | # Adjust the threshold with SNR 220 | if (NOISE_LEV != 0 or SIG_LEV != 0): 221 | THR_SIG = NOISE_LEV + 0.25*(abs(SIG_LEV - NOISE_LEV)) 222 | THR_NOISE = 0.5*(THR_SIG) 223 | 224 | if (NOISE_LEV1 != 0 or SIG_LEV1 != 0): 225 | THR_SIG1 = NOISE_LEV1 + 0.25*(abs(SIG_LEV1 - NOISE_LEV1)) 226 | THR_NOISE1 = 0.5*(THR_SIG1) 227 | 228 | SIGL_buf[i] = SIG_LEV 229 | NOISL_buf[i] = NOISE_LEV 230 | THRS_buf[i] = THR_SIG 231 | 232 | SIGL_buf1[i] = SIG_LEV1 233 | NOISL_buf1[i] = NOISE_LEV1 234 | THRS_buf1[i] = THR_SIG1 235 | 236 | skip = 0 237 | not_nois = 0 238 | ser_back = 0 239 | 240 | # Adjust lengths 241 | qrs_i_raw = qrs_i_raw[0:Beat_C1+1] 242 | qrs_amp_raw = qrs_amp_raw[0:Beat_C1+1] 243 | qrs_c = qrs_c[0:Beat_C+1] 244 | qrs_i = qrs_i[0:Beat_C+1] 245 | 246 | return qrs_i_raw 247 | 248 | def qrs_detect(ECG, fs): 249 | winsize = 5 * fs * 60 # 5min 滑窗 250 | #winsize = 10 * fs # 10s 滑窗 251 | NB_SAMP = len(ECG) 252 | peaks = [] 253 | if NB_SAMP < winsize: 254 | peaks.extend(p_t_qrs(ECG, fs)) 255 | peaks = np.array(peaks) 256 | peaks = np.delete(peaks, np.where(peaks >= NB_SAMP-2*fs)[0]) # 删除最后2sR波位置 257 | else: 258 | # 5分钟滑窗检测,重叠5s数据 259 | count = NB_SAMP // winsize 260 | for j in range(count+1): 261 | if j == 0: 262 | ecg_data = ECG[j*winsize: (j+1)*winsize] 263 | 264 | peak = p_t_qrs(ecg_data, fs) 265 | peak = np.array(peak) 266 | peak = np.delete(peak, np.where(peak >= winsize-2*fs)[0]).tolist() # 删除5分钟窗口最后2sR波位置 267 | 268 | peaks.extend(map(lambda n: n+j*winsize, peak)) 269 | elif j == count: 270 | ecg_data = ECG[j*winsize-5*fs: ] 271 | if len(ecg_data) == 0: 272 | pass 273 | else: 274 | peak = p_t_qrs(ecg_data, fs) 275 | peak = np.array(peak) 276 | peak = np.delete(peak, np.where(peak <= 2*fs)[0]).tolist() # 删除最后多余窗口前2sR波位置 277 | 278 | peaks.extend(map(lambda n: n+j*winsize-5*fs, peak)) 279 | else: 280 | ecg_data = ECG[j*winsize-5*fs: (j+1)*winsize] 281 | peak = p_t_qrs(ecg_data, fs) 282 | peak = np.array(peak) 283 | peak = np.delete(peak, np.where((peak <= 2*fs) | (peak >= winsize-2*fs))[0]).tolist() # 删除中间片段5分钟窗口前2s和最后2sR波位置 284 | 285 | peaks.extend(map(lambda n: n+j*winsize-5*fs, peak)) 286 | 287 | peaks = np.array(peaks) 288 | peaks = np.sort(peaks) 289 | dp = np.abs(np.diff(peaks)) 290 | 291 | final_peaks = peaks[np.where(dp >= 0.2*fs)[0]+1] 292 | 293 | return final_peaks 294 | 295 | def sampen(rr_seq, max_temp_len, r): 296 | """ 297 | rr_seq: segment of the RR intervals series 298 | max_temp_len: maximum template length 299 | r: initial value of the tolerance matching 300 | """ 301 | length = len(rr_seq) 302 | lastrun = np.zeros((1,length)) 303 | run = np.zeros((1,length)) 304 | A = np.zeros((max_temp_len,1)) 305 | B = np.zeros((max_temp_len,1)) 306 | p = np.zeros((max_temp_len,1)) 307 | e = np.zeros((max_temp_len,1)) 308 | 309 | for i in range(length - 1): 310 | nj = length - i - 1 311 | for jj in range(nj): 312 | j = jj + i + 2 313 | if np.abs(rr_seq[j-1] - rr_seq[i]) < r: 314 | run[0, jj] = lastrun[0, jj] + 1 315 | am1 = float(max_temp_len) 316 | br1 = float(run[0,jj]) 317 | M1 = min(am1,br1) 318 | 319 | for m in range(int(M1)): 320 | A[m] = A[m] + 1 321 | if j < length: 322 | B[m] = B[m]+1 323 | else: 324 | run[0, jj] = 0 325 | 326 | for j in range(nj): 327 | lastrun[0, j] = run[0,j] 328 | 329 | N = length * (length - 1) / 2 330 | p[0] = A[0] / N 331 | e[0] = -1 * np.log(p[0] + sys.float_info.min) 332 | for m in range(max_temp_len-1): 333 | p[m+1]=A[m+1]/B[m] 334 | e[m+1]=-1*np.log(p[m+1]) 335 | 336 | return e, A, B 337 | 338 | def comp_cosEn(rr_segment): 339 | r = 0.03 # initial value of the tolerance matching 340 | max_temp_len = 2 # maximum template length 341 | min_num_count = 5 # minimum numerator count 342 | dr = 0.001 # tolerance matching increment 343 | match_num = np.ones((max_temp_len,1)) # number of matches for m=1,2,...,M 344 | match_num = -1000 * match_num 345 | while match_num[max_temp_len-1,0] < min_num_count: 346 | e, match_num, B = sampen(rr_segment, max_temp_len, r) 347 | r = r + dr 348 | if match_num[max_temp_len-1, 0] != -1000: 349 | mRR = np.mean(rr_segment) 350 | cosEn = e[max_temp_len-1, 0] + np.log(2 * (r-dr)) - np.log(mRR) 351 | else: 352 | cosEn = -1000 353 | sentropy = e[max_temp_len-1, 0] 354 | 355 | return cosEn, sentropy 356 | 357 | def load_dict(filename): 358 | '''load dict from json file''' 359 | with open(filename,"r") as json_file: 360 | dic = json.load(json_file) 361 | return dic 362 | 363 | def save_dict(filename, dic): 364 | '''save dict into json file''' 365 | with open(filename,'w') as json_file: 366 | json.dump(dic, json_file, ensure_ascii=False) --------------------------------------------------------------------------------