├── layers ├── __init__.py ├── Transformer_EncDec.py ├── SelfAttention_Family.py └── Embed.py ├── model ├── __init__.py └── TimeBridge.py ├── utils ├── __init__.py ├── masking.py ├── metrics.py ├── losses.py ├── tools.py └── timefeatures.py ├── data_provider ├── __init__.py └── data_factory.py ├── requirements.txt ├── LICENSE ├── experiments ├── exp_basic.py └── exp_long_term_forecasting.py ├── README.md ├── .gitignore ├── _logs ├── LongForecasting │ └── TimeBridge │ │ ├── ETTh1_0.35_TimeBridge_720.logs │ │ ├── ETTh1_0.35_TimeBridge_336.logs │ │ ├── ETTh1_0.35_TimeBridge_96.logs │ │ ├── weather_0.1_TimeBridge_720.logs │ │ ├── electricity_0.2_TimeBridge_192.logs │ │ ├── ETTh1_0.35_TimeBridge_192.logs │ │ ├── ETTh2_0.35_TimeBridge_720.logs │ │ ├── electricity_0.2_TimeBridge_336.logs │ │ ├── electricity_0.2_TimeBridge_720.logs │ │ ├── ETTh2_0.35_TimeBridge_192.logs │ │ ├── weather_0.1_TimeBridge_96.logs │ │ ├── ETTh2_0.35_TimeBridge_336.logs │ │ ├── electricity_0.2_TimeBridge_96.logs │ │ └── ETTh2_0.35_TimeBridge_96.logs └── result_long_term_forecast.txt ├── scripts └── TimeBridge.sh └── run.py /layers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /model/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /data_provider/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas==1.5.3 2 | scikit-learn==1.2.2 3 | numpy==1.23.5 4 | matplotlib==3.7.0 5 | torch==2.0.0 6 | reformer-pytorch==1.4.4 7 | pytorch_wavelets==1.3.0 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Peiyuan Liu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /experiments/exp_basic.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | from model import TimeBridge 4 | 5 | 6 | class Exp_Basic(object): 7 | def __init__(self, args): 8 | self.args = args 9 | self.model_dict = { 10 | 'TimeBridge': TimeBridge, 11 | } 12 | self.device = self._acquire_device() 13 | self.model = self._build_model().to(self.device) 14 | 15 | def _build_model(self): 16 | raise NotImplementedError 17 | 18 | 19 | def _acquire_device(self): 20 | if self.args.use_gpu: 21 | os.environ["CUDA_VISIBLE_DEVICES"] = str( 22 | self.args.gpu) if not self.args.use_multi_gpu else self.args.devices 23 | device = torch.device('cuda:{}'.format(self.args.gpu)) 24 | print('Use GPU: cuda:{}'.format(self.args.gpu)) 25 | else: 26 | device = torch.device('cpu') 27 | print('Use CPU') 28 | return device 29 | 30 | def _get_data(self): 31 | pass 32 | 33 | def vali(self): 34 | pass 35 | 36 | def train(self): 37 | pass 38 | 39 | def test(self): 40 | pass 41 | -------------------------------------------------------------------------------- /utils/masking.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | import torch 4 | class TriangularCausalMask(): 5 | def __init__(self, B, L, device="cpu"): 6 | mask_shape = [B, 1, L, L] 7 | with torch.no_grad(): 8 | self._mask = torch.triu(torch.ones(mask_shape, dtype=torch.bool), diagonal=1).to(device) 9 | 10 | @property 11 | def mask(self): 12 | return self._mask 13 | 14 | 15 | class ProbMask(): 16 | def __init__(self, B, H, L, index, scores, device="cpu"): 17 | _mask = torch.ones(L, scores.shape[-1], dtype=torch.bool).to(device).triu(1) 18 | _mask_ex = _mask[None, None, :].expand(B, H, L, scores.shape[-1]) 19 | indicator = _mask_ex[torch.arange(B)[:, None, None], 20 | torch.arange(H)[None, :, None], 21 | index, :].to(device) 22 | self._mask = indicator.view(scores.shape).to(device) 23 | 24 | @property 25 | def mask(self): 26 | return self._mask 27 | 28 | 29 | class LocalMask(): 30 | def __init__(self, B, L, S, device="cpu"): 31 | mask_shape = [B, 1, L, S] 32 | with torch.no_grad(): 33 | self.len = math.ceil(np.log2(L)) 34 | self._mask1 = torch.triu(torch.ones(mask_shape, dtype=torch.bool), diagonal=1).to(device) 35 | self._mask2 = ~torch.triu(torch.ones(mask_shape, dtype=torch.bool), diagonal=-self.len).to(device) 36 | self._mask = self._mask1 | self._mask2  # mask future positions and positions beyond the local window 37 | @property 38 | def mask(self): 39 | return self._mask -------------------------------------------------------------------------------- /utils/metrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def RSE(pred, true): 5 | return np.sqrt(np.sum((true - pred) ** 2)) / np.sqrt(np.sum((true - true.mean()) ** 2)) 6 | 7 | 8 | def CORR(pred, true): 9 | u = ((true - true.mean(0)) * (pred - pred.mean(0))).sum(0) 10 | d = np.sqrt(((true - true.mean(0)) ** 2 * (pred - pred.mean(0)) ** 2).sum(0)) 11 | return (u / d).mean(-1) 12 | 13 | 14 | def MAE(pred, true): 15 | return np.mean(np.abs(pred - true)) 16 | 17 | def mae_2(forecast, target): 18 | return np.abs(forecast - target) 19 | 20 | def MSE(pred, true): 21 | return np.mean((pred - true) ** 2) 22 | 23 | 24 | def RMSE(pred, true): 25 | return np.sqrt(MSE(pred, true)) 26 | 27 | 28 | def MAPE(pred, true): 29 | mape = np.abs((pred - true) / true) 30 | mape = np.where(mape > 5, 0, mape)  # zero out extreme ratios (> 5) so near-zero targets do not dominate the mean 31 | return
np.mean(mape) 32 | 33 | def mape_2(forecast, target): 34 | denom = np.abs(target) 35 | # divide by 1.0 instead of 0.0; when denom is zero the numerator is also 0.0, so the ratio is 0. 36 | denom[denom == 0.0] = 1.0 37 | return 100 * np.abs(forecast - target) / denom 38 | 39 | def MSPE(pred, true): 40 | return np.mean(np.square((pred - true) / true)) 41 | 42 | 43 | def metric(pred, true): 44 | mae = MAE(pred, true) 45 | mse = MSE(pred, true) 46 | rmse = RMSE(pred, true) 47 | mape = MAPE(pred, true) 48 | mspe = MSPE(pred, true) 49 | 50 | return mae, mse, rmse, mape, mspe 51 | -------------------------------------------------------------------------------- /data_provider/data_factory.py: -------------------------------------------------------------------------------- 1 | from data_provider.data_loader import Dataset_ETT_hour, Dataset_ETT_minute, Dataset_Custom, Dataset_Solar, Dataset_PEMS 2 | from torch.utils.data import DataLoader 3 | 4 | data_dict = { 5 | 'ETTh1': Dataset_ETT_hour, 6 | 'ETTh2': Dataset_ETT_hour, 7 | 'ETTm1': Dataset_ETT_minute, 8 | 'ETTm2': Dataset_ETT_minute, 9 | 'Solar': Dataset_Solar, 10 | 'PEMS': Dataset_PEMS, 11 | 'custom': Dataset_Custom, 12 | } 13 | 14 | 15 | def data_provider(args, flag): 16 | Data = data_dict[args.data] 17 | timeenc = 0 if args.embed != 'timeF' else 1 18 | 19 | if flag == 'test': 20 | shuffle_flag = False 21 | drop_last = False 22 | batch_size = 1  # evaluate one window at a time 23 | freq = args.freq 24 | else: 25 | shuffle_flag = True 26 | drop_last = True 27 | batch_size = args.batch_size # bsz for train and valid 28 | freq = args.freq 29 | 30 | data_set = Data( 31 | root_path=args.root_path, 32 | data_path=args.data_path, 33 | flag=flag, 34 | size=[args.seq_len, args.label_len, args.pred_len], 35 | features=args.features, 36 | target=args.target, 37 | timeenc=timeenc, 38 | freq=freq, 39 | seasonal_patterns=args.seasonal_patterns 40 | ) 41 | print(flag, len(data_set)) 42 | data_loader = DataLoader( 43 | data_set, 44 | batch_size=batch_size, 45 | shuffle=shuffle_flag, 46 | num_workers=args.num_workers, 47 | drop_last=drop_last) 48 | return data_set, data_loader 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TimeBridge: Non-Stationarity Matters for Long-term Time Series Forecasting 2 | 3 |
9 | 10 | ## Updates 11 | 12 | 🚩 **2025-05-01:** TimeBridge has been accepted as **ICML 2025 Poster**. 13 | 14 | 🚩 **2025-04-18:** Release the detailed training logs (see [_logs](./_logs/)). 15 | 16 | 🚩 **2025-02-11:** Release the code. 17 | 18 | 🚩 **2024-10-08:** Initial upload to arXiv [[PDF]](https://arxiv.org/abs/2410.04442). 19 | 20 | ## Usage 21 | 22 | 1. Install the dependencies 23 | ```bash 24 | pip install -r requirements.txt 25 | ``` 26 | 27 | 2. Obtain the dataset from [Google Drive](https://drive.google.com/file/d/1l51QsKvQPcqILT3DwfjCgx8Dsg2rpjot/view?usp=drive_link) and extract it to the root directory of the project. Make sure the extracted folder is named `dataset` and has the following structure: 28 | ``` 29 | dataset 30 | ├── electricity 31 | │ └── electricity.csv 32 | ├── ETT-small 33 | │ ├── ETTh1.csv 34 | │ ├── ETTh2.csv 35 | │ ├── ETTm1.csv 36 | │ └── ETTm2.csv 37 | ├── PEMS 38 | │ ├── PEMS03.npz 39 | │ ├── PEMS04.npz 40 | │ ├── PEMS07.npz 41 | │ └── PEMS08.npz 42 | ├── Solar 43 | │ └── solar_AL.txt 44 | ├── traffic 45 | │ └── traffic.csv 46 | └── weather 47 | └── weather.csv 48 | ``` 49 | 50 | 3. Train and evaluate the model. All the training scripts are located in the `scripts` directory. For example, to train the model on the Solar-Energy dataset, run the following command (a direct `run.py` invocation is also sketched at the end of this README): 51 | ```bash 52 | sh ./scripts/TimeBridge.sh 53 | ``` 54 | 55 | 56 | ## Bibtex 57 | If you find this work useful, please consider citing it: 58 | 59 | ``` 60 | @inproceedings{liu2025timebridge, 61 | title={TimeBridge: Non-Stationarity Matters for Long-term Time Series Forecasting}, 62 | author={Liu, Peiyuan and Wu, Beiliang and Hu, Yifan and Li, Naiqi and Dai, Tao and Bao, Jigang and Xia, Shu-Tao}, 63 | booktitle={International Conference on Machine Learning}, 64 | year={2025}, 65 | } 66 | ``` 67 | 68 | ## Contact 69 | If you have any questions, please get in touch with [lpy23@mails.tsinghua.edu.cn](mailto:lpy23@mails.tsinghua.edu.cn) or submit an issue.
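
## Direct invocation (sketch)

The shell script above is the supported entry point. If you want to launch a single configuration directly through `run.py`, the sketch below is a reasonable starting point: the flag names mirror the `Namespace` fields printed at the top of the training logs in [_logs](./_logs/), assuming `run.py` exposes them one-to-one via argparse (values reproduce the ETTh1, `pred_len=96` setting from `ETTh1_0.35_TimeBridge_96.logs`).

```bash
# Hedged example; adjust --root_path to wherever you extracted the dataset.
python run.py \
  --is_training 1 --model TimeBridge --model_id ETTh1_720_96 \
  --data ETTh1 --root_path ./dataset/ETT-small/ --data_path ETTh1.csv \
  --features M --freq h --seq_len 720 --label_len 48 --pred_len 96 \
  --period 24 --enc_in 7 --d_model 128 --n_heads 8 --d_ff 128 \
  --ia_layers 3 --pd_layers 1 --ca_layers 0 --stable_len 6 \
  --batch_size 64 --learning_rate 0.0002 --train_epochs 100 --patience 10
```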
70 | -------------------------------------------------------------------------------- /model/TimeBridge.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from layers.Embed import PatchEmbed 4 | from layers.SelfAttention_Family import TSMixer, ResAttention 5 | from layers.Transformer_EncDec import TSEncoder, IntAttention, PatchSampling, CointAttention 6 | 7 | 8 | class Model(nn.Module): 9 | def __init__(self, configs): 10 | super(Model, self).__init__() 11 | 12 | self.revin = configs.revin  # instance-normalization (RevIN) flag; forecast() below always normalizes 13 | 14 | self.c_in = configs.enc_in 15 | self.period = configs.period 16 | self.seq_len = configs.seq_len 17 | self.pred_len = configs.pred_len 18 | self.num_p = self.seq_len // self.period 19 | if configs.num_p is None: 20 | configs.num_p = self.num_p 21 | 22 | self.embedding = PatchEmbed(configs, num_p=self.num_p) 23 | 24 | layers = self.layers_init(configs) 25 | self.encoder = TSEncoder(layers) 26 | 27 | out_p = self.num_p if configs.pd_layers == 0 else configs.num_p 28 | self.decoder = nn.Sequential( 29 | nn.Flatten(start_dim=-2), 30 | nn.Linear(out_p * configs.d_model, configs.pred_len, bias=False) 31 | ) 32 | 33 | def layers_init(self, configs): 34 | integrated_attention = [IntAttention( 35 | TSMixer(ResAttention(attention_dropout=configs.attn_dropout), configs.d_model, configs.n_heads), 36 | configs.d_model, configs.d_ff, dropout=configs.dropout, stable_len=configs.stable_len, 37 | activation=configs.activation, stable=True, enc_in=self.c_in 38 | ) for i in range(configs.ia_layers)] 39 | 40 | patch_sampling = [PatchSampling( 41 | TSMixer(ResAttention(attention_dropout=configs.attn_dropout), configs.d_model, configs.n_heads), 42 | configs.d_model, configs.d_ff, stable=False, stable_len=configs.stable_len, 43 | in_p=self.num_p if i == 0 else configs.num_p, out_p=configs.num_p, 44 | dropout=configs.dropout, activation=configs.activation 45 | ) for i in range(configs.pd_layers)] 46 | 47 | cointegrated_attention = [CointAttention( 48 | TSMixer(ResAttention(attention_dropout=configs.attn_dropout), 49 | configs.d_model, configs.n_heads), 50 | configs.d_model, configs.d_ff, dropout=configs.dropout, 51 | activation=configs.activation, stable=False, enc_in=self.c_in, stable_len=configs.stable_len, 52 | ) for i in range(configs.ca_layers)] 53 | 54 | return [*integrated_attention, *patch_sampling, *cointegrated_attention] 55 | 56 | def forecast(self, x_enc, x_mark_enc, x_dec, x_mark_dec): 57 | if x_mark_enc is None: 58 | x_mark_enc = torch.zeros((*x_enc.shape[:-1], 4), device=x_enc.device) 59 | 60 | mean, std = (x_enc.mean(1, keepdim=True).detach(), 61 | x_enc.std(1, keepdim=True).detach()) 62 | x_enc = (x_enc - mean) / (std + 1e-5) 63 | 64 | x_enc = self.embedding(x_enc, x_mark_enc) 65 | enc_out = self.encoder(x_enc)[0][:, :self.c_in, ...] 66 | dec_out = self.decoder(enc_out).transpose(-1, -2) 67 | 68 | return dec_out * std + mean 69 | 70 | def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec, mask=None): 71 | dec_out = self.forecast(x_enc, x_mark_enc, x_dec, x_mark_dec) 72 | return dec_out[:, -self.pred_len:, :] # [B, L, D] 73 |
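# --- Editorial annotation (not part of the original file): shape walk-through ---
# With B = batch, L = seq_len, C = enc_in and P = seq_len // period:
#   forecast() first instance-normalizes x_enc ([B, L, C]) with its own mean/std,
#   then PatchEmbed appears to produce channel tokens of shape
#   [B, C + 4, P, d_model] (4 extra tokens for the time-feature covariates,
#   matching the enc_in + 4 padding in CointAttention).
#   The encoder applies IntAttention (per-channel over patches, with
#   PeriodNorm-stabilized queries/keys), PatchSampling (P -> configs.num_p),
#   then CointAttention (cross-channel); only the first C channel tokens are kept.
#   The decoder flattens [num_p, d_model], maps it linearly to pred_len, and the
#   transpose plus de-normalization yield the final [B, pred_len, C] forecast.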
-------------------------------------------------------------------------------- /utils/losses.py: -------------------------------------------------------------------------------- 1 | # This source code is provided for the purposes of scientific reproducibility 2 | # under the following limited license from Element AI Inc. The code is an 3 | # implementation of the N-BEATS model (Oreshkin et al., N-BEATS: Neural basis 4 | # expansion analysis for interpretable time series forecasting, 5 | # https://arxiv.org/abs/1905.10437). The copyright to the source code is 6 | # licensed under the Creative Commons - Attribution-NonCommercial 4.0 7 | # International license (CC BY-NC 4.0): 8 | # https://creativecommons.org/licenses/by-nc/4.0/. Any commercial use (whether 9 | # for the benefit of third parties or internally in production) requires an 10 | # explicit license. The subject-matter of the N-BEATS model and associated 11 | # materials are the property of Element AI Inc. and may be subject to patent 12 | # protection. No license to patents is granted hereunder (whether express or 13 | # implied). Copyright © 2020 Element AI Inc. All rights reserved. 14 | 15 | """ 16 | Loss functions for PyTorch. 17 | """ 18 | 19 | import torch as t 20 | import torch.nn as nn 21 | import numpy as np 22 | 23 | 24 | 25 | def divide_no_nan(a, b): 26 | """ 27 | a/b where the resulting NaN or Inf values are replaced by 0. 28 | """ 29 | result = a / b 30 | result[result != result] = .0  # NaN != NaN, so this zeroes out NaNs 31 | result[t.isinf(result)] = .0  # zero out both +inf and -inf 32 | return result 33 | 34 | 35 | class mape_loss(nn.Module): 36 | def __init__(self): 37 | super(mape_loss, self).__init__() 38 | 39 | def forward(self, insample: t.Tensor, freq: int, 40 | forecast: t.Tensor, target: t.Tensor, mask: t.Tensor) -> t.Tensor: 41 | """ 42 | MAPE loss as defined in: https://en.wikipedia.org/wiki/Mean_absolute_percentage_error 43 | 44 | :param forecast: Forecast values. Shape: batch, time 45 | :param target: Target values. Shape: batch, time 46 | :param mask: 0/1 mask. Shape: batch, time 47 | :return: Loss value 48 | """ 49 | weights = divide_no_nan(mask, target) 50 | return t.mean(t.abs((forecast - target) * weights)) 51 | 52 | 53 | class smape_loss(nn.Module): 54 | def __init__(self): 55 | super(smape_loss, self).__init__() 56 | 57 | def forward(self, insample: t.Tensor, freq: int, 58 | forecast: t.Tensor, target: t.Tensor, mask: t.Tensor) -> t.Tensor: 59 | """ 60 | sMAPE loss as defined in https://robjhyndman.com/hyndsight/smape/ (Makridakis 1993) 61 | 62 | :param forecast: Forecast values. Shape: batch, time 63 | :param target: Target values. Shape: batch, time 64 | :param mask: 0/1 mask. Shape: batch, time 65 | :return: Loss value 66 | """ 67 | return 200 * t.mean(divide_no_nan(t.abs(forecast - target), 68 | t.abs(forecast.data) + t.abs(target.data)) * mask) 69 | 70 | 71 | class mase_loss(nn.Module): 72 | def __init__(self): 73 | super(mase_loss, self).__init__() 74 | 75 | def forward(self, insample: t.Tensor, freq: int, 76 | forecast: t.Tensor, target: t.Tensor, mask: t.Tensor) -> t.Tensor: 77 | """ 78 | MASE loss as defined in "Scaled Errors" https://robjhyndman.com/papers/mase.pdf 79 | 80 | :param insample: Insample values. Shape: batch, time_i 81 | :param freq: Frequency value 82 | :param forecast: Forecast values. Shape: batch, time_o 83 | :param target: Target values. Shape: batch, time_o 84 | :param mask: 0/1 mask.
Shape: batch, time_o 85 | :return: Loss value 86 | """ 87 | masep = t.mean(t.abs(insample[:, freq:] - insample[:, :-freq]), dim=1) 88 | masked_masep_inv = divide_no_nan(mask, masep[:, None]) 89 | return t.mean(t.abs(target - forecast) * masked_masep_inv) 90 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | .pybuilder/ 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | # For a library or package, you might want to ignore these files since the code is 86 | # intended to run in multiple environments; otherwise, check them in: 87 | # .python-version 88 | 89 | # pipenv 90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 93 | # install all needed dependencies. 94 | #Pipfile.lock 95 | 96 | # poetry 97 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 98 | # This is especially recommended for binary packages to ensure reproducibility, and is more 99 | # commonly ignored for libraries. 100 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 101 | #poetry.lock 102 | 103 | # pdm 104 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 105 | #pdm.lock 106 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 107 | # in version control. 108 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 109 | .pdm.toml 110 | .pdm-python 111 | .pdm-build/ 112 | 113 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 114 | __pypackages__/ 115 | 116 | # Celery stuff 117 | celerybeat-schedule 118 | celerybeat.pid 119 | 120 | # SageMath parsed files 121 | *.sage.py 122 | 123 | # Environments 124 | .env 125 | .venv 126 | env/ 127 | venv/ 128 | ENV/ 129 | env.bak/ 130 | venv.bak/ 131 | 132 | # Spyder project settings 133 | .spyderproject 134 | .spyproject 135 | 136 | # Rope project settings 137 | .ropeproject 138 | 139 | # mkdocs documentation 140 | /site 141 | 142 | # mypy 143 | .mypy_cache/ 144 | .dmypy.json 145 | dmypy.json 146 | 147 | # Pyre type checker 148 | .pyre/ 149 | 150 | # pytype static type analyzer 151 | .pytype/ 152 | 153 | # Cython debug symbols 154 | cython_debug/ 155 | 156 | # PyCharm 157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 159 | # and can be added to the global gitignore or merged into this file. For a more nuclear 160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 161 | #.idea/ 162 | 163 | .DS_Store 164 | -------------------------------------------------------------------------------- /utils/tools.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | import torch 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | plt.switch_backend('agg') 9 | 10 | 11 | def adjust_learning_rate(optimizer, scheduler, epoch, args, printout=False): 12 | if args.lradj == 'type1': 13 | lr_adjust = {epoch: args.learning_rate * (0.5 ** ((epoch - 1) // 1))} 14 | elif args.lradj == 'type2': 15 | lr_adjust = { 16 | 2: 5e-5, 4: 1e-5, 6: 5e-6, 8: 1e-6, 17 | 10: 5e-7, 15: 1e-7, 20: 5e-8 18 | } 19 | elif args.lradj == 'type3': 20 | lr_adjust = {epoch: args.learning_rate if epoch < 3 else args.learning_rate * (0.9 ** ((epoch - 3) // 1))} 21 | elif args.lradj == 'TST': 22 | lr_adjust = {epoch: scheduler.get_last_lr()[0]} 23 | else: lr_adjust = {}  # unknown lradj type: leave the learning rate unchanged instead of raising NameError 24 | if epoch in lr_adjust: 25 | lr = lr_adjust[epoch] 26 | for param_group in optimizer.param_groups: 27 | param_group['lr'] = lr 28 | if printout: 29 | print('Updating learning rate to {}'.format(lr)) 30 | 31 | 32 | class EarlyStopping: 33 | def __init__(self, patience=7, verbose=False, delta=0): 34 | self.patience = patience 35 | self.verbose = verbose 36 | self.counter = 0 37 | self.best_score = None 38 | self.early_stop = False 39 | self.val_loss_min = np.inf 40 | self.delta = delta 41 | 42 | def __call__(self, val_loss, model, path): 43 | score = -val_loss 44 | if self.best_score is None: 45 | self.best_score = score 46 | self.save_checkpoint(val_loss, model, path) 47 | elif score < self.best_score + self.delta: 48 | self.counter += 1 49 | print(f'EarlyStopping counter: {self.counter} out of {self.patience}') 50 | if self.counter >= self.patience: 51 | self.early_stop = True 52 | else: 53 | self.best_score = score 54 | self.save_checkpoint(val_loss, model, path) 55 | self.counter = 0 56 | 57 | def save_checkpoint(self, val_loss, model, path): 58 | if self.verbose: 59 | print(f'Validation loss decreased ({self.val_loss_min:.6f} --> {val_loss:.6f}).
Saving model ...') 60 | torch.save(model.state_dict(), path + '/' + 'checkpoint.pth') 61 | self.val_loss_min = val_loss 62 | 63 | 64 | class dotdict(dict): 65 | """dot.notation access to dictionary attributes""" 66 | __getattr__ = dict.get 67 | __setattr__ = dict.__setitem__ 68 | __delattr__ = dict.__delitem__ 69 | 70 | 71 | class StandardScaler(): 72 | def __init__(self, mean, std): 73 | self.mean = mean 74 | self.std = std 75 | 76 | def transform(self, data): 77 | return (data - self.mean) / self.std 78 | 79 | def inverse_transform(self, data): 80 | return (data * self.std) + self.mean 81 | 82 | 83 | def visual(true, preds=None, name='./pic/test.pdf'): 84 | """ 85 | Results visualization 86 | """ 87 | plt.figure() 88 | plt.plot(true, label='GroundTruth', linewidth=2) 89 | if preds is not None: 90 | plt.plot(preds, label='Prediction', linewidth=2) 91 | plt.legend() 92 | plt.savefig(name, bbox_inches='tight') 93 | 94 | 95 | def adjustment(gt, pred): 96 | anomaly_state = False 97 | for i in range(len(gt)): 98 | if gt[i] == 1 and pred[i] == 1 and not anomaly_state: 99 | anomaly_state = True 100 | for j in range(i, 0, -1): 101 | if gt[j] == 0: 102 | break 103 | else: 104 | if pred[j] == 0: 105 | pred[j] = 1 106 | for j in range(i, len(gt)): 107 | if gt[j] == 0: 108 | break 109 | else: 110 | if pred[j] == 0: 111 | pred[j] = 1 112 | elif gt[i] == 0: 113 | anomaly_state = False 114 | if anomaly_state: 115 | pred[i] = 1 116 | return gt, pred 117 | 118 | 119 | def cal_accuracy(y_pred, y_true): 120 | return np.mean(y_pred == y_true) 121 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh1_0.35_TimeBridge_720.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=64, is_training=1, model_id='ETTh1_720_720', model='TimeBridge', data='ETTh1', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh1.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=720, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=24, enc_in=7, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=10, pct_start=0.2, learning_rate=0.0002, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh1_720_720_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl720_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7201 6 | val 2161 7 | test 2161 8 | iters: 100, epoch: 1 | loss: 3.6145418 9 | speed: 0.0394s/iter; left time: 437.0516s 10 | Epoch: 1 cost time: 4.299888372421265 11 | Epoch: 1, Steps: 112 | Train Loss: 3.8054716 Vali Loss: 1.4686054 Test Loss: 0.4432241 12 | Validation loss decreased (inf --> 1.468605). Saving model ... 
13 | iters: 100, epoch: 2 | loss: 3.6090555 14 | speed: 0.1587s/iter; left time: 1744.4715s 15 | Epoch: 2 cost time: 3.455003499984741 16 | Epoch: 2, Steps: 112 | Train Loss: 3.5882348 Vali Loss: 1.4986989 Test Loss: 0.4434247 17 | EarlyStopping counter: 1 out of 10 18 | iters: 100, epoch: 3 | loss: 3.5291891 19 | speed: 0.1720s/iter; left time: 1870.4922s 20 | Epoch: 3 cost time: 3.4705939292907715 21 | Epoch: 3, Steps: 112 | Train Loss: 3.5385366 Vali Loss: 1.4800526 Test Loss: 0.4467036 22 | EarlyStopping counter: 2 out of 10 23 | iters: 100, epoch: 4 | loss: 3.5853009 24 | speed: 0.1645s/iter; left time: 1770.9133s 25 | Epoch: 4 cost time: 3.410308837890625 26 | Epoch: 4, Steps: 112 | Train Loss: 3.5179224 Vali Loss: 1.4837873 Test Loss: 0.4479825 27 | EarlyStopping counter: 3 out of 10 28 | iters: 100, epoch: 5 | loss: 3.5186853 29 | speed: 0.1616s/iter; left time: 1721.0085s 30 | Epoch: 5 cost time: 3.459929943084717 31 | Epoch: 5, Steps: 112 | Train Loss: 3.5092355 Vali Loss: 1.4850301 Test Loss: 0.4509711 32 | EarlyStopping counter: 4 out of 10 33 | iters: 100, epoch: 6 | loss: 3.5239916 34 | speed: 0.1629s/iter; left time: 1717.2191s 35 | Epoch: 6 cost time: 3.4657349586486816 36 | Epoch: 6, Steps: 112 | Train Loss: 3.5035637 Vali Loss: 1.4829072 Test Loss: 0.4468767 37 | EarlyStopping counter: 5 out of 10 38 | iters: 100, epoch: 7 | loss: 3.5582209 39 | speed: 0.1657s/iter; left time: 1728.3095s 40 | Epoch: 7 cost time: 3.546143054962158 41 | Epoch: 7, Steps: 112 | Train Loss: 3.5011487 Vali Loss: 1.4800924 Test Loss: 0.4492346 42 | EarlyStopping counter: 6 out of 10 43 | iters: 100, epoch: 8 | loss: 3.4794350 44 | speed: 0.1632s/iter; left time: 1683.7074s 45 | Epoch: 8 cost time: 3.439732551574707 46 | Epoch: 8, Steps: 112 | Train Loss: 3.4998084 Vali Loss: 1.4789323 Test Loss: 0.4509532 47 | EarlyStopping counter: 7 out of 10 48 | iters: 100, epoch: 9 | loss: 3.4677410 49 | speed: 0.1649s/iter; left time: 1682.7201s 50 | Epoch: 9 cost time: 3.5084006786346436 51 | Epoch: 9, Steps: 112 | Train Loss: 3.4986396 Vali Loss: 1.4809524 Test Loss: 0.4496857 52 | EarlyStopping counter: 8 out of 10 53 | iters: 100, epoch: 10 | loss: 3.5009406 54 | speed: 0.1634s/iter; left time: 1649.5851s 55 | Epoch: 10 cost time: 3.436768054962158 56 | Epoch: 10, Steps: 112 | Train Loss: 3.4983868 Vali Loss: 1.4803524 Test Loss: 0.4492817 57 | EarlyStopping counter: 9 out of 10 58 | iters: 100, epoch: 11 | loss: 3.5544174 59 | speed: 0.1603s/iter; left time: 1599.4752s 60 | Epoch: 11 cost time: 3.4803216457366943 61 | Epoch: 11, Steps: 112 | Train Loss: 3.4980846 Vali Loss: 1.4798211 Test Loss: 0.4492482 62 | EarlyStopping counter: 10 out of 10 63 | Early stopping 64 | >>>>>>>testing : ETTh1_720_720_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl720_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 65 | test 2161 66 | test shape: (2161, 1, 720, 7) (2161, 1, 720, 7) 67 | test shape: (2161, 720, 7) (2161, 720, 7) 68 | mse:0.44322431087493896, mae:0.4685737192630768 69 | rmse:0.6657509207725525, mape:0.7409285306930542, mspe:41985.12109375 70 | -------------------------------------------------------------------------------- /utils/timefeatures.py: -------------------------------------------------------------------------------- 1 | # From: gluonts/src/gluonts/time_feature/_base.py 2 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"). 
5 | # You may not use this file except in compliance with the License. 6 | # A copy of the License is located at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # or in the "license" file accompanying this file. This file is distributed 11 | # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 12 | # express or implied. See the License for the specific language governing 13 | # permissions and limitations under the License. 14 | 15 | from typing import List 16 | 17 | import numpy as np 18 | import pandas as pd 19 | from pandas.tseries import offsets 20 | from pandas.tseries.frequencies import to_offset 21 | 22 | 23 | class TimeFeature: 24 | def __init__(self): 25 | pass 26 | 27 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 28 | pass 29 | 30 | def __repr__(self): 31 | return self.__class__.__name__ + "()" 32 | 33 | 34 | class SecondOfMinute(TimeFeature): 35 | """Second of minute encoded as value between [-0.5, 0.5]""" 36 | 37 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 38 | return index.second / 59.0 - 0.5 39 | 40 | 41 | class MinuteOfHour(TimeFeature): 42 | """Minute of hour encoded as value between [-0.5, 0.5]""" 43 | 44 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 45 | return index.minute / 59.0 - 0.5 46 | 47 | 48 | class HourOfDay(TimeFeature): 49 | """Hour of day encoded as value between [-0.5, 0.5]""" 50 | 51 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 52 | return index.hour / 23.0 - 0.5 53 | 54 | 55 | class DayOfWeek(TimeFeature): 56 | """Day of week encoded as value between [-0.5, 0.5]""" 57 | 58 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 59 | return index.dayofweek / 6.0 - 0.5 60 | 61 | 62 | class DayOfMonth(TimeFeature): 63 | """Day of month encoded as value between [-0.5, 0.5]""" 64 | 65 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 66 | return (index.day - 1) / 30.0 - 0.5 67 | 68 | 69 | class DayOfYear(TimeFeature): 70 | """Day of year encoded as value between [-0.5, 0.5]""" 71 | 72 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 73 | return (index.dayofyear - 1) / 365.0 - 0.5 74 | 75 | 76 | class MonthOfYear(TimeFeature): 77 | """Month of year encoded as value between [-0.5, 0.5]""" 78 | 79 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 80 | return (index.month - 1) / 11.0 - 0.5 81 | 82 | 83 | class WeekOfYear(TimeFeature): 84 | """Week of year encoded as value between [-0.5, 0.5]""" 85 | 86 | def __call__(self, index: pd.DatetimeIndex) -> np.ndarray: 87 | return (index.isocalendar().week - 1) / 52.0 - 0.5 88 | 89 | 90 | def time_features_from_frequency_str(freq_str: str) -> List[TimeFeature]: 91 | """ 92 | Returns a list of time features that will be appropriate for the given frequency string. 93 | Parameters 94 | ---------- 95 | freq_str 96 | Frequency string of the form [multiple][granularity] such as "12H", "5min", "1D" etc.
97 | """ 98 | 99 | features_by_offsets = { 100 | offsets.YearEnd: [], 101 | offsets.QuarterEnd: [MonthOfYear], 102 | offsets.MonthEnd: [MonthOfYear], 103 | offsets.Week: [DayOfMonth, WeekOfYear], 104 | offsets.Day: [DayOfWeek, DayOfMonth, DayOfYear], 105 | offsets.BusinessDay: [DayOfWeek, DayOfMonth, DayOfYear], 106 | offsets.Hour: [HourOfDay, DayOfWeek, DayOfMonth, DayOfYear], 107 | offsets.Minute: [ 108 | MinuteOfHour, 109 | HourOfDay, 110 | DayOfWeek, 111 | DayOfMonth, 112 | DayOfYear, 113 | ], 114 | offsets.Second: [ 115 | SecondOfMinute, 116 | MinuteOfHour, 117 | HourOfDay, 118 | DayOfWeek, 119 | DayOfMonth, 120 | DayOfYear, 121 | ], 122 | } 123 | 124 | offset = to_offset(freq_str) 125 | 126 | for offset_type, feature_classes in features_by_offsets.items(): 127 | if isinstance(offset, offset_type): 128 | return [cls() for cls in feature_classes] 129 | 130 | supported_freq_msg = f""" 131 | Unsupported frequency {freq_str} 132 | The following frequencies are supported: 133 | Y - yearly 134 | alias: A 135 | M - monthly 136 | W - weekly 137 | D - daily 138 | B - business days 139 | H - hourly 140 | T - minutely 141 | alias: min 142 | S - secondly 143 | """ 144 | raise RuntimeError(supported_freq_msg) 145 | 146 | 147 | def time_features(dates, freq='h'): 148 | return np.vstack([feat(dates) for feat in time_features_from_frequency_str(freq)]) 149 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh1_0.35_TimeBridge_336.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=64, is_training=1, model_id='ETTh1_720_336', model='TimeBridge', data='ETTh1', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh1.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=336, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=24, enc_in=7, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=10, pct_start=0.2, learning_rate=0.0002, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh1_720_336_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl336_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7585 6 | val 2545 7 | test 2545 8 | iters: 100, epoch: 1 | loss: 2.5516586 9 | speed: 0.0393s/iter; left time: 460.2511s 10 | Epoch: 1 cost time: 4.45874810218811 11 | Epoch: 1, Steps: 118 | Train Loss: 2.6512798 Vali Loss: 1.2701318 Test Loss: 0.4214122 12 | Validation loss decreased (inf --> 1.270132). Saving model ... 
13 | iters: 100, epoch: 2 | loss: 2.4161377 14 | speed: 0.1865s/iter; left time: 2160.0292s 15 | Epoch: 2 cost time: 3.6089625358581543 16 | Epoch: 2, Steps: 118 | Train Loss: 2.4827252 Vali Loss: 1.2779117 Test Loss: 0.4147995 17 | EarlyStopping counter: 1 out of 10 18 | iters: 100, epoch: 3 | loss: 2.4907036 19 | speed: 0.1899s/iter; left time: 2177.5748s 20 | Epoch: 3 cost time: 3.6583104133605957 21 | Epoch: 3, Steps: 118 | Train Loss: 2.4451273 Vali Loss: 1.2767608 Test Loss: 0.4080904 22 | EarlyStopping counter: 2 out of 10 23 | iters: 100, epoch: 4 | loss: 2.3921599 24 | speed: 0.1916s/iter; left time: 2173.8303s 25 | Epoch: 4 cost time: 3.5489661693573 26 | Epoch: 4, Steps: 118 | Train Loss: 2.4291440 Vali Loss: 1.2757246 Test Loss: 0.4065511 27 | EarlyStopping counter: 3 out of 10 28 | iters: 100, epoch: 5 | loss: 2.4339244 29 | speed: 0.1822s/iter; left time: 2046.2044s 30 | Epoch: 5 cost time: 3.4707260131835938 31 | Epoch: 5, Steps: 118 | Train Loss: 2.4213660 Vali Loss: 1.2781680 Test Loss: 0.4093710 32 | EarlyStopping counter: 4 out of 10 33 | iters: 100, epoch: 6 | loss: 2.4520917 34 | speed: 0.1860s/iter; left time: 2066.8241s 35 | Epoch: 6 cost time: 3.678762435913086 36 | Epoch: 6, Steps: 118 | Train Loss: 2.4174211 Vali Loss: 1.2695169 Test Loss: 0.4072302 37 | Validation loss decreased (1.270132 --> 1.269517). Saving model ... 38 | iters: 100, epoch: 7 | loss: 2.3720160 39 | speed: 0.1843s/iter; left time: 2025.5407s 40 | Epoch: 7 cost time: 3.7271993160247803 41 | Epoch: 7, Steps: 118 | Train Loss: 2.4147808 Vali Loss: 1.2719394 Test Loss: 0.4077338 42 | EarlyStopping counter: 1 out of 10 43 | iters: 100, epoch: 8 | loss: 2.4296720 44 | speed: 0.1856s/iter; left time: 2018.3143s 45 | Epoch: 8 cost time: 3.6145553588867188 46 | Epoch: 8, Steps: 118 | Train Loss: 2.4131165 Vali Loss: 1.2729244 Test Loss: 0.4077517 47 | EarlyStopping counter: 2 out of 10 48 | iters: 100, epoch: 9 | loss: 2.4249954 49 | speed: 0.1900s/iter; left time: 2044.2605s 50 | Epoch: 9 cost time: 3.622375965118408 51 | Epoch: 9, Steps: 118 | Train Loss: 2.4129466 Vali Loss: 1.2733946 Test Loss: 0.4079075 52 | EarlyStopping counter: 3 out of 10 53 | iters: 100, epoch: 10 | loss: 2.4351699 54 | speed: 0.1860s/iter; left time: 1979.0376s 55 | Epoch: 10 cost time: 3.541475772857666 56 | Epoch: 10, Steps: 118 | Train Loss: 2.4127650 Vali Loss: 1.2709397 Test Loss: 0.4077171 57 | EarlyStopping counter: 4 out of 10 58 | iters: 100, epoch: 11 | loss: 2.4749789 59 | speed: 0.1864s/iter; left time: 1960.8382s 60 | Epoch: 11 cost time: 3.626129388809204 61 | Epoch: 11, Steps: 118 | Train Loss: 2.4123556 Vali Loss: 1.2714111 Test Loss: 0.4077185 62 | EarlyStopping counter: 5 out of 10 63 | iters: 100, epoch: 12 | loss: 2.3532147 64 | speed: 0.1836s/iter; left time: 1910.2011s 65 | Epoch: 12 cost time: 3.597024440765381 66 | Epoch: 12, Steps: 118 | Train Loss: 2.4123026 Vali Loss: 1.2755030 Test Loss: 0.4076676 67 | EarlyStopping counter: 6 out of 10 68 | iters: 100, epoch: 13 | loss: 2.4270399 69 | speed: 0.1840s/iter; left time: 1892.5001s 70 | Epoch: 13 cost time: 3.6534698009490967 71 | Epoch: 13, Steps: 118 | Train Loss: 2.4119440 Vali Loss: 1.2727399 Test Loss: 0.4076766 72 | EarlyStopping counter: 7 out of 10 73 | iters: 100, epoch: 14 | loss: 2.4467831 74 | speed: 0.1803s/iter; left time: 1832.8344s 75 | Epoch: 14 cost time: 3.5257489681243896 76 | Epoch: 14, Steps: 118 | Train Loss: 2.4123333 Vali Loss: 1.2714875 Test Loss: 0.4076791 77 | EarlyStopping counter: 8 out of 10 78 | iters: 100, epoch: 15 | 
loss: 2.3660045 79 | speed: 0.1831s/iter; left time: 1840.0315s 80 | Epoch: 15 cost time: 3.5869367122650146 81 | Epoch: 15, Steps: 118 | Train Loss: 2.4118556 Vali Loss: 1.2757784 Test Loss: 0.4076750 82 | EarlyStopping counter: 9 out of 10 83 | iters: 100, epoch: 16 | loss: 2.4090843 84 | speed: 0.1884s/iter; left time: 1870.7882s 85 | Epoch: 16 cost time: 3.5300629138946533 86 | Epoch: 16, Steps: 118 | Train Loss: 2.4121343 Vali Loss: 1.2714771 Test Loss: 0.4076740 87 | EarlyStopping counter: 10 out of 10 88 | Early stopping 89 | >>>>>>>testing : ETTh1_720_336_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl336_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 90 | test 2545 91 | test shape: (2545, 1, 336, 7) (2545, 1, 336, 7) 92 | test shape: (2545, 336, 7) (2545, 336, 7) 93 | mse:0.4072302579879761, mae:0.4267895519733429 94 | rmse:0.6381459832191467, mape:0.695513904094696, mspe:35173.9609375 95 | -------------------------------------------------------------------------------- /layers/Transformer_EncDec.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import math 3 | 4 | import torch 5 | import torch.nn as nn 6 | import torch.nn.functional as F 7 | from einops import rearrange 8 | 9 | 10 | class TSEncoder(nn.Module): 11 | def __init__(self, attn_layers): 12 | super(TSEncoder, self).__init__() 13 | self.attn_layers = nn.ModuleList(attn_layers) 14 | 15 | def forward(self, x, attn_mask=None, tau=None, delta=None): 16 | # x [B, L, D] 17 | attns = [] 18 | for attn_layer in self.attn_layers: 19 | x, attn = attn_layer(x, attn_mask=attn_mask, tau=tau, delta=delta) 20 | attns.append(attn) 21 | return x, attns 22 | 23 | 24 | def PeriodNorm(x, period_len=6): 25 | if len(x.shape) == 3: 26 | x = x.unsqueeze(-2) 27 | b, c, n, t = x.shape 28 | x_patch = [x[..., period_len - 1 - i:-i + t] for i in range(0, period_len)] 29 | x_patch = torch.stack(x_patch, dim=-1) 30 | 31 | mean = x_patch.mean(4) 32 | mean = F.pad(mean.reshape(b * c, n, -1), 33 | mode='replicate', pad=(period_len - 1, 0)).reshape(b, c, n, -1) 34 | out = x - mean 35 | return out.squeeze(-2) 36 | 37 | 38 | class IntAttention(nn.Module): 39 | def __init__(self, attention, d_model, d_ff=None, stable_len=8, attn_map=False, 40 | dropout=0.1, activation="relu", stable=True, enc_in=None): 41 | super(IntAttention, self).__init__() 42 | self.stable = stable 43 | self.stable_len = stable_len 44 | self.attn_map = attn_map 45 | d_ff = d_ff or 4 * d_model 46 | self.attention = attention 47 | 48 | self.fc1 = nn.Linear(d_model, d_ff) 49 | self.fc2 = nn.Linear(d_ff, d_model) 50 | self.norm1 = nn.LayerNorm(d_model) 51 | self.norm2 = nn.LayerNorm(d_model) 52 | self.dropout = nn.Dropout(dropout) 53 | self.activation = F.relu if activation == "relu" else F.gelu 54 | 55 | def forward(self, x, attn_mask=None, tau=None, delta=None): 56 | new_x = self.temporal_attn(x) 57 | x = x + self.dropout(new_x) 58 | 59 | y = x = self.norm1(x) 60 | y = self.dropout(self.activation(self.fc1(y))) 61 | y = self.dropout(self.fc2(y)) 62 | 63 | return self.norm2(x + y), None 64 | 65 | def temporal_attn(self, x): 66 | b, c, n, d = x.shape 67 | new_x = x.reshape(-1, n, d) 68 | 69 | qk = new_x 70 | if self.stable: 71 | with torch.no_grad(): 72 | qk = PeriodNorm(new_x, self.stable_len) 73 | new_x = self.attention(qk, qk, new_x)[0] 74 | new_x = new_x.reshape(b, c, n, d) 75 | return new_x 76 | 77 | 78 | class PatchSampling(nn.Module): 79 | def __init__(self, attention, d_model, d_ff=None, dropout=0.1, 
activation="relu", 80 | in_p=30, out_p=4, stable=False, stable_len=8): 81 | super(PatchSampling, self).__init__() 82 | 83 | d_ff = d_ff or 4 * d_model 84 | self.in_p = in_p 85 | self.out_p = out_p 86 | self.stable = stable 87 | self.stable_len = stable_len 88 | 89 | self.attention = attention 90 | self.conv1 = nn.Conv1d( 91 | self.in_p, self.out_p, 1, 1, 0, bias=False) 92 | self.conv2 = nn.Conv1d( 93 | self.out_p + 1, self.out_p, 1, 1, 0, bias=False) 94 | 95 | self.fc1 = nn.Linear(d_model, d_ff) 96 | self.fc2 = nn.Linear(d_ff, d_model) 97 | self.norm1 = nn.LayerNorm(d_model) 98 | self.norm2 = nn.LayerNorm(d_model) 99 | self.dropout = nn.Dropout(dropout) 100 | self.activation = F.relu if activation == "relu" else F.gelu 101 | 102 | def forward(self, x, attn_mask=None, tau=None, delta=None): 103 | new_x = self.down_attn(x) 104 | y = x = self.norm1(new_x) 105 | 106 | y = self.dropout(self.activation(self.fc1(y))) 107 | y = self.dropout(self.fc2(y)) 108 | 109 | return self.norm2(x + y), None 110 | 111 | def down_attn(self, x): 112 | b, c, n, d = x.shape 113 | x = x.reshape(-1, n, d) 114 | new_x = self.conv1(x) 115 | new_x = self.conv2(torch.cat( 116 | [new_x, x.mean(-2, keepdim=True)], dim=-2)) + new_x 117 | new_x = self.attention(new_x, x, x)[0] + self.dropout(new_x) 118 | return new_x.reshape(b, c, -1, d) 119 | 120 | 121 | class CointAttention(nn.Module): 122 | def __init__(self, attention, d_model, d_ff=None, axial=True, stable_len=8, 123 | dropout=0.1, activation="relu", stable=True, enc_in=None, ): 124 | super(CointAttention, self).__init__() 125 | 126 | self.stable = stable 127 | self.stable_len = stable_len 128 | d_ff = d_ff or 4 * d_model 129 | 130 | self.axial_func = axial 131 | self.attention1 = attention 132 | self.attention2 = copy.deepcopy(attention) 133 | 134 | self.num_rc = math.ceil((enc_in + 4) ** 0.5) 135 | self.pad_ch = nn.ConstantPad1d( 136 | (0, self.num_rc ** 2 - (enc_in + 4)), 0) 137 | 138 | self.fc1 = nn.Linear(d_model, d_ff) 139 | self.fc2 = nn.Linear(d_ff, d_model) 140 | self.norm0 = nn.LayerNorm(d_model) 141 | self.norm1 = nn.LayerNorm(d_model) 142 | self.norm2 = nn.LayerNorm(d_model) 143 | self.dropout = nn.Dropout(dropout) 144 | self.activation = F.relu if activation == "relu" else F.gelu 145 | 146 | def forward(self, x, attn_mask=None, tau=None, delta=None): 147 | if self.axial_func is True: 148 | new_x = self.axial_attn(x) 149 | else: 150 | new_x = self.full_attn(x) 151 | x = x + self.dropout(new_x) 152 | 153 | y = x = self.norm1(x) 154 | y = self.dropout(self.activation(self.fc1(y))) 155 | y = self.dropout(self.fc2(y)) 156 | 157 | return self.norm2(x + y), None 158 | 159 | def axial_attn(self, x): 160 | b, c, n, d = x.shape 161 | 162 | new_x = rearrange(x, 'b c n d -> (b n) c d') 163 | new_x = (self.pad_ch(new_x.transpose(-1, -2)) 164 | .transpose(-1, -2).reshape(-1, self.num_rc, d)) 165 | new_x = self.attention1(new_x, new_x, new_x)[0] 166 | new_x = rearrange(new_x, '(b r) c d -> (b c) r d', r=self.num_rc) 167 | new_x = self.attention2(new_x, new_x, new_x)[0] + new_x 168 | 169 | new_x = rearrange(new_x, '(b n c) r d -> b (r c) n d', b=b, n=n) 170 | return new_x[:, :c, ...] 
171 | 172 | def full_attn(self, x): 173 | b, c, n, d = x.shape 174 | new_x = rearrange(x, 'b c n d -> (b n) c d') 175 | new_x = self.attention1(new_x, new_x, new_x)[0] 176 | new_x = rearrange(new_x, '(b n) c d -> b c n d', b=b, n=n) 177 | return new_x[:, :c, :] 178 | -------------------------------------------------------------------------------- /_logs/result_long_term_forecast.txt: -------------------------------------------------------------------------------- 1 | ETTh1_720_96_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl96_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 2 | mse:0.35381951928138733, mae:0.39041218161582947, rmse:0.5948272943496704, mape:0.6648303270339966, mspe:32930.84375 3 | 4 | ETTh1_720_192_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl192_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 5 | mse:0.38928985595703125, mae:0.41404005885124207, rmse:0.6239309906959534, mape:0.684104323387146, mspe:33910.703125 6 | 7 | ETTh1_720_336_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl336_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 8 | mse:0.4072302579879761, mae:0.4267895519733429, rmse:0.6381459832191467, mape:0.695513904094696, mspe:35173.9609375 9 | 10 | ETTh1_720_720_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl720_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 11 | mse:0.44322431087493896, mae:0.4685737192630768, rmse:0.6657509207725525, mape:0.7409285306930542, mspe:41985.12109375 12 | 13 | ETTh2_720_96_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl96_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 14 | mse:0.26756221055984497, mae:0.32869112491607666, rmse:0.517264187335968, mape:0.4871687889099121, mspe:271.95086669921875 15 | 16 | ETTh2_720_192_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl192_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 17 | mse:0.3351455628871918, mae:0.37173473834991455, rmse:0.5789175629615784, mape:0.5247182846069336, mspe:276.55609130859375 18 | 19 | ETTh2_720_336_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl336_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 20 | mse:0.36790889501571655, mae:0.4007733166217804, rmse:0.6065549254417419, mape:0.5538084506988525, mspe:387.6688232421875 21 | 22 | ETTh2_720_720_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl720_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 23 | mse:0.39865219593048096, mae:0.43189573287963867, rmse:0.6313890814781189, mape:0.6157873868942261, mspe:575.7078857421875 24 | 25 | ETTm1_720_96_TimeBridge_ETTm1_bs64_ftM_sl720_ll48_pl96_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 26 | mse:0.2770201563835144, mae:0.3319298028945923, rmse:0.5263270139694214, mape:0.6105963587760925, mspe:345.5841369628906 27 | 28 | ETTm1_720_192_TimeBridge_ETTm1_bs64_ftM_sl720_ll48_pl192_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 29 | mse:0.32123157382011414, mae:0.3642342686653137, rmse:0.5667729377746582, mape:0.642461359500885, mspe:381.2975158691406 30 | 31 | ETTm1_720_336_TimeBridge_ETTm1_bs64_ftM_sl720_ll48_pl336_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 32 | mse:0.36463049054145813, mae:0.3918009400367737, rmse:0.6038464307785034, mape:0.6632248759269714, mspe:439.5918884277344 33 | 34 | ETTm1_720_720_TimeBridge_ETTm1_bs64_ftM_sl720_ll48_pl720_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 35 | mse:0.41575607657432556, mae:0.4178393483161926, rmse:0.6447914838790894, mape:0.6867216229438782, mspe:501.638671875 36 | 37 | ETTm2_720_96_TimeBridge_ETTm2_bs64_ftM_sl720_ll48_pl96_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 38 | mse:0.15730047225952148, mae:0.24361155927181244, rmse:0.3966112434864044, mape:0.3923564553260803, mspe:189.11209106445312 39 | 40 |
ETTm2_720_192_TimeBridge_ETTm2_bs64_ftM_sl720_ll48_pl192_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 41 | mse:0.21371451020240784, mae:0.2832432985305786, rmse:0.4622926712036133, mape:0.43187186121940613, mspe:190.5615692138672 42 | 43 | ETTm2_720_336_TimeBridge_ETTm2_bs64_ftM_sl720_ll48_pl336_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 44 | mse:0.26759645342826843, mae:0.3193214535713196, rmse:0.5172972679138184, mape:0.46421098709106445, mspe:206.6217041015625 45 | 46 | ETTm2_720_720_TimeBridge_ETTm2_bs64_ftM_sl720_ll48_pl720_dm64_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0 47 | mse:0.34888410568237305, mae:0.37606534361839294, rmse:0.5906641483306885, mape:0.5190503597259521, mspe:249.62559509277344 48 | 49 | weather_720_96_TimeBridge_custom_bs32_ftM_sl720_ll48_pl96_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 50 | mse:0.1429259032011032, mae:0.18284930288791656, rmse:0.3780554234981537, mape:0.4395139515399933, mspe:12910708.0 51 | 52 | weather_720_192_TimeBridge_custom_bs32_ftM_sl720_ll48_pl192_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 53 | mse:0.18666963279247284, mae:0.22510170936584473, rmse:0.4320528209209442, mape:0.5002909898757935, mspe:16775707.0 54 | 55 | weather_720_336_TimeBridge_custom_bs32_ftM_sl720_ll48_pl336_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 56 | mse:0.23951725661754608, mae:0.26764458417892456, rmse:0.4894050061702728, mape:0.5587954521179199, mspe:20482008.0 57 | 58 | weather_720_720_TimeBridge_custom_bs32_ftM_sl720_ll48_pl720_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 59 | mse:0.3108551502227783, mae:0.32104259729385376, rmse:0.5575438737869263, mape:0.6310985088348389, mspe:23963216.0 60 | 61 | Solar_720_96_TimeBridge_Solar_bs32_ftM_sl720_ll48_pl96_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 62 | mse:0.1514563262462616, mae:0.19618119299411774, rmse:0.3891738951206207, mape:0.29666686058044434, mspe:29272.6796875 63 | 64 | Solar_720_192_TimeBridge_Solar_bs32_ftM_sl720_ll48_pl192_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 65 | mse:0.18247534334659576, mae:0.2260325849056244, rmse:0.4271713197231293, mape:0.32486703991889954, mspe:30212.4296875 66 | 67 | Solar_720_336_TimeBridge_Solar_bs32_ftM_sl720_ll48_pl336_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 68 | mse:0.1885203719139099, mae:0.22582794725894928, rmse:0.4341893196105957, mape:0.3179113566875458, mspe:42263.18359375 69 | 70 | Solar_720_720_TimeBridge_Solar_bs32_ftM_sl720_ll48_pl720_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0 71 | mse:0.2022007554769516, mae:0.23839934170246124, rmse:0.44966739416122437, mape:0.32999518513679504, mspe:40161.46484375 72 | 73 | electricity_720_96_TimeBridge_custom_bs16_ftM_sl720_ll48_pl96_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0 74 | mse:0.1238083764910698, mae:0.2160072773694992, rmse:0.35186415910720825, mape:0.41729336977005005, mspe:475630.75 75 | 76 | electricity_720_192_TimeBridge_custom_bs16_ftM_sl720_ll48_pl192_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0 77 | mse:0.14436645805835724, mae:0.23861069977283478, rmse:0.37995585799217224, mape:0.4475714862346649, mspe:672516.9375 78 | 79 | electricity_720_336_TimeBridge_custom_bs16_ftM_sl720_ll48_pl336_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0 80 | mse:0.16055546700954437, mae:0.2561715841293335, rmse:0.40069374442100525, mape:0.47098711133003235, mspe:420036.6875 81 | 82 | electricity_720_720_TimeBridge_custom_bs16_ftM_sl720_ll48_pl720_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0 83 | mse:0.17158843576908112, mae:0.2654167711734772, rmse:0.4142323434352875, mape:0.4833582639694214,
mspe:409737.9375 84 | 85 | traffic_720_96_TimeBridge_custom_bs4_ftM_sl720_ll48_pl96_dm512_nh64_ial1_pdl1_cal3_df512_ebtimeF_Exp_0 86 | mse:0.31944549083709717, mae:0.22521354258060455, rmse:0.5651950836181641, mape:0.3984883427619934, mspe:98316.796875 87 | 88 | traffic_720_192_TimeBridge_custom_bs4_ftM_sl720_ll48_pl192_dm512_nh64_ial1_pdl1_cal3_df512_ebtimeF_Exp_0 89 | mse:0.33434930443763733, mae:0.2372569441795349, rmse:0.5782294273376465, mape:0.4178156852722168, mspe:596665.6875 90 | 91 | traffic_720_336_TimeBridge_custom_bs4_ftM_sl720_ll48_pl336_dm512_nh64_ial1_pdl1_cal3_df512_ebtimeF_Exp_0 92 | mse:0.34855708479881287, mae:0.24403733015060425, rmse:0.590387225151062, mape:0.4258538782596588, mspe:488699.5625 93 | 94 | traffic_720_720_TimeBridge_custom_bs4_ftM_sl720_ll48_pl720_dm512_nh64_ial1_pdl1_cal3_df512_ebtimeF_Exp_0 95 | mse:0.3789776861667633, mae:0.25874513387680054, rmse:0.6156116127967834, mape:0.4450819194316864, mspe:634383.375 96 | 97 | -------------------------------------------------------------------------------- /layers/SelfAttention_Family.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | import matplotlib.pyplot as plt 4 | import torch 5 | import torch.nn as nn 6 | import torch.nn.functional as F 7 | import numpy as np 8 | from math import sqrt 9 | from utils.masking import TriangularCausalMask, ProbMask 10 | from reformer_pytorch import LSHSelfAttention 11 | from einops import rearrange 12 | 13 | 14 | class FullAttention(nn.Module): 15 | def __init__(self, mask_flag=True, factor=5, scale=None, attention_dropout=0.1, 16 | output_attention=False, attn_map=False): 17 | super(FullAttention, self).__init__() 18 | self.scale = scale 19 | self.attn_map = attn_map 20 | self.alpha = nn.Parameter(torch.rand(1)) 21 | self.mask_flag = mask_flag 22 | self.output_attention = output_attention 23 | self.dropout = nn.Dropout(attention_dropout) 24 | 25 | def forward(self, queries, keys, values, attn_mask, tau=None, delta=None, long_term=True): 26 | B, L, H, E = queries.shape 27 | _, S, _, D = values.shape 28 | scale = self.scale or 1. / sqrt(E)  # default softmax temperature: 1 / sqrt(head_dim)
29 | 30 | scores = torch.einsum("blhe,bshe->bhls", queries, keys) 31 | 32 | if self.mask_flag: 33 | if attn_mask is None: 34 | attn_mask = TriangularCausalMask(B, L, device=queries.device) 35 | 36 | scores.masked_fill_(attn_mask.mask, -np.inf) 37 | 38 | attn_map = torch.softmax(scale * scores, dim=-1) 39 | A = self.dropout(attn_map) 40 | if self.attn_map is True: 41 | heat_map = attn_map[:, ...].max(1)[0] 42 | heat_map = torch.clamp_max(heat_map, 0.15) 43 | # heat_map = torch.softmax(heat_map, -1) 44 | for b in range(heat_map.shape[0]): 45 | # for c in range(heat_map.shape[1]): 46 | h_map = heat_map[b, ...].detach().cpu().numpy() 47 | # plt.savefig(heat_map, f'{b} sample {c} channel') 48 | plt.figure(figsize=(10, 8), dpi=200) 49 | plt.imshow(h_map, cmap='Reds', interpolation='nearest') 50 | plt.colorbar() 51 | 52 | # use a serif font (Times New Roman) for the axis labels 53 | plt.rcParams['font.family'] = 'serif' 54 | plt.rcParams['font.serif'] = ['Times New Roman'] 55 | plt.xlabel('Key Channel', fontsize=14) 56 | plt.ylabel('Query Channel', fontsize=14) 57 | 58 | # set the title (left disabled) 59 | # plt.title('Long-Term Correlations', fontdict={'weight': 'bold'}, fontsize=16, color='green') 60 | 61 | plt.tight_layout() 62 | plt.savefig(f'./stable map/{b}_sample.png') 63 | # plt.savefig(f'./non_stable map/{b}_sample.png') 64 | plt.close() 65 | V = torch.einsum("bhls,bshd->blhd", A, values) 66 | 67 | if self.output_attention: 68 | return V.contiguous(), A 69 | else: 70 | return V.contiguous(), None 71 | 72 | 73 | class AttentionLayer(nn.Module): 74 | def __init__(self, attention, d_model, n_heads, d_keys=None, 75 | d_values=None): 76 | super(AttentionLayer, self).__init__() 77 | 78 | d_keys = d_keys or (d_model // n_heads) 79 | d_values = d_values or (d_model // n_heads) 80 | 81 | self.inner_attention = attention 82 | self.query_projection = nn.Linear(d_model, d_keys * n_heads) 83 | self.key_projection = nn.Linear(d_model, d_keys * n_heads) 84 | self.value_projection = nn.Linear(d_model, d_values * n_heads) 85 | self.out_projection = nn.Linear(d_values * n_heads, d_model) 86 | self.n_heads = n_heads 87 | 88 | def forward(self, queries, keys, values, attn_mask, tau=None, delta=None): 89 | B, L, _ = queries.shape 90 | _, S, _ = keys.shape 91 | H = self.n_heads 92 | 93 | if self.inner_attention is None: 94 | return self.out_projection(self.value_projection(values)), None 95 | queries = self.query_projection(queries).view(B, L, H, -1) 96 | keys = self.key_projection(keys).view(B, S, H, -1) 97 | values = self.value_projection(values).view(B, S, H, -1) 98 | 99 | out, attn = self.inner_attention( 100 | queries, 101 | keys, 102 | values, 103 | attn_mask, 104 | tau=tau, 105 | delta=delta 106 | ) 107 | out = out.view(B, L, -1) 108 | 109 | return self.out_projection(out), attn 110 | 111 | 112 | class TSMixer(nn.Module): 113 | def __init__(self, attention, d_model, n_heads): 114 | super(TSMixer, self).__init__() 115 | 116 | self.attention = attention 117 | self.q_proj = nn.Linear(d_model, d_model) 118 | self.k_proj = nn.Linear(d_model, d_model) 119 | self.v_proj = nn.Linear(d_model, d_model) 120 | self.out = nn.Linear(d_model, d_model) 121 | self.n_heads = n_heads 122 | 123 | def forward(self, q, k, v, res=False, attn=None): 124 | B, L, _ = q.shape 125 | _, S, _ = k.shape 126 | H = self.n_heads 127 | 128 | q = self.q_proj(q).reshape(B, L, H, -1) 129 | k = self.k_proj(k).reshape(B, S, H, -1) 130 | v = self.v_proj(v).reshape(B, S, H, -1) 131 | 132 | out, attn = self.attention( 133 | q, k, v, 134 | res=res, attn=attn 135 | ) 136 | out = out.view(B, L, -1)
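# (annotation) the view above merges the heads back: [B, L, H, d_head] -> [B, L, H * d_head];
# the following self.out projection then mixes information across heads.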
137 | 138 | return self.out(out), attn 139 | 140 | 141 | class ResAttention(nn.Module): 142 | def __init__(self, attention_dropout=0.1, scale=None, attn_map=False, nst=False): 143 | super(ResAttention, self).__init__() 144 | 145 | self.nst = nst 146 | self.scale = scale 147 | self.attn_map = attn_map 148 | self.dropout = nn.Dropout(attention_dropout) 149 | 150 | def forward(self, queries, keys, values, res=False, attn=None): 151 | B, L, H, E = queries.shape 152 | _, S, _, D = values.shape 153 | scale = self.scale or 1. / sqrt(E) 154 | 155 | scores = torch.einsum("blhe,bshe->bhls", queries, keys) 156 | attn_map = torch.softmax(scale * scores, dim=-1) 157 | if self.attn_map: 158 | heat_map = attn_map.reshape(32, -1, H, L, S) # NOTE: assumes batch_size == 32; splits the flattened (batch * channel) dim for plotting 159 | for b in range(heat_map.shape[0]): 160 | for c in range(heat_map.shape[1]): 161 | h_map = heat_map[b, c, 0, ...].detach().cpu().numpy() 162 | # plt.savefig(heat_map, f'{b} sample {c} channel') 163 | 164 | plt.figure(figsize=(10, 8), dpi=200) 165 | plt.imshow(h_map, cmap='Reds', interpolation='nearest') 166 | plt.colorbar() 167 | 168 | # Set the font for the X and Y axis labels 169 | plt.rcParams['font.family'] = 'serif' 170 | plt.rcParams['font.serif'] = ['Times New Roman'] 171 | plt.xlabel('Key Time Patch', fontsize=14) 172 | plt.ylabel('Query Time Patch', fontsize=14) 173 | plt.tight_layout() 174 | if self.nst: 175 | plt.savefig(f'./time map/{b}_sample_{c}_channel.png') 176 | else: 177 | plt.savefig(f'./stable time map/{b}_sample_{c}_channel.png') 178 | # Close the current figure 179 | plt.close() 180 | A = self.dropout(attn_map) 181 | V = torch.einsum("bhls,bshd->blhd", A, values) 182 | 183 | return V.contiguous(), A 184 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh1_0.35_TimeBridge_96.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=64, is_training=1, model_id='ETTh1_720_96', model='TimeBridge', data='ETTh1', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh1.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=96, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=24, enc_in=7, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=10, pct_start=0.2, learning_rate=0.0002, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh1_720_96_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl96_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7825 6 | val 2785 7 | test 2785 8 | iters: 100, epoch: 1 | loss: 1.3977537 9 | speed: 0.0391s/iter; left time: 472.9154s 10 | Epoch: 1 cost time: 4.524451732635498 11 | Epoch: 1, Steps: 122 | Train Loss: 1.5369108 Vali Loss: 0.8561103 Test Loss: 0.3786033 12 | Validation loss decreased (inf --> 0.856110). Saving model ... 13 | iters: 100, epoch: 2 | loss: 1.4073610 14 | speed: 0.2040s/iter; left time: 2444.0362s 15 | Epoch: 2 cost time: 3.547607183456421 16 | Epoch: 2, Steps: 122 | Train Loss: 1.4196245 Vali Loss: 0.8125416 Test Loss: 0.3624643 17 | Validation loss decreased (0.856110 --> 0.812542). Saving model ...
18 | iters: 100, epoch: 3 | loss: 1.4115341 19 | speed: 0.1929s/iter; left time: 2286.6993s 20 | Epoch: 3 cost time: 3.5394225120544434 21 | Epoch: 3, Steps: 122 | Train Loss: 1.3885554 Vali Loss: 0.7939339 Test Loss: 0.3612912 22 | Validation loss decreased (0.812542 --> 0.793934). Saving model ... 23 | iters: 100, epoch: 4 | loss: 1.3876277 24 | speed: 0.1987s/iter; left time: 2331.1647s 25 | Epoch: 4 cost time: 3.69291090965271 26 | Epoch: 4, Steps: 122 | Train Loss: 1.3763528 Vali Loss: 0.7936728 Test Loss: 0.3565071 27 | Validation loss decreased (0.793934 --> 0.793673). Saving model ... 28 | iters: 100, epoch: 5 | loss: 1.3987665 29 | speed: 0.2063s/iter; left time: 2395.9017s 30 | Epoch: 5 cost time: 3.613831043243408 31 | Epoch: 5, Steps: 122 | Train Loss: 1.3704534 Vali Loss: 0.7882236 Test Loss: 0.3550438 32 | Validation loss decreased (0.793673 --> 0.788224). Saving model ... 33 | iters: 100, epoch: 6 | loss: 1.3294816 34 | speed: 0.1992s/iter; left time: 2289.4867s 35 | Epoch: 6 cost time: 3.638547658920288 36 | Epoch: 6, Steps: 122 | Train Loss: 1.3670183 Vali Loss: 0.7890787 Test Loss: 0.3540890 37 | EarlyStopping counter: 1 out of 10 38 | iters: 100, epoch: 7 | loss: 1.3532326 39 | speed: 0.1948s/iter; left time: 2214.6491s 40 | Epoch: 7 cost time: 3.655447006225586 41 | Epoch: 7, Steps: 122 | Train Loss: 1.3657062 Vali Loss: 0.7855979 Test Loss: 0.3544757 42 | Validation loss decreased (0.788224 --> 0.785598). Saving model ... 43 | iters: 100, epoch: 8 | loss: 1.3614080 44 | speed: 0.1965s/iter; left time: 2210.3999s 45 | Epoch: 8 cost time: 3.681514263153076 46 | Epoch: 8, Steps: 122 | Train Loss: 1.3651600 Vali Loss: 0.7850628 Test Loss: 0.3539521 47 | Validation loss decreased (0.785598 --> 0.785063). Saving model ... 48 | iters: 100, epoch: 9 | loss: 1.3276533 49 | speed: 0.2090s/iter; left time: 2324.7608s 50 | Epoch: 9 cost time: 3.7584197521209717 51 | Epoch: 9, Steps: 122 | Train Loss: 1.3643367 Vali Loss: 0.7837971 Test Loss: 0.3538872 52 | Validation loss decreased (0.785063 --> 0.783797). Saving model ... 53 | iters: 100, epoch: 10 | loss: 1.2916201 54 | speed: 0.1957s/iter; left time: 2153.4663s 55 | Epoch: 10 cost time: 3.70709490776062 56 | Epoch: 10, Steps: 122 | Train Loss: 1.3641363 Vali Loss: 0.7856680 Test Loss: 0.3538192 57 | EarlyStopping counter: 1 out of 10 58 | iters: 100, epoch: 11 | loss: 1.4181737 59 | speed: 0.2065s/iter; left time: 2247.3119s 60 | Epoch: 11 cost time: 3.630976676940918 61 | Epoch: 11, Steps: 122 | Train Loss: 1.3641978 Vali Loss: 0.7853438 Test Loss: 0.3538111 62 | EarlyStopping counter: 2 out of 10 63 | iters: 100, epoch: 12 | loss: 1.3753792 64 | speed: 0.1981s/iter; left time: 2131.3575s 65 | Epoch: 12 cost time: 3.716822624206543 66 | Epoch: 12, Steps: 122 | Train Loss: 1.3640310 Vali Loss: 0.7835444 Test Loss: 0.3538196 67 | Validation loss decreased (0.783797 --> 0.783544). Saving model ... 
68 | iters: 100, epoch: 13 | loss: 1.3572760 69 | speed: 0.2012s/iter; left time: 2140.5234s 70 | Epoch: 13 cost time: 3.5904133319854736 71 | Epoch: 13, Steps: 122 | Train Loss: 1.3640455 Vali Loss: 0.7857557 Test Loss: 0.3538187 72 | EarlyStopping counter: 1 out of 10 73 | iters: 100, epoch: 14 | loss: 1.2923026 74 | speed: 0.1937s/iter; left time: 2036.4212s 75 | Epoch: 14 cost time: 3.574458360671997 76 | Epoch: 14, Steps: 122 | Train Loss: 1.3640569 Vali Loss: 0.7851154 Test Loss: 0.3538097 77 | EarlyStopping counter: 2 out of 10 78 | iters: 100, epoch: 15 | loss: 1.3878305 79 | speed: 0.1968s/iter; left time: 2045.1916s 80 | Epoch: 15 cost time: 3.6788744926452637 81 | Epoch: 15, Steps: 122 | Train Loss: 1.3638579 Vali Loss: 0.7864899 Test Loss: 0.3538118 82 | EarlyStopping counter: 3 out of 10 83 | iters: 100, epoch: 16 | loss: 1.4072225 84 | speed: 0.1952s/iter; left time: 2005.2286s 85 | Epoch: 16 cost time: 3.6331217288970947 86 | Epoch: 16, Steps: 122 | Train Loss: 1.3635907 Vali Loss: 0.7837139 Test Loss: 0.3538117 87 | EarlyStopping counter: 4 out of 10 88 | iters: 100, epoch: 17 | loss: 1.3468357 89 | speed: 0.1933s/iter; left time: 1961.9272s 90 | Epoch: 17 cost time: 3.5957489013671875 91 | Epoch: 17, Steps: 122 | Train Loss: 1.3639691 Vali Loss: 0.7852625 Test Loss: 0.3538114 92 | EarlyStopping counter: 5 out of 10 93 | iters: 100, epoch: 18 | loss: 1.4003003 94 | speed: 0.1983s/iter; left time: 1988.2246s 95 | Epoch: 18 cost time: 3.5398037433624268 96 | Epoch: 18, Steps: 122 | Train Loss: 1.3640430 Vali Loss: 0.7874051 Test Loss: 0.3538116 97 | EarlyStopping counter: 6 out of 10 98 | iters: 100, epoch: 19 | loss: 1.4191221 99 | speed: 0.1910s/iter; left time: 1892.2953s 100 | Epoch: 19 cost time: 3.625312089920044 101 | Epoch: 19, Steps: 122 | Train Loss: 1.3639777 Vali Loss: 0.7850570 Test Loss: 0.3538115 102 | EarlyStopping counter: 7 out of 10 103 | iters: 100, epoch: 20 | loss: 1.3399694 104 | speed: 0.2025s/iter; left time: 1980.9202s 105 | Epoch: 20 cost time: 3.5809004306793213 106 | Epoch: 20, Steps: 122 | Train Loss: 1.3637189 Vali Loss: 0.7854183 Test Loss: 0.3538115 107 | EarlyStopping counter: 8 out of 10 108 | iters: 100, epoch: 21 | loss: 1.2809454 109 | speed: 0.1958s/iter; left time: 1891.6198s 110 | Epoch: 21 cost time: 3.62457537651062 111 | Epoch: 21, Steps: 122 | Train Loss: 1.3639209 Vali Loss: 0.7865660 Test Loss: 0.3538116 112 | EarlyStopping counter: 9 out of 10 113 | iters: 100, epoch: 22 | loss: 1.3175277 114 | speed: 0.2051s/iter; left time: 1956.0185s 115 | Epoch: 22 cost time: 3.6907379627227783 116 | Epoch: 22, Steps: 122 | Train Loss: 1.3637036 Vali Loss: 0.7860386 Test Loss: 0.3538116 117 | EarlyStopping counter: 10 out of 10 118 | Early stopping 119 | >>>>>>>testing : ETTh1_720_96_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl96_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 120 | test 2785 121 | test shape: (2785, 1, 96, 7) (2785, 1, 96, 7) 122 | test shape: (2785, 96, 7) (2785, 96, 7) 123 | mse:0.35381951928138733, mae:0.39041218161582947 124 | rmse:0.5948272943496704, mape:0.6648303270339966, mspe:32930.84375 125 | -------------------------------------------------------------------------------- /layers/Embed.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import math 3 | 4 | import torch 5 | import torch.nn as nn 6 | import torch.nn.functional as F 7 | from einops import rearrange 8 | 9 | 10 | class DataEmbedding_wo_pos(nn.Module): 11 | def __init__(self, 
c_in, d_model, embed_type='fixed', freq='h', dropout=0.1): 12 | super(DataEmbedding_wo_pos, self).__init__() 13 | 14 | self.value_embedding = TokenEmbedding(c_in=c_in, d_model=d_model) 15 | self.position_embedding = PositionalEmbedding(d_model=d_model) 16 | self.temporal_embedding = TemporalEmbedding(d_model=d_model, embed_type=embed_type, 17 | freq=freq) if embed_type != 'timeF' else TimeFeatureEmbedding( 18 | d_model=d_model, embed_type=embed_type, freq=freq) 19 | self.dropout = nn.Dropout(p=dropout) 20 | 21 | def forward(self, x, x_mark): 22 | x = self.value_embedding(x) + self.temporal_embedding(x_mark) 23 | return self.dropout(x) 24 | 25 | 26 | class DataEmbedding_wo_pos_temp(nn.Module): 27 | def __init__(self, c_in, d_model, embed_type='fixed', freq='h', dropout=0.1): 28 | super(DataEmbedding_wo_pos_temp, self).__init__() 29 | 30 | self.value_embedding = TokenEmbedding(c_in=c_in, d_model=d_model) 31 | self.position_embedding = PositionalEmbedding(d_model=d_model) 32 | self.temporal_embedding = TemporalEmbedding(d_model=d_model, embed_type=embed_type, 33 | freq=freq) if embed_type != 'timeF' else TimeFeatureEmbedding( 34 | d_model=d_model, embed_type=embed_type, freq=freq) 35 | self.dropout = nn.Dropout(p=dropout) 36 | 37 | def forward(self, x, x_mark): 38 | x = self.value_embedding(x) 39 | return self.dropout(x) 40 | 41 | 42 | class DataEmbedding_wo_temp(nn.Module): 43 | def __init__(self, c_in, d_model, embed_type='fixed', freq='h', dropout=0.1): 44 | super(DataEmbedding_wo_temp, self).__init__() 45 | 46 | self.value_embedding = TokenEmbedding(c_in=c_in, d_model=d_model) 47 | self.position_embedding = PositionalEmbedding(d_model=d_model) 48 | self.temporal_embedding = TemporalEmbedding(d_model=d_model, embed_type=embed_type, 49 | freq=freq) if embed_type != 'timeF' else TimeFeatureEmbedding( 50 | d_model=d_model, embed_type=embed_type, freq=freq) 51 | self.dropout = nn.Dropout(p=dropout) 52 | 53 | def forward(self, x, x_mark): 54 | x = self.value_embedding(x) + self.position_embedding(x) 55 | return self.dropout(x) 56 | 57 | 58 | class PositionalEmbedding(nn.Module): 59 | def __init__(self, d_model, max_len=5000): 60 | super(PositionalEmbedding, self).__init__() 61 | # Compute the positional encodings once in log space.
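# The lines below implement the standard sinusoidal encoding:
#   PE(pos, 2i)   = sin(pos / 10000^(2i / d_model))
#   PE(pos, 2i+1) = cos(pos / 10000^(2i / d_model))
# div_term = exp(-(2i) * ln(10000) / d_model) = 10000^(-2i / d_model),
# so multiplying it by position gives the sin/cos arguments directly.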
62 | pe = torch.zeros(max_len, d_model).float() 63 | pe.requires_grad = False 64 | 65 | position = torch.arange(0, max_len).float().unsqueeze(1) 66 | div_term = (torch.arange(0, d_model, 2).float() 67 | * -(math.log(10000.0) / d_model)).exp() 68 | 69 | pe[:, 0::2] = torch.sin(position * div_term) 70 | pe[:, 1::2] = torch.cos(position * div_term) 71 | 72 | pe = pe.unsqueeze(0) 73 | self.register_buffer('pe', pe) 74 | 75 | def forward(self, x): 76 | return self.pe[:, :x.size(1)] 77 | 78 | 79 | class TokenEmbedding(nn.Module): 80 | def __init__(self, c_in, d_model): 81 | super(TokenEmbedding, self).__init__() 82 | padding = 1 if torch.__version__ >= '1.5.0' else 2 83 | self.tokenConv = nn.Conv1d(in_channels=c_in, out_channels=d_model, 84 | kernel_size=3, padding=padding, padding_mode='circular', bias=False) 85 | for m in self.modules(): 86 | if isinstance(m, nn.Conv1d): 87 | nn.init.kaiming_normal_( 88 | m.weight, mode='fan_in', nonlinearity='leaky_relu') 89 | 90 | def forward(self, x): 91 | x = self.tokenConv(x.permute(0, 2, 1)).transpose(1, 2) 92 | return x 93 | 94 | 95 | class FixedEmbedding(nn.Module): 96 | def __init__(self, c_in, d_model): 97 | super(FixedEmbedding, self).__init__() 98 | 99 | w = torch.zeros(c_in, d_model).float() 100 | w.requires_grad = False 101 | 102 | position = torch.arange(0, c_in).float().unsqueeze(1) 103 | div_term = (torch.arange(0, d_model, 2).float() 104 | * -(math.log(10000.0) / d_model)).exp() 105 | 106 | w[:, 0::2] = torch.sin(position * div_term) 107 | w[:, 1::2] = torch.cos(position * div_term) 108 | 109 | self.emb = nn.Embedding(c_in, d_model) 110 | self.emb.weight = nn.Parameter(w, requires_grad=False) 111 | 112 | def forward(self, x): 113 | return self.emb(x).detach() 114 | 115 | 116 | class TemporalEmbedding(nn.Module): 117 | def __init__(self, d_model, embed_type='fixed', freq='h'): 118 | super(TemporalEmbedding, self).__init__() 119 | 120 | minute_size = 4 121 | hour_size = 24 122 | weekday_size = 7 123 | day_size = 32 124 | month_size = 13 125 | 126 | Embed = FixedEmbedding if embed_type == 'fixed' else nn.Embedding 127 | if freq == 't': 128 | self.minute_embed = Embed(minute_size, d_model) 129 | self.hour_embed = Embed(hour_size, d_model) 130 | self.weekday_embed = Embed(weekday_size, d_model) 131 | self.day_embed = Embed(day_size, d_model) 132 | self.month_embed = Embed(month_size, d_model) 133 | 134 | def forward(self, x): 135 | x = x.long() 136 | minute_x = self.minute_embed(x[:, :, 4]) if hasattr( 137 | self, 'minute_embed') else 0.
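# x_mark columns follow [month, day, weekday, hour, (minute)]; the minute
# embedding above only exists when freq == 't', hence the hasattr guard.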
138 | hour_x = self.hour_embed(x[:, :, 3]) 139 | weekday_x = self.weekday_embed(x[:, :, 2]) 140 | day_x = self.day_embed(x[:, :, 1]) 141 | month_x = self.month_embed(x[:, :, 0]) 142 | 143 | return hour_x + weekday_x + day_x + month_x + minute_x 144 | 145 | 146 | class TimeFeatureEmbedding(nn.Module): 147 | def __init__(self, d_model, embed_type='timeF', freq='h'): 148 | super(TimeFeatureEmbedding, self).__init__() 149 | 150 | freq_map = {'h': 4, 't': 5, 's': 6, 151 | 'm': 1, 'a': 1, 'w': 2, 'd': 3, 'b': 3} 152 | d_inp = freq_map[freq] 153 | self.embed = nn.Linear(d_inp, d_model, bias=False) 154 | 155 | def forward(self, x): 156 | return self.embed(x) 157 | 158 | 159 | def normalization(x, mean=None, std=None): 160 | if mean is not None and std is not None: 161 | return (x - mean) / std 162 | mean = x.mean(-1, keepdim=True).detach() 163 | x = x - mean 164 | std = torch.sqrt(torch.var(x, dim=-1, keepdim=True, unbiased=False) + 1e-5) 165 | x /= std 166 | return x, mean, std 167 | 168 | 169 | def denormalization(x, mean, std): 170 | B, D, L = x.shape 171 | if mean.shape[-1] == 1: 172 | x = x * (std[:, :D, 0].unsqueeze(-1).repeat(1, 1, L)) 173 | x = x + (mean[:, :D, 0].unsqueeze(-1).repeat(1, 1, L)) 174 | return x # stats are per-series scalars; return early so the full-length branch below is not applied a second time 175 | x = x * (std[:, :D, :L]) 176 | x = x + mean[:, :D, :L] 177 | return x 178 | 179 | 180 | 181 | class PatchEmbed(nn.Module): 182 | def __init__(self, args, num_p=1, d_model=None): 183 | super(PatchEmbed, self).__init__() 184 | self.num_p = num_p 185 | self.patch = args.seq_len // self.num_p 186 | self.d_model = args.d_model if d_model is None else d_model 187 | 188 | self.proj = nn.Sequential( 189 | nn.Linear(self.patch, self.d_model, False), 190 | nn.Dropout(args.dropout) 191 | ) 192 | 193 | def forward(self, x, x_mark): 194 | x = torch.cat([x, x_mark], dim=-1).transpose(-1, -2) 195 | x = self.proj(x.reshape(*x.shape[:-1], self.num_p, self.patch)) 196 | return x 197 | -------------------------------------------------------------------------------- /scripts/TimeBridge.sh: -------------------------------------------------------------------------------- 1 | if [ ! -d "./logs" ]; then 2 | mkdir ./logs 3 | fi 4 | 5 | if [ ! -d "./logs/LongForecasting" ]; then 6 | mkdir ./logs/LongForecasting 7 | fi 8 | 9 | if [ !
-d "./logs/LongForecasting/TimeBridge" ]; then 10 | mkdir ./logs/LongForecasting/TimeBridge 11 | fi 12 | 13 | model_name=TimeBridge 14 | seq_len=720 15 | GPU=0 16 | root=./dataset 17 | 18 | alpha=0.35 19 | data_name=ETTh1 20 | for pred_len in 96 192 336 720 21 | do 22 | CUDA_VISIBLE_DEVICES=$GPU \ 23 | python -u run.py \ 24 | --is_training 1 \ 25 | --root_path $root/ETT-small/ \ 26 | --data_path $data_name.csv \ 27 | --model_id $data_name'_'$seq_len'_'$pred_len \ 28 | --model $model_name \ 29 | --data $data_name \ 30 | --features M \ 31 | --seq_len $seq_len \ 32 | --label_len 48 \ 33 | --pred_len $pred_len \ 34 | --enc_in 7 \ 35 | --ca_layers 0 \ 36 | --pd_layers 1 \ 37 | --ia_layers 3 \ 38 | --des 'Exp' \ 39 | --d_model 128 \ 40 | --d_ff 128 \ 41 | --batch_size 64 \ 42 | --alpha $alpha \ 43 | --learning_rate 0.0002 \ 44 | --train_epochs 100 \ 45 | --patience 10 \ 46 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 47 | done 48 | 49 | 50 | alpha=0.35 51 | data_name=ETTh2 52 | for pred_len in 96 192 336 720 53 | do 54 | CUDA_VISIBLE_DEVICES=$GPU \ 55 | python -u run.py \ 56 | --is_training 1 \ 57 | --root_path $root/ETT-small/ \ 58 | --data_path $data_name.csv \ 59 | --model_id $data_name'_'$seq_len'_'$pred_len \ 60 | --model $model_name \ 61 | --data $data_name \ 62 | --features M \ 63 | --seq_len $seq_len \ 64 | --label_len 48 \ 65 | --pred_len $pred_len \ 66 | --enc_in 7 \ 67 | --period 48 \ 68 | --ca_layers 0 \ 69 | --pd_layers 1 \ 70 | --ia_layers 3 \ 71 | --ca_layers 0 \ 72 | --des 'Exp' \ 73 | --n_heads 4 \ 74 | --period 48 \ 75 | --d_model 128 \ 76 | --d_ff 128 \ 77 | --train_epochs 100 \ 78 | --learning_rate 0.0001 \ 79 | --patience 15 \ 80 | --alpha $alpha \ 81 | --batch_size 16 \ 82 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 83 | done 84 | 85 | 86 | alpha=0.35 87 | data_name=ETTm1 88 | for pred_len in 96 192 336 720 89 | do 90 | CUDA_VISIBLE_DEVICES=$GPU \ 91 | python -u run.py \ 92 | --is_training 1 \ 93 | --root_path $root/ETT-small/ \ 94 | --data_path $data_name.csv \ 95 | --model_id $data_name'_'$seq_len'_'$pred_len \ 96 | --model $model_name \ 97 | --data $data_name \ 98 | --features M \ 99 | --seq_len $seq_len \ 100 | --label_len 48 \ 101 | --pred_len $pred_len \ 102 | --enc_in 7 \ 103 | --ca_layers 0 \ 104 | --pd_layers 1 \ 105 | --ia_layers 3 \ 106 | --des 'Exp' \ 107 | --n_heads 4 \ 108 | --d_model 64 \ 109 | --d_ff 128 \ 110 | --period 48 \ 111 | --num_p 6 \ 112 | --lradj 'TST' \ 113 | --learning_rate 0.0002 \ 114 | --train_epochs 100 \ 115 | --pct_start 0.2 \ 116 | --patience 15 \ 117 | --batch_size 64 \ 118 | --alpha $alpha \ 119 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 120 | done 121 | 122 | alpha=0.35 123 | data_name=ETTm2 124 | for pred_len in 96 192 336 720 125 | do 126 | CUDA_VISIBLE_DEVICES=$GPU \ 127 | python -u run.py \ 128 | --is_training 1 \ 129 | --root_path $root/ETT-small/ \ 130 | --data_path $data_name.csv \ 131 | --model_id $data_name'_'$seq_len'_'$pred_len \ 132 | --model $model_name \ 133 | --data $data_name \ 134 | --features M \ 135 | --seq_len $seq_len \ 136 | --label_len 48 \ 137 | --pred_len $pred_len \ 138 | --pd_layers 1 \ 139 | --enc_in 7 \ 140 | --ca_layers 0 \ 141 | --pd_layers 1 \ 142 | --ia_layers 3 \ 143 | --des 'Exp' \ 144 | --n_heads 4 \ 145 | --d_model 64 \ 146 | --d_ff 128 \ 147 | --lradj 'TST' \ 148 | --period 48 \ 149 | --train_epochs 100 \ 150 | --learning_rate 0.0002 \ 151 | 
--pct_start 0.2 \ 152 | --patience 10 \ 153 | --batch_size 64 \ 154 | --alpha $alpha \ 155 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 156 | done 157 | 158 | 159 | alpha=0.1 160 | data_name=weather 161 | for pred_len in 96 192 336 720 162 | do 163 | CUDA_VISIBLE_DEVICES=$GPU \ 164 | python -u run.py \ 165 | --is_training 1 \ 166 | --root_path $root/weather/ \ 167 | --data_path weather.csv \ 168 | --model_id $data_name'_'$seq_len'_'$pred_len \ 169 | --model $model_name \ 170 | --data custom \ 171 | --features M \ 172 | --seq_len $seq_len \ 173 | --label_len 48 \ 174 | --pred_len $pred_len \ 175 | --enc_in 21 \ 176 | --ca_layers 1 \ 177 | --pd_layers 1 \ 178 | --ia_layers 1 \ 179 | --des 'Exp' \ 180 | --period 48 \ 181 | --num_p 12 \ 182 | --d_model 128 \ 183 | --d_ff 128 \ 184 | --alpha $alpha \ 185 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 186 | done 187 | 188 | 189 | alpha=0.05 190 | data_name=Solar 191 | for pred_len in 96 192 336 720 192 | do 193 | CUDA_VISIBLE_DEVICES=$GPU \ 194 | python -u run.py \ 195 | --is_training 1 \ 196 | --root_path $root/Solar/ \ 197 | --data_path solar_AL.txt \ 198 | --model_id $data_name'_'$seq_len'_'$pred_len \ 199 | --model $model_name \ 200 | --data Solar \ 201 | --features M \ 202 | --seq_len $seq_len \ 203 | --label_len 48 \ 204 | --pred_len $pred_len \ 205 | --enc_in 137 \ 206 | --ca_layers 1 \ 207 | --pd_layers 1 \ 208 | --ia_layers 1 \ 209 | --des 'Exp' \ 210 | --period 48 \ 211 | --num_p 12 \ 212 | --d_model 128 \ 213 | --d_ff 128 \ 214 | --alpha $alpha \ 215 | --learning_rate 0.0005 \ 216 | --train_epochs 100 \ 217 | --patience 15 \ 218 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 219 | done 220 | 221 | alpha=0.2 222 | data_name=electricity 223 | for pred_len in 96 192 336 720 224 | do 225 | CUDA_VISIBLE_DEVICES=$GPU \ 226 | python -u run.py \ 227 | --is_training 1 \ 228 | --root_path $root/electricity/ \ 229 | --data_path electricity.csv \ 230 | --model_id $data_name'_'$seq_len'_'$pred_len \ 231 | --model $model_name \ 232 | --data custom \ 233 | --features M \ 234 | --seq_len $seq_len \ 235 | --label_len 48 \ 236 | --pred_len $pred_len \ 237 | --enc_in 321 \ 238 | --des 'Exp' \ 239 | --n_heads 32 \ 240 | --d_ff 512 \ 241 | --d_model 512 \ 242 | --ca_layers 2 \ 243 | --pd_layers 1 \ 244 | --ia_layers 1 \ 245 | --attn_dropout 0.1 \ 246 | --num_p 4 \ 247 | --stable_len 4 \ 248 | --alpha $alpha \ 249 | --batch_size 16 \ 250 | --learning_rate 0.0005 \ 251 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 252 | done 253 | 254 | alpha=0.35 255 | data_name=traffic 256 | GPU=0,1,2,3 257 | for pred_len in 336 720 192 96; do 258 | CUDA_VISIBLE_DEVICES=$GPU \ 259 | python -u run.py \ 260 | --is_training 1 \ 261 | --root_path $root/traffic/ \ 262 | --data_path traffic.csv \ 263 | --model_id $data_name'_'$seq_len'_'$pred_len \ 264 | --model $model_name \ 265 | --data custom \ 266 | --features M \ 267 | --seq_len $seq_len \ 268 | --label_len 48 \ 269 | --pred_len $pred_len \ 270 | --enc_in 862 \ 271 | --des 'Exp' \ 272 | --num_p 8 \ 273 | --n_heads 64 \ 274 | --stable_len 2 \ 275 | --d_ff 512 \ 276 | --d_model 512 \ 277 | --ca_layers 3 \ 278 | --pd_layers 1 \ 279 | --ia_layers 1 \ 280 | --batch_size 4 \ 281 | --attn_dropout 0.15 \ 282 | --patience 5 \ 283 | --train_epochs 100 \ 284 | --devices 0,1,2,3 \ 285 | --use_multi_gpu \ 286 | --alpha $alpha \ 287 | 
--learning_rate 0.0005 \ 288 | --itr 1 > logs/LongForecasting/TimeBridge/$data_name'_'$alpha'_'$model_name'_'$pred_len.logs 289 | done 290 | -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import time 3 | 4 | import torch 5 | from experiments.exp_long_term_forecasting import Exp_Long_Term_Forecast 6 | import random 7 | import numpy as np 8 | 9 | if __name__ == '__main__': 10 | fix_seed = 2023 11 | random.seed(fix_seed) 12 | torch.manual_seed(fix_seed) 13 | np.random.seed(fix_seed) 14 | 15 | parser = argparse.ArgumentParser(description='TimeBridge') 16 | 17 | # ablation control flags 18 | parser.add_argument('--revin', action='store_false', help='use reversible instance normalization (RevIN); pass this flag to disable it', default=True) 19 | parser.add_argument('--alpha', type=float, default=0.2, help='weight of time-frequency MAE loss') 20 | parser.add_argument('--dropout', type=float, default=0.0, help='dropout') 21 | parser.add_argument('--attn_dropout', type=float, default=0.15, help='attention dropout') 22 | parser.add_argument('--batch_size', type=int, default=32, help='batch size of train input data') 23 | 24 | # basic config 25 | parser.add_argument('--is_training', type=int, required=True, default=1, help='1: train then test, 0: test only') 26 | parser.add_argument('--model_id', type=str, required=True, default='test', help='model id') 27 | parser.add_argument('--model', type=str, required=True, default='TimeBridge', help='model name') 28 | 29 | # data loader 30 | parser.add_argument('--data', type=str, required=True, default='custom', help='dataset type') 31 | parser.add_argument('--root_path', type=str, default='./data/electricity/', help='root path of the data file') 32 | parser.add_argument('--data_path', type=str, default='electricity.csv', help='data csv file') 33 | parser.add_argument('--features', type=str, default='M', 34 | help='forecasting task, options:[M, S, MS]; M:multivariate predict multivariate, S:univariate predict univariate, MS:multivariate predict univariate') 35 | parser.add_argument('--target', type=str, default='OT', help='target feature in S or MS task') 36 | parser.add_argument('--freq', type=str, default='h', 37 | help='freq for time features encoding, options:[s:secondly, t:minutely, h:hourly, d:daily, b:business days, w:weekly, m:monthly], you can also use more detailed freq like 15min or 3h') 38 | parser.add_argument('--checkpoints', type=str, default='./checkpoints/', help='location of model checkpoints') 39 | 40 | # forecasting task 41 | parser.add_argument('--seq_len', type=int, default=96, help='input sequence length') 42 | parser.add_argument('--label_len', type=int, default=48, help='start token length') # no longer needed in inverted Transformers 43 | parser.add_argument('--pred_len', type=int, default=96, help='prediction sequence length') 44 | parser.add_argument('--seasonal_patterns', type=str, default='Monthly', help='subset for M4') 45 | 46 | # model define 47 | parser.add_argument('--ia_layers', type=int, default=1, help='num of integrated attention layers') 48 | parser.add_argument('--pd_layers', type=int, default=1, help='num of patch downsampled layers') 49 | parser.add_argument('--ca_layers', type=int, default=0, help='num of cointegrated attention layers') 50 | 51 | parser.add_argument('--stable_len', type=int, default=6, help='length of moving average in patch norm') 52 | parser.add_argument('--num_p', type=int, default=None, help='num of down-sampled patches') 53 | 54
| parser.add_argument('--period', type=int, default=24, help='length of patches') 55 | 56 | parser.add_argument('--enc_in', type=int, default=7, help='number of input channels (encoder input size)') 57 | parser.add_argument('--d_model', type=int, default=512, help='dimension of model') 58 | parser.add_argument('--n_heads', type=int, default=8, help='num of heads') 59 | parser.add_argument('--d_ff', type=int, default=2048, help='dimension of fcn') 60 | 61 | parser.add_argument('--embed', type=str, default='timeF', help='time features encoding, options:[timeF, fixed, learned]') 62 | parser.add_argument('--activation', type=str, default='gelu', help='activation') 63 | parser.add_argument('--output_attention', action='store_true', help='whether to output attention in encoder') 64 | 65 | # optimization 66 | parser.add_argument('--num_workers', type=int, default=10, help='data loader num workers') 67 | parser.add_argument('--itr', type=int, default=1, help='experiments times') 68 | parser.add_argument('--train_epochs', type=int, default=10, help='train epochs') 69 | parser.add_argument('--embedding_epochs', type=int, default=5, help='train epochs for the embedding stage') 70 | parser.add_argument('--patience', type=int, default=3, help='early stopping patience') 71 | parser.add_argument('--pct_start', type=float, default=0.2, help='warm-up fraction for the learning rate scheduler') 72 | parser.add_argument('--learning_rate', type=float, default=0.0001, help='optimizer learning rate') 73 | parser.add_argument('--embedding_lr', type=float, default=0.0005, help='optimizer learning rate of embedding') 74 | parser.add_argument('--des', type=str, default='test', help='exp description') 75 | parser.add_argument('--loss', type=str, default='MSE', help='loss function') 76 | parser.add_argument('--lradj', type=str, default='type1', help='adjust learning rate') 77 | 78 | # GPU 79 | parser.add_argument('--use_gpu', type=bool, default=True, help='use gpu') 80 | parser.add_argument('--gpu', type=int, default=0, help='gpu') 81 | parser.add_argument('--use_multi_gpu', action='store_true', help='use multiple gpus', default=False) 82 | parser.add_argument('--devices', type=str, default='0,1,2,3', help='device ids of multiple gpus') 83 | 84 | parser.add_argument('--inverse', action='store_true', help='inverse output data', default=False) 85 | 86 | args = parser.parse_args() 87 | args.use_gpu = True if torch.cuda.is_available() and args.use_gpu else False 88 | 89 | if args.use_gpu and args.use_multi_gpu: 90 | args.devices = args.devices.replace(' ', '') 91 | device_ids = args.devices.split(',') 92 | args.device_ids = [int(id_) for id_ in device_ids] 93 | args.gpu = args.device_ids[0] 94 | 95 | print('Args in experiment:') 96 | print(args) 97 | 98 | Exp = Exp_Long_Term_Forecast 99 | 100 | if args.is_training: 101 | for ii in range(args.itr): 102 | # setting record of experiments 103 | setting = '{}_{}_{}_bs{}_ft{}_sl{}_ll{}_pl{}_dm{}_nh{}_ial{}_pdl{}_cal{}_df{}_eb{}_{}_{}'.format( 104 | args.model_id, 105 | args.model, 106 | args.data, 107 | args.batch_size, 108 | args.features, 109 | args.seq_len, 110 | args.label_len, 111 | args.pred_len, 112 | args.d_model, 113 | args.n_heads, 114 | args.ia_layers, 115 | args.pd_layers, 116 | args.ca_layers, 117 | args.d_ff, 118 | args.embed, 119 | args.des, ii) 120 | 121 | exp = Exp(args) # set experiments 122 | print('>>>>>>>start training : {}>>>>>>>>>>>>>>>>>>>>>>>>>>'.format(setting)) 123 | exp.train(setting) 124 | 125 | print('>>>>>>>testing : {}<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<'.format(setting)) 126 | exp.test(setting) 127 |
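# release cached GPU memory before the next experiment iteration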
torch.cuda.empty_cache() 128 | else: 129 | ii = 0 130 | setting = '{}_{}_{}_bs{}_ft{}_sl{}_ll{}_pl{}_dm{}_nh{}_ial{}_pdl{}_cal{}_df{}_eb{}_{}_{}'.format( 131 | args.model_id, 132 | args.model, 133 | args.data, 134 | args.batch_size, 135 | args.features, 136 | args.seq_len, 137 | args.label_len, 138 | args.pred_len, 139 | args.d_model, 140 | args.n_heads, 141 | args.ia_layers, 142 | args.pd_layers, 143 | args.ca_layers, 144 | args.d_ff, 145 | args.embed, 146 | args.des, ii) 147 | 148 | exp = Exp(args) # set experiments 149 | print('>>>>>>>testing : {}<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<'.format(setting)) 150 | start_time = time.time() 151 | exp.test(setting, test=1) 152 | end_time = time.time() 153 | print(f"Runtime: {end_time - start_time:.4f} s") 154 | torch.cuda.empty_cache() 155 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/weather_0.1_TimeBridge_720.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.1, dropout=0.0, attn_dropout=0.15, batch_size=32, is_training=1, model_id='weather_720_720', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/weather/', data_path='weather.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=720, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=1, stable_len=6, num_p=12, period=48, enc_in=21, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : weather_720_720_TimeBridge_custom_bs32_ftM_sl720_ll48_pl720_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 35448 6 | val 4551 7 | test 9820 8 | iters: 100, epoch: 1 | loss: 1.1192803 9 | speed: 0.0371s/iter; left time: 406.4811s 10 | iters: 200, epoch: 1 | loss: 1.0684625 11 | speed: 0.0225s/iter; left time: 244.3674s 12 | iters: 300, epoch: 1 | loss: 1.0340514 13 | speed: 0.0216s/iter; left time: 232.6189s 14 | iters: 400, epoch: 1 | loss: 1.0653294 15 | speed: 0.0227s/iter; left time: 242.0650s 16 | iters: 500, epoch: 1 | loss: 1.0150039 17 | speed: 0.0225s/iter; left time: 238.2436s 18 | iters: 600, epoch: 1 | loss: 0.9562545 19 | speed: 0.0222s/iter; left time: 232.0338s 20 | iters: 700, epoch: 1 | loss: 1.0736145 21 | speed: 0.0209s/iter; left time: 216.5584s 22 | iters: 800, epoch: 1 | loss: 0.9835378 23 | speed: 0.0224s/iter; left time: 230.5256s 24 | iters: 900, epoch: 1 | loss: 0.9642209 25 | speed: 0.0251s/iter; left time: 255.4298s 26 | iters: 1000, epoch: 1 | loss: 1.1279728 27 | speed: 0.0246s/iter; left time: 247.5706s 28 | iters: 1100, epoch: 1 | loss: 0.9280192 29 | speed: 0.0206s/iter; left time: 205.1439s 30 | Epoch: 1 cost time: 26.442914485931396 31 | Epoch: 1, Steps: 1107 | Train Loss: 1.0285434 Vali Loss: 0.6062616 Test Loss: 0.3162408 32 | Validation loss decreased (inf --> 0.606262). Saving model ...
33 | iters: 100, epoch: 2 | loss: 0.9811405 34 | speed: 0.5641s/iter; left time: 5564.5908s 35 | iters: 200, epoch: 2 | loss: 0.9565638 36 | speed: 0.0205s/iter; left time: 199.7465s 37 | iters: 300, epoch: 2 | loss: 1.0281646 38 | speed: 0.0201s/iter; left time: 194.2794s 39 | iters: 400, epoch: 2 | loss: 0.9929978 40 | speed: 0.0211s/iter; left time: 201.9484s 41 | iters: 500, epoch: 2 | loss: 0.9752498 42 | speed: 0.0235s/iter; left time: 222.1918s 43 | iters: 600, epoch: 2 | loss: 1.0491171 44 | speed: 0.0238s/iter; left time: 222.5527s 45 | iters: 700, epoch: 2 | loss: 0.9534057 46 | speed: 0.0220s/iter; left time: 204.0970s 47 | iters: 800, epoch: 2 | loss: 1.0351994 48 | speed: 0.0245s/iter; left time: 224.3244s 49 | iters: 900, epoch: 2 | loss: 1.0054100 50 | speed: 0.0239s/iter; left time: 216.6886s 51 | iters: 1000, epoch: 2 | loss: 1.0016066 52 | speed: 0.0225s/iter; left time: 201.2697s 53 | iters: 1100, epoch: 2 | loss: 0.9408118 54 | speed: 0.0230s/iter; left time: 203.9484s 55 | Epoch: 2 cost time: 25.740376472473145 56 | Epoch: 2, Steps: 1107 | Train Loss: 0.9857396 Vali Loss: 0.5981794 Test Loss: 0.3113243 57 | Validation loss decreased (0.606262 --> 0.598179). Saving model ... 58 | iters: 100, epoch: 3 | loss: 1.0309973 59 | speed: 0.5583s/iter; left time: 4889.3602s 60 | iters: 200, epoch: 3 | loss: 1.0473834 61 | speed: 0.0213s/iter; left time: 184.0583s 62 | iters: 300, epoch: 3 | loss: 0.9210221 63 | speed: 0.0220s/iter; left time: 187.9963s 64 | iters: 400, epoch: 3 | loss: 1.0128818 65 | speed: 0.0208s/iter; left time: 176.2125s 66 | iters: 500, epoch: 3 | loss: 0.9550623 67 | speed: 0.0201s/iter; left time: 168.1972s 68 | iters: 600, epoch: 3 | loss: 1.0057529 69 | speed: 0.0206s/iter; left time: 170.3967s 70 | iters: 700, epoch: 3 | loss: 0.9751731 71 | speed: 0.0200s/iter; left time: 163.1197s 72 | iters: 800, epoch: 3 | loss: 0.9544037 73 | speed: 0.0210s/iter; left time: 169.4518s 74 | iters: 900, epoch: 3 | loss: 1.0859772 75 | speed: 0.0207s/iter; left time: 164.9430s 76 | iters: 1000, epoch: 3 | loss: 1.0281229 77 | speed: 0.0216s/iter; left time: 170.0958s 78 | iters: 1100, epoch: 3 | loss: 0.9726408 79 | speed: 0.0210s/iter; left time: 163.1456s 80 | Epoch: 3 cost time: 23.898162126541138 81 | Epoch: 3, Steps: 1107 | Train Loss: 0.9765831 Vali Loss: 0.5961423 Test Loss: 0.3108552 82 | Validation loss decreased (0.598179 --> 0.596142). Saving model ... 
83 | iters: 100, epoch: 4 | loss: 0.9705260 84 | speed: 0.5570s/iter; left time: 4260.7640s 85 | iters: 200, epoch: 4 | loss: 1.0429667 86 | speed: 0.0214s/iter; left time: 161.7156s 87 | iters: 300, epoch: 4 | loss: 0.8896492 88 | speed: 0.0243s/iter; left time: 180.9381s 89 | iters: 400, epoch: 4 | loss: 0.9737362 90 | speed: 0.0243s/iter; left time: 178.5416s 91 | iters: 500, epoch: 4 | loss: 0.9385244 92 | speed: 0.0221s/iter; left time: 159.9121s 93 | iters: 600, epoch: 4 | loss: 0.9413038 94 | speed: 0.0217s/iter; left time: 155.0509s 95 | iters: 700, epoch: 4 | loss: 0.9751185 96 | speed: 0.0235s/iter; left time: 165.6094s 97 | iters: 800, epoch: 4 | loss: 0.9353973 98 | speed: 0.0240s/iter; left time: 166.8889s 99 | iters: 900, epoch: 4 | loss: 0.9653575 100 | speed: 0.0236s/iter; left time: 161.9689s 101 | iters: 1000, epoch: 4 | loss: 0.9577286 102 | speed: 0.0222s/iter; left time: 150.0770s 103 | iters: 1100, epoch: 4 | loss: 1.0042149 104 | speed: 0.0228s/iter; left time: 151.8878s 105 | Epoch: 4 cost time: 26.070638179779053 106 | Epoch: 4, Steps: 1107 | Train Loss: 0.9728569 Vali Loss: 0.5974938 Test Loss: 0.3104247 107 | EarlyStopping counter: 1 out of 3 108 | iters: 100, epoch: 5 | loss: 1.0160285 109 | speed: 0.5481s/iter; left time: 3585.9281s 110 | iters: 200, epoch: 5 | loss: 0.9740698 111 | speed: 0.0227s/iter; left time: 146.4907s 112 | iters: 300, epoch: 5 | loss: 0.9615119 113 | speed: 0.0209s/iter; left time: 132.2707s 114 | iters: 400, epoch: 5 | loss: 0.9856627 115 | speed: 0.0216s/iter; left time: 134.8665s 116 | iters: 500, epoch: 5 | loss: 0.9846687 117 | speed: 0.0216s/iter; left time: 132.9674s 118 | iters: 600, epoch: 5 | loss: 0.9316906 119 | speed: 0.0218s/iter; left time: 131.7025s 120 | iters: 700, epoch: 5 | loss: 0.9095043 121 | speed: 0.0211s/iter; left time: 125.6530s 122 | iters: 800, epoch: 5 | loss: 1.0470606 123 | speed: 0.0209s/iter; left time: 122.1515s 124 | iters: 900, epoch: 5 | loss: 0.9600739 125 | speed: 0.0230s/iter; left time: 132.2560s 126 | iters: 1000, epoch: 5 | loss: 1.0139283 127 | speed: 0.0205s/iter; left time: 115.4004s 128 | iters: 1100, epoch: 5 | loss: 0.8754920 129 | speed: 0.0200s/iter; left time: 110.9270s 130 | Epoch: 5 cost time: 24.476744413375854 131 | Epoch: 5, Steps: 1107 | Train Loss: 0.9710704 Vali Loss: 0.5963212 Test Loss: 0.3117056 132 | EarlyStopping counter: 2 out of 3 133 | iters: 100, epoch: 6 | loss: 0.9459094 134 | speed: 0.5611s/iter; left time: 3050.2986s 135 | iters: 200, epoch: 6 | loss: 0.9025727 136 | speed: 0.0207s/iter; left time: 110.5930s 137 | iters: 300, epoch: 6 | loss: 1.0125871 138 | speed: 0.0220s/iter; left time: 114.9362s 139 | iters: 400, epoch: 6 | loss: 0.9375489 140 | speed: 0.0227s/iter; left time: 116.5796s 141 | iters: 500, epoch: 6 | loss: 0.9209133 142 | speed: 0.0223s/iter; left time: 112.4118s 143 | iters: 600, epoch: 6 | loss: 1.0036240 144 | speed: 0.0248s/iter; left time: 122.3382s 145 | iters: 700, epoch: 6 | loss: 0.9800394 146 | speed: 0.0253s/iter; left time: 122.3529s 147 | iters: 800, epoch: 6 | loss: 0.9413810 148 | speed: 0.0219s/iter; left time: 103.5755s 149 | iters: 900, epoch: 6 | loss: 0.8945955 150 | speed: 0.0232s/iter; left time: 107.5601s 151 | iters: 1000, epoch: 6 | loss: 0.9590219 152 | speed: 0.0219s/iter; left time: 99.1390s 153 | iters: 1100, epoch: 6 | loss: 0.9408726 154 | speed: 0.0211s/iter; left time: 93.6758s 155 | Epoch: 6 cost time: 25.581058263778687 156 | Epoch: 6, Steps: 1107 | Train Loss: 0.9699792 Vali Loss: 0.5968599 Test Loss: 
0.3113272 157 | EarlyStopping counter: 3 out of 3 158 | Early stopping 159 | >>>>>>>testing : weather_720_720_TimeBridge_custom_bs32_ftM_sl720_ll48_pl720_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 160 | test 9820 161 | test shape: (9820, 1, 720, 21) (9820, 1, 720, 21) 162 | test shape: (9820, 720, 21) (9820, 720, 21) 163 | mse:0.3108551502227783, mae:0.32104259729385376 164 | rmse:0.5575438737869263, mape:0.6310985088348389, mspe:23963216.0 165 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/electricity_0.2_TimeBridge_192.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.2, dropout=0.0, attn_dropout=0.1, batch_size=16, is_training=1, model_id='electricity_720_192', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/electricity/', data_path='electricity.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=192, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=2, stable_len=4, num_p=4, period=24, enc_in=321, d_model=512, n_heads=32, d_ff=512, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0005, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : electricity_720_192_TimeBridge_custom_bs16_ftM_sl720_ll48_pl192_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 17501 6 | val 2441 7 | test 5069 8 | iters: 100, epoch: 1 | loss: 1.0345391 9 | speed: 0.3832s/iter; left time: 4150.1199s 10 | iters: 200, epoch: 1 | loss: 0.9799196 11 | speed: 0.3727s/iter; left time: 3999.8076s 12 | iters: 300, epoch: 1 | loss: 0.8989314 13 | speed: 0.3743s/iter; left time: 3979.0503s 14 | iters: 400, epoch: 1 | loss: 0.8485392 15 | speed: 0.3742s/iter; left time: 3940.6315s 16 | iters: 500, epoch: 1 | loss: 0.8324274 17 | speed: 0.3741s/iter; left time: 3902.2371s 18 | iters: 600, epoch: 1 | loss: 0.8848873 19 | speed: 0.3749s/iter; left time: 3872.7320s 20 | iters: 700, epoch: 1 | loss: 0.9074361 21 | speed: 0.3749s/iter; left time: 3836.0521s 22 | iters: 800, epoch: 1 | loss: 0.8186226 23 | speed: 0.3740s/iter; left time: 3789.3308s 24 | iters: 900, epoch: 1 | loss: 0.8480656 25 | speed: 0.3747s/iter; left time: 3758.7378s 26 | iters: 1000, epoch: 1 | loss: 0.8957411 27 | speed: 0.3760s/iter; left time: 3734.3048s 28 | Epoch: 1 cost time: 410.0901851654053 29 | Epoch: 1, Steps: 1093 | Train Loss: 0.8909383 Vali Loss: 0.1277111 Test Loss: 0.1520196 30 | Validation loss decreased (inf --> 0.127711). Saving model ... 
31 | iters: 100, epoch: 2 | loss: 0.8111048 32 | speed: 1.6298s/iter; left time: 15871.1898s 33 | iters: 200, epoch: 2 | loss: 0.7941796 34 | speed: 0.3758s/iter; left time: 3621.6607s 35 | iters: 300, epoch: 2 | loss: 0.7759081 36 | speed: 0.3735s/iter; left time: 3562.6500s 37 | iters: 400, epoch: 2 | loss: 0.7996232 38 | speed: 0.3756s/iter; left time: 3545.0112s 39 | iters: 500, epoch: 2 | loss: 0.7558929 40 | speed: 0.3731s/iter; left time: 3483.7535s 41 | iters: 600, epoch: 2 | loss: 0.8181878 42 | speed: 0.3711s/iter; left time: 3428.1017s 43 | iters: 700, epoch: 2 | loss: 0.7863903 44 | speed: 0.3774s/iter; left time: 3448.5804s 45 | iters: 800, epoch: 2 | loss: 0.8165748 46 | speed: 0.3740s/iter; left time: 3380.2904s 47 | iters: 900, epoch: 2 | loss: 0.7894744 48 | speed: 0.3743s/iter; left time: 3345.8958s 49 | iters: 1000, epoch: 2 | loss: 0.7788391 50 | speed: 0.3740s/iter; left time: 3305.7431s 51 | Epoch: 2 cost time: 410.4243562221527 52 | Epoch: 2, Steps: 1093 | Train Loss: 0.8077675 Vali Loss: 0.1255450 Test Loss: 0.1494850 53 | Validation loss decreased (0.127711 --> 0.125545). Saving model ... 54 | iters: 100, epoch: 3 | loss: 0.7510280 55 | speed: 1.6510s/iter; left time: 14273.2433s 56 | iters: 200, epoch: 3 | loss: 0.7655900 57 | speed: 0.3722s/iter; left time: 3180.4080s 58 | iters: 300, epoch: 3 | loss: 0.7641256 59 | speed: 0.3728s/iter; left time: 3148.5815s 60 | iters: 400, epoch: 3 | loss: 0.7656254 61 | speed: 0.3749s/iter; left time: 3128.2018s 62 | iters: 500, epoch: 3 | loss: 0.8140938 63 | speed: 0.3746s/iter; left time: 3088.5985s 64 | iters: 600, epoch: 3 | loss: 0.7691644 65 | speed: 0.3721s/iter; left time: 3030.7251s 66 | iters: 700, epoch: 3 | loss: 0.7327650 67 | speed: 0.3735s/iter; left time: 3004.9063s 68 | iters: 800, epoch: 3 | loss: 0.7969636 69 | speed: 0.3732s/iter; left time: 2965.2866s 70 | iters: 900, epoch: 3 | loss: 0.8068562 71 | speed: 0.3732s/iter; left time: 2927.6709s 72 | iters: 1000, epoch: 3 | loss: 0.7426975 73 | speed: 0.3736s/iter; left time: 2893.8945s 74 | Epoch: 3 cost time: 409.5348148345947 75 | Epoch: 3, Steps: 1093 | Train Loss: 0.7758293 Vali Loss: 0.1215001 Test Loss: 0.1444169 76 | Validation loss decreased (0.125545 --> 0.121500). Saving model ... 77 | iters: 100, epoch: 4 | loss: 0.7474883 78 | speed: 1.6464s/iter; left time: 12433.9188s 79 | iters: 200, epoch: 4 | loss: 0.7286014 80 | speed: 0.3732s/iter; left time: 2780.9352s 81 | iters: 300, epoch: 4 | loss: 0.7618446 82 | speed: 0.3746s/iter; left time: 2754.1096s 83 | iters: 400, epoch: 4 | loss: 0.7865748 84 | speed: 0.3734s/iter; left time: 2708.0858s 85 | iters: 500, epoch: 4 | loss: 0.7569466 86 | speed: 0.3751s/iter; left time: 2682.9486s 87 | iters: 600, epoch: 4 | loss: 0.7811056 88 | speed: 0.3719s/iter; left time: 2622.9679s 89 | iters: 700, epoch: 4 | loss: 0.7829837 90 | speed: 0.3745s/iter; left time: 2603.5857s 91 | iters: 800, epoch: 4 | loss: 0.8019868 92 | speed: 0.3750s/iter; left time: 2569.5731s 93 | iters: 900, epoch: 4 | loss: 0.7209437 94 | speed: 0.3734s/iter; left time: 2521.4551s 95 | iters: 1000, epoch: 4 | loss: 0.7859673 96 | speed: 0.3762s/iter; left time: 2502.3027s 97 | Epoch: 4 cost time: 410.0463309288025 98 | Epoch: 4, Steps: 1093 | Train Loss: 0.7561964 Vali Loss: 0.1206929 Test Loss: 0.1443663 99 | Validation loss decreased (0.121500 --> 0.120693). Saving model ... 
100 | iters: 100, epoch: 5 | loss: 0.7119098 101 | speed: 1.6474s/iter; left time: 10640.3399s 102 | iters: 200, epoch: 5 | loss: 0.7262732 103 | speed: 0.3720s/iter; left time: 2365.7977s 104 | iters: 300, epoch: 5 | loss: 0.7310805 105 | speed: 0.3736s/iter; left time: 2338.6079s 106 | iters: 400, epoch: 5 | loss: 0.7721397 107 | speed: 0.3736s/iter; left time: 2300.9383s 108 | iters: 500, epoch: 5 | loss: 0.7255195 109 | speed: 0.3740s/iter; left time: 2266.3638s 110 | iters: 600, epoch: 5 | loss: 0.6829763 111 | speed: 0.3719s/iter; left time: 2216.3944s 112 | iters: 700, epoch: 5 | loss: 0.7295176 113 | speed: 0.3734s/iter; left time: 2187.4977s 114 | iters: 800, epoch: 5 | loss: 0.7347048 115 | speed: 0.3743s/iter; left time: 2155.5168s 116 | iters: 900, epoch: 5 | loss: 0.7385394 117 | speed: 0.3740s/iter; left time: 2116.5918s 118 | iters: 1000, epoch: 5 | loss: 0.7187116 119 | speed: 0.3739s/iter; left time: 2078.4383s 120 | Epoch: 5 cost time: 409.6311876773834 121 | Epoch: 5, Steps: 1093 | Train Loss: 0.7448033 Vali Loss: 0.1209645 Test Loss: 0.1436014 122 | EarlyStopping counter: 1 out of 3 123 | iters: 100, epoch: 6 | loss: 0.7605271 124 | speed: 1.6477s/iter; left time: 8841.3781s 125 | iters: 200, epoch: 6 | loss: 0.7599976 126 | speed: 0.3742s/iter; left time: 1970.7371s 127 | iters: 300, epoch: 6 | loss: 0.7235015 128 | speed: 0.3757s/iter; left time: 1940.9846s 129 | iters: 400, epoch: 6 | loss: 0.7286054 130 | speed: 0.3752s/iter; left time: 1900.7414s 131 | iters: 500, epoch: 6 | loss: 0.7325112 132 | speed: 0.3769s/iter; left time: 1871.5764s 133 | iters: 600, epoch: 6 | loss: 0.7608700 134 | speed: 0.3737s/iter; left time: 1818.4081s 135 | iters: 700, epoch: 6 | loss: 0.7188350 136 | speed: 0.3746s/iter; left time: 1785.2969s 137 | iters: 800, epoch: 6 | loss: 0.7437788 138 | speed: 0.3720s/iter; left time: 1735.9445s 139 | iters: 900, epoch: 6 | loss: 0.7133632 140 | speed: 0.3755s/iter; left time: 1714.4668s 141 | iters: 1000, epoch: 6 | loss: 0.7631380 142 | speed: 0.3742s/iter; left time: 1671.0321s 143 | Epoch: 6 cost time: 410.2266273498535 144 | Epoch: 6, Steps: 1093 | Train Loss: 0.7383644 Vali Loss: 0.1207877 Test Loss: 0.1422285 145 | EarlyStopping counter: 2 out of 3 146 | iters: 100, epoch: 7 | loss: 0.7781044 147 | speed: 1.6434s/iter; left time: 7022.4476s 148 | iters: 200, epoch: 7 | loss: 0.7088714 149 | speed: 0.3797s/iter; left time: 1584.4563s 150 | iters: 300, epoch: 7 | loss: 0.7678529 151 | speed: 0.3755s/iter; left time: 1529.3860s 152 | iters: 400, epoch: 7 | loss: 0.7400053 153 | speed: 0.3732s/iter; left time: 1482.7533s 154 | iters: 500, epoch: 7 | loss: 0.7263119 155 | speed: 0.3728s/iter; left time: 1443.9239s 156 | iters: 600, epoch: 7 | loss: 0.7340501 157 | speed: 0.3758s/iter; left time: 1417.9643s 158 | iters: 700, epoch: 7 | loss: 0.7037179 159 | speed: 0.3775s/iter; left time: 1386.4938s 160 | iters: 800, epoch: 7 | loss: 0.7020029 161 | speed: 0.3724s/iter; left time: 1330.5032s 162 | iters: 900, epoch: 7 | loss: 0.7331566 163 | speed: 0.3733s/iter; left time: 1296.5457s 164 | iters: 1000, epoch: 7 | loss: 0.7402313 165 | speed: 0.3758s/iter; left time: 1267.6085s 166 | Epoch: 7 cost time: 410.91966104507446 167 | Epoch: 7, Steps: 1093 | Train Loss: 0.7347439 Vali Loss: 0.1213028 Test Loss: 0.1426005 168 | EarlyStopping counter: 3 out of 3 169 | Early stopping 170 | >>>>>>>testing : electricity_720_192_TimeBridge_custom_bs16_ftM_sl720_ll48_pl192_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 171 | 
test 5069 172 | test shape: (5069, 1, 192, 321) (5069, 1, 192, 321) 173 | test shape: (5069, 192, 321) (5069, 192, 321) 174 | mse:0.14436645805835724, mae:0.23861069977283478 175 | rmse:0.37995585799217224, mape:0.4475714862346649, mspe:672516.9375 176 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh1_0.35_TimeBridge_192.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=64, is_training=1, model_id='ETTh1_720_192', model='TimeBridge', data='ETTh1', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh1.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=192, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=24, enc_in=7, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=10, pct_start=0.2, learning_rate=0.0002, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh1_720_192_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl192_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7729 6 | val 2689 7 | test 2689 8 | iters: 100, epoch: 1 | loss: 1.9722893 9 | speed: 0.0387s/iter; left time: 460.3045s 10 | Epoch: 1 cost time: 4.44400691986084 11 | Epoch: 1, Steps: 120 | Train Loss: 2.0622572 Vali Loss: 1.0886445 Test Loss: 0.4068258 12 | Validation loss decreased (inf --> 1.088645). Saving model ... 13 | iters: 100, epoch: 2 | loss: 1.8693702 14 | speed: 0.1916s/iter; left time: 2256.8798s 15 | Epoch: 2 cost time: 3.555312156677246 16 | Epoch: 2, Steps: 120 | Train Loss: 1.9196046 Vali Loss: 1.0763049 Test Loss: 0.3963752 17 | Validation loss decreased (1.088645 --> 1.076305). Saving model ... 18 | iters: 100, epoch: 3 | loss: 1.8989885 19 | speed: 0.1908s/iter; left time: 2225.2583s 20 | Epoch: 3 cost time: 3.6492810249328613 21 | Epoch: 3, Steps: 120 | Train Loss: 1.8857356 Vali Loss: 1.0636503 Test Loss: 0.3927337 22 | Validation loss decreased (1.076305 --> 1.063650). Saving model ... 23 | iters: 100, epoch: 4 | loss: 1.9033382 24 | speed: 0.1926s/iter; left time: 2223.1624s 25 | Epoch: 4 cost time: 3.520484209060669 26 | Epoch: 4, Steps: 120 | Train Loss: 1.8719565 Vali Loss: 1.0587424 Test Loss: 0.3908422 27 | Validation loss decreased (1.063650 --> 1.058742). Saving model ... 28 | iters: 100, epoch: 5 | loss: 1.7835255 29 | speed: 0.1995s/iter; left time: 2278.9841s 30 | Epoch: 5 cost time: 3.5248444080352783 31 | Epoch: 5, Steps: 120 | Train Loss: 1.8647289 Vali Loss: 1.0560102 Test Loss: 0.3893822 32 | Validation loss decreased (1.058742 --> 1.056010). Saving model ... 
33 | iters: 100, epoch: 6 | loss: 1.8623230 34 | speed: 0.1982s/iter; left time: 2240.4230s 35 | Epoch: 6 cost time: 3.5807807445526123 36 | Epoch: 6, Steps: 120 | Train Loss: 1.8610706 Vali Loss: 1.0591071 Test Loss: 0.3891697 37 | EarlyStopping counter: 1 out of 10 38 | iters: 100, epoch: 7 | loss: 1.9124529 39 | speed: 0.1920s/iter; left time: 2146.4321s 40 | Epoch: 7 cost time: 3.6490089893341064 41 | Epoch: 7, Steps: 120 | Train Loss: 1.8584800 Vali Loss: 1.0574991 Test Loss: 0.3888335 42 | EarlyStopping counter: 2 out of 10 43 | iters: 100, epoch: 8 | loss: 1.8769078 44 | speed: 0.1892s/iter; left time: 2092.6312s 45 | Epoch: 8 cost time: 3.6155734062194824 46 | Epoch: 8, Steps: 120 | Train Loss: 1.8582569 Vali Loss: 1.0570717 Test Loss: 0.3893200 47 | EarlyStopping counter: 3 out of 10 48 | iters: 100, epoch: 9 | loss: 1.8770247 49 | speed: 0.1942s/iter; left time: 2124.3048s 50 | Epoch: 9 cost time: 3.5115931034088135 51 | Epoch: 9, Steps: 120 | Train Loss: 1.8572157 Vali Loss: 1.0560219 Test Loss: 0.3894812 52 | EarlyStopping counter: 4 out of 10 53 | iters: 100, epoch: 10 | loss: 1.8886790 54 | speed: 0.1907s/iter; left time: 2063.9139s 55 | Epoch: 10 cost time: 3.699176073074341 56 | Epoch: 10, Steps: 120 | Train Loss: 1.8567146 Vali Loss: 1.0558100 Test Loss: 0.3893589 57 | Validation loss decreased (1.056010 --> 1.055810). Saving model ... 58 | iters: 100, epoch: 11 | loss: 1.8709807 59 | speed: 0.1958s/iter; left time: 2095.3157s 60 | Epoch: 11 cost time: 3.7255818843841553 61 | Epoch: 11, Steps: 120 | Train Loss: 1.8574964 Vali Loss: 1.0556684 Test Loss: 0.3892841 62 | Validation loss decreased (1.055810 --> 1.055668). Saving model ... 63 | iters: 100, epoch: 12 | loss: 1.8112903 64 | speed: 0.2027s/iter; left time: 2145.2841s 65 | Epoch: 12 cost time: 3.6144015789031982 66 | Epoch: 12, Steps: 120 | Train Loss: 1.8568507 Vali Loss: 1.0561817 Test Loss: 0.3893183 67 | EarlyStopping counter: 1 out of 10 68 | iters: 100, epoch: 13 | loss: 1.8539143 69 | speed: 0.1883s/iter; left time: 1969.7964s 70 | Epoch: 13 cost time: 3.5457334518432617 71 | Epoch: 13, Steps: 120 | Train Loss: 1.8565345 Vali Loss: 1.0561482 Test Loss: 0.3892912 72 | EarlyStopping counter: 2 out of 10 73 | iters: 100, epoch: 14 | loss: 1.8204477 74 | speed: 0.1878s/iter; left time: 1941.7597s 75 | Epoch: 14 cost time: 3.5120444297790527 76 | Epoch: 14, Steps: 120 | Train Loss: 1.8565090 Vali Loss: 1.0558594 Test Loss: 0.3892920 77 | EarlyStopping counter: 3 out of 10 78 | iters: 100, epoch: 15 | loss: 1.9105736 79 | speed: 0.1883s/iter; left time: 1924.5936s 80 | Epoch: 15 cost time: 3.51183819770813 81 | Epoch: 15, Steps: 120 | Train Loss: 1.8566599 Vali Loss: 1.0560892 Test Loss: 0.3892907 82 | EarlyStopping counter: 4 out of 10 83 | iters: 100, epoch: 16 | loss: 1.8782694 84 | speed: 0.1895s/iter; left time: 1914.3172s 85 | Epoch: 16 cost time: 3.4602458477020264 86 | Epoch: 16, Steps: 120 | Train Loss: 1.8569266 Vali Loss: 1.0561850 Test Loss: 0.3892904 87 | EarlyStopping counter: 5 out of 10 88 | iters: 100, epoch: 17 | loss: 1.8302608 89 | speed: 0.1952s/iter; left time: 1948.2512s 90 | Epoch: 17 cost time: 3.508728265762329 91 | Epoch: 17, Steps: 120 | Train Loss: 1.8565533 Vali Loss: 1.0558783 Test Loss: 0.3892899 92 | EarlyStopping counter: 6 out of 10 93 | iters: 100, epoch: 18 | loss: 1.8714256 94 | speed: 0.1993s/iter; left time: 1965.1558s 95 | Epoch: 18 cost time: 3.6304004192352295 96 | Epoch: 18, Steps: 120 | Train Loss: 1.8565989 Vali Loss: 1.0557497 Test Loss: 0.3892897 97 | EarlyStopping 
counter: 7 out of 10 98 | iters: 100, epoch: 19 | loss: 1.8481597 99 | speed: 0.1922s/iter; left time: 1872.3344s 100 | Epoch: 19 cost time: 3.5083422660827637 101 | Epoch: 19, Steps: 120 | Train Loss: 1.8561012 Vali Loss: 1.0554789 Test Loss: 0.3892898 102 | Validation loss decreased (1.055668 --> 1.055479). Saving model ... 103 | iters: 100, epoch: 20 | loss: 1.8919327 104 | speed: 0.1966s/iter; left time: 1891.4030s 105 | Epoch: 20 cost time: 3.5170254707336426 106 | Epoch: 20, Steps: 120 | Train Loss: 1.8570145 Vali Loss: 1.0557261 Test Loss: 0.3892898 107 | EarlyStopping counter: 1 out of 10 108 | iters: 100, epoch: 21 | loss: 1.9140989 109 | speed: 0.1887s/iter; left time: 1792.8941s 110 | Epoch: 21 cost time: 3.4718565940856934 111 | Epoch: 21, Steps: 120 | Train Loss: 1.8564407 Vali Loss: 1.0558791 Test Loss: 0.3892898 112 | EarlyStopping counter: 2 out of 10 113 | iters: 100, epoch: 22 | loss: 1.8822504 114 | speed: 0.1889s/iter; left time: 1772.5259s 115 | Epoch: 22 cost time: 3.5031661987304688 116 | Epoch: 22, Steps: 120 | Train Loss: 1.8563893 Vali Loss: 1.0559858 Test Loss: 0.3892898 117 | EarlyStopping counter: 3 out of 10 118 | iters: 100, epoch: 23 | loss: 1.8077888 119 | speed: 0.1898s/iter; left time: 1758.1286s 120 | Epoch: 23 cost time: 3.5149619579315186 121 | Epoch: 23, Steps: 120 | Train Loss: 1.8570020 Vali Loss: 1.0556971 Test Loss: 0.3892898 122 | EarlyStopping counter: 4 out of 10 123 | iters: 100, epoch: 24 | loss: 1.8971843 124 | speed: 0.2003s/iter; left time: 1831.1724s 125 | Epoch: 24 cost time: 3.5125486850738525 126 | Epoch: 24, Steps: 120 | Train Loss: 1.8568530 Vali Loss: 1.0560442 Test Loss: 0.3892898 127 | EarlyStopping counter: 5 out of 10 128 | iters: 100, epoch: 25 | loss: 1.8276784 129 | speed: 0.1901s/iter; left time: 1714.4979s 130 | Epoch: 25 cost time: 3.579848289489746 131 | Epoch: 25, Steps: 120 | Train Loss: 1.8564348 Vali Loss: 1.0558295 Test Loss: 0.3892898 132 | EarlyStopping counter: 6 out of 10 133 | iters: 100, epoch: 26 | loss: 1.8429002 134 | speed: 0.1917s/iter; left time: 1706.4420s 135 | Epoch: 26 cost time: 3.5175349712371826 136 | Epoch: 26, Steps: 120 | Train Loss: 1.8560665 Vali Loss: 1.0558923 Test Loss: 0.3892898 137 | EarlyStopping counter: 7 out of 10 138 | iters: 100, epoch: 27 | loss: 1.8276296 139 | speed: 0.1974s/iter; left time: 1733.2100s 140 | Epoch: 27 cost time: 3.553462028503418 141 | Epoch: 27, Steps: 120 | Train Loss: 1.8560275 Vali Loss: 1.0561650 Test Loss: 0.3892898 142 | EarlyStopping counter: 8 out of 10 143 | iters: 100, epoch: 28 | loss: 1.8477656 144 | speed: 0.1911s/iter; left time: 1655.1792s 145 | Epoch: 28 cost time: 3.4923291206359863 146 | Epoch: 28, Steps: 120 | Train Loss: 1.8562433 Vali Loss: 1.0560213 Test Loss: 0.3892898 147 | EarlyStopping counter: 9 out of 10 148 | iters: 100, epoch: 29 | loss: 1.8473099 149 | speed: 0.1973s/iter; left time: 1685.4992s 150 | Epoch: 29 cost time: 3.5739009380340576 151 | Epoch: 29, Steps: 120 | Train Loss: 1.8568070 Vali Loss: 1.0561861 Test Loss: 0.3892898 152 | EarlyStopping counter: 10 out of 10 153 | Early stopping 154 | >>>>>>>testing : ETTh1_720_192_TimeBridge_ETTh1_bs64_ftM_sl720_ll48_pl192_dm128_nh8_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 155 | test 2689 156 | test shape: (2689, 1, 192, 7) (2689, 1, 192, 7) 157 | test shape: (2689, 192, 7) (2689, 192, 7) 158 | mse:0.38928985595703125, mae:0.41404005885124207 159 | rmse:0.6239309906959534, mape:0.684104323387146, mspe:33910.703125 160 | 
-------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh2_0.35_TimeBridge_720.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=16, is_training=1, model_id='ETTh2_720_720', model='TimeBridge', data='ETTh2', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh2.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=720, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=48, enc_in=7, d_model=128, n_heads=4, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=15, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh2_720_720_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl720_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7201 6 | val 2161 7 | test 2161 8 | iters: 100, epoch: 1 | loss: 2.5789275 9 | speed: 0.0313s/iter; left time: 1404.1561s 10 | iters: 200, epoch: 1 | loss: 3.2176564 11 | speed: 0.0178s/iter; left time: 795.4050s 12 | iters: 300, epoch: 1 | loss: 2.9854259 13 | speed: 0.0182s/iter; left time: 813.5916s 14 | iters: 400, epoch: 1 | loss: 2.9687378 15 | speed: 0.0188s/iter; left time: 836.6203s 16 | Epoch: 1 cost time: 9.582162380218506 17 | Epoch: 1, Steps: 450 | Train Loss: 3.2019803 Vali Loss: 0.6900436 Test Loss: 0.4002618 18 | Validation loss decreased (inf --> 0.690044). Saving model ... 19 | iters: 100, epoch: 2 | loss: 2.8330374 20 | speed: 0.1738s/iter; left time: 7724.9492s 21 | iters: 200, epoch: 2 | loss: 2.8445759 22 | speed: 0.0195s/iter; left time: 865.0039s 23 | iters: 300, epoch: 2 | loss: 2.6805689 24 | speed: 0.0204s/iter; left time: 903.9450s 25 | iters: 400, epoch: 2 | loss: 3.3092952 26 | speed: 0.0195s/iter; left time: 862.0847s 27 | Epoch: 2 cost time: 9.6641263961792 28 | Epoch: 2, Steps: 450 | Train Loss: 3.0618104 Vali Loss: 0.6453681 Test Loss: 0.3986434 29 | Validation loss decreased (0.690044 --> 0.645368). Saving model ... 30 | iters: 100, epoch: 3 | loss: 3.0258207 31 | speed: 0.1707s/iter; left time: 7510.4218s 32 | iters: 200, epoch: 3 | loss: 3.5791271 33 | speed: 0.0198s/iter; left time: 870.3507s 34 | iters: 300, epoch: 3 | loss: 3.6462617 35 | speed: 0.0185s/iter; left time: 811.9081s 36 | iters: 400, epoch: 3 | loss: 2.7340860 37 | speed: 0.0195s/iter; left time: 853.5954s 38 | Epoch: 3 cost time: 9.437998533248901 39 | Epoch: 3, Steps: 450 | Train Loss: 3.0112308 Vali Loss: 0.6385931 Test Loss: 0.3986523 40 | Validation loss decreased (0.645368 --> 0.638593). Saving model ... 
41 | iters: 100, epoch: 4 | loss: 2.9075689 42 | speed: 0.1764s/iter; left time: 7680.2428s 43 | iters: 200, epoch: 4 | loss: 3.3418634 44 | speed: 0.0216s/iter; left time: 938.3136s 45 | iters: 300, epoch: 4 | loss: 3.3404286 46 | speed: 0.0182s/iter; left time: 788.8318s 47 | iters: 400, epoch: 4 | loss: 2.8475752 48 | speed: 0.0170s/iter; left time: 734.3811s 49 | Epoch: 4 cost time: 9.08591914176941 50 | Epoch: 4, Steps: 450 | Train Loss: 2.9829653 Vali Loss: 0.6440812 Test Loss: 0.3967033 51 | EarlyStopping counter: 1 out of 15 52 | iters: 100, epoch: 5 | loss: 2.6138620 53 | speed: 0.1731s/iter; left time: 7459.3809s 54 | iters: 200, epoch: 5 | loss: 2.8459597 55 | speed: 0.0226s/iter; left time: 973.4230s 56 | iters: 300, epoch: 5 | loss: 2.7374063 57 | speed: 0.0209s/iter; left time: 897.8817s 58 | iters: 400, epoch: 5 | loss: 3.2626624 59 | speed: 0.0199s/iter; left time: 850.9309s 60 | Epoch: 5 cost time: 10.025689125061035 61 | Epoch: 5, Steps: 450 | Train Loss: 2.9682465 Vali Loss: 0.6420363 Test Loss: 0.3956701 62 | EarlyStopping counter: 2 out of 15 63 | iters: 100, epoch: 6 | loss: 3.1749694 64 | speed: 0.1767s/iter; left time: 7537.7079s 65 | iters: 200, epoch: 6 | loss: 3.0692897 66 | speed: 0.0222s/iter; left time: 945.8544s 67 | iters: 300, epoch: 6 | loss: 2.5430481 68 | speed: 0.0228s/iter; left time: 965.7789s 69 | iters: 400, epoch: 6 | loss: 2.5466490 70 | speed: 0.0230s/iter; left time: 974.3151s 71 | Epoch: 6 cost time: 10.6790132522583 72 | Epoch: 6, Steps: 450 | Train Loss: 2.9608415 Vali Loss: 0.6393040 Test Loss: 0.3970679 73 | EarlyStopping counter: 3 out of 15 74 | iters: 100, epoch: 7 | loss: 2.5560687 75 | speed: 0.1790s/iter; left time: 7553.1714s 76 | iters: 200, epoch: 7 | loss: 2.5201263 77 | speed: 0.0179s/iter; left time: 751.6653s 78 | iters: 300, epoch: 7 | loss: 3.2058942 79 | speed: 0.0179s/iter; left time: 749.8740s 80 | iters: 400, epoch: 7 | loss: 2.6272182 81 | speed: 0.0178s/iter; left time: 747.8324s 82 | Epoch: 7 cost time: 8.649392366409302 83 | Epoch: 7, Steps: 450 | Train Loss: 2.9567363 Vali Loss: 0.6425421 Test Loss: 0.3959302 84 | EarlyStopping counter: 4 out of 15 85 | iters: 100, epoch: 8 | loss: 3.3283637 86 | speed: 0.1789s/iter; left time: 7468.1186s 87 | iters: 200, epoch: 8 | loss: 3.2453873 88 | speed: 0.0193s/iter; left time: 804.6435s 89 | iters: 300, epoch: 8 | loss: 2.8558755 90 | speed: 0.0197s/iter; left time: 817.2696s 91 | iters: 400, epoch: 8 | loss: 3.0206509 92 | speed: 0.0194s/iter; left time: 805.4514s 93 | Epoch: 8 cost time: 9.158097267150879 94 | Epoch: 8, Steps: 450 | Train Loss: 2.9547259 Vali Loss: 0.6451969 Test Loss: 0.3957705 95 | EarlyStopping counter: 5 out of 15 96 | iters: 100, epoch: 9 | loss: 3.2455261 97 | speed: 0.1776s/iter; left time: 7335.4885s 98 | iters: 200, epoch: 9 | loss: 3.1837628 99 | speed: 0.0200s/iter; left time: 825.0264s 100 | iters: 300, epoch: 9 | loss: 3.3616076 101 | speed: 0.0192s/iter; left time: 787.4986s 102 | iters: 400, epoch: 9 | loss: 2.8585415 103 | speed: 0.0186s/iter; left time: 763.7745s 104 | Epoch: 9 cost time: 9.216758012771606 105 | Epoch: 9, Steps: 450 | Train Loss: 2.9533500 Vali Loss: 0.6436849 Test Loss: 0.3960450 106 | EarlyStopping counter: 6 out of 15 107 | iters: 100, epoch: 10 | loss: 2.5814373 108 | speed: 0.1731s/iter; left time: 7069.6146s 109 | iters: 200, epoch: 10 | loss: 2.8228464 110 | speed: 0.0198s/iter; left time: 806.0337s 111 | iters: 300, epoch: 10 | loss: 3.7922421 112 | speed: 0.0205s/iter; left time: 832.8169s 113 | iters: 400, epoch: 
10 | loss: 2.7722220 114 | speed: 0.0219s/iter; left time: 886.0823s 115 | Epoch: 10 cost time: 9.691646575927734 116 | Epoch: 10, Steps: 450 | Train Loss: 2.9532704 Vali Loss: 0.6444432 Test Loss: 0.3959722 117 | EarlyStopping counter: 7 out of 15 118 | iters: 100, epoch: 11 | loss: 3.2613316 119 | speed: 0.1752s/iter; left time: 7078.3433s 120 | iters: 200, epoch: 11 | loss: 2.8728981 121 | speed: 0.0194s/iter; left time: 782.1108s 122 | iters: 300, epoch: 11 | loss: 2.8792777 123 | speed: 0.0217s/iter; left time: 871.8903s 124 | iters: 400, epoch: 11 | loss: 2.9978731 125 | speed: 0.0191s/iter; left time: 765.0309s 126 | Epoch: 11 cost time: 9.595811128616333 127 | Epoch: 11, Steps: 450 | Train Loss: 2.9527365 Vali Loss: 0.6438583 Test Loss: 0.3959769 128 | EarlyStopping counter: 8 out of 15 129 | iters: 100, epoch: 12 | loss: 2.6068292 130 | speed: 0.1684s/iter; left time: 6729.6321s 131 | iters: 200, epoch: 12 | loss: 2.6202717 132 | speed: 0.0214s/iter; left time: 851.9585s 133 | iters: 300, epoch: 12 | loss: 2.8651431 134 | speed: 0.0217s/iter; left time: 860.6764s 135 | iters: 400, epoch: 12 | loss: 2.8698676 136 | speed: 0.0213s/iter; left time: 844.9711s 137 | Epoch: 12 cost time: 10.116839408874512 138 | Epoch: 12, Steps: 450 | Train Loss: 2.9529501 Vali Loss: 0.6444632 Test Loss: 0.3960010 139 | EarlyStopping counter: 9 out of 15 140 | iters: 100, epoch: 13 | loss: 2.5348723 141 | speed: 0.1672s/iter; left time: 6605.2446s 142 | iters: 200, epoch: 13 | loss: 2.6455965 143 | speed: 0.0209s/iter; left time: 823.4113s 144 | iters: 300, epoch: 13 | loss: 2.6031523 145 | speed: 0.0164s/iter; left time: 645.4869s 146 | iters: 400, epoch: 13 | loss: 2.8099446 147 | speed: 0.0189s/iter; left time: 739.4802s 148 | Epoch: 13 cost time: 9.079530477523804 149 | Epoch: 13, Steps: 450 | Train Loss: 2.9524775 Vali Loss: 0.6445634 Test Loss: 0.3960287 150 | EarlyStopping counter: 10 out of 15 151 | iters: 100, epoch: 14 | loss: 2.6854639 152 | speed: 0.1765s/iter; left time: 6892.2090s 153 | iters: 200, epoch: 14 | loss: 3.2495422 154 | speed: 0.0225s/iter; left time: 875.0975s 155 | iters: 300, epoch: 14 | loss: 2.6534920 156 | speed: 0.0220s/iter; left time: 853.1247s 157 | iters: 400, epoch: 14 | loss: 2.7136321 158 | speed: 0.0212s/iter; left time: 823.3406s 159 | Epoch: 14 cost time: 10.26895546913147 160 | Epoch: 14, Steps: 450 | Train Loss: 2.9529603 Vali Loss: 0.6446217 Test Loss: 0.3960313 161 | EarlyStopping counter: 11 out of 15 162 | iters: 100, epoch: 15 | loss: 2.8946347 163 | speed: 0.1752s/iter; left time: 6763.3611s 164 | iters: 200, epoch: 15 | loss: 3.9172111 165 | speed: 0.0219s/iter; left time: 842.2884s 166 | iters: 300, epoch: 15 | loss: 3.0514174 167 | speed: 0.0221s/iter; left time: 848.6270s 168 | iters: 400, epoch: 15 | loss: 2.3924847 169 | speed: 0.0206s/iter; left time: 787.1188s 170 | Epoch: 15 cost time: 9.958654165267944 171 | Epoch: 15, Steps: 450 | Train Loss: 2.9528488 Vali Loss: 0.6443595 Test Loss: 0.3960313 172 | EarlyStopping counter: 12 out of 15 173 | iters: 100, epoch: 16 | loss: 2.6492357 174 | speed: 0.1714s/iter; left time: 6539.6167s 175 | iters: 200, epoch: 16 | loss: 2.9049582 176 | speed: 0.0219s/iter; left time: 833.6800s 177 | iters: 300, epoch: 16 | loss: 2.6661847 178 | speed: 0.0201s/iter; left time: 764.0837s 179 | iters: 400, epoch: 16 | loss: 2.5499434 180 | speed: 0.0190s/iter; left time: 719.2290s 181 | Epoch: 16 cost time: 9.672161340713501 182 | Epoch: 16, Steps: 450 | Train Loss: 2.9526565 Vali Loss: 0.6445279 Test Loss: 0.3960319 
183 | EarlyStopping counter: 13 out of 15 184 | iters: 100, epoch: 17 | loss: 3.4488652 185 | speed: 0.1690s/iter; left time: 6369.8692s 186 | iters: 200, epoch: 17 | loss: 2.8196802 187 | speed: 0.0184s/iter; left time: 690.1661s 188 | iters: 300, epoch: 17 | loss: 2.9121251 189 | speed: 0.0251s/iter; left time: 941.0012s 190 | iters: 400, epoch: 17 | loss: 2.5592852 191 | speed: 0.0243s/iter; left time: 908.3946s 192 | Epoch: 17 cost time: 10.552713632583618 193 | Epoch: 17, Steps: 450 | Train Loss: 2.9526213 Vali Loss: 0.6439161 Test Loss: 0.3960323 194 | EarlyStopping counter: 14 out of 15 195 | iters: 100, epoch: 18 | loss: 3.5040011 196 | speed: 0.1745s/iter; left time: 6500.5134s 197 | iters: 200, epoch: 18 | loss: 2.5529230 198 | speed: 0.0196s/iter; left time: 727.3630s 199 | iters: 300, epoch: 18 | loss: 3.2261810 200 | speed: 0.0235s/iter; left time: 869.2458s 201 | iters: 400, epoch: 18 | loss: 2.7257094 202 | speed: 0.0204s/iter; left time: 753.3007s 203 | Epoch: 18 cost time: 9.956349611282349 204 | Epoch: 18, Steps: 450 | Train Loss: 2.9528718 Vali Loss: 0.6443716 Test Loss: 0.3960323 205 | EarlyStopping counter: 15 out of 15 206 | Early stopping 207 | >>>>>>>testing : ETTh2_720_720_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl720_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 208 | test 2161 209 | test shape: (2161, 1, 720, 7) (2161, 1, 720, 7) 210 | test shape: (2161, 720, 7) (2161, 720, 7) 211 | mse:0.39865219593048096, mae:0.43189573287963867 212 | rmse:0.6313890814781189, mape:0.6157873868942261, mspe:575.7078857421875 213 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/electricity_0.2_TimeBridge_336.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.2, dropout=0.0, attn_dropout=0.1, batch_size=16, is_training=1, model_id='electricity_720_336', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/electricity/', data_path='electricity.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=336, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=2, stable_len=4, num_p=4, period=24, enc_in=321, d_model=512, n_heads=32, d_ff=512, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0005, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : electricity_720_336_TimeBridge_custom_bs16_ftM_sl720_ll48_pl336_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 17357 6 | val 2297 7 | test 4925 8 | iters: 100, epoch: 1 | loss: 1.2224960 9 | speed: 0.3827s/iter; left time: 4110.2561s 10 | iters: 200, epoch: 1 | loss: 1.2298542 11 | speed: 0.3764s/iter; left time: 4005.2225s 12 | iters: 300, epoch: 1 | loss: 1.1827811 13 | speed: 0.3783s/iter; left time: 3987.1756s 14 | iters: 400, epoch: 1 | loss: 1.1025014 15 | speed: 0.3769s/iter; left time: 3935.4058s 16 | iters: 500, epoch: 1 | loss: 1.1287220 17 | speed: 0.3760s/iter; left time: 3888.1281s 18 | iters: 600, epoch: 1 | loss: 1.0776809 19 | speed: 0.3776s/iter; left time: 3867.3895s 20 | iters: 700, epoch: 1 | loss: 1.0070106 21 | speed: 0.3747s/iter; left time: 3800.1729s 22 | iters: 800, 
epoch: 1 | loss: 1.0335456 23 | speed: 0.3750s/iter; left time: 3765.8680s 24 | iters: 900, epoch: 1 | loss: 1.0151651 25 | speed: 0.3749s/iter; left time: 3727.1638s 26 | iters: 1000, epoch: 1 | loss: 1.0391994 27 | speed: 0.3760s/iter; left time: 3700.1423s 28 | Epoch: 1 cost time: 408.37254762649536 29 | Epoch: 1, Steps: 1084 | Train Loss: 1.1274366 Vali Loss: 0.1418623 Test Loss: 0.1679404 30 | Validation loss decreased (inf --> 0.141862). Saving model ... 31 | iters: 100, epoch: 2 | loss: 1.0462893 32 | speed: 1.5843s/iter; left time: 15299.6356s 33 | iters: 200, epoch: 2 | loss: 1.0542159 34 | speed: 0.3756s/iter; left time: 3589.2673s 35 | iters: 300, epoch: 2 | loss: 1.0213926 36 | speed: 0.3756s/iter; left time: 3551.6323s 37 | iters: 400, epoch: 2 | loss: 0.9978875 38 | speed: 0.3743s/iter; left time: 3502.7769s 39 | iters: 500, epoch: 2 | loss: 1.0254205 40 | speed: 0.3759s/iter; left time: 3479.2929s 41 | iters: 600, epoch: 2 | loss: 0.9968669 42 | speed: 0.3751s/iter; left time: 3435.1942s 43 | iters: 700, epoch: 2 | loss: 1.0400608 44 | speed: 0.3758s/iter; left time: 3403.1782s 45 | iters: 800, epoch: 2 | loss: 0.9912176 46 | speed: 0.3757s/iter; left time: 3365.1880s 47 | iters: 900, epoch: 2 | loss: 1.0897579 48 | speed: 0.3752s/iter; left time: 3323.4023s 49 | iters: 1000, epoch: 2 | loss: 0.9835123 50 | speed: 0.3735s/iter; left time: 3270.8445s 51 | Epoch: 2 cost time: 408.1009600162506 52 | Epoch: 2, Steps: 1084 | Train Loss: 1.0301038 Vali Loss: 0.1369869 Test Loss: 0.1640868 53 | Validation loss decreased (0.141862 --> 0.136987). Saving model ... 54 | iters: 100, epoch: 3 | loss: 0.9806786 55 | speed: 1.6430s/iter; left time: 14085.8049s 56 | iters: 200, epoch: 3 | loss: 1.0103387 57 | speed: 0.3757s/iter; left time: 3182.9794s 58 | iters: 300, epoch: 3 | loss: 0.9856381 59 | speed: 0.3763s/iter; left time: 3150.9950s 60 | iters: 400, epoch: 3 | loss: 0.9261611 61 | speed: 0.3749s/iter; left time: 3101.8991s 62 | iters: 500, epoch: 3 | loss: 0.9755964 63 | speed: 0.3775s/iter; left time: 3085.3238s 64 | iters: 600, epoch: 3 | loss: 0.9412255 65 | speed: 0.3779s/iter; left time: 3050.7291s 66 | iters: 700, epoch: 3 | loss: 0.9992148 67 | speed: 0.3746s/iter; left time: 2986.5074s 68 | iters: 800, epoch: 3 | loss: 0.9258817 69 | speed: 0.3747s/iter; left time: 2949.6711s 70 | iters: 900, epoch: 3 | loss: 0.9572201 71 | speed: 0.3770s/iter; left time: 2930.1821s 72 | iters: 1000, epoch: 3 | loss: 0.9188828 73 | speed: 0.3750s/iter; left time: 2877.5231s 74 | Epoch: 3 cost time: 409.1068925857544 75 | Epoch: 3, Steps: 1084 | Train Loss: 0.9896274 Vali Loss: 0.1338555 Test Loss: 0.1605159 76 | Validation loss decreased (0.136987 --> 0.133856). Saving model ... 
77 | iters: 100, epoch: 4 | loss: 0.9962827 78 | speed: 1.6436s/iter; left time: 12308.9573s 79 | iters: 200, epoch: 4 | loss: 0.9287165 80 | speed: 0.3795s/iter; left time: 2804.2629s 81 | iters: 300, epoch: 4 | loss: 1.0028404 82 | speed: 0.3784s/iter; left time: 2757.8812s 83 | iters: 400, epoch: 4 | loss: 1.0094993 84 | speed: 0.3746s/iter; left time: 2692.6714s 85 | iters: 500, epoch: 4 | loss: 0.9492683 86 | speed: 0.3761s/iter; left time: 2666.0734s 87 | iters: 600, epoch: 4 | loss: 1.0327492 88 | speed: 0.3803s/iter; left time: 2657.8478s 89 | iters: 700, epoch: 4 | loss: 0.9434276 90 | speed: 0.3766s/iter; left time: 2594.4698s 91 | iters: 800, epoch: 4 | loss: 0.9393420 92 | speed: 0.3775s/iter; left time: 2562.9328s 93 | iters: 900, epoch: 4 | loss: 0.9160467 94 | speed: 0.3754s/iter; left time: 2510.9656s 95 | iters: 1000, epoch: 4 | loss: 0.9889235 96 | speed: 0.3758s/iter; left time: 2476.2277s 97 | Epoch: 4 cost time: 409.7990276813507 98 | Epoch: 4, Steps: 1084 | Train Loss: 0.9680050 Vali Loss: 0.1343522 Test Loss: 0.1583552 99 | EarlyStopping counter: 1 out of 3 100 | iters: 100, epoch: 5 | loss: 0.9952707 101 | speed: 1.6325s/iter; left time: 10456.4389s 102 | iters: 200, epoch: 5 | loss: 0.9967039 103 | speed: 0.3761s/iter; left time: 2371.1054s 104 | iters: 300, epoch: 5 | loss: 1.0045438 105 | speed: 0.3763s/iter; left time: 2335.0924s 106 | iters: 400, epoch: 5 | loss: 1.0085441 107 | speed: 0.3763s/iter; left time: 2297.5449s 108 | iters: 500, epoch: 5 | loss: 0.9539873 109 | speed: 0.3769s/iter; left time: 2263.3978s 110 | iters: 600, epoch: 5 | loss: 0.9197999 111 | speed: 0.3792s/iter; left time: 2238.9062s 112 | iters: 700, epoch: 5 | loss: 0.9793725 113 | speed: 0.3770s/iter; left time: 2188.6997s 114 | iters: 800, epoch: 5 | loss: 0.9290710 115 | speed: 0.3754s/iter; left time: 2141.8993s 116 | iters: 900, epoch: 5 | loss: 0.9801431 117 | speed: 0.3752s/iter; left time: 2102.9730s 118 | iters: 1000, epoch: 5 | loss: 0.9757313 119 | speed: 0.3752s/iter; left time: 2065.3404s 120 | Epoch: 5 cost time: 409.1346185207367 121 | Epoch: 5, Steps: 1084 | Train Loss: 0.9535378 Vali Loss: 0.1338325 Test Loss: 0.1604696 122 | Validation loss decreased (0.133856 --> 0.133833). Saving model ... 123 | iters: 100, epoch: 6 | loss: 0.9402398 124 | speed: 1.6360s/iter; left time: 8705.4008s 125 | iters: 200, epoch: 6 | loss: 0.9548669 126 | speed: 0.3749s/iter; left time: 1957.2995s 127 | iters: 300, epoch: 6 | loss: 0.9506098 128 | speed: 0.3744s/iter; left time: 1917.0790s 129 | iters: 400, epoch: 6 | loss: 0.9527354 130 | speed: 0.3758s/iter; left time: 1886.9794s 131 | iters: 500, epoch: 6 | loss: 0.9242906 132 | speed: 0.3777s/iter; left time: 1858.8866s 133 | iters: 600, epoch: 6 | loss: 0.9367330 134 | speed: 0.3765s/iter; left time: 1814.8975s 135 | iters: 700, epoch: 6 | loss: 0.9435893 136 | speed: 0.3763s/iter; left time: 1776.6965s 137 | iters: 800, epoch: 6 | loss: 1.0304822 138 | speed: 0.3747s/iter; left time: 1731.2903s 139 | iters: 900, epoch: 6 | loss: 0.8914039 140 | speed: 0.3773s/iter; left time: 1705.8391s 141 | iters: 1000, epoch: 6 | loss: 0.8949940 142 | speed: 0.3742s/iter; left time: 1654.4842s 143 | Epoch: 6 cost time: 408.54527759552 144 | Epoch: 6, Steps: 1084 | Train Loss: 0.9458623 Vali Loss: 0.1333656 Test Loss: 0.1605548 145 | Validation loss decreased (0.133833 --> 0.133366). Saving model ... 
146 | iters: 100, epoch: 7 | loss: 0.9402397 147 | speed: 1.6253s/iter; left time: 6886.2493s 148 | iters: 200, epoch: 7 | loss: 0.9550884 149 | speed: 0.3731s/iter; left time: 1543.4088s 150 | iters: 300, epoch: 7 | loss: 0.9715421 151 | speed: 0.3730s/iter; left time: 1505.6805s 152 | iters: 400, epoch: 7 | loss: 0.9647000 153 | speed: 0.3754s/iter; left time: 1477.9736s 154 | iters: 500, epoch: 7 | loss: 0.9203836 155 | speed: 0.3740s/iter; left time: 1435.1524s 156 | iters: 600, epoch: 7 | loss: 0.9551678 157 | speed: 0.3755s/iter; left time: 1403.2572s 158 | iters: 700, epoch: 7 | loss: 0.9516637 159 | speed: 0.3765s/iter; left time: 1369.4907s 160 | iters: 800, epoch: 7 | loss: 0.9481534 161 | speed: 0.3751s/iter; left time: 1326.5979s 162 | iters: 900, epoch: 7 | loss: 0.9191537 163 | speed: 0.3758s/iter; left time: 1291.6538s 164 | iters: 1000, epoch: 7 | loss: 0.9390377 165 | speed: 0.3746s/iter; left time: 1250.1220s 166 | Epoch: 7 cost time: 407.6695485115051 167 | Epoch: 7, Steps: 1084 | Train Loss: 0.9417982 Vali Loss: 0.1339721 Test Loss: 0.1606641 168 | EarlyStopping counter: 1 out of 3 169 | iters: 100, epoch: 8 | loss: 0.9517977 170 | speed: 1.6438s/iter; left time: 5182.9454s 171 | iters: 200, epoch: 8 | loss: 0.9963180 172 | speed: 0.3739s/iter; left time: 1141.4607s 173 | iters: 300, epoch: 8 | loss: 0.8974003 174 | speed: 0.3760s/iter; left time: 1110.3130s 175 | iters: 400, epoch: 8 | loss: 0.9675958 176 | speed: 0.3759s/iter; left time: 1072.5512s 177 | iters: 500, epoch: 8 | loss: 0.9305396 178 | speed: 0.3737s/iter; left time: 1028.7439s 179 | iters: 600, epoch: 8 | loss: 0.9402598 180 | speed: 0.3737s/iter; left time: 991.3006s 181 | iters: 700, epoch: 8 | loss: 0.9064201 182 | speed: 0.3749s/iter; left time: 957.1850s 183 | iters: 800, epoch: 8 | loss: 0.9717017 184 | speed: 0.3785s/iter; left time: 928.4871s 185 | iters: 900, epoch: 8 | loss: 0.9431399 186 | speed: 0.3742s/iter; left time: 880.4632s 187 | iters: 1000, epoch: 8 | loss: 0.9876100 188 | speed: 0.3774s/iter; left time: 850.3126s 189 | Epoch: 8 cost time: 408.2786817550659 190 | Epoch: 8, Steps: 1084 | Train Loss: 0.9397900 Vali Loss: 0.1339144 Test Loss: 0.1605178 191 | EarlyStopping counter: 2 out of 3 192 | iters: 100, epoch: 9 | loss: 0.9789438 193 | speed: 1.6428s/iter; left time: 3399.0314s 194 | iters: 200, epoch: 9 | loss: 0.9199869 195 | speed: 0.3769s/iter; left time: 742.1980s 196 | iters: 300, epoch: 9 | loss: 0.9221796 197 | speed: 0.3755s/iter; left time: 701.7518s 198 | iters: 400, epoch: 9 | loss: 0.9358566 199 | speed: 0.3756s/iter; left time: 664.4880s 200 | iters: 500, epoch: 9 | loss: 0.9463706 201 | speed: 0.3760s/iter; left time: 627.4689s 202 | iters: 600, epoch: 9 | loss: 0.9880624 203 | speed: 0.3776s/iter; left time: 592.5081s 204 | iters: 700, epoch: 9 | loss: 0.9378809 205 | speed: 0.3767s/iter; left time: 553.3865s 206 | iters: 800, epoch: 9 | loss: 0.9113635 207 | speed: 0.3754s/iter; left time: 513.9736s 208 | iters: 900, epoch: 9 | loss: 0.9061885 209 | speed: 0.3773s/iter; left time: 478.7967s 210 | iters: 1000, epoch: 9 | loss: 0.9322492 211 | speed: 0.3749s/iter; left time: 438.2830s 212 | Epoch: 9 cost time: 409.11722588539124 213 | Epoch: 9, Steps: 1084 | Train Loss: 0.9386595 Vali Loss: 0.1338458 Test Loss: 0.1600899 214 | EarlyStopping counter: 3 out of 3 215 | Early stopping 216 | >>>>>>>testing : electricity_720_336_TimeBridge_custom_bs16_ftM_sl720_ll48_pl336_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 217 | test 4925 218 | 
test shape: (4925, 1, 336, 321) (4925, 1, 336, 321) 219 | test shape: (4925, 336, 321) (4925, 336, 321) 220 | mse:0.16055546700954437, mae:0.2561715841293335 221 | rmse:0.40069374442100525, mape:0.47098711133003235, mspe:420036.6875 222 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/electricity_0.2_TimeBridge_720.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.2, dropout=0.0, attn_dropout=0.1, batch_size=16, is_training=1, model_id='electricity_720_720', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/electricity/', data_path='electricity.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=720, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=2, stable_len=4, num_p=4, period=24, enc_in=321, d_model=512, n_heads=32, d_ff=512, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0005, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : electricity_720_720_TimeBridge_custom_bs16_ftM_sl720_ll48_pl720_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 16973 6 | val 1913 7 | test 4541 8 | iters: 100, epoch: 1 | loss: 1.8575472 9 | speed: 0.3798s/iter; left time: 3988.6123s 10 | iters: 200, epoch: 1 | loss: 1.7406243 11 | speed: 0.3802s/iter; left time: 3954.8514s 12 | iters: 300, epoch: 1 | loss: 1.6422406 13 | speed: 0.3792s/iter; left time: 3906.3005s 14 | iters: 400, epoch: 1 | loss: 1.5576073 15 | speed: 0.3830s/iter; left time: 3906.5423s 16 | iters: 500, epoch: 1 | loss: 1.5407962 17 | speed: 0.3799s/iter; left time: 3837.2222s 18 | iters: 600, epoch: 1 | loss: 1.5774088 19 | speed: 0.3785s/iter; left time: 3785.0189s 20 | iters: 700, epoch: 1 | loss: 1.5264734 21 | speed: 0.3797s/iter; left time: 3759.5002s 22 | iters: 800, epoch: 1 | loss: 1.5033323 23 | speed: 0.3792s/iter; left time: 3716.9775s 24 | iters: 900, epoch: 1 | loss: 1.5154060 25 | speed: 0.3813s/iter; left time: 3699.0777s 26 | iters: 1000, epoch: 1 | loss: 1.4886056 27 | speed: 0.3811s/iter; left time: 3658.4769s 28 | Epoch: 1 cost time: 403.0110197067261 29 | Epoch: 1, Steps: 1060 | Train Loss: 1.6119723 Vali Loss: 0.1725896 Test Loss: 0.1989039 30 | Validation loss decreased (inf --> 0.172590). Saving model ... 
31 | iters: 100, epoch: 2 | loss: 1.4892294 32 | speed: 1.4848s/iter; left time: 14018.1597s 33 | iters: 200, epoch: 2 | loss: 1.4687402 34 | speed: 0.3798s/iter; left time: 3547.4632s 35 | iters: 300, epoch: 2 | loss: 1.4333724 36 | speed: 0.3797s/iter; left time: 3508.8395s 37 | iters: 400, epoch: 2 | loss: 1.4882748 38 | speed: 0.3805s/iter; left time: 3478.1677s 39 | iters: 500, epoch: 2 | loss: 1.6206709 40 | speed: 0.3776s/iter; left time: 3414.3049s 41 | iters: 600, epoch: 2 | loss: 1.5424714 42 | speed: 0.3838s/iter; left time: 3431.2550s 43 | iters: 700, epoch: 2 | loss: 1.4255441 44 | speed: 0.3789s/iter; left time: 3350.0205s 45 | iters: 800, epoch: 2 | loss: 1.5260915 46 | speed: 0.3789s/iter; left time: 3311.7995s 47 | iters: 900, epoch: 2 | loss: 1.4394743 48 | speed: 0.3821s/iter; left time: 3301.5137s 49 | iters: 1000, epoch: 2 | loss: 1.4078246 50 | speed: 0.3790s/iter; left time: 3237.3652s 51 | Epoch: 2 cost time: 404.4578676223755 52 | Epoch: 2, Steps: 1060 | Train Loss: 1.4678871 Vali Loss: 0.1572216 Test Loss: 0.1839949 53 | Validation loss decreased (0.172590 --> 0.157222). Saving model ... 54 | iters: 100, epoch: 3 | loss: 1.4278905 55 | speed: 1.5577s/iter; left time: 13055.0895s 56 | iters: 200, epoch: 3 | loss: 1.4004182 57 | speed: 0.3783s/iter; left time: 3132.3869s 58 | iters: 300, epoch: 3 | loss: 1.3779012 59 | speed: 0.3798s/iter; left time: 3107.2390s 60 | iters: 400, epoch: 3 | loss: 1.4319087 61 | speed: 0.3777s/iter; left time: 3051.9230s 62 | iters: 500, epoch: 3 | loss: 1.4320084 63 | speed: 0.3820s/iter; left time: 3049.0945s 64 | iters: 600, epoch: 3 | loss: 1.4249562 65 | speed: 0.3785s/iter; left time: 2983.0466s 66 | iters: 700, epoch: 3 | loss: 1.4314028 67 | speed: 0.3802s/iter; left time: 2958.3027s 68 | iters: 800, epoch: 3 | loss: 1.4162693 69 | speed: 0.3786s/iter; left time: 2908.2631s 70 | iters: 900, epoch: 3 | loss: 1.4501890 71 | speed: 0.3799s/iter; left time: 2880.1876s 72 | iters: 1000, epoch: 3 | loss: 1.3912634 73 | speed: 0.3803s/iter; left time: 2845.3252s 74 | Epoch: 3 cost time: 403.8239235877991 75 | Epoch: 3, Steps: 1060 | Train Loss: 1.4137581 Vali Loss: 0.1539578 Test Loss: 0.1753619 76 | Validation loss decreased (0.157222 --> 0.153958). Saving model ... 77 | iters: 100, epoch: 4 | loss: 1.4562997 78 | speed: 1.5786s/iter; left time: 11557.1988s 79 | iters: 200, epoch: 4 | loss: 1.3532044 80 | speed: 0.3804s/iter; left time: 2746.9543s 81 | iters: 300, epoch: 4 | loss: 1.3846483 82 | speed: 0.3793s/iter; left time: 2700.6866s 83 | iters: 400, epoch: 4 | loss: 1.3982836 84 | speed: 0.3789s/iter; left time: 2660.3967s 85 | iters: 500, epoch: 4 | loss: 1.3134086 86 | speed: 0.3774s/iter; left time: 2612.2901s 87 | iters: 600, epoch: 4 | loss: 1.3606663 88 | speed: 0.3774s/iter; left time: 2574.2299s 89 | iters: 700, epoch: 4 | loss: 1.4257498 90 | speed: 0.3786s/iter; left time: 2544.7185s 91 | iters: 800, epoch: 4 | loss: 1.3895190 92 | speed: 0.3819s/iter; left time: 2528.3567s 93 | iters: 900, epoch: 4 | loss: 1.3977014 94 | speed: 0.3789s/iter; left time: 2470.9349s 95 | iters: 1000, epoch: 4 | loss: 1.4278585 96 | speed: 0.3779s/iter; left time: 2426.6951s 97 | Epoch: 4 cost time: 403.8999996185303 98 | Epoch: 4, Steps: 1060 | Train Loss: 1.3859361 Vali Loss: 0.1524289 Test Loss: 0.1722942 99 | Validation loss decreased (0.153958 --> 0.152429). Saving model ... 
100 | iters: 100, epoch: 5 | loss: 1.3147520 101 | speed: 1.5676s/iter; left time: 9814.6017s 102 | iters: 200, epoch: 5 | loss: 1.4185503 103 | speed: 0.3765s/iter; left time: 2319.8240s 104 | iters: 300, epoch: 5 | loss: 1.4571093 105 | speed: 0.3781s/iter; left time: 2291.5832s 106 | iters: 400, epoch: 5 | loss: 1.4259722 107 | speed: 0.3777s/iter; left time: 2251.3927s 108 | iters: 500, epoch: 5 | loss: 1.4074492 109 | speed: 0.3806s/iter; left time: 2230.6428s 110 | iters: 600, epoch: 5 | loss: 1.3971630 111 | speed: 0.3801s/iter; left time: 2189.9302s 112 | iters: 700, epoch: 5 | loss: 1.4259245 113 | speed: 0.3791s/iter; left time: 2145.9027s 114 | iters: 800, epoch: 5 | loss: 1.3425040 115 | speed: 0.3770s/iter; left time: 2096.4007s 116 | iters: 900, epoch: 5 | loss: 1.3696368 117 | speed: 0.3769s/iter; left time: 2058.2992s 118 | iters: 1000, epoch: 5 | loss: 1.3536730 119 | speed: 0.3787s/iter; left time: 2030.0041s 120 | Epoch: 5 cost time: 402.8865325450897 121 | Epoch: 5, Steps: 1060 | Train Loss: 1.3702628 Vali Loss: 0.1519585 Test Loss: 0.1718093 122 | Validation loss decreased (0.152429 --> 0.151959). Saving model ... 123 | iters: 100, epoch: 6 | loss: 1.4357544 124 | speed: 1.5656s/iter; left time: 8142.8820s 125 | iters: 200, epoch: 6 | loss: 1.2958198 126 | speed: 0.3809s/iter; left time: 1942.9702s 127 | iters: 300, epoch: 6 | loss: 1.3653457 128 | speed: 0.3769s/iter; left time: 1884.6511s 129 | iters: 400, epoch: 6 | loss: 1.3587708 130 | speed: 0.3801s/iter; left time: 1862.6557s 131 | iters: 500, epoch: 6 | loss: 1.3095691 132 | speed: 0.3784s/iter; left time: 1816.8861s 133 | iters: 600, epoch: 6 | loss: 1.3359917 134 | speed: 0.3824s/iter; left time: 1797.5820s 135 | iters: 700, epoch: 6 | loss: 1.3050537 136 | speed: 0.3797s/iter; left time: 1747.2223s 137 | iters: 800, epoch: 6 | loss: 1.3520308 138 | speed: 0.3779s/iter; left time: 1701.1079s 139 | iters: 900, epoch: 6 | loss: 1.3647258 140 | speed: 0.3802s/iter; left time: 1673.1548s 141 | iters: 1000, epoch: 6 | loss: 1.3429163 142 | speed: 0.3782s/iter; left time: 1626.7648s 143 | Epoch: 6 cost time: 403.8787667751312 144 | Epoch: 6, Steps: 1060 | Train Loss: 1.3612166 Vali Loss: 0.1512933 Test Loss: 0.1715882 145 | Validation loss decreased (0.151959 --> 0.151293). Saving model ... 
146 | iters: 100, epoch: 7 | loss: 1.3246553 147 | speed: 1.5661s/iter; left time: 6485.0951s 148 | iters: 200, epoch: 7 | loss: 1.3308483 149 | speed: 0.3787s/iter; left time: 1530.4538s 150 | iters: 300, epoch: 7 | loss: 1.3712363 151 | speed: 0.3782s/iter; left time: 1490.4002s 152 | iters: 400, epoch: 7 | loss: 1.3259665 153 | speed: 0.3795s/iter; left time: 1457.6597s 154 | iters: 500, epoch: 7 | loss: 1.3399678 155 | speed: 0.3796s/iter; left time: 1420.2497s 156 | iters: 600, epoch: 7 | loss: 1.3226643 157 | speed: 0.3804s/iter; left time: 1385.1482s 158 | iters: 700, epoch: 7 | loss: 1.3837372 159 | speed: 0.3796s/iter; left time: 1344.2374s 160 | iters: 800, epoch: 7 | loss: 1.3394588 161 | speed: 0.3796s/iter; left time: 1306.1643s 162 | iters: 900, epoch: 7 | loss: 1.3357426 163 | speed: 0.3791s/iter; left time: 1266.4165s 164 | iters: 1000, epoch: 7 | loss: 1.3122449 165 | speed: 0.3787s/iter; left time: 1227.3910s 166 | Epoch: 7 cost time: 403.9585494995117 167 | Epoch: 7, Steps: 1060 | Train Loss: 1.3563932 Vali Loss: 0.1515195 Test Loss: 0.1718359 168 | EarlyStopping counter: 1 out of 3 169 | iters: 100, epoch: 8 | loss: 1.3328854 170 | speed: 1.5673s/iter; left time: 4828.9735s 171 | iters: 200, epoch: 8 | loss: 1.3673311 172 | speed: 0.3787s/iter; left time: 1128.7658s 173 | iters: 300, epoch: 8 | loss: 1.3317980 174 | speed: 0.3802s/iter; left time: 1095.2580s 175 | iters: 400, epoch: 8 | loss: 1.3445582 176 | speed: 0.3818s/iter; left time: 1061.7083s 177 | iters: 500, epoch: 8 | loss: 1.3339078 178 | speed: 0.3782s/iter; left time: 1014.0613s 179 | iters: 600, epoch: 8 | loss: 1.3251454 180 | speed: 0.3795s/iter; left time: 979.5510s 181 | iters: 700, epoch: 8 | loss: 1.3661069 182 | speed: 0.3764s/iter; left time: 933.9287s 183 | iters: 800, epoch: 8 | loss: 1.3264189 184 | speed: 0.3779s/iter; left time: 899.6704s 185 | iters: 900, epoch: 8 | loss: 1.3590933 186 | speed: 0.3785s/iter; left time: 863.4426s 187 | iters: 1000, epoch: 8 | loss: 1.3837881 188 | speed: 0.3766s/iter; left time: 821.2621s 189 | Epoch: 8 cost time: 403.01345562934875 190 | Epoch: 8, Steps: 1060 | Train Loss: 1.3538900 Vali Loss: 0.1514414 Test Loss: 0.1714421 191 | EarlyStopping counter: 2 out of 3 192 | iters: 100, epoch: 9 | loss: 1.3589073 193 | speed: 1.5598s/iter; left time: 3152.3059s 194 | iters: 200, epoch: 9 | loss: 1.3442584 195 | speed: 0.3776s/iter; left time: 725.2965s 196 | iters: 300, epoch: 9 | loss: 1.3373181 197 | speed: 0.3786s/iter; left time: 689.4352s 198 | iters: 400, epoch: 9 | loss: 1.3693327 199 | speed: 0.3785s/iter; left time: 651.3236s 200 | iters: 500, epoch: 9 | loss: 1.3379165 201 | speed: 0.3794s/iter; left time: 614.9596s 202 | iters: 600, epoch: 9 | loss: 1.3384202 203 | speed: 0.3821s/iter; left time: 581.2077s 204 | iters: 700, epoch: 9 | loss: 1.3197486 205 | speed: 0.3820s/iter; left time: 542.8201s 206 | iters: 800, epoch: 9 | loss: 1.3629313 207 | speed: 0.3798s/iter; left time: 501.7504s 208 | iters: 900, epoch: 9 | loss: 1.2923871 209 | speed: 0.3775s/iter; left time: 460.9241s 210 | iters: 1000, epoch: 9 | loss: 1.3646375 211 | speed: 0.3772s/iter; left time: 422.7983s 212 | Epoch: 9 cost time: 403.43151092529297 213 | Epoch: 9, Steps: 1060 | Train Loss: 1.3525190 Vali Loss: 0.1513545 Test Loss: 0.1713949 214 | EarlyStopping counter: 3 out of 3 215 | Early stopping 216 | >>>>>>>testing : electricity_720_720_TimeBridge_custom_bs16_ftM_sl720_ll48_pl720_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 217 | test 4541 218 | 
test shape: (4541, 1, 720, 321) (4541, 1, 720, 321) 219 | test shape: (4541, 720, 321) (4541, 720, 321) 220 | mse:0.17158843576908112, mae:0.2654167711734772 221 | rmse:0.4142323434352875, mape:0.4833582639694214, mspe:409737.9375 222 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh2_0.35_TimeBridge_192.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=16, is_training=1, model_id='ETTh2_720_192', model='TimeBridge', data='ETTh2', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh2.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=192, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=48, enc_in=7, d_model=128, n_heads=4, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=15, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh2_720_192_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl192_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7729 6 | val 2689 7 | test 2689 8 | iters: 100, epoch: 1 | loss: 1.6176045 9 | speed: 0.0304s/iter; left time: 1463.6555s 10 | iters: 200, epoch: 1 | loss: 1.4048089 11 | speed: 0.0173s/iter; left time: 831.4750s 12 | iters: 300, epoch: 1 | loss: 1.4329969 13 | speed: 0.0175s/iter; left time: 841.8556s 14 | iters: 400, epoch: 1 | loss: 1.8072054 15 | speed: 0.0187s/iter; left time: 894.5607s 16 | Epoch: 1 cost time: 9.941578149795532 17 | Epoch: 1, Steps: 483 | Train Loss: 1.6984977 Vali Loss: 0.3046491 Test Loss: 0.3593386 18 | Validation loss decreased (inf --> 0.304649). Saving model ... 19 | iters: 100, epoch: 2 | loss: 1.5241972 20 | speed: 0.1970s/iter; left time: 9398.1168s 21 | iters: 200, epoch: 2 | loss: 2.2620838 22 | speed: 0.0175s/iter; left time: 834.8337s 23 | iters: 300, epoch: 2 | loss: 1.5634661 24 | speed: 0.0176s/iter; left time: 837.4428s 25 | iters: 400, epoch: 2 | loss: 1.4457420 26 | speed: 0.0179s/iter; left time: 848.7692s 27 | Epoch: 2 cost time: 9.0196373462677 28 | Epoch: 2, Steps: 483 | Train Loss: 1.5719412 Vali Loss: 0.2858847 Test Loss: 0.3440916 29 | Validation loss decreased (0.304649 --> 0.285885). Saving model ... 30 | iters: 100, epoch: 3 | loss: 1.2619221 31 | speed: 0.2052s/iter; left time: 9691.3833s 32 | iters: 200, epoch: 3 | loss: 1.4702104 33 | speed: 0.0231s/iter; left time: 1087.9879s 34 | iters: 300, epoch: 3 | loss: 1.6433672 35 | speed: 0.0224s/iter; left time: 1054.9446s 36 | iters: 400, epoch: 3 | loss: 1.3639381 37 | speed: 0.0227s/iter; left time: 1066.4323s 38 | Epoch: 3 cost time: 11.42017650604248 39 | Epoch: 3, Steps: 483 | Train Loss: 1.5336682 Vali Loss: 0.2836910 Test Loss: 0.3371258 40 | Validation loss decreased (0.285885 --> 0.283691). Saving model ... 
41 | iters: 100, epoch: 4 | loss: 1.2893137 42 | speed: 0.2103s/iter; left time: 9833.8476s 43 | iters: 200, epoch: 4 | loss: 1.2478747 44 | speed: 0.0193s/iter; left time: 900.2219s 45 | iters: 300, epoch: 4 | loss: 1.5448781 46 | speed: 0.0184s/iter; left time: 858.5129s 47 | iters: 400, epoch: 4 | loss: 1.5506711 48 | speed: 0.0186s/iter; left time: 866.2703s 49 | Epoch: 4 cost time: 9.60214376449585 50 | Epoch: 4, Steps: 483 | Train Loss: 1.5189798 Vali Loss: 0.2775283 Test Loss: 0.3351457 51 | Validation loss decreased (0.283691 --> 0.277528). Saving model ... 52 | iters: 100, epoch: 5 | loss: 2.0356884 53 | speed: 0.1958s/iter; left time: 9059.3756s 54 | iters: 200, epoch: 5 | loss: 1.4387221 55 | speed: 0.0212s/iter; left time: 977.6883s 56 | iters: 300, epoch: 5 | loss: 1.2084324 57 | speed: 0.0197s/iter; left time: 909.3696s 58 | iters: 400, epoch: 5 | loss: 1.9275556 59 | speed: 0.0211s/iter; left time: 968.6321s 60 | Epoch: 5 cost time: 10.58309006690979 61 | Epoch: 5, Steps: 483 | Train Loss: 1.5123888 Vali Loss: 0.2806154 Test Loss: 0.3340095 62 | EarlyStopping counter: 1 out of 15 63 | iters: 100, epoch: 6 | loss: 1.5896931 64 | speed: 0.2116s/iter; left time: 9686.7273s 65 | iters: 200, epoch: 6 | loss: 1.4464663 66 | speed: 0.0175s/iter; left time: 800.2642s 67 | iters: 300, epoch: 6 | loss: 1.3367112 68 | speed: 0.0174s/iter; left time: 792.5984s 69 | iters: 400, epoch: 6 | loss: 1.5931861 70 | speed: 0.0175s/iter; left time: 797.4637s 71 | Epoch: 6 cost time: 9.192549467086792 72 | Epoch: 6, Steps: 483 | Train Loss: 1.5087724 Vali Loss: 0.2806478 Test Loss: 0.3343166 73 | EarlyStopping counter: 2 out of 15 74 | iters: 100, epoch: 7 | loss: 1.7364004 75 | speed: 0.2006s/iter; left time: 9086.5897s 76 | iters: 200, epoch: 7 | loss: 1.3394046 77 | speed: 0.0205s/iter; left time: 925.4341s 78 | iters: 300, epoch: 7 | loss: 1.4412032 79 | speed: 0.0209s/iter; left time: 940.9436s 80 | iters: 400, epoch: 7 | loss: 1.3084809 81 | speed: 0.0223s/iter; left time: 1004.2901s 82 | Epoch: 7 cost time: 10.761375665664673 83 | Epoch: 7, Steps: 483 | Train Loss: 1.5066932 Vali Loss: 0.2785324 Test Loss: 0.3333772 84 | EarlyStopping counter: 3 out of 15 85 | iters: 100, epoch: 8 | loss: 1.5109792 86 | speed: 0.2054s/iter; left time: 9208.1803s 87 | iters: 200, epoch: 8 | loss: 1.1548090 88 | speed: 0.0199s/iter; left time: 888.0598s 89 | iters: 300, epoch: 8 | loss: 1.5226994 90 | speed: 0.0200s/iter; left time: 892.3470s 91 | iters: 400, epoch: 8 | loss: 1.7359309 92 | speed: 0.0199s/iter; left time: 886.6597s 93 | Epoch: 8 cost time: 10.166747331619263 94 | Epoch: 8, Steps: 483 | Train Loss: 1.5058559 Vali Loss: 0.2797410 Test Loss: 0.3336249 95 | EarlyStopping counter: 4 out of 15 96 | iters: 100, epoch: 9 | loss: 1.3201541 97 | speed: 0.2152s/iter; left time: 9543.2068s 98 | iters: 200, epoch: 9 | loss: 1.5197501 99 | speed: 0.0192s/iter; left time: 850.0866s 100 | iters: 300, epoch: 9 | loss: 1.4723208 101 | speed: 0.0192s/iter; left time: 845.4364s 102 | iters: 400, epoch: 9 | loss: 1.5643225 103 | speed: 0.0178s/iter; left time: 782.9751s 104 | Epoch: 9 cost time: 9.583164691925049 105 | Epoch: 9, Steps: 483 | Train Loss: 1.5054512 Vali Loss: 0.2800554 Test Loss: 0.3335157 106 | EarlyStopping counter: 5 out of 15 107 | iters: 100, epoch: 10 | loss: 1.3378445 108 | speed: 0.1999s/iter; left time: 8767.0887s 109 | iters: 200, epoch: 10 | loss: 1.8759689 110 | speed: 0.0217s/iter; left time: 949.3718s 111 | iters: 300, epoch: 10 | loss: 1.4199514 112 | speed: 0.0203s/iter; left time: 
884.0709s 113 | iters: 400, epoch: 10 | loss: 1.5040090 114 | speed: 0.0200s/iter; left time: 870.2520s 115 | Epoch: 10 cost time: 10.364181280136108 116 | Epoch: 10, Steps: 483 | Train Loss: 1.5054024 Vali Loss: 0.2799874 Test Loss: 0.3333082 117 | EarlyStopping counter: 6 out of 15 118 | iters: 100, epoch: 11 | loss: 1.4196495 119 | speed: 0.2079s/iter; left time: 9016.4837s 120 | iters: 200, epoch: 11 | loss: 1.6779906 121 | speed: 0.0161s/iter; left time: 697.5145s 122 | iters: 300, epoch: 11 | loss: 1.4479866 123 | speed: 0.0183s/iter; left time: 788.5930s 124 | iters: 400, epoch: 11 | loss: 1.6324550 125 | speed: 0.0244s/iter; left time: 1051.9976s 126 | Epoch: 11 cost time: 9.949428796768188 127 | Epoch: 11, Steps: 483 | Train Loss: 1.5049539 Vali Loss: 0.2799321 Test Loss: 0.3333190 128 | EarlyStopping counter: 7 out of 15 129 | iters: 100, epoch: 12 | loss: 1.5535282 130 | speed: 0.2167s/iter; left time: 9293.6658s 131 | iters: 200, epoch: 12 | loss: 1.4525486 132 | speed: 0.0223s/iter; left time: 954.2159s 133 | iters: 300, epoch: 12 | loss: 1.3856951 134 | speed: 0.0215s/iter; left time: 919.6689s 135 | iters: 400, epoch: 12 | loss: 1.8053498 136 | speed: 0.0200s/iter; left time: 852.0657s 137 | Epoch: 12 cost time: 10.969479084014893 138 | Epoch: 12, Steps: 483 | Train Loss: 1.5048940 Vali Loss: 0.2798794 Test Loss: 0.3332984 139 | EarlyStopping counter: 8 out of 15 140 | iters: 100, epoch: 13 | loss: 1.7289269 141 | speed: 0.2134s/iter; left time: 9050.2764s 142 | iters: 200, epoch: 13 | loss: 1.4292418 143 | speed: 0.0218s/iter; left time: 923.0586s 144 | iters: 300, epoch: 13 | loss: 1.1844950 145 | speed: 0.0209s/iter; left time: 880.7520s 146 | iters: 400, epoch: 13 | loss: 1.8365448 147 | speed: 0.0188s/iter; left time: 792.1992s 148 | Epoch: 13 cost time: 10.34959602355957 149 | Epoch: 13, Steps: 483 | Train Loss: 1.5049557 Vali Loss: 0.2798770 Test Loss: 0.3333187 150 | EarlyStopping counter: 9 out of 15 151 | iters: 100, epoch: 14 | loss: 1.2329592 152 | speed: 0.2161s/iter; left time: 9060.4322s 153 | iters: 200, epoch: 14 | loss: 1.3752447 154 | speed: 0.0190s/iter; left time: 795.1048s 155 | iters: 300, epoch: 14 | loss: 1.4555092 156 | speed: 0.0201s/iter; left time: 839.5380s 157 | iters: 400, epoch: 14 | loss: 1.5314350 158 | speed: 0.0201s/iter; left time: 835.2671s 159 | Epoch: 14 cost time: 10.351623773574829 160 | Epoch: 14, Steps: 483 | Train Loss: 1.5052283 Vali Loss: 0.2798263 Test Loss: 0.3333158 161 | EarlyStopping counter: 10 out of 15 162 | iters: 100, epoch: 15 | loss: 1.3640132 163 | speed: 0.2120s/iter; left time: 8785.7611s 164 | iters: 200, epoch: 15 | loss: 1.7178735 165 | speed: 0.0227s/iter; left time: 938.4266s 166 | iters: 300, epoch: 15 | loss: 1.3450797 167 | speed: 0.0201s/iter; left time: 828.5769s 168 | iters: 400, epoch: 15 | loss: 1.7608089 169 | speed: 0.0212s/iter; left time: 871.8770s 170 | Epoch: 15 cost time: 10.739431142807007 171 | Epoch: 15, Steps: 483 | Train Loss: 1.5047850 Vali Loss: 0.2795727 Test Loss: 0.3333139 172 | EarlyStopping counter: 11 out of 15 173 | iters: 100, epoch: 16 | loss: 1.3679222 174 | speed: 0.2088s/iter; left time: 8550.6024s 175 | iters: 200, epoch: 16 | loss: 1.4260951 176 | speed: 0.0194s/iter; left time: 792.3215s 177 | iters: 300, epoch: 16 | loss: 1.4675331 178 | speed: 0.0195s/iter; left time: 796.1387s 179 | iters: 400, epoch: 16 | loss: 1.5333643 180 | speed: 0.0198s/iter; left time: 804.5638s 181 | Epoch: 16 cost time: 9.929117918014526 182 | Epoch: 16, Steps: 483 | Train Loss: 1.5049538 Vali 
Loss: 0.2798105 Test Loss: 0.3333146 183 | EarlyStopping counter: 12 out of 15 184 | iters: 100, epoch: 17 | loss: 1.2263255 185 | speed: 0.2073s/iter; left time: 8388.2621s 186 | iters: 200, epoch: 17 | loss: 1.7635957 187 | speed: 0.0200s/iter; left time: 809.0600s 188 | iters: 300, epoch: 17 | loss: 1.5614697 189 | speed: 0.0189s/iter; left time: 762.0510s 190 | iters: 400, epoch: 17 | loss: 1.6309401 191 | speed: 0.0195s/iter; left time: 782.4010s 192 | Epoch: 17 cost time: 10.118348836898804 193 | Epoch: 17, Steps: 483 | Train Loss: 1.5048709 Vali Loss: 0.2797926 Test Loss: 0.3333148 194 | EarlyStopping counter: 13 out of 15 195 | iters: 100, epoch: 18 | loss: 1.4083792 196 | speed: 0.2014s/iter; left time: 8053.8760s 197 | iters: 200, epoch: 18 | loss: 1.3590299 198 | speed: 0.0230s/iter; left time: 916.8935s 199 | iters: 300, epoch: 18 | loss: 1.8106625 200 | speed: 0.0222s/iter; left time: 884.7634s 201 | iters: 400, epoch: 18 | loss: 1.3441499 202 | speed: 0.0238s/iter; left time: 944.1972s 203 | Epoch: 18 cost time: 11.55156397819519 204 | Epoch: 18, Steps: 483 | Train Loss: 1.5047944 Vali Loss: 0.2794898 Test Loss: 0.3333148 205 | EarlyStopping counter: 14 out of 15 206 | iters: 100, epoch: 19 | loss: 1.3240416 207 | speed: 0.2036s/iter; left time: 8043.7772s 208 | iters: 200, epoch: 19 | loss: 1.2445635 209 | speed: 0.0227s/iter; left time: 894.3507s 210 | iters: 300, epoch: 19 | loss: 1.7277935 211 | speed: 0.0229s/iter; left time: 899.3708s 212 | iters: 400, epoch: 19 | loss: 1.4704092 213 | speed: 0.0205s/iter; left time: 801.8026s 214 | Epoch: 19 cost time: 10.516722917556763 215 | Epoch: 19, Steps: 483 | Train Loss: 1.5048961 Vali Loss: 0.2798707 Test Loss: 0.3333148 216 | EarlyStopping counter: 15 out of 15 217 | Early stopping 218 | >>>>>>>testing : ETTh2_720_192_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl192_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 219 | test 2689 220 | test shape: (2689, 1, 192, 7) (2689, 1, 192, 7) 221 | test shape: (2689, 192, 7) (2689, 192, 7) 222 | mse:0.3351455628871918, mae:0.37173473834991455 223 | rmse:0.5789175629615784, mape:0.5247182846069336, mspe:276.55609130859375 224 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/weather_0.1_TimeBridge_96.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.1, dropout=0.0, attn_dropout=0.15, batch_size=32, is_training=1, model_id='weather_720_96', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/weather/', data_path='weather.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=96, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=1, stable_len=6, num_p=12, period=48, enc_in=21, d_model=128, n_heads=8, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : weather_720_96_TimeBridge_custom_bs32_ftM_sl720_ll48_pl96_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 36072 6 | val 5175 7 | test 10444 8 | iters: 100, epoch: 1 | loss: 0.6217725 9 | speed: 0.0363s/iter; left time: 405.0359s 
10 | iters: 200, epoch: 1 | loss: 0.5075015 11 | speed: 0.0201s/iter; left time: 222.0445s 12 | iters: 300, epoch: 1 | loss: 0.4899595 13 | speed: 0.0214s/iter; left time: 235.1641s 14 | iters: 400, epoch: 1 | loss: 0.5040983 15 | speed: 0.0204s/iter; left time: 222.2427s 16 | iters: 500, epoch: 1 | loss: 0.4389344 17 | speed: 0.0204s/iter; left time: 219.6920s 18 | iters: 600, epoch: 1 | loss: 0.4528927 19 | speed: 0.0207s/iter; left time: 221.3718s 20 | iters: 700, epoch: 1 | loss: 0.4184795 21 | speed: 0.0199s/iter; left time: 210.8178s 22 | iters: 800, epoch: 1 | loss: 0.4459611 23 | speed: 0.0206s/iter; left time: 215.5022s 24 | iters: 900, epoch: 1 | loss: 0.5321121 25 | speed: 0.0196s/iter; left time: 203.1042s 26 | iters: 1000, epoch: 1 | loss: 0.4346310 27 | speed: 0.0198s/iter; left time: 203.2976s 28 | iters: 1100, epoch: 1 | loss: 0.4536731 29 | speed: 0.0221s/iter; left time: 224.8221s 30 | Epoch: 1 cost time: 24.76050114631653 31 | Epoch: 1, Steps: 1127 | Train Loss: 0.5026792 Vali Loss: 0.3999601 Test Loss: 0.1524574 32 | Validation loss decreased (inf --> 0.399960). Saving model ... 33 | iters: 100, epoch: 2 | loss: 0.4727573 34 | speed: 0.5641s/iter; left time: 5665.9540s 35 | iters: 200, epoch: 2 | loss: 0.4226471 36 | speed: 0.0205s/iter; left time: 203.8822s 37 | iters: 300, epoch: 2 | loss: 0.5126123 38 | speed: 0.0208s/iter; left time: 205.2120s 39 | iters: 400, epoch: 2 | loss: 0.4186696 40 | speed: 0.0192s/iter; left time: 187.3412s 41 | iters: 500, epoch: 2 | loss: 0.4717515 42 | speed: 0.0200s/iter; left time: 192.7538s 43 | iters: 600, epoch: 2 | loss: 0.5062536 44 | speed: 0.0213s/iter; left time: 203.6447s 45 | iters: 700, epoch: 2 | loss: 0.4796877 46 | speed: 0.0207s/iter; left time: 195.8576s 47 | iters: 800, epoch: 2 | loss: 0.5002275 48 | speed: 0.0208s/iter; left time: 194.7255s 49 | iters: 900, epoch: 2 | loss: 0.4188137 50 | speed: 0.0202s/iter; left time: 186.5096s 51 | iters: 1000, epoch: 2 | loss: 0.4768677 52 | speed: 0.0204s/iter; left time: 186.2476s 53 | iters: 1100, epoch: 2 | loss: 0.3575133 54 | speed: 0.0215s/iter; left time: 194.6007s 55 | Epoch: 2 cost time: 23.777212858200073 56 | Epoch: 2, Steps: 1127 | Train Loss: 0.4522950 Vali Loss: 0.3913462 Test Loss: 0.1461549 57 | Validation loss decreased (0.399960 --> 0.391346). Saving model ... 
58 | iters: 100, epoch: 3 | loss: 0.4497178 59 | speed: 0.5521s/iter; left time: 4922.6900s 60 | iters: 200, epoch: 3 | loss: 0.4440536 61 | speed: 0.0196s/iter; left time: 172.8241s 62 | iters: 300, epoch: 3 | loss: 0.3963219 63 | speed: 0.0214s/iter; left time: 186.2407s 64 | iters: 400, epoch: 3 | loss: 0.4308885 65 | speed: 0.0208s/iter; left time: 178.9930s 66 | iters: 500, epoch: 3 | loss: 0.4349045 67 | speed: 0.0203s/iter; left time: 173.2122s 68 | iters: 600, epoch: 3 | loss: 0.3740225 69 | speed: 0.0207s/iter; left time: 174.1340s 70 | iters: 700, epoch: 3 | loss: 0.4745664 71 | speed: 0.0199s/iter; left time: 165.4525s 72 | iters: 800, epoch: 3 | loss: 0.4599711 73 | speed: 0.0215s/iter; left time: 176.7068s 74 | iters: 900, epoch: 3 | loss: 0.4645935 75 | speed: 0.0214s/iter; left time: 173.9399s 76 | iters: 1000, epoch: 3 | loss: 0.4150046 77 | speed: 0.0253s/iter; left time: 202.4461s 78 | iters: 1100, epoch: 3 | loss: 0.4285727 79 | speed: 0.0243s/iter; left time: 192.5395s 80 | Epoch: 3 cost time: 24.58201265335083 81 | Epoch: 3, Steps: 1127 | Train Loss: 0.4415652 Vali Loss: 0.3919630 Test Loss: 0.1445887 82 | EarlyStopping counter: 1 out of 3 83 | iters: 100, epoch: 4 | loss: 0.4204171 84 | speed: 0.5649s/iter; left time: 4400.3377s 85 | iters: 200, epoch: 4 | loss: 0.4714330 86 | speed: 0.0215s/iter; left time: 165.6902s 87 | iters: 300, epoch: 4 | loss: 0.3613860 88 | speed: 0.0207s/iter; left time: 156.7373s 89 | iters: 400, epoch: 4 | loss: 0.3866323 90 | speed: 0.0194s/iter; left time: 145.0718s 91 | iters: 500, epoch: 4 | loss: 0.3536577 92 | speed: 0.0199s/iter; left time: 146.8462s 93 | iters: 600, epoch: 4 | loss: 0.5045805 94 | speed: 0.0203s/iter; left time: 147.9818s 95 | iters: 700, epoch: 4 | loss: 0.4423442 96 | speed: 0.0215s/iter; left time: 154.6098s 97 | iters: 800, epoch: 4 | loss: 0.4994688 98 | speed: 0.0213s/iter; left time: 150.9374s 99 | iters: 900, epoch: 4 | loss: 0.4701785 100 | speed: 0.0213s/iter; left time: 148.8381s 101 | iters: 1000, epoch: 4 | loss: 0.4102494 102 | speed: 0.0204s/iter; left time: 140.5128s 103 | iters: 1100, epoch: 4 | loss: 0.4746871 104 | speed: 0.0205s/iter; left time: 139.2253s 105 | Epoch: 4 cost time: 23.912768602371216 106 | Epoch: 4, Steps: 1127 | Train Loss: 0.4368870 Vali Loss: 0.3897519 Test Loss: 0.1435655 107 | Validation loss decreased (0.391346 --> 0.389752). Saving model ... 
108 | iters: 100, epoch: 5 | loss: 0.4064460 109 | speed: 0.5531s/iter; left time: 3685.5775s 110 | iters: 200, epoch: 5 | loss: 0.4024847 111 | speed: 0.0211s/iter; left time: 138.6629s 112 | iters: 300, epoch: 5 | loss: 0.4009468 113 | speed: 0.0207s/iter; left time: 134.0063s 114 | iters: 400, epoch: 5 | loss: 0.3629128 115 | speed: 0.0203s/iter; left time: 129.3044s 116 | iters: 500, epoch: 5 | loss: 0.3706598 117 | speed: 0.0200s/iter; left time: 125.2191s 118 | iters: 600, epoch: 5 | loss: 0.4660573 119 | speed: 0.0199s/iter; left time: 122.7146s 120 | iters: 700, epoch: 5 | loss: 0.4446594 121 | speed: 0.0198s/iter; left time: 120.1439s 122 | iters: 800, epoch: 5 | loss: 0.3436044 123 | speed: 0.0203s/iter; left time: 121.0986s 124 | iters: 900, epoch: 5 | loss: 0.4105908 125 | speed: 0.0191s/iter; left time: 111.9597s 126 | iters: 1000, epoch: 5 | loss: 0.4279458 127 | speed: 0.0200s/iter; left time: 114.9875s 128 | iters: 1100, epoch: 5 | loss: 0.4311617 129 | speed: 0.0201s/iter; left time: 113.7755s 130 | Epoch: 5 cost time: 23.25914978981018 131 | Epoch: 5, Steps: 1127 | Train Loss: 0.4345717 Vali Loss: 0.3878226 Test Loss: 0.1431904 132 | Validation loss decreased (0.389752 --> 0.387823). Saving model ... 133 | iters: 100, epoch: 6 | loss: 0.4454451 134 | speed: 0.5904s/iter; left time: 3268.6654s 135 | iters: 200, epoch: 6 | loss: 0.4068154 136 | speed: 0.0201s/iter; left time: 109.0414s 137 | iters: 300, epoch: 6 | loss: 0.4014202 138 | speed: 0.0201s/iter; left time: 107.2894s 139 | iters: 400, epoch: 6 | loss: 0.4531605 140 | speed: 0.0208s/iter; left time: 109.0203s 141 | iters: 500, epoch: 6 | loss: 0.5920354 142 | speed: 0.0209s/iter; left time: 107.0926s 143 | iters: 600, epoch: 6 | loss: 0.4109201 144 | speed: 0.0208s/iter; left time: 104.9748s 145 | iters: 700, epoch: 6 | loss: 0.4630266 146 | speed: 0.0218s/iter; left time: 107.5106s 147 | iters: 800, epoch: 6 | loss: 0.4253446 148 | speed: 0.0218s/iter; left time: 105.6030s 149 | iters: 900, epoch: 6 | loss: 0.4401968 150 | speed: 0.0225s/iter; left time: 106.6910s 151 | iters: 1000, epoch: 6 | loss: 0.3842283 152 | speed: 0.0217s/iter; left time: 100.4047s 153 | iters: 1100, epoch: 6 | loss: 0.4937469 154 | speed: 0.0216s/iter; left time: 97.9580s 155 | Epoch: 6 cost time: 24.616931438446045 156 | Epoch: 6, Steps: 1127 | Train Loss: 0.4333829 Vali Loss: 0.3862739 Test Loss: 0.1429257 157 | Validation loss decreased (0.387823 --> 0.386274). Saving model ... 
158 | iters: 100, epoch: 7 | loss: 0.4167922 159 | speed: 0.5726s/iter; left time: 2524.4334s 160 | iters: 200, epoch: 7 | loss: 0.3671733 161 | speed: 0.0211s/iter; left time: 91.0684s 162 | iters: 300, epoch: 7 | loss: 0.4005415 163 | speed: 0.0221s/iter; left time: 92.8836s 164 | iters: 400, epoch: 7 | loss: 0.3989988 165 | speed: 0.0211s/iter; left time: 86.8743s 166 | iters: 500, epoch: 7 | loss: 0.4192830 167 | speed: 0.0212s/iter; left time: 84.9429s 168 | iters: 600, epoch: 7 | loss: 0.4289940 169 | speed: 0.0205s/iter; left time: 80.3281s 170 | iters: 700, epoch: 7 | loss: 0.4602559 171 | speed: 0.0213s/iter; left time: 81.1502s 172 | iters: 800, epoch: 7 | loss: 0.3927089 173 | speed: 0.0233s/iter; left time: 86.5249s 174 | iters: 900, epoch: 7 | loss: 0.4947055 175 | speed: 0.0201s/iter; left time: 72.5604s 176 | iters: 1000, epoch: 7 | loss: 0.4428734 177 | speed: 0.0205s/iter; left time: 71.8993s 178 | iters: 1100, epoch: 7 | loss: 0.4092097 179 | speed: 0.0204s/iter; left time: 69.7089s 180 | Epoch: 7 cost time: 24.55203676223755 181 | Epoch: 7, Steps: 1127 | Train Loss: 0.4328295 Vali Loss: 0.3871203 Test Loss: 0.1428799 182 | EarlyStopping counter: 1 out of 3 183 | iters: 100, epoch: 8 | loss: 0.5337906 184 | speed: 0.5590s/iter; left time: 1834.6723s 185 | iters: 200, epoch: 8 | loss: 0.4498539 186 | speed: 0.0194s/iter; left time: 61.6374s 187 | iters: 300, epoch: 8 | loss: 0.4375495 188 | speed: 0.0190s/iter; left time: 58.5625s 189 | iters: 400, epoch: 8 | loss: 0.3812272 190 | speed: 0.0195s/iter; left time: 58.2022s 191 | iters: 500, epoch: 8 | loss: 0.4046172 192 | speed: 0.0194s/iter; left time: 55.8801s 193 | iters: 600, epoch: 8 | loss: 0.4688789 194 | speed: 0.0206s/iter; left time: 57.3648s 195 | iters: 700, epoch: 8 | loss: 0.3630677 196 | speed: 0.0199s/iter; left time: 53.3883s 197 | iters: 800, epoch: 8 | loss: 0.3939684 198 | speed: 0.0204s/iter; left time: 52.7824s 199 | iters: 900, epoch: 8 | loss: 0.4338245 200 | speed: 0.0208s/iter; left time: 51.6811s 201 | iters: 1000, epoch: 8 | loss: 0.5096874 202 | speed: 0.0204s/iter; left time: 48.6586s 203 | iters: 1100, epoch: 8 | loss: 0.4514995 204 | speed: 0.0203s/iter; left time: 46.4109s 205 | Epoch: 8 cost time: 23.43448305130005 206 | Epoch: 8, Steps: 1127 | Train Loss: 0.4324890 Vali Loss: 0.3870848 Test Loss: 0.1428910 207 | EarlyStopping counter: 2 out of 3 208 | iters: 100, epoch: 9 | loss: 0.4042935 209 | speed: 0.5786s/iter; left time: 1246.8603s 210 | iters: 200, epoch: 9 | loss: 0.4666654 211 | speed: 0.0233s/iter; left time: 47.8938s 212 | iters: 300, epoch: 9 | loss: 0.3921061 213 | speed: 0.0217s/iter; left time: 42.5092s 214 | iters: 400, epoch: 9 | loss: 0.4478674 215 | speed: 0.0199s/iter; left time: 36.8563s 216 | iters: 500, epoch: 9 | loss: 0.4802259 217 | speed: 0.0207s/iter; left time: 36.3840s 218 | iters: 600, epoch: 9 | loss: 0.4203391 219 | speed: 0.0208s/iter; left time: 34.3509s 220 | iters: 700, epoch: 9 | loss: 0.4551291 221 | speed: 0.0207s/iter; left time: 32.2257s 222 | iters: 800, epoch: 9 | loss: 0.3875813 223 | speed: 0.0207s/iter; left time: 30.1911s 224 | iters: 900, epoch: 9 | loss: 0.3981475 225 | speed: 0.0208s/iter; left time: 28.1177s 226 | iters: 1000, epoch: 9 | loss: 0.4142269 227 | speed: 0.0205s/iter; left time: 25.7445s 228 | iters: 1100, epoch: 9 | loss: 0.3906738 229 | speed: 0.0204s/iter; left time: 23.5362s 230 | Epoch: 9 cost time: 24.41859745979309 231 | Epoch: 9, Steps: 1127 | Train Loss: 0.4323682 Vali Loss: 0.3870522 Test Loss: 0.1428617 232 | 
EarlyStopping counter: 3 out of 3 233 | Early stopping 234 | >>>>>>>testing : weather_720_96_TimeBridge_custom_bs32_ftM_sl720_ll48_pl96_dm128_nh8_ial1_pdl1_cal1_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 235 | test 10444 236 | test shape: (10444, 1, 96, 21) (10444, 1, 96, 21) 237 | test shape: (10444, 96, 21) (10444, 96, 21) 238 | mse:0.1429259032011032, mae:0.18284930288791656 239 | rmse:0.3780554234981537, mape:0.4395139515399933, mspe:12910708.0 240 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh2_0.35_TimeBridge_336.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=16, is_training=1, model_id='ETTh2_720_336', model='TimeBridge', data='ETTh2', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh2.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=336, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=48, enc_in=7, d_model=128, n_heads=4, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=15, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : ETTh2_720_336_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl336_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7585 6 | val 2545 7 | test 2545 8 | iters: 100, epoch: 1 | loss: 2.0490143 9 | speed: 0.0294s/iter; left time: 1391.9962s 10 | iters: 200, epoch: 1 | loss: 2.0870667 11 | speed: 0.0178s/iter; left time: 841.8323s 12 | iters: 300, epoch: 1 | loss: 2.5605707 13 | speed: 0.0169s/iter; left time: 796.0096s 14 | iters: 400, epoch: 1 | loss: 2.1404889 15 | speed: 0.0173s/iter; left time: 811.9077s 16 | Epoch: 1 cost time: 9.41806936264038 17 | Epoch: 1, Steps: 474 | Train Loss: 2.2037438 Vali Loss: 0.4119492 Test Loss: 0.3709031 18 | Validation loss decreased (inf --> 0.411949). Saving model ... 19 | iters: 100, epoch: 2 | loss: 2.2368884 20 | speed: 0.1909s/iter; left time: 8940.6650s 21 | iters: 200, epoch: 2 | loss: 2.4025748 22 | speed: 0.0195s/iter; left time: 913.2517s 23 | iters: 300, epoch: 2 | loss: 2.2232013 24 | speed: 0.0192s/iter; left time: 893.9896s 25 | iters: 400, epoch: 2 | loss: 2.2547753 26 | speed: 0.0183s/iter; left time: 852.7427s 27 | Epoch: 2 cost time: 9.30513596534729 28 | Epoch: 2, Steps: 474 | Train Loss: 2.0686598 Vali Loss: 0.3848452 Test Loss: 0.3715430 29 | Validation loss decreased (0.411949 --> 0.384845). Saving model ... 30 | iters: 100, epoch: 3 | loss: 2.5091519 31 | speed: 0.2008s/iter; left time: 9308.1114s 32 | iters: 200, epoch: 3 | loss: 1.9400079 33 | speed: 0.0195s/iter; left time: 904.2219s 34 | iters: 300, epoch: 3 | loss: 1.7659415 35 | speed: 0.0198s/iter; left time: 913.2823s 36 | iters: 400, epoch: 3 | loss: 2.4597871 37 | speed: 0.0207s/iter; left time: 953.6322s 38 | Epoch: 3 cost time: 10.28074598312378 39 | Epoch: 3, Steps: 474 | Train Loss: 2.0184734 Vali Loss: 0.3714027 Test Loss: 0.3686972 40 | Validation loss decreased (0.384845 --> 0.371403). Saving model ... 
41 | iters: 100, epoch: 4 | loss: 1.8731072 42 | speed: 0.1946s/iter; left time: 8930.1211s 43 | iters: 200, epoch: 4 | loss: 1.9937874 44 | speed: 0.0210s/iter; left time: 962.5190s 45 | iters: 300, epoch: 4 | loss: 1.7570934 46 | speed: 0.0203s/iter; left time: 928.1301s 47 | iters: 400, epoch: 4 | loss: 2.1836863 48 | speed: 0.0191s/iter; left time: 870.4406s 49 | Epoch: 4 cost time: 9.9977867603302 50 | Epoch: 4, Steps: 474 | Train Loss: 1.9992769 Vali Loss: 0.3714537 Test Loss: 0.3701564 51 | EarlyStopping counter: 1 out of 15 52 | iters: 100, epoch: 5 | loss: 1.8627543 53 | speed: 0.1970s/iter; left time: 8943.7579s 54 | iters: 200, epoch: 5 | loss: 1.7675382 55 | speed: 0.0207s/iter; left time: 935.9610s 56 | iters: 300, epoch: 5 | loss: 1.7022305 57 | speed: 0.0192s/iter; left time: 867.0928s 58 | iters: 400, epoch: 5 | loss: 1.7485913 59 | speed: 0.0232s/iter; left time: 1046.9445s 60 | Epoch: 5 cost time: 10.19996452331543 61 | Epoch: 5, Steps: 474 | Train Loss: 1.9898042 Vali Loss: 0.3680319 Test Loss: 0.3679090 62 | Validation loss decreased (0.371403 --> 0.368032). Saving model ... 63 | iters: 100, epoch: 6 | loss: 1.8224610 64 | speed: 0.2017s/iter; left time: 9061.6166s 65 | iters: 200, epoch: 6 | loss: 1.9476874 66 | speed: 0.0196s/iter; left time: 880.3434s 67 | iters: 300, epoch: 6 | loss: 1.6663139 68 | speed: 0.0204s/iter; left time: 913.2198s 69 | iters: 400, epoch: 6 | loss: 1.5639483 70 | speed: 0.0210s/iter; left time: 936.7774s 71 | Epoch: 6 cost time: 10.263386964797974 72 | Epoch: 6, Steps: 474 | Train Loss: 1.9855691 Vali Loss: 0.3707707 Test Loss: 0.3676004 73 | EarlyStopping counter: 1 out of 15 74 | iters: 100, epoch: 7 | loss: 2.2234225 75 | speed: 0.1932s/iter; left time: 8587.0065s 76 | iters: 200, epoch: 7 | loss: 2.0299013 77 | speed: 0.0174s/iter; left time: 772.6282s 78 | iters: 300, epoch: 7 | loss: 1.6561273 79 | speed: 0.0201s/iter; left time: 890.5733s 80 | iters: 400, epoch: 7 | loss: 1.9104818 81 | speed: 0.0204s/iter; left time: 900.0234s 82 | Epoch: 7 cost time: 9.565013647079468 83 | Epoch: 7, Steps: 474 | Train Loss: 1.9834593 Vali Loss: 0.3706593 Test Loss: 0.3674500 84 | EarlyStopping counter: 2 out of 15 85 | iters: 100, epoch: 8 | loss: 1.9139917 86 | speed: 0.1910s/iter; left time: 8399.9789s 87 | iters: 200, epoch: 8 | loss: 2.2532561 88 | speed: 0.0180s/iter; left time: 791.7138s 89 | iters: 300, epoch: 8 | loss: 2.1056900 90 | speed: 0.0178s/iter; left time: 779.3872s 91 | iters: 400, epoch: 8 | loss: 1.8281804 92 | speed: 0.0186s/iter; left time: 812.3291s 93 | Epoch: 8 cost time: 9.280115127563477 94 | Epoch: 8, Steps: 474 | Train Loss: 1.9821681 Vali Loss: 0.3708489 Test Loss: 0.3671449 95 | EarlyStopping counter: 3 out of 15 96 | iters: 100, epoch: 9 | loss: 1.9980048 97 | speed: 0.2024s/iter; left time: 8806.8266s 98 | iters: 200, epoch: 9 | loss: 1.9921193 99 | speed: 0.0233s/iter; left time: 1012.9913s 100 | iters: 300, epoch: 9 | loss: 1.7481588 101 | speed: 0.0221s/iter; left time: 958.8725s 102 | iters: 400, epoch: 9 | loss: 1.6730371 103 | speed: 0.0219s/iter; left time: 947.2845s 104 | Epoch: 9 cost time: 11.013566255569458 105 | Epoch: 9, Steps: 474 | Train Loss: 1.9817556 Vali Loss: 0.3709207 Test Loss: 0.3672110 106 | EarlyStopping counter: 4 out of 15 107 | iters: 100, epoch: 10 | loss: 2.0097008 108 | speed: 0.1989s/iter; left time: 8559.2108s 109 | iters: 200, epoch: 10 | loss: 2.0854487 110 | speed: 0.0190s/iter; left time: 813.6820s 111 | iters: 300, epoch: 10 | loss: 2.8642268 112 | speed: 0.0187s/iter; left time: 
802.2061s 113 | iters: 400, epoch: 10 | loss: 1.8679219 114 | speed: 0.0192s/iter; left time: 820.5066s 115 | Epoch: 10 cost time: 9.614798307418823 116 | Epoch: 10, Steps: 474 | Train Loss: 1.9809300 Vali Loss: 0.3713948 Test Loss: 0.3673318 117 | EarlyStopping counter: 5 out of 15 118 | iters: 100, epoch: 11 | loss: 2.2336819 119 | speed: 0.1926s/iter; left time: 8198.5667s 120 | iters: 200, epoch: 11 | loss: 2.3999269 121 | speed: 0.0211s/iter; left time: 896.7618s 122 | iters: 300, epoch: 11 | loss: 2.2455544 123 | speed: 0.0208s/iter; left time: 882.3961s 124 | iters: 400, epoch: 11 | loss: 2.1304457 125 | speed: 0.0219s/iter; left time: 926.8323s 126 | Epoch: 11 cost time: 10.223379373550415 127 | Epoch: 11, Steps: 474 | Train Loss: 1.9809972 Vali Loss: 0.3714892 Test Loss: 0.3672026 128 | EarlyStopping counter: 6 out of 15 129 | iters: 100, epoch: 12 | loss: 1.8437910 130 | speed: 0.1969s/iter; left time: 8285.5371s 131 | iters: 200, epoch: 12 | loss: 1.9696771 132 | speed: 0.0181s/iter; left time: 761.6621s 133 | iters: 300, epoch: 12 | loss: 1.8595055 134 | speed: 0.0179s/iter; left time: 751.6020s 135 | iters: 400, epoch: 12 | loss: 1.6575878 136 | speed: 0.0194s/iter; left time: 808.7690s 137 | Epoch: 12 cost time: 9.240576028823853 138 | Epoch: 12, Steps: 474 | Train Loss: 1.9808142 Vali Loss: 0.3714653 Test Loss: 0.3671649 139 | EarlyStopping counter: 7 out of 15 140 | iters: 100, epoch: 13 | loss: 2.3756986 141 | speed: 0.1930s/iter; left time: 8033.2987s 142 | iters: 200, epoch: 13 | loss: 1.8030987 143 | speed: 0.0205s/iter; left time: 850.7682s 144 | iters: 300, epoch: 13 | loss: 2.4262795 145 | speed: 0.0201s/iter; left time: 832.4559s 146 | iters: 400, epoch: 13 | loss: 2.1440954 147 | speed: 0.0207s/iter; left time: 855.5934s 148 | Epoch: 13 cost time: 10.03203272819519 149 | Epoch: 13, Steps: 474 | Train Loss: 1.9809575 Vali Loss: 0.3709730 Test Loss: 0.3671685 150 | EarlyStopping counter: 8 out of 15 151 | iters: 100, epoch: 14 | loss: 2.1852925 152 | speed: 0.1946s/iter; left time: 8005.9624s 153 | iters: 200, epoch: 14 | loss: 2.1647019 154 | speed: 0.0177s/iter; left time: 727.8456s 155 | iters: 300, epoch: 14 | loss: 2.0979052 156 | speed: 0.0198s/iter; left time: 809.0979s 157 | iters: 400, epoch: 14 | loss: 1.6131914 158 | speed: 0.0198s/iter; left time: 810.0631s 159 | Epoch: 14 cost time: 9.743951320648193 160 | Epoch: 14, Steps: 474 | Train Loss: 1.9808478 Vali Loss: 0.3714334 Test Loss: 0.3671641 161 | EarlyStopping counter: 9 out of 15 162 | iters: 100, epoch: 15 | loss: 2.0495570 163 | speed: 0.1912s/iter; left time: 7776.3089s 164 | iters: 200, epoch: 15 | loss: 2.1531942 165 | speed: 0.0220s/iter; left time: 891.4744s 166 | iters: 300, epoch: 15 | loss: 1.5938382 167 | speed: 0.0212s/iter; left time: 856.7394s 168 | iters: 400, epoch: 15 | loss: 1.8647141 169 | speed: 0.0192s/iter; left time: 776.9761s 170 | Epoch: 15 cost time: 10.060459852218628 171 | Epoch: 15, Steps: 474 | Train Loss: 1.9807650 Vali Loss: 0.3713948 Test Loss: 0.3671694 172 | EarlyStopping counter: 10 out of 15 173 | iters: 100, epoch: 16 | loss: 1.9012452 174 | speed: 0.1999s/iter; left time: 8036.1274s 175 | iters: 200, epoch: 16 | loss: 2.2745106 176 | speed: 0.0189s/iter; left time: 757.3165s 177 | iters: 300, epoch: 16 | loss: 1.7456481 178 | speed: 0.0209s/iter; left time: 835.1309s 179 | iters: 400, epoch: 16 | loss: 2.0229459 180 | speed: 0.0203s/iter; left time: 811.3641s 181 | Epoch: 16 cost time: 9.928469181060791 182 | Epoch: 16, Steps: 474 | Train Loss: 1.9808469 Vali 
Loss: 0.3712927 Test Loss: 0.3671694 183 | EarlyStopping counter: 11 out of 15 184 | iters: 100, epoch: 17 | loss: 1.8980658 185 | speed: 0.1962s/iter; left time: 7793.8206s 186 | iters: 200, epoch: 17 | loss: 2.1342335 187 | speed: 0.0184s/iter; left time: 727.6984s 188 | iters: 300, epoch: 17 | loss: 1.8881040 189 | speed: 0.0176s/iter; left time: 693.9177s 190 | iters: 400, epoch: 17 | loss: 1.8609103 191 | speed: 0.0176s/iter; left time: 694.2827s 192 | Epoch: 17 cost time: 9.080386877059937 193 | Epoch: 17, Steps: 474 | Train Loss: 1.9810501 Vali Loss: 0.3714051 Test Loss: 0.3671696 194 | EarlyStopping counter: 12 out of 15 195 | iters: 100, epoch: 18 | loss: 1.5607108 196 | speed: 0.1953s/iter; left time: 7662.4297s 197 | iters: 200, epoch: 18 | loss: 1.7138529 198 | speed: 0.0188s/iter; left time: 735.8602s 199 | iters: 300, epoch: 18 | loss: 2.0889435 200 | speed: 0.0182s/iter; left time: 709.8496s 201 | iters: 400, epoch: 18 | loss: 1.6333877 202 | speed: 0.0188s/iter; left time: 732.8540s 203 | Epoch: 18 cost time: 9.538870096206665 204 | Epoch: 18, Steps: 474 | Train Loss: 1.9807732 Vali Loss: 0.3713785 Test Loss: 0.3671696 205 | EarlyStopping counter: 13 out of 15 206 | iters: 100, epoch: 19 | loss: 1.6951430 207 | speed: 0.1928s/iter; left time: 7474.6282s 208 | iters: 200, epoch: 19 | loss: 1.8788795 209 | speed: 0.0182s/iter; left time: 704.4678s 210 | iters: 300, epoch: 19 | loss: 2.2593076 211 | speed: 0.0179s/iter; left time: 690.6460s 212 | iters: 400, epoch: 19 | loss: 1.6327701 213 | speed: 0.0181s/iter; left time: 695.1616s 214 | Epoch: 19 cost time: 9.274600267410278 215 | Epoch: 19, Steps: 474 | Train Loss: 1.9805315 Vali Loss: 0.3713804 Test Loss: 0.3671696 216 | EarlyStopping counter: 14 out of 15 217 | iters: 100, epoch: 20 | loss: 1.9395542 218 | speed: 0.1950s/iter; left time: 7469.3759s 219 | iters: 200, epoch: 20 | loss: 1.7341092 220 | speed: 0.0197s/iter; left time: 750.9418s 221 | iters: 300, epoch: 20 | loss: 2.1863697 222 | speed: 0.0209s/iter; left time: 795.4608s 223 | iters: 400, epoch: 20 | loss: 1.7268776 224 | speed: 0.0208s/iter; left time: 789.8090s 225 | Epoch: 20 cost time: 9.875156164169312 226 | Epoch: 20, Steps: 474 | Train Loss: 1.9810031 Vali Loss: 0.3714037 Test Loss: 0.3671696 227 | EarlyStopping counter: 15 out of 15 228 | Early stopping 229 | >>>>>>>testing : ETTh2_720_336_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl336_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 230 | test 2545 231 | test shape: (2545, 1, 336, 7) (2545, 1, 336, 7) 232 | test shape: (2545, 336, 7) (2545, 336, 7) 233 | mse:0.36790889501571655, mae:0.4007733166217804 234 | rmse:0.6065549254417419, mape:0.5538084506988525, mspe:387.6688232421875 235 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/electricity_0.2_TimeBridge_96.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.2, dropout=0.0, attn_dropout=0.1, batch_size=16, is_training=1, model_id='electricity_720_96', model='TimeBridge', data='custom', root_path='/data1/liupeiyuan/dataset/datasets/electricity/', data_path='electricity.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=96, seasonal_patterns='Monthly', ia_layers=1, pd_layers=1, ca_layers=2, stable_len=4, num_p=4, period=24, enc_in=321, d_model=512, n_heads=32, d_ff=512, embed='timeF', activation='gelu', 
output_attention=False, num_workers=10, itr=1, train_epochs=10, embedding_epochs=5, patience=3, pct_start=0.2, learning_rate=0.0005, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : electricity_720_96_TimeBridge_custom_bs16_ftM_sl720_ll48_pl96_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 17597 6 | val 2537 7 | test 5165 8 | iters: 100, epoch: 1 | loss: 0.7482189 9 | speed: 0.3733s/iter; left time: 4065.4624s 10 | iters: 200, epoch: 1 | loss: 0.7081607 11 | speed: 0.3741s/iter; left time: 4036.4881s 12 | iters: 300, epoch: 1 | loss: 0.6262149 13 | speed: 0.3761s/iter; left time: 4020.5003s 14 | iters: 400, epoch: 1 | loss: 0.6301945 15 | speed: 0.3757s/iter; left time: 3979.1507s 16 | iters: 500, epoch: 1 | loss: 0.6240971 17 | speed: 0.3802s/iter; left time: 3988.7861s 18 | iters: 600, epoch: 1 | loss: 0.6297003 19 | speed: 0.3751s/iter; left time: 3897.9317s 20 | iters: 700, epoch: 1 | loss: 0.6012440 21 | speed: 0.3733s/iter; left time: 3842.0785s 22 | iters: 800, epoch: 1 | loss: 0.6489863 23 | speed: 0.3738s/iter; left time: 3809.8775s 24 | iters: 900, epoch: 1 | loss: 0.5949422 25 | speed: 0.3756s/iter; left time: 3790.2679s 26 | iters: 1000, epoch: 1 | loss: 0.6081442 27 | speed: 0.3776s/iter; left time: 3772.1814s 28 | Epoch: 1 cost time: 412.9007031917572 29 | Epoch: 1, Steps: 1099 | Train Loss: 0.6728980 Vali Loss: 0.1122878 Test Loss: 0.1331840 30 | Validation loss decreased (inf --> 0.112288). Saving model ... 31 | iters: 100, epoch: 2 | loss: 0.5808800 32 | speed: 1.6280s/iter; left time: 15941.5361s 33 | iters: 200, epoch: 2 | loss: 0.5803152 34 | speed: 0.3751s/iter; left time: 3635.2025s 35 | iters: 300, epoch: 2 | loss: 0.6188543 36 | speed: 0.3742s/iter; left time: 3589.7435s 37 | iters: 400, epoch: 2 | loss: 0.5974149 38 | speed: 0.3731s/iter; left time: 3541.7927s 39 | iters: 500, epoch: 2 | loss: 0.5639532 40 | speed: 0.3730s/iter; left time: 3503.4950s 41 | iters: 600, epoch: 2 | loss: 0.6369680 42 | speed: 0.3731s/iter; left time: 3466.6407s 43 | iters: 700, epoch: 2 | loss: 0.5820273 44 | speed: 0.3736s/iter; left time: 3434.0176s 45 | iters: 800, epoch: 2 | loss: 0.6642651 46 | speed: 0.3728s/iter; left time: 3389.9109s 47 | iters: 900, epoch: 2 | loss: 0.5385432 48 | speed: 0.3750s/iter; left time: 3372.1766s 49 | iters: 1000, epoch: 2 | loss: 0.5840624 50 | speed: 0.3736s/iter; left time: 3321.8421s 51 | Epoch: 2 cost time: 411.58321237564087 52 | Epoch: 2, Steps: 1099 | Train Loss: 0.6055252 Vali Loss: 0.1093992 Test Loss: 0.1288661 53 | Validation loss decreased (0.112288 --> 0.109399). Saving model ... 
54 | iters: 100, epoch: 3 | loss: 0.5500411 55 | speed: 1.6210s/iter; left time: 14091.2144s 56 | iters: 200, epoch: 3 | loss: 0.5493537 57 | speed: 0.3771s/iter; left time: 3240.7463s 58 | iters: 300, epoch: 3 | loss: 0.5483405 59 | speed: 0.3745s/iter; left time: 3180.5069s 60 | iters: 400, epoch: 3 | loss: 0.5880037 61 | speed: 0.3724s/iter; left time: 3125.9351s 62 | iters: 500, epoch: 3 | loss: 0.5940778 63 | speed: 0.3741s/iter; left time: 3102.7455s 64 | iters: 600, epoch: 3 | loss: 0.5960724 65 | speed: 0.3737s/iter; left time: 3061.7921s 66 | iters: 700, epoch: 3 | loss: 0.5464528 67 | speed: 0.3740s/iter; left time: 3026.9475s 68 | iters: 800, epoch: 3 | loss: 0.5340257 69 | speed: 0.3730s/iter; left time: 2981.1721s 70 | iters: 900, epoch: 3 | loss: 0.6035374 71 | speed: 0.3708s/iter; left time: 2926.3660s 72 | iters: 1000, epoch: 3 | loss: 0.5646857 73 | speed: 0.3736s/iter; left time: 2911.2800s 74 | Epoch: 3 cost time: 411.17315125465393 75 | Epoch: 3, Steps: 1099 | Train Loss: 0.5814965 Vali Loss: 0.1069218 Test Loss: 0.1260788 76 | Validation loss decreased (0.109399 --> 0.106922). Saving model ... 77 | iters: 100, epoch: 4 | loss: 0.5558928 78 | speed: 1.6209s/iter; left time: 12309.0983s 79 | iters: 200, epoch: 4 | loss: 0.5339972 80 | speed: 0.3729s/iter; left time: 2794.7756s 81 | iters: 300, epoch: 4 | loss: 0.5521617 82 | speed: 0.3762s/iter; left time: 2781.3277s 83 | iters: 400, epoch: 4 | loss: 0.5739534 84 | speed: 0.3747s/iter; left time: 2732.8411s 85 | iters: 500, epoch: 4 | loss: 0.5611784 86 | speed: 0.3742s/iter; left time: 2692.2730s 87 | iters: 600, epoch: 4 | loss: 0.5715809 88 | speed: 0.3726s/iter; left time: 2643.1473s 89 | iters: 700, epoch: 4 | loss: 0.5326917 90 | speed: 0.3719s/iter; left time: 2600.7190s 91 | iters: 800, epoch: 4 | loss: 0.5908569 92 | speed: 0.3737s/iter; left time: 2576.1202s 93 | iters: 900, epoch: 4 | loss: 0.5779884 94 | speed: 0.3747s/iter; left time: 2545.7998s 95 | iters: 1000, epoch: 4 | loss: 0.5336750 96 | speed: 0.3723s/iter; left time: 2492.3143s 97 | Epoch: 4 cost time: 411.3499825000763 98 | Epoch: 4, Steps: 1099 | Train Loss: 0.5639456 Vali Loss: 0.1055752 Test Loss: 0.1241619 99 | Validation loss decreased (0.106922 --> 0.105575). Saving model ... 100 | iters: 100, epoch: 5 | loss: 0.5778992 101 | speed: 1.6316s/iter; left time: 10597.3349s 102 | iters: 200, epoch: 5 | loss: 0.5426871 103 | speed: 0.3740s/iter; left time: 2391.8287s 104 | iters: 300, epoch: 5 | loss: 0.5463588 105 | speed: 0.3718s/iter; left time: 2340.2962s 106 | iters: 400, epoch: 5 | loss: 0.6009440 107 | speed: 0.3718s/iter; left time: 2303.0657s 108 | iters: 500, epoch: 5 | loss: 0.5538013 109 | speed: 0.3727s/iter; left time: 2271.4478s 110 | iters: 600, epoch: 5 | loss: 0.5842211 111 | speed: 0.3728s/iter; left time: 2235.0259s 112 | iters: 700, epoch: 5 | loss: 0.5391515 113 | speed: 0.3731s/iter; left time: 2199.1426s 114 | iters: 800, epoch: 5 | loss: 0.5449912 115 | speed: 0.3735s/iter; left time: 2164.4919s 116 | iters: 900, epoch: 5 | loss: 0.5750597 117 | speed: 0.3728s/iter; left time: 2122.8181s 118 | iters: 1000, epoch: 5 | loss: 0.5126827 119 | speed: 0.3706s/iter; left time: 2073.3350s 120 | Epoch: 5 cost time: 410.69983768463135 121 | Epoch: 5, Steps: 1099 | Train Loss: 0.5535573 Vali Loss: 0.1047154 Test Loss: 0.1247882 122 | Validation loss decreased (0.105575 --> 0.104715). Saving model ... 
123 | iters: 100, epoch: 6 | loss: 0.5221779 124 | speed: 1.6295s/iter; left time: 8792.7881s 125 | iters: 200, epoch: 6 | loss: 0.5475379 126 | speed: 0.3743s/iter; left time: 1982.1194s 127 | iters: 300, epoch: 6 | loss: 0.5479477 128 | speed: 0.3738s/iter; left time: 1942.2184s 129 | iters: 400, epoch: 6 | loss: 0.5939496 130 | speed: 0.3707s/iter; left time: 1889.0806s 131 | iters: 500, epoch: 6 | loss: 0.5557711 132 | speed: 0.3724s/iter; left time: 1860.5933s 133 | iters: 600, epoch: 6 | loss: 0.5362969 134 | speed: 0.3734s/iter; left time: 1828.2766s 135 | iters: 700, epoch: 6 | loss: 0.5333599 136 | speed: 0.3733s/iter; left time: 1790.4184s 137 | iters: 800, epoch: 6 | loss: 0.5491813 138 | speed: 0.3720s/iter; left time: 1746.8701s 139 | iters: 900, epoch: 6 | loss: 0.5399925 140 | speed: 0.3747s/iter; left time: 1722.1805s 141 | iters: 1000, epoch: 6 | loss: 0.5470123 142 | speed: 0.3699s/iter; left time: 1663.1985s 143 | Epoch: 6 cost time: 410.4087743759155 144 | Epoch: 6, Steps: 1099 | Train Loss: 0.5475802 Vali Loss: 0.1048732 Test Loss: 0.1240544 145 | EarlyStopping counter: 1 out of 3 146 | iters: 100, epoch: 7 | loss: 0.5187538 147 | speed: 1.6294s/iter; left time: 7001.5830s 148 | iters: 200, epoch: 7 | loss: 0.5475389 149 | speed: 0.3731s/iter; left time: 1565.9675s 150 | iters: 300, epoch: 7 | loss: 0.5543696 151 | speed: 0.3745s/iter; left time: 1534.4430s 152 | iters: 400, epoch: 7 | loss: 0.5703942 153 | speed: 0.3721s/iter; left time: 1487.2703s 154 | iters: 500, epoch: 7 | loss: 0.5187464 155 | speed: 0.3731s/iter; left time: 1454.0307s 156 | iters: 600, epoch: 7 | loss: 0.5568469 157 | speed: 0.3734s/iter; left time: 1417.8260s 158 | iters: 700, epoch: 7 | loss: 0.5184290 159 | speed: 0.3752s/iter; left time: 1387.1784s 160 | iters: 800, epoch: 7 | loss: 0.5513139 161 | speed: 0.3748s/iter; left time: 1348.0534s 162 | iters: 900, epoch: 7 | loss: 0.5248240 163 | speed: 0.3734s/iter; left time: 1305.7909s 164 | iters: 1000, epoch: 7 | loss: 0.5481232 165 | speed: 0.3721s/iter; left time: 1264.0357s 166 | Epoch: 7 cost time: 411.5338819026947 167 | Epoch: 7, Steps: 1099 | Train Loss: 0.5443514 Vali Loss: 0.1045513 Test Loss: 0.1240255 168 | Validation loss decreased (0.104715 --> 0.104551). Saving model ... 169 | iters: 100, epoch: 8 | loss: 0.5399263 170 | speed: 1.6229s/iter; left time: 5190.0824s 171 | iters: 200, epoch: 8 | loss: 0.5411422 172 | speed: 0.3743s/iter; left time: 1159.6393s 173 | iters: 300, epoch: 8 | loss: 0.5574081 174 | speed: 0.3764s/iter; left time: 1128.5647s 175 | iters: 400, epoch: 8 | loss: 0.5368634 176 | speed: 0.3760s/iter; left time: 1089.7885s 177 | iters: 500, epoch: 8 | loss: 0.5760238 178 | speed: 0.3733s/iter; left time: 1044.4686s 179 | iters: 600, epoch: 8 | loss: 0.5211551 180 | speed: 0.3724s/iter; left time: 1004.6144s 181 | iters: 700, epoch: 8 | loss: 0.5577036 182 | speed: 0.3727s/iter; left time: 968.3333s 183 | iters: 800, epoch: 8 | loss: 0.5436920 184 | speed: 0.3722s/iter; left time: 929.6916s 185 | iters: 900, epoch: 8 | loss: 0.5379979 186 | speed: 0.3713s/iter; left time: 890.3225s 187 | iters: 1000, epoch: 8 | loss: 0.5575796 188 | speed: 0.3708s/iter; left time: 852.0233s 189 | Epoch: 8 cost time: 411.191055059433 190 | Epoch: 8, Steps: 1099 | Train Loss: 0.5427127 Vali Loss: 0.1043428 Test Loss: 0.1238084 191 | Validation loss decreased (0.104551 --> 0.104343). Saving model ... 
192 | iters: 100, epoch: 9 | loss: 0.5534491 193 | speed: 1.6386s/iter; left time: 3439.4928s 194 | iters: 200, epoch: 9 | loss: 0.5370404 195 | speed: 0.3714s/iter; left time: 742.3881s 196 | iters: 300, epoch: 9 | loss: 0.5378971 197 | speed: 0.3702s/iter; left time: 703.0380s 198 | iters: 400, epoch: 9 | loss: 0.5815398 199 | speed: 0.3707s/iter; left time: 666.9788s 200 | iters: 500, epoch: 9 | loss: 0.5350392 201 | speed: 0.3703s/iter; left time: 629.0980s 202 | iters: 600, epoch: 9 | loss: 0.5332074 203 | speed: 0.3703s/iter; left time: 592.1451s 204 | iters: 700, epoch: 9 | loss: 0.5641286 205 | speed: 0.3714s/iter; left time: 556.6626s 206 | iters: 800, epoch: 9 | loss: 0.5518103 207 | speed: 0.3728s/iter; left time: 521.5766s 208 | iters: 900, epoch: 9 | loss: 0.5498026 209 | speed: 0.3718s/iter; left time: 482.9233s 210 | iters: 1000, epoch: 9 | loss: 0.5617808 211 | speed: 0.3720s/iter; left time: 446.0370s 212 | Epoch: 9 cost time: 409.0788457393646 213 | Epoch: 9, Steps: 1099 | Train Loss: 0.5419004 Vali Loss: 0.1044265 Test Loss: 0.1239479 214 | EarlyStopping counter: 1 out of 3 215 | iters: 100, epoch: 10 | loss: 0.5607805 216 | speed: 1.6303s/iter; left time: 1630.3249s 217 | iters: 200, epoch: 10 | loss: 0.5477070 218 | speed: 0.3709s/iter; left time: 333.7777s 219 | iters: 300, epoch: 10 | loss: 0.5591058 220 | speed: 0.3737s/iter; left time: 298.9757s 221 | iters: 400, epoch: 10 | loss: 0.5327781 222 | speed: 0.3716s/iter; left time: 260.1437s 223 | iters: 500, epoch: 10 | loss: 0.5232843 224 | speed: 0.3726s/iter; left time: 223.5785s 225 | iters: 600, epoch: 10 | loss: 0.5163432 226 | speed: 0.3729s/iter; left time: 186.4271s 227 | iters: 700, epoch: 10 | loss: 0.5167787 228 | speed: 0.3730s/iter; left time: 149.2057s 229 | iters: 800, epoch: 10 | loss: 0.5651639 230 | speed: 0.3757s/iter; left time: 112.6976s 231 | iters: 900, epoch: 10 | loss: 0.5199017 232 | speed: 0.3727s/iter; left time: 74.5498s 233 | iters: 1000, epoch: 10 | loss: 0.5522544 234 | speed: 0.3748s/iter; left time: 37.4780s 235 | Epoch: 10 cost time: 410.96915221214294 236 | Epoch: 10, Steps: 1099 | Train Loss: 0.5415000 Vali Loss: 0.1043835 Test Loss: 0.1238480 237 | EarlyStopping counter: 2 out of 3 238 | >>>>>>>testing : electricity_720_96_TimeBridge_custom_bs16_ftM_sl720_ll48_pl96_dm512_nh32_ial1_pdl1_cal2_df512_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 239 | test 5165 240 | test shape: (5165, 1, 96, 321) (5165, 1, 96, 321) 241 | test shape: (5165, 96, 321) (5165, 96, 321) 242 | mse:0.1238083764910698, mae:0.2160072773694992 243 | rmse:0.35186415910720825, mape:0.41729336977005005, mspe:475630.75 244 | -------------------------------------------------------------------------------- /experiments/exp_long_term_forecasting.py: -------------------------------------------------------------------------------- 1 | from torch.optim import lr_scheduler 2 | 3 | from data_provider.data_factory import data_provider 4 | from experiments.exp_basic import Exp_Basic 5 | from utils.tools import EarlyStopping, adjust_learning_rate, visual 6 | from utils.metrics import metric 7 | import torch 8 | import torch.nn as nn 9 | from torch import optim 10 | import os 11 | import time 12 | import warnings 13 | import numpy as np 14 | 15 | warnings.filterwarnings('ignore') 16 | 17 | 18 | class Exp_Long_Term_Forecast(Exp_Basic): 19 | def __init__(self, args): 20 | super(Exp_Long_Term_Forecast, self).__init__(args) 21 | 22 | def _build_model(self): 23 | model = self.model_dict[self.args.model].Model(self.args).float() 
24 | 25 | if self.args.use_multi_gpu and self.args.use_gpu: 26 | model = nn.DataParallel(model, device_ids=self.args.device_ids) 27 | return model 28 | 29 | def _get_data(self, flag): 30 | data_set, data_loader = data_provider(self.args, flag) 31 | return data_set, data_loader 32 | 33 | def _select_optimizer(self): 34 | model_optim = optim.Adam(self.model.parameters(), lr=self.args.learning_rate) 35 | return model_optim 36 | 37 | def _select_criterion(self): 38 | if self.args.data == 'PEMS': 39 | criterion = nn.L1Loss() 40 | else: 41 | criterion = nn.MSELoss() 42 | return criterion 43 | 44 | def vali(self, vali_data, vali_loader, criterion): 45 | total_loss = [] 46 | self.model.eval() 47 | with torch.no_grad(): 48 | for i, (batch_x, batch_y, batch_x_mark, batch_y_mark) in enumerate(vali_loader): 49 | batch_x = batch_x.float().to(self.device) 50 | batch_y = batch_y.float() 51 | 52 | if 'PEMS' in self.args.data or 'Solar' in self.args.data: 53 | batch_x_mark = None 54 | batch_y_mark = None 55 | else: 56 | batch_x_mark = batch_x_mark.float().to(self.device) 57 | batch_y_mark = batch_y_mark.float().to(self.device) 58 | 59 | # channel_decoder input 60 | dec_inp = torch.zeros_like(batch_y[:, -self.args.pred_len:, :]).float() 61 | dec_inp = torch.cat([batch_y[:, :self.args.label_len, :], dec_inp], dim=1).float().to(self.device) 62 | # fc1 - channel_decoder 63 | if self.args.output_attention: 64 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)[0] 65 | else: 66 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark) 67 | 68 | f_dim = -1 if self.args.features == 'MS' else 0 69 | outputs = outputs[:, -self.args.pred_len:, f_dim:] 70 | batch_y = batch_y[:, -self.args.pred_len:, f_dim:].to(self.device) 71 | 72 | pred = outputs.detach().cpu() 73 | true = batch_y.detach().cpu() 74 | 75 | if self.args.data == 'PEMS': 76 | B, T, C = pred.shape 77 | pred = pred.cpu().numpy() 78 | true = true.cpu().numpy() 79 | pred = vali_data.inverse_transform(pred.reshape(-1, C)).reshape(B, T, C) 80 | true = vali_data.inverse_transform(true.reshape(-1, C)).reshape(B, T, C) 81 | mae, mse, rmse, mape, mspe = metric(pred, true) 82 | loss = mae 83 | else: 84 | loss = criterion(pred, true) 85 | total_loss.append(loss) 86 | 87 | total_loss = np.average(total_loss) 88 | self.model.train() 89 | return total_loss 90 | 91 | def train(self, setting): 92 | train_data, train_loader = self._get_data(flag='train') 93 | vali_data, vali_loader = self._get_data(flag='val') 94 | test_data, test_loader = self._get_data(flag='test') 95 | 96 | path = os.path.join(self.args.checkpoints, setting) 97 | if not os.path.exists(path): 98 | os.makedirs(path) 99 | 100 | time_now = time.time() 101 | 102 | train_steps = len(train_loader) 103 | early_stopping = EarlyStopping(patience=self.args.patience, verbose=True) 104 | 105 | model_optim = self._select_optimizer() 106 | criterion = self._select_criterion() 107 | 108 | if self.args.lradj == 'TST': 109 | scheduler = lr_scheduler.OneCycleLR(optimizer=model_optim, 110 | steps_per_epoch=train_steps, 111 | pct_start=self.args.pct_start, 112 | epochs=self.args.train_epochs, 113 | max_lr=self.args.learning_rate) 114 | 115 | for epoch in range(self.args.train_epochs): 116 | iter_count = 0 117 | train_loss = [] 118 | 119 | self.model.train() 120 | epoch_time = time.time() 121 | for i, (batch_x, batch_y, batch_x_mark, batch_y_mark) in enumerate(train_loader): 122 | iter_count += 1 123 | model_optim.zero_grad() 124 | batch_x = batch_x.float().to(self.device) 125 | 126 | batch_y = 
batch_y.float().to(self.device) 127 | if 'PEMS' in self.args.data or 'Solar' in self.args.data: 128 | batch_x_mark = None 129 | batch_y_mark = None 130 | else: 131 | batch_x_mark = batch_x_mark.float().to(self.device) 132 | batch_y_mark = batch_y_mark.float().to(self.device) 133 | 134 | # channel_decoder input 135 | dec_inp = torch.zeros_like(batch_y[:, -self.args.pred_len:, :]).float() 136 | dec_inp = torch.cat([batch_y[:, :self.args.label_len, :], dec_inp], dim=1).float().to(self.device) 137 | 138 | if self.args.output_attention: 139 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)[0] 140 | else: 141 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark) 142 | 143 | f_dim = -1 if self.args.features == 'MS' else 0 144 | outputs = outputs[:, -self.args.pred_len:, f_dim:] 145 | batch_y = batch_y[:, -self.args.pred_len:, f_dim:].to(self.device) 146 | loss = self.time_freq_mae(batch_y, outputs) 147 | 148 | train_loss.append(loss.item()) 149 | 150 | if (i + 1) % 100 == 0: 151 | print("\titers: {0}, epoch: {1} | loss: {2:.7f}".format(i + 1, epoch + 1, loss.item())) 152 | speed = (time.time() - time_now) / iter_count 153 | left_time = speed * ((self.args.train_epochs - epoch) * train_steps - i) 154 | print('\tspeed: {:.4f}s/iter; left time: {:.4f}s'.format(speed, left_time)) 155 | iter_count = 0 156 | time_now = time.time() 157 | 158 | loss.backward() 159 | model_optim.step() 160 | 161 | if self.args.lradj == 'TST': 162 | adjust_learning_rate(model_optim, scheduler, epoch + 1, self.args, printout=False) 163 | scheduler.step() 164 | 165 | print("Epoch: {} cost time: {}".format(epoch + 1, time.time() - epoch_time)) 166 | train_loss = np.average(train_loss) 167 | vali_loss = self.vali(vali_data, vali_loader, criterion) 168 | # test_loss = 0 169 | test_loss = self.vali(test_data, test_loader, criterion) 170 | 171 | print("Epoch: {0}, Steps: {1} | Train Loss: {2:.7f} Vali Loss: {3:.7f} Test Loss: {4:.7f}".format( 172 | epoch + 1, train_steps, train_loss, vali_loss, test_loss)) 173 | early_stopping(vali_loss, self.model, path) 174 | if early_stopping.early_stop: 175 | print("Early stopping") 176 | break 177 | 178 | if self.args.lradj != 'TST': 179 | adjust_learning_rate(model_optim, None, epoch + 1, self.args) 180 | else: 181 | print('Updating learning rate to {}'.format(scheduler.get_last_lr()[0])) 182 | 183 | # get_cka(self.args, setting, self.model, train_loader, self.device, epoch) 184 | 185 | best_model_path = path + '/' + 'checkpoint.pth' 186 | self.model.load_state_dict(torch.load(best_model_path)) 187 | 188 | return self.model 189 | 190 | def time_freq_mae(self, batch_y, outputs): 191 | # time mae loss 192 | t_loss = (outputs - batch_y).abs().mean() 193 | 194 | # freq mae loss 195 | f_loss = torch.fft.rfft(outputs, dim=1) - torch.fft.rfft(batch_y, dim=1) 196 | f_loss = f_loss.abs().mean() 197 | 198 | return (1 - self.args.alpha) * t_loss + self.args.alpha * f_loss 199 | 200 | def test(self, setting, test=0): 201 | test_data, test_loader = self._get_data(flag='test') 202 | if test: 203 | print('loading model') 204 | self.model.load_state_dict(torch.load(os.path.join('./checkpoints/' + setting, 'checkpoint.pth'))) 205 | 206 | preds = [] 207 | trues = [] 208 | folder_path = './test_results/' + setting + '/' 209 | if not os.path.exists(folder_path): 210 | os.makedirs(folder_path) 211 | 212 | self.model.eval() 213 | with torch.no_grad(): 214 | for i, (batch_x, batch_y, batch_x_mark, batch_y_mark) in enumerate(test_loader): 215 | batch_x = 
batch_x.float().to(self.device) 216 | batch_y = batch_y.float().to(self.device) 217 | 218 | if 'PEMS' in self.args.data or 'Solar' in self.args.data: 219 | batch_x_mark = None 220 | batch_y_mark = None 221 | else: 222 | batch_x_mark = batch_x_mark.float().to(self.device) 223 | batch_y_mark = batch_y_mark.float().to(self.device) 224 | 225 | # channel_decoder input 226 | dec_inp = torch.zeros_like(batch_y[:, -self.args.pred_len:, :]).float() 227 | dec_inp = torch.cat([batch_y[:, :self.args.label_len, :], dec_inp], dim=1).float().to(self.device) 228 | 229 | # fc1 - channel_decoder 230 | if self.args.output_attention: 231 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark)[0] 232 | 233 | else: 234 | outputs = self.model(batch_x, batch_x_mark, dec_inp, batch_y_mark) 235 | 236 | f_dim = -1 if self.args.features == 'MS' else 0 237 | outputs = outputs[:, -self.args.pred_len:, f_dim:] 238 | batch_y = batch_y[:, -self.args.pred_len:, f_dim:].to(self.device) 239 | outputs = outputs.detach().cpu().numpy() 240 | batch_y = batch_y.detach().cpu().numpy() 241 | if test_data.scale and self.args.inverse: 242 | outputs = test_data.inverse_transform(outputs) 243 | batch_y = test_data.inverse_transform(batch_y) 244 | 245 | pred = outputs 246 | true = batch_y 247 | 248 | preds.append(pred) 249 | trues.append(true) 250 | 251 | preds = np.array(preds) 252 | trues = np.array(trues) 253 | print('test shape:', preds.shape, trues.shape) 254 | preds = preds.reshape(-1, preds.shape[-2], preds.shape[-1]) 255 | trues = trues.reshape(-1, trues.shape[-2], trues.shape[-1]) 256 | print('test shape:', preds.shape, trues.shape) 257 | 258 | if self.args.data == 'PEMS': 259 | B, T, C = preds.shape 260 | preds = test_data.inverse_transform(preds.reshape(-1, C)).reshape(B, T, C) 261 | trues = test_data.inverse_transform(trues.reshape(-1, C)).reshape(B, T, C) 262 | 263 | # result save 264 | folder_path = './results/' + setting + '/' 265 | if not os.path.exists(folder_path): 266 | os.makedirs(folder_path) 267 | 268 | mae, mse, rmse, mape, mspe = metric(preds, trues) 269 | print('mse:{}, mae:{}'.format(mse, mae)) 270 | print('rmse:{}, mape:{}, mspe:{}'.format(rmse, mape, mspe)) 271 | f = open("result_long_term_forecast.txt", 'a') 272 | f.write(setting + " \n") 273 | f.write('mse:{}, mae:{}, rmse:{}, mape:{}, mspe:{}'.format(mse, mae, rmse, mape, mspe)) 274 | f.write('\n') 275 | f.write('\n') 276 | f.close() 277 | 278 | return 279 | -------------------------------------------------------------------------------- /_logs/LongForecasting/TimeBridge/ETTh2_0.35_TimeBridge_96.logs: -------------------------------------------------------------------------------- 1 | Args in experiment: 2 | Namespace(revin=True, alpha=0.35, dropout=0.0, attn_dropout=0.15, batch_size=16, is_training=1, model_id='ETTh2_720_96', model='TimeBridge', data='ETTh2', root_path='/data1/liupeiyuan/dataset/datasets/ETT-small/', data_path='ETTh2.csv', features='M', target='OT', freq='h', checkpoints='./checkpoints/', seq_len=720, label_len=48, pred_len=96, seasonal_patterns='Monthly', ia_layers=3, pd_layers=1, ca_layers=0, stable_len=6, num_p=None, period=48, enc_in=7, d_model=128, n_heads=4, d_ff=128, embed='timeF', activation='gelu', output_attention=False, num_workers=10, itr=1, train_epochs=100, embedding_epochs=5, patience=15, pct_start=0.2, learning_rate=0.0001, embedding_lr=0.0005, des='Exp', loss='MSE', lradj='type1', use_gpu=True, gpu=0, use_multi_gpu=False, devices='0,1,2,3', inverse=False) 3 | Use GPU: cuda:0 4 | >>>>>>>start training : 
ETTh2_720_96_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl96_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0>>>>>>>>>>>>>>>>>>>>>>>>>> 5 | train 7825 6 | val 2785 7 | test 2785 8 | iters: 100, epoch: 1 | loss: 1.1130301 9 | speed: 0.0314s/iter; left time: 1531.6194s 10 | iters: 200, epoch: 1 | loss: 1.4412150 11 | speed: 0.0193s/iter; left time: 941.1256s 12 | iters: 300, epoch: 1 | loss: 1.1004239 13 | speed: 0.0215s/iter; left time: 1045.6602s 14 | iters: 400, epoch: 1 | loss: 1.0549078 15 | speed: 0.0196s/iter; left time: 952.5053s 16 | Epoch: 1 cost time: 10.824738264083862 17 | Epoch: 1, Steps: 489 | Train Loss: 1.2571019 Vali Loss: 0.2386661 Test Loss: 0.2906798 18 | Validation loss decreased (inf --> 0.238666). Saving model ... 19 | iters: 100, epoch: 2 | loss: 1.0097927 20 | speed: 0.1990s/iter; left time: 9614.0944s 21 | iters: 200, epoch: 2 | loss: 1.4238691 22 | speed: 0.0200s/iter; left time: 962.9257s 23 | iters: 300, epoch: 2 | loss: 1.0585345 24 | speed: 0.0196s/iter; left time: 944.1782s 25 | iters: 400, epoch: 2 | loss: 1.3193920 26 | speed: 0.0209s/iter; left time: 1002.8834s 27 | Epoch: 2 cost time: 10.133687973022461 28 | Epoch: 2, Steps: 489 | Train Loss: 1.1420365 Vali Loss: 0.2181755 Test Loss: 0.2733025 29 | Validation loss decreased (0.238666 --> 0.218176). Saving model ... 30 | iters: 100, epoch: 3 | loss: 1.2164117 31 | speed: 0.2012s/iter; left time: 9623.3308s 32 | iters: 200, epoch: 3 | loss: 0.9081314 33 | speed: 0.0216s/iter; left time: 1030.7879s 34 | iters: 300, epoch: 3 | loss: 1.0128345 35 | speed: 0.0205s/iter; left time: 978.4385s 36 | iters: 400, epoch: 3 | loss: 1.0854752 37 | speed: 0.0194s/iter; left time: 921.3753s 38 | Epoch: 3 cost time: 10.129354476928711 39 | Epoch: 3, Steps: 489 | Train Loss: 1.1094594 Vali Loss: 0.2162944 Test Loss: 0.2703330 40 | Validation loss decreased (0.218176 --> 0.216294). Saving model ... 41 | iters: 100, epoch: 4 | loss: 1.3358390 42 | speed: 0.2137s/iter; left time: 10113.4777s 43 | iters: 200, epoch: 4 | loss: 1.0220444 44 | speed: 0.0181s/iter; left time: 856.2000s 45 | iters: 300, epoch: 4 | loss: 1.0668750 46 | speed: 0.0182s/iter; left time: 857.8280s 47 | iters: 400, epoch: 4 | loss: 1.2259622 48 | speed: 0.0174s/iter; left time: 817.6181s 49 | Epoch: 4 cost time: 9.673394918441772 50 | Epoch: 4, Steps: 489 | Train Loss: 1.0967373 Vali Loss: 0.2140521 Test Loss: 0.2684598 51 | Validation loss decreased (0.216294 --> 0.214052). Saving model ... 52 | iters: 100, epoch: 5 | loss: 0.9384047 53 | speed: 0.2089s/iter; left time: 9783.7053s 54 | iters: 200, epoch: 5 | loss: 0.9780928 55 | speed: 0.0197s/iter; left time: 919.1949s 56 | iters: 300, epoch: 5 | loss: 1.1916529 57 | speed: 0.0174s/iter; left time: 811.2592s 58 | iters: 400, epoch: 5 | loss: 1.0095540 59 | speed: 0.0200s/iter; left time: 933.2194s 60 | Epoch: 5 cost time: 10.231797218322754 61 | Epoch: 5, Steps: 489 | Train Loss: 1.0912032 Vali Loss: 0.2138243 Test Loss: 0.2681445 62 | Validation loss decreased (0.214052 --> 0.213824). Saving model ... 
63 | iters: 100, epoch: 6 | loss: 1.0096906 64 | speed: 0.2100s/iter; left time: 9736.3595s 65 | iters: 200, epoch: 6 | loss: 1.5368429 66 | speed: 0.0212s/iter; left time: 982.6979s 67 | iters: 300, epoch: 6 | loss: 1.0469003 68 | speed: 0.0213s/iter; left time: 980.8953s 69 | iters: 400, epoch: 6 | loss: 1.0440333 70 | speed: 0.0225s/iter; left time: 1037.1270s 71 | Epoch: 6 cost time: 10.91092848777771 72 | Epoch: 6, Steps: 489 | Train Loss: 1.0884093 Vali Loss: 0.2116524 Test Loss: 0.2675622 73 | Validation loss decreased (0.213824 --> 0.211652). Saving model ... 74 | iters: 100, epoch: 7 | loss: 1.0127639 75 | speed: 0.2097s/iter; left time: 9617.6588s 76 | iters: 200, epoch: 7 | loss: 1.3122361 77 | speed: 0.0223s/iter; left time: 1019.7317s 78 | iters: 300, epoch: 7 | loss: 0.9091011 79 | speed: 0.0215s/iter; left time: 981.5517s 80 | iters: 400, epoch: 7 | loss: 1.1084360 81 | speed: 0.0190s/iter; left time: 867.4207s 82 | Epoch: 7 cost time: 10.668052196502686 83 | Epoch: 7, Steps: 489 | Train Loss: 1.0868701 Vali Loss: 0.2124881 Test Loss: 0.2672713 84 | EarlyStopping counter: 1 out of 15 85 | iters: 100, epoch: 8 | loss: 1.1234723 86 | speed: 0.2046s/iter; left time: 9284.9686s 87 | iters: 200, epoch: 8 | loss: 1.2608228 88 | speed: 0.0190s/iter; left time: 862.2030s 89 | iters: 300, epoch: 8 | loss: 1.0801073 90 | speed: 0.0204s/iter; left time: 922.8589s 91 | iters: 400, epoch: 8 | loss: 0.9981530 92 | speed: 0.0207s/iter; left time: 931.1043s 93 | Epoch: 8 cost time: 10.231593608856201 94 | Epoch: 8, Steps: 489 | Train Loss: 1.0864880 Vali Loss: 0.2119690 Test Loss: 0.2675624 95 | EarlyStopping counter: 2 out of 15 96 | iters: 100, epoch: 9 | loss: 0.9959903 97 | speed: 0.2169s/iter; left time: 9735.9288s 98 | iters: 200, epoch: 9 | loss: 1.0518769 99 | speed: 0.0190s/iter; left time: 851.3595s 100 | iters: 300, epoch: 9 | loss: 1.0405982 101 | speed: 0.0180s/iter; left time: 803.0948s 102 | iters: 400, epoch: 9 | loss: 1.2137454 103 | speed: 0.0198s/iter; left time: 881.9720s 104 | Epoch: 9 cost time: 10.221571683883667 105 | Epoch: 9, Steps: 489 | Train Loss: 1.0858330 Vali Loss: 0.2124533 Test Loss: 0.2675863 106 | EarlyStopping counter: 3 out of 15 107 | iters: 100, epoch: 10 | loss: 0.8616369 108 | speed: 0.2214s/iter; left time: 9831.9446s 109 | iters: 200, epoch: 10 | loss: 0.9048994 110 | speed: 0.0205s/iter; left time: 908.7421s 111 | iters: 300, epoch: 10 | loss: 1.0819223 112 | speed: 0.0194s/iter; left time: 855.7247s 113 | iters: 400, epoch: 10 | loss: 1.0557984 114 | speed: 0.0221s/iter; left time: 975.3344s 115 | Epoch: 10 cost time: 10.590017557144165 116 | Epoch: 10, Steps: 489 | Train Loss: 1.0856947 Vali Loss: 0.2126556 Test Loss: 0.2675454 117 | EarlyStopping counter: 4 out of 15 118 | iters: 100, epoch: 11 | loss: 1.2938403 119 | speed: 0.2072s/iter; left time: 9098.3060s 120 | iters: 200, epoch: 11 | loss: 1.2889109 121 | speed: 0.0218s/iter; left time: 954.5327s 122 | iters: 300, epoch: 11 | loss: 0.8621594 123 | speed: 0.0226s/iter; left time: 987.0310s 124 | iters: 400, epoch: 11 | loss: 1.5878181 125 | speed: 0.0220s/iter; left time: 960.0786s 126 | Epoch: 11 cost time: 11.158379077911377 127 | Epoch: 11, Steps: 489 | Train Loss: 1.0857613 Vali Loss: 0.2124399 Test Loss: 0.2675709 128 | EarlyStopping counter: 5 out of 15 129 | iters: 100, epoch: 12 | loss: 0.9563729 130 | speed: 0.2154s/iter; left time: 9352.5893s 131 | iters: 200, epoch: 12 | loss: 0.9781829 132 | speed: 0.0183s/iter; left time: 793.6524s 133 | iters: 300, epoch: 12 | loss: 
1.1510708 134 | speed: 0.0188s/iter; left time: 812.5816s 135 | iters: 400, epoch: 12 | loss: 1.2897160 136 | speed: 0.0214s/iter; left time: 920.9198s 137 | Epoch: 12 cost time: 10.067667245864868 138 | Epoch: 12, Steps: 489 | Train Loss: 1.0854763 Vali Loss: 0.2124651 Test Loss: 0.2675539 139 | EarlyStopping counter: 6 out of 15 140 | iters: 100, epoch: 13 | loss: 0.9559304 141 | speed: 0.2045s/iter; left time: 8779.5223s 142 | iters: 200, epoch: 13 | loss: 1.1045755 143 | speed: 0.0171s/iter; left time: 734.1477s 144 | iters: 300, epoch: 13 | loss: 0.9963694 145 | speed: 0.0169s/iter; left time: 723.9679s 146 | iters: 400, epoch: 13 | loss: 1.0049671 147 | speed: 0.0187s/iter; left time: 795.4594s 148 | Epoch: 13 cost time: 9.14898943901062 149 | Epoch: 13, Steps: 489 | Train Loss: 1.0854715 Vali Loss: 0.2125093 Test Loss: 0.2675530 150 | EarlyStopping counter: 7 out of 15 151 | iters: 100, epoch: 14 | loss: 0.9985921 152 | speed: 0.2050s/iter; left time: 8700.3098s 153 | iters: 200, epoch: 14 | loss: 1.2188470 154 | speed: 0.0207s/iter; left time: 874.9526s 155 | iters: 300, epoch: 14 | loss: 1.1288739 156 | speed: 0.0205s/iter; left time: 868.0010s 157 | iters: 400, epoch: 14 | loss: 1.0829604 158 | speed: 0.0193s/iter; left time: 812.6927s 159 | Epoch: 14 cost time: 10.525816440582275 160 | Epoch: 14, Steps: 489 | Train Loss: 1.0856374 Vali Loss: 0.2124308 Test Loss: 0.2675517 161 | EarlyStopping counter: 8 out of 15 162 | iters: 100, epoch: 15 | loss: 1.1495571 163 | speed: 0.2225s/iter; left time: 9334.0665s 164 | iters: 200, epoch: 15 | loss: 1.0717746 165 | speed: 0.0217s/iter; left time: 909.7166s 166 | iters: 300, epoch: 15 | loss: 1.2859591 167 | speed: 0.0226s/iter; left time: 943.6584s 168 | iters: 400, epoch: 15 | loss: 1.0021524 169 | speed: 0.0219s/iter; left time: 912.9255s 170 | Epoch: 15 cost time: 11.097302198410034 171 | Epoch: 15, Steps: 489 | Train Loss: 1.0852697 Vali Loss: 0.2125152 Test Loss: 0.2675496 172 | EarlyStopping counter: 9 out of 15 173 | iters: 100, epoch: 16 | loss: 0.9766647 174 | speed: 0.2064s/iter; left time: 8560.1235s 175 | iters: 200, epoch: 16 | loss: 1.2804017 176 | speed: 0.0231s/iter; left time: 956.3917s 177 | iters: 300, epoch: 16 | loss: 0.9813588 178 | speed: 0.0216s/iter; left time: 891.6493s 179 | iters: 400, epoch: 16 | loss: 1.3261756 180 | speed: 0.0217s/iter; left time: 892.6489s 181 | Epoch: 16 cost time: 10.904613494873047 182 | Epoch: 16, Steps: 489 | Train Loss: 1.0855490 Vali Loss: 0.2123469 Test Loss: 0.2675494 183 | EarlyStopping counter: 10 out of 15 184 | iters: 100, epoch: 17 | loss: 1.1691746 185 | speed: 0.2047s/iter; left time: 8386.7644s 186 | iters: 200, epoch: 17 | loss: 1.1967493 187 | speed: 0.0200s/iter; left time: 817.0866s 188 | iters: 300, epoch: 17 | loss: 1.0677667 189 | speed: 0.0201s/iter; left time: 818.0830s 190 | iters: 400, epoch: 17 | loss: 1.2999599 191 | speed: 0.0202s/iter; left time: 822.6455s 192 | Epoch: 17 cost time: 10.270443439483643 193 | Epoch: 17, Steps: 489 | Train Loss: 1.0852954 Vali Loss: 0.2125430 Test Loss: 0.2675496 194 | EarlyStopping counter: 11 out of 15 195 | iters: 100, epoch: 18 | loss: 0.9941216 196 | speed: 0.2091s/iter; left time: 8467.7581s 197 | iters: 200, epoch: 18 | loss: 1.2278107 198 | speed: 0.0203s/iter; left time: 820.0047s 199 | iters: 300, epoch: 18 | loss: 1.0418811 200 | speed: 0.0203s/iter; left time: 816.7103s 201 | iters: 400, epoch: 18 | loss: 1.0052992 202 | speed: 0.0211s/iter; left time: 847.2039s 203 | Epoch: 18 cost time: 10.53197979927063 204 | 
Epoch: 18, Steps: 489 | Train Loss: 1.0854569 Vali Loss: 0.2125082 Test Loss: 0.2675495 205 | EarlyStopping counter: 12 out of 15 206 | iters: 100, epoch: 19 | loss: 1.0292206 207 | speed: 0.2025s/iter; left time: 8099.1042s 208 | iters: 200, epoch: 19 | loss: 1.3430529 209 | speed: 0.0200s/iter; left time: 798.0015s 210 | iters: 300, epoch: 19 | loss: 1.1487910 211 | speed: 0.0198s/iter; left time: 786.4103s 212 | iters: 400, epoch: 19 | loss: 0.9962803 213 | speed: 0.0198s/iter; left time: 784.6121s 214 | Epoch: 19 cost time: 10.104159593582153 215 | Epoch: 19, Steps: 489 | Train Loss: 1.0856647 Vali Loss: 0.2125033 Test Loss: 0.2675495 216 | EarlyStopping counter: 13 out of 15 217 | iters: 100, epoch: 20 | loss: 1.3124194 218 | speed: 0.2047s/iter; left time: 8086.1834s 219 | iters: 200, epoch: 20 | loss: 0.9292442 220 | speed: 0.0215s/iter; left time: 847.9603s 221 | iters: 300, epoch: 20 | loss: 1.0068636 222 | speed: 0.0217s/iter; left time: 854.4689s 223 | iters: 400, epoch: 20 | loss: 1.1766360 224 | speed: 0.0198s/iter; left time: 777.9083s 225 | Epoch: 20 cost time: 10.42854928970337 226 | Epoch: 20, Steps: 489 | Train Loss: 1.0856488 Vali Loss: 0.2125218 Test Loss: 0.2675495 227 | EarlyStopping counter: 14 out of 15 228 | iters: 100, epoch: 21 | loss: 1.0456301 229 | speed: 0.2203s/iter; left time: 8597.1017s 230 | iters: 200, epoch: 21 | loss: 0.9589436 231 | speed: 0.0230s/iter; left time: 893.2768s 232 | iters: 300, epoch: 21 | loss: 0.8971894 233 | speed: 0.0206s/iter; left time: 798.5903s 234 | iters: 400, epoch: 21 | loss: 1.1696072 235 | speed: 0.0231s/iter; left time: 894.3884s 236 | Epoch: 21 cost time: 10.697756290435791 237 | Epoch: 21, Steps: 489 | Train Loss: 1.0853708 Vali Loss: 0.2124989 Test Loss: 0.2675495 238 | EarlyStopping counter: 15 out of 15 239 | Early stopping 240 | >>>>>>>testing : ETTh2_720_96_TimeBridge_ETTh2_bs16_ftM_sl720_ll48_pl96_dm128_nh4_ial3_pdl1_cal0_df128_ebtimeF_Exp_0<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 241 | test 2785 242 | test shape: (2785, 1, 96, 7) (2785, 1, 96, 7) 243 | test shape: (2785, 96, 7) (2785, 96, 7) 244 | mse:0.26756221055984497, mae:0.32869112491607666 245 | rmse:0.517264187335968, mape:0.4871687889099121, mspe:271.95086669921875 246 | --------------------------------------------------------------------------------
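For reference, a minimal standalone sketch of the time-frequency MAE loss defined in experiments/exp_long_term_forecasting.py (Exp_Long_Term_Forecast.time_freq_mae). Here alpha corresponds to the --alpha argument recorded in the Namespace lines of the logs above (e.g. 0.35 for the ETTh runs, 0.2 for electricity, 0.1 for weather); the tensor shapes in the usage lines are illustrative assumptions, not repository defaults.

    import torch

    def time_freq_mae(outputs, batch_y, alpha=0.35):
        # time-domain mean absolute error
        t_loss = (outputs - batch_y).abs().mean()
        # frequency-domain MAE: rFFT along the time axis (dim=1);
        # .abs() on the complex difference gives the spectral magnitude error
        f_loss = (torch.fft.rfft(outputs, dim=1) - torch.fft.rfft(batch_y, dim=1)).abs().mean()
        # blend the two terms: alpha weights the frequency component
        return (1 - alpha) * t_loss + alpha * f_loss

    # usage sketch with [batch, pred_len, channels] tensors, e.g. an ETTh2-like setting
    pred = torch.randn(16, 96, 7)
    true = torch.randn(16, 96, 7)
    print(time_freq_mae(pred, true, alpha=0.35))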