├── seq2seq_c
│   ├── ops
│   │   ├── __init__.py
│   │   └── gen_beam_search_ops.py
│   ├── python
│   │   ├── __init__.py
│   │   ├── __init__.pyc
│   │   └── ops
│   │       ├── loss.pyc
│   │       ├── helper.pyc
│   │       ├── __init__.pyc
│   │       ├── decoder.pyc
│   │       ├── basic_decoder.pyc
│   │       ├── _beam_search_ops.so
│   │       ├── beam_search_ops.pyc
│   │       ├── attention_wrapper.pyc
│   │       ├── beam_search_decoder.pyc
│   │       ├── __init__.py
│   │       ├── beam_search_ops.py
│   │       ├── loss.py
│   │       ├── basic_decoder.py
│   │       ├── decoder.py
│   │       └── helper.py
│   ├── __init__.pyc
│   └── __init__.py
├── README.md
└── Trip
    ├── data
    │   ├── Osak_set.dat
    │   ├── Glas_set.dat
    │   ├── trajid.dat
    │   ├── TKY_split200_set.dat
    │   ├── Toro_set.dat
    │   ├── TKY_split400_set.dat
    │   ├── Melb_set.dat
    │   └── Edin_set.dat
    ├── origin_data
    │   ├── poi-Glas.csv
    │   ├── poi-TKY_split200.csv
    │   ├── poi-TKY_split400.csv
    │   ├── poi-Osak.csv
    │   ├── poi-Edin.csv
    │   ├── poi-Toro.csv
    │   └── poi-Melb.csv
    ├── metric.py
    ├── ops.py
    ├── AMSGrad.py
    └── gae_context.py
/seq2seq_c/ops/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/seq2seq_c/python/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/seq2seq_c/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/__init__.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/__init__.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/loss.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/loss.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/helper.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/helper.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/__init__.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/decoder.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/decoder.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/basic_decoder.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/basic_decoder.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/_beam_search_ops.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/_beam_search_ops.so
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/beam_search_ops.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/beam_search_ops.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/attention_wrapper.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/attention_wrapper.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/beam_search_decoder.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gcooq/DeepTrip/HEAD/seq2seq_c/python/ops/beam_search_decoder.pyc
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 |
16 | from __future__ import absolute_import
17 | from __future__ import division
18 | from __future__ import print_function
19 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DeepTrip
2 | Source code for DeepTrip.
3 | This is an implementation of the deep-learning-based DeepTrip model.
4 |
5 | # Environment
6 | * Python >= 2.7
7 | * TensorFlow 1.8 or later
8 |
9 | # Dataset
10 | We use trajectories from multiple cities, extracted from real-world location-based social network data, as follows.
11 | * Flickr:
12 | * Foursquare (Tokyo):
13 | * (Remark) Please do not use these datasets for commercial purposes. For academic use, please cite the paper. For the Foursquare dataset, we use the same method as in [35] to construct the social networks; thanks for their help (see references [2, 35] in the paper).
14 | # Usage
15 | * python gae_context.py
16 |
17 | # Reference
18 | We hope this implementation helps with your projects. Any comments and feedback are appreciated.
19 |
20 | * Gao, Qiang, Goce Trajcevski, Fan Zhou, Kunpeng Zhang, Ting Zhong, and Fengli Zhang. "DeepTrip: Adversarially Understanding Human Mobility for Trip Recommendation." In Proceedings of the 27th ACM SIGSPATIAL International Conference on Advances in Geographic Information Systems, pp. 444-447. 2019.
21 |
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/beam_search_ops.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 | """Beam Search helper ops."""
16 | from __future__ import absolute_import
17 | from __future__ import division
18 | from __future__ import print_function
19 |
20 | from tensorflow.contrib.seq2seq.ops import gen_beam_search_ops
21 | from tensorflow.contrib.util import loader
22 | from tensorflow.python.platform import resource_loader
23 |
24 | _beam_search_ops_so = loader.load_op_library(
25 | resource_loader.get_path_to_datafile("_beam_search_ops.so"))
26 |
27 | gather_tree = gen_beam_search_ops.gather_tree
28 |
--------------------------------------------------------------------------------
/Trip/data/Osak_set.dat:
--------------------------------------------------------------------------------
1 | 95157838@N00-1085 1 19 12
2 | 95157838@N00-1084 5 4 7
3 | 95157838@N00-1086 9 0 7
4 | 22669489@N00-86 6 1 0 3 7
5 | 43749456@N05-441 6 4 1 12
6 | 31484513@N00-263 8 19 7
7 | 97972855@N00-1101 4 1 12 7
8 | 85736811@N00-935 1 3 7
9 | 91041933@N00-989 5 4 20
10 | 71459982@N00-736 8 0 3
11 | 27921677@N00-181 5 0 3
12 | 38235150@N00-367 6 9 1
13 | 7183194@N02-744 16 19 12 0 7 10
14 | 51035737977@N01-574 4 1 0
15 | 8909846@N06-970 4 0 3
16 | 11354044@N05-8 9 21 12 7
17 | 22087824@N00-83 1 29 0
18 | 77526889@N00-820 6 9 4 18 0
19 | 53289105@N00-584 1 19 3 7 17
20 | 21147679@N08-69 0 7 17
21 | 14003952@N04-43 5 4 0
22 | 87807876@N00-955 16 12 0 3 7
23 | 63282110@N00-680 12 3 10
24 | 47957339@N00-487 5 8 11
25 | 12452841@N00-23 0 3 10
26 | 35391423@N00-324 6 9 5 1
27 | 64667184@N02-697 5 0 10
28 | 26678013@N00-171 6 9 11
29 | 12452841@N00-24 16 19 0 3 17
30 | 91609485@N00-1001 6 9 11
31 | 75595126@N00-801 4 13 7
32 | 68089733@N00-713 6 9 0
33 | 68089733@N00-712 6 9 5
34 | 44926895@N07-462 1 0 7 15
35 | 56944727@N00-626 4 1 0
36 | 49462908@N00-526 5 4 0 17 10
37 | 29989965@N00-204 6 9 1 0 7
38 | 94387617@N00-1069 0 3 17
39 | 10340578@N06-3 16 3 7
40 | 10307040@N08-2 4 1 3 7
41 | 82414749@N00-897 5 4 0
42 | 10340578@N06-4 6 9 1
43 | 87807876@N00-945 0 3 11
44 | 36330824413@N01-338 5 1 3
45 | 88016824@N00-963 6 9 1
46 | 94387617@N00-1028 5 3 11
47 | 9643030@N08-1094 5 1 12 7
48 |
--------------------------------------------------------------------------------
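/Trip/data/*_set.dat — format note (annotation, not part of the repository):
--------------------------------------------------------------------------------
Each line appears to hold one trajectory: a "<FlickrUserID>-<trajectoryID>" key
followed by a space-separated sequence of POI ids. A minimal loading sketch
under that assumption:

def load_traj_set(path):
    """Map trajectory key -> list of POI ids."""
    trajs = {}
    with open(path) as f:
        for line in f:
            fields = line.split()
            if len(fields) < 2:  # skip blank/short lines
                continue
            trajs[fields[0]] = [int(p) for p in fields[1:]]
    return trajs

# e.g. trajs = load_traj_set('Trip/data/Osak_set.dat')
--------------------------------------------------------------------------------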
/Trip/origin_data/poi-Glas.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 1,Transport,-4.258101169014088,55.85792013480796
3 | 2,Transport,-4.250727638646297,55.861495672488964
4 | 4,Transport,-4.595632,55.509388
5 | 5,Transport,-4.331517,55.868896500000005
6 | 6,Transport,-4.269904625000001,55.85613520833331
7 | 7,Education,-4.242159871999993,55.861145432
8 | 8,Education,-4.249761639639642,55.86707233333342
9 | 9,Education,-4.285801843195263,55.872184307692386
10 | 10,Education,-4.288902204592891,55.871993964508796
11 | 11,Education,-4.264269125992071,55.866234078372784
12 | 12,Education,-4.25708622437137,55.86211197872341
13 | 13,Religion,-4.2344129376146915,55.86270032844042
14 | 14,Religion,-4.250646,55.853465421052626
15 | 15,Shopping,-4.292544210306379,55.87441117270186
16 | 16,Shopping,-4.255375836084915,55.86053728183968
17 | 17,Museum,-4.30770575,55.830818249999986
18 | 18,Park,-4.2056362195121935,55.849497292682905
19 | 19,Structure,-4.288323547169811,55.85990364420503
20 | 20,Structure,-4.25317754353561,55.86441777836411
21 | 21,Museum,-4.25158601294498,55.859934364077695
22 | 22,Park,-4.238215758620696,55.85119285775855
23 | 24,Structure,-4.309061802631581,55.853225565789465
24 | 25,Museum,-4.294561920863311,55.86907381294964
25 | 26,Museum,-4.290447339401815,55.86847013524039
26 | 27,Structure,-4.317774888888886,55.82766903030305
27 | 28,Structure,-4.261300118451011,55.864777521639816
28 | 29,Structure,-4.268838855855862,55.86140168468457
29 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-TKY_split200.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 5,Transport,139.7730182486,35.6985961953
3 | 8,Store,139.7746217251,35.699015417
4 | 11,Transport,139.7760584402,35.7134117258
5 | 21,Transport,139.7660815716,35.6813787223
6 | 28,Transport,139.770963192,35.7277712438
7 | 35,Transport,139.7002792266,35.6908026942
8 | 59,Transport,139.7020304203,35.658196312
9 | 71,Transport,139.7581327377,35.6666920514
10 | 77,Transport,139.6596515179,35.5757297602
11 | 104,Transport,139.5443956554,35.6518891133
12 | 212,Transport,139.56082955,35.7027062994
13 | 253,Transport,139.7013260345,35.658521727
14 | 260,Transport,139.7404289246,35.6301457297
15 | 261,Transport,139.7279191017,35.5884342009
16 | 263,Transport,139.7160503268,35.5624778239
17 | 284,Transport,139.7109555728,35.7298646365
18 | 297,Transport,139.7567421198,35.6556181358
19 | 336,Transport,139.7347489277,35.6071430623
20 | 351,Transport,139.7208380699,35.7780624397
21 | 428,Transport,139.7285527736,35.6196991729
22 | 449,Transport,139.6969020367,35.531326929
23 | 480,Transport,139.765222,35.699583
24 | 603,Transport,139.6657991409,35.7058285501
25 | 640,Transport,139.7038972378,35.7129024404
26 | 651,Transport,139.747568965,35.6457318132
27 | 1,Transport,139.8051452637,35.7496469435
28 | 765,Transport,139.7387123108,35.6282272324
29 | 875,Transport,139.5799040794,35.7032018165
30 | 1005,Transport,139.7380418533,35.6281616973
31 | 2151,Transport,139.5075917244,35.6037536336
32 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-TKY_split400.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 5,Transport,139.7730182486,35.6985961953
3 | 8,Store,139.7746217251,35.699015417
4 | 11,Transport,139.7760584402,35.7134117258
5 | 21,Transport,139.7660815716,35.6813787223
6 | 28,Transport,139.770963192,35.7277712438
7 | 35,Transport,139.7002792266,35.6908026942
8 | 59,Transport,139.7020304203,35.658196312
9 | 71,Transport,139.7581327377,35.6666920514
10 | 77,Transport,139.6596515179,35.5757297602
11 | 104,Transport,139.5443956554,35.6518891133
12 | 212,Transport,139.56082955,35.7027062994
13 | 253,Transport,139.7013260345,35.658521727
14 | 260,Transport,139.7404289246,35.6301457297
15 | 261,Transport,139.7279191017,35.5884342009
16 | 263,Transport,139.7160503268,35.5624778239
17 | 284,Transport,139.7109555728,35.7298646365
18 | 297,Transport,139.7567421198,35.6556181358
19 | 336,Transport,139.7347489277,35.6071430623
20 | 351,Transport,139.7208380699,35.7780624397
21 | 428,Transport,139.7285527736,35.6196991729
22 | 449,Transport,139.6969020367,35.531326929
23 | 480,Transport,139.765222,35.699583
24 | 603,Transport,139.6657991409,35.7058285501
25 | 640,Transport,139.7038972378,35.7129024404
26 | 651,Transport,139.747568965,35.6457318132
27 | 1,Transport,139.8051452637,35.7496469435
28 | 765,Transport,139.7387123108,35.6282272324
29 | 875,Transport,139.5799040794,35.7032018165
30 | 1005,Transport,139.7380418533,35.6281616973
31 | 2151,Transport,139.5075917244,35.6037536336
32 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-Osak.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 1,Amusement,135.4289382878105,34.65479175959366
3 | 2,Amusement,135.4306267944665,34.65614490909093
4 | 3,Amusement,135.50855763940035,34.65242308514182
5 | 4,Amusement,135.4336271953132,34.66475830078134
6 | 5,Amusement,135.4998940285709,34.703680330158626
7 | 6,Amusement,135.49013254999943,34.705322627272615
8 | 7,Park,135.50696077664924,34.69266442131975
9 | 8,Park,135.52582424865906,34.68653437337396
10 | 9,Park,135.48880599999998,34.612893
11 | 10,Park,135.51043665534092,34.65104219417449
12 | 11,Park,135.49385408196719,34.68545975409837
13 | 12,Park,135.52128527777776,34.61132941666667
14 | 15,Historical,135.5164392663755,34.65409199563325
15 | 16,Historical,135.49256448076923,34.612657000000006
16 | 17,Historical,135.52994,34.677756666666674
17 | 18,Historical,135.51246823934412,34.695621363934485
18 | 19,Entertainment,135.4796296190476,34.66778380952381
19 | 20,Entertainment,135.5009323832682,34.66742997665371
20 | 21,Entertainment,135.500556111111,34.67239257239053
21 | 22,Entertainment,135.5061961355934,34.652166481840126
22 | 23,Entertainment,135.50561435294117,34.660004392156864
23 | 24,Entertainment,135.49807975999988,34.67208495200003
24 | 25,Entertainment,135.48224743939394,34.72006954545454
25 | 26,Entertainment,139.76896139828057,35.675154257879626
26 | 27,Entertainment,135.501092,34.65772466666667
27 | 28,Entertainment,135.50244797297296,34.70325674324324
28 | 29,Entertainment,135.50583799999998,34.644641
29 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-Edin.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 1,Historical,-3.199862034580875,55.94884683716422
3 | 2,Historical,-3.1724675937052966,55.95276297997137
4 | 3,Museum,-3.190347155095568,55.9467227646501
5 | 4,Structure,-3.1751214683972875,55.95223434537238
6 | 5,Structure,-3.3887200833333337,55.999739000000005
7 | 6,Structure,-3.404049090909091,56.0012820909091
8 | 7,Park,-3.1618879074074004,55.94412091798946
9 | 8,Historical,-3.193193944356141,55.952538583465206
10 | 9,Cultural,-3.190637230008985,55.94980869047352
11 | 10,Park,-3.1829573739644994,55.95515047337272
12 | 11,Historical,-3.181937191191187,55.95460454054049
13 | 12,Cultural,-3.2044906274834797,55.94680475745124
14 | 13,Cultural,-3.1869824308822845,55.94635503896903
15 | 14,Structure,-3.209618379562047,55.94610982238432
16 | 15,Structure,-3.196984674391582,55.95182393800676
17 | 16,Museum,-3.195737522458634,55.951008898345115
18 | 17,Structure,-3.1902347191097684,55.948767777436686
19 | 18,Park,-3.202453709104388,55.95054091561817
20 | 19,Structure,-3.1892391019884583,55.95262047466306
21 | 20,Park,-3.207734793416563,55.96588335187255
22 | 21,Structure,-3.172436005988025,55.977925371257456
23 | 22,Structure,-3.2076172933754026,55.95175426182978
24 | 23,Structure,-3.1847238000000044,55.95370758124997
25 | 24,Entertainment,-3.1742341271393735,55.95074926405848
26 | 25,Historical,-3.2392285999999997,55.9184182
27 | 27,Museum,-3.2271948343195236,55.95087610650889
28 | 28,Structure,-3.1868621630188576,55.948245795471635
29 | 29,Structure,-3.1952324337146703,55.94908885935436
30 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-Toro.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLon,poiLat
2 | 1,Sport,-79.379243379063,43.64318250142281
3 | 2,Sport,-79.41863385714308,43.63277161740573
4 | 3,Sport,-79.3800452078488,43.66217527180223
5 | 4,Sport,-79.38928974410585,43.641297368457444
6 | 6,Cultural,-79.39239594093709,43.65366205295312
7 | 7,Cultural,-79.37732735752459,43.647150975158695
8 | 8,Cultural,-79.3853489798394,43.64238526267252
9 | 9,Cultural,-79.33916950000024,43.71644713669081
10 | 10,Cultural,-79.36110748265887,43.66706691907516
11 | 11,Cultural,-79.3944584713709,43.66718257115105
12 | 12,Cultural,-79.18221148856527,43.82008013305617
13 | 13,Cultural,-79.40936380912937,43.67815669398353
14 | 14,Amusement,-79.41700068794327,43.633924191489356
15 | 15,Amusement,-79.3735729115044,43.61983586725666
16 | 16,Amusement,-79.38706547902999,43.642849137630805
17 | 17,Amusement,-79.41601125371233,43.63256282673258
18 | 18,Amusement,-79.45080761538462,43.6371833846154
19 | 19,Beach,-79.3782267477478,43.62169749549551
20 | 20,Beach,-79.46238176404492,43.64655716853935
21 | 21,Beach,-79.38045327151268,43.65627400696254
22 | 22,Beach,-79.38370190118991,43.652478209270036
23 | 23,Shopping,-79.3798840401936,43.653868152197106
24 | 24,Shopping,-79.38232002042837,43.63862138326839
25 | 25,Shopping,-79.4011678236331,43.65474784303318
26 | 26,Shopping,-79.45268321153846,43.725760711538456
27 | 27,Shopping,-79.3909345777776,43.67010353968252
28 | 28,Structure,-79.38118417735065,43.65218102670948
29 | 29,Structure,-79.39126472274569,43.66213848048437
30 | 30,Structure,-79.3805837906372,43.64565054031221
31 |
--------------------------------------------------------------------------------
/Trip/metric.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | def calc_F1(traj_act, traj_rec, noloop=False):
4 | '''Compute recall, precision and F1 for recommended trajectories'''
5 | assert (isinstance(noloop, bool))
6 | assert (len(traj_act) > 0)
7 | assert (len(traj_rec) > 0)
8 |
9 | if noloop:
10 | intersize = len(set(traj_act) & set(traj_rec))
11 | else:
12 | match_tags = np.zeros(len(traj_act), dtype=bool)
13 | for poi in traj_rec:
14 | for j in range(len(traj_act)):
15 | if not match_tags[j] and poi == traj_act[j]:
16 | match_tags[j] = True
17 | break
18 | intersize = np.nonzero(match_tags)[0].shape[0]
19 |
20 | recall = intersize * 1.0 / len(traj_act)
21 | precision = intersize * 1.0 / len(traj_rec)
22 | Denominator=recall+precision
23 | if Denominator==0:
24 | Denominator=1
25 | F1 = 2 * precision * recall * 1.0 / Denominator
26 | return F1
27 |
28 |
29 | # cpdef float calc_pairsF1(y, y_hat):
30 | def calc_pairsF1(y, y_hat):
31 | assert (len(y) > 0)
32 | # assert (len(y) == len(set(y))) # no loops in y
33 | # cdef int n, nr, nc, poi1, poi2, i, j
34 | # cdef double n0, n0r
35 | n = len(y)
36 | nr = len(y_hat)
37 | n0 = n * (n - 1) / 2
38 | n0r = nr * (nr - 1) / 2
39 |
40 | # y determines the correct visiting order
41 | order_dict = dict()
42 | for i in range(n):
43 | order_dict[y[i]] = i
44 |
45 | nc = 0
46 | for i in range(nr):
47 | poi1 = y_hat[i]
48 | for j in range(i + 1, nr):
49 | poi2 = y_hat[j]
50 | if poi1 in order_dict and poi2 in order_dict and poi1 != poi2:
51 | if order_dict[poi1] < order_dict[poi2]: nc += 1
52 |
53 | if nc == 0:
54 | F1 = 0
55 | else:
56 | # nc > 0 implies n >= 2 and nr >= 2, so n0 and n0r are nonzero here
57 | precision = (1.0 * nc) / (1.0 * n0r)
58 | recall = (1.0 * nc) / (1.0 * n0)
59 | F1 = 2. * precision * recall / (precision + recall)
60 | return float(F1)
61 |
62 | def calc_pairsF12(y, y_hat):
63 | '''Position-wise accuracy: fraction of positions where y and y_hat agree.'''
64 | f1 = 0
65 | for i in range(len(y)):
66 | if y[i] == y_hat[i]:
67 | f1 += 1
68 | return float(f1) / float(len(y))
--------------------------------------------------------------------------------
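/Trip/metric.py — usage note (annotation, not part of the repository):
--------------------------------------------------------------------------------
A quick sanity check of the two metrics above, with hypothetical POI sequences:

from metric import calc_F1, calc_pairsF1

actual = [6, 9, 1, 0, 7]        # ground-truth POI sequence
recommended = [6, 1, 9, 7, 0]   # same POIs, different visiting order

print(calc_F1(actual, recommended))       # 1.0  -- set overlap is perfect
print(calc_pairsF1(actual, recommended))  # 0.8  -- 8 of 10 ordered pairs agree
--------------------------------------------------------------------------------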
/seq2seq_c/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 |
16 | """Ops for building neural network seq2seq decoders and losses.
17 |
18 | See the @{$python/contrib.seq2seq} guide.
19 | """
20 |
21 | from __future__ import absolute_import
22 | from __future__ import division
23 | from __future__ import print_function
24 |
25 | # pylint: disable=unused-import,wildcard-import,line-too-long
26 | from seq2seq_c.python.ops.attention_wrapper import *
27 | from seq2seq_c.python.ops.basic_decoder import *
28 | from seq2seq_c.python.ops.beam_search_decoder import *
29 | from seq2seq_c.python.ops.beam_search_ops import *
30 | from seq2seq_c.python.ops.decoder import *
31 | from seq2seq_c.python.ops.helper import *
32 | from seq2seq_c.python.ops.loss import *
33 | from tensorflow.python.util.all_util import remove_undocumented
34 | # pylint: enable=unused-import,wildcard-import,line-too-long
35 |
36 | _allowed_symbols = [
37 | "sequence_loss",
38 | "Decoder",
39 | "dynamic_decode",
40 | "BasicDecoder",
41 | "BasicDecoderOutput",
42 | "BeamSearchDecoder",
43 | "BeamSearchDecoderOutput",
44 | "BeamSearchDecoderState",
45 | "Helper",
46 | "CustomHelper",
47 | "FinalBeamSearchDecoderOutput",
48 | "gather_tree",
49 | "GreedyEmbeddingHelper",
50 | "GreedyEmbeddingHelper2",
51 | "InferenceHelper",
52 | "SampleEmbeddingHelper",
53 | "ScheduledEmbeddingTrainingHelper",
54 | "ScheduledOutputTrainingHelper",
55 | "TrainingHelper",
56 | "BahdanauAttention",
57 | "LuongAttention",
58 | "hardmax",
59 | "AttentionWrapperState",
60 | "AttentionWrapper",
61 | "AttentionMechanism",
62 | "tile_batch",
63 | "safe_cumprod",
64 | "monotonic_attention",
65 | "monotonic_probability_fn",
66 | "BahdanauMonotonicAttention",
67 | "LuongMonotonicAttention",
68 | ]
69 |
70 |
71 | remove_undocumented(__name__, _allowed_symbols)
72 |
--------------------------------------------------------------------------------
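/seq2seq_c/__init__.py — usage note (annotation, not part of the repository):
--------------------------------------------------------------------------------
Because this package re-exports the public tf.contrib.seq2seq names (and then
prunes anything outside _allowed_symbols via remove_undocumented), it can serve
as a drop-in replacement for the stock module. A sketch, assuming the
repository root is on sys.path and a TF 1.x build with contrib is installed:

import seq2seq_c as seq2seq

# seq2seq.BasicDecoder, seq2seq.TrainingHelper, seq2seq.dynamic_decode, and
# seq2seq.sequence_loss now resolve to the modified local variants.
--------------------------------------------------------------------------------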
/Trip/data/Glas_set.dat:
--------------------------------------------------------------------------------
1 | 55329285@N05-1400 3 13 2
2 | 72746018@N00-1716 2 6 8
3 | 72746018@N00-1713 6 1 8 4
4 | 43002463@N00-1162 13 2 7
5 | 7797604@N05-1845 2 8 7
6 | 28370443@N08-539 0 16 6 9 11 1 4 10
7 | 24616128@N00-479 0 11 8
8 | 33278177@N00-790 3 19 1 4
9 | 84272941@N00-1976 0 1 4
10 | 89182227@N00-2048 13 2 5
11 | 30024546@N04-671 0 9 8
12 | 20483509@N00-238 0 13 7
13 | 22087304@N07-390 1 4 7
14 | 11452054@N00-55 0 11 10
15 | 16312054@N06-154 3 9 1 8 4
16 | 11037560@N04-39 0 1 4
17 | 74937968@N00-1774 3 1 8
18 | 9828412@N05-2204 6 7 10
19 | 56846157@N00-1426 2 6 5 1 4
20 | 11037560@N04-38 0 11 4
21 | 32714545@N05-781 11 22 1
22 | 32693718@N07-780 3 0 1
23 | 51207064@N00-1324 0 9 8
24 | 30024546@N04-666 9 8 10
25 | 33563858@N00-796 11 8 10
26 | 30024546@N04-588 2 9 14
27 | 62798459@N00-1495 24 9 1
28 | 17358831@N00-178 6 1 10
29 | 91685781@N00-2081 0 1 4
30 | 70938076@N00-1676 0 1 8
31 | 51168133@N00-1305 0 2 11 1 4
32 | 112042056@N06-49 0 2 4 7
33 | 17643132@N00-184 0 11 1 4
34 | 35761150@N04-977 0 1 17
35 | 35761150@N04-976 3 13 2 5 1 4 7
36 | 82619366@N00-1910 0 2 9
37 | 82619366@N00-1911 13 2 5 7
38 | 55329285@N05-1383 13 2 5
39 | 92367195@N00-2106 23 7 20
40 | 55898913@N04-1416 3 0 19 1 7
41 | 35034349172@N01-900 2 5 14
42 | 82103247@N00-1897 0 1 4
43 | 16516058@N03-159 3 0 9 1 4
44 | 82103247@N00-1899 1 8 4
45 | 82103247@N00-1898 1 8 4
46 | 56889878@N03-1429 0 9 10
47 | 14578360@N00-119 13 2 5
48 | 7512717@N06-1779 0 9 1 8 4
49 | 21047061@N04-248 3 0 1
50 | 85397137@N00-1992 3 1 8 17
51 | 63651050@N00-1507 3 6 11
52 | 22087304@N07-386 0 13 2 4
53 | 41418909@N06-1124 0 1 4
54 | 34427470616@N01-814 3 1 10
55 | 34427470616@N01-815 3 13 8 10
56 | 30985799@N00-698 0 8 7
57 | 35468159247@N01-958 0 13 2 14
58 | 10482493@N05-31 0 1 8 4
59 | 9286726@N03-2143 3 0 11 4
60 | 7512717@N06-1780 3 6 1
61 | 7868489@N05-1852 0 11 10
62 | 57879122@N00-1444 0 1 4 10
63 | 8503026@N04-1985 3 0 13 2 7
64 | 73429256@N00-1733 3 11 1
65 | 41894174966@N01-1128 3 0 4
66 | 12982579@N00-90 6 8 14 7 10
67 | 38166049@N00-1084 11 8 7
68 | 16497759@N07-157 6 12 10
69 | 45072550@N03-1223 0 13 11 1 4
70 | 52686644@N04-1336 3 9 1 8 10
71 | 57474170@N05-1441 0 12 8 4
72 | 30671962@N05-683 0 25 4
73 | 29797746@N08-576 3 6 9 14 7 10 17
74 | 7587786@N05-1797 3 0 4
75 | 70284683@N06-1560 24 1 4
76 | 51207064@N00-1317 0 1 4
77 | 30511496@N00-681 0 6 11
78 | 14086506@N02-107 3 0 9 22 1 4
79 | 8286908@N02-1951 0 2 11
80 | 38826189@N00-1085 3 13 6 1
81 | 7512717@N06-1778 0 1 8 4
82 | 49253583@N02-1294 6 9 10
83 | 9511818@N03-2156 3 1 4
84 | 44232489@N02-1216 3 0 8
85 | 30713600@N00-685 3 0 1
86 | 23363966@N02-454 0 12 7
87 | 31438571@N07-733 0 1 4
88 | 48001787@N06-1250 0 8 7
89 | 31803571@N02-762 0 11 15
90 | 82103247@N00-1901 3 0 1 4
91 | 22751315@N05-419 3 0 4
92 | 25396215@N00-484 0 6 4
93 | 65244239@N00-1518 12 14 7
94 | 22087304@N07-401 23 7 20
95 | 35468159247@N01-965 2 5 12
96 | 98833136@N00-2214 0 11 4
97 | 35468152754@N01-923 3 0 1 4
98 | 37622685@N02-1076 13 2 7
99 | 44124372821@N01-1211 0 11 7 10
100 | 7883724@N05-1853 0 11 4
101 | 85217508@N00-1988 0 6 11 1 4 20
102 | 70544918@N00-1633 9 5 4
103 | 31564588@N06-760 0 1 4
104 | 35034349172@N01-899 2 5 14 7
105 | 35034349172@N01-898 2 14 7
106 | 22087304@N07-402 5 14 7
107 | 30024546@N04-644 6 9 7
108 | 49998283@N00-1298 0 6 8 10
109 | 72579404@N00-1712 0 1 8
110 | 49998283@N00-1297 9 11 10
111 | 17424601@N00-179 3 0 4
112 | 34995267@N00-827 9 10 17
113 |
--------------------------------------------------------------------------------
/Trip/origin_data/poi-Melb.csv:
--------------------------------------------------------------------------------
1 | poiID,poiCat,poiLat,poiLon
2 | 0,City precincts,-37.821670000000005,144.96778
3 | 1,City precincts,-37.817,144.946
4 | 2,City precincts,-37.8119,144.97299999999998
5 | 3,City precincts,-37.79972,144.96694
6 | 4,City precincts,-37.80778,144.96333
7 | 5,City precincts,-37.829,144.957
8 | 6,City precincts,-37.81667,144.98333
9 | 7,City precincts,-37.7963,144.9614
10 | 8,Shopping,-37.81583,144.96444
11 | 9,Shopping,-37.8139,144.96452
12 | 10,Shopping,-37.817563899999996,144.99032780000002
13 | 11,Shopping,-37.79833,144.978472
14 | 12,Shopping,-37.84083,144.99528
15 | 13,Shopping,-37.816583,144.9655
16 | 14,Shopping,-37.8168,144.9622
17 | 15,Shopping,-37.8175,144.96583
18 | 16,Shopping,-37.8096,144.961
19 | 17,Shopping,-37.81694,144.96556
20 | 18,Shopping,-37.8151,144.9641
21 | 19,Shopping,-37.810559999999995,144.96
22 | 20,Shopping,-37.847414,144.993503
23 | 21,Shopping,-37.806965999999996,144.956693
24 | 22,Shopping,-37.81,144.9652
25 | 23,Shopping,-37.8145,144.9641
26 | 24,Shopping,-37.8064,144.963
27 | 25,Entertainment,-37.822324,144.96009899999999
28 | 26,Entertainment,-37.868036,144.976369
29 | 27,Entertainment,-37.821044,144.95801699999998
30 | 28,Entertainment,-37.784762,144.95209499999999
31 | 29,Public galleries,-37.826496999999996,144.96701399999998
32 | 30,Public galleries,-37.7975,144.964111
33 | 31,Public galleries,-37.822595,144.968634
34 | 32,Institutions,-37.81384,144.963028
35 | 33,Institutions,-37.827939,144.976939
36 | 34,Institutions,-37.8297,144.9755
37 | 35,Institutions,-37.815145,144.966777
38 | 36,Institutions,-37.80806,144.96528
39 | 37,Institutions,-37.8087,144.966
40 | 38,Institutions,-37.812153,144.956794
41 | 39,Institutions,-37.813153,144.974121
42 | 40,Institutions,-37.811054999999996,144.97329
43 | 41,Institutions,-37.809801,144.964787
44 | 42,Institutions,-37.814132,144.957932
45 | 43,Institutions,-37.827391,144.970447
46 | 44,Structures,-37.82028,144.96833
47 | 45,Structures,-37.821670000000005,144.96444
48 | 46,Structures,-37.8256,144.9541
49 | 47,Structures,-37.8110723,144.9368763
50 | 48,Structures,-37.804728000000004,144.971225
51 | 49,Structures,-37.830434000000004,144.97325800000002
52 | 50,Structures,-37.816853,144.967384
53 | 51,Structures,-37.81,144.97611
54 | 52,Sports stadiums,-37.82528,144.98389
55 | 53,Sports stadiums,-37.816390000000006,144.9475
56 | 54,Sports stadiums,-37.790279999999996,144.9125
57 | 55,Sports stadiums,-37.821112,144.97741000000002
58 | 56,Sports stadiums,-37.82278,144.98167
59 | 57,Sports stadiums,-37.82,144.98333
60 | 58,Sports stadiums,-37.84972,144.96833
61 | 59,Sports stadiums,-37.84667,144.96611000000001
62 | 60,Sports stadiums,-37.84028,144.965
63 | 61,Sports stadiums,-37.8123777,144.93556740000002
64 | 62,Sports stadiums,-37.82472,144.98111
65 | 63,Sports stadiums,-37.821670000000005,144.97833
66 | 64,Sports stadiums,-37.97,145.03
67 | 65,Sports stadiums,-37.78389,144.96167
68 | 66,Parks and spaces,-37.846,144.97
69 | 67,Parks and spaces,-37.820370000000004,144.971938
70 | 68,Parks and spaces,-37.818329999999996,144.9725
71 | 69,Parks and spaces,-37.80611,144.97028
72 | 70,Parks and spaces,-37.816,144.967
73 | 71,Parks and spaces,-37.817797999999996,144.96871399999998
74 | 72,Parks and spaces,-37.8125641,144.9803925
75 | 73,Parks and spaces,-37.8105,144.9544
76 | 74,Parks and spaces,-37.8282598,144.97758149999999
77 | 75,Parks and spaces,-37.8218,144.9716
78 | 76,Parks and spaces,-37.8334,144.98033
79 | 77,Parks and spaces,-37.790183299999995,144.9511667
80 | 78,Parks and spaces,-37.814158,144.976194
81 | 79,Parks and spaces,-37.818043700000004,144.9852312
82 | 80,Transport,-37.8194778,144.932125
83 | 81,Transport,-37.819744,144.968516
84 | 82,Transport,-37.818078,144.96681
85 | 83,Transport,-37.67333,144.84333
86 | 84,Transport,-37.81,144.96278
87 | 85,Transport,-37.8184,144.9524
88 | 86,Transport,-37.829440000000005,144.89806000000002
89 | 87,Transport,-37.85194,144.90833
90 |
--------------------------------------------------------------------------------
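/Trip/origin_data/poi-Melb.csv — format note (annotation, not part of the repository):
--------------------------------------------------------------------------------
Unlike the other poi-*.csv files (header poiID,poiCat,poiLon,poiLat), the
Melbourne file orders the coordinate columns poiLat,poiLon. Selecting columns
by name rather than by position avoids swapped coordinates, e.g.:

import pandas as pd

poi = pd.read_csv('Trip/origin_data/poi-Melb.csv')
coords = poi[['poiLon', 'poiLat']].values  # column order now explicit
--------------------------------------------------------------------------------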
/seq2seq_c/python/ops/loss.py:
--------------------------------------------------------------------------------
1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 | """Seq2seq loss operations for use in sequence models.
16 | """
17 |
18 | from __future__ import absolute_import
19 | from __future__ import division
20 | from __future__ import print_function
21 |
22 | from tensorflow.python.framework import ops
23 | from tensorflow.python.ops import array_ops
24 | from tensorflow.python.ops import math_ops
25 | from tensorflow.python.ops import nn_ops
26 |
27 | __all__ = ["sequence_loss"]
28 |
29 |
30 | def sequence_loss(logits,
31 | targets,
32 | weights,
33 | average_across_timesteps=True,
34 | average_across_batch=True,
35 | softmax_loss_function=None,
36 | name=None):
37 | """Weighted cross-entropy loss for a sequence of logits.
38 |
39 | Depending on the values of `average_across_timesteps` and
40 | `average_across_batch`, the return Tensor will have rank 0, 1, or 2 as these
41 | arguments reduce the cross-entropy at each target, which has shape
42 | `[batch_size, sequence_length]`, over their respective dimensions. For
43 | example, if `average_across_timesteps` is `True` and `average_across_batch`
44 | is `False`, then the return Tensor will have shape `[batch_size]`.
45 |
46 | Args:
47 | logits: A Tensor of shape
48 | `[batch_size, sequence_length, num_decoder_symbols]` and dtype float.
49 | The logits correspond to the prediction across all classes at each
50 | timestep.
51 | targets: A Tensor of shape `[batch_size, sequence_length]` and dtype
52 | int. The target represents the true class at each timestep.
53 | weights: A Tensor of shape `[batch_size, sequence_length]` and dtype
54 | float. `weights` constitutes the weighting of each prediction in the
55 | sequence. When using `weights` as masking, set all valid timesteps to 1
56 | and all padded timesteps to 0, e.g. a mask returned by `tf.sequence_mask`.
57 | average_across_timesteps: If set, sum the cost across the sequence
58 | dimension and divide the cost by the total label weight across timesteps.
59 | average_across_batch: If set, sum the cost across the batch dimension and
60 | divide the returned cost by the batch size.
61 | softmax_loss_function: Function (labels, logits) -> loss-batch
62 | to be used instead of the standard softmax (the default if this is None).
63 | **Note that to avoid confusion, it is required for the function to accept
64 | named arguments.**
65 | name: Optional name for this operation, defaults to "sequence_loss".
66 |
67 | Returns:
68 | A float Tensor of rank 0, 1, or 2 depending on the
69 | `average_across_timesteps` and `average_across_batch` arguments. By default,
70 | it has rank 0 (scalar) and is the weighted average cross-entropy
71 | (log-perplexity) per symbol.
72 |
73 | Raises:
74 | ValueError: logits does not have 3 dimensions or targets does not have 2
75 | dimensions or weights does not have 2 dimensions.
76 | """
77 | if len(logits.get_shape()) != 3:
78 | raise ValueError("Logits must be a "
79 | "[batch_size x sequence_length x logits] tensor")
80 | if len(targets.get_shape()) != 2:
81 | raise ValueError("Targets must be a [batch_size x sequence_length] "
82 | "tensor")
83 | if len(weights.get_shape()) != 2:
84 | raise ValueError("Weights must be a [batch_size x sequence_length] "
85 | "tensor")
86 | with ops.name_scope(name, "sequence_loss", [logits, targets, weights]):
87 | num_classes = array_ops.shape(logits)[2]
88 | logits_flat = array_ops.reshape(logits, [-1, num_classes])
89 | targets = array_ops.reshape(targets, [-1])
90 | if softmax_loss_function is None:
91 | crossent = nn_ops.sparse_softmax_cross_entropy_with_logits(
92 | labels=targets, logits=logits_flat)
93 | else:
94 | crossent = softmax_loss_function(labels=targets, logits=logits_flat)
95 | crossent *= array_ops.reshape(weights, [-1])
96 | if average_across_timesteps and average_across_batch:
97 | crossent = math_ops.reduce_sum(crossent)
98 | total_size = math_ops.reduce_sum(weights)
99 | total_size += 1e-12 # to avoid division by 0 for all-0 weights
100 | crossent /= total_size
101 | else:
102 | batch_size = array_ops.shape(logits)[0]
103 | sequence_length = array_ops.shape(logits)[1]
104 | crossent = array_ops.reshape(crossent, [batch_size, sequence_length])
105 | if average_across_timesteps and not average_across_batch:
106 | crossent = math_ops.reduce_sum(crossent, axis=[1])
107 | total_size = math_ops.reduce_sum(weights, axis=[1])
108 | total_size += 1e-12 # to avoid division by 0 for all-0 weights
109 | crossent /= total_size
110 | if not average_across_timesteps and average_across_batch:
111 | crossent = math_ops.reduce_sum(crossent, axis=[0])
112 | total_size = math_ops.reduce_sum(weights, axis=[0])
113 | total_size += 1e-12 # to avoid division by 0 for all-0 weights
114 | crossent /= total_size
115 | return crossent
116 |
--------------------------------------------------------------------------------
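/seq2seq_c/python/ops/loss.py — usage note (annotation, not part of the repository):
--------------------------------------------------------------------------------
A minimal TF 1.x sketch of sequence_loss, using tf.sequence_mask to build the
weight tensor as the docstring suggests; all shapes and sizes here are made up:

import tensorflow as tf
from seq2seq_c.python.ops.loss import sequence_loss

batch_size, max_time, num_classes = 4, 7, 30
logits = tf.random_normal([batch_size, max_time, num_classes])
targets = tf.random_uniform([batch_size, max_time], maxval=num_classes,
                            dtype=tf.int32)
lengths = tf.constant([7, 5, 3, 6])
weights = tf.to_float(tf.sequence_mask(lengths, max_time))  # 1 = valid step

# scalar by default: weighted average cross-entropy per valid symbol
loss = sequence_loss(logits, targets, weights)
with tf.Session() as sess:
    print(sess.run(loss))
--------------------------------------------------------------------------------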
/seq2seq_c/ops/gen_beam_search_ops.py:
--------------------------------------------------------------------------------
1 | """Python wrappers around TensorFlow ops.
2 |
3 | This file is MACHINE GENERATED! Do not edit.
4 | Original C++ source file: beam_search_ops.cc
5 | """
6 |
7 | import collections as _collections
8 | import six as _six
9 |
10 | from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
11 | from tensorflow.python.eager import context as _context
12 | from tensorflow.python.eager import core as _core
13 | from tensorflow.python.eager import execute as _execute
14 | from tensorflow.python.framework import dtypes as _dtypes
15 | from tensorflow.python.framework import errors as _errors
16 | from tensorflow.python.framework import tensor_shape as _tensor_shape
17 |
18 | from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
19 | # Needed to trigger the call to _set_call_cpp_shape_fn.
20 | from tensorflow.python.framework import common_shapes as _common_shapes
21 | from tensorflow.python.framework import op_def_registry as _op_def_registry
22 | from tensorflow.python.framework import ops as _ops
23 | from tensorflow.python.framework import op_def_library as _op_def_library
24 | from tensorflow.python.util.tf_export import tf_export
25 |
26 |
27 | @tf_export('gather_tree')
28 | def gather_tree(step_ids, parent_ids, max_sequence_lengths, end_token, name=None):
29 | r"""Calculates the full beams from the per-step ids and parent beam ids.
30 |
31 | On CPU, if an out of bound parent id is found, an error is returned.
32 | On GPU, if an out of bound parent id is found, a -1 is stored in the
33 | corresponding output value and the execution for that beam returns early.
34 |
35 | For a given beam, past the time step containing the first decoded `end_token`
36 | all values are filled in with `end_token`.
37 |
38 | TODO(ebrevdo): fill in the remainder of this docstring.
39 |
40 | Args:
41 | step_ids: A `Tensor`. Must be one of the following types: `int32`.
42 | `[max_time, batch_size, beam_width]`.
43 | parent_ids: A `Tensor`. Must have the same type as `step_ids`.
44 | `[max_time, batch_size, beam_width]`.
45 | max_sequence_lengths: A `Tensor` of type `int32`. `[batch_size]`.
46 | end_token: A `Tensor`. Must have the same type as `step_ids`. `[]`.
47 | name: A name for the operation (optional).
48 |
49 | Returns:
50 | A `Tensor`. Has the same type as `step_ids`.
51 | `[max_time, batch_size, beam_width]`.
52 | """
53 | _ctx = _context._context
54 | if _ctx is None or not _ctx._eager_context.is_eager:
55 | _, _, _op = _op_def_lib._apply_op_helper(
56 | "GatherTree", step_ids=step_ids, parent_ids=parent_ids,
57 | max_sequence_lengths=max_sequence_lengths, end_token=end_token,
58 | name=name)
59 | _result = _op.outputs[:]
60 | _inputs_flat = _op.inputs
61 | _attrs = ("T", _op.get_attr("T"))
62 | _execute.record_gradient(
63 | "GatherTree", _inputs_flat, _attrs, _result, name)
64 | _result, = _result
65 | return _result
66 |
67 | else:
68 | try:
69 | _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
70 | _ctx._context_handle, _ctx._eager_context.device_name, "GatherTree",
71 | name, _ctx._post_execution_callbacks, step_ids, parent_ids,
72 | max_sequence_lengths, end_token)
73 | return _result
74 | except _core._FallbackException:
75 | return gather_tree_eager_fallback(
76 | step_ids, parent_ids, max_sequence_lengths, end_token, name=name,
77 | ctx=_ctx)
78 | except _core._NotOkStatusException as e:
79 | if name is not None:
80 | message = e.message + " name: " + name
81 | else:
82 | message = e.message
83 | _six.raise_from(_core._status_to_exception(e.code, message), None)
84 |
85 |
86 | def gather_tree_eager_fallback(step_ids, parent_ids, max_sequence_lengths, end_token, name=None, ctx=None):
87 | r"""This is the slowpath function for Eager mode.
88 | This is for function gather_tree
89 | """
90 | _ctx = ctx if ctx else _context.context()
91 | _attr_T, _inputs_T = _execute.args_to_matching_eager([step_ids, parent_ids, end_token], _ctx)
92 | (step_ids, parent_ids, end_token) = _inputs_T
93 | max_sequence_lengths = _ops.convert_to_tensor(max_sequence_lengths, _dtypes.int32)
94 | _inputs_flat = [step_ids, parent_ids, max_sequence_lengths, end_token]
95 | _attrs = ("T", _attr_T)
96 | _result = _execute.execute(b"GatherTree", 1, inputs=_inputs_flat,
97 | attrs=_attrs, ctx=_ctx, name=name)
98 | _execute.record_gradient(
99 | "GatherTree", _inputs_flat, _attrs, _result, name)
100 | _result, = _result
101 | return _result
102 |
103 | _ops.RegisterShape("GatherTree")(None)
104 |
105 | def _InitOpDefLibrary(op_list_proto_bytes):
106 | op_list = _op_def_pb2.OpList()
107 | op_list.ParseFromString(op_list_proto_bytes)
108 | _op_def_registry.register_op_list(op_list)
109 | op_def_lib = _op_def_library.OpDefLibrary()
110 | op_def_lib.add_op_list(op_list)
111 | return op_def_lib
112 | # op {
113 | # name: "GatherTree"
114 | # input_arg {
115 | # name: "step_ids"
116 | # type_attr: "T"
117 | # }
118 | # input_arg {
119 | # name: "parent_ids"
120 | # type_attr: "T"
121 | # }
122 | # input_arg {
123 | # name: "max_sequence_lengths"
124 | # type: DT_INT32
125 | # }
126 | # input_arg {
127 | # name: "end_token"
128 | # type_attr: "T"
129 | # }
130 | # output_arg {
131 | # name: "beams"
132 | # type_attr: "T"
133 | # }
134 | # attr {
135 | # name: "T"
136 | # type: "type"
137 | # allowed_values {
138 | # list {
139 | # type: DT_INT32
140 | # }
141 | # }
142 | # }
143 | # }
144 | _op_def_lib = _InitOpDefLibrary(b"\nt\n\nGatherTree\022\r\n\010step_ids\"\001T\022\017\n\nparent_ids\"\001T\022\030\n\024max_sequence_lengths\030\003\022\016\n\tend_token\"\001T\032\n\n\005beams\"\001T\"\020\n\001T\022\004type:\005\n\0032\001\003")
145 |
--------------------------------------------------------------------------------
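/seq2seq_c/ops/gen_beam_search_ops.py — worked example (annotation, not part of the repository):
--------------------------------------------------------------------------------
A tiny backtracking example for gather_tree, run here through the stock
tf.contrib.seq2seq.gather_tree in TF 1.x (the same kernel this wrapper
exposes). Shapes are [max_time=3, batch_size=1, beam_width=2]:

import tensorflow as tf

step_ids = tf.constant([[[1, 2]], [[3, 4]], [[5, 6]]], dtype=tf.int32)
parent_ids = tf.constant([[[0, 0]], [[0, 0]], [[1, 0]]], dtype=tf.int32)
max_sequence_lengths = tf.constant([3], dtype=tf.int32)

beams = tf.contrib.seq2seq.gather_tree(
    step_ids, parent_ids, max_sequence_lengths=max_sequence_lengths,
    end_token=9)

with tf.Session() as sess:
    # beam 0 backtracks 5 -> parent beam 1 (4) -> parent beam 0 (1): [1, 4, 5]
    # beam 1 backtracks 6 -> parent beam 0 (3) -> parent beam 0 (1): [1, 3, 6]
    print(sess.run(beams))  # [[[1 1]], [[4 3]], [[5 6]]]
--------------------------------------------------------------------------------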
/Trip/ops.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 |
3 | def fully_connected(input, output_shape, initializer, scope="fc", is_last=False, is_decoder=False):
4 | with tf.variable_scope(scope):
5 | input_shape = input.get_shape()[-1].value
6 | W = tf.get_variable("weight", [input_shape, output_shape], initializer=initializer)
7 | b = tf.get_variable("bias", [output_shape], initializer=initializer)
8 | fc = tf.add(tf.matmul(input, W), b)
9 | fc = normalize(fc)
10 |
11 | if not is_last:
12 | if not is_decoder:
13 | output = tf.nn.relu(fc)
14 | else:
15 | output = lrelu(fc)
16 | else:
17 | output = fc
18 |
19 | return output
20 |
21 | def normalize(inputs,
22 | type="bn",
23 | decay=.99,
24 | is_training=True,
25 | activation_fn=None,
26 | scope="normalize"):
27 | '''Applies {batch|layer} normalization.
28 |
29 | Args:
30 | inputs: A tensor with 2 or more dimensions, where the first dimension has
31 | `batch_size`. If type is `bn`, the normalization is over all but
32 | the last dimension. Or if type is `ln`, the normalization is over
33 | the last dimension. Note that this is different from the native
34 | `tf.contrib.layers.batch_norm`. For this, I recommend you change
35 | a line in `tensorflow/contrib/layers/python/layers/layers.py`
36 | as follows.
37 | Before: mean, variance = nn.moments(inputs, axis, keep_dims=True)
38 | After: mean, variance = nn.moments(inputs, [-1], keep_dims=True)
39 | type: A string. Either "bn" or "ln".
40 | decay: Decay for the moving average. Reasonable values for `decay` are close
41 | to 1.0, typically in the multiple-nines range: 0.999, 0.99, 0.9, etc.
42 | Lower `decay` value (recommend trying `decay`=0.9) if model experiences
43 | reasonably good training performance but poor validation and/or test
44 | performance.
45 | is_training: Whether or not the layer is in training mode.
46 | activation_fn: Activation function.
47 | scope: Optional scope for `variable_scope`.
48 |
49 | Returns:
50 | A tensor with the same shape and data dtype as `inputs`.
51 | '''
52 | if type == "bn":
53 | inputs_shape = inputs.get_shape()
54 | inputs_rank = inputs_shape.ndims
55 |
56 | # use fused batch norm if inputs_rank in [2, 3, 4] as it is much faster.
57 | # pay attention to the fact that fused_batch_norm requires shape to be rank 4 of NHWC.
58 | if inputs_rank in [2, 3, 4]:
59 | if inputs_rank == 2:
60 | inputs = tf.expand_dims(inputs, axis=1)
61 | inputs = tf.expand_dims(inputs, axis=2)
62 | elif inputs_rank == 3:
63 | inputs = tf.expand_dims(inputs, axis=1)
64 |
65 | outputs = tf.contrib.layers.batch_norm(inputs=inputs,
66 | decay=decay,
67 | center=True,
68 | scale=True,
69 | activation_fn=activation_fn,
70 | updates_collections=None,
71 | is_training=is_training,
72 | scope=scope,
73 | zero_debias_moving_mean=True,
74 | fused=True)
75 | # restore original shape
76 | if inputs_rank == 2:
77 | outputs = tf.squeeze(outputs, axis=[1, 2])
78 | elif inputs_rank == 3:
79 | outputs = tf.squeeze(outputs, axis=1)
80 | else: # fallback to naive batch norm
81 | outputs = tf.contrib.layers.batch_norm(inputs=inputs,
82 | decay=decay,
83 | center=True,
84 | scale=True,
85 | activation_fn=activation_fn,
86 | updates_collections=None,
87 | is_training=is_training,
88 | scope=scope,
89 | fused=False)
90 | elif type == "ln":
91 | outputs = tf.contrib.layers.layer_norm(inputs=inputs,
92 | center=True,
93 | scale=True,
94 | activation_fn=activation_fn,
95 | scope=scope)
96 | elif type == "in":
97 | with tf.variable_scope(scope):
98 | batch, steps, channels = inputs.get_shape().as_list()
99 | var_shape = [channels]
100 | mu, sigma_sq = tf.nn.moments(inputs, [1], keep_dims=True)
101 | shift = tf.Variable(tf.zeros(var_shape))
102 | scale = tf.Variable(tf.ones(var_shape))
103 | epsilon = 1e-8
104 | normalized = (inputs - mu) / (sigma_sq + epsilon) ** (.5)
105 | outputs = scale * normalized + shift
106 | if activation_fn:
107 | outputs = activation_fn(outputs)
108 | else:
109 | raise ValueError("Currently we support `bn`, `ln`, or `in` only.")
110 |
111 | return outputs
112 |
113 |
114 |
115 | def gaussian_noise_layer(input_layer, std):
116 | noise = tf.random_normal(shape=tf.shape(input_layer), mean=0.0, stddev=std, dtype=tf.float32)
117 | return input_layer + noise
118 |
119 | def lrelu(x, leak=0.2, name="lrelu"):
120 | with tf.variable_scope(name):
121 | f1 = 0.5 * (1 + leak)
122 | f2 = 0.5 * (1 - leak)
123 | return f1 * x + f2 * abs(x)
--------------------------------------------------------------------------------
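/Trip/ops.py — usage note (annotation, not part of the repository):
--------------------------------------------------------------------------------
A minimal TF 1.x sketch of the helpers above; layer sizes and scope names are
made up:

import tensorflow as tf
from ops import fully_connected, gaussian_noise_layer

x = tf.placeholder(tf.float32, [None, 128])
init = tf.contrib.layers.xavier_initializer()

h = fully_connected(x, 64, init, scope="fc1")   # relu(batch_norm(xW + b))
h = gaussian_noise_layer(h, std=0.1)            # additive Gaussian noise
logits = fully_connected(h, 10, init, scope="fc2", is_last=True)  # no activation
--------------------------------------------------------------------------------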
/seq2seq_c/python/ops/basic_decoder.py:
--------------------------------------------------------------------------------
1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 | """A class of Decoders that may sample to generate the next input.
16 | """
17 |
18 | from __future__ import absolute_import
19 | from __future__ import division
20 | from __future__ import print_function
21 |
22 | import collections
23 | import tensorflow as tf
24 | from seq2seq_c.python.ops import decoder
25 | from seq2seq_c.python.ops import helper as helper_py
26 | from tensorflow.python.framework import ops
27 | from tensorflow.python.framework import tensor_shape
28 | from tensorflow.python.layers import base as layers_base
29 | from tensorflow.python.ops import rnn_cell_impl
30 | from tensorflow.python.util import nest
31 |
32 |
33 | __all__ = [
34 | "BasicDecoderOutput",
35 | "BasicDecoder",
36 | ]
37 |
38 |
39 | class BasicDecoderOutput(
40 | collections.namedtuple("BasicDecoderOutput", ("rnn_output", "sample_id"))):
41 | pass
42 |
43 |
44 | class BasicDecoder(decoder.Decoder):
45 | """Basic sampling decoder."""
46 |
47 | def __init__(self, cell, helper, initial_state, lantent_tensor, output_layer=None):
48 | """Initialize BasicDecoder.
49 |
50 | Args:
51 | cell: An `RNNCell` instance.
52 | helper: A `Helper` instance.
53 | initial_state: A (possibly nested tuple of...) tensors and TensorArrays.
54 | The initial state of the RNNCell.
55 | lantent_tensor: A latent `Tensor` of shape `[batch_size, latent_dim]`,
56 | concatenated to the cell output at every decoding step.
57 | output_layer: (Optional) A `tf.layers.Layer`, e.g. `tf.layers.Dense`,
58 | applied to the RNN output prior to storing the result or sampling.
59 | Raises:
60 | TypeError: if `cell`, `helper` or `output_layer` have an incorrect type.
61 | """
62 | rnn_cell_impl.assert_like_rnncell("cell", cell)
63 | if not isinstance(helper, helper_py.Helper):
64 | raise TypeError("helper must be a Helper, received: %s" % type(helper))
65 | if (output_layer is not None
66 | and not isinstance(output_layer, layers_base.Layer)):
67 | raise TypeError(
68 | "output_layer must be a Layer, received: %s" % type(output_layer))
69 | self._cell = cell
70 | self._helper = helper
71 | self._initial_state = initial_state
72 | self._output_layer = output_layer
73 | self._lantent_tensor=lantent_tensor
74 |
75 | @property
76 | def batch_size(self):
77 | return self._helper.batch_size
78 |
79 | def _rnn_output_size(self):
80 | size = self._cell.output_size
81 | if self._output_layer is None:
82 | return size
83 | else:
84 | # To use layer's compute_output_shape, we need to convert the
85 | # RNNCell's output_size entries into shapes with an unknown
86 | # batch size. We then pass this through the layer's
87 | # compute_output_shape and read off all but the first (batch)
88 | # dimensions to get the output size of the rnn with the layer
89 | # applied to the top.
90 | output_shape_with_unknown_batch = nest.map_structure(
91 | lambda s: tensor_shape.TensorShape([None]).concatenate(s),
92 | size)
93 | layer_output_shape = self._output_layer.compute_output_shape(
94 | output_shape_with_unknown_batch)
95 | return nest.map_structure(lambda s: s[1:], layer_output_shape)
96 |
97 | @property
98 | def output_size(self):
99 | # Return the cell output and the id
100 | return BasicDecoderOutput(
101 | rnn_output=self._rnn_output_size(),
102 | sample_id=self._helper.sample_ids_shape)
103 |
104 | @property
105 | def output_dtype(self):
106 | # Assume the dtype of the cell is the output_size structure
107 | # containing the input_state's first component's dtype.
108 | # Return that structure and the sample_ids_dtype from the helper.
109 | dtype = nest.flatten(self._initial_state)[0].dtype
110 | return BasicDecoderOutput(
111 | nest.map_structure(lambda _: dtype, self._rnn_output_size()),
112 | self._helper.sample_ids_dtype)
113 |
114 | def initialize(self, name=None):
115 | """Initialize the decoder.
116 |
117 | Args:
118 | name: Name scope for any created operations.
119 |
120 | Returns:
121 | `(finished, first_inputs, initial_state)`.
122 | """
123 | return self._helper.initialize() + (self._initial_state,)
124 |
125 | def step(self, time, inputs, state, name=None):
126 | """Perform a decoding step.
127 |
128 | Args:
129 | time: scalar `int32` tensor.
130 | inputs: A (structure of) input tensors.
131 | state: A (structure of) state tensors and TensorArrays.
132 | name: Name scope for any created operations.
133 |
134 | Returns:
135 | `(outputs, next_state, next_inputs, finished)`.
136 | """
137 | with ops.name_scope(name, "BasicDecoderStep", (time, inputs, state)):
138 | cell_outputs, cell_state = self._cell(inputs, state)
139 | # concatenate the latent code to the cell output at each step
140 | cell_outputs = tf.concat([cell_outputs, self._lantent_tensor], 1)
141 |
142 | if self._output_layer is not None:
143 | cell_outputs = self._output_layer(cell_outputs)
144 | sample_ids = self._helper.sample(
145 | time=time, outputs=cell_outputs, state=cell_state)
146 | (finished, next_inputs, next_state) = self._helper.next_inputs(
147 | time=time,
148 | outputs=cell_outputs,
149 | state=cell_state,
150 | sample_ids=sample_ids)
151 | outputs = BasicDecoderOutput(cell_outputs, sample_ids)
152 | return (outputs, next_state, next_inputs, finished)
153 |
--------------------------------------------------------------------------------
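/seq2seq_c/python/ops/basic_decoder.py — usage note (annotation, not part of the repository):
--------------------------------------------------------------------------------
A minimal TF 1.x sketch wiring the modified BasicDecoder: unlike the stock
version, it takes a `lantent_tensor` of shape [batch_size, latent_dim] that is
concatenated to the cell output at every step before the output projection.
All sizes here are made up:

import tensorflow as tf
import seq2seq_c as seq2seq  # assumes the repository root is on sys.path

batch_size, max_time, embed_dim, latent_dim = 32, 10, 64, 16
inputs = tf.random_normal([batch_size, max_time, embed_dim])
lengths = tf.fill([batch_size], max_time)
latent = tf.random_normal([batch_size, latent_dim])  # e.g. an encoder code

cell = tf.nn.rnn_cell.GRUCell(128)
helper = seq2seq.TrainingHelper(inputs, lengths)
decoder = seq2seq.BasicDecoder(
    cell, helper, cell.zero_state(batch_size, tf.float32),
    lantent_tensor=latent,
    output_layer=tf.layers.Dense(100))  # projects [128 + 16] -> 100 classes

outputs, final_state, final_lengths = seq2seq.dynamic_decode(decoder)
# outputs.rnn_output: [batch, max_time, 100]; outputs.sample_id: [batch, max_time]
--------------------------------------------------------------------------------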
/Trip/data/trajid.dat:
--------------------------------------------------------------------------------
1 | 4
2 | 13
3 | 32
4 | 33
5 | 34
6 | 39
7 | 41
8 | 42
9 | 44
10 | 46
11 | 67
12 | 74
13 | 81
14 | 82
15 | 98
16 | 99
17 | 101
18 | 122
19 | 127
20 | 142
21 | 150
22 | 164
23 | 185
24 | 193
25 | 194
26 | 202
27 | 203
28 | 219
29 | 222
30 | 229
31 | 240
32 | 260
33 | 265
34 | 266
35 | 267
36 | 274
37 | 275
38 | 282
39 | 284
40 | 311
41 | 316
42 | 317
43 | 349
44 | 365
45 | 366
46 | 368
47 | 370
48 | 371
49 | 386
50 | 387
51 | 398
52 | 411
53 | 438
54 | 446
55 | 449
56 | 450
57 | 464
58 | 481
59 | 507
60 | 512
61 | 513
62 | 514
63 | 528
64 | 529
65 | 540
66 | 542
67 | 543
68 | 581
69 | 644
70 | 652
71 | 672
72 | 680
73 | 682
74 | 690
75 | 691
76 | 694
77 | 695
78 | 701
79 | 702
80 | 705
81 | 734
82 | 739
83 | 758
84 | 800
85 | 802
86 | 807
87 | 808
88 | 826
89 | 837
90 | 838
91 | 867
92 | 881
93 | 882
94 | 907
95 | 913
96 | 924
97 | 931
98 | 937
99 | 944
100 | 945
101 | 948
102 | 956
103 | 969
104 | 970
105 | 981
106 | 986
107 | 1017
108 | 1022
109 | 1027
110 | 1029
111 | 1030
112 | 1042
113 | 1045
114 | 1046
115 | 1051
116 | 1055
117 | 1074
118 | 1075
119 | 1077
120 | 1080
121 | 1083
122 | 1087
123 | 1094
124 | 1095
125 | 1097
126 | 1103
127 | 1107
128 | 1110
129 | 1111
130 | 1120
131 | 1121
132 | 1122
133 | 1128
134 | 1146
135 | 1158
136 | 1160
137 | 1173
138 | 1178
139 | 1179
140 | 1180
141 | 1181
142 | 1186
143 | 1193
144 | 1201
145 | 1203
146 | 1208
147 | 1216
148 | 1219
149 | 1220
150 | 1226
151 | 1262
152 | 1264
153 | 1270
154 | 1271
155 | 1273
156 | 1281
157 | 1282
158 | 1286
159 | 1287
160 | 1290
161 | 1294
162 | 1302
163 | 1308
164 | 1317
165 | 1320
166 | 1323
167 | 1338
168 | 1339
169 | 1345
170 | 1349
171 | 1351
172 | 1367
173 | 1392
174 | 1394
175 | 1423
176 | 1427
177 | 1428
178 | 1432
179 | 1452
180 | 1471
181 | 1473
182 | 1483
183 | 1487
184 | 1490
185 | 1495
186 | 1507
187 | 1508
188 | 1522
189 | 1526
190 | 1529
191 | 1533
192 | 1535
193 | 1536
194 | 1537
195 | 1538
196 | 1542
197 | 1555
198 | 1560
199 | 1567
200 | 1568
201 | 1575
202 | 1577
203 | 1607
204 | 1626
205 | 1627
206 | 1639
207 | 1651
208 | 1652
209 | 1656
210 | 1669
211 | 1680
212 | 1683
213 | 1692
214 | 1700
215 | 1703
216 | 1714
217 | 1717
218 | 1747
219 | 1761
220 | 1765
221 | 1774
222 | 1799
223 | 1811
224 | 1821
225 | 1839
226 | 1850
227 | 1862
228 | 1874
229 | 1875
230 | 1876
231 | 1884
232 | 1953
233 | 1965
234 | 1966
235 | 1970
236 | 1989
237 | 1996
238 | 2003
239 | 2007
240 | 2052
241 | 2054
242 | 2055
243 | 2060
244 | 2065
245 | 2072
246 | 2078
247 | 2091
248 | 2092
249 | 2093
250 | 2095
251 | 2108
252 | 2109
253 | 2137
254 | 2138
255 | 2144
256 | 2151
257 | 2175
258 | 2185
259 | 2209
260 | 2217
261 | 2230
262 | 2231
263 | 2233
264 | 2237
265 | 2246
266 | 2249
267 | 2254
268 | 2261
269 | 2262
270 | 2266
271 | 2267
272 | 2268
273 | 2270
274 | 2271
275 | 2272
276 | 2273
277 | 2280
278 | 2286
279 | 2307
280 | 2312
281 | 2318
282 | 2319
283 | 2320
284 | 2333
285 | 2334
286 | 2335
287 | 2339
288 | 2348
289 | 2370
290 | 2371
291 | 2379
292 | 2380
293 | 2388
294 | 2401
295 | 2402
296 | 2404
297 | 2405
298 | 2421
299 | 2427
300 | 2428
301 | 2429
302 | 2436
303 | 2461
304 | 2463
305 | 2491
306 | 2492
307 | 2493
308 | 2495
309 | 2496
310 | 2498
311 | 2503
312 | 2508
313 | 2514
314 | 2515
315 | 2516
316 | 2528
317 | 2572
318 | 2578
319 | 2583
320 | 2587
321 | 2600
322 | 2602
323 | 2605
324 | 2608
325 | 2617
326 | 2618
327 | 2625
328 | 2633
329 | 2639
330 | 2645
331 | 2647
332 | 2648
333 | 2651
334 | 2673
335 | 2674
336 | 2683
337 | 2684
338 | 2688
339 | 2718
340 | 2720
341 | 2721
342 | 2722
343 | 2723
344 | 2727
345 | 2749
346 | 2752
347 | 2757
348 | 2769
349 | 2791
350 | 2794
351 | 2795
352 | 2820
353 | 2840
354 | 2845
355 | 2850
356 | 2853
357 | 2879
358 | 2891
359 | 2893
360 | 2896
361 | 2907
362 | 2924
363 | 2942
364 | 2947
365 | 2949
366 | 2950
367 | 2953
368 | 2962
369 | 2963
370 | 2967
371 | 2968
372 | 2969
373 | 2972
374 | 2978
375 | 2979
376 | 2980
377 | 2981
378 | 2983
379 | 2998
380 | 3025
381 | 3032
382 | 3038
383 | 3046
384 | 3047
385 | 3049
386 | 3056
387 | 3057
388 | 3059
389 | 3064
390 | 3070
391 | 3076
392 | 3080
393 | 3085
394 | 3096
395 | 3099
396 | 3113
397 | 3115
398 | 3122
399 | 3135
400 | 3136
401 | 3138
402 | 3140
403 | 3145
404 | 3146
405 | 3147
406 | 3154
407 | 3155
408 | 3180
409 | 3181
410 | 3183
411 | 3199
412 | 3214
413 | 3216
414 | 3225
415 | 3233
416 | 3245
417 | 3257
418 | 3282
419 | 3284
420 | 3285
421 | 3288
422 | 3301
423 | 3302
424 | 3303
425 | 3308
426 | 3323
427 | 3324
428 | 3329
429 | 3332
430 | 3335
431 | 3340
432 | 3344
433 | 3364
434 | 3371
435 | 3372
436 | 3381
437 | 3388
438 | 3389
439 | 3394
440 | 3399
441 | 3411
442 | 3413
443 | 3414
444 | 3416
445 | 3417
446 | 3423
447 | 3424
448 | 3425
449 | 3466
450 | 3505
451 | 3530
452 | 3565
453 | 3572
454 | 3586
455 | 3597
456 | 3601
457 | 3604
458 | 3609
459 | 3611
460 | 3616
461 | 3623
462 | 3625
463 | 3638
464 | 3644
465 | 3650
466 | 3651
467 | 3654
468 | 3656
469 | 3657
470 | 3665
471 | 3667
472 | 3677
473 | 3681
474 | 3684
475 | 3693
476 | 3696
477 | 3699
478 | 3700
479 | 3701
480 | 3707
481 | 3709
482 | 3711
483 | 3719
484 | 3734
485 | 3748
486 | 3758
487 | 3776
488 | 3789
489 | 3796
490 | 3807
491 | 3810
492 | 3811
493 | 3814
494 | 3825
495 | 3839
496 | 3840
497 | 3842
498 | 3862
499 | 3882
500 | 3889
501 | 3895
502 | 3896
503 | 3920
504 | 3928
505 | 3930
506 | 3948
507 | 3966
508 | 3968
509 | 3985
510 | 3991
511 | 4010
512 | 4011
513 | 4012
514 | 4020
515 | 4031
516 | 4043
517 | 4044
518 | 4046
519 | 4051
520 | 4053
521 | 4065
522 | 4066
523 | 4071
524 | 4080
525 | 4083
526 | 4087
527 | 4095
528 | 4108
529 | 4117
530 | 4120
531 | 4155
532 | 4157
533 | 4160
534 | 4161
535 | 4162
536 | 4164
537 | 4165
538 | 4168
539 | 4169
540 | 4173
541 | 4174
542 | 4175
543 | 4176
544 | 4180
545 | 4181
546 | 4196
547 | 4197
548 | 4210
549 | 4212
550 | 4213
551 | 4219
552 | 4220
553 | 4221
554 | 4227
555 | 4229
556 | 4232
557 | 4235
558 | 4236
559 | 4239
560 | 4242
561 | 4277
562 | 4287
563 | 4292
564 | 4293
565 | 4295
566 | 4297
567 | 4316
568 | 4318
569 | 4335
570 | 4346
571 | 4352
572 | 4353
573 | 4355
574 | 4356
575 | 4357
576 | 4358
577 | 4362
578 | 4369
579 | 4421
580 | 4455
581 | 4456
582 | 4457
583 | 4468
584 | 4469
585 | 4470
586 | 4471
587 | 4473
588 | 4474
589 | 4475
590 | 4490
591 | 4491
592 | 4493
593 | 4496
594 | 4557
595 | 4568
596 | 4574
597 | 4575
598 | 4591
599 | 4592
600 | 4593
601 | 4702
602 | 4718
603 | 4719
604 | 4721
605 | 4722
606 | 4727
607 | 4728
608 | 4744
609 | 4745
610 | 4746
611 | 4748
612 | 4750
613 | 4799
614 | 4804
615 | 4805
616 | 4812
617 | 4817
618 | 4818
619 | 4823
620 | 4858
621 | 4894
622 | 4922
623 | 4923
624 | 4937
625 | 4941
626 | 4951
627 | 4952
628 | 4955
629 | 4969
630 | 4989
631 | 5001
632 | 5016
633 | 5017
634 | 5027
635 |
--------------------------------------------------------------------------------
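Note: trajid.dat is one integer trajectory ID per line (634 IDs, then a trailing blank line). A minimal loading sketch, assuming it is run from the repository root; the names are illustrative:

with open("Trip/data/trajid.dat") as f:
    traj_ids = [int(line) for line in f if line.strip()]

print(len(traj_ids), traj_ids[:5])  # 634 [4, 13, 32, 33, 34]
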
/Trip/AMSGrad.py:
--------------------------------------------------------------------------------
1 | """AMSGrad for TensorFlow."""
2 |
3 | from tensorflow.python.eager import context
4 | from tensorflow.python.framework import ops
5 | from tensorflow.python.ops import control_flow_ops
6 | from tensorflow.python.ops import math_ops
7 | from tensorflow.python.ops import resource_variable_ops
8 | from tensorflow.python.ops import state_ops
9 | from tensorflow.python.ops import variable_scope
10 | from tensorflow.python.training import optimizer
11 |
12 |
13 | class AMSGrad(optimizer.Optimizer):
14 | def __init__(self, learning_rate=0.01, beta1=0.9, beta2=0.99, epsilon=1e-8, use_locking=False, name="AMSGrad"):
15 | super(AMSGrad, self).__init__(use_locking, name)
16 | self._lr = learning_rate
17 | self._beta1 = beta1
18 | self._beta2 = beta2
19 | self._epsilon = epsilon
20 |
21 | self._lr_t = None
22 | self._beta1_t = None
23 | self._beta2_t = None
24 | self._epsilon_t = None
25 |
26 | self._beta1_power = None
27 | self._beta2_power = None
28 |
29 | def _create_slots(self, var_list):
30 | first_var = min(var_list, key=lambda x: x.name)
31 |
32 | create_new = self._beta1_power is None
33 | if not create_new and context.in_graph_mode():
34 | create_new = (self._beta1_power.graph is not first_var.graph)
35 |
36 | if create_new:
37 | with ops.colocate_with(first_var):
38 | self._beta1_power = variable_scope.variable(self._beta1, name="beta1_power", trainable=False)
39 | self._beta2_power = variable_scope.variable(self._beta2, name="beta2_power", trainable=False)
40 | # Create slots for the first and second moments.
41 |         for v in var_list:
42 | self._zeros_slot(v, "m", self._name)
43 | self._zeros_slot(v, "v", self._name)
44 | self._zeros_slot(v, "vhat", self._name)
45 |
46 | def _prepare(self):
47 | self._lr_t = ops.convert_to_tensor(self._lr)
48 | self._beta1_t = ops.convert_to_tensor(self._beta1)
49 | self._beta2_t = ops.convert_to_tensor(self._beta2)
50 | self._epsilon_t = ops.convert_to_tensor(self._epsilon)
51 |
52 | def _apply_dense(self, grad, var):
53 | beta1_power = math_ops.cast(self._beta1_power, var.dtype.base_dtype)
54 | beta2_power = math_ops.cast(self._beta2_power, var.dtype.base_dtype)
55 | lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
56 | beta1_t = math_ops.cast(self._beta1_t, var.dtype.base_dtype)
57 | beta2_t = math_ops.cast(self._beta2_t, var.dtype.base_dtype)
58 | epsilon_t = math_ops.cast(self._epsilon_t, var.dtype.base_dtype)
59 |
60 | lr = (lr_t * math_ops.sqrt(1 - beta2_power) / (1 - beta1_power))
61 |
62 | # m_t = beta1 * m + (1 - beta1) * g_t
63 | m = self.get_slot(var, "m")
64 | m_scaled_g_values = grad * (1 - beta1_t)
65 | m_t = state_ops.assign(m, beta1_t * m + m_scaled_g_values, use_locking=self._use_locking)
66 |
67 | # v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
68 | v = self.get_slot(var, "v")
69 | v_scaled_g_values = (grad * grad) * (1 - beta2_t)
70 | v_t = state_ops.assign(v, beta2_t * v + v_scaled_g_values, use_locking=self._use_locking)
71 |
72 | # amsgrad
73 | vhat = self.get_slot(var, "vhat")
74 |         vhat_t = state_ops.assign(vhat, math_ops.maximum(v_t, vhat), use_locking=self._use_locking)
75 | v_sqrt = math_ops.sqrt(vhat_t)
76 |
77 | var_update = state_ops.assign_sub(var, lr * m_t / (v_sqrt + epsilon_t), use_locking=self._use_locking)
78 | return control_flow_ops.group(*[var_update, m_t, v_t, vhat_t])
79 |
80 | def _resource_apply_dense(self, grad, var):
81 |         # Use the variable itself here: get_slot() and the assign ops below expect a variable, not its handle.
82 | beta1_power = math_ops.cast(self._beta1_power, grad.dtype.base_dtype)
83 | beta2_power = math_ops.cast(self._beta2_power, grad.dtype.base_dtype)
84 | lr_t = math_ops.cast(self._lr_t, grad.dtype.base_dtype)
85 | beta1_t = math_ops.cast(self._beta1_t, grad.dtype.base_dtype)
86 | beta2_t = math_ops.cast(self._beta2_t, grad.dtype.base_dtype)
87 | epsilon_t = math_ops.cast(self._epsilon_t, grad.dtype.base_dtype)
88 |
89 | lr = (lr_t * math_ops.sqrt(1 - beta2_power) / (1 - beta1_power))
90 |
91 | # m_t = beta1 * m + (1 - beta1) * g_t
92 |         m = self.get_slot(var, "m")
93 | m_scaled_g_values = grad * (1 - beta1_t)
94 | m_t = state_ops.assign(m, beta1_t * m + m_scaled_g_values, use_locking=self._use_locking)
95 |
96 | # v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
97 |         v = self.get_slot(var, "v")
98 | v_scaled_g_values = (grad * grad) * (1 - beta2_t)
99 | v_t = state_ops.assign(v, beta2_t * v + v_scaled_g_values, use_locking=self._use_locking)
100 |
101 | # amsgrad
102 |         vhat = self.get_slot(var, "vhat")
103 |         vhat_t = state_ops.assign(vhat, math_ops.maximum(v_t, vhat), use_locking=self._use_locking)
104 | v_sqrt = math_ops.sqrt(vhat_t)
105 |
106 | var_update = state_ops.assign_sub(var, lr * m_t / (v_sqrt + epsilon_t), use_locking=self._use_locking)
107 | return control_flow_ops.group(*[var_update, m_t, v_t, vhat_t])
108 |
109 | def _apply_sparse_shared(self, grad, var, indices, scatter_add):
110 | beta1_power = math_ops.cast(self._beta1_power, var.dtype.base_dtype)
111 | beta2_power = math_ops.cast(self._beta2_power, var.dtype.base_dtype)
112 | lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
113 | beta1_t = math_ops.cast(self._beta1_t, var.dtype.base_dtype)
114 | beta2_t = math_ops.cast(self._beta2_t, var.dtype.base_dtype)
115 | epsilon_t = math_ops.cast(self._epsilon_t, var.dtype.base_dtype)
116 |
117 | lr = (lr_t * math_ops.sqrt(1 - beta2_power) / (1 - beta1_power))
118 |
119 | # m_t = beta1 * m + (1 - beta1) * g_t
120 | m = self.get_slot(var, "m")
121 | m_scaled_g_values = grad * (1 - beta1_t)
122 | m_t = state_ops.assign(m, m * beta1_t, use_locking=self._use_locking)
123 | with ops.control_dependencies([m_t]):
124 | m_t = scatter_add(m, indices, m_scaled_g_values)
125 |
126 | # v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
127 | v = self.get_slot(var, "v")
128 | v_scaled_g_values = (grad * grad) * (1 - beta2_t)
129 | v_t = state_ops.assign(v, v * beta2_t, use_locking=self._use_locking)
130 | with ops.control_dependencies([v_t]):
131 | v_t = scatter_add(v, indices, v_scaled_g_values)
132 |
133 | # amsgrad
134 | vhat = self.get_slot(var, "vhat")
135 |         vhat_t = state_ops.assign(vhat, math_ops.maximum(v_t, vhat), use_locking=self._use_locking)
136 | v_sqrt = math_ops.sqrt(vhat_t)
137 | var_update = state_ops.assign_sub(var, lr * m_t / (v_sqrt + epsilon_t), use_locking=self._use_locking)
138 | return control_flow_ops.group(*[var_update, m_t, v_t, vhat_t])
139 |
140 | def _apply_sparse(self, grad, var):
141 | return self._apply_sparse_shared(
142 | grad.values, var, grad.indices,
143 | lambda x, i, v: state_ops.scatter_add( # pylint: disable=g-long-lambda
144 | x, i, v, use_locking=self._use_locking))
145 |
146 | def _resource_scatter_add(self, x, i, v):
147 | with ops.control_dependencies(
148 | [resource_variable_ops.resource_scatter_add(x.handle, i, v)]):
149 | return x.value()
150 |
151 | def _resource_apply_sparse(self, grad, var, indices):
152 | return self._apply_sparse_shared(
153 | grad, var, indices, self._resource_scatter_add)
154 |
155 | def _finish(self, update_ops, name_scope):
156 | # Update the power accumulators.
157 | with ops.control_dependencies(update_ops):
158 | with ops.colocate_with(self._beta1_power):
159 | update_beta1 = self._beta1_power.assign(
160 | self._beta1_power * self._beta1_t,
161 | use_locking=self._use_locking)
162 | update_beta2 = self._beta2_power.assign(
163 | self._beta2_power * self._beta2_t,
164 | use_locking=self._use_locking)
165 | return control_flow_ops.group(*update_ops + [update_beta1, update_beta2],
166 | name=name_scope)
167 |
--------------------------------------------------------------------------------
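Note: AMSGrad is Adam with a non-decreasing second-moment estimate: the extra `vhat` slot keeps the element-wise maximum of every `v_t` seen so far, and the update divides by sqrt(vhat) rather than sqrt(v_t); the `lr` line additionally folds in Adam's bias correction, lr_t * sqrt(1 - beta2^t) / (1 - beta1^t). The class drops into a TF 1.x training loop like any tf.train optimizer. A minimal sketch on a toy quadratic, assuming it is run from inside Trip/ so the import resolves:

import tensorflow as tf
from AMSGrad import AMSGrad  # path assumption: working directory is Trip/

x = tf.Variable(5.0)
loss = tf.square(x - 2.0)                        # toy objective, minimum at x = 2
train_op = AMSGrad(learning_rate=0.1).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(200):
        sess.run(train_op)
    print(sess.run(x))                           # converges toward 2.0
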
/Trip/data/TKY_split200_set.dat:
--------------------------------------------------------------------------------
1 | 187-21480 1 0 16
2 | 187-21486 1 0 16
3 | 151-17418 13 6 4
4 | 151-17411 6 1 1 6
5 | 162-18806 21 17 17 21
6 | 192-22008 15 24 11
7 | 192-22004 5 12 3
8 | 55-6316 0 8 0
9 | 135-15393 3 9 9 3
10 | 117-13773 7 1 1
11 | 29-3680 2 17 2
12 | 117-13779 10 17 17
13 | 8-788 4 1 12
14 | 118-13805 22 2 2 1
15 | 118-13806 1 4 19 0 8 23 22
16 | 65-7834 3 0 5
17 | 163-18882 0 22 16
18 | 134-15360 11 1 0
19 | 113-13048 9 0 0
20 | 117-13783 1 1 10
21 | 51-5874 19 0 13
22 | 36-4372 3 3 13
23 | 159-18400 4 1 3
24 | 43-4908 15 10 1
25 | 63-7408 0 8 8 4
26 | 14-1494 1 1 25
27 | 196-22454 6 1 19
28 | 30-3739 13 0 0
29 | 195-22360 1 2 14
30 | 196-22452 3 19 4 6
31 | 12-1275 1 1 11 1
32 | 113-12997 0 2 9
33 | 12-1277 25 1 11
34 | 30-3731 13 3 20 20 13
35 | 12-1270 6 1 11
36 | 12-1273 1 7 11
37 | 113-12991 21 7 4 0
38 | 18-2243 1 11 8
39 | 68-8190 0 21 4 1
40 | 29-3545 3 3 0
41 | 35-4137 0 19 0
42 | 26-3077 3 3 2
43 | 102-11773 15 0 0 1 11 15
44 | 26-3075 3 2 3
45 | 26-3072 3 26 25 26 0 0
46 | 130-14983 22 0 13
47 | 196-22535 0 1 4
48 | 196-22532 19 1 6
49 | 161-18553 0 0 10
50 | 30-3748 13 3 0
51 | 30-3747 2 21 4
52 | 30-3741 13 0 0
53 | 83-9524 3 13 13 3
54 | 196-22433 6 1 19
55 | 187-21600 3 11 3
56 | 35-4142 0 2 14
57 | 21-2519 1 11 1
58 | 13-1352 8 20 20
59 | 43-4884 24 10 1
60 | 21-2620 1 1 13
61 | 124-14432 6 6 11
62 | 196-22508 1 1 21
63 | 147-17030 20 1 1 13
64 | 113-13009 9 10 0
65 | 45-5035 1 11 25
66 | 113-13004 0 2 9
67 | 126-14577 10 1 11
68 | 151-17465 0 19 1
69 | 151-17463 6 1 6
70 | 151-17462 13 6 1 11 1 6
71 | 36-4335 2 0 0
72 | 146-16892 6 1 1 12
73 | 13-1420 8 20 20
74 | 124-14404 3 3 13
75 | 150-17361 15 1 11 19
76 | 118-13836 4 1 1 0
77 | 118-13830 0 20 0 1 1 1 4
78 | 68-8146 2 3 3
79 | 65-7867 5 12 3
80 | 18-2048 1 11 8
81 | 18-1965 4 0 8
82 | 119-13923 27 27 27
83 | 113-12975 0 10 9
84 | 96-11178 12 0 2
85 | 113-12979 9 0 0
86 | 76-8967 4 7 7 4
87 | 76-8963 1 1 4
88 | 146-16808 1 11 7
89 | 65-7783 3 3 5
90 | 65-7788 4 4 5
91 | 86-9746 9 5 5
92 | 86-9748 9 21 21
93 | 196-22403 6 1 19
94 | 196-22407 6 1 19
95 | 198-22859 17 2 13
96 | 196-22656 19 1 6
97 | 26-3042 3 2 2
98 | 196-22654 19 1 6
99 | 63-7541 25 1 11
100 | 1-14 1 11 11 1 6
101 | 1-17 6 1 1
102 | 1-13 26 4 0
103 | 141-16263 14 8 8
104 | 96-11088 2 0 0 2
105 | 196-22663 6 1 19
106 | 49-5556 14 27 27
107 | 196-22544 19 1 6
108 | 196-22540 19 1 6
109 | 196-22548 11 1 6
110 | 12-1313 6 1 0
111 | 12-1317 1 4 24 28 5
112 | 12-1315 1 24 1
113 | 26-3038 3 3 2
114 | 187-21492 1 19 0 27
115 | 187-21496 6 0 16
116 | 102-11766 0 21 15
117 | 47-5121 9 28 28
118 | 187-21556 1 0 16
119 | 187-21653 1 0 16
120 | 76-9006 1 1 2
121 | 76-9008 4 0 1
122 | 8-772 12 21 4
123 | 187-21446 1 0 16
124 | 55-6365 0 19 19 4
125 | 117-13764 10 1 1
126 | 79-9121 0 1 3
127 | 117-13760 7 1 1 10
128 | 36-4391 4 1 6
129 | 105-12125 0 19 1
130 | 105-12123 10 2 0 0
131 | 105-12120 0 1 11 1 0
132 | 65-7820 3 0 2 2 5
133 | 98-11333 0 2 14
134 | 29-3603 2 3 3
135 | 116-13686 21 0 0
136 | 187-21449 1 19 0
137 | 164-19004 3 3 4
138 | 147-16959 23 23 0
139 | 1-5 1 1 6
140 | 5-457 1 1 2
141 | 135-15520 3 7 7 9
142 | 168-19371 8 8 0
143 | 65-7896 3 0 2 10
144 | 196-22442 6 1 19
145 | 124-14403 3 3 13
146 | 12-1266 6 1 7 2 1
147 | 65-7951 5 10 3
148 | 65-7952 3 1 5
149 | 12-1280 4 1 11 6
150 | 63-7500 1 4 8 8 0 21
151 | 99-11541 8 4 6
152 | 21-2593 1 11 1
153 | 102-11764 11 1 15
154 | 33-3980 5 1 5
155 | 196-22506 1 1 6
156 | 196-22509 19 1 4
157 | 58-6792 7 28 5
158 | 49-5519 12 27 27
159 | 161-18546 2 0 18
160 | 161-18540 10 2 27
161 | 135-15459 3 9 7 3
162 | 26-2994 3 6 25 6
163 | 96-11031 2 0 0
164 | 30-3754 13 3 20
165 | 38-4513 10 4 1
166 | 55-6339 2 2 10
167 | 38-4519 10 4 1
168 | 113-13124 9 10 0
169 | 196-22595 1 6 6
170 | 196-22597 19 1 6
171 | 43-4894 1 3 1
172 | 196-22592 19 4 1 6
173 | 100-11596 19 1 1
174 | 164-18991 19 3 3
175 | 187-21468 1 0 16
176 | 47-5173 0 9 28
177 | 187-21462 1 0 16
178 | 187-21461 16 0 1 19
179 | 163-18855 12 12 5
180 | 187-21501 1 0 16
181 | 45-5026 3 11 1
182 | 187-21509 1 0 16
183 | 151-17473 1 19 0 2
184 | 113-13010 0 4 19 0
185 | 187-21667 1 0 16
186 | 196-22424 1 1 6
187 | 31-3794 21 4 6
188 | 31-3791 7 4 6
189 | 13-1434 0 0 8
190 | 13-1436 0 20 20
191 | 146-16881 1 11 1
192 | 146-16884 2 0 19 1
193 | 117-13717 10 1 1
194 | 78-9073 0 0 1
195 | 117-13714 5 1 1
196 | 13-1439 4 19 1
197 | 126-14605 1 1 6
198 | 196-22486 6 1 19
199 | 126-14602 0 0 13
200 | 196-22485 6 1 1 4 6
201 | 124-14441 13 6 11 4 4
202 | 68-8159 26 4 0
203 | 68-8157 9 1 1 9 26
204 | 59-6857 20 8 22
205 | 68-8155 9 1 11
206 | 68-8153 0 19 1 1 6
207 | 68-8150 9 4 1 1
208 | 113-13066 9 0 0
209 | 30-3733 10 4 13
210 | 29-3672 3 2 1 3
211 | 113-12994 9 10 0
212 | 29-3677 3 21 21 3
213 | 43-4920 15 1 11
214 | 96-11160 2 20 2 0
215 | 128-14716 11 0 1
216 | 5-462 10 29 29 28
217 | 96-11168 2 0 0 2
218 | 12-1272 6 1 11 7 1 11
219 | 196-22467 11 1 6
220 | 30-3716 24 1 6
221 | 33-4012 5 23 1
222 | 196-22435 6 1 19
223 | 86-9771 4 8 23
224 | 18-2222 1 11 8
225 | 14-1567 25 1 11
226 | 63-7470 2 2 21
227 | 196-22558 1 6 13
228 | 194-22311 26 6 1
229 | 194-22313 1 9 9
230 | 199-22883 13 3 1 11 0
231 | 173-20014 0 22 22 0
232 | 98-11405 20 0 0
233 | 187-21540 1 0 16
234 | 12-1295 29 29 7 1 1 6 6
235 | 187-21542 1 0 16
236 | 113-13111 0 2 2
237 | 18-2127 1 11 8
238 | 80-9239 4 3 20
239 | 192-22025 3 2 3
240 | 105-12113 0 3 6 6
241 | 65-7694 7 5 5
242 | 105-12114 6 3 0
243 | 163-18866 16 0 0 16
244 | 187-21531 1 0 16
245 | 65-7814 5 21 2 5
246 | 187-21537 1 0 16
247 | 36-4312 17 17 18 2
248 | 164-19014 3 2 0
249 | 161-18703 10 2 18
250 | 150-17281 15 7 15
251 | 78-9083 2 1 0
252 | 78-9080 2 2 1
253 | 65-7751 4 12 5
254 | 75-8898 3 0 14
255 | 21-2449 0 3 13
256 | 13-1373 8 0 2 14
257 | 44-4923 2 1 1
258 | 196-22477 6 1 19
259 | 196-22476 6 1 19
260 | 168-19362 11 1 18
261 | 68-8160 1 1 9
262 | 65-7961 3 19 4 5
263 | 65-7964 5 10 3
264 | 13-1382 8 0 0 8
265 | 18-2062 20 3 3 0
266 | 117-13695 7 7 7
267 | 196-22394 6 19 1
268 | 96-11052 2 0 0 2
269 | 147-16945 0 0 0
270 | 63-7514 11 1 4
271 | 102-11756 24 4 1 11
272 | 141-16237 14 19 1
273 | 49-5527 4 22 22 22
274 | 49-5526 1 11 1 0
275 | 21-2619 0 1 1
276 | 196-22512 19 1 6
277 | 21-2614 1 1 9 9 1 11
278 | 26-3083 4 4 18
279 | 187-21668 1 0 16
280 | 124-14502 13 3 1 1
281 | 96-11122 12 16 16 12
282 | 63-7560 2 0 8 0
283 | 194-22320 25 26 26
284 | 161-18522 10 2 27
285 | 187-21478 1 0 16
286 | 187-21475 1 0 16
287 | 187-21476 0 1 3
288 | 1-2 1 11 1 6
289 | 187-21470 11 1 19 0
290 | 1-7 6 1 11 11 1 6
291 | 187-21572 1 0 16
292 | 55-6394 17 7 10
293 | 151-17401 0 1 6
294 | 192-22010 5 4 1 11 1
295 | 18-2110 1 11 8
296 | 13-1409 20 23 23 8
297 | 58-6744 5 10 7
298 | 128-14744 19 0 0 19
299 | 65-7844 5 10 0 20
300 | 65-7840 3 1 5
301 | 196-22629 19 1 6
302 | 59-6840 6 1 0
303 | 113-13077 9 10 0
304 | 113-13074 9 2 0
305 | 113-13075 2 0 0
306 | 18-2184 1 11 8
307 | 151-17497 23 8 0
308 | 117-13794 7 6 26
309 | 117-13793 1 1 10
310 | 36-4340 28 28 13
311 | 18-2026 1 11 8
312 | 151-17498 0 1 6
313 | 96-11155 23 2 2
314 | 9-1057 6 1 7 9 9 10
315 | 159-18413 0 19 1
316 | 5-478 1 1 1
317 | 135-15421 3 9 9
318 | 13-1320 6 1 1
319 | 13-1323 0 0 20
320 | 146-16829 18 1 28
321 | 13-1328 8 0 0 8
322 | 196-22422 6 1 19
323 | 30-3729 0 20 3 13
324 | 196-22426 20 20 3 13
325 | 30-3726 0 3 13
326 | 113-12981 0 2 2
327 | 113-12982 9 0 0
328 | 63-7462 4 1 11
329 | 124-14399 9 7 4 1
330 | 21-2577 1 1 4
331 | 57-6566 4 0 1
332 | 100-11639 1 11 1
333 | 196-22473 1 6 25
334 | 196-22640 6 1 6
335 | 196-22528 19 1 6
336 | 63-7489 4 1 11 1 4
337 | 58-6757 5 7 1
338 | 35-4191 8 0 2
339 | 76-8948 1 0 18
340 | 191-21905 11 7 7
341 | 146-16910 12 4 1
342 | 96-11204 2 0 2
343 | 191-21908 22 8 0 11
344 | 124-14398 1 4 7 9
345 | 143-16516 19 1 11
346 | 151-17527 0 19 0 0
347 | 18-2096 1 11 8
348 | 196-22633 19 1 6
349 | 196-22634 6 19 1
350 | 196-22580 6 1 4
351 | 65-7916 4 10 5
352 | 116-13467 21 18 18
353 | 147-17041 21 21 0
354 | 100-11630 23 19 11 1
355 | 187-21528 1 0 16
356 | 187-21529 1 0 16
357 | 187-21522 1 0 16
358 | 10-1112 2 1 11 6
359 | 151-17450 0 0 0
360 | 151-17457 0 19 1
361 | 8-888 12 3 1
362 | 179-20532 5 28 28 5
363 | 18-1984 24 8 7
364 | 113-13039 9 0 0
365 | 65-7721 5 7 4 5
366 | 12-1288 6 1 19
367 | 78-9076 0 0 3
368 | 146-16860 1 11 1 4 12
369 | 78-9072 1 1 0
370 | 126-14628 4 19 1 11
371 | 126-14629 1 6 9
372 | 13-1361 8 20 0
373 | 196-22466 11 1 6
374 | 118-13843 20 0 1
375 | 68-8176 13 2 3 26
376 | 107-12395 20 10 5 5 10
377 | 68-8178 18 4 4
378 | 13-1444 8 0 0
379 | 35-4228 10 28 5
380 | 13-1446 10 2 14 2
381 | 8-839 12 4 1 4
382 | 102-11744 11 1 4
383 | 102-11745 4 8 23
384 | 76-8979 4 1 4
385 | 29-3576 2 17 2
386 | 49-5532 0 0 27
387 | 49-5536 11 1 0 0
388 | 146-16810 1 10 15
389 | 196-22418 6 1 19
390 | 102-11759 9 1 11 1 0
391 | 196-22411 6 1 19
392 | 196-22413 6 1 19
393 | 124-14510 1 4 9
394 | 198-22866 4 6 13
395 | 193-22193 3 3 2 2
396 | 63-7458 21 8 0 21
397 | 9-987 1 1 4
398 | 49-5547 14 2 27 27
399 | 196-22575 19 1 6
400 | 22-2708 25 3 20
401 | 49-5548 27 2 14
402 | 12-1303 1 1 11
403 | 29-3609 2 17 2
404 |
--------------------------------------------------------------------------------
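Note: each line of the *_set.dat files pairs a user-trajectory key with what appears to be the ordered sequence of POI indices for that trajectory; the format is not documented in the repo, so the field meanings are inferred from the layout. A minimal parsing sketch (hypothetical helper name and path):

def load_set(path):
    """Parse a *_set.dat file into {"user-traj": [poi index, ...]}."""
    trajs = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if not parts:            # skip the trailing blank line
                continue
            trajs[parts[0]] = [int(p) for p in parts[1:]]
    return trajs

trajs = load_set("Trip/data/TKY_split200_set.dat")
print(trajs["187-21480"])  # [1, 0, 16]

The same line format covers the other set files below (Toro_set.dat, TKY_split400_set.dat, Melb_set.dat), so one parser serves them all; only the key style differs (the Flickr-based cities use user NSIDs such as 20741443@N00-745).
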
/Trip/data/Toro_set.dat:
--------------------------------------------------------------------------------
1 | 20741443@N00-745 9 0 2
2 | 20741443@N00-744 0 8 2
3 | 64834051@N00-4182 10 14 4
4 | 11191102@N07-155 10 4 1
5 | 11191102@N07-157 5 3 13
6 | 11090433@N05-139 15 1 3 2
7 | 86292040@N00-5377 11 14 4 3 2
8 | 9911655@N08-5959 0 8 2
9 | 23908938@N02-1121 5 17 4
10 | 69525052@N06-4345 0 1 2
11 | 91032493@N00-5556 6 4 1 8 2
12 | 39038071@N00-2431 9 1 7
13 | 20741443@N00-884 0 3 8 2
14 | 84987970@N00-5237 15 1 2
15 | 20741443@N00-883 0 3 2
16 | 20741443@N00-880 6 1 2
17 | 23119895@N00-1078 6 14 4
18 | 99127884@N00-5963 0 3 8
19 | 67694608@N00-4284 10 4 1 8
20 | 67194633@N00-4276 4 1 16
21 | 14391210@N00-379 9 1 2
22 | 26736723@N00-1335 10 14 4
23 | 43139087@N00-2703 6 12 2
24 | 60597745@N00-3932 10 1 8
25 | 9025385@N07-5539 6 4 12 2
26 | 32827327@N03-1786 9 5 27 12 7 2
27 | 75450299@N07-4580 0 1 8
28 | 66922282@N00-4271 1 3 8
29 | 32934461@N00-1792 14 4 0 2
30 | 51260381@N05-3556 6 7 2
31 | 50542255@N00-3469 6 4 8
32 | 84518681@N00-5073 10 14 4
33 | 57519914@N00-3767 6 14 4 1 8
34 | 84987970@N00-5337 15 0 1 3 8
35 | 84987970@N00-5256 0 1 3
36 | 65438265@N00-4198 6 4 1 2
37 | 84987970@N00-5228 15 0 3
38 | 34211328@N00-1931 0 1 3
39 | 82845843@N04-5022 9 5 13
40 | 23908938@N02-1118 6 4 8 2
41 | 23908938@N02-1117 10 1 2
42 | 25802393@N02-1281 10 6 14 4 3 2
43 | 98715075@N00-5932 0 1 3
44 | 99771506@N00-5981 19 6 5 17 0 3 16 2
45 | 20342758@N00-681 10 14 4 3 12 8 2
46 | 23269969@N00-1082 9 14 4 2
47 | 20741443@N00-898 14 4 2
48 | 15205252@N00-472 19 24 21
49 | 61377999@N03-3967 9 0 1
50 | 62373848@N00-4004 10 4 8
51 | 89613559@N07-5530 6 14 4
52 | 96684313@N00-5856 6 14 2
53 | 35468159247@N01-2196 9 4 0 12 2
54 | 44359705@N07-2896 10 4 0 2
55 | 70554294@N00-4408 4 1 8 16 2
56 | 20741443@N00-729 6 0 1 3 8
57 | 34211328@N00-1934 6 14 4
58 | 11349639@N00-169 10 4 2
59 | 63706803@N00-4132 1 3 16
60 | 81471618@N06-4972 0 3 8
61 | 47616704@N00-3052 10 9 6 4 1 12 2
62 | 20741443@N00-809 6 0 3 8 2
63 | 20741443@N00-807 6 1 2
64 | 81471618@N06-4973 5 23 22
65 | 98222757@N02-5913 6 3 12 8
66 | 24616128@N00-1194 14 3 16
67 | 20741443@N00-730 6 3 8
68 | 20741443@N00-731 0 1 3
69 | 44925192@N00-2916 10 6 4 0 2
70 | 94064020@N00-5709 9 5 0 8
71 | 20741443@N00-929 6 0 3 8
72 | 20741443@N00-928 0 13 16
73 | 84987970@N00-5328 0 1 3
74 | 9985167@N04-6011 0 1 8
75 | 84987970@N00-5324 15 1 3 8
76 | 22996605@N05-1066 6 14 4 2
77 | 37517876@N07-2340 0 1 12 8
78 | 20741443@N00-920 6 0 2
79 | 20741443@N00-923 6 0 2
80 | 97675182@N00-5890 19 24 21
81 | 42907325@N00-2686 10 4 8
82 | 28288718@N03-1424 3 8 2
83 | 84035351@N00-5029 0 3 2
84 | 28288718@N03-1423 0 8 2
85 | 33473816@N00-1835 14 1 2
86 | 28288718@N03-1421 0 13 2
87 | 91255327@N00-5575 15 9 0 1
88 | 84987970@N00-5215 15 0 1
89 | 16326397@N02-515 5 4 3
90 | 55929357@N00-3707 1 3 8
91 | 88276719@N00-5497 6 4 2
92 | 84987970@N00-5325 23 22 12
93 | 34128007@N04-1912 11 10 14 4 16
94 | 84987970@N00-5250 9 4 0 1 3 2
95 | 32944866@N04-1796 5 1 16
96 | 58919362@N00-3856 6 0 3 2
97 | 78433558@N00-4805 11 6 8
98 | 32781576@N04-1782 1 7 8
99 | 42536354@N00-2666 9 0 7
100 | 37517876@N07-2338 11 6 2
101 | 34211328@N00-1968 6 0 1 3 2
102 | 25561968@N00-1275 10 14 4
103 | 37517876@N07-2331 5 3 13
104 | 26240579@N05-1312 4 0 8 2
105 | 16048448@N00-510 6 4 0 1 3 8 16 2
106 | 26344918@N04-1321 11 6 1 8
107 | 37517876@N07-2334 11 9 6 0
108 | 62486370@N00-4005 19 24 21
109 | 25133522@N00-1259 14 17 0 12
110 | 20741443@N00-722 6 1 3
111 | 20741443@N00-813 14 3 8 2
112 | 13273343@N00-288 14 4 2
113 | 22111831@N05-1031 6 1 3
114 | 20741443@N00-814 6 3 8 2
115 | 84987970@N00-5254 9 6 0 1 3 8 2
116 | 20741443@N00-816 3 8 2
117 | 20741443@N00-778 6 8 2
118 | 18412989@N00-623 0 3 12
119 | 39460517@N03-2472 0 1 3 8
120 | 10627620@N06-100 0 3 2
121 | 18412989@N00-624 11 6 3
122 | 46769540@N00-3008 9 1 8
123 | 27895744@N00-1378 11 14 4
124 | 99127884@N00-5964 11 9 6 5 23 1 8 16 2
125 | 35107151@N04-2110 9 6 1
126 | 33547369@N00-1877 0 1 3
127 | 84987970@N00-5301 6 1 8 2
128 | 37804979@N00-2377 4 0 1 8
129 | 16926694@N06-544 6 14 2
130 | 11191102@N07-162 6 1 2
131 | 24854893@N00-1237 5 12 7 2
132 | 20741443@N00-790 0 3 8
133 | 13644648@N03-298 11 10 9 6 14 5 4 1 3 12 8 16 2
134 | 59525924@N05-3881 11 10 18
135 | 31114347@N05-1642 14 12 2
136 | 49877040@N03-3438 10 4 7
137 | 20741443@N00-843 4 3 2
138 | 87473264@N00-5475 17 4 12
139 | 10627620@N06-99 15 3 13
140 | 84711541@N00-5099 6 0 1 3 2
141 | 27008470@N00-1344 14 1 2
142 | 99436246@N00-5971 4 1 16
143 | 72778314@N00-4493 10 0 1 3 7 8
144 | 39587684@N00-2499 1 3 8
145 | 43919105@N02-2803 1 3 8
146 | 34961066@N00-2048 5 0 3
147 | 35034348378@N01-2072 11 14 4 0 1 8 2
148 | 41721440@N00-2592 0 3 8
149 | 35034347485@N01-2069 9 0 1 3 8
150 | 49894734@N00-3454 0 1 3
151 | 39587684@N00-2497 11 6 7
152 | 39587684@N00-2496 6 14 2
153 | 37663331@N00-2349 0 3 8
154 | 39357749@N00-2440 6 4 2
155 | 20741443@N00-716 11 6 0
156 | 98715075@N00-5927 11 10 2
157 | 13528098@N00-294 14 4 2
158 | 20741443@N00-867 6 14 2
159 | 20741443@N00-712 6 3 2
160 | 54058873@N00-3636 15 14 0 1 3 8
161 | 34608255@N08-2036 6 5 1
162 | 14878709@N00-453 4 3 8
163 | 34619038@N00-2039 9 1 8
164 | 71482738@N00-4450 14 5 4 7 2
165 | 48678427@N06-3149 9 0 1 8 16
166 | 8250978@N04-4997 6 14 1 8 2
167 | 26195569@N00-1310 9 5 13
168 | 27031977@N04-1345 1 3 8
169 | 35468159247@N01-2198 0 1 3
170 | 37996606796@N01-2401 14 4 12
171 | 50642338@N00-3481 5 13 2
172 | 30739892@N03-1620 0 1 8
173 | 33182025@N00-1802 6 22 0 3
174 | 24854893@N00-1225 4 21 22
175 | 34314322@N00-1993 4 1 8 16 2
176 | 35237095252@N01-2158 10 6 4 2
177 | 9985167@N04-6018 0 3 8
178 | 20741443@N00-783 6 0 1 3 2
179 | 47501960@N00-3040 0 3 13
180 | 20741443@N00-786 6 0 3 8
181 | 20741443@N00-788 5 0 1 3 8
182 | 60597745@N00-3935 10 17 4
183 | 39460517@N03-2471 0 1 8
184 | 77112637@N00-4701 0 1 3
185 | 41894165897@N01-2604 6 5 16
186 | 17373191@N00-549 11 14 0 1 8
187 | 11241023@N03-166 6 1 8
188 | 9449875@N03-5751 19 24 21
189 | 20741443@N00-979 6 1 3 16
190 | 31349854@N00-1653 1 3 7
191 | 92487715@N03-5640 6 4 1
192 | 24128704@N08-1171 11 14 12
193 | 61239510@N00-3960 14 12 2
194 | 84987970@N00-5278 9 0 3 8
195 | 20741443@N00-708 6 0 3 8 2
196 | 9025385@N07-5541 11 10 6 4 2
197 | 34211328@N00-1940 9 5 1 3 16
198 | 75450299@N07-4578 9 0 1 7 8
199 | 20741443@N00-870 15 0 8
200 | 69754957@N00-4351 11 19 15 10 6 5 4 3 2
201 | 96083563@N00-5842 6 5 4
202 | 84987970@N00-5183 15 0 1 7
203 | 69754957@N00-4354 15 0 16 2
204 | 34211328@N00-1949 14 17 4
205 | 20741443@N00-879 6 0 3
206 | 84987970@N00-5277 0 1 3 13 8 16
207 | 30624156@N00-1598 6 1 13 8 2
208 | 60142746@N00-3898 4 3 2
209 | 29619840@N00-1536 10 14 4
210 | 91008793@N00-5555 10 9 14 5 4
211 | 94828981@N08-5798 0 3 8
212 | 35764233@N00-2209 4 0 1
213 | 35764233@N00-2207 11 17 0 1 3
214 | 30624156@N00-1591 6 14 4 2
215 | 30624156@N00-1592 0 1 3 8
216 | 14391210@N00-380 1 3 8 2
217 | 48453097@N00-3133 0 7 8
218 | 58919362@N00-3851 1 12 2
219 | 13672753@N00-302 10 14 4 1
220 | 9985167@N04-6028 15 1 3 8 2
221 | 84987970@N00-5287 0 1 8
222 | 9449875@N03-5747 0 3 8
223 | 44124367252@N01-2838 10 9 4
224 | 84987970@N00-5282 5 1 12 16
225 | 39460517@N03-2466 0 1 3 8
226 | 28157992@N03-1404 11 23 22
227 | 67952341@N00-4308 10 14 4
228 | 34211328@N00-1969 6 8 16
229 | 39460517@N03-2461 19 24 21
230 | 10502709@N05-58 6 5 13
231 | 34211328@N00-1933 19 24 21
232 | 51035555243@N01-3493 0 1 3 2
233 | 13907834@N00-316 5 0 13
234 | 23987663@N00-1128 18 0 1 8
235 | 34211328@N00-1937 19 24 21
236 | 13907834@N00-315 10 14 4
237 | 34211328@N00-1938 5 24 21 13
238 | 93241698@N00-5649 4 1 3
239 | 93241698@N00-5648 1 3 8
240 | 61377999@N03-3968 10 4 1
241 | 61377999@N03-3969 6 5 7
242 | 36357702@N08-2252 10 6 14 4 1 8 2
243 | 51035681908@N01-3504 10 5 4 0 1 7
244 | 84987970@N00-5253 11 0 3
245 | 20741443@N00-963 3 8 2
246 | 20741443@N00-962 0 3 8
247 | 20741443@N00-965 6 0 2
248 | 84987970@N00-5276 10 9 6 0 8 2
249 | 20741443@N00-969 15 0 8
250 | 20741443@N00-775 0 1 2
251 | 84987970@N00-5207 19 10 21
252 | 72236905@N00-4474 14 5 13
253 | 9161595@N03-5625 9 4 0 1
254 | 22979982@N06-1063 3 7 16
255 | 78015320@N00-4782 9 14 4 1 8
256 | 20456447@N03-686 11 6 14 4 0 1 2
257 | 20456447@N03-687 10 6 14 4
258 | 20456447@N03-688 15 9 1 16
259 | 68803445@N00-4326 10 14 4
260 | 30855862@N07-1627 11 6 4
261 | 23987663@N00-1123 5 0 1 3
262 | 43334562@N00-2795 6 0 3
263 | 84987970@N00-5294 19 24 21
264 | 84800532@N02-5132 5 4 1
265 | 78015320@N00-4783 9 5 17
266 | 20075574@N00-675 9 1 7
267 | 20741443@N00-988 6 3 12 8
268 | 53874606@N00-3626 11 14 4
269 | 29352917@N00-1523 4 1 3 8
270 | 29352917@N00-1524 4 16 2
271 | 20741443@N00-980 6 12 8
272 | 20344209@N00-683 17 4 16
273 | 14878709@N00-454 14 4 0 1 3 13 8
274 | 10502709@N05-67 1 3 8
275 | 47554402@N00-3045 0 3 8
276 | 23987663@N00-1135 1 3 8
277 | 8070489@N02-4914 1 3 8
278 | 51035610516@N01-3501 10 14 4 0 2
279 | 44654079@N05-2908 9 1 8
280 | 24395354@N02-1179 9 5 26
281 | 25475928@N04-1270 0 3 8 2
282 | 51035729697@N01-3514 9 6 2
283 | 98185064@N00-5912 10 4 8
284 | 20741443@N00-911 0 3 16
285 | 43139087@N00-2705 6 0 1 3 8
286 | 25846522@N00-1283 11 1 3 8
287 | 84987970@N00-5214 15 6 1 3 16
288 | 29352917@N00-1525 9 3 7
289 | 20741443@N00-763 0 3 8
290 | 24854893@N00-1247 4 12 7
291 | 20741443@N00-852 11 6 2
292 | 20741443@N00-753 6 3 8 2
293 | 27168489@N00-1358 14 4 0 1 3 8
294 | 7776449@N06-4758 9 0 3 7 13
295 | 23339848@N00-1089 5 17 2
296 | 47501960@N00-3037 15 3 8
297 | 9911655@N08-5947 14 7 16
298 | 32044903@N04-1726 6 13 8
299 | 18948547@N00-634 1 3 8
300 | 33547369@N00-1875 1 3 8
301 | 33906853@N00-1894 11 1 16
302 | 7776449@N06-4756 5 1 16
303 | 36553196@N08-2275 9 4 1 2
304 | 61609579@N00-3975 0 1 3 8
305 | 48784629@N00-3155 10 14 4
306 | 71482738@N00-4451 9 0 1 8
307 | 93897327@N00-5707 6 5 4 12
308 | 84987970@N00-5263 15 6 0 1 3 8 2
309 | 48085290@N00-3090 6 4 2
310 | 94725783@N00-5759 0 1 3 7
311 | 73416633@N00-4506 9 0 3
312 | 70289230@N00-4374 3 13 16
313 | 16693950@N00-523 4 1 3
314 | 85801042@N00-5369 15 10 6 14 4 0 3 8
315 | 10502709@N05-71 0 1 3 8
316 | 16693950@N00-525 6 14 4 0 1 3 8 2
317 | 16693950@N00-524 23 22 7
318 | 20741443@N00-821 1 3 8
319 | 20741443@N00-822 6 3 8 2
320 | 27947906@N08-1385 17 4 12
321 | 20741443@N00-825 1 3 8
322 | 34211328@N00-1918 10 12 2
323 | 75396048@N00-4570 0 12 2
324 | 20741443@N00-905 6 5 0 1 3 8 2
325 | 20741443@N00-904 1 3 2
326 | 49503002894@N01-3401 15 14 1
327 | 71206023@N00-4420 14 4 12
328 | 49503002894@N01-3403 14 4 1
329 | 49503002894@N01-3402 14 22 1
330 | 18412989@N00-619 4 1 3
331 | 84987970@N00-5343 0 1 2
332 | 9416709@N08-5715 14 4 1
333 | 33473816@N00-1865 6 13 2
334 | 84987970@N00-5180 0 1 3 8
335 | 8110030@N05-4961 11 10 6 4 22 0 3 12 8 2
336 |
--------------------------------------------------------------------------------
/seq2seq_c/python/ops/decoder.py:
--------------------------------------------------------------------------------
1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 | """Seq2seq layer operations for use in neural networks."""
16 |
17 | from __future__ import absolute_import
18 | from __future__ import division
19 | from __future__ import print_function
20 |
21 | import abc
22 | import six
23 |
24 | from tensorflow.python.framework import constant_op
25 | from tensorflow.python.framework import dtypes
26 | from tensorflow.python.framework import ops
27 | from tensorflow.python.framework import tensor_shape
28 | from tensorflow.python.framework import tensor_util
29 | from tensorflow.python.ops import array_ops
30 | from tensorflow.python.ops import control_flow_ops
31 | from tensorflow.python.ops import math_ops
32 | from tensorflow.python.ops import rnn
33 | from tensorflow.python.ops import rnn_cell_impl
34 | from tensorflow.python.ops import tensor_array_ops
35 | from tensorflow.python.ops import variable_scope
36 | from tensorflow.python.util import nest
37 |
38 |
39 | __all__ = ["Decoder", "dynamic_decode"]
40 |
41 |
42 | _transpose_batch_time = rnn._transpose_batch_time # pylint: disable=protected-access
43 | _zero_state_tensors = rnn_cell_impl._zero_state_tensors # pylint: disable=protected-access
44 |
45 |
46 | @six.add_metaclass(abc.ABCMeta)
47 | class Decoder(object):
48 | """An RNN Decoder abstract interface object.
49 |
50 | Concepts used by this interface:
51 | - `inputs`: (structure of) tensors and TensorArrays that is passed as input to
52 | the RNNCell composing the decoder, at each time step.
53 | - `state`: (structure of) tensors and TensorArrays that is passed to the
54 | RNNCell instance as the state.
55 | - `finished`: boolean tensor telling whether each sequence in the batch is
56 | finished.
57 | - `outputs`: Instance of BasicDecoderOutput. Result of the decoding, at each
58 | time step.
59 | """
60 |
61 | @property
62 | def batch_size(self):
63 | """The batch size of input values."""
64 | raise NotImplementedError
65 |
66 | @property
67 | def output_size(self):
68 | """A (possibly nested tuple of...) integer[s] or `TensorShape` object[s]."""
69 | raise NotImplementedError
70 |
71 | @property
72 | def output_dtype(self):
73 | """A (possibly nested tuple of...) dtype[s]."""
74 | raise NotImplementedError
75 |
76 | @abc.abstractmethod
77 | def initialize(self, name=None):
78 | """Called before any decoding iterations.
79 |
80 |     This method must compute initial input values and initial state.
81 |
82 | Args:
83 | name: Name scope for any created operations.
84 |
85 | Returns:
86 | `(finished, initial_inputs, initial_state)`: initial values of
87 | 'finished' flags, inputs and state.
88 | """
89 | raise NotImplementedError
90 |
91 | @abc.abstractmethod
92 | def step(self, time, inputs, state, name=None):
93 |     """Called per step of decoding (but built only once for dynamic decoding).
94 |
95 | Args:
96 | time: Scalar `int32` tensor. Current step number.
97 | inputs: RNNCell input (possibly nested tuple of) tensor[s] for this time
98 | step.
99 | state: RNNCell state (possibly nested tuple of) tensor[s] from previous
100 | time step.
101 | name: Name scope for any created operations.
102 |
103 | Returns:
104 | `(outputs, next_state, next_inputs, finished)`: `outputs` is an object
105 | containing the decoder output, `next_state` is a (structure of) state
106 | tensors and TensorArrays, `next_inputs` is the tensor that should be used
107 | as input for the next step, `finished` is a boolean tensor telling whether
108 | the sequence is complete, for each sequence in the batch.
109 | """
110 | raise NotImplementedError
111 |
112 | def finalize(self, outputs, final_state, sequence_lengths):
113 | raise NotImplementedError
114 |
115 | @property
116 | def tracks_own_finished(self):
117 | """Describes whether the Decoder keeps track of finished states.
118 |
119 | Most decoders will emit a true/false `finished` value independently
120 | at each time step. In this case, the `dynamic_decode` function keeps track
121 |     of which batch entries are already finished, and performs a logical OR to
122 |     add newly finished entries to the finished set.
123 |
124 | Some decoders, however, shuffle batches / beams between time steps and
125 | `dynamic_decode` will mix up the finished state across these entries because
126 | it does not track the reshuffle across time steps. In this case, it is
127 | up to the decoder to declare that it will keep track of its own finished
128 | state by setting this property to `True`.
129 |
130 | Returns:
131 | Python bool.
132 | """
133 | return False
134 |
135 |
136 | def _create_zero_outputs(size, dtype, batch_size):
137 | """Create a zero outputs Tensor structure."""
138 | def _create(s, d):
139 | return _zero_state_tensors(s, batch_size, d)
140 |
141 | return nest.map_structure(_create, size, dtype)
142 |
143 |
144 | def dynamic_decode(decoder,
145 | output_time_major=False,
146 | impute_finished=False,
147 | maximum_iterations=None,
148 | parallel_iterations=32,
149 | swap_memory=False,
150 | scope=None):
151 | """Perform dynamic decoding with `decoder`.
152 |
153 | Calls initialize() once and step() repeatedly on the Decoder object.
154 |
155 | Args:
156 | decoder: A `Decoder` instance.
157 | output_time_major: Python boolean. Default: `False` (batch major). If
158 | `True`, outputs are returned as time major tensors (this mode is faster).
159 | Otherwise, outputs are returned as batch major tensors (this adds extra
160 | time to the computation).
161 | impute_finished: Python boolean. If `True`, then states for batch
162 | entries which are marked as finished get copied through and the
163 | corresponding outputs get zeroed out. This causes some slowdown at
164 | each time step, but ensures that the final state and outputs have
165 | the correct values and that backprop ignores time steps that were
166 | marked as finished.
167 | maximum_iterations: `int32` scalar, maximum allowed number of decoding
168 | steps. Default is `None` (decode until the decoder is fully done).
169 | parallel_iterations: Argument passed to `tf.while_loop`.
170 | swap_memory: Argument passed to `tf.while_loop`.
171 | scope: Optional variable scope to use.
172 |
173 | Returns:
174 | `(final_outputs, final_state, final_sequence_lengths)`.
175 |
176 | Raises:
177 | TypeError: if `decoder` is not an instance of `Decoder`.
178 | ValueError: if `maximum_iterations` is provided but is not a scalar.
179 | """
180 | if not isinstance(decoder, Decoder):
181 | raise TypeError("Expected decoder to be type Decoder, but saw: %s" %
182 | type(decoder))
183 |
184 | with variable_scope.variable_scope(scope, "decoder") as varscope:
185 | # Properly cache variable values inside the while_loop
186 | if varscope.caching_device is None:
187 | varscope.set_caching_device(lambda op: op.device)
188 |
189 | if maximum_iterations is not None:
190 | maximum_iterations = ops.convert_to_tensor(
191 | maximum_iterations, dtype=dtypes.int32, name="maximum_iterations")
192 | if maximum_iterations.get_shape().ndims != 0:
193 | raise ValueError("maximum_iterations must be a scalar")
194 |
195 | initial_finished, initial_inputs, initial_state = decoder.initialize()
196 |
197 | zero_outputs = _create_zero_outputs(decoder.output_size,
198 | decoder.output_dtype,
199 | decoder.batch_size)
200 |
201 | if maximum_iterations is not None:
202 | initial_finished = math_ops.logical_or(
203 | initial_finished, 0 >= maximum_iterations)
204 | initial_sequence_lengths = array_ops.zeros_like(
205 | initial_finished, dtype=dtypes.int32)
206 | initial_time = constant_op.constant(0, dtype=dtypes.int32)
207 |
208 | def _shape(batch_size, from_shape):
209 | if (not isinstance(from_shape, tensor_shape.TensorShape) or
210 | from_shape.ndims == 0):
211 | return tensor_shape.TensorShape(None)
212 | else:
213 | batch_size = tensor_util.constant_value(
214 | ops.convert_to_tensor(
215 | batch_size, name="batch_size"))
216 | return tensor_shape.TensorShape([batch_size]).concatenate(from_shape)
217 |
218 | def _create_ta(s, d):
219 | return tensor_array_ops.TensorArray(
220 | dtype=d,
221 | size=0,
222 | dynamic_size=True,
223 | element_shape=_shape(decoder.batch_size, s))
224 |
225 | initial_outputs_ta = nest.map_structure(_create_ta, decoder.output_size,
226 | decoder.output_dtype)
227 |
228 | def condition(unused_time, unused_outputs_ta, unused_state, unused_inputs,
229 | finished, unused_sequence_lengths):
230 | return math_ops.logical_not(math_ops.reduce_all(finished))
231 |
232 | def body(time, outputs_ta, state, inputs, finished, sequence_lengths):
233 | """Internal while_loop body.
234 |
235 | Args:
236 | time: scalar int32 tensor.
237 | outputs_ta: structure of TensorArray.
238 | state: (structure of) state tensors and TensorArrays.
239 | inputs: (structure of) input tensors.
240 | finished: bool tensor (keeping track of what's finished).
241 | sequence_lengths: int32 tensor (keeping track of time of finish).
242 |
243 | Returns:
244 | `(time + 1, outputs_ta, next_state, next_inputs, next_finished,
245 | next_sequence_lengths)`.
246 |
247 | """
248 | (next_outputs, decoder_state, next_inputs,
249 | decoder_finished) = decoder.step(time, inputs, state)
250 | if decoder.tracks_own_finished:
251 | next_finished = decoder_finished
252 | else:
253 | next_finished = math_ops.logical_or(decoder_finished, finished)
254 | if maximum_iterations is not None:
255 | next_finished = math_ops.logical_or(
256 | next_finished, time + 1 >= maximum_iterations)
257 | next_sequence_lengths = array_ops.where(
258 | math_ops.logical_and(math_ops.logical_not(finished), next_finished),
259 | array_ops.fill(array_ops.shape(sequence_lengths), time + 1),
260 | sequence_lengths)
261 |
262 | nest.assert_same_structure(state, decoder_state)
263 | nest.assert_same_structure(outputs_ta, next_outputs)
264 | nest.assert_same_structure(inputs, next_inputs)
265 |
266 | # Zero out output values past finish
267 | if impute_finished:
268 | emit = nest.map_structure(
269 | lambda out, zero: array_ops.where(finished, zero, out),
270 | next_outputs,
271 | zero_outputs)
272 | else:
273 | emit = next_outputs
274 |
275 | # Copy through states past finish
276 | def _maybe_copy_state(new, cur):
277 | # TensorArrays and scalar states get passed through.
278 | if isinstance(cur, tensor_array_ops.TensorArray):
279 | pass_through = True
280 | else:
281 | new.set_shape(cur.shape)
282 | pass_through = (new.shape.ndims == 0)
283 | return new if pass_through else array_ops.where(finished, cur, new)
284 |
285 | if impute_finished:
286 | next_state = nest.map_structure(
287 | _maybe_copy_state, decoder_state, state)
288 | else:
289 | next_state = decoder_state
290 |
291 | outputs_ta = nest.map_structure(lambda ta, out: ta.write(time, out),
292 | outputs_ta, emit)
293 | return (time + 1, outputs_ta, next_state, next_inputs, next_finished,
294 | next_sequence_lengths)
295 |
296 | res = control_flow_ops.while_loop(
297 | condition,
298 | body,
299 | loop_vars=[
300 | initial_time, initial_outputs_ta, initial_state, initial_inputs,
301 | initial_finished, initial_sequence_lengths,
302 | ],
303 | parallel_iterations=parallel_iterations,
304 | swap_memory=swap_memory)
305 |
306 | final_outputs_ta = res[1]
307 | final_state = res[2]
308 | final_sequence_lengths = res[5]
309 |
310 | final_outputs = nest.map_structure(lambda ta: ta.stack(), final_outputs_ta)
311 |
312 | try:
313 | final_outputs, final_state = decoder.finalize(
314 | final_outputs, final_state, final_sequence_lengths)
315 | except NotImplementedError:
316 | pass
317 |
318 | if not output_time_major:
319 | final_outputs = nest.map_structure(_transpose_batch_time, final_outputs)
320 |
321 | return final_outputs, final_state, final_sequence_lengths
322 |
--------------------------------------------------------------------------------
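Note: dynamic_decode drives any Decoder through a tf.while_loop: initialize() is called once, then the graph built by step() runs each iteration until every batch entry is finished (or maximum_iterations is hit). It type-checks against this module's own Decoder class, so decoders must subclass it. A minimal, purely illustrative sketch; CountdownDecoder is a toy invented for this example (not part of the repo), and the import assumes the repository root is on PYTHONPATH:

import tensorflow as tf
from seq2seq_c.python.ops import decoder as decoder_py

class CountdownDecoder(decoder_py.Decoder):
    """Toy decoder: adds 1 to its state for a fixed number of steps."""

    def __init__(self, batch, units, steps):
        self._batch, self._units, self._steps = batch, units, steps

    @property
    def batch_size(self):
        return self._batch

    @property
    def output_size(self):
        return self._units                  # each step emits a [units] vector

    @property
    def output_dtype(self):
        return tf.float32

    def initialize(self, name=None):
        finished = tf.zeros([self._batch], tf.bool)
        first_inputs = tf.zeros([self._batch, self._units])
        initial_state = tf.zeros([self._batch, self._units])
        return finished, first_inputs, initial_state

    def step(self, time, inputs, state, name=None):
        outputs = state + 1.0               # dummy per-step computation
        finished = tf.greater_equal(time + 1, tf.fill([self._batch], self._steps))
        return outputs, outputs, inputs, finished

dec = CountdownDecoder(batch=4, units=3, steps=5)
outs, final_state, lengths = decoder_py.dynamic_decode(dec)
# outs: [4, 5, 3] (batch major by default); lengths: [5, 5, 5, 5]
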
/Trip/data/TKY_split400_set.dat:
--------------------------------------------------------------------------------
1 | 187-21480 1 0 18
2 | 205-23354 9 1 7
3 | 187-21486 1 0 18
4 | 321-35723 16 1 9 1
5 | 321-35722 1 16 16
6 | 321-35725 16 1 9
7 | 257-28597 5 5 2
8 | 353-39412 14 3 3 10
9 | 151-17418 15 7 4
10 | 288-32428 0 17 1 1
11 | 151-17411 7 1 1 7
12 | 162-18806 25 20 20 25
13 | 192-22008 22 24 9
14 | 203-23260 5 4 6
15 | 203-23263 5 2 2
16 | 338-37598 3 3 0
17 | 192-22004 5 19 3
18 | 55-6316 0 10 0
19 | 251-28049 7 20 6
20 | 135-15393 3 12 12 3
21 | 117-13773 8 1 1
22 | 336-37434 1 9 14
23 | 29-3680 2 20 2
24 | 117-13779 6 20 20
25 | 8-788 4 1 19
26 | 118-13805 28 2 2 1
27 | 118-13806 1 4 17 0 10 26 28
28 | 65-7834 3 0 5
29 | 163-18882 0 28 18
30 | 134-15360 9 1 0
31 | 302-34017 0 6 12
32 | 288-32417 0 1 9 1
33 | 113-13048 12 0 0
34 | 396-44060 21 7 3
35 | 117-13783 1 1 6
36 | 396-44109 2 25 3
37 | 284-31715 0 1 1
38 | 51-5874 17 0 15
39 | 365-40845 2 0 0
40 | 336-37393 1 3 0
41 | 36-4372 3 3 15
42 | 159-18400 4 1 3
43 | 43-4908 22 6 1
44 | 63-7408 0 10 10 4
45 | 227-25674 0 3 15
46 | 227-25678 1 17 4
47 | 385-42788 4 1 9
48 | 235-26451 9 9 1
49 | 385-42786 23 4 1
50 | 385-42781 0 2 0 0 10
51 | 14-1494 1 1 21
52 | 196-22454 7 1 17
53 | 30-3739 15 0 0
54 | 195-22360 1 2 11
55 | 196-22452 3 17 4 7
56 | 12-1275 1 1 9 1
57 | 113-12997 0 2 12
58 | 12-1277 21 1 9
59 | 30-3731 15 3 14 14 15
60 | 12-1270 7 1 9
61 | 12-1273 1 8 9
62 | 113-12991 25 8 4 0
63 | 293-32875 4 15 15
64 | 243-27221 27 24 22
65 | 18-2243 1 9 10
66 | 342-37981 14 3 3 0
67 | 68-8190 0 25 4 1
68 | 29-3545 3 3 0
69 | 243-27223 1 24 22
70 | 35-4137 0 17 0
71 | 289-32514 20 6 23
72 | 289-32515 23 6 1
73 | 26-3077 3 3 2
74 | 102-11773 22 0 0 1 9 22
75 | 26-3075 3 2 3
76 | 26-3072 3 27 21 27 0 0
77 | 130-14983 28 0 15
78 | 196-22535 0 1 4
79 | 196-22532 17 1 7
80 | 161-18553 0 0 6
81 | 282-31521 2 1 0
82 | 250-27889 1 1 6
83 | 30-3748 15 3 0
84 | 30-3747 2 25 4
85 | 285-31914 28 0 3
86 | 324-36006 2 1 3
87 | 30-3741 15 0 0
88 | 83-9524 3 15 15 3
89 | 396-44093 3 25 6 12
90 | 196-22433 7 1 17
91 | 187-21600 3 9 3
92 | 35-4142 0 2 11
93 | 289-32519 7 1 29
94 | 277-30964 1 8 12
95 | 21-2519 1 9 1
96 | 13-1352 10 14 14
97 | 227-25695 2 3 15
98 | 227-25694 15 2 11
99 | 43-4884 24 6 1
100 | 21-2620 1 1 15
101 | 124-14432 7 7 9
102 | 284-31681 0 17 1
103 | 196-22508 1 1 25
104 | 147-17030 14 1 1 15
105 | 365-40796 23 0 10
106 | 113-13009 12 6 0
107 | 45-5035 1 9 21
108 | 339-37659 0 12 12 0
109 | 230-25971 10 17 1
110 | 113-13004 0 2 12
111 | 126-14577 6 1 9
112 | 151-17465 0 17 1
113 | 151-17463 7 1 7
114 | 151-17462 15 7 1 9 1 7
115 | 241-26960 0 1 1
116 | 36-4335 2 0 0
117 | 203-23251 5 6 2 2
118 | 336-37348 12 1 17 0
119 | 206-23359 0 1 9 4
120 | 365-40902 23 2 0
121 | 254-28315 2 6 5
122 | 146-16892 7 1 1 19
123 | 342-38144 6 6 2
124 | 13-1420 10 14 14
125 | 338-37622 4 17 1 1 6 22
126 | 124-14404 3 3 15
127 | 354-39503 2 0 3
128 | 338-37621 4 1 1 4
129 | 338-37627 25 16 16
130 | 235-26454 1 9 9
131 | 150-17361 22 1 9 17
132 | 359-40117 15 27 4
133 | 338-37619 4 1 1 4
134 | 118-13836 4 1 1 0
135 | 118-13830 0 14 0 1 1 1 4
136 | 244-27236 1 1 22
137 | 361-40260 2 1 9
138 | 68-8146 2 3 3
139 | 65-7867 5 19 3
140 | 302-34046 0 2 2
141 | 325-36187 27 9 7
142 | 201-23093 0 17 1
143 | 18-2048 1 9 10
144 | 18-1965 4 0 10
145 | 227-25783 15 0 2 2 3 7
146 | 119-13923 13 13 13
147 | 113-12975 0 6 12
148 | 96-11178 19 0 2
149 | 113-12979 12 0 0
150 | 76-8967 4 8 8 4
151 | 76-8963 1 1 4
152 | 146-16808 1 9 8
153 | 65-7783 3 3 5
154 | 390-43105 2 3 3 21
155 | 65-7788 4 4 5
156 | 235-26402 1 9 1 1
157 | 86-9746 12 5 5
158 | 86-9748 12 25 25
159 | 196-22403 7 1 17
160 | 285-31898 2 0 0 3
161 | 196-22407 7 1 17
162 | 369-41514 0 0 2
163 | 198-22859 20 2 15
164 | 196-22656 17 1 7
165 | 26-3042 3 2 2
166 | 196-22654 17 1 7
167 | 63-7541 21 1 9
168 | 1-14 1 9 9 1 7
169 | 1-17 7 1 1
170 | 1-13 27 4 0
171 | 289-32523 6 1 7
172 | 141-16263 11 10 10
173 | 96-11088 2 0 0 2
174 | 196-22663 7 1 17
175 | 227-25713 0 3 15
176 | 289-32538 2 6 29
177 | 49-5556 11 13 13
178 | 196-22544 17 1 7
179 | 365-40906 23 2 2
180 | 196-22540 17 1 7
181 | 342-38136 1 1 4 4
182 | 196-22548 9 1 7
183 | 12-1313 7 1 0
184 | 12-1317 1 4 24 23 5
185 | 12-1315 1 24 1
186 | 26-3038 3 3 2
187 | 187-21492 1 17 0 13
188 | 365-40750 23 2 2 0
189 | 187-21496 7 0 18
190 | 321-35734 9 1 16
191 | 102-11766 0 25 22
192 | 47-5121 12 23 23
193 | 365-40759 0 2 2 23
194 | 187-21556 1 0 18
195 | 187-21653 1 0 18
196 | 76-9006 1 1 2
197 | 76-9008 4 0 1
198 | 339-37759 0 1 1 17
199 | 205-23341 0 17 1
200 | 350-39085 11 11 0
201 | 8-772 19 25 4
202 | 288-32436 1 1 0
203 | 384-42749 3 0 0 10
204 | 187-21446 1 0 18
205 | 365-40831 2 23 23 0
206 | 55-6365 0 17 17 4
207 | 251-28038 12 1 9
208 | 205-23344 9 9 1 4
209 | 117-13764 6 1 1
210 | 79-9121 0 1 3
211 | 117-13760 8 1 1 6
212 | 248-27628 0 1 9
213 | 36-4391 4 1 7
214 | 105-12125 0 17 1
215 | 105-12123 6 2 0 0
216 | 105-12120 0 1 9 1 0
217 | 65-7820 3 0 2 2 5
218 | 353-39452 14 0 0 14
219 | 321-35740 12 12 16
220 | 98-11333 0 2 11
221 | 29-3603 2 3 3
222 | 288-32423 0 17 1
223 | 230-25943 10 17 1 9 9 27
224 | 385-42771 6 16 3
225 | 116-13686 25 0 0
226 | 187-21449 1 17 0
227 | 320-35625 1 9 1
228 | 164-19004 3 3 4
229 | 271-30356 1 9 1
230 | 283-31623 25 4 25
231 | 252-28148 20 23 5
232 | 204-23291 1 1 0
233 | 227-25665 0 3 15
234 | 302-34092 0 3 0
235 | 227-25660 3 3 15
236 | 147-16959 26 26 0
237 | 227-25663 1 1 27
238 | 1-5 1 1 7
239 | 5-457 1 1 2
240 | 398-44303 0 3 1
241 | 385-42778 10 4 8
242 | 235-26330 1 9 1
243 | 135-15520 3 8 8 12
244 | 336-37377 1 9 3
245 | 168-19371 10 10 0
246 | 65-7896 3 0 2 6
247 | 196-22442 7 1 17
248 | 347-38808 4 1 9
249 | 359-40111 15 27 1 7
250 | 124-14403 3 3 15
251 | 12-1266 7 1 8 2 1
252 | 65-7951 5 6 3
253 | 65-7952 3 1 5
254 | 289-32525 27 7 1
255 | 285-31860 11 2 3
256 | 12-1280 4 1 9 7
257 | 63-7500 1 4 10 10 0 25
258 | 99-11541 10 4 7
259 | 21-2593 1 9 1
260 | 102-11764 9 1 22
261 | 289-32528 23 6 2 1 7
262 | 33-3980 5 1 5
263 | 352-39346 27 21 0
264 | 205-23342 5 1 9
265 | 196-22506 1 1 7
266 | 371-41651 4 6 19
267 | 205-23349 25 1 3 3 1
268 | 196-22509 17 1 4
269 | 58-6792 8 23 5
270 | 49-5519 19 13 13
271 | 161-18546 2 0 16
272 | 161-18540 6 2 13
273 | 282-31536 2 0 0 2
274 | 332-36951 24 24 7
275 | 135-15459 3 12 8 3
276 | 233-26198 9 1 21
277 | 26-2994 3 7 21 7
278 | 96-11031 2 0 0
279 | 298-33660 13 0 17 0
280 | 30-3754 15 3 14
281 | 38-4513 6 4 1
282 | 274-30607 9 1 15
283 | 55-6339 2 2 6
284 | 38-4519 6 4 1
285 | 113-13124 12 6 0
286 | 327-36504 2 6 29
287 | 395-43957 7 2 1
288 | 196-22595 1 7 7
289 | 227-25680 1 9 1
290 | 196-22597 17 1 7
291 | 43-4894 1 3 1
292 | 196-22592 17 4 1 7
293 | 227-25685 15 2 11
294 | 100-11596 17 1 1
295 | 164-18991 17 3 3
296 | 187-21468 1 0 18
297 | 47-5173 0 12 23
298 | 187-21462 1 0 18
299 | 187-21461 18 0 1 17
300 | 353-39377 1 1 0
301 | 163-18855 19 19 5
302 | 353-39470 0 14 14
303 | 187-21501 1 0 18
304 | 339-37710 0 1 17
305 | 230-25985 10 17 9
306 | 45-5026 3 9 1
307 | 187-21509 1 0 18
308 | 151-17473 1 17 0 2
309 | 113-13010 0 4 17 0
310 | 187-21667 1 0 18
311 | 385-42782 0 16 6
312 | 196-22424 1 1 7
313 | 31-3794 25 4 7
314 | 31-3791 8 4 7
315 | 235-26326 1 9 9 9
316 | 13-1434 0 0 10
317 | 13-1436 0 14 14
318 | 146-16881 1 9 1
319 | 146-16884 2 0 17 1
320 | 338-37618 2 2 11
321 | 117-13717 6 1 1
322 | 78-9073 0 0 1
323 | 117-13714 5 1 1
324 | 13-1439 4 17 1
325 | 338-37611 17 1 1 4
326 | 126-14605 1 1 7
327 | 365-40883 23 0 0
328 | 196-22486 7 1 17
329 | 126-14602 0 0 15
330 | 196-22485 7 1 1 4 7
331 | 232-26125 12 1 9 7
332 | 318-35554 6 12 8 4 1 17
333 | 318-35555 1 9 1 4 8 6
334 | 124-14441 15 7 9 4 4
335 | 68-8159 27 4 0
336 | 68-8157 12 1 1 12 27
337 | 59-6857 14 10 28
338 | 68-8155 12 1 9
339 | 68-8153 0 17 1 1 7
340 | 68-8150 12 4 1 1
341 | 113-13066 12 0 0
342 | 201-23083 0 1 9
343 | 365-40826 23 10 10
344 | 30-3733 6 4 15
345 | 29-3672 3 2 1 3
346 | 113-12994 12 6 0
347 | 29-3677 3 25 25 3
348 | 43-4920 22 1 9
349 | 96-11160 2 14 2 0
350 | 128-14716 9 0 1
351 | 5-462 6 29 29 23
352 | 96-11168 2 0 0 2
353 | 293-32982 1 9 7 15
354 | 293-33018 7 1 15
355 | 12-1272 7 1 9 8 1 9
356 | 196-22467 9 1 7
357 | 30-3716 24 1 7
358 | 33-4012 5 26 1
359 | 196-22435 7 1 17
360 | 86-9771 4 10 26
361 | 338-37608 6 4 7 0 0 2
362 | 18-2222 1 9 10
363 | 14-1567 21 1 9
364 | 342-37998 14 3 3
365 | 63-7470 2 2 25
366 | 257-28689 2 11 19 5 5
367 | 257-28687 5 5 5
368 | 196-22558 1 7 15
369 | 194-22311 27 7 1
370 | 194-22313 1 12 12
371 | 265-29658 28 10 0
372 | 257-28696 5 16 5 5
373 | 334-37195 25 24 1
374 | 199-22883 15 3 1 9 0
375 | 336-37476 0 1 1
376 | 285-31831 11 11 11
377 | 289-32561 1 7 27
378 | 385-42774 6 3 0 10
379 | 385-42776 0 28 0 4 7
380 | 385-42777 2 3 0
381 | 285-31931 11 2 3
382 | 394-43872 0 26 10 10
383 | 353-39438 3 14 14
384 | 173-20014 0 28 28 0
385 | 227-25686 11 2 15
386 | 98-11405 14 0 0
387 | 187-21540 1 0 18
388 | 12-1295 29 29 8 1 1 7 7
389 | 187-21542 1 0 18
390 | 113-13111 0 2 2
391 | 18-2127 1 9 10
392 | 289-32511 7 23 12
393 | 80-9239 4 3 14
394 | 264-29517 27 9 1
395 | 289-32517 29 1 7 27
396 | 192-22025 3 2 3
397 | 227-25774 1 9 1
398 | 365-40963 23 2 0
399 | 365-40809 0 6 23 23
400 | 365-40967 23 2 2 0
401 | 105-12113 0 3 7 7
402 | 65-7694 8 5 5
403 | 105-12114 7 3 0
404 | 233-26226 19 5 23
405 | 284-31690 0 1 1
406 | 163-18866 18 0 0 18
407 | 187-21531 1 0 18
408 | 65-7814 5 25 2 5
409 | 353-39444 8 8 14
410 | 187-21537 1 0 18
411 | 248-27622 1 9 1
412 | 36-4312 20 20 16 2
413 | 164-19014 3 2 0
414 | 257-28703 5 6 4 5
415 | 230-25959 10 1 10
416 | 304-34213 9 1 4
417 | 161-18703 6 2 16
418 | 150-17281 22 8 22
419 | 78-9083 2 1 0
420 | 78-9080 2 2 1
421 | 65-7751 4 19 5
422 | 75-8898 3 0 11
423 | 21-2449 0 3 15
424 | 13-1373 10 0 2 11
425 | 371-41639 3 0 17 1
426 | 44-4923 2 1 1
427 | 196-22477 7 1 17
428 | 196-22476 7 1 17
429 | 371-41638 7 1 9
430 | 168-19362 9 1 16
431 | 359-40109 15 4 0
432 | 68-8160 1 1 12
433 | 65-7961 3 17 4 5
434 | 318-35567 21 27 4 8
435 | 318-35566 5 6 8 27
436 | 359-40106 17 1 4 7
437 | 65-7964 5 6 3
438 | 318-35563 1 9 1
439 | 365-40856 23 23 2
440 | 13-1382 10 0 0 10
441 | 18-2062 14 3 3 0
442 | 117-13695 8 8 8
443 | 201-23070 2 2 0
444 | 196-22394 7 17 1
445 | 365-40817 2 2 0
446 | 96-11052 2 0 0 2
447 | 147-16945 0 0 0
448 | 63-7514 9 1 4
449 | 336-37360 15 0 17 1 9
450 | 102-11756 24 4 1 9
451 | 371-41647 21 1 4
452 | 205-23352 2 1 9 1 4
453 | 141-16237 11 17 1
454 | 49-5527 4 28 28 28
455 | 49-5526 1 9 1 0
456 | 21-2619 0 1 1
457 | 196-22512 17 1 7
458 | 21-2614 1 1 12 12 1 9
459 | 233-26178 1 9 1
460 | 26-3083 4 4 16
461 | 310-34863 0 25 24
462 | 350-39116 0 16 5
463 | 272-30461 9 7 27 3
464 | 187-21668 1 0 18
465 | 246-27430 12 1 2 1
466 | 369-41531 6 1 1
467 | 124-14502 15 3 1 1
468 | 96-11122 19 18 18 19
469 | 384-42762 3 2 3
470 | 384-42763 3 2 0
471 | 396-44091 3 0 27
472 | 339-37701 0 1 1
473 | 384-42764 0 0 2
474 | 384-42765 3 0 10
475 | 264-29556 1 4 6
476 | 250-27903 1 0 2 6
477 | 63-7560 2 0 10 0
478 | 360-40198 0 0 1 9
479 | 194-22320 21 27 27
480 | 296-33441 13 13 18
481 | 161-18522 6 2 13
482 | 290-32582 0 1 9
483 | 289-32532 27 7 1
484 | 285-31805 0 0 3
485 | 187-21478 1 0 18
486 | 187-21475 1 0 18
487 | 187-21476 0 1 3
488 | 1-2 1 9 1 7
489 | 187-21470 9 1 17 0
490 | 255-28489 1 9 1
491 | 1-7 7 1 9 9 1 7
492 | 388-42942 0 17 1
493 | 353-39403 1 1 14
494 | 187-21572 1 0 18
495 | 55-6394 20 8 6
496 | 151-17401 0 1 7
497 | 231-26032 3 0 27
498 | 192-22010 5 4 1 9 1
499 | 203-23278 5 1 1
500 | 18-2110 1 9 10
501 | 365-40727 2 23 23 0
502 | 398-44301 1 9 1
503 | 338-37600 0 2 11
504 | 338-37601 6 1 4
505 | 338-37603 4 1 1 4
506 | 338-37605 2 0 10 10 0
507 | 338-37607 6 1 1 4
508 | 13-1409 14 26 26 10
509 | 58-6744 5 6 8
510 | 248-27602 1 9 1
511 | 128-14744 17 0 0 17
512 | 65-7844 5 6 0 14
513 | 65-7840 3 1 5
514 | 196-22629 17 1 7
515 | 233-26258 6 4 1
516 | 359-40112 27 4 1 7
517 | 59-6840 7 1 0
518 | 113-13077 12 6 0
519 | 113-13074 12 2 0
520 | 113-13075 2 0 0
521 | 18-2184 1 9 10
522 | 151-17497 26 10 0
523 | 342-38011 14 1 7 14
524 | 117-13794 8 7 27
525 | 117-13793 1 1 6
526 | 369-41526 2 1 1 14
527 | 36-4340 23 23 15
528 | 18-2026 1 9 10
529 | 151-17498 0 1 7
530 | 96-11155 26 2 2
531 | 9-1057 7 1 8 12 12 6
532 | 159-18413 0 17 1
533 | 5-478 1 1 1
534 | 267-29919 2 0 25
535 | 243-27202 1 15 0
536 | 135-15421 3 12 12
537 | 13-1320 7 1 1
538 | 243-27204 1 24 22
539 | 13-1323 0 0 14
540 | 146-16829 16 1 23
541 | 243-27209 1 24 22
542 | 13-1328 10 0 0 10
543 | 196-22422 7 1 17
544 | 30-3729 0 14 3 15
545 | 196-22426 14 14 3 15
546 | 30-3726 0 3 15
547 | 113-12981 0 2 2
548 | 113-12982 12 0 0
549 | 293-32860 0 0 15
550 | 336-37380 12 1 9
551 | 63-7462 4 1 9
552 | 124-14399 12 8 4 1
553 | 293-32869 4 1 1
554 | 302-34096 0 3 3 0
555 | 293-32907 4 1 1
556 | 21-2577 1 1 4
557 | 57-6566 4 0 1
558 | 100-11639 1 9 1
559 | 342-38150 4 1 9 1 12
560 | 196-22473 1 7 21
561 | 196-22640 7 1 7
562 | 196-22528 17 1 7
563 | 63-7489 4 1 9 1 4
564 | 58-6757 5 8 1
565 | 377-42126 7 20 20
566 | 390-43158 1 1 24
567 | 336-37466 1 7 17 0
568 | 35-4191 10 0 2
569 | 338-37616 4 1 1 4
570 | 324-36010 2 0 14
571 | 76-8948 1 0 16
572 | 191-21905 9 8 8
573 | 369-41544 14 8 6 14
574 | 146-16910 19 4 1
575 | 296-33384 13 0 1
576 | 96-11204 2 0 2
577 | 191-21908 28 10 0 9
578 | 342-38096 17 4 0
579 | 124-14398 1 4 8 12
580 | 143-16516 17 1 9
581 | 151-17527 0 17 0 0
582 | 18-2096 1 9 10
583 | 327-36525 2 0 2
584 | 196-22633 17 1 7
585 | 196-22634 7 17 1
586 | 360-40141 3 1 9
587 | 332-37021 1 17 21
588 | 233-26212 0 1 1
589 | 196-22580 7 1 4
590 | 65-7916 4 6 5
591 | 116-13467 25 16 16
592 | 147-17041 25 25 0
593 | 100-11630 26 17 9 1
594 | 187-21528 1 0 18
595 | 187-21529 1 0 18
596 | 187-21522 1 0 18
597 | 230-25960 14 0 1 9 1 10
598 | 10-1112 2 1 9 7
599 | 151-17450 0 0 0
600 | 151-17457 0 17 1
601 | 284-31744 0 0 1
602 | 8-888 19 3 1
603 | 179-20532 5 23 23 5
604 | 18-1984 24 10 8
605 | 113-13039 12 0 0
606 | 65-7721 5 8 4 5
607 | 233-26236 0 17 1 1
608 | 12-1288 7 1 17
609 | 78-9076 0 0 3
610 | 338-37632 2 11 1 6
611 | 146-16860 1 9 1 4 19
612 | 78-9072 1 1 0
613 | 233-26230 26 10 0 17 4 6
614 | 126-14628 4 17 1 9
615 | 126-14629 1 7 12
616 | 13-1361 10 14 0
617 | 196-22466 9 1 7
618 | 118-13843 14 0 1
619 | 68-8176 15 2 3 27
620 | 365-40812 23 2 2 0
621 | 250-27827 24 6 4
622 | 107-12395 14 6 5 5 6
623 | 246-27437 8 4 4
624 | 68-8178 16 4 4
625 | 13-1444 10 0 0
626 | 35-4228 6 23 5
627 | 13-1446 6 2 11 2
628 | 8-839 19 4 1 4
629 | 365-40805 10 2 23
630 | 302-34036 4 0 3
631 | 102-11744 9 1 4
632 | 102-11745 4 10 26
633 | 76-8979 4 1 4
634 | 29-3576 2 20 2
635 | 371-41637 10 0 17
636 | 264-29643 1 9 0
637 | 49-5532 0 0 13
638 | 49-5536 9 1 0 0
639 | 146-16810 1 6 22
640 | 390-43117 1 24 22
641 | 196-22418 7 1 17
642 | 102-11759 12 1 9 1 0
643 | 196-22411 7 1 17
644 | 196-22413 7 1 17
645 | 124-14510 1 4 12
646 | 198-22866 4 7 15
647 | 193-22193 3 3 2 2
648 | 63-7458 25 10 0 25
649 | 396-44085 21 1 1 3
650 | 384-42754 3 0 18
651 | 384-42752 0 0 3
652 | 201-23085 0 2 2 1 2 0
653 | 9-987 1 1 4
654 | 289-32494 21 1 23
655 | 289-32493 23 23 1
656 | 227-25706 15 2 11
657 | 49-5547 11 2 13 13
658 | 396-44110 3 25 24
659 | 365-40938 23 16 0
660 | 196-22575 17 1 7
661 | 370-41605 11 2 12 12 2 11
662 | 22-2708 21 3 14
663 | 296-33450 0 10 13
664 | 205-23350 1 0 10 17
665 | 49-5548 13 2 11
666 | 285-31818 3 2 2
667 | 289-32549 2 2 1
668 | 12-1303 1 1 9
669 | 29-3609 2 20 2
670 | 205-23357 9 1 4
671 |
--------------------------------------------------------------------------------
/Trip/data/Melb_set.dat:
--------------------------------------------------------------------------------
1 | 48453097@N00-2242 33 0 7
2 | 48453097@N00-2243 52 25 7
3 | 48453097@N00-2240 19 27 6 4
4 | 48453097@N00-2241 6 13 31
5 | 91151414@N00-4472 1 27 49 14 0 4
6 | 93913731@N00-4783 9 24 52
7 | 31148157@N00-1241 47 2 49 15
8 | 8502118@N08-4217 35 48 32 33 30 24 14 58 7 4
9 | 12900555@N02-212 29 27 24
10 | 17211040@N00-413 13 0 4
11 | 8502118@N08-4219 20 34 27 49 54 38 18 15 44
12 | 44124479801@N01-2042 13 14 0 4
13 | 17211040@N00-414 6 0 8
14 | 14197203@N05-328 11 76 24
15 | 64015960@N00-3128 29 2 3
16 | 14197203@N05-325 28 26 2
17 | 64015960@N00-3127 38 18 16 31 5 22
18 | 30206889@N00-1150 5 25 0 7
19 | 52618473@N02-2469 19 4 12
20 | 59551944@N00-2893 21 25 4
21 | 48213112@N00-2223 1 18 15
22 | 56384399@N00-2673 54 77 79 25 0 4
23 | 65638600@N05-3187 20 58 16
24 | 51035603197@N01-2408 24 5 4
25 | 79235216@N00-3888 46 3 5 22
26 | 65779544@N00-3201 1 36 2
27 | 23875378@N06-727 35 24 14 0 4
28 | 7414413@N05-3534 36 73 4
29 | 16867627@N06-394 20 51 38 18 16 44 53 56
30 | 16867627@N06-395 48 45 19 29 27 9 11 57 50 81 53 0 12
31 | 72327345@N00-3477 38 5 22
32 | 45676902@N00-2099 24 79 7
33 | 8502118@N08-4216 1 0 7
34 | 91256982@N00-4621 48 43 12
35 | 27759575@N05-959 29 51 56
36 | 91748271@N00-4650 42 49 8
37 | 52952793@N00-2505 19 24 14 0 7
38 | 56935359@N00-2678 45 17 13
39 | 37468950@N08-1685 48 28 32 17 33 9 30 3 38 44 21 0 61 12
40 | 37468950@N08-1686 1 6 15 31 52 7 8
41 | 60111909@N00-2903 27 15 24 14 0 56 7 8
42 | 60111909@N00-2904 24 58 5 43 0 56 7
43 | 58085609@N00-2824 5 66 7
44 | 78681550@N00-3869 9 30 11 49 12
45 | 78681550@N00-3868 5 0 4
46 | 62948807@N00-3007 33 2 4
47 | 28685313@N00-1075 28 9 71 75 4 12
48 | 87584208@N00-4389 1 9 4
49 | 33200530@N04-1320 1 33 26 3 24 0 7
50 | 24761036@N00-798 32 19 33
51 | 21160385@N02-560 9 14 12
52 | 10966462@N02-147 27 6 15 8
53 | 23806189@N00-721 28 19 36 14 55 22
54 | 23806189@N00-720 34 5 22 8
55 | 79925938@N00-3963 34 26 9
56 | 69931111@N00-3408 1 2 51 56
57 | 67774014@N00-3338 33 3 22
58 | 49456569@N06-2301 1 5 22 4
59 | 49456569@N06-2302 1 5 22
60 | 8429346@N03-4160 33 22 0 4
61 | 89743353@N00-4450 36 2 8 12
62 | 89165847@N00-4431 5 0 4
63 | 45676902@N00-2101 27 30 25 56
64 | 75873727@N00-3626 2 14 5 0 7
65 | 84987970@N00-4205 11 3 54 7
66 | 12284993@N00-198 1 10 18 5
67 | 14021337@N00-291 1 6 15 5 0 66 8
68 | 9433773@N08-4817 1 10 36 3 24 5
69 | 58673657@N07-2857 1 26 36
70 | 48583839@N07-2268 1 19 36 3 5 81 0 4
71 | 15213592@N03-385 19 30 0 4
72 | 87355829@N00-4316 32 33 26 2
73 | 87393953@N00-4368 28 30 5 0
74 | 57768536@N05-2700 20 1 32 17 38 18 0
75 | 99804259@N00-5096 19 58 12
76 | 60451415@N00-2912 18 21 25 0
77 | 36656940@N00-1593 42 2 3 8
78 | 13237839@N00-241 57 73 59
79 | 13237839@N00-245 28 36 9 2 14 39 0
80 | 27759575@N05-961 6 31 52 73 59
81 | 13233236@N04-232 0 4 12
82 | 27759575@N05-966 17 2 22 8
83 | 10559879@N00-64 20 1 2 18
84 | 10559879@N00-63 17 33 18
85 | 43929620@N05-1984 3 18 14 0
86 | 10936890@N00-145 19 18 16 5 53 22 4 8
87 | 43929620@N05-1982 38 63 61
88 | 26071009@N04-912 35 3 14 5 25 0
89 | 10681948@N00-129 45 36 18
90 | 19447876@N00-443 19 5 0
91 | 19447876@N00-445 14 73 59 7
92 | 33062815@N00-1315 32 30 3 4
93 | 84987970@N00-4210 2 16 44
94 | 84987970@N00-4213 65 47 54
95 | 84987970@N00-4212 34 1 17 26 2 49 58 8
96 | 84987970@N00-4214 1 47 38 53 12
97 | 24510946@N07-779 35 24 14
98 | 24510946@N07-778 1 3 0
99 | 25287507@N02-835 1 19 3 5
100 | 7878308@N04-3878 45 16 0
101 | 8329215@N04-4133 28 15 14 4
102 | 47980775@N02-2185 29 62 25
103 | 46778425@N00-2144 3 24 0 69
104 | 8329215@N04-4134 35 9 11 24 14 5 0 7 4
105 | 28067218@N00-1017 20 33 5 7 4
106 | 77152181@N06-3758 26 36 11 2 3
107 | 77152181@N06-3757 3 5 22 0 4
108 | 99395734@N08-5062 5 4 8
109 | 77152181@N06-3752 1 6 7
110 | 77152181@N06-3751 35 6 11
111 | 77152181@N06-3750 11 0 4
112 | 9662258@N02-4890 20 68 19 9 14 5 21 40 0
113 | 76395609@N00-3665 20 18 0
114 | 92828317@N00-4710 32 42 27 3
115 | 26085795@N02-916 20 34 28 17 33 29 3 51 18 50 8
116 | 87109756@N02-4290 20 32 33 18 5 22
117 | 30342939@N00-1203 48 28 1 32 17 19 33 9 30 2 3 50 24 14 5 43 0 7 4 12
118 | 99109486@N00-5049 9 30 39
119 | 71088526@N00-3470 9 24 14 39 63 7
120 | 97867688@N00-4935 64 11 51
121 | 8107002@N03-4069 45 6 36 0 4
122 | 87393953@N00-4369 37 7 4
123 | 30206889@N00-1149 30 64 11 24 16 5 55 40 25 53 0 7
124 | 14197203@N05-339 1 22 0
125 | 14197203@N05-337 35 20 18
126 | 64015960@N00-3134 17 2 25
127 | 44690184@N07-2075 19 24 8
128 | 44690184@N07-2071 19 6 24 7 4
129 | 49503156729@N01-2338 6 49 54
130 | 73014677@N05-3513 54 5 0
131 | 99804259@N00-5067 34 42 29 47
132 | 45904697@N05-2111 13 3 0
133 | 63022030@N00-3017 20 45 53 4
134 | 48453097@N00-2254 20 18 0
135 | 99804259@N00-5065 11 31 5 22 0
136 | 91872843@N00-4689 5 7 4
137 | 8281413@N06-4127 14 5 0
138 | 99804259@N00-5066 47 16 63
139 | 80373436@N00-4053 25 0 7
140 | 70995385@N00-3417 14 0 4
141 | 32394656@N00-1284 33 49 0
142 | 22670857@N03-665 3 40 25
143 | 35468141938@N01-1477 28 23 36 11 0
144 | 93012314@N00-4742 1 31 59 7
145 | 52952793@N00-2515 48 9 39 43 12
146 | 52952793@N00-2517 64 76 24 14 31 52 25 7
147 | 14481533@N00-368 1 19 2 15 24 5 0 4 8
148 | 52952793@N00-2519 28 1 26 36 2 0
149 | 25869855@N04-895 0 66 59
150 | 50556076@N00-2379 5 22 0
151 | 12238277@N04-191 70 36 14 0 61
152 | 89864780@N00-4454 35 11 0
153 | 28135215@N00-1042 6 13 15 8
154 | 82698298@N00-4124 76 31 0 7
155 | 32165728@N00-1277 65 62 82 53
156 | 58554451@N00-2850 77 25 0
157 | 42583704@N00-1935 68 28 1 17 36 9 30 2 24 14 5 22 0 7 4
158 | 42583704@N00-1936 27 2 15 8
159 | 42583704@N00-1938 35 45 47 9 11 3 15 76 24 14 16 5 44 53 22 0 63 7
160 | 58301516@N00-2846 1 10 3 18 22
161 | 48264126@N00-2225 13 7 4
162 | 51928417@N00-2430 11 2 66 7
163 | 24761036@N00-785 1 6 36 15
164 | 110608682@N04-150 30 52 0 7
165 | 71088526@N00-3467 3 15 21 25 0 80 4
166 | 41894181187@N01-1925 14 0 4
167 | 87584208@N00-4376 45 34 13 56
168 | 60444977@N00-2908 48 1 32 14 5 22 0 4 8
169 | 60444977@N00-2909 5 0 4
170 | 60444977@N00-2907 35 1 32 33 3 49 15 24 25 0
171 | 28964800@N03-1087 17 26 3 22
172 | 34948727@N00-1397 1 19 2 5 25
173 | 47988174@N04-2220 1 9 4 12
174 | 77235006@N00-3772 1 19 33 3 22 8
175 | 77235006@N00-3771 24 39 12
176 | 77235006@N00-3775 1 3 24 66
177 | 77235006@N00-3774 62 55 40
178 | 86292040@N00-4245 10 2 50 0
179 | 86292040@N00-4244 10 2 50
180 | 53701455@N00-2556 20 16 52
181 | 41894169655@N01-1918 15 14 8
182 | 12284993@N00-200 20 19 2
183 | 12284993@N00-202 20 34 5 0 4
184 | 76726772@N00-3736 9 25 0 4 8
185 | 76726772@N00-3735 62 55 40
186 | 14197203@N05-345 3 5 22
187 | 14197203@N05-342 1 3 15 0 7 4
188 | 87584208@N00-4375 1 9 8
189 | 90716728@N00-4468 3 39 22
190 | 10896675@N00-143 49 15 5 62 0 7
191 | 25287507@N02-836 49 54 18
192 | 25287507@N02-837 34 17 3 5 8
193 | 29174998@N05-1101 1 2 3
194 | 29465723@N00-1117 32 38 7
195 | 44124450371@N01-2032 34 6 2 49 18 15 44 0 7
196 | 84987970@N00-4211 20 1 9 3 22 7 4
197 | 28527865@N05-1066 3 39 5 0 61
198 | 97972855@N00-4982 14 0 7
199 | 63287673@N00-3062 35 11 0
200 | 52952793@N00-2527 9 14 0 7 4
201 | 52952793@N00-2526 48 20 38 18 22
202 | 48889122860@N01-2289 32 19 33 50 5 0 4
203 | 97972855@N00-4980 15 5 0 7 4 8
204 | 7640968@N02-3668 1 33 38 18 4
205 | 29728054@N00-1119 11 2 5 0
206 | 63287673@N00-3047 22 0 4
207 | 46421193@N07-2126 0 73 7
208 | 44669264@N06-2066 40 4 12
209 | 53836246@N00-2561 20 1 36 2 3 18 5 4
210 | 57841116@N03-2733 19 3 0
211 | 57841116@N03-2735 62 55 40
212 | 82397118@N00-4113 3 0 4
213 | 22670857@N03-614 11 52 0
214 | 7666975@N03-3671 29 15 16 53 0 8
215 | 22670857@N03-611 5 0 4
216 | 94482242@N00-4842 28 26 47 49
217 | 68529605@N00-3368 9 3 14 0 4
218 | 58085609@N00-2826 27 14 31 59
219 | 25976134@N04-902 33 36 3 5
220 | 57841116@N03-2789 17 19 12
221 | 84987970@N00-4209 34 1 19 26 6 36 47 2 49 8
222 | 82698298@N00-4123 1 3 5
223 | 84987970@N00-4202 13 11 2 12
224 | 84987970@N00-4203 18 4 8
225 | 84987970@N00-4206 28 6 36 11 8
226 | 84987970@N00-4204 1 36 11 2
227 | 39916636@N00-1786 3 5 22
228 | 54086610@N00-2575 14 5 25 0 7
229 | 87355829@N00-4315 10 32 33
230 | 87355829@N00-4314 17 33 0
231 | 54086610@N00-2572 9 72 75 4
232 | 63238390@N00-3034 9 0 4
233 | 75286866@N00-3600 45 10 11
234 | 89165847@N00-4440 2 51 69 12
235 | 8249050@N02-4121 49 18 43
236 | 41312242@N00-1906 35 9 24 7
237 | 28067218@N00-1023 1 6 15
238 | 93003266@N04-4726 20 68 28 1 2 3 38 22
239 | 18935424@N00-438 35 32 19 11 24 7 4
240 | 41725486@N08-1914 19 0 61 7
241 | 30265340@N00-1191 35 28 32 6 15 24 5 8
242 | 93987037@N00-4786 19 27 9 18 24 14 5 81 0 7 4
243 | 87801055@N06-4396 9 11 57 24
244 | 77152181@N06-3745 20 10 18 16
245 | 77152181@N06-3747 1 6 15
246 | 77152181@N06-3748 11 76 31 66 7 4
247 | 47334482@N00-2177 27 2 14 5 0 63 4
248 | 87801055@N06-4399 6 49 15
249 | 30265340@N00-1172 35 1 36 11 3 24 22 7
250 | 43929620@N05-2005 2 15 0
251 | 30265340@N00-1170 6 2 15 8
252 | 82698298@N00-4126 28 3 5 22 4
253 | 30265340@N00-1176 1 6 18
254 | 71088526@N00-3468 20 32 19 2 31 5 22
255 | 62744044@N00-3004 1 2 0 4
256 | 63357531@N00-3092 5 22 0
257 | 63357531@N00-3091 1 26 6 5
258 | 62744044@N00-3002 13 3 8
259 | 63357531@N00-3094 1 26 2
260 | 97867688@N00-4922 32 33 5 0
261 | 76225887@N00-3659 14 0 7 4
262 | 97867688@N00-4929 19 69 12
263 | 21225669@N00-562 2 3 61
264 | 43408458@N08-1956 2 38 22
265 | 13956722@N06-290 20 66 4
266 | 78518246@N00-3846 35 34 22
267 | 81697278@N02-4085 19 5 8
268 | 78518246@N00-3844 42 9 11
269 | 9438227@N06-4828 33 3 15 5 0
270 | 55579714@N00-2646 6 2 0 8
271 | 31058815@N00-1231 5 40 25
272 | 38331851@N00-1724 1 15 39
273 | 57841116@N03-2771 32 22 7
274 | 36424619@N00-1562 15 5 79 59
275 | 45904697@N05-2109 36 2 12
276 | 72744226@N00-3506 1 36 2
277 | 72744226@N00-3505 9 30 0 7
278 | 99804259@N00-5070 2 83 8
279 | 28461660@N00-1062 32 9 24 7 4
280 | 28461660@N00-1063 27 36 47 16 4
281 | 25287507@N02-841 28 1 32 36
282 | 44204242@N04-2059 1 5 0
283 | 25369515@N00-856 11 3 7
284 | 75546495@N03-3609 1 49 5 52 0 66 7 4
285 | 75546495@N03-3607 28 1 32
286 | 24413182@N00-765 13 24 14 7
287 | 24413182@N00-766 1 3 38 5 22 0 7 4
288 | 40742367@N04-1895 27 30 59
289 | 40742367@N04-1894 1 17 14 31 43 53 4 8
290 | 44124401641@N01-2030 36 13 5
291 | 63357531@N00-3072 32 54 5 22 0 4
292 | 86589646@N00-4261 1 19 42 29 6 36 3 15 5 25 0
293 | 89743353@N00-4449 17 30 12
294 | 83029300@N00-4130 24 14 25 4
295 | 82256568@N03-4106 19 3 14 5 22 0 4
296 | 82256568@N03-4107 24 14 7
297 | 54352459@N03-2598 1 0 7
298 | 82256568@N03-4108 24 5 7
299 | 31148157@N00-1249 11 24 0
300 | 77967821@N00-3811 3 15 5 4
301 | 74693095@N00-3568 1 37 2
302 | 54352459@N03-2608 29 3 51
303 | 44124479801@N01-2040 3 7 12
304 | 31148157@N00-1240 28 17 19
305 | 57990672@N04-2818 24 5 25 0 7
306 | 31148157@N00-1245 28 17 26 0
307 | 16767930@N05-392 67 23 11 57 24 31 73 66 7 4
308 | 44259454@N00-2063 11 24 66
309 | 97064055@N00-4902 11 24 7
310 | 58196392@N00-2839 15 4 8
311 | 42583704@N00-1940 20 57 3 18 31 0 66
312 | 68892129@N00-3387 11 78 31 5 0
313 | 24998164@N06-819 35 19 9 24 14 7 4
314 | 14136614@N03-313 1 29 6
315 | 14136614@N03-311 1 19 2 5
316 | 24998164@N06-817 24 14 5 0 7 4
317 | 41894175689@N01-1921 2 3 15 5
318 | 41894175689@N01-1923 1 11 54 5
319 | 30628871@N00-1217 32 50 5 0
320 | 89165847@N00-4436 70 1 2 3 38 50 24 14 21 25 0 61 80 7 4
321 | 69041444@N00-3395 35 24 0
322 | 37086457@N00-1677 20 5 0
323 | 84385698@N00-4171 45 17 6 0 7
324 | 94520611@N05-4852 3 5 40 52 7 4
325 | 34948727@N00-1386 34 11 24 0 7
326 | 53840569@N07-2566 29 6 8
327 | 53840569@N07-2567 35 11 7
328 | 29954226@N00-1134 19 13 15 0
329 | 22131083@N08-587 70 21 63
330 | 66172503@N00-3248 35 34 27 11 2 3 18 15 5 22 0 7 4
331 | 77235006@N00-3767 20 3 18 5 44
332 | 58589111@N04-2852 15 52 25
333 | 8473570@N02-4182 37 2 5
334 | 8473570@N02-4183 32 2 78 31
335 | 99804259@N00-5101 2 58 43
336 | 8473570@N02-4185 11 2 39 44 0
337 | 48453097@N00-2255 20 1 15
338 | 76726772@N00-3729 35 24 40 52 7 4
339 | 96256161@N00-4887 45 34 16 53
340 | 49787588@N00-2345 1 3 15 5 22 0 4 8
341 | 72562013@N06-3478 14 5 7
342 | 49503207397@N01-2340 1 3 18 0
343 | 49503207397@N01-2341 48 16 43
344 | 25963075@N04-901 35 11 18
345 | 24122821@N05-749 15 44 0
346 | 87356726@N00-4328 38 0 63
347 | 31124964@N00-1233 30 3 49 53
348 | 52618473@N02-2495 15 14 7
349 | 32446767@N03-1286 24 14 4
350 | 37103729@N02-1679 36 2 3 5 22
351 | 86589646@N00-4257 1 3 0
352 | 8485766@N07-4198 24 31 66 7
353 | 79235216@N00-3891 38 18 44 25
354 | 79235216@N00-3890 28 1 6 36 30 2 54 51 15 50
355 | 66883888@N00-3274 27 6 31
356 | 24413182@N00-763 1 9 30 3 14 5 7
357 | 36330826404@N01-1521 28 32 19 2 5 44 22 0 4
358 | 36330826404@N01-1522 20 1 30 2 40 25 7
359 | 11674539@N06-166 1 24 7
360 | 93384183@N00-4749 35 28 32 29 2
361 | 22670857@N03-607 34 2 15
362 | 91821745@N00-4682 5 0 4
363 | 40082311@N00-1823 17 3 18
364 | 40082311@N00-1820 28 3 18
365 | 40082311@N00-1826 3 38 18
366 | 40082311@N00-1827 20 17 3 38 18
367 | 40575018@N07-1854 32 5 22
368 | 96155869@N00-4885 17 19 33 51 12
369 | 80021819@N00-4049 29 36 2
370 | 51986774@N00-2455 3 50 22
371 | 82955378@N05-4129 29 2 3 15 5 44 56
372 | 98999190@N00-5047 65 32 19 22 7 4
373 | 68529605@N00-3370 28 33 12
374 | 21929094@N02-582 15 16 0
375 | 8755651@N05-4372 46 6 4 8
376 | 92496717@N00-4698 35 43 4
377 | 57841116@N03-2808 38 18 44
378 | 82155102@N07-4105 32 33 4
379 | 63357531@N00-3084 1 3 0
380 | 94327650@N03-4790 42 15 8
381 | 69539023@N00-3398 48 32 26 36 5 0
382 | 42656687@N00-1941 1 2 50
383 | 49975776@N07-2352 20 1 6 57 3 14 5 25 0 63 4
384 | 97867688@N00-4915 26 27 9 2 15 79 0 69 12
385 | 37912374670@N01-1703 18 5 22
386 | 97972855@N00-4983 19 3 18 16 44 12
387 | 10195518@N02-14 62 55 21 66 61 7
388 | 66647015@N06-3271 3 76 63 7
389 | 26670541@N07-931 28 42 26 6 2 8
390 | 79925938@N00-3977 34 26 9
391 | 9438227@N06-4832 17 29 30 16
392 | 57841116@N03-2747 50 44 56
393 | 24453935@N00-771 24 0 66 7
394 | 44925192@N00-2079 28 1 32 26 9 11 3 14 5 22 0
395 | 44925192@N00-2078 13 64 7
396 | 19289077@N00-440 34 19 33 49 15 22 7 4 8
397 | 31043650@N04-1230 33 2 21
398 | 97304820@N00-4906 2 5 22 0
399 | 97304820@N00-4907 34 1 42 29 6 36 3 15 5 22 0 8 12
400 | 33195950@N00-1318 35 42 29 27 3 15 24 22 0 73 7 8
401 | 63287673@N00-3042 35 61 7
402 | 22853276@N05-681 3 38 61
403 | 22853276@N05-680 25 0 59 7
404 | 94482242@N00-4849 20 6 8
405 | 22853276@N05-686 52 0 7
406 | 22853276@N05-685 17 9 30 14 25 0 4 12
407 | 49462309@N04-2323 31 25 7
408 | 83332749@N00-4141 35 17 9 11 44 0 7
409 | 92496717@N00-4702 77 21 7 4
410 | 56935359@N00-2680 42 26 4
411 | 36656940@N00-1583 26 2 3
412 | 48737213@N00-2282 64 11 24
413 | 13233236@N04-228 9 2 0
414 | 13233236@N04-224 32 19 18
415 | 13233236@N04-225 1 32 33 26 36
416 | 68183366@N05-3354 34 47 8
417 | 39297387@N00-1769 9 2 14 0
418 | 43929620@N05-1990 1 36 2 8
419 | 10559879@N00-78 42 6 15
420 | 60422641@N04-2905 38 15 61
421 | 68892129@N00-3391 11 5 7
422 | 24998164@N06-820 28 1 32 19 33 26 36 0 4
423 | 30628871@N00-1208 19 5 0 4
424 | 95464641@N00-4868 1 17 19
425 | 31404528@N02-1265 2 39 0 4
426 | 56140378@N07-2672 20 38 15 5
427 | 56140378@N07-2671 6 49 8
428 | 22852813@N04-676 9 30 50 16 53
429 | 12505664@N00-206 35 0 7
430 | 36521966482@N01-1563 30 0 4
431 | 23809369@N08-724 20 27 2 18 16 44
432 | 23809369@N08-725 33 26 36 31
433 | 23809369@N08-723 32 19 33 11 3 5 0 7
434 | 36521966482@N01-1565 13 14 7
435 | 57608438@N08-2690 35 3 7 4
436 | 57608438@N08-2691 9 30 24 5 25
437 | 99804259@N00-5064 35 38 14 31
438 | 49992082@N08-2354 1 6 3 15
439 | 77152181@N06-3762 17 5 0 7 4
440 | 77152181@N06-3761 32 17 19 33 11 3 24 5 0 7
441 | 84987970@N00-4199 65 13 38 18 61
442 | 61579728@N05-2956 68 64 11 3 12
443 |
--------------------------------------------------------------------------------
/Trip/data/Edin_set.dat:
--------------------------------------------------------------------------------
1 | 7512717@N06-3700 0 10 20
2 | 31397036@N00-1656 4 8 1
3 | 96423179@N00-4823 6 5 1
4 | 16048448@N00-449 4 3 0 6 5 1
5 | 67975030@N00-3214 4 11 3 0 15 13 6 14 7 5 16 19 1
6 | 60611127@N00-2978 4 2 6 14 5 1
7 | 60611127@N00-2979 17 11 3 0 6 1
8 | 34427470616@N01-1839 2 0 10 1
9 | 88185060@N00-4358 11 13 6 16 19
10 | 26436271@N00-1294 3 6 7
11 | 28543475@N08-1495 2 8 9
12 | 37574483@N07-2072 2 7 5
13 | 88185060@N00-4352 0 6 16
14 | 88185060@N00-4353 0 6 5
15 | 88185060@N00-4355 2 6 8 7
16 | 88185060@N00-4356 2 7 5
17 | 88185060@N00-4357 11 0 7 19
18 | 11154928@N04-150 0 6 8 5 1
19 | 39745335@N06-2137 0 7 16 1
20 | 72429059@N00-3372 2 0 6 8
21 | 47616704@N00-2495 3 0 6 14 7 1
22 | 47616704@N00-2496 4 0 1
23 | 56617401@N00-2845 3 0 6 14 5 1
24 | 60006733@N05-2953 2 3 9
25 | 56617401@N00-2840 4 3 5
26 | 54022106@N00-2749 11 19 9
27 | 12771303@N00-240 11 13 10 19
28 | 29057345@N04-1522 2 0 21
29 | 65958138@N00-3138 4 0 6 1
30 | 90113037@N00-4475 2 6 8
31 | 60206297@N00-2969 23 0 8
32 | 60206297@N00-2968 4 20 1
33 | 32812410@N06-1774 4 2 0 6 7 5 1
34 | 19473027@N00-758 3 6 16
35 | 51367743@N00-2651 4 17 3 13 14
36 | 27948364@N00-1427 2 0 6 14 1
37 | 84533614@N00-4210 15 12 16
38 | 59446027@N00-2942 4 0 13 8 1
39 | 27948364@N00-1428 4 2 6
40 | 88185060@N00-4369 4 2 0 13
41 | 81795065@N00-4120 15 13 16
42 | 78461351@N00-3968 3 0 7 5 1
43 | 42629370@N00-2272 17 11 15 13 16
44 | 42629370@N00-2273 2 0 1
45 | 42629370@N00-2270 3 0 5 1
46 | 42629370@N00-2271 4 2 1
47 | 92742787@N04-4702 2 0 15 1
48 | 79335735@N06-4020 3 6 14
49 | 8043663@N04-4053 4 13 6 7
50 | 25984556@N07-1271 6 8 1
51 | 25984556@N07-1270 4 3 0 10 6 14 8 1
52 | 44124427374@N01-2379 11 0 6 7 21 1
53 | 10354154@N05-46 14 7 1
54 | 20587560@N00-924 2 10 5
55 | 52439376@N00-2683 11 3 8 5 1
56 | 7150832@N06-3324 17 15 13
57 | 33278177@N00-1799 4 11 7
58 | 48762356@N00-2528 3 0 1
59 | 30963564@N00-1607 17 15 14
60 | 98833136@N00-4989 0 15 13
61 | 20799701@N00-931 4 17 2 26 15 13 8 7 5 18 16 1
62 | 35034357529@N01-1884 0 6 14
63 | 20483509@N00-826 11 3 0 6
64 | 36539950@N00-1989 15 10 16
65 | 63079834@N00-3076 4 7 1
66 | 30671962@N05-1575 0 8 18
67 | 11037560@N04-142 2 0 9
68 | 28355390@N03-1452 0 13 6 14 16 1
69 | 18966576@N00-542 3 0 14 7 1
70 | 10091295@N02-4 15 8 9
71 | 42027916@N00-2230 6 5 21
72 | 42027916@N00-2231 4 7 1
73 | 37493306@N00-2065 2 3 0 14 8 9 1
74 | 42027916@N00-2233 6 9 1
75 | 71812696@N00-3344 3 12 5
76 | 28638538@N00-1508 4 3 0 1
77 | 16497759@N07-481 11 0 21 19
78 | 10335112@N05-41 4 17 3 6 1
79 | 10335112@N05-42 15 13 16
80 | 23363966@N02-1097 17 11 3 1
81 | 23363966@N02-1094 14 7 1
82 | 23363966@N02-1095 17 11 6
83 | 40139809@N00-2151 17 0 16 1
84 | 90514086@N00-4491 4 0 6 14 21 1
85 | 28543475@N08-1507 10 12 1
86 | 23351536@N07-1075 2 0 12 8 1
87 | 23351536@N07-1074 3 12 9
88 | 23351536@N07-1077 2 0 8 9 1
89 | 77321230@N00-3882 4 0 10 1
90 | 77036619@N00-3811 2 3 0 6 8 5 1
91 | 21712240@N00-969 4 11 12 5 19 9 1
92 | 7512717@N06-3601 0 9 1
93 | 12982579@N00-266 3 0 14 5 16 1
94 | 7512717@N06-3609 3 0 8
95 | 62136333@N02-3056 4 17 11 3 15
96 | 23351536@N07-1080 2 3 15 5 16 9
97 | 40230716@N03-2175 4 3 0
98 | 23351536@N07-1083 3 15 13 12 14 16
99 | 67008765@N00-3181 17 11 19
100 | 54458860@N00-2791 17 11 0
101 | 67008765@N00-3183 4 3 0 6 14 5 1
102 | 23351536@N07-1087 4 2 8
103 | 50802125@N00-2618 4 0 6
104 | 7359973@N08-3417 2 0 6 7 9 1
105 | 32481757@N04-1761 4 17 11 0 8 16 1
106 | 80997939@N00-4083 4 0 8
107 | 14360576@N06-365 4 2 3 0 15 13 10 6 8 7 16 1
108 | 14360576@N06-366 23 18 22
109 | 77704116@N00-3896 3 6 7 5 1
110 | 14360576@N06-368 0 20 19
111 | 25396215@N00-1219 3 0 6 14 7 1
112 | 88185060@N00-4362 0 6 1
113 | 53836246@N00-2718 17 11 3 15
114 | 18276635@N00-528 4 0 8 1
115 | 18276635@N00-529 13 6 7
116 | 93511097@N00-4722 0 8 1
117 | 19473027@N00-652 14 5 1
118 | 29978367@N00-1542 4 17 2 8 1
119 | 22607526@N00-1027 2 0 8 1
120 | 22607526@N00-1029 4 2 1
121 | 95974557@N08-4812 3 14 5
122 | 24401095@N00-1160 4 2 3 12
123 | 36749913@N00-1996 10 6 7
124 | 16190595@N05-464 0 8 1
125 | 60006733@N05-2950 3 15 13 14 16
126 | 64119933@N00-3113 11 15 13 6 7
127 | 50551683@N00-2608 4 2 0 6
128 | 11261325@N03-164 4 17 11 0 5 1
129 | 32263295@N00-1717 3 12 6 9
130 | 32263295@N00-1714 6 5 9
131 | 23480782@N05-1103 4 2 0
132 | 42348675@N00-2254 15 13 9
133 | 19909714@N00-807 4 2 1
134 | 20545573@N00-913 6 14 7
135 | 9047223@N07-4490 26 7 1
136 | 24697990@N00-1181 11 0 19 1
137 | 28536568@N00-1490 17 11 5
138 | 19909714@N00-808 3 6 5
139 | 27681741@N06-1351 0 15 13 16
140 | 20483509@N00-867 0 6 14
141 | 32111003@N00-1703 3 6 14 1
142 | 43619118@N04-2335 4 3 6
143 | 7790703@N02-3920 4 17 11 0
144 | 73124528@N00-3394 4 17 11
145 | 43619118@N04-2339 4 3 0 14 1
146 | 80052783@N00-4046 4 17 11 0 5 16 1
147 | 7790703@N02-3928 0 6 1
148 | 23108377@N07-1051 11 3 13 6 8 24
149 | 46980778@N07-2463 4 3 1
150 | 36424619@N00-1970 2 11 23 16
151 | 99185699@N00-5001 3 14 1
152 | 32693718@N07-1765 2 7 5
153 | 31450835@N05-1669 2 3 0 14 8 9 1
154 | 21254955@N04-945 17 0 1
155 | 21254955@N04-944 4 0 1
156 | 27462949@N00-1338 11 18 21
157 | 27462949@N00-1339 4 2 0 15 5
158 | 61897811@N00-3038 17 11 3 0 12 1
159 | 7880512@N06-3985 23 8 19
160 | 61369090@N07-2998 0 8 5
161 | 38943965@N05-2109 4 3 0 21 1
162 | 30129139@N04-1560 2 13 16 9
163 | 24183489@N00-1146 2 15 13
164 | 8684258@N02-4295 4 2 0 13 16 1
165 | 7512717@N06-3654 4 0 10 8 1
166 | 7512717@N06-3657 3 6 14 1
167 | 7512717@N06-3656 3 6 9
168 | 8308954@N06-4157 4 0 10
169 | 11037560@N04-101 11 15 5
170 | 43405950@N07-2319 2 3 10 14 7 1
171 | 43405950@N07-2318 2 0 1
172 | 9813542@N08-4955 4 3 0 6 14 5
173 | 25389244@N00-1216 4 17 11 0 5 1
174 | 62136333@N02-3049 4 3 1
175 | 44124372821@N01-2370 0 6 16
176 | 44124372821@N01-2371 4 2 0
177 | 62136333@N02-3046 4 3 15 7 1
178 | 39927207@N00-2138 4 0 8 5
179 | 25148740@N03-1203 4 17 11 0 15 13 1
180 | 63679518@N00-3099 4 7 5
181 | 28404646@N00-1483 11 5 16 19
182 | 25396215@N00-1220 3 12 8 5 9
183 | 84533614@N00-4213 23 3 0 6 5
184 | 16048448@N00-450 23 15 13 7
185 | 84533614@N00-4212 2 9 1
186 | 49998283@N00-2578 4 0 5
187 | 15462727@N07-438 4 2 3 0 6 7 5
188 | 43126182@N00-2307 2 0 5
189 | 19153501@N00-543 17 11 13 16
190 | 46237053@N00-2436 15 5 1
191 | 34961066@N00-1876 11 0 1
192 | 86752930@N00-4292 3 0 6
193 | 27195496@N00-1323 11 3 0
194 | 34792843@N04-1875 0 8 7 1
195 | 51035737977@N01-2639 11 0 1
196 | 58554451@N00-2893 17 11 0 1
197 | 58034287@N00-2879 3 0 6 14 5 1
198 | 27718315@N02-1367 0 5 1
199 | 24616128@N00-1173 4 0 6 5 1
200 | 36749913@N00-2003 4 2 10 1
201 | 8349002@N04-4168 4 17 11 3 6 7 5 1
202 | 54022106@N00-2752 2 3 1
203 | 36749913@N00-2007 15 13 16
204 | 84035351@N00-4180 4 0 7 1
205 | 25034321@N05-1201 3 10 6 7
206 | 97768382@N00-4951 4 17 11 3 0 16 1
207 | 21916524@N07-981 17 11 8
208 | 8520591@N03-4227 0 5 21 9 1
209 | 8057454@N06-4066 17 11 13 5 16
210 | 37996646802@N01-2091 4 23 0
211 | 37996646802@N01-2092 3 6 14
212 | 26226560@N02-1282 2 0 6 8
213 | 26226560@N02-1281 0 8 1
214 | 50833784@N00-2625 4 17 2 11 3 7 5 16 19 1
215 | 51367743@N00-2647 17 11 0 15 13 6 8
216 | 57192952@N00-2850 4 3 14 1
217 | 8997662@N06-4468 15 13 6 14 5 16 1
218 | 8997662@N06-4469 4 5 1
219 | 38783516@N00-2108 2 3 0
220 | 51367743@N00-2648 4 14 1
221 | 47495910@N05-2491 3 0 14 8 1
222 | 47495910@N05-2493 4 2 0 15 13 10 8 7 9
223 | 47495910@N05-2492 4 0 1
224 | 25302607@N00-1208 4 3 0 15 13
225 | 71668585@N00-3329 4 0 1
226 | 19473027@N00-581 0 10 7 5
227 | 9352211@N06-4727 4 3 0 10 7 1
228 | 14146962@N07-349 4 3 1
229 | 55826454@N00-2820 4 3 0 15 10 6 5 16 9 1
230 | 12495774@N02-219 2 11 23 0 1
231 | 99962592@N00-5027 4 3 6
232 | 71401076@N00-3308 4 10 20 14 7
233 | 9352211@N06-4728 0 6 1
234 | 20483509@N00-907 14 7 9
235 | 77047514@N00-3862 2 0 6 14
236 | 66227636@N04-3140 3 0 1
237 | 77047514@N00-3825 2 12 20 6 7 5 16
238 | 90001203@N00-4474 0 10 8 1
239 | 90001203@N00-4473 4 0 1
240 | 31803571@N02-1683 17 16 1
241 | 51035639266@N01-2633 4 10 6 5
242 | 27817416@N02-1392 0 7 5
243 | 27817416@N02-1394 0 6 1
244 | 20483509@N00-838 11 6 14 5 1
245 | 20483509@N00-837 17 11 19
246 | 42072165@N08-2237 4 0 1
247 | 37029314@N00-2054 4 17 11 0 10 6 14 7 1
248 | 37029314@N00-2055 4 15 13 8 16
249 | 85217508@N00-4229 4 0 6 5 1
250 | 48277923@N00-2508 4 2 0 1
251 | 8110030@N05-4087 17 23 0 15 13 6 14 5 16 1
252 | 33613749@N00-1811 4 0 6 1
253 | 7512717@N06-3681 0 6 21
254 | 30099537@N02-1555 4 3 1
255 | 7512717@N06-3684 0 21 9
256 | 88185060@N00-4346 0 10 6
257 | 9351020@N06-4719 0 5 1
258 | 9351020@N06-4718 4 0 6
259 | 71088526@N00-3303 2 6 14 1
260 | 61663261@N00-3032 15 13 14 5
261 | 71088526@N00-3301 17 23 3 0 6 14 7 5
262 | 7512717@N06-3667 0 6 14 9
263 | 34427470616@N01-1862 2 0 9 1
264 | 7512717@N06-3505 0 6 7 1
265 | 32054787@N00-1700 17 0 8
266 | 7512717@N06-3616 3 6 21
267 | 7512717@N06-3611 0 10 6 1
268 | 34427470616@N01-1850 0 10 12 18
269 | 7512717@N06-3597 0 6 14
270 | 61635869@N00-3025 4 3 0 14 5 1
271 | 28103568@N00-1432 3 0 13 14 7 5 16
272 | 26835091@N04-1308 12 6 8 9
273 | 68493542@N00-3225 3 0 6 5 22
274 | 15786211@N00-446 4 2 14
275 | 77456049@N00-3895 17 3 0 6 14 5 19 1
276 | 20483509@N00-882 2 0 13
277 | 89187910@N00-4421 2 0 15 13
278 | 20483509@N00-881 11 0 19
279 | 50234824@N07-2587 4 0 7 1
280 | 83308875@N00-4165 2 0 15 13 8 5 16
281 | 83308875@N00-4164 3 6 14 5 1
282 | 65393953@N03-3136 2 0 14 8
283 | 14360576@N06-371 0 6 5 1
284 | 14360576@N06-370 0 15 10 20 5 21 16 1
285 | 86533211@N00-4287 11 8 7 5 19
286 | 14915235@N00-411 2 0 10
287 | 96241281@N00-4818 4 2 3 13 1
288 | 23062920@N00-1046 2 3 0 12 1
289 | 23062920@N00-1045 3 6 7 21
290 | 63238112@N02-3085 0 8 1
291 | 9588476@N02-4799 3 6 7
292 | 29812295@N07-1537 4 2 8 1
293 | 7512717@N06-3701 3 7 5
294 | 12930951@N07-265 3 14 5
295 | 29812295@N07-1538 2 0 13 16
296 | 19473027@N00-705 3 6 7
297 | 92237387@N00-4593 17 23 19
298 | 19473027@N00-702 6 7 1
299 | 31612046@N04-1680 4 11 0 8
300 | 45972156@N04-2428 11 23 3 13 7 19
301 | 45972156@N04-2427 4 2 0 6 1
302 | 22607526@N00-1030 3 6 7
303 | 9156485@N02-4568 3 0 6 14 1
304 | 80021819@N00-4043 4 17 11 0 14 8
305 | 22415346@N06-1017 0 15 13 1
306 | 80021819@N00-4044 15 13 16
307 | 120694055@N06-203 4 2 1
308 | 19473027@N00-682 0 5 16
309 | 19473027@N00-680 3 0 5
310 | 60107315@N00-2963 0 6 7
311 | 52706710@N03-2688 4 2 3 6
312 | 9159505@N02-4574 4 0 1
313 | 48122140@N00-2503 4 2 3 0 15 8 5 1
314 | 7666975@N03-3796 4 2 1
315 | 63458311@N00-3096 4 0 1
316 | 48321512@N06-2514 3 6 7 5 21
317 | 22087304@N07-986 15 13 16
318 | 9588476@N02-4805 7 21 24
319 | 94735786@N00-4744 4 3 6 5 1
320 | 94735786@N00-4745 4 17 2 11 3 10 8 5 9 1
321 | 94735786@N00-4746 4 3 6 5
322 | 21283177@N00-948 17 15 14 16 1
323 | 94735786@N00-4748 4 17 2 3 0 15 6 8 5 21 24 1
324 | 40725466@N03-2185 4 3 6 5 1
325 | 71088526@N00-3302 4 3 1
326 | 43222683@N00-2312 11 0 7 1
327 | 28536568@N00-1487 4 0 5 1
328 | 7633518@N08-3789 4 0 1
329 | 37185577@N00-2060 4 0 7 1
330 | 86765090@N00-4293 11 13 16 19
331 | 27681741@N06-1349 2 7 5
332 | 10655856@N04-82 3 12 14 5 1
333 | 10655856@N04-81 0 10 21 1
334 | 71057941@N00-3288 3 6 8
335 | 7790703@N02-3930 2 10 5
336 | 97534175@N00-4922 4 2 0 1
337 | 97534175@N00-4923 2 0 8 1
338 | 44232237@N03-2388 3 0 6 14 8
339 | 86401769@N00-4277 4 17 6 1
340 | 90585585@N00-4496 0 5 9
341 | 62308216@N00-3059 4 0 1
342 | 90585585@N00-4493 4 3 19
343 | 11613952@N05-185 8 7 1
344 | 50542255@N00-2605 3 10 6
345 | 73173768@N00-3399 4 20 6 14 1
346 | 7512717@N06-3625 2 20 8 9
347 | 7512717@N06-3623 0 8 1
348 | 7512717@N06-3719 4 0 10
349 | 21492331@N03-956 3 7 5 9
350 | 17643132@N00-513 2 0 13 12 5 9 1
351 | 10332960@N03-39 2 0 8
352 | 17643132@N00-514 4 11 0 5 1
353 | 10340081@N02-44 4 2 3
354 | 19663529@N00-800 4 0 5 16
355 | 19663529@N00-802 11 19 1
356 | 58433801@N07-2891 11 6 5
357 | 60444977@N00-2972 3 0 1
358 | 85603833@N00-4239 15 16 9
359 | 73478860@N00-3413 0 5 9 1
360 | 75951171@N02-3776 7 9 1
361 | 58695432@N00-2907 15 13 5
362 | 7512717@N06-3530 0 14 7
363 | 7512717@N06-3651 3 10 6 7 1
364 | 7512717@N06-3650 2 11 6 14
365 | 29624180@N04-1535 11 3 19
366 | 29624180@N04-1533 4 3 0 1
367 | 52683275@N00-2684 4 2 0
368 | 87658636@N00-4316 0 12 1
369 | 27828336@N00-1423 14 7 1
370 | 9878315@N04-4969 0 18 19
371 | 88185060@N00-4335 4 0 20
372 | 87658636@N00-4318 2 0 1
373 | 85217508@N00-4232 13 5 16
374 | 71668585@N00-3340 3 5 19
375 | 9351020@N06-4721 4 5 1
376 | 8326485@N06-4160 4 2 11 0 16 1
377 | 40055757@N00-2144 11 3 5
378 | 71909564@N00-3364 15 13 1
379 | 26680618@N02-1302 0 6 14 9
380 | 48355243@N00-2515 0 6 14
381 | 23518714@N00-1107 4 3 0 6 5 19 1
382 | 12061846@N00-202 4 3 0 15 6 5 1
383 | 43002463@N00-2286 2 3 5
384 | 71668585@N00-3332 6 14 7
385 | 71668585@N00-3335 5 18 21
386 | 7929654@N05-4012 17 11 3 0 7
387 | 7929654@N05-4011 2 11 23 15 13 16
388 | 8997662@N06-4470 17 11 3 6 7 5 19
389 | 37804979@N00-2078 3 5 1
390 | 11037560@N04-98 4 11 0
391 | 11037560@N04-99 11 15 10 6 14 5
392 | 19473027@N00-672 3 0 14 1
393 | 64876844@N00-3122 0 13 8 1
394 | 9159505@N02-4575 17 11 3 15 13 5 16 19
395 | 59271446@N00-2924 6 21 1
396 | 12495774@N02-222 4 2 0 6 7
397 | 48355243@N00-2516 4 2 0
398 | 11881702@N00-194 6 14 8
399 | 11881702@N00-193 4 2 0 8 1
400 | 30409117@N07-1567 2 5 9
401 | 24803078@N00-1186 3 0 6 14 5 1
402 | 24645033@N00-1180 3 13 5 16
403 | 14869313@N00-398 17 11 6
404 | 80649505@N08-4071 4 23 19
405 | 44124427374@N01-2380 17 11 0 6
406 | 58598613@N00-2896 17 11 0
407 | 66434265@N00-3145 4 11 15 13 8 1
408 | 66434265@N00-3146 23 10 8 19
409 | 66434265@N00-3147 2 0 8
410 | 26265986@N00-1287 4 14 7 18 24
411 | 26265986@N00-1286 4 2 7
412 | 31363435@N07-1652 2 0 15 13 8 5 16 1
413 | 31363435@N07-1651 2 0 8 7 5 1
414 | 71445279@N00-3323 2 0 12 6 14 7 9 1
415 | 7310714@N06-3388 11 0 8 1
416 | 7310714@N06-3389 3 0 9
417 | 64332066@N00-3115 4 17 2 0 13 8 5 1
418 | 70384696@N00-3257 4 2 1
419 | 62136333@N02-3047 4 11 3 15 1
420 | 42907325@N00-2280 2 0 16
421 | 83511002@N00-4169 23 13 6 16
422 | 38115734@N00-2095 23 6 7
423 | 7512717@N06-3699 3 6 5
424 | 38115734@N00-2093 4 11 19
425 | 7512717@N06-3696 2 0 8 1
426 | 32329639@N00-1747 11 3 0 6 7 1
427 | 91357983@N05-4557 3 13 10
428 | 41692261@N00-2209 4 0 1
429 | 13518023@N03-274 4 19 1
430 | 13518023@N03-275 4 2 3 14 5 9 1
431 | 44756385@N00-2401 2 11 3 0 1
432 | 44756385@N00-2402 2 0 8 9
433 | 105307156@N06-74 4 17 23
434 | 96241281@N00-4817 0 13 8
435 | 18402635@N00-540 4 2 0
436 | 51272891@N00-2645 2 0 5 16 1
437 | 7512717@N06-3677 0 20 6 7
438 | 8997662@N06-4471 2 11 3 14 7 5 19 1
439 | 97595808@N00-4941 2 12 8 9
440 | 66913692@N00-3155 3 0 16 1
441 | 66913692@N00-3154 4 3 12
442 | 57258627@N00-2853 3 0 14 8 5
443 | 7512717@N06-3572 2 12 9
444 | 77036619@N00-3807 4 17 2 11 3 0 14 8 5 1
445 | 8095506@N07-4080 4 3 0 10 6 7 1
446 | 13016459@N08-267 8 5 16
447 | 7512717@N06-3586 0 6 14 8
448 | 60611127@N00-2983 4 2 0 15 13
449 | 60611127@N00-2981 4 3 0 13 8 1
450 | 60611127@N00-2980 4 3 0 1
451 | 32054787@N00-1692 11 0 8
452 | 42583704@N00-2268 4 3 6 5
453 | 63238112@N02-3080 10 6 7 5
454 | 68121184@N00-3216 0 14 1
455 | 42583704@N00-2267 4 3 14 7 1
456 | 42583704@N00-2266 17 2 11 23 8 5 19
457 | 62744044@N00-3070 3 0 5 1
458 | 97595808@N00-4937 11 15 13 19
459 | 42509361@N05-2262 4 6 5 1
460 | 35703376@N00-1953 4 17 1
461 | 53921762@N00-2723 0 6 7
462 | 77351046@N00-3889 3 0 13 14
463 | 53921762@N00-2727 3 5 1
464 | 47905916@N00-2498 4 17 0 15 8 16 1
465 | 8389569@N08-4176 0 15 5
466 | 42479008@N02-2261 17 11 15 13 16
467 | 41894148532@N01-2217 2 6 14 1
468 | 22583729@N06-1022 0 5 9
469 | 19473027@N00-739 17 2 0 19
470 | 21712240@N00-970 2 15 13 5 16 19
471 | 19473027@N00-701 3 6 5 9
472 | 8389569@N08-4173 3 0 15 8 1
473 | 14057878@N00-311 2 0 1
474 | 14057878@N00-317 0 8 9 1
475 | 14057878@N00-316 3 12 6 7
476 | 19473027@N00-734 11 0 1
477 | 54549576@N00-2795 2 11 19
478 | 75136617@N00-3734 0 6 7
479 | 60006733@N05-2947 4 0 7
480 | 65326158@N00-3135 4 17 0 1
481 | 31191687@N06-1639 3 6 5
482 | 60006733@N05-2949 15 13 5 16
483 | 8590701@N06-4242 4 11 3 0 5 1
484 | 19473027@N00-691 10 20 21 9
485 | 19473027@N00-690 12 8 5 9
486 | 84989911@N00-4220 4 2 11 14 8 9 1
487 | 19473027@N00-695 6 7 1
488 | 19473027@N00-694 3 14 5
489 | 23837403@N03-1122 4 17 15 13 16
490 | 23837403@N03-1121 4 17 13 16
491 | 23837403@N03-1120 6 7 5
492 | 30985799@N00-1627 11 3 19
493 | 30985799@N00-1626 4 2 14 8 1
494 | 7512717@N06-3693 4 0 10 20 8 1
495 | 94869158@N00-4750 3 6 1
496 | 24645033@N00-1178 3 5 21
497 | 24645033@N00-1179 15 13 5
498 | 45972156@N04-2429 0 5 1
499 | 46952961@N00-2461 3 15 13 5 16
500 | 81719291@N00-4117 15 13 16
501 | 8389569@N08-4175 4 17 11 3 15 13 14 7 1
502 | 8389569@N08-4174 4 17 11 3 0 15 13 6 14 5 16 9 1
503 | 79295762@N00-4010 0 5 1
504 | 7512717@N06-3665 3 6 14 5
505 | 7790703@N02-3948 3 6 1
506 | 68838281@N00-3233 3 14 1
507 | 7958312@N08-4031 4 3 0 6 1
508 | 72429059@N00-3371 11 0 1
509 | 96721948@N00-4858 4 11 0 15 13 8 1
510 | 80246494@N00-4051 4 17 11 16 1
511 | 10486919@N00-67 0 6 9
512 | 36101699310@N01-1966 11 3 19 1
513 | 36101699310@N01-1965 3 0 15 6 5
514 | 21116747@N02-937 2 11 9 1
515 | 97206958@N00-4894 4 5 16
516 | 9788232@N03-4952 4 3 5
517 | 22890158@N08-1042 4 17 11 0
518 | 75286866@N00-3758 4 2 3 13 8
519 | 8970099@N04-4456 2 0 8 1
520 | 44036847@N05-2348 3 0 14 1
521 | 8970099@N04-4455 2 0 8
522 | 8286908@N02-4155 4 3 5
523 | 54549576@N00-2794 4 17 11 15 7 16
524 | 27594569@N04-1345 0 8 9 1
525 | 75181076@N00-3748 11 3 13 14 5 16 1
526 | 28404646@N00-1473 2 13 5 9
527 | 10159442@N00-13 4 11 3 0 15 13 6 7 5 16 19 1
528 | 99646216@N00-5017 11 3 6
529 | 99646216@N00-5016 17 11 19
530 | 7512717@N06-3711 3 15 13 6 21 16
531 | 7512717@N06-3709 3 6 14
532 | 11037560@N04-127 6 5 1
533 | 14711846@N00-387 4 17 11 3 0 8 5 1
534 | 7512717@N06-3707 0 14 8 1
535 | 52015062@N00-2673 4 3 0 6 5 1
536 | 73422480@N00-3411 2 12 7
537 | 11037560@N04-122 0 6 8
538 | 84276425@N00-4196 2 11 0 15 7 1
539 | 84276425@N00-4197 2 0 6 8 7 21 9 1
540 | 30420228@N05-1568 4 2 0
541 | 33925347@N05-1821 0 10 6 5
542 | 34792843@N04-1874 4 6 7
543 | 50509003@N00-2602 4 3 0 6 7 1
544 | 50509003@N00-2600 4 3 0 7
545 | 81311832@N00-4095 4 5 1
546 | 77036619@N00-3810 3 0 8 5 1
547 | 7354829@N03-3414 4 0 1
548 | 7354829@N03-3416 4 0 14 8 7 5 21 16 1
549 | 14711846@N00-386 15 13 6
550 | 29172291@N00-1526 4 17 2 11 3 0 15 13 1
551 | 60206297@N00-2967 0 9 1
552 | 78361112@N00-3966 17 0 1
553 | 73788492@N00-3423 4 17 11 23 0 1
554 | 44925192@N00-2404 3 0 6 14
555 | 44925192@N00-2405 17 11 0
556 | 7512717@N06-3604 2 12 9
557 | 73788492@N00-3425 17 11 3 0 6 8 5 1
558 | 73788492@N00-3424 3 15 13 6 7 5 21
559 | 49503132817@N01-2572 0 9 1
560 | 7512717@N06-3644 0 6 8
561 | 13996991@N07-284 4 2 0 1
562 | 13996991@N07-282 15 6 5 16
563 | 50794252@N04-2617 4 6 1
564 | 26052518@N05-1273 0 6 1
565 | 62579590@N00-3064 4 2 19
566 | 60107315@N00-2962 2 11 0
567 | 85318305@N00-4235 4 2 0 12 14 8 5 1
568 | 85318305@N00-4236 2 0 10 12 8 5 9
569 | 92237387@N00-4592 3 0 6 7 5
570 | 29797746@N08-1536 4 17 11 3 0 15 13 6 14 5 16 1
571 | 54022106@N00-2757 2 8 5
572 | 67433276@N05-3199 2 14 1
573 | 10287726@N02-32 17 2 3 0 8 1
574 | 10287726@N02-33 4 2 8 7 1
575 | 23873159@N08-1128 2 8 5 1
576 | 8057454@N06-4065 2 0 1
577 | 10287726@N02-34 13 8 16
578 | 25561968@N00-1226 17 13 16
579 | 53836246@N00-2722 17 11 15 13
580 | 53836246@N00-2720 4 2 0 1
581 | 53836246@N00-2721 0 15 13 14 5
582 | 17505188@N06-512 3 0 16
583 | 27105072@N00-1320 2 9 1
584 | 84035351@N00-4181 0 15 8
585 | 84989911@N00-4219 4 17 11 0 8 7 5 16 1
586 | 19473027@N00-644 0 6 14
587 | 67008765@N00-3180 0 8 1
588 | 81397230@N00-4108 2 0 6 1
589 | 8970099@N04-4457 11 23 0 15 13 16
590 | 8328114@N02-4161 4 3 14 1
591 | 9588476@N02-4804 4 11 0 10 1
592 | 8328114@N02-4162 3 14 1
593 | 30713600@N00-1577 4 2 6 14
594 | 24401095@N00-1158 4 0 6 7 1
595 | 77047514@N00-3840 2 10 12
596 | 77047514@N00-3842 2 11 3 6 16 19
597 | 8503026@N04-4221 4 0 6 1
598 | 27017895@N02-1317 2 8 1
599 | 24931020@N02-1193 4 11 15 13 14 8 5 16 1
600 | 42348675@N00-2249 0 6 8 24
601 | 17424601@N00-507 3 6 7
602 | 42348675@N00-2246 2 0 6 7 1
603 | 77047514@N00-3839 2 10 8 1
604 | 45457437@N00-2421 3 0 13
605 | 7512717@N06-3638 0 6 14
606 | 28370443@N08-1471 17 11 0 16
607 | 29365083@N06-1529 2 0 15
608 | 69349066@N00-3245 6 7 1
609 | 36967808@N00-2052 4 3 14 7 9 1
610 | 43405950@N07-2320 2 5 16
611 | 92134758@N00-4591 3 6 5
612 | 7888746@N08-3991 3 6 8 7
613 | 70938076@N00-3282 2 3 12
614 | 70938076@N00-3285 15 13 16
615 | 70938076@N00-3284 17 11 16
616 | 77047514@N00-3814 0 20 5
617 | 25866217@N00-1264 4 7 1
618 | 25866217@N00-1262 0 7 19
619 | 26276803@N05-1290 3 0 6 7 5 16
620 | 12917962@N00-260 2 12 6 14
621 | 23539578@N06-1111 4 0 6 5
622 | 23539578@N06-1110 4 3 0 5
623 | 12658277@N00-229 4 3 0 13
624 | 7512717@N06-3565 4 0 8
625 | 54069048@N07-2769 3 5 1
626 | 73014677@N05-3381 4 2 11 6
627 | 23351536@N07-1055 2 6 8 9
628 | 50195515@N00-2583 4 3 0 13 6 14 1
629 | 8701764@N02-4297 4 0 6 1
630 | 74691311@N00-3466 4 15 14 16
631 | 43498286@N00-2334 3 6 5
632 | 62136333@N02-3057 2 11 3 15 7 1
633 | 52015062@N00-2674 17 11 0 5
634 | 43498286@N00-2333 4 0 9 1
635 |
--------------------------------------------------------------------------------
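The Trip/data/*_set.dat dumps above all share one line layout: each non-blank line is a single trajectory, written as an identifier (a numeric pair such as 194-22313, or a user-trip pair such as 48453097@N00-2242 in Melb_set.dat and Edin_set.dat) followed by the space-separated POI indices visited on that trip. The sketch below is a minimal reader for this layout; it is not part of the repository, the function name load_trip_set is hypothetical, and it assumes the leading "N |" column shown in the dumps is display-only line numbering rather than file content.

def load_trip_set(path):
    """Map trajectory ID -> list of visited POI indices for a *_set.dat file."""
    trips = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if not parts:  # the files end with a blank line
                continue
            # first token is the trajectory ID, the rest are POI indices
            trips[parts[0]] = [int(p) for p in parts[1:]]
    return trips

# Usage sketch: load_trip_set("Trip/data/Melb_set.dat")["48453097@N00-2242"]
# would return [33, 0, 7], matching line 1 of the Melb_set.dat dump above.
--------------------------------------------------------------------------------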
/seq2seq_c/python/ops/helper.py:
--------------------------------------------------------------------------------
1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 | """A library of helpers for use with SamplingDecoders.
16 | """
17 |
18 | from __future__ import absolute_import
19 | from __future__ import division
20 | from __future__ import print_function
21 |
22 | import abc
23 |
24 | import six
25 | import tensorflow as tf
26 | from seq2seq_c.python.ops import decoder
27 | from tensorflow.python.framework import dtypes
28 | from tensorflow.python.framework import ops
29 | from tensorflow.python.framework import tensor_shape
30 | from tensorflow.python.ops import array_ops
31 | from tensorflow.python.ops import control_flow_ops
32 | from tensorflow.python.ops import embedding_ops
33 | from tensorflow.python.ops import gen_array_ops
34 | from tensorflow.python.ops import math_ops
35 | from tensorflow.python.ops import tensor_array_ops
36 | from tensorflow.python.ops.distributions import bernoulli
37 | from tensorflow.python.ops.distributions import categorical
38 | from tensorflow.python.util import nest
39 |
40 | __all__ = [
41 | "Helper",
42 | "TrainingHelper",
43 | "GreedyEmbeddingHelper",
44 | "GreedyEmbeddingHelper2",
45 | "SampleEmbeddingHelper",
46 | "CustomHelper",
47 | "ScheduledEmbeddingTrainingHelper",
48 | "ScheduledOutputTrainingHelper",
49 | "InferenceHelper",
50 | ]
51 |
52 | _transpose_batch_time = decoder._transpose_batch_time # pylint: disable=protected-access
53 |
54 |
55 | def _unstack_ta(inp):
56 | return tensor_array_ops.TensorArray(
57 | dtype=inp.dtype, size=array_ops.shape(inp)[0],
58 | element_shape=inp.get_shape()[1:]).unstack(inp)
59 |
60 |
61 | @six.add_metaclass(abc.ABCMeta)
62 | class Helper(object):
63 | """Interface for implementing sampling in seq2seq decoders.
64 |
65 | Helper instances are used by `BasicDecoder`.
66 | """
67 |
68 | @abc.abstractproperty
69 | def batch_size(self):
70 | """Batch size of tensor returned by `sample`.
71 |
72 | Returns a scalar int32 tensor.
73 | """
74 | raise NotImplementedError("batch_size has not been implemented")
75 |
76 | @abc.abstractproperty
77 | def sample_ids_shape(self):
78 | """Shape of tensor returned by `sample`, excluding the batch dimension.
79 |
80 | Returns a `TensorShape`.
81 | """
82 | raise NotImplementedError("sample_ids_shape has not been implemented")
83 |
84 | @abc.abstractproperty
85 | def sample_ids_dtype(self):
86 | """DType of tensor returned by `sample`.
87 |
88 | Returns a DType.
89 | """
90 | raise NotImplementedError("sample_ids_dtype has not been implemented")
91 |
92 | @abc.abstractmethod
93 | def initialize(self, name=None):
94 | """Returns `(initial_finished, initial_inputs)`."""
95 | pass
96 |
97 | @abc.abstractmethod
98 | def sample(self, time, outputs, state, name=None):
99 | """Returns `sample_ids`."""
100 | pass
101 |
102 | @abc.abstractmethod
103 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
104 | """Returns `(finished, next_inputs, next_state)`."""
105 | pass
106 |
107 |
108 | class CustomHelper(Helper):
109 | """Base abstract class that allows the user to customize sampling."""
110 |
111 | def __init__(self, initialize_fn, sample_fn, next_inputs_fn,
112 | sample_ids_shape=None, sample_ids_dtype=None):
113 | """Initializer.
114 |
115 | Args:
116 | initialize_fn: callable that returns `(finished, next_inputs)`
117 | for the first iteration.
118 | sample_fn: callable that takes `(time, outputs, state)`
119 | and emits tensor `sample_ids`.
120 | next_inputs_fn: callable that takes `(time, outputs, state, sample_ids)`
121 | and emits `(finished, next_inputs, next_state)`.
122 | sample_ids_shape: Either a list of integers, or a 1-D Tensor of type
123 | `int32`, the shape of each value in the `sample_ids` batch. Defaults to
124 | a scalar.
125 | sample_ids_dtype: The dtype of the `sample_ids` tensor. Defaults to int32.
126 | """
127 | self._initialize_fn = initialize_fn
128 | self._sample_fn = sample_fn
129 | self._next_inputs_fn = next_inputs_fn
130 | self._batch_size = None
131 | self._sample_ids_shape = tensor_shape.TensorShape(sample_ids_shape or [])
132 | self._sample_ids_dtype = sample_ids_dtype or dtypes.int32
133 |
134 | @property
135 | def batch_size(self):
136 | if self._batch_size is None:
137 | raise ValueError("batch_size accessed before initialize was called")
138 | return self._batch_size
139 |
140 | @property
141 | def sample_ids_shape(self):
142 | return self._sample_ids_shape
143 |
144 | @property
145 | def sample_ids_dtype(self):
146 | return self._sample_ids_dtype
147 |
148 | def initialize(self, name=None):
149 | with ops.name_scope(name, "%sInitialize" % type(self).__name__):
150 | (finished, next_inputs) = self._initialize_fn()
151 | if self._batch_size is None:
152 | self._batch_size = array_ops.size(finished)
153 | return (finished, next_inputs)
154 |
155 | def sample(self, time, outputs, state, name=None):
156 | with ops.name_scope(
157 | name, "%sSample" % type(self).__name__, (time, outputs, state)):
158 | return self._sample_fn(time=time, outputs=outputs, state=state)
159 |
160 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
161 | with ops.name_scope(
162 | name, "%sNextInputs" % type(self).__name__, (time, outputs, state)):
163 | return self._next_inputs_fn(
164 | time=time, outputs=outputs, state=state, sample_ids=sample_ids)
165 |
166 |
167 | class TrainingHelper(Helper):
168 | """A helper for use during training. Only reads inputs.
169 |
170 | Returned sample_ids are the argmax of the RNN output logits.
171 | """
172 |
173 | def __init__(self, inputs, sequence_length, time_major=False, name=None):
174 | """Initializer.
175 |
176 | Args:
177 | inputs: A (structure of) input tensors.
178 | sequence_length: An int32 vector tensor.
179 | time_major: Python bool. Whether the tensors in `inputs` are time major.
180 | If `False` (default), they are assumed to be batch major.
181 | name: Name scope for any created operations.
182 |
183 | Raises:
184 | ValueError: if `sequence_length` is not a 1D tensor.
185 | """
186 | with ops.name_scope(name, "TrainingHelper", [inputs, sequence_length]):
187 | inputs = ops.convert_to_tensor(inputs, name="inputs")
188 | self._inputs = inputs
189 | if not time_major:
190 | inputs = nest.map_structure(_transpose_batch_time, inputs)
191 |
192 | self._input_tas = nest.map_structure(_unstack_ta, inputs)
193 | self._sequence_length = ops.convert_to_tensor(
194 | sequence_length, name="sequence_length")
195 | if self._sequence_length.get_shape().ndims != 1:
196 | raise ValueError(
197 | "Expected sequence_length to be a vector, but received shape: %s" %
198 | self._sequence_length.get_shape())
199 |
200 | self._zero_inputs = nest.map_structure(
201 | lambda inp: array_ops.zeros_like(inp[0, :]), inputs)
202 |
203 | self._batch_size = array_ops.size(sequence_length)
204 |
205 | @property
206 | def inputs(self):
207 | return self._inputs
208 |
209 | @property
210 | def sequence_length(self):
211 | return self._sequence_length
212 |
213 | @property
214 | def batch_size(self):
215 | return self._batch_size
216 |
217 | @property
218 | def sample_ids_shape(self):
219 | return tensor_shape.TensorShape([])
220 |
221 | @property
222 | def sample_ids_dtype(self):
223 | return dtypes.int32
224 |
225 | def initialize(self, name=None):
226 | with ops.name_scope(name, "TrainingHelperInitialize"):
227 | finished = math_ops.equal(0, self._sequence_length)
228 | all_finished = math_ops.reduce_all(finished)
229 | next_inputs = control_flow_ops.cond(
230 | all_finished, lambda: self._zero_inputs,
231 | lambda: nest.map_structure(lambda inp: inp.read(0), self._input_tas))
232 | return (finished, next_inputs)
233 |
234 | def sample(self, time, outputs, name=None, **unused_kwargs):
235 | with ops.name_scope(name, "TrainingHelperSample", [time, outputs]):
236 | sample_ids = math_ops.cast(
237 | math_ops.argmax(outputs, axis=-1), dtypes.int32)
238 | return sample_ids
239 |
240 | def next_inputs(self, time, outputs, state, name=None, **unused_kwargs):
241 | """next_inputs_fn for TrainingHelper."""
242 | with ops.name_scope(name, "TrainingHelperNextInputs",
243 | [time, outputs, state]):
244 | next_time = time + 1
245 | finished = (next_time >= self._sequence_length)
246 | all_finished = math_ops.reduce_all(finished)
247 | def read_from_ta(inp):
248 | return inp.read(next_time)
249 | next_inputs = control_flow_ops.cond(
250 | all_finished, lambda: self._zero_inputs,
251 | lambda: nest.map_structure(read_from_ta, self._input_tas))
252 | return (finished, next_inputs, state)
253 |
254 |
255 | class ScheduledEmbeddingTrainingHelper(TrainingHelper):
256 | """A training helper that adds scheduled sampling.
257 |
258 | Returns -1s for sample_ids where no sampling took place; valid sample id
259 | values elsewhere.
260 | """
261 |
262 | def __init__(self, inputs, sequence_length, embedding, sampling_probability,
263 | time_major=False, seed=None, scheduling_seed=None, name=None):
264 | """Initializer.
265 |
266 | Args:
267 | inputs: A (structure of) input tensors.
268 | sequence_length: An int32 vector tensor.
269 | embedding: A callable that takes a vector tensor of `ids` (argmax ids),
270 | or the `params` argument for `embedding_lookup`.
271 | sampling_probability: A 0D `float32` tensor: the probability of sampling
272 | categorically from the output ids instead of reading directly from the
273 | inputs.
274 | time_major: Python bool. Whether the tensors in `inputs` are time major.
275 | If `False` (default), they are assumed to be batch major.
276 | seed: The sampling seed.
277 | scheduling_seed: The schedule decision rule sampling seed.
278 | name: Name scope for any created operations.
279 |
280 | Raises:
281 | ValueError: if `sampling_probability` is not a scalar or vector.
282 | """
283 | with ops.name_scope(name, "ScheduledEmbeddingSamplingWrapper",
284 | [embedding, sampling_probability]):
285 | if callable(embedding):
286 | self._embedding_fn = embedding
287 | else:
288 | self._embedding_fn = (
289 | lambda ids: embedding_ops.embedding_lookup(embedding, ids))
290 | self._sampling_probability = ops.convert_to_tensor(
291 | sampling_probability, name="sampling_probability")
292 | if self._sampling_probability.get_shape().ndims not in (0, 1):
293 | raise ValueError(
294 | "sampling_probability must be either a scalar or a vector. "
295 | "saw shape: %s" % (self._sampling_probability.get_shape()))
296 | self._seed = seed
297 | self._scheduling_seed = scheduling_seed
298 | super(ScheduledEmbeddingTrainingHelper, self).__init__(
299 | inputs=inputs,
300 | sequence_length=sequence_length,
301 | time_major=time_major,
302 | name=name)
303 |
304 | def initialize(self, name=None):
305 | return super(ScheduledEmbeddingTrainingHelper, self).initialize(name=name)
306 |
307 | def sample(self, time, outputs, state, name=None):
308 | with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample",
309 | [time, outputs, state]):
310 | # Return -1s where we did not sample, and sample_ids elsewhere
311 | select_sampler = bernoulli.Bernoulli(
312 | probs=self._sampling_probability, dtype=dtypes.bool)
313 | select_sample = select_sampler.sample(
314 | sample_shape=self.batch_size, seed=self._scheduling_seed)
315 | sample_id_sampler = categorical.Categorical(logits=outputs)
316 | return array_ops.where(
317 | select_sample,
318 | sample_id_sampler.sample(seed=self._seed),
319 | gen_array_ops.fill([self.batch_size], -1))
320 |
321 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
322 | with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperNextInputs",
323 | [time, outputs, state, sample_ids]):
324 | (finished, base_next_inputs, state) = (
325 | super(ScheduledEmbeddingTrainingHelper, self).next_inputs(
326 | time=time,
327 | outputs=outputs,
328 | state=state,
329 | sample_ids=sample_ids,
330 | name=name))
331 |
332 | def maybe_sample():
333 | """Perform scheduled sampling."""
334 | where_sampling = math_ops.cast(
335 | array_ops.where(sample_ids > -1), dtypes.int32)
336 | where_not_sampling = math_ops.cast(
337 | array_ops.where(sample_ids <= -1), dtypes.int32)
338 | sample_ids_sampling = array_ops.gather_nd(sample_ids, where_sampling)
339 | inputs_not_sampling = array_ops.gather_nd(
340 | base_next_inputs, where_not_sampling)
341 | sampled_next_inputs = self._embedding_fn(sample_ids_sampling)
342 | base_shape = array_ops.shape(base_next_inputs)
343 | return (array_ops.scatter_nd(indices=where_sampling,
344 | updates=sampled_next_inputs,
345 | shape=base_shape)
346 | + array_ops.scatter_nd(indices=where_not_sampling,
347 | updates=inputs_not_sampling,
348 | shape=base_shape))
349 |
350 | all_finished = math_ops.reduce_all(finished)
351 | next_inputs = control_flow_ops.cond(
352 | all_finished, lambda: base_next_inputs, maybe_sample)
353 | return (finished, next_inputs, state)
354 |
355 |
356 | class ScheduledOutputTrainingHelper(TrainingHelper):
357 | """A training helper that adds scheduled sampling directly to outputs.
358 |
359 | Returns False for sample_ids where no sampling took place; True elsewhere.
360 | """
361 |
362 | def __init__(self, inputs, sequence_length, sampling_probability,
363 | time_major=False, seed=None, next_inputs_fn=None,
364 | auxiliary_inputs=None, name=None):
365 | """Initializer.
366 |
367 | Args:
368 | inputs: A (structure) of input tensors.
369 | sequence_length: An int32 vector tensor.
370 | sampling_probability: A 0D `float32` tensor: the probability of sampling
371 | from the outputs instead of reading directly from the inputs.
372 | time_major: Python bool. Whether the tensors in `inputs` are time major.
373 | If `False` (default), they are assumed to be batch major.
374 | seed: The sampling seed.
375 | next_inputs_fn: (Optional) callable to apply to the RNN outputs to create
376 | the next input when sampling. If `None` (default), the RNN outputs will
377 | be used as the next inputs.
378 | auxiliary_inputs: An optional (structure of) auxiliary input tensors with
379 | a shape that matches `inputs` in all but (potentially) the final
380 | dimension. These tensors will be concatenated to the sampled output or
381 | the `inputs` when not sampling for use as the next input.
382 | name: Name scope for any created operations.
383 |
384 | Raises:
385 | ValueError: if `sampling_probability` is not a scalar or vector.
386 | """
387 | with ops.name_scope(name, "ScheduledOutputTrainingHelper",
388 | [inputs, auxiliary_inputs, sampling_probability]):
389 | self._sampling_probability = ops.convert_to_tensor(
390 | sampling_probability, name="sampling_probability")
391 | if self._sampling_probability.get_shape().ndims not in (0, 1):
392 | raise ValueError(
393 | "sampling_probability must be either a scalar or a vector. "
394 | "saw shape: %s" % (self._sampling_probability.get_shape()))
395 |
396 | if auxiliary_inputs is None:
397 | maybe_concatenated_inputs = inputs
398 | else:
399 | inputs = ops.convert_to_tensor(inputs, name="inputs")
400 | auxiliary_inputs = ops.convert_to_tensor(
401 | auxiliary_inputs, name="auxiliary_inputs")
402 | maybe_concatenated_inputs = nest.map_structure(
403 | lambda x, y: array_ops.concat((x, y), -1),
404 | inputs, auxiliary_inputs)
405 | if not time_major:
406 | auxiliary_inputs = nest.map_structure(
407 | _transpose_batch_time, auxiliary_inputs)
408 |
409 | self._auxiliary_input_tas = (
410 | nest.map_structure(_unstack_ta, auxiliary_inputs)
411 | if auxiliary_inputs is not None else None)
412 |
413 | self._seed = seed
414 |
415 | self._next_inputs_fn = next_inputs_fn
416 |
417 | super(ScheduledOutputTrainingHelper, self).__init__(
418 | inputs=maybe_concatenated_inputs,
419 | sequence_length=sequence_length,
420 | time_major=time_major,
421 | name=name)
422 |
423 | def initialize(self, name=None):
424 | return super(ScheduledOutputTrainingHelper, self).initialize(name=name)
425 |
426 | def sample(self, time, outputs, state, name=None):
427 | with ops.name_scope(name, "ScheduledOutputTrainingHelperSample",
428 | [time, outputs, state]):
429 | sampler = bernoulli.Bernoulli(probs=self._sampling_probability)
430 | return sampler.sample(sample_shape=self.batch_size, seed=self._seed)
431 |
432 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
433 | with ops.name_scope(name, "ScheduledOutputTrainingHelperNextInputs",
434 | [time, outputs, state, sample_ids]):
435 | (finished, base_next_inputs, state) = (
436 | super(ScheduledOutputTrainingHelper, self).next_inputs(
437 | time=time,
438 | outputs=outputs,
439 | state=state,
440 | sample_ids=sample_ids,
441 | name=name))
442 | sample_ids = math_ops.cast(sample_ids, dtypes.bool)
443 |
444 | def maybe_sample():
445 | """Perform scheduled sampling."""
446 |
447 | def maybe_concatenate_auxiliary_inputs(outputs_, indices=None):
448 | """Concatenate outputs with auxiliary inputs, if they exist."""
449 | if self._auxiliary_input_tas is None:
450 | return outputs_
451 |
452 | next_time = time + 1
453 | auxiliary_inputs = nest.map_structure(
454 | lambda ta: ta.read(next_time), self._auxiliary_input_tas)
455 | if indices is not None:
456 | auxiliary_inputs = array_ops.gather_nd(auxiliary_inputs, indices)
457 | return nest.map_structure(
458 | lambda x, y: array_ops.concat((x, y), -1),
459 | outputs_, auxiliary_inputs)
460 |
461 | if self._next_inputs_fn is None:
462 | return array_ops.where(
463 | sample_ids, maybe_concatenate_auxiliary_inputs(outputs),
464 | base_next_inputs)
465 |
466 | where_sampling = math_ops.cast(
467 | array_ops.where(sample_ids), dtypes.int32)
468 | where_not_sampling = math_ops.cast(
469 | array_ops.where(math_ops.logical_not(sample_ids)), dtypes.int32)
470 | outputs_sampling = array_ops.gather_nd(outputs, where_sampling)
471 | inputs_not_sampling = array_ops.gather_nd(base_next_inputs,
472 | where_not_sampling)
473 | sampled_next_inputs = maybe_concatenate_auxiliary_inputs(
474 | self._next_inputs_fn(outputs_sampling), where_sampling)
475 |
476 | base_shape = array_ops.shape(base_next_inputs)
477 | return (array_ops.scatter_nd(indices=where_sampling,
478 | updates=sampled_next_inputs,
479 | shape=base_shape)
480 | + array_ops.scatter_nd(indices=where_not_sampling,
481 | updates=inputs_not_sampling,
482 | shape=base_shape))
483 |
484 | all_finished = math_ops.reduce_all(finished)
485 | no_samples = math_ops.logical_not(math_ops.reduce_any(sample_ids))
486 | next_inputs = control_flow_ops.cond(
487 | math_ops.logical_or(all_finished, no_samples),
488 | lambda: base_next_inputs, maybe_sample)
489 | return (finished, next_inputs, state)
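
# Editor's sketch (not part of the original file): minimal use of the helper
# above. `decoder_inputs` and `seq_lens` are hypothetical placeholder tensors.
#
#   helper = ScheduledOutputTrainingHelper(
#       inputs=decoder_inputs,        # [batch, time, dim] teacher-forced inputs
#       sequence_length=seq_lens,     # int32 [batch]
#       sampling_probability=0.25)    # feed back the RNN output 25% of the time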
490 |
491 |
492 | class GreedyEmbeddingHelper(Helper):
493 | """A helper for use during inference.
494 |
495 | Uses the argmax of the output (treated as logits) and passes the
496 | result through an embedding layer to get the next input.
497 | """
498 |
499 | def __init__(self, embedding, start_tokens, end_token):
500 | """Initializer.
501 |
502 | Args:
503 | embedding: A callable that takes a vector tensor of `ids` (argmax ids),
504 | or the `params` argument for `embedding_lookup`. The returned tensor
505 | will be passed to the decoder input.
506 | start_tokens: `int32` vector shaped `[batch_size]`, the start tokens.
507 | end_token: `int32` scalar, the token that marks end of decoding.
508 |
509 | Raises:
510 | ValueError: if `start_tokens` is not a 1D tensor or `end_token` is not a
511 | scalar.
512 | """
513 | if callable(embedding):
514 | self._embedding_fn = embedding
515 | else:
516 | self._embedding_fn = (
517 | lambda ids: embedding_ops.embedding_lookup(embedding, ids))
518 |
519 | self._start_tokens = ops.convert_to_tensor(
520 | start_tokens, dtype=dtypes.int32, name="start_tokens")
521 | self._end_token = ops.convert_to_tensor(
522 | end_token, dtype=dtypes.int32, name="end_token")
523 | if self._start_tokens.get_shape().ndims != 1:
524 | raise ValueError("start_tokens must be a vector")
525 | self._batch_size = array_ops.size(start_tokens)
526 | if self._end_token.get_shape().ndims != 0:
527 | raise ValueError("end_token must be a scalar")
528 | self._start_inputs = self._embedding_fn(self._start_tokens)
529 |
530 | @property
531 | def batch_size(self):
532 | return self._batch_size
533 |
534 | @property
535 | def sample_ids_shape(self):
536 | return tensor_shape.TensorShape([])
537 |
538 | @property
539 | def sample_ids_dtype(self):
540 | return dtypes.int32
541 |
542 | def initialize(self, name=None):
543 | finished = array_ops.tile([False], [self._batch_size])
544 | return (finished, self._start_inputs)
545 |
546 | def sample(self, time, outputs, state, name=None):
547 | """sample for GreedyEmbeddingHelper."""
548 | del time, state # unused by sample_fn
549 | # Outputs are logits, use argmax to get the most probable id
550 | if not isinstance(outputs, ops.Tensor):
551 | raise TypeError("Expected outputs to be a single Tensor, got: %s" %
552 | type(outputs))
553 | sample_ids = math_ops.argmax(outputs, axis=-1, output_type=dtypes.int32)
554 | return sample_ids
555 |
556 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
557 | """next_inputs_fn for GreedyEmbeddingHelper."""
558 | del time, outputs # unused by next_inputs_fn
559 | finished = math_ops.equal(sample_ids, self._end_token)
560 | all_finished = math_ops.reduce_all(finished)
561 | next_inputs = control_flow_ops.cond(
562 | all_finished,
563 | # If we're finished, the next_inputs value doesn't matter
564 | lambda: self._start_inputs,
565 | lambda: self._embedding_fn(sample_ids))
566 | return (finished, next_inputs, state)
567 |
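# Editor's sketch (not part of the original file): GreedyEmbeddingHelper drives
# inference by embedding the argmax token at every step until `end_token` is
# emitted. `embedding_matrix`, `go_id`, `end_id` are hypothetical placeholders.
#
#   start = tf.fill([batch_size], go_id)
#   helper = GreedyEmbeddingHelper(embedding_matrix, start_tokens=start,
#                                  end_token=end_id)
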
568 | class GreedyEmbeddingHelper2(Helper):
569 |   """A training-time helper that feeds back its own argmax predictions:
570 |   starting from `start_tokens`, the argmax of the RNN output logits is
571 |   embedded and fed back as the next input for `sequence_length` steps.
572 |   """
573 |
574 |   def __init__(self, embedding, sequence_length, start_tokens, name=None):
575 |     """Initializer.
576 |
577 |     Args:
578 |       embedding: A callable that takes a vector tensor of `ids` (argmax ids),
579 |         or the `params` argument for `embedding_lookup`.
580 |       sequence_length: An int32 vector tensor.
581 |       start_tokens: `int32` vector shaped `[batch_size]`, the start tokens.
582 |       name: Name scope for any created operations.
583 |
584 | Raises:
585 | ValueError: if `sequence_length` is not a 1D tensor.
586 | """
587 | with ops.name_scope(name, "TrainingHelper", [sequence_length]):
588 | self._sequence_length = ops.convert_to_tensor(
589 | sequence_length, name="sequence_length")
590 | if self._sequence_length.get_shape().ndims != 1:
591 | raise ValueError(
592 | "Expected sequence_length to be a vector, but received shape: %s" %
593 | self._sequence_length.get_shape())
594 |
595 |
596 | self._batch_size = array_ops.size(sequence_length)
597 | if callable(embedding):
598 | self._embedding_fn = embedding
599 | else:
600 | self._embedding_fn = (
601 | lambda ids: embedding_ops.embedding_lookup(embedding, ids))
602 |
603 | self._start_tokens = ops.convert_to_tensor(
604 | start_tokens, dtype=dtypes.int32, name="start_tokens")
605 | self._start_inputs = self._embedding_fn(self._start_tokens)
606 |
607 |
608 | @property
609 | def sequence_length(self):
610 | return self._sequence_length
611 | @property
612 | def batch_size(self):
613 | return self._batch_size
614 |
615 | @property
616 | def sample_ids_shape(self):
617 | return tensor_shape.TensorShape([])
618 |
619 | @property
620 | def sample_ids_dtype(self):
621 | return dtypes.int32
622 |
623 | def initialize(self, name=None):
624 | with ops.name_scope(name, "TrainingHelperInitialize"):
625 | finished = math_ops.equal(0, self._sequence_length)
626 |
627 | return (finished, self._start_inputs)
628 |
629 | def sample(self, time, outputs, name=None, **unused_kwargs):
630 | with ops.name_scope(name, "TrainingHelperSample", [time, outputs]):
631 | sample_ids = math_ops.cast(
632 | math_ops.argmax(outputs, axis=-1), dtypes.int32)
633 | return sample_ids
634 |
635 |   def next_inputs(self, time, outputs, state, sample_ids, name=None, **unused_kwargs):
636 |     """next_inputs_fn for GreedyEmbeddingHelper2."""
637 | with ops.name_scope(name, "TrainingHelperNextInputs",
638 | [time, outputs, state]):
639 | next_time = time + 1
640 | finished = (next_time >= self._sequence_length)
641 | all_finished = math_ops.reduce_all(finished)
642 | next_inputs = control_flow_ops.cond(
643 | all_finished,
644 | # If we're finished, the next_inputs value doesn't matter
645 | lambda: self._start_inputs,
646 | lambda: self._embedding_fn(sample_ids))
647 |
648 |
649 | return (finished, next_inputs, state)
650 |
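# Editor's note (addition): unlike GreedyEmbeddingHelper above, this variant has
# no end_token; it always runs for exactly `sequence_length` steps, which suits
# reconstructing fixed-length trajectories. Hypothetical sketch:
#
#   helper = GreedyEmbeddingHelper2(embedding_matrix,
#                                   sequence_length=seq_lens,
#                                   start_tokens=start)
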
651 | class SampleEmbeddingHelper(GreedyEmbeddingHelper):
652 | """A helper for use during inference.
653 |
654 | Uses sampling (from a distribution) instead of argmax and passes the
655 | result through an embedding layer to get the next input.
656 | """
657 |
658 | def __init__(self, embedding, start_tokens, end_token,
659 | softmax_temperature=None, seed=None):
660 | """Initializer.
661 |
662 | Args:
663 | embedding: A callable that takes a vector tensor of `ids` (argmax ids),
664 | or the `params` argument for `embedding_lookup`. The returned tensor
665 | will be passed to the decoder input.
666 | start_tokens: `int32` vector shaped `[batch_size]`, the start tokens.
667 | end_token: `int32` scalar, the token that marks end of decoding.
668 | softmax_temperature: (Optional) `float32` scalar, value to divide the
669 | logits by before computing the softmax. Larger values (above 1.0) result
670 | in more random samples, while smaller values push the sampling
671 | distribution towards the argmax. Must be strictly greater than 0.
672 | Defaults to 1.0.
673 | seed: (Optional) The sampling seed.
674 |
675 | Raises:
676 | ValueError: if `start_tokens` is not a 1D tensor or `end_token` is not a
677 | scalar.
678 | """
679 | super(SampleEmbeddingHelper, self).__init__(
680 | embedding, start_tokens, end_token)
681 | self._softmax_temperature = softmax_temperature
682 | self._seed = seed
683 |
684 | def sample(self, time, outputs, state, name=None):
685 | """sample for SampleEmbeddingHelper."""
686 | del time, state # unused by sample_fn
687 | # Outputs are logits, we sample instead of argmax (greedy).
688 | if not isinstance(outputs, ops.Tensor):
689 | raise TypeError("Expected outputs to be a single Tensor, got: %s" %
690 | type(outputs))
691 | if self._softmax_temperature is None:
692 | logits = outputs
693 | else:
694 | logits = outputs / self._softmax_temperature
695 |
696 | sample_id_sampler = categorical.Categorical(logits=logits)
697 | sample_ids = sample_id_sampler.sample(seed=self._seed)
698 |
699 | return sample_ids
700 |
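# Editor's sketch (addition): the softmax temperature controls sample diversity;
# values above 1.0 flatten the distribution, values below 1.0 sharpen it toward
# the argmax. `embedding_matrix`, `start`, `end_id` are hypothetical.
#
#   helper = SampleEmbeddingHelper(embedding_matrix, start_tokens=start,
#                                  end_token=end_id, softmax_temperature=0.7)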
701 |
702 | class InferenceHelper(Helper):
703 | """A helper to use during inference with a custom sampling function."""
704 |
705 | def __init__(self, sample_fn, sample_shape, sample_dtype,
706 | start_inputs, end_fn, next_inputs_fn=None):
707 | """Initializer.
708 |
709 | Args:
710 | sample_fn: A callable that takes `outputs` and emits tensor `sample_ids`.
711 | sample_shape: Either a list of integers, or a 1-D Tensor of type `int32`,
712 |         the shape of each sample in the batch returned by `sample_fn`.
713 | sample_dtype: the dtype of the sample returned by `sample_fn`.
714 | start_inputs: The initial batch of inputs.
715 | end_fn: A callable that takes `sample_ids` and emits a `bool` vector
716 | shaped `[batch_size]` indicating whether each sample is an end token.
717 | next_inputs_fn: (Optional) A callable that takes `sample_ids` and returns
718 | the next batch of inputs. If not provided, `sample_ids` is used as the
719 | next batch of inputs.
720 | """
721 | self._sample_fn = sample_fn
722 | self._end_fn = end_fn
723 | self._sample_shape = tensor_shape.TensorShape(sample_shape)
724 | self._sample_dtype = sample_dtype
725 | self._next_inputs_fn = next_inputs_fn
726 | self._batch_size = array_ops.shape(start_inputs)[0]
727 | self._start_inputs = ops.convert_to_tensor(
728 | start_inputs, name="start_inputs")
729 |
730 | @property
731 | def batch_size(self):
732 | return self._batch_size
733 |
734 | @property
735 | def sample_ids_shape(self):
736 | return self._sample_shape
737 |
738 | @property
739 | def sample_ids_dtype(self):
740 | return self._sample_dtype
741 |
742 | def initialize(self, name=None):
743 | finished = array_ops.tile([False], [self._batch_size])
744 | return (finished, self._start_inputs)
745 |
746 | def sample(self, time, outputs, state, name=None):
747 | del time, state # unused by sample
748 | return self._sample_fn(outputs)
749 |
750 | def next_inputs(self, time, outputs, state, sample_ids, name=None):
751 | del time, outputs # unused by next_inputs
752 | if self._next_inputs_fn is None:
753 | next_inputs = sample_ids
754 | else:
755 | next_inputs = self._next_inputs_fn(sample_ids)
756 | finished = self._end_fn(sample_ids)
757 | return (finished, next_inputs, state)
758 |
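# Editor's sketch (addition): InferenceHelper is the fully custom variant; the
# caller supplies both the sampling and the termination logic. All names below
# are hypothetical placeholders.
#
#   helper = InferenceHelper(
#       sample_fn=lambda outputs: math_ops.argmax(outputs, axis=-1,
#                                                 output_type=dtypes.int32),
#       sample_shape=[], sample_dtype=dtypes.int32,
#       start_inputs=start_batch,
#       end_fn=lambda sample_ids: math_ops.equal(sample_ids, end_id))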
--------------------------------------------------------------------------------
/Trip/gae_context.py:
--------------------------------------------------------------------------------
1 | # -*- coding: UTF-8 -*-
2 | from __future__ import division
3 | import math
4 | from tensorflow.python.layers.core import Dense
5 | import seq2seq_c as seqc
6 | from metric import *
7 | from ops import *
8 | import time
9 | import datetime
10 | from tensorflow.python.ops.rnn_cell_impl import LSTMStateTuple
11 | # =============================== vars ====================================== #
12 | is_initial=True
13 | EPSILON = 1e-6
14 | batch_size=8
15 | n_hidden=512
16 | AE_learning_rate=0.6
17 | initializer=tf.truncated_normal_initializer(stddev=0.02)
18 | critic_lr=1e-02
19 | gen_lr=1e-02
20 | z_dim=128
21 | c_dim=512
22 | train_iters = 20 # number of passes over the training samples (epochs)
23 | embedding_size=256
24 | dynamic_training=True
25 | dat_suffix = ['Osak', 'Glas', 'Edin', 'Toro', 'Melb','TKY_split200']
26 | dat_ix=5
27 | poi_name="poi-"+dat_suffix[dat_ix]+".csv" #Edin
28 | tra_name="traj-"+dat_suffix[dat_ix]+".csv"
29 | embedding_name=dat_suffix[dat_ix]
30 | model='./logs/model_'+embedding_name+'.pkt'
31 | # =============================== data load ====================================== #
32 | #load original data
33 | op_tdata = open('origin_data/'+poi_name, 'r')
34 |
35 | ot_tdata = open('origin_data/'+tra_name, 'r')
36 | print 'To Train',dat_suffix[dat_ix]
37 | POIs=[]
38 | Trajectory=[]
39 | for line in op_tdata.readlines():
40 | lineArr = line.split(',')
41 | temp_line=list()
42 | for item in lineArr:
43 | temp_line.append(item.strip('\n'))
44 | POIs.append(temp_line)
45 | POIs=POIs[1:] #remove first line
46 |
47 | def calc_dist_vec(longitudes1, latitudes1, longitudes2, latitudes2):
48 | """Calculate the distance (unit: km) between two places on earth, vectorised"""
49 | # convert degrees to radians
50 | lng1 = np.radians(longitudes1)
51 | lat1 = np.radians(latitudes1)
52 | lng2 = np.radians(longitudes2)
53 | lat2 = np.radians(latitudes2)
54 | radius = 6371.0088 # mean earth radius, en.wikipedia.org/wiki/Earth_radius#Mean_radius
55 |
56 | # The haversine formula, en.wikipedia.org/wiki/Great-circle_distance
57 | dlng = np.fabs(lng1 - lng2)
58 | dlat = np.fabs(lat1 - lat2)
59 | dist = 2 * radius * np.arcsin( np.sqrt(
60 | (np.sin(0.5*dlat))**2 + np.cos(lat1) * np.cos(lat2) * (np.sin(0.5*dlng))**2 ))
61 | return dist
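# Editor's sanity check (addition): Osaka (135.50E, 34.69N) to Tokyo
# (139.69E, 35.68N) should give roughly the known great-circle distance:
#   print calc_dist_vec(135.50, 34.69, 139.69, 35.68)  # ~396 km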
62 | #
63 |
64 |
65 |
66 | #print POIs
67 | get_POIs={}
68 | char_pois=[] #pois chars
69 |
70 | for items in POIs:
71 | char_pois.append(items[0])
72 |     get_POIs.setdefault(items[0],[]).append([items[2],items[3]]) # map poi id -> [longitude, latitude]
73 | Users=[]
74 | poi_count={}
75 | for line in ot_tdata.readlines():
76 | lineArr=line.split(',')
77 | temp_line=list()
78 | if lineArr[0]=='userID':
79 | continue
80 | poi_count.setdefault(lineArr[2], []).append(lineArr[2])
81 | for i in range(len(lineArr)):
82 |
83 | if i==0:
84 | user = lineArr[i]
85 | Users.append(user) # add user id
86 | temp_line.append(user)
87 | continue
88 | temp_line.append(lineArr[i].strip('\n'))
89 | Trajectory.append(temp_line)
90 | Users=sorted(list(set(Users)))
91 | print 'user number',len(Users)
92 |
93 | TRAIN_TRA=[]
94 | TRAIN_USER=[]
95 | TRAIN_TIME=[]
96 | TRAIN_DIST=[]
97 | DATA={} #temp_data
98 | for index in range(len(Trajectory)):
99 |     if(int(Trajectory[index][-2])>=3): # keep only trajectories whose length is at least 3
100 | DATA.setdefault(Trajectory[index][0]+'-'+Trajectory[index][1],[]).append([Trajectory[index][2],Trajectory[index][3],Trajectory[index][4]]) #userID+trajID
101 |
102 |
103 |
104 | # #calc distance
105 | # distance_count=[]
106 | # for i in range(len(POIs)):
107 | # lon1=float(POIs[i][2])
108 | # lat1=float(POIs[i][3])
109 | # for j in range(i+1,len(POIs)):
110 | # lon2 = float(POIs[j][2])
111 | # lat2 =float( POIs[j][3])
112 | # dist=calc_dist_vec(lon1,lat1,lon2,lat2)
113 | # distance_count.append(dist)
114 | # print 'max',max(distance_count)
115 | # print 'min',min(distance_count)
116 |
117 | #calc_distance
118 | distance_count=[]
119 | for key in DATA.keys():
120 | traj=DATA[key]
121 | #print traj
122 | for i in range(len(traj)):
123 | #print get_POIs[traj[i][0]][0][0]
124 | lon1=float(get_POIs[traj[i][0]][0][0])
125 | lat1=float(get_POIs[traj[i][0]][0][1])
126 | for j in range(i+1,len(traj)):
127 | lon2 = float(get_POIs[traj[j][0]][0][0])
128 | lat2 = float(get_POIs[traj[j][0]][0][1])
129 | distance_count.append(calc_dist_vec(lon1,lat1,lon2,lat2))
130 | upper_dis=max(distance_count)
131 | lower_dis=min(distance_count)
132 | print len(distance_count)
133 | # rand_l=np.random.rand(-1,1)
134 | # rand_u=np.random.rand(-1,1)
135 | # print rand_l
136 | # val=((min(distance_count)-min(distance_count))*rand_l+(max(distance_count)-min(distance_count))*rand_u)/(min(distance_count)-min(distance_count)+max(distance_count)-min(distance_count))
137 | # # def calc_distance(distance):
138 | # # val = ((distance - min(distance_count)) * rand_l + (max(distance_count) - distance) * rand_u)/(distance - min(distance_count) + max(distance_count) - distance)
139 | # # return val
140 |
141 | for keys in DATA.keys():
142 | user_traj=DATA[keys]
143 | temp_poi=[]
144 | temp_time=[]
145 | temp_dist=[]
146 | for i in range(len(user_traj)):
147 | temp_poi.append(user_traj[i][0]) #add poi id
148 | lon1=float(get_POIs[user_traj[i][0]][0][0])
149 | lat1=float(get_POIs[user_traj[i][0]][0][1])
150 | lons=float(get_POIs[user_traj[0][0]][0][0])
151 | lats=float(get_POIs[user_traj[0][0]][0][1])
152 | lone=float(get_POIs[user_traj[-1][0]][0][0])
153 | late=float(get_POIs[user_traj[-1][0]][0][1])
154 | sd=calc_dist_vec(lon1,lat1,lons,lats)
155 | ed = calc_dist_vec(lon1, lat1, lone, late)
156 | value1=0.5*(sd)/max(distance_count)
157 | value2=0.5*(ed)/max(distance_count)
158 | #print value
159 | temp_dist.append([value1,value2]) #lon,lat
160 |
161 | dt = time.strftime("%H:%M:%S", time.localtime(int(user_traj[i][1:][0])))
162 | #print dt.split(":")[0]
163 | temp_time.append(int(dt.split(":")[0])) #add poi time
164 | TRAIN_USER.append(keys)
165 | TRAIN_TRA.append(temp_poi)
166 | TRAIN_TIME.append(temp_time)
167 | TRAIN_DIST.append(temp_dist)
168 | dictionary={}
169 | for key in poi_count.keys():
170 | count=len(poi_count[key])
171 | dictionary[key]=count
172 | dictionary['GO']=1
173 | dictionary['PAD']=1
174 | dictionary['END']=1
175 | new_dict=sorted(dictionary.items(),key = lambda x:x[1],reverse = True)
176 |
177 | print 'poi number is',len(new_dict)-3
178 | voc_poi=list()
179 |
180 | for item in new_dict:
181 | voc_poi.append(item[0]) #has been sorted by frequency
182 |
183 | def extract_words_vocab():
184 | int_to_vocab = {idx: word for idx, word in enumerate(voc_poi)}
185 | vocab_to_int = {word: idx for idx, word in int_to_vocab.items()}
186 | return int_to_vocab, vocab_to_int
187 | int_to_vocab, vocab_to_int=extract_words_vocab()
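# Editor's illustration (addition): voc_poi is sorted by descending frequency,
# so for a hypothetical voc_poi = ['21', '8', 'GO', 'PAD', 'END']:
#   int_to_vocab == {0: '21', 1: '8', 2: 'GO', 3: 'PAD', 4: 'END'}
#   vocab_to_int == {'21': 0, '8': 1, 'GO': 2, 'PAD': 3, 'END': 4}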
188 |
189 | #generate pre-training dataset
190 | new_trainT = list()
191 | for i in range(len(TRAIN_TRA)): #TRAIN
192 | temp = list()
193 | temp.append(vocab_to_int['GO'])
194 | for j in range(len(TRAIN_TRA[i])):
195 | temp.append(vocab_to_int[TRAIN_TRA[i][j]])
196 | temp.append(vocab_to_int['END'])
197 | temp.append(vocab_to_int['PAD'])
198 | new_trainT.append(temp)
199 |
200 | #generate training dataset
201 | new_trainTs = list()
202 | for i in range(len(TRAIN_TRA)): #TRAIN
203 | temp = list()
204 | for j in range(len(TRAIN_TRA[i])):
205 | temp.append(vocab_to_int[TRAIN_TRA[i][j]])
206 | new_trainTs.append(temp)
207 |
208 | dataset=open('data/'+embedding_name+'_set.dat','w')
209 | for i in range(len(new_trainTs)):
210 | dataset.write(str(TRAIN_USER[i])+'\t')
211 | for j in range(len(new_trainTs[i])):
212 | dataset.write(str(new_trainTs[i][j])+'\t')
213 | dataset.write('\n')
214 | dataset.close()
215 |
216 | #embeddings
217 | if dynamic_training:
218 | embeddings=tf.Variable(tf.random_uniform([len(voc_poi),embedding_size],-1.0,1.0))
219 | time_embeddings = tf.Variable(tf.random_uniform([24,32], -1.0, 1.0))
220 | distance_embeddings1=tf.Variable(tf.random_uniform([32], -1.0, 1.0))
221 | distance_embeddings2=tf.Variable(tf.random_uniform([32], -1.0, 1.0))
222 | weights=tf.Variable(tf.truncated_normal([embedding_size,embedding_size],stddev=1.0/math.sqrt(embedding_size)))
223 | bias=tf.Variable(tf.zeros([embedding_size]),dtype=tf.float32)
224 | embeddings=tf.nn.xw_plus_b(embeddings,weights,bias)
225 | else:
226 | embeddings=[]
227 | fread_emb=open('data/'+embedding_name+'vec.dat','r')
228 | for line in fread_emb.readlines():
229 | lineArr=line.split()
230 | temp=list()
231 | for i in range(1,len(lineArr)):
232 | temp.append(float(lineArr[i]))
233 | embeddings.append(temp)
234 | embeddings=tf.constant(embeddings)
235 |
236 | # =============================== data load end====================================== #
237 |
238 | # =============================== tf.vars ====================================== #
239 | keep_prob = tf.placeholder("float")
240 | lens=tf.placeholder(dtype=tf.int32)
241 | input_X=tf.placeholder(dtype=tf.int32, shape=[batch_size, None])
242 | input_X_de=tf.placeholder(dtype=tf.int32, shape=[batch_size, None])
243 | input_t=tf.placeholder(dtype=tf.int32, shape=[batch_size, None])
244 | input_d1=tf.placeholder(dtype=tf.float32, shape=[batch_size, None])
245 | input_d2=tf.placeholder(dtype=tf.float32, shape=[batch_size, None])
246 | target_sequence_length = tf.placeholder(tf.int32, [None], name='target_sequence_length')
247 | max_target_sequence_length = tf.reduce_max(target_sequence_length, name='max_target_len')
248 |
249 | code = tf.placeholder(tf.float32, shape=[None, c_dim])
250 | z = tf.placeholder(dtype=tf.int32, shape=[batch_size, None])
251 | z_t=tf.placeholder(dtype=tf.int32, shape=[batch_size, None])
252 | z_d1=tf.placeholder(dtype=tf.float32, shape=[batch_size, None])
253 | z_d2=tf.placeholder(dtype=tf.float32, shape=[batch_size, None])
254 |
255 | # =============================== Encoder ====================================== #
256 | def encoder(X,context,keep_prob=0.5):
257 | """
258 |     encode a discrete POI sequence plus its temporal/spatial context into a continuous latent vector
259 |     :param X: int32 POI ids, [batch_size, length]; context: [time, dist1, dist2] tensors.
260 |     :return: encoded latent vector and the final LSTM state
261 | """
262 | with tf.variable_scope("encoder"):
263 | tensor=tf.nn.embedding_lookup(embeddings,X) #find embeddings of trajectory:[batch_size,length,embedding_size].
264 | time_t=tf.nn.embedding_lookup(time_embeddings,context[0])
265 | print 'time_t',time_t
266 | space=tf.tensordot(context[1],distance_embeddings1,0)+tf.tensordot(context[2],distance_embeddings2,0)
267 | print 'space',space
268 | tensor=tf.concat([tensor,time_t],2)
269 | tensor=tf.concat([tensor,space],2)
270 | trans_tensor=tf.transpose(tensor,[1,0,2]) #[length,batch_size,embedding_size].
271 | lstm_cell=tf.nn.rnn_cell.LSTMCell(n_hidden)
272 | dr_lstm_cell=tf.nn.rnn_cell.DropoutWrapper(lstm_cell,output_keep_prob=keep_prob)
273 | (output,states)=tf.nn.dynamic_rnn(dr_lstm_cell,trans_tensor,time_major=True,dtype=tf.float32)
274 | latent_code=output[-1]
275 | latent_code=tf.nn.l2_normalize(latent_code)
276 | print 'latentcode',latent_code
277 | #latent_code= fully_connected(latent_code, c_dim, initializer=initializer, is_last=True, scope="encoder_output")
278 | return latent_code,states
279 |
280 | # =============================== Decoder ====================================== #
281 | def decoder(tensor,X,en_state,reuse=False):
282 | """
283 |     decode the latent vector into a probability distribution over POIs
284 |     :param tensor: 2-D latent code tensor.
285 |     :param X: target sequence ids, used as the reconstruction target.
286 |     :param en_state: encoder final state, used as the decoder's initial state.
287 |     :param reuse: Boolean. reuse or not
288 |     :return: decoder output, predicted ids, training logits, loss masks
289 |         and the target
290 | """
291 | with tf.variable_scope('decoder',reuse=reuse) as scope:
292 |
293 | decode_lstm=tf.nn.rnn_cell.LSTMCell(n_hidden)
294 | decode_dr_lstm = tf.nn.rnn_cell.DropoutWrapper(decode_lstm, output_keep_prob=keep_prob)
295 | output_layer=Dense(len(vocab_to_int))
296 | decoder_initial_state=en_state#LSTMStateTuple(c_state, h_state)
297 |
298 | copy = tf.tile(tf.constant([vocab_to_int['GO']]), [batch_size])
299 | training_helper = seqc.GreedyEmbeddingHelper2(embeddings,
300 | sequence_length=target_sequence_length, start_tokens=copy)
301 | training_decoder = seqc.BasicDecoder(decode_dr_lstm, training_helper, decoder_initial_state,tensor,output_layer) # cell,helper, initial_state, out_layer(convert rnn_size to vocab_size)
302 | output, _, _ = seqc.dynamic_decode(training_decoder,
303 | impute_finished=True,
304 | maximum_iterations=max_target_sequence_length)
305 | predicting_logits = tf.identity(output.sample_id, name='predictions')
306 | training_logits = tf.identity(output.rnn_output, 'logits')
307 | masks = tf.sequence_mask(target_sequence_length, max_target_sequence_length, dtype=tf.float32, name='masks')
308 | target = X
309 | return output, predicting_logits, training_logits, masks, target
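# Editor's note (addition): because the decoder uses GreedyEmbeddingHelper2,
# each step consumes the embedding of its own previous argmax prediction even
# at training time (no teacher forcing); the latent code `tensor` passed to
# this repo's BasicDecoder and the encoder state carry the trajectory information.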
310 |
311 |
312 | # =============================== Generator ====================================== #
313 | def generator(z,context,reuse=False):
314 | """
315 | generator of WGAN
316 | :param z: 2-D tensor. noise with standard normal distribution
317 | :param reuse: Boolean. reuse or not
318 | :return: 2-D tensor. latent vector
319 | """
320 | """
321 | encode discrete feature to continuous latent vector
322 | :param tensor: [batch_size,length,embedding_size].
323 | :return:encoded latent vector
324 | """
325 | with tf.variable_scope("generator"):
326 | tensor=tf.nn.embedding_lookup(embeddings,z) #find embeddings of trajectory:[batch_size,length,embedding_size].
327 | time_t=tf.nn.embedding_lookup(time_embeddings,context[0])
328 | space=tf.tensordot(context[1],distance_embeddings1,0)+tf.tensordot(context[2],distance_embeddings2,0)
329 | tensor=tf.concat([tensor,time_t],2)
330 | tensor=tf.concat([tensor,space],2)
331 | trans_tensor=tf.transpose(tensor,[1,0,2]) #[length,batch_size,embedding_size].
332 | lstm_cell=tf.nn.rnn_cell.LSTMCell(n_hidden)
333 | dr_lstm_cell=tf.nn.rnn_cell.DropoutWrapper(lstm_cell,output_keep_prob=keep_prob)
334 | (output,states)=tf.nn.dynamic_rnn(dr_lstm_cell,trans_tensor,time_major=True,dtype=tf.float32)
335 | latent_code=output[-1]
336 | latent_code=tf.nn.l2_normalize(latent_code)
337 | return latent_code,states
338 | # =============================== Discriminator(Critic) ====================================== #
339 | def critic(latent,reuse=False):
340 | """
341 | discriminator of WGAN
342 | :param latent: 2-D tensor. latent vector
343 | :param reuse: Boolean. reuse or not
344 | :return: 2-D tensor. logit of data or noise
345 | """
346 | with tf.variable_scope("critic",reuse=reuse):
347 | fc_100 = fully_connected(latent, 100, initializer=initializer, scope="fc_100")
348 | fc_60 = fully_connected(fc_100, 60, initializer=initializer, scope="fc_60")
349 | fc_20 = fully_connected(fc_60, 20, initializer=initializer, scope="fc_20")
350 | output=fully_connected(fc_20,1,initializer=initializer,is_last=True,scope="critic_output")
351 |         #WGAN critic: no activation on the output (raw score)
352 | return output
353 |
354 | # =============================== Function ====================================== #
355 | def autoencoder(X,de_X,context,keep_prob):
356 | """
357 |     deep autoencoder: reconstructs the input trajectory
358 |     :param X: encoder input ids; de_X: decoder targets; context: [time, dist1, dist2].
359 |     :return: training logits, masks, target, latent code, predictions and encoder state
360 | """
361 | with tf.variable_scope("autoencoder"):
362 | latent,en_state=encoder(X,context,keep_prob)
363 | output_, predicting_logits_, training_logits_, masks_, target_=decoder(latent,de_X,en_state)
364 | return training_logits_,masks_,target_,latent,predicting_logits_,en_state
365 | def pad_sentence_batch(sentence_batch, pad_int):
366 | '''
367 |     Pad the sequences in the batch so that every row has the same sequence_length
368 |
369 |     Args:
370 |     - sentence_batch: batch of id sequences
371 |     - pad_int: index of the PAD token used for padding
372 |     '''
373 |     max_sentence = max([len(sentence) for sentence in sentence_batch]) # maximum length in the batch
374 | return [sentence + [pad_int] * (max_sentence - len(sentence)) for sentence in sentence_batch]
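# Editor's example (addition), with a hypothetical PAD index of 3:
#   pad_sentence_batch([[5, 6, 7], [5, 6]], 3)  ->  [[5, 6, 7], [5, 6, 3]]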
375 |
376 | def pad_time_batch(time_batch):
377 | '''
378 |     Pad the time sequences in the batch so that every row has the same length
379 |     (missing steps are padded with 0)
380 |
381 |     Args:
382 |     - time_batch: batch of hour-of-day sequences
383 |     '''
384 |     max_sentence = max([len(sentence) for sentence in time_batch]) # maximum length in the batch
385 | return [sentence + [0] * (max_sentence - len(sentence)) for sentence in time_batch]
386 | def pad_dist_batch(dist_batch):
387 | '''
388 |     Pad the distance sequences in the batch so that every row has the same length
389 |     (padded by repeating the last element)
390 |
391 |     Args:
392 |     - dist_batch: batch of [start-distance, end-distance] pairs
393 |     '''
394 |     max_sentence = max([len(sentence) for sentence in dist_batch]) # maximum length in the batch
395 | return [sentence + [sentence[-1]] * (max_sentence - len(sentence)) for sentence in dist_batch]
396 | def eos_sentence_batch(sentence_batch, eos_in):
397 | return [sentence + [eos_in] for sentence in sentence_batch]
398 |
399 | # =============================== Graph ====================================== #
400 | """
401 | build network
402 | :return:
403 | """
404 | context=[input_t,input_d1,input_d2]
405 | z_context=[z_t,z_d1,z_d2]
406 | training_logits_,masks_,target_,real_code,predicting_logits_,encoder_state=autoencoder(input_X,input_X_de,context,keep_prob)
407 | g_code,g_state=generator(z,z_context) #flat c
408 | print g_code
409 | critic_real=critic(real_code)
410 | print critic_real
411 | critic_fake=critic(g_code,reuse=True)
412 | print critic_fake
413 | #WGAN loss
414 | disc_real_loss=tf.reduce_mean(critic_real)
415 | disc_fake_loss=-tf.reduce_mean(critic_fake) #Negative sample phase
416 | critic_loss=tf.reduce_mean(critic_real)-tf.reduce_mean(critic_fake)
417 | gen_loss=tf.reduce_mean(critic_fake) #Train the generator
418 |
419 | #for discrete input, use cross entropy loss
420 | AE_loss=seqc.sequence_loss(training_logits_, target_, masks_)
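# Editor's shape note (addition): seqc.sequence_loss mirrors
# tf.contrib.seq2seq.sequence_loss, a masked softmax cross-entropy expecting
#   logits  [batch_size, max_time, vocab_size]  (training_logits_)
#   targets [batch_size, max_time] int ids      (target_)
#   weights [batch_size, max_time] float mask   (masks_)
# averaged over the unmasked timesteps.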
421 |
422 | #get trainable variables
423 | AE_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,scope="autoencoder")
424 | gen_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,scope="generator")
425 | critic_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,scope='critic')
426 | encoder_variables=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,scope='autoencoder/encoder')
427 |
428 | #set optimizer for each module
429 | disc_op=tf.train.AdamOptimizer(learning_rate=critic_lr)
430 | gen_op=tf.train.AdamOptimizer(learning_rate=gen_lr)
431 | AE_op=tf.train.GradientDescentOptimizer(learning_rate=AE_learning_rate)
432 |
433 |
434 | #compute gradients
435 | pos_critic_grad=disc_op.compute_gradients(disc_real_loss,critic_variables+encoder_variables)
436 |
437 | neg_critic_grad=disc_op.compute_gradients(disc_fake_loss,critic_variables)
438 |
439 | #clipping gradients for negative samples
440 | neg_critic_grad = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in neg_critic_grad]
441 | gen_grad = gen_op.compute_gradients(gen_loss, gen_variables)
442 | AE_grad = AE_op.compute_gradients(AE_loss,AE_variables)
443 | #AE_grad = [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in AE_grad]
444 |
445 | #apply gradients
446 | update_critic_pos=disc_op.apply_gradients(pos_critic_grad)
447 | update_critic_neg=disc_op.apply_gradients(neg_critic_grad)
448 | update_G=gen_op.apply_gradients(gen_grad)
449 | update_AE=AE_op.apply_gradients(AE_grad)
450 |
451 | #reconstruction
452 | with tf.variable_scope("autoencoder"):
453 | toutput_, tpredicting_logits_, ttraining_logits_, tmasks_, ttarget_=decoder(real_code,input_X_de,encoder_state,reuse=True)
454 | foutput_, fpredicting_logits_, ftraining_logits_, fmasks_, ftarget_ = decoder(g_code, input_X_de,g_state,reuse=True)
455 | def train(data):
456 | init=tf.global_variables_initializer()
457 | with tf.Session() as sess:
458 | sess.run(init)
459 | train_variables, test_variables=data
460 | encoder_train, decoder_train, train_batch_lenth, n_trainTime, n_trainDist1, n_trainDist2, z_train, z_train_time, z_train_dist1, z_train_dist2=train_variables
461 |
462 | max_F1=[]
463 | max_pair_F1=[]
464 | test_max_F1=[]
465 | test_max_pair_F1=[]
466 | ftest_max_F1=[]
467 | ftest_max_pair_F1=[]
468 | res = {}
469 | fres={}
470 | for epoch in range(train_iters):
471 | step = 0
472 | ACC=0
473 | F1=[]
474 | pairs_F1=[]
475 | LOSS=[]
476 | gen_LOSS=[]
477 | Critic_Loss=[]
478 | Fake_Loss=[]
479 | GAN_Loss=[]
480 | while step < len(encoder_train) // batch_size:
481 | start_i = step * batch_size
482 | dist_1_=n_trainDist1[start_i:start_i + batch_size]
483 | dist_2_=n_trainDist2[start_i:start_i + batch_size]
484 | input_time_=n_trainTime[start_i:start_i + batch_size]
485 | encode_batch =encoder_train[start_i:start_i + batch_size]
486 | decode_batch = decoder_train[start_i:start_i + batch_size]
487 | pad_source_lengths =train_batch_lenth[start_i:start_i + batch_size]
488 | z_in =z_train[start_i:start_i + batch_size]
489 | z_time=z_train_time[start_i:start_i + batch_size]
490 | z_dist1=z_train_dist1[start_i:start_i + batch_size]
491 | z_dist2=z_train_dist2[start_i:start_i + batch_size]
492 |
493 | #update AE
494 | _,_AEloss=sess.run([update_AE,AE_loss],feed_dict={target_sequence_length: pad_source_lengths,
495 | input_X: encode_batch,input_X_de: decode_batch,input_t:input_time_,input_d1:dist_1_,input_d2:dist_2_,keep_prob: 0.5})
496 | # update critic & encoder at positive sample phase
497 | for k in range(10):
498 | _, _critic_loss, real_loss = sess.run([update_critic_pos,critic_loss,disc_real_loss],feed_dict={target_sequence_length: pad_source_lengths,
499 | input_X: encode_batch,input_X_de: decode_batch,z: z_in,z_t:z_time,z_d1:z_dist1,z_d2:z_dist2,input_t:input_time_,input_d1:dist_1_,input_d2:dist_2_,keep_prob: 0.5})
500 | fake_loss, _ = sess.run([disc_fake_loss, update_critic_neg], feed_dict={z: z_in, z_t:z_time,z_d1:z_dist1,z_d2:z_dist2,keep_prob: 0.5})
501 |             # update generator
502 | _, gan_loss = sess.run([update_G, gen_loss], feed_dict={z: z_in, z_t:z_time,z_d1:z_dist1,z_d2:z_dist2,keep_prob: 0.5})
503 | #training result
504 | values= sess.run(predicting_logits_, feed_dict={target_sequence_length: pad_source_lengths,
505 | input_X: encode_batch,input_X_de: decode_batch,input_t:input_time_,input_d1:dist_1_,input_d2:dist_2_,
506 | z_t: z_time, z_d1: z_dist1, z_d2: z_dist2,keep_prob: 0.5})
507 | LOSS.append(_AEloss)
508 | gen_LOSS.append(real_loss)
509 | Critic_Loss.append(_critic_loss)
510 | Fake_Loss.append(fake_loss)
511 | GAN_Loss.append(gan_loss)
512 |
513 | for v in range(len(values)):
514 | length=pad_source_lengths[v]-1
515 | if (decode_batch[v][:length]==values[v][:length]).all():
516 | ACC+=1
517 | f=calc_F1(decode_batch[v][:length],values[v][:length])
518 | p_f=calc_pairsF1(decode_batch[v][:length],values[v][:length])
519 | F1.append(f)
520 | pairs_F1.append(p_f)
521 | step += 1
522 | #saver.save(sess, model)
523 | #print 'train F1',np.mean(F1)
524 | #print 'pairs-F1',np.mean(pairs_F1)
525 | #print 'epoch',epoch,'train accuracy',ACC/len(trainU)
526 | #print 'testing------------------>loss',epoch,np.sum(LOSS)
527 | test_f1,test_pairs_f1,ftest_f1,ftest_pairs_f1=test(sess=sess,test_variables=test_variables) #
528 | max_F1.append(np.mean(F1))
529 | max_pair_F1.append(np.mean(pairs_F1))
530 | #print test_f1,test_pairs_f1,ftest_f1,ftest_pairs_f1
531 | max_epoch = -1
532 | res.setdefault(test_f1, []).append(test_pairs_f1)
533 | fres.setdefault(ftest_f1, []).append(ftest_pairs_f1)
534 | keys = res.keys()
535 | fkeys =fres.keys()
536 | keys = sorted(keys)
537 | fkeys = sorted(fkeys)
538 | return (max_F1[max_epoch]), (max_pair_F1)[max_epoch],keys[-1],max(res[keys[-1]]),fkeys[-1],max(fres[fkeys[-1]])
539 |
540 | def test(sess,test_variables):#,Test_Time=None
541 | step = 0
542 | Pred_ACC = 0
543 | F1 = []
544 | pairs_F1 = []
545 | fake_F1=[]
546 | fake_pairs_F1=[]
547 | encoder_test, decoder_test, test_batch_lenth, n_testTime, n_testDist1, n_testDist2, z_test, z_test_time, z_test_dist1, z_test_dist2 = test_variables
548 | while step < len(encoder_test) // batch_size:
549 | start_i = step * batch_size
550 | dist_1_ = n_testDist1[start_i:start_i + batch_size]
551 | dist_2_ = n_testDist2[start_i:start_i + batch_size]
552 | input_time_ = n_testTime[start_i:start_i + batch_size]
553 | encode_batch = encoder_test[start_i:start_i + batch_size]
554 | decode_batch = decoder_test[start_i:start_i + batch_size]
555 | pad_source_lengths = test_batch_lenth[start_i:start_i + batch_size]
556 | z_in = z_test[start_i:start_i + batch_size]
557 | z_time = z_test_time[start_i:start_i + batch_size]
558 | z_dist1 = z_test_dist1[start_i:start_i + batch_size]
559 | z_dist2 = z_test_dist2[start_i:start_i + batch_size]
560 | otpredicting_logits_,fake_ = sess.run([tpredicting_logits_,fpredicting_logits_],feed_dict={target_sequence_length: pad_source_lengths,
561 | input_X: z_in,input_t:z_time,input_d1:z_dist1,input_d2:z_dist2,z:z_in,z_t:z_time,z_d1:z_dist1,z_d2:z_dist2,
562 |                 keep_prob: 1.0}) # results after the gradient updates
563 |
564 | for v in range(len(otpredicting_logits_[:1])):
565 | length = pad_source_lengths[v] - 1
566 | if (decode_batch[v][:length]== otpredicting_logits_[v][:length]).all():
567 | Pred_ACC += 1
568 | actual = decode_batch[v][:length]
569 | recommend = np.concatenate([[actual[0]], otpredicting_logits_[v][1:length - 1]], axis=0)
570 | recommend = np.concatenate([recommend, [actual[-1]]], axis=0)
571 |
572 | frecommend = np.concatenate([[actual[0]], fake_[v][1:length - 1]], axis=0)
573 | frecommend = np.concatenate([frecommend, [actual[-1]]], axis=0)
574 | #print actual,recommend,frecommend
575 | f_f = calc_F1(actual, frecommend)
576 | f_p_f = calc_pairsF1(actual, frecommend)
577 | f = calc_F1(actual, recommend)
578 | p_f = calc_pairsF1(actual, recommend)
579 | F1.append(f)
580 | pairs_F1.append(p_f)
581 | fake_F1.append(f_f)
582 | fake_pairs_F1.append(f_p_f)
583 | step += 1
584 | #print 'trajectory length',length
585 |     return np.mean(F1),np.mean(pairs_F1),np.mean(fake_F1),np.mean(fake_pairs_F1)
586 | def get_data(index,K):
587 | # sort original data
588 | index_T = {}
589 | trainT = []
590 | trainU = []
591 | trainTime=[]
592 | trainDist=[]
593 | for i in range(len(new_trainTs)):
594 | index_T[i] = len(new_trainTs[i])
595 | temp_size = sorted(index_T.items(), key=lambda item: item[1])
596 | for i in range(len(temp_size)):
597 | id = temp_size[i][0]
598 | trainT.append(new_trainTs[id])
599 | trainU.append(TRAIN_USER[id])
600 | trainTime.append(TRAIN_TIME[id])
601 | trainDist.append(TRAIN_DIST[id])
602 | value=int(math.ceil(len(trainT)/K))
603 | if index==K-1:
604 | testT=trainT[-value:]
605 | testU=trainU[-value:]
606 | trainT=trainT[:-value]
607 | trainU=trainU[:-value]
608 |
609 | testTime=trainTime[-value:]
610 | testDist=trainDist[-value:]
611 | trainTime=trainTime[:-value]
612 | trainDist=trainDist[:-value]
613 |
614 | elif index==0:
615 | testT=trainT[:(index+1)*value]
616 | testU=trainU[:(index+1)*value]
617 | trainT =trainT[(index+1)*value:]
618 | trainU =trainU[(index+1)*value:]
619 |
620 | testTime=trainTime[:(index+1)*value]
621 | testDist=trainDist[:(index+1)*value]
622 | trainTime=trainTime[(index+1)*value:]
623 | trainDist=trainDist[(index+1)*value:]
624 |
625 | else:
626 | testT=trainT[index*value:(index+1)*value]
627 | testU=trainU[index*value:(index+1)*value]
628 | trainT = trainT[0:index*value]+trainT[(index+1)*value:]
629 | trainU = trainU[0:index*value]+trainU[(index+1)*value:]
630 |
631 | testTime=trainTime[index*value:(index+1)*value]
632 | testDist=trainDist[index*value:(index+1)*value]
633 | trainTime=trainTime[0:index*value]+trainTime[(index+1)*value:]
634 | trainDist=trainDist[0:index*value]+trainDist[(index+1)*value:]
635 |     train_size = len(trainT) % batch_size
636 |     if train_size != 0: # fill the last incomplete batch by repeating the final example
637 |         trainT = trainT + [trainT[-1]]*(batch_size-train_size)
638 |         trainU = trainU + [trainU[-1]]*(batch_size-train_size)
639 |         trainTime = trainTime + [trainTime[-1]]*(batch_size-train_size)
640 |         trainDist = trainDist + [trainDist[-1]]*(batch_size-train_size)
641 | #print 'Text', testT,index,K
642 | test_size = len(testT) % batch_size
643 | if test_size!=0:
644 | testT = testT + [testT[-1]]*(batch_size-test_size) # copy data and fill the last batch size
645 | testU = testU + [testU[-1]]*(batch_size-test_size) #BUG for test_size