├── .DS_Store
├── .idea
├── .gitignore
├── S2GSL-main.iml
├── deployment.xml
├── inspectionProfiles
│ └── profiles_settings.xml
├── misc.xml
├── modules.xml
└── webServers.xml
├── README.md
├── attention.py
├── data
├── .DS_Store
└── V2
│ ├── .DS_Store
│ ├── Laptops
│ ├── .DS_Store
│ ├── dep_type.json
│ ├── test.txt.dep
│ ├── test_con_new.json
│ ├── test_con_new_new.json
│ ├── train.txt.dep
│ ├── train_con_new.json
│ ├── train_con_new_new.json
│ ├── valid.txt.dep
│ ├── valid_con_new.json
│ ├── valid_con_new_new.json
│ ├── vocab_pol.vocab
│ └── vocab_tok.vocab
│ ├── MAMS
│ ├── .DS_Store
│ ├── dep_type.json
│ ├── test.txt.dep
│ ├── test_con_new.json
│ ├── test_con_new_new.json
│ ├── train.txt.dep
│ ├── train_con_new.json
│ ├── train_con_new_new.json
│ ├── valid.txt.dep
│ ├── valid_con_new.json
│ ├── valid_con_new_new.json
│ ├── vocab_pol.vocab
│ └── vocab_tok.vocab
│ ├── Restaurants
│ ├── .DS_Store
│ ├── dep_type.json
│ ├── test.txt.dep
│ ├── test_con_new.json
│ ├── test_con_new_new.json
│ ├── train.txt.dep
│ ├── train_con_new.json
│ ├── train_con_new_new.json
│ ├── valid.txt.dep
│ ├── valid_con_new.json
│ ├── valid_con_new_new.json
│ ├── vocab_pol.vocab
│ └── vocab_tok.vocab
│ ├── Tweets
│ ├── .DS_Store
│ ├── dep_type.json
│ ├── test.txt.dep
│ ├── test_con_new.json
│ ├── test_con_new_new.json
│ ├── train.txt.dep
│ ├── train_con_new.json
│ ├── train_con_new_new.json
│ ├── valid.txt.dep
│ ├── valid_con_new.json
│ ├── valid_con_new_new.json
│ ├── vocab_pol.vocab
│ └── vocab_tok.vocab
│ ├── corenlp.py
│ ├── preprocess_dependency.py
│ └── stanford-corenlp
│ └── .DS_Store
├── dataloader.py
├── dep_parser.py
├── gcn.py
├── model.py
├── module_interaction.py
├── parse_tree.py
├── pretrain_model
├── .DS_Store
└── bert-base-uncased
│ └── .DS_Store
├── run_bash
├── .DS_Store
└── start.sh
├── snippet.py
├── spans.py
├── train.py
└── vocab.py
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/.DS_Store
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 | # Datasource local storage ignored files
7 | /dataSources/
8 | /dataSources.local.xml
9 |
--------------------------------------------------------------------------------
/.idea/S2GSL-main.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/.idea/deployment.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/webServers.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
20 |
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # S2GSL
2 |
3 |
4 | This repository contains Pytorch implementation for "S2GSL: Incorporating Segment to Syntactic Enhanced Graph Structure Learning for Aspect-based Sentiment Analysis" (ACL2024)
5 |
6 |
7 |
8 | ## Requirements
9 | ```
10 | python = 3.8.10
11 | torch == 2.0.0+cu118
12 | transformers == 4.3.3
13 | packaging == 21.3
14 | scikit-learn == 1.3.2
15 | ```
16 |
17 | ## Getting Started
18 | 1. Prepare data
19 |
20 | We follow the dataset setting in https://github.com/CCIIPLab/BiSyn_GAT_plus, and provide the parsed data at directory **data/V2** (or, you can use ***preprocess_file*** function in ```parse_tree.py``` to preprocess on your own)
21 |
22 | 2. Download pre-trained [BERT-Base English](https://drive.google.com/drive/folders/1sbwkL3NQ8c7I0vugAO-HuLmg5SiO2iPS?usp=sharing) to **pretrain_model/bert-base-uncased**
23 |
24 | Download [stanford corenlp](https://drive.google.com/drive/folders/12epkro2pU8ICURm9eWMjv7uMbWC0wQiK?usp=drive_link) to **data/V2/stanford-corenlp**.
25 |
26 |
27 | 3. Train
28 |
29 | ```
30 | bash run_bash/start.sh
31 | ```
32 |
33 | # Credit
34 | The code and datasets in this repository are based on [Bisyn-gat+](https://github.com/CCIIPLab/BiSyn_GAT_plus).
35 |
36 | ## Citation
37 |
38 | **Please kindly cite our paper if this paper and the code are helpful.**
39 |
40 | ```
41 | @inproceedings{chen-etal-2024-s2gsl,
42 | title = "{S}$^2${GSL}: Incorporating Segment to Syntactic Enhanced Graph Structure Learning for Aspect-based Sentiment Analysis",
43 | author = "Chen, Bingfeng and
44 | Ouyang, Qihan and
45 | Luo, Yongqi and
46 | Xu, Boyan and
47 | Cai, Ruichu and
48 | Hao, Zhifeng",
49 | editor = "Ku, Lun-Wei and
50 | Martins, Andre and
51 | Srikumar, Vivek",
52 | booktitle = "Proceedings of the 62nd Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
53 | month = aug,
54 | year = "2024",
55 | address = "Bangkok, Thailand",
56 | publisher = "Association for Computational Linguistics",
57 | url = "https://aclanthology.org/2024.acl-long.721",
58 | pages = "13366--13379",
59 | abstract = "Previous graph-based approaches in Aspect-based Sentiment Analysis(ABSA) have demonstrated impressive performance by utilizing graph neural networks and attention mechanisms to learn structures of static dependency trees and dynamic latent trees. However, incorporating both semantic and syntactic information simultaneously within complex global structures can introduce irrelevant contexts and syntactic dependencies during the process of graph structure learning, potentially resulting in inaccurate predictions. In order to address the issues above, we propose S$^2$GSL, incorporating Segment to Syntactic enhanced Graph Structure Learning for ABSA. Specifically, S$^2$GSL is featured with a segment-aware semantic graph learning and a syntax-based latent graph learning enabling the removal of irrelevant contexts and dependencies, respectively. We further propose a self-adaptive aggregation network that facilitates the fusion of two graph learning branches, thereby achieving complementarity across diverse structures. Experimental results on four benchmarks demonstrate the effectiveness of our framework.",
60 | }
61 | ```
62 |
--------------------------------------------------------------------------------
/attention.py:
--------------------------------------------------------------------------------
1 | import math
2 | import copy
3 | import torch
4 | import numpy as np
5 | import torch.nn as nn
6 | import torch.nn.functional as F
7 | from torch.nn import BCELoss
8 |
9 |
10 | def _getMatrixTree_multi(scores,root,is_multi_head):
11 | A=scores.exp()
12 | R=root.exp()
13 |
14 | if is_multi_head is True:
15 | L = torch.sum(A, 2)
16 | else:
17 | L = torch.sum(A, 1)
18 |
19 | L=torch.diag_embed(L)
20 | L=L-A #拉普拉斯矩阵
21 | if is_multi_head is True:
22 | R = R.squeeze(-1).unsqueeze(1).expand(A.size(0), A.size(1), A.size(2))
23 | else:
24 | R = R.squeeze(-1)
25 |
26 | LL=L+torch.diag_embed(R) #加了根概率的拉普拉斯矩阵
27 |
28 | LL_inv=torch.inverse((LL)) #得到LL的逆矩阵
29 | if is_multi_head is True:
30 | LL_inv_diag = torch.diagonal(LL_inv, 0, 2, 3)
31 | else:
32 | LL_inv_diag = torch.diagonal(LL_inv, 0, 1, 2)
33 |
34 | d0=R*LL_inv_diag #每个节点成为根的边际概率 B,h,L
35 |
36 | if is_multi_head is True:
37 | LL_inv_diag = torch.unsqueeze(LL_inv_diag, 3)
38 | else:
39 | LL_inv_diag = torch.unsqueeze(LL_inv_diag, 2)
40 |
41 |
42 | _A=torch.transpose(A,-2,-1)
43 | _A=_A*LL_inv_diag
44 | tmp1=torch.transpose(_A,-2,-1)
45 | tmp2=A*torch.transpose(LL_inv,-2,-1)
46 |
47 | d=tmp1-tmp2 #两个节点的边的边际概率 B,L,L
48 |
49 | return d,d0
50 |
class StructuredAttention(nn.Module):
    """Structured attention over a latent dependency tree.

    Projects tokens to key/query/root scores, converts the pairwise scores
    into marginal edge/root probabilities via the Matrix-Tree theorem
    (`_getMatrixTree_multi`), and attends with those marginals.
    """

    def __init__(self, args):
        super(StructuredAttention, self).__init__()
        self.model_dim = args.hidden_dim
        self.h = args.dynamic_tree_attn_head
        self.d_k = args.hidden_dim // args.dynamic_tree_attn_head
        self.device = args.device

        self.linear_keys = nn.Linear(args.hidden_dim, self.model_dim)
        self.linear_query = nn.Linear(args.hidden_dim, self.model_dim)
        self.linear_root = nn.Linear(args.hidden_dim, 1)  # per-token root score

    def forward(self, x, mask=None, roots_label=None, root_mask=None, dep_type_adj=None, is_multi_head=None):
        """Compute tree-structured attention.

        Args:
            x: (batch, seq, hidden) token representations.
            mask: padding mask using the 0 / -10000 convention (0 = keep);
                required by the masking arithmetic below.
            roots_label: optional gold root labels; together with root_mask a
                BCE root-prediction loss is computed, otherwise None is returned.
            root_mask: optional mask gating the root loss.
            dep_type_adj: optional additive bias added to the pairwise scores.
            is_multi_head: when True, scores/marginals are computed per head.

        Returns:
            (context, edge_marginals, root_marginals, loss_root) where
            loss_root is None when no supervision is provided.
        """
        key = self.linear_keys(x)
        query = self.linear_query(x)
        root = self.linear_root(x)
        batches = key.size(0)
        seq_len = key.size(1)  # renamed from `len` to stop shadowing the builtin

        query = query / math.sqrt(self.model_dim)
        if is_multi_head is True:
            key = key.view(batches, -1, self.h, self.d_k).transpose(1, 2)
            query = query.view(batches, -1, self.h, self.d_k).transpose(1, 2)

        scores = torch.matmul(query, key.transpose(-2, -1))

        if dep_type_adj is not None:
            scores = scores + dep_type_adj

        # Convert the 0 / -10000 padding mask to 0 / 1.
        mask = mask / -10000
        # Push padded positions' root scores far negative, clamp for stability.
        root = root - mask.unsqueeze(-1) * 50
        root = torch.clamp(root, min=-40)

        scores_mask = mask.unsqueeze(-1).repeat(1, 1, x.shape[1])
        if is_multi_head is True:
            scores_mask = scores_mask.unsqueeze(1).expand(batches, self.h, seq_len, seq_len)

        # Mask both rows and columns belonging to padded tokens.
        scores = scores - scores_mask * 50
        scores = scores - torch.transpose(scores_mask, -2, -1) * 50
        scores = torch.clamp(scores, min=-40)

        # d: edge marginals (B,[h,]L,L); d0: root marginals (B,[h,]L).
        d, d0 = _getMatrixTree_multi(scores, root, is_multi_head)

        # BUG FIX: loss_root was previously left unbound (NameError at the
        # return below) whenever roots_label or root_mask was None.
        loss_root = None
        if roots_label is not None:
            loss_fct = BCELoss(reduction='none')
            if root_mask is not None:
                active_labels = roots_label.view(-1)

                if is_multi_head is True:
                    loss_root_all = []
                    d0_all = [d0_temp.squeeze(1) for d0_temp in torch.split(d0, 1, dim=1)]
                    for i in range(self.h):
                        # Clamp probabilities away from {0,1} so BCE stays finite.
                        head_probs = torch.clamp(d0_all[i].contiguous().view(-1), 1e-5, 1 - 1e-5)
                        temp_loss = loss_fct(head_probs.to(torch.float32), active_labels.to(torch.float32))
                        loss_root_all.append(temp_loss)

                    loss_root = sum(loss_root_all)
                    loss_root = (loss_root * roots_label.view(-1).float()).mean()
                else:
                    active_logits = d0.view(-1)
                    active_logits = torch.clamp(active_logits, 1e-5, 1 - 1e-5)
                    loss_root = loss_fct(active_logits.to(torch.float32), active_labels.to(torch.float32))
                    loss_root = (loss_root * roots_label.view(-1).float()).mean()

        # Attend with the transposed edge marginals; zero padded cells.
        attn = torch.transpose(d, -2, -1)
        if mask is not None:
            scores_mask = scores_mask + torch.transpose(scores_mask, -2, -1)
            attn = attn.masked_fill(scores_mask.bool(), 0)

        if is_multi_head is True:
            x = x.unsqueeze(1).expand(batches, self.h, seq_len, self.model_dim)

        context = torch.matmul(attn, x)

        return context, d, d0, loss_root
143 |
144 |
145 |
146 |
def attention(query, key, mask=None, dropout=None):
    """Return scaled dot-product attention weights (no value aggregation).

    Args:
        query, key: (batch, heads, seq, d_k) projected tensors.
        mask: optional (batch, seq) padding mask with the 0 / -10000
            convention; masked rows and columns are driven to ~zero weight.
        dropout: optional nn.Dropout applied to the final weights.

    Returns:
        Softmax attention weights of shape (batch, heads, seq, seq).
    """
    d_k = query.size(-1)
    scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)

    if mask is not None:
        # Broadcast the per-token mask to a (B, h, L, L) grid.
        pad = mask.unsqueeze(-1).repeat(1, 1, query.shape[2])
        pad = pad.unsqueeze(1).expand(query.size(0), query.size(1), query.size(2), query.size(2))

        # Subtract 1e9 at masked rows, then at masked columns.
        scores = scores - pad.masked_fill(pad == -10000, 1e9)
        pad_t = torch.transpose(pad, -2, -1)
        pad_t = pad_t.masked_fill(pad_t == -10000, 1e9)
        scores = scores - pad_t

    weights = F.softmax(scores, dim=-1)

    if dropout is not None:
        weights = dropout(weights)
    return weights
166 |
167 |
def clones(module, N):
    """Return an nn.ModuleList holding N independent deep copies of `module`."""
    copies = [copy.deepcopy(module) for _ in range(N)]
    return nn.ModuleList(copies)
170 |
class MultiHeadAttention(nn.Module):
    """Multi-head attention that returns only the attention weight matrix.

    Query and key are linearly projected and split into heads; the weights
    come from the module-level `attention` helper (no value aggregation).
    """

    def __init__(self, args, dropout=0.1):
        super(MultiHeadAttention, self).__init__()
        self.model_dim = args.hidden_dim
        self.h = args.attention_heads
        self.d_k = args.hidden_dim // args.attention_heads
        self.dropout = nn.Dropout(p=dropout)
        # Two projections: one for the query stream, one for the key stream.
        self.linears = clones(nn.Linear(args.hidden_dim, args.hidden_dim), 2)

    def forward(self, query, key, mask=None):
        nbatches = query.size(0)

        projected = []
        for proj, tensor in zip(self.linears, (query, key)):
            projected.append(proj(tensor).view(nbatches, -1, self.h, self.d_k).transpose(1, 2))
        query, key = projected

        return attention(query, key, mask=mask, dropout=self.dropout)
188 |
189 |
190 |
class LocalAttention(nn.Module):
    """Segment-aware local multi-head attention.

    Learns soft left/right segment boundaries for every query token, combines
    them into a soft mask that localises the attention distribution, and
    returns a BCE loss that supervises the per-head attention against a gold
    span matrix.
    """

    def __init__(self, args, dropout=0.1):
        super(LocalAttention, self).__init__()
        self.model_dim = args.hidden_dim
        self.h = args.attention_heads
        self.d_k = args.hidden_dim // args.attention_heads
        self.dropout = nn.Dropout(p=dropout)
        self.device = args.device

        # Left-boundary projections
        self.linear_keys_left = nn.Linear(args.hidden_dim, self.model_dim)
        self.linear_query_left = nn.Linear(args.hidden_dim, self.model_dim)

        # Right-boundary projections
        self.linear_keys_right = nn.Linear(args.hidden_dim, self.model_dim)
        self.linear_query_right = nn.Linear(args.hidden_dim, self.model_dim)

        # Attention-score projections
        self.linear_keys = nn.Linear(args.hidden_dim, self.model_dim)
        self.linear_query = nn.Linear(args.hidden_dim, self.model_dim)

    def forward(self, query, key, mask=None, span_matrix=None, aspect_mask=None):
        """Compute localised attention and the span supervision loss.

        Args:
            query, key: (batch, seq, hidden) token representations.
            mask: padding mask with the 0 / -10000 convention (required).
            span_matrix: gold span supervision, (heads, batch, seq, seq).
            aspect_mask: unused; kept for interface compatibility.

        Returns:
            (p_local_attn, span_loss): attention of shape
            (batch, heads, seq, seq) and a scalar BCE loss.
        """
        batch_size = query.size(0)
        seq_len = query.size(1)

        # Left-boundary penalty: 0 on/below the diagonal, 1e9 above it.
        left_boundary = np.ones([batch_size, self.h, seq_len, seq_len])
        left_boundary = np.tril(left_boundary, k=0)
        left_boundary = np.where(left_boundary == 0, 1e9, left_boundary)
        left_boundary = np.where(left_boundary == 1, 0, left_boundary)
        left_boundary = torch.tensor(left_boundary)
        # BUG FIX: was `.cuda(0)`, which hard-coded GPU 0 and crashed on CPU
        # (and on any other device); follow the input tensor's device instead.
        left_boundary = left_boundary.to(query.device)
        right_boundary = torch.transpose(left_boundary, -2, -1)

        # Left-boundary distribution theta_l.
        key_left = self.linear_keys_left(key)
        query_left = self.linear_query_left(query)
        key_left = key_left.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query_left = query_left.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query_left = query_left / math.sqrt(self.model_dim)
        left_scores = torch.matmul(query_left, key_left.transpose(-2, -1))
        theta_l = F.softmax(left_scores - left_boundary, dim=-1)

        # Right-boundary distribution theta_r.
        key_right = self.linear_keys_right(key)
        query_right = self.linear_query_right(query)
        key_right = key_right.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query_right = query_right.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query_right = query_right / math.sqrt(self.model_dim)
        right_scores = torch.matmul(query_right, key_right.transpose(-2, -1))
        theta_r = F.softmax(right_scores - right_boundary, dim=-1)

        # Soft segment mask via cumulative sums of the boundary distributions.
        downTri_M = np.ones([batch_size, self.h, seq_len, seq_len])
        downTri_M = np.tril(downTri_M, k=0)
        downTri_M = torch.tensor(downTri_M)
        downTri_M = downTri_M.to(query.device)  # BUG FIX: was `.cuda(0)`
        upperTri_M = torch.transpose(downTri_M, -2, -1)
        soft_Mask_l = torch.matmul(theta_l, upperTri_M)
        soft_Mask_r = torch.matmul(theta_r, downTri_M)
        soft_Mask = soft_Mask_l * soft_Mask_r

        # Plain self-attention scores.
        key = self.linear_keys(key)
        query = self.linear_query(query)
        key = key.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query = query.view(batch_size, -1, self.h, self.d_k).transpose(1, 2)
        query = query / math.sqrt(self.model_dim)
        att_scores = torch.matmul(query, key.transpose(-2, -1))

        # Mask padded rows, then padded columns (mask entries are 0 / -10000).
        scores_mask = mask.unsqueeze(-1).repeat(1, 1, query.shape[2])
        scores_mask = scores_mask.unsqueeze(1).expand(query.size(0), query.size(1), query.size(2), query.size(2))
        att_scores = att_scores - scores_mask.masked_fill(scores_mask == -10000, 1e9)
        scores_mask_T = torch.transpose(scores_mask, -2, -1)
        scores_mask_T = scores_mask_T.masked_fill(scores_mask_T == -10000, 1e9)
        att_scores = att_scores - scores_mask_T

        p_local_attn = F.softmax(att_scores * soft_Mask, dim=-1)  # (batch, heads, seq, seq)

        # Span supervision: per-head BCE between attention and gold spans.
        span_matrix = span_matrix.transpose(0, 1)
        p_attn_all = [p_attn_temp.squeeze(1) for p_attn_temp in torch.split(p_local_attn, 1, dim=1)]
        span_matrix_all = [span_matrix_temp.squeeze(1) for span_matrix_temp in torch.split(span_matrix, 1, dim=1)]
        span_loss_all = []
        loss_fct = BCELoss(reduction='none')
        for i in range(self.h):
            p_attn_all[i] = torch.sigmoid(p_attn_all[i])
            temp_loss = loss_fct(p_attn_all[i].view(-1).to(torch.float32), span_matrix_all[i].view(-1).to(torch.float32))
            loss_mean = torch.mean(temp_loss, dim=-1)
            span_loss_all.append(loss_mean)

        span_loss = sum(span_loss_all) / len(span_loss_all)

        return p_local_attn, span_loss
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
--------------------------------------------------------------------------------
/data/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/.DS_Store
--------------------------------------------------------------------------------
/data/V2/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/.DS_Store
--------------------------------------------------------------------------------
/data/V2/Laptops/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Laptops/.DS_Store
--------------------------------------------------------------------------------
/data/V2/Laptops/dep_type.json:
--------------------------------------------------------------------------------
1 | ["nmod", "conj", "csubjpass", "ccomp", "compound", "xcomp", "punct", "dobj", "nummod", "advcl", "case", "mwe", "appos", "nmod:npmod", "mark", "det:predet", "root", "det", "cc", "parataxis", "expl", "advmod", "nsubj", "csubj", "nsubjpass", "cop", "dep", "nmod:tmod", "cc:preconj", "discourse", "neg", "compound:prt", "iobj", "acl", "ROOT", "acl:relcl", "nmod:poss", "amod", "aux", "auxpass"]
--------------------------------------------------------------------------------
/data/V2/Laptops/valid.txt.dep:
--------------------------------------------------------------------------------
1 | 0 4 ROOT
2 | 2 1 compound
3 | 4 2 nsubj
4 | 4 3 cop
5 | 4 5 advmod
6 | 4 6 punct
7 | 8 7 advmod
8 | 4 8 advmod
9 | 11 9 case
10 | 11 10 nummod
11 | 8 11 nmod
12 | 14 12 case
13 | 14 13 nummod
14 | 11 14 nmod
15 | 4 15 punct
16 |
17 | 0 5 ROOT
18 | 2 1 compound
19 | 5 2 nsubj
20 | 5 3 aux
21 | 5 4 neg
22 | 7 6 det
23 | 5 7 dobj
24 | 10 8 mark
25 | 10 9 nsubj
26 | 5 10 advcl
27 | 12 11 nmod:poss
28 | 10 12 dobj
29 | 15 13 case
30 | 15 14 dep
31 | 10 15 nmod
32 | 15 16 advmod
33 | 5 17 punct
34 |
35 | 0 4 ROOT
36 | 4 1 csubj
37 | 1 2 compound:prt
38 | 4 3 cop
39 | 4 5 punct
40 |
41 | 0 3 ROOT
42 | 3 1 aux
43 | 3 2 neg
44 | 6 4 det
45 | 6 5 amod
46 | 3 6 dobj
47 | 10 7 nummod
48 | 7 8 cc
49 | 7 9 conj
50 | 6 10 dep
51 | 3 11 punct
52 |
53 | 0 24 ROOT
54 | 15 1 dep
55 | 6 2 mark
56 | 6 3 neg
57 | 6 4 cop
58 | 6 5 det
59 | 15 6 dep
60 | 9 7 case
61 | 9 8 compound
62 | 6 9 nmod
63 | 15 10 dep
64 | 12 11 compound
65 | 15 12 amod
66 | 14 13 det
67 | 15 14 dep
68 | 24 15 advcl
69 | 15 16 cc
70 | 20 17 det
71 | 20 18 amod
72 | 20 19 amod
73 | 15 20 conj
74 | 24 21 punct
75 | 24 22 nsubj
76 | 24 23 cop
77 | 28 25 mark
78 | 28 26 nsubj
79 | 28 27 mark
80 | 24 28 advcl
81 | 28 29 dobj
82 | 32 30 case
83 | 32 31 det
84 | 29 32 nmod
85 | 36 33 nsubj
86 | 36 34 aux
87 | 36 35 neg
88 | 29 36 acl:relcl
89 | 36 37 punct
90 | 39 38 advmod
91 | 36 39 advcl
92 | 44 40 det
93 | 44 41 amod
94 | 41 42 compound
95 | 44 43 compound
96 | 39 44 dobj
97 | 24 45 punct
98 |
99 | 0 8 ROOT
100 | 6 1 neg
101 | 6 2 compound
102 | 6 3 compound
103 | 6 4 nummod
104 | 6 5 compound
105 | 8 6 nsubjpass
106 | 8 7 auxpass
107 | 8 9 punct
108 |
109 | 0 5 ROOT
110 | 5 1 nsubj
111 | 5 2 cop
112 | 5 3 advmod
113 | 5 4 punct
114 | 5 6 punct
115 | 5 7 cc
116 | 5 8 conj
117 | 10 9 mark
118 | 8 10 xcomp
119 | 5 11 punct
120 |
121 | 0 8 ROOT
122 | 8 1 nsubj
123 | 1 2 advmod
124 | 1 3 punct
125 | 1 4 cc
126 | 1 5 conj
127 | 8 6 cop
128 | 8 7 advmod
129 | 8 9 dep
130 | 9 10 dep
131 | 10 11 advmod
132 | 15 12 case
133 | 15 13 det
134 | 15 14 compound
135 | 11 15 nmod
136 | 8 16 punct
137 |
138 | 0 5 ROOT
139 | 5 1 advmod
140 | 5 2 nsubj
141 | 5 3 cop
142 | 5 4 neg
143 | 5 6 cc
144 | 5 7 conj
145 | 5 8 cc
146 | 10 9 det
147 | 11 10 nsubj
148 | 5 11 conj
149 | 11 12 compound:prt
150 | 14 13 case
151 | 11 14 nmod
152 | 16 15 nummod
153 | 11 16 dobj
154 | 5 17 punct
155 |
156 | 0 3 ROOT
157 | 3 1 nsubjpass
158 | 3 2 auxpass
159 | 7 4 case
160 | 7 5 det
161 | 7 6 amod
162 | 3 7 nmod
163 | 12 8 case
164 | 12 9 punct
165 | 12 10 amod
166 | 12 11 compound
167 | 7 12 nmod
168 | 12 13 cc
169 | 17 14 det
170 | 17 15 amod
171 | 17 16 compound
172 | 12 17 conj
173 | 17 18 nummod
174 | 18 19 amod
175 | 21 20 nummod
176 | 19 21 dep
177 | 21 22 nummod
178 | 3 23 punct
179 |
180 | 0 7 ROOT
181 | 3 1 det
182 | 3 2 compound
183 | 7 3 nsubj
184 | 7 4 aux
185 | 7 5 neg
186 | 7 6 advmod
187 | 10 8 det
188 | 10 9 amod
189 | 7 10 dobj
190 | 7 11 punct
191 |
192 | 0 1 ROOT
193 | 1 2 xcomp
194 | 4 3 nummod
195 | 2 4 dobj
196 | 7 5 case
197 | 7 6 nmod:poss
198 | 4 7 nmod
199 | 10 8 advmod
200 | 10 9 advmod
201 | 1 10 xcomp
202 | 12 11 mark
203 | 10 12 ccomp
204 | 1 13 punct
205 |
206 | 0 15 ROOT
207 | 15 1 advmod
208 | 15 2 punct
209 | 4 3 case
210 | 15 4 nmod
211 | 6 5 case
212 | 4 6 nmod
213 | 4 7 acl
214 | 9 8 det
215 | 7 9 dobj
216 | 15 10 punct
217 | 13 11 det
218 | 13 12 compound
219 | 15 13 nsubjpass
220 | 15 14 auxpass
221 | 17 16 advmod
222 | 15 17 advmod
223 | 15 18 punct
224 |
225 | 0 9 ROOT
226 | 2 1 amod
227 | 9 2 nsubj
228 | 2 3 punct
229 | 5 4 amod
230 | 2 5 conj
231 | 2 6 cc
232 | 2 7 conj
233 | 9 8 advmod
234 | 9 10 punct
235 |
236 | 0 6 ROOT
237 | 2 1 advmod
238 | 6 2 nsubj
239 | 5 3 dobj
240 | 5 4 nsubj
241 | 2 5 acl:relcl
242 | 14 7 mark
243 | 10 8 det
244 | 10 9 compound
245 | 14 10 nsubj
246 | 14 11 cop
247 | 14 12 neg
248 | 14 13 advmod
249 | 6 14 ccomp
250 | 16 15 case
251 | 14 16 nmod
252 | 14 17 punct
253 | 20 18 nsubj
254 | 20 19 advmod
255 | 14 20 ccomp
256 | 22 21 det
257 | 20 22 dobj
258 | 20 23 advmod
259 | 23 24 cc
260 | 23 25 conj
261 | 27 26 case
262 | 20 27 nmod
263 | 14 28 punct
264 | 14 29 cc
265 | 36 30 advmod
266 | 32 31 nmod:poss
267 | 36 32 nsubj
268 | 36 33 cop
269 | 35 34 det
270 | 36 35 nmod:npmod
271 | 14 36 conj
272 | 6 37 punct
273 |
274 | 0 3 ROOT
275 | 3 1 nsubj
276 | 3 2 cop
277 | 3 4 advmod
278 | 3 5 cc
279 | 3 6 conj
280 | 8 7 amod
281 | 6 8 dobj
282 | 3 9 punct
283 |
284 | 0 8 ROOT
285 | 8 1 cc
286 | 4 2 det
287 | 4 3 compound
288 | 8 4 nsubj
289 | 8 5 cop
290 | 8 6 advmod
291 | 8 7 advmod
292 | 8 9 punct
293 |
294 | 0 2 ROOT
295 | 2 1 amod
296 | 6 3 mark
297 | 6 4 nsubj
298 | 6 5 advmod
299 | 2 6 advcl
300 | 9 7 case
301 | 9 8 nmod:poss
302 | 6 9 nmod
303 | 2 10 punct
304 |
305 | 0 5 ROOT
306 | 3 1 det
307 | 3 2 compound
308 | 5 3 nsubj
309 | 5 4 cop
310 | 5 6 punct
311 | 8 7 nummod
312 | 5 8 dep
313 | 10 9 mark
314 | 8 10 acl
315 | 5 11 punct
316 |
317 | 0 3 ROOT
318 | 3 1 nsubj
319 | 3 2 aux
320 | 5 4 nmod:poss
321 | 3 5 dobj
322 | 8 6 case
323 | 8 7 nummod
324 | 3 8 nmod
325 | 3 9 advmod
326 | 3 10 cc
327 | 12 11 nsubj
328 | 3 12 conj
329 | 12 13 advmod
330 | 3 14 punct
331 |
332 | 0 7 ROOT
333 | 7 1 cc
334 | 7 2 nsubj
335 | 7 3 aux
336 | 7 4 cop
337 | 7 5 det
338 | 7 6 amod
339 | 7 8 cc
340 | 13 9 nsubj
341 | 13 10 cop
342 | 12 11 advmod
343 | 13 12 amod
344 | 7 13 conj
345 | 13 14 nummod
346 | 7 15 punct
347 |
348 | 0 5 ROOT
349 | 2 1 det
350 | 5 2 nsubj
351 | 5 3 cop
352 | 5 4 advmod
353 | 5 6 punct
354 |
355 | 0 4 ROOT
356 | 2 1 nmod:poss
357 | 4 2 nsubj
358 | 4 3 cop
359 | 4 5 cc
360 | 7 6 det
361 | 9 7 nsubj
362 | 9 8 cop
363 | 4 9 conj
364 | 4 10 punct
365 |
366 | 0 3 ROOT
367 | 3 1 nsubj
368 | 3 2 aux
369 | 8 4 mark
370 | 8 5 nsubjpass
371 | 8 6 auxpass
372 | 8 7 advmod
373 | 3 8 ccomp
374 | 11 9 case
375 | 11 10 det
376 | 8 11 nmod
377 | 16 12 dobj
378 | 14 13 det
379 | 16 14 nsubj
380 | 16 15 aux
381 | 11 16 acl:relcl
382 | 3 17 punct
383 |
384 | 0 3 ROOT
385 | 2 1 det
386 | 3 2 nsubj
387 | 5 4 advmod
388 | 7 5 nummod
389 | 7 6 compound
390 | 3 7 dobj
391 | 3 8 punct
392 | 3 9 cc
393 | 16 10 nsubj
394 | 16 11 cop
395 | 16 12 dep
396 | 16 13 case
397 | 16 14 det
398 | 16 15 amod
399 | 3 16 conj
400 | 3 17 punct
401 |
402 | 0 2 ROOT
403 | 2 1 nsubj
404 | 4 3 advmod
405 | 5 4 advmod
406 | 6 5 amod
407 | 2 6 dobj
408 | 2 7 cc
409 | 9 8 det
410 | 12 9 nsubj
411 | 12 10 cop
412 | 12 11 advmod
413 | 2 12 conj
414 | 2 13 punct
415 |
416 | 0 1 ROOT
417 | 3 2 nsubj
418 | 1 3 acl:relcl
419 | 3 4 cc
420 | 7 5 nsubj
421 | 7 6 advmod
422 | 3 7 conj
423 | 3 8 cc
424 | 10 9 det
425 | 12 10 nsubj
426 | 12 11 cop
427 | 3 12 conj
428 | 1 13 punct
429 |
430 | 0 4 ROOT
431 | 4 1 nsubj
432 | 4 2 cop
433 | 4 3 neg
434 | 4 5 cc
435 | 8 6 det
436 | 8 7 compound
437 | 10 8 nsubj
438 | 10 9 cop
439 | 4 10 conj
440 | 13 11 case
441 | 13 12 det
442 | 10 13 nmod
443 | 15 14 det
444 | 13 15 amod
445 | 4 16 punct
446 |
447 | 0 4 ROOT
448 | 2 1 det
449 | 4 2 nsubj
450 | 4 3 cop
451 | 4 5 punct
452 | 8 6 nsubj
453 | 8 7 advmod
454 | 4 8 parataxis
455 | 4 9 punct
456 | 13 10 nsubj
457 | 13 11 cop
458 | 13 12 advmod
459 | 4 13 parataxis
460 | 15 14 mark
461 | 13 15 xcomp
462 | 15 16 compound:prt
463 | 4 17 punct
464 | 20 18 nsubj
465 | 20 19 cop
466 | 4 20 parataxis
467 | 4 21 punct
468 | 25 22 nmod:poss
469 | 25 23 advmod
470 | 25 24 compound
471 | 27 25 nsubj
472 | 27 26 aux
473 | 4 27 parataxis
474 | 27 28 nmod
475 | 4 29 punct
476 |
477 | 0 2 ROOT
478 | 2 1 nsubj
479 | 2 3 advmod
480 | 5 4 case
481 | 2 5 nmod
482 | 2 6 punct
483 |
484 | 0 6 ROOT
485 | 6 1 advmod
486 | 6 2 punct
487 | 6 3 nsubj
488 | 3 4 nummod
489 | 6 5 cop
490 | 6 7 punct
491 |
492 | 0 4 ROOT
493 | 4 1 nsubj
494 | 4 2 cop
495 | 4 3 advmod
496 | 4 5 cc
497 | 4 6 conj
498 | 8 7 case
499 | 6 8 nmod
500 | 8 9 cc
501 | 8 10 conj
502 | 4 11 punct
503 |
504 | 0 4 ROOT
505 | 2 1 amod
506 | 4 2 nsubj
507 | 4 3 cop
508 | 4 5 nummod
509 | 4 6 punct
510 |
511 | 0 6 ROOT
512 | 2 1 advmod
513 | 4 2 nummod
514 | 4 3 compound
515 | 6 4 nsubj
516 | 4 5 punct
517 | 6 7 xcomp
518 | 10 8 case
519 | 10 9 punct
520 | 7 10 acl
521 | 6 11 punct
522 |
523 | 0 2 ROOT
524 | 2 1 nsubj
525 | 6 3 det:predet
526 | 6 4 det
527 | 6 5 amod
528 | 2 6 dobj
529 | 6 7 cc
530 | 9 8 advmod
531 | 6 9 conj
532 | 9 10 cc
533 | 13 11 det
534 | 13 12 amod
535 | 9 13 conj
536 | 6 14 cc
537 | 6 15 conj
538 | 18 16 case
539 | 18 17 amod
540 | 15 18 nmod
541 | 2 19 punct
542 |
543 | 0 2 ROOT
544 | 2 1 amod
545 | 6 3 mark
546 | 6 4 nsubj
547 | 6 5 advmod
548 | 2 6 acl
549 | 8 7 case
550 | 6 8 nmod
551 | 2 9 punct
552 |
553 | 0 4 ROOT
554 | 2 1 det
555 | 4 2 nsubj
556 | 4 3 cop
557 | 6 5 case
558 | 4 6 nmod
559 | 9 7 case
560 | 9 8 det
561 | 6 9 nmod
562 | 9 10 cc
563 | 9 11 conj
564 | 4 12 punct
565 |
566 | 0 4 ROOT
567 | 4 1 nsubj
568 | 4 2 cop
569 | 4 3 advmod
570 | 7 5 case
571 | 7 6 det
572 | 4 7 nmod
573 | 7 8 nmod:npmod
574 | 4 9 punct
575 |
576 | 0 4 ROOT
577 | 3 1 det
578 | 3 2 amod
579 | 4 3 nsubj
580 | 6 5 amod
581 | 4 6 dobj
582 | 4 7 cc
583 | 4 8 conj
584 | 8 9 iobj
585 | 8 10 dobj
586 | 15 11 mark
587 | 15 12 nsubj
588 | 15 13 aux
589 | 15 14 neg
590 | 10 15 ccomp
591 | 17 16 mark
592 | 15 17 xcomp
593 | 19 18 mark
594 | 17 19 xcomp
595 | 22 20 det
596 | 22 21 amod
597 | 19 22 dobj
598 | 25 23 case
599 | 25 24 nummod
600 | 19 25 nmod
601 | 4 26 punct
602 |
603 | 0 4 ROOT
604 | 2 1 det
605 | 4 2 nsubj
606 | 4 3 aux
607 | 4 5 punct
608 | 4 6 cc
609 | 12 7 advmod
610 | 12 8 nsubj
611 | 12 9 aux
612 | 12 10 cop
613 | 12 11 det
614 | 4 12 conj
615 | 14 13 mark
616 | 12 14 xcomp
617 | 16 15 det
618 | 14 16 dobj
619 | 20 17 case
620 | 19 18 advmod
621 | 20 19 amod
622 | 16 20 nmod
623 | 4 21 punct
624 |
625 | 0 4 ROOT
626 | 4 1 nsubj
627 | 4 2 aux
628 | 4 3 aux
629 | 4 5 iobj
630 | 7 6 nummod
631 | 4 7 dobj
632 | 7 8 dep
633 | 8 9 nsubj
634 | 13 10 neg
635 | 13 11 case
636 | 13 12 det
637 | 9 13 dep
638 | 16 14 mark
639 | 16 15 nsubj
640 | 13 16 ccomp
641 | 16 17 dobj
642 | 16 18 nmod:tmod
643 |
644 | 0 7 ROOT
645 | 2 1 compound
646 | 7 2 nsubj
647 | 2 3 nummod
648 | 5 4 case
649 | 2 5 nmod
650 | 7 6 cop
651 | 7 8 punct
652 | 11 9 advmod
653 | 11 10 case
654 | 7 11 nmod
655 | 7 12 punct
656 |
657 | 0 10 ROOT
658 | 10 1 cc
659 | 3 2 det
660 | 10 3 nsubj
661 | 6 4 case
662 | 6 5 compound
663 | 3 6 nmod
664 | 10 7 cop
665 | 10 8 det
666 | 10 9 amod
667 | 10 11 punct
668 |
669 | 0 5 ROOT
670 | 5 1 nsubj
671 | 5 2 aux
672 | 5 3 neg
673 | 5 4 advmod
674 | 5 6 xcomp
675 | 5 7 punct
676 | 9 8 nsubj
677 | 5 9 parataxis
678 | 11 10 amod
679 | 9 11 dobj
680 | 5 12 punct
681 |
682 | 0 3 ROOT
683 | 3 1 nsubj
684 | 3 2 aux
685 | 3 4 dobj
686 | 7 5 case
687 | 7 6 det
688 | 3 7 nmod
689 | 12 8 advmod
690 | 12 9 case
691 | 12 10 case
692 | 12 11 det
693 | 7 12 nmod
694 | 15 13 case
695 | 15 14 det
696 | 12 15 nmod
697 | 3 16 punct
698 | 3 17 punct
699 | 19 18 nsubj
700 | 3 19 parataxis
701 | 23 20 det
702 | 23 21 amod
703 | 21 22 compound:prt
704 | 19 23 dobj
705 | 23 24 cc
706 | 26 25 compound
707 | 23 26 conj
708 | 19 27 punct
709 | 29 28 det
710 | 31 29 nsubj
711 | 31 30 cop
712 | 19 31 parataxis
713 | 31 32 cc
714 | 31 33 conj
715 | 36 34 mark
716 | 36 35 aux
717 | 31 36 advcl
718 | 3 37 punct
719 |
720 | 0 7 ROOT
721 | 4 1 det
722 | 4 2 compound
723 | 4 3 compound
724 | 7 4 nsubj
725 | 7 5 cop
726 | 7 6 neg
727 | 9 8 mark
728 | 7 9 xcomp
729 | 9 10 compound:prt
730 | 14 11 mark
731 | 14 12 nsubj
732 | 14 13 cop
733 | 9 14 advcl
734 | 17 15 case
735 | 17 16 amod
736 | 14 17 nmod
737 | 7 18 punct
738 |
739 | 0 5 ROOT
740 | 5 1 advmod
741 | 5 2 punct
742 | 5 3 nsubj
743 | 5 4 aux
744 | 10 6 mark
745 | 10 7 nsubj
746 | 10 8 cop
747 | 10 9 punct
748 | 5 10 ccomp
749 | 10 11 punct
750 | 5 12 punct
751 |
752 | 0 1 ROOT
753 | 1 2 xcomp
754 | 4 3 amod
755 | 2 4 dobj
756 | 1 5 punct
757 |
758 | 0 2 ROOT
759 | 2 1 amod
760 | 2 3 cc
761 | 5 4 amod
762 | 2 5 conj
763 | 2 6 punct
764 |
765 | 0 5 ROOT
766 | 2 1 amod
767 | 5 2 nsubj
768 | 5 3 cop
769 | 5 4 advmod
770 | 5 6 punct
771 |
772 | 0 1 ROOT
773 | 1 2 punct
774 | 4 3 compound
775 | 1 4 dep
776 | 1 5 punct
777 |
778 | 0 7 ROOT
779 | 3 1 case
780 | 3 2 det
781 | 7 3 nmod
782 | 6 4 case
783 | 6 5 det
784 | 3 6 nmod
785 | 7 8 dobj
786 | 11 9 det
787 | 11 10 compound
788 | 12 11 nsubj
789 | 8 12 acl:relcl
790 | 12 13 dobj
791 | 12 14 advmod
792 | 7 15 punct
793 |
794 | 0 10 ROOT
795 | 10 1 dep
796 | 5 2 advmod
797 | 5 3 case
798 | 5 4 det
799 | 10 5 nsubj
800 | 5 6 cc
801 | 5 7 conj
802 | 10 8 cop
803 | 10 9 det
804 | 13 11 case
805 | 13 12 det
806 | 10 13 nmod
807 | 10 14 punct
808 |
809 | 0 2 ROOT
810 | 2 1 nsubj
811 | 10 3 case
812 | 10 4 det
813 | 10 5 compound
814 | 10 6 amod
815 | 10 7 compound
816 | 10 8 amod
817 | 10 9 compound
818 | 2 10 nmod
819 | 12 11 nummod
820 | 10 12 dep
821 | 14 13 compound
822 | 12 14 nummod
823 | 17 15 nsubj
824 | 17 16 advmod
825 | 12 17 acl:relcl
826 | 19 18 punct
827 | 17 19 dobj
828 | 19 20 punct
829 | 22 21 case
830 | 19 22 nmod
831 | 22 23 cc
832 | 22 24 conj
833 | 28 25 nsubjpass
834 | 28 26 auxpass
835 | 28 27 neg
836 | 22 28 acl:relcl
837 | 30 29 case
838 | 28 30 nmod
839 | 2 31 punct
840 |
841 | 0 3 ROOT
842 | 3 1 nsubj
843 | 3 2 cop
844 | 3 4 cc
845 | 3 5 conj
846 | 7 6 case
847 | 5 7 nmod
848 | 3 8 punct
849 |
850 | 0 3 ROOT
851 | 3 1 advmod
852 | 3 2 nsubj
853 | 5 4 mark
854 | 3 5 advcl
855 | 8 6 advmod
856 | 8 7 mark
857 | 5 8 ccomp
858 | 12 9 det
859 | 11 10 advmod
860 | 12 11 amod
861 | 8 12 dobj
862 | 14 13 nummod
863 | 12 14 dep
864 | 17 15 nsubj
865 | 17 16 aux
866 | 14 17 acl:relcl
867 | 17 18 xcomp
868 | 3 19 punct
869 |
870 | 0 4 ROOT
871 | 4 1 nsubjpass
872 | 4 2 auxpass
873 | 4 3 cop
874 | 8 5 case
875 | 8 6 det
876 | 8 7 amod
877 | 4 8 nmod
878 | 4 9 punct
879 | 4 10 cc
880 | 13 11 det
881 | 13 12 amod
882 | 16 13 nsubj
883 | 16 14 cop
884 | 16 15 det
885 | 4 16 conj
886 | 20 17 case
887 | 20 18 det
888 | 20 19 compound
889 | 16 20 nmod
890 | 4 21 punct
891 | 4 22 cc
892 | 25 23 mark
893 | 25 24 mark
894 | 4 25 conj
895 | 29 26 case
896 | 29 27 det
897 | 29 28 compound
898 | 25 29 nmod
899 | 4 30 punct
900 |
901 | 0 2 ROOT
902 | 2 1 det
903 | 5 3 advmod
904 | 5 4 nsubj
905 | 2 5 acl:relcl
906 | 7 6 compound
907 | 5 7 dobj
908 | 11 8 case
909 | 8 9 mwe
910 | 11 10 nmod:poss
911 | 5 11 nmod
912 | 11 12 cc
913 | 15 13 det
914 | 15 14 compound
915 | 11 15 conj
916 | 2 16 punct
917 |
918 | 0 5 ROOT
919 | 3 1 det
920 | 3 2 compound
921 | 5 3 nsubj
922 | 5 4 advmod
923 | 7 6 nsubj
924 | 5 7 ccomp
925 | 7 8 compound:prt
926 | 5 9 punct
927 |
928 | 0 4 ROOT
929 | 4 1 nsubj
930 | 4 2 cop
931 | 4 3 advmod
932 | 6 5 mark
933 | 4 6 xcomp
934 | 8 7 compound
935 | 6 8 dobj
936 | 4 9 punct
937 | 4 10 cc
938 | 12 11 compound
939 | 14 12 nsubjpass
940 | 14 13 auxpass
941 | 4 14 conj
942 | 16 15 advmod
943 | 14 16 advmod
944 | 4 17 punct
945 |
946 | 0 17 ROOT
947 | 17 1 cc
948 | 3 2 det
949 | 17 3 nsubj
950 | 7 4 mark
951 | 7 5 nsubj
952 | 7 6 aux
953 | 3 7 ccomp
954 | 11 8 det
955 | 11 9 nummod
956 | 11 10 punct
957 | 7 11 dobj
958 | 16 12 case
959 | 16 13 det
960 | 16 14 compound
961 | 16 15 amod
962 | 7 16 nmod
963 | 19 18 amod
964 | 17 19 xcomp
965 | 21 20 case
966 | 19 21 nmod
967 | 17 22 punct
968 |
969 | 0 2 ROOT
970 | 2 1 nsubj
971 | 5 3 det
972 | 5 4 compound
973 | 2 5 dobj
974 | 7 6 advmod
975 | 8 7 amod
976 | 10 8 nsubj
977 | 10 9 aux
978 | 5 10 acl:relcl
979 | 13 11 case
980 | 13 12 compound
981 | 10 13 nmod
982 | 10 14 cc
983 | 10 15 conj
984 | 18 16 det
985 | 18 17 compound
986 | 15 18 dobj
987 | 22 19 case
988 | 22 20 det
989 | 22 21 amod
990 | 15 22 nmod
991 | 25 23 case
992 | 25 24 det
993 | 22 25 nmod
994 | 2 26 punct
995 |
996 | 0 7 ROOT
997 | 2 1 det
998 | 7 2 nsubj
999 | 5 3 case
1000 | 5 4 det
1001 | 2 5 nmod
1002 | 7 6 aux
1003 | 7 8 xcomp
1004 | 11 9 case
1005 | 11 10 det
1006 | 8 11 dep
1007 | 7 12 punct
1008 |
1009 | 0 2 ROOT
1010 | 2 1 advmod
1011 | 2 3 cc
1012 | 2 4 conj
1013 | 2 5 punct
1014 |
1015 | 0 11 ROOT
1016 | 5 1 cc
1017 | 5 2 nsubj
1018 | 5 3 cop
1019 | 5 4 advmod
1020 | 11 5 advcl
1021 | 11 6 punct
1022 | 8 7 det
1023 | 11 8 nsubj
1024 | 11 9 cop
1025 | 11 10 amod
1026 | 11 12 cc
1027 | 11 13 conj
1028 | 15 14 mark
1029 | 13 15 ccomp
1030 | 11 16 punct
1031 |
1032 | 0 3 ROOT
1033 | 3 1 advmod
1034 | 3 2 mark
1035 | 3 4 cc
1036 | 7 5 det
1037 | 7 6 amod
1038 | 3 7 conj
1039 | 3 8 punct
1040 |
1041 | 0 5 ROOT
1042 | 2 1 det
1043 | 5 2 nsubj
1044 | 5 3 cop
1045 | 5 4 advmod
1046 | 5 6 advmod
1047 | 5 7 punct
1048 |
1049 | 0 3 ROOT
1050 | 3 1 nsubj
1051 | 3 2 cop
1052 | 6 4 case
1053 | 6 5 compound
1054 | 3 6 nmod
1055 | 6 7 punct
1056 | 6 8 conj
1057 | 6 9 punct
1058 | 6 10 conj
1059 | 6 11 punct
1060 | 6 12 cc
1061 | 14 13 compound
1062 | 6 14 conj
1063 | 3 15 punct
1064 |
1065 | 0 5 ROOT
1066 | 5 1 nsubj
1067 | 5 2 cop
1068 | 5 3 det
1069 | 5 4 amod
1070 | 5 6 cc
1071 | 8 7 amod
1072 | 5 8 conj
1073 | 8 9 nummod
1074 | 5 10 dep
1075 | 5 11 punct
1076 |
1077 | 0 4 ROOT
1078 | 4 1 nsubj
1079 | 4 2 aux
1080 | 4 3 neg
1081 | 6 5 advmod
1082 | 7 6 amod
1083 | 4 7 dobj
1084 | 7 8 nummod
1085 | 4 9 punct
1086 |
1087 | 0 5 ROOT
1088 | 2 1 compound
1089 | 5 2 nsubj
1090 | 5 3 cop
1091 | 5 4 advmod
1092 | 5 6 punct
1093 | 9 7 case
1094 | 9 8 nummod
1095 | 5 9 nmod
1096 | 5 10 punct
1097 |
1098 | 0 3 ROOT
1099 | 3 1 advmod
1100 | 3 2 amod
1101 | 3 4 cc
1102 | 3 5 conj
1103 |
1104 | 0 9 ROOT
1105 | 4 1 amod
1106 | 4 2 compound
1107 | 4 3 amod
1108 | 9 4 nsubj
1109 | 6 5 det
1110 | 9 6 dobj
1111 | 9 7 aux
1112 | 9 8 nsubj
1113 | 9 10 nmod
1114 | 9 11 punct
1115 |
1116 | 0 4 ROOT
1117 | 2 1 det
1118 | 4 2 nsubj
1119 | 4 3 cop
1120 | 4 5 punct
1121 |
1122 | 0 4 ROOT
1123 | 4 1 nsubj
1124 | 4 2 cop
1125 | 4 3 advmod
1126 | 7 5 case
1127 | 7 6 det
1128 | 4 7 nmod
1129 | 4 8 punct
1130 |
1131 | 0 4 ROOT
1132 | 4 1 cc
1133 | 4 2 compound
1134 | 4 3 nummod
1135 | 7 5 case
1136 | 7 6 det
1137 | 4 7 nmod
1138 | 4 8 punct
1139 |
1140 | 0 3 ROOT
1141 | 3 1 amod
1142 | 3 2 amod
1143 | 3 4 punct
1144 | 6 5 amod
1145 | 3 6 conj
1146 | 3 7 punct
1147 | 9 8 amod
1148 | 3 9 conj
1149 | 3 10 punct
1150 | 3 11 cc
1151 | 13 12 amod
1152 | 3 13 conj
1153 | 3 14 punct
1154 |
1155 | 0 3 ROOT
1156 | 3 1 mark
1157 | 3 2 nsubj
1158 | 5 4 advmod
1159 | 3 5 xcomp
1160 | 5 6 cc
1161 | 8 7 compound
1162 | 10 8 nsubj
1163 | 10 9 cop
1164 | 5 10 conj
1165 | 3 11 punct
1166 |
1167 | 0 1 ROOT
1168 | 1 2 dobj
1169 | 2 3 nummod
1170 | 1 4 cc
1171 | 1 5 conj
1172 | 5 6 dobj
1173 | 1 7 punct
1174 |
1175 | 0 5 ROOT
1176 | 5 1 csubj
1177 | 1 2 compound:prt
1178 | 5 3 cop
1179 | 5 4 det
1180 | 5 6 punct
1181 |
1182 | 0 5 ROOT
1183 | 5 1 cc
1184 | 5 2 nsubj
1185 | 5 3 aux
1186 | 5 4 neg
1187 | 5 6 dobj
1188 | 5 7 punct
1189 |
1190 | 0 6 ROOT
1191 | 6 1 nsubj
1192 | 6 2 cop
1193 | 6 3 advmod
1194 | 6 4 case
1195 | 6 5 det
1196 | 8 7 mark
1197 | 6 8 acl
1198 | 11 9 case
1199 | 11 10 nmod:poss
1200 | 8 11 nmod
1201 | 6 12 punct
1202 |
1203 | 0 2 ROOT
1204 | 2 1 nsubj
1205 | 5 3 det
1206 | 5 4 amod
1207 | 2 5 dobj
1208 | 9 6 case
1209 | 9 7 amod
1210 | 9 8 compound
1211 | 5 9 nmod
1212 | 2 10 punct
1213 | 12 11 det
1214 | 2 12 dep
1215 | 12 13 punct
1216 | 16 14 det
1217 | 16 15 amod
1218 | 12 16 appos
1219 | 18 17 case
1220 | 16 18 nmod
1221 | 18 19 cc
1222 | 18 20 conj
1223 | 22 21 case
1224 | 20 22 nmod
1225 | 24 23 case
1226 | 22 24 nmod
1227 | 2 25 punct
1228 |
1229 | 0 2 ROOT
1230 | 2 1 nsubj
1231 | 2 3 dobj
1232 | 7 4 mark
1233 | 7 5 nsubj
1234 | 7 6 cop
1235 | 2 7 advcl
1236 | 7 8 cc
1237 | 7 9 conj
1238 | 7 10 punct
1239 | 14 11 det
1240 | 14 12 amod
1241 | 14 13 amod
1242 | 17 14 nsubj
1243 | 17 15 cop
1244 | 17 16 advmod
1245 | 7 17 parataxis
1246 | 2 18 punct
1247 |
1248 | 0 2 ROOT
1249 | 2 1 nsubj
1250 | 4 3 det
1251 | 2 4 dobj
1252 | 9 5 mark
1253 | 9 6 cop
1254 | 9 7 punct
1255 | 9 8 advmod
1256 | 2 9 xcomp
1257 | 11 10 mark
1258 | 9 11 xcomp
1259 | 11 12 compound:prt
1260 |
1261 | 0 1 ROOT
1262 | 1 2 dobj
1263 | 1 3 dep
1264 | 6 4 advmod
1265 | 6 5 nsubj
1266 | 3 6 advcl
1267 | 8 7 det
1268 | 6 8 dobj
1269 | 1 9 punct
1270 |
1271 | 0 4 ROOT
1272 | 2 1 det
1273 | 4 2 nsubj
1274 | 4 3 cop
1275 | 4 5 cc
1276 | 4 6 conj
1277 | 4 7 punct
1278 | 11 8 nsubj
1279 | 11 9 aux
1280 | 11 10 neg
1281 | 4 11 parataxis
1282 | 13 12 det
1283 | 11 13 dobj
1284 | 16 14 case
1285 | 16 15 compound
1286 | 13 16 nmod
1287 | 4 17 punct
1288 |
1289 | 0 4 ROOT
1290 | 4 1 nsubj
1291 | 4 2 cop
1292 | 4 3 advmod
1293 | 4 5 punct
1294 | 8 6 det
1295 | 8 7 compound
1296 | 12 8 nsubj
1297 | 12 9 cop
1298 | 11 10 advmod
1299 | 12 11 advmod
1300 | 4 12 ccomp
1301 | 14 13 case
1302 | 12 14 nmod
1303 | 4 15 punct
1304 |
1305 | 0 5 ROOT
1306 | 2 1 det
1307 | 5 2 nsubj
1308 | 5 3 cop
1309 | 5 4 neg
1310 | 5 6 advmod
1311 | 5 7 punct
1312 |
1313 | 0 1 ROOT
1314 | 1 2 dobj
1315 | 4 3 case
1316 | 1 4 nmod
1317 | 1 5 punct
1318 |
1319 | 0 3 ROOT
1320 | 3 1 amod
1321 | 3 2 compound
1322 | 3 4 punct
1323 | 7 5 amod
1324 | 7 6 compound
1325 | 3 7 appos
1326 | 3 8 punct
1327 | 10 9 amod
1328 | 3 10 appos
1329 | 3 11 punct
1330 |
1331 | 0 2 ROOT
1332 | 2 1 nsubj
1333 | 4 3 det
1334 | 2 4 dobj
1335 | 4 5 cc
1336 | 7 6 det
1337 | 4 7 conj
1338 | 2 8 punct
1339 |
1340 | 0 4 ROOT
1341 | 4 1 advmod
1342 | 4 2 punct
1343 | 4 3 expl
1344 | 6 5 amod
1345 | 4 6 nsubj
1346 | 9 7 case
1347 | 9 8 det
1348 | 6 9 nmod
1349 | 11 10 nsubj
1350 | 6 11 acl:relcl
1351 | 13 12 det
1352 | 11 13 dobj
1353 | 15 14 advmod
1354 | 11 15 xcomp
1355 | 4 16 punct
1356 |
1357 | 0 4 ROOT
1358 | 4 1 nsubj
1359 | 4 2 aux
1360 | 4 3 advmod
1361 | 6 5 compound
1362 | 4 6 dobj
1363 | 6 7 cc
1364 | 6 8 conj
1365 | 10 9 advmod
1366 | 8 10 acl
1367 | 12 11 case
1368 | 10 12 nmod
1369 | 15 13 case
1370 | 15 14 det
1371 | 12 15 nmod
1372 | 4 16 punct
1373 |
1374 | 0 3 ROOT
1375 | 3 1 neg
1376 | 3 2 advmod
1377 | 8 4 mark
1378 | 8 5 nsubj
1379 | 8 6 aux
1380 | 8 7 aux
1381 | 3 8 dep
1382 | 11 9 case
1383 | 11 10 det
1384 | 8 11 nmod
1385 | 3 12 punct
1386 |
1387 | 0 1 ROOT
1388 | 4 2 case
1389 | 4 3 amod
1390 | 1 4 nmod
1391 | 4 5 cc
1392 | 4 6 conj
1393 | 1 7 punct
1394 |
1395 | 0 3 ROOT
1396 | 2 1 det
1397 | 3 2 nsubj
1398 | 5 4 advmod
1399 | 3 5 xcomp
1400 | 3 6 punct
1401 | 3 7 cc
1402 | 9 8 amod
1403 | 3 9 conj
1404 | 12 10 case
1405 | 12 11 compound
1406 | 9 12 nmod
1407 | 9 13 amod
1408 | 16 14 case
1409 | 16 15 nmod:poss
1410 | 13 16 nmod
1411 | 16 17 cc
1412 | 16 18 conj
1413 | 3 19 punct
1414 |
1415 | 0 3 ROOT
1416 | 3 1 nsubj
1417 | 3 2 aux
1418 | 3 4 compound:prt
1419 | 7 5 det
1420 | 7 6 amod
1421 | 3 7 dobj
1422 | 12 8 case
1423 | 12 9 det
1424 | 12 10 compound
1425 | 12 11 nummod
1426 | 7 12 nmod
1427 | 16 13 dobj
1428 | 16 14 nsubj
1429 | 16 15 advmod
1430 | 12 16 acl:relcl
1431 | 3 17 punct
1432 |
1433 | 0 9 ROOT
1434 | 9 1 csubj
1435 | 1 2 compound:prt
1436 | 5 3 case
1437 | 5 4 det
1438 | 1 5 nmod
1439 | 5 6 cc
1440 | 5 7 conj
1441 | 9 8 cop
1442 | 11 10 mark
1443 | 9 11 xcomp
1444 | 9 12 punct
1445 |
1446 | 0 7 ROOT
1447 | 7 1 advmod
1448 | 7 2 punct
1449 | 7 3 nsubj
1450 | 7 4 cop
1451 | 7 5 det
1452 | 7 6 amod
1453 | 7 8 punct
1454 | 7 9 cc
1455 | 12 10 nsubjpass
1456 | 12 11 auxpass
1457 | 7 12 conj
1458 | 14 13 case
1459 | 12 14 nmod
1460 | 16 15 nsubj
1461 | 14 16 acl:relcl
1462 | 16 17 advmod
1463 | 7 18 punct
1464 |
1465 | 0 1 ROOT
1466 | 1 2 cc
1467 | 4 3 aux
1468 | 1 4 conj
1469 | 7 5 case
1470 | 7 6 compound
1471 | 4 7 nmod
1472 | 1 8 punct
1473 |
1474 | 0 1 ROOT
1475 | 4 2 det
1476 | 4 3 amod
1477 | 1 4 dobj
1478 | 4 5 acl
1479 | 7 6 mark
1480 | 5 7 xcomp
1481 | 9 8 case
1482 | 7 9 nmod
1483 | 12 10 case
1484 | 12 11 compound
1485 | 9 12 nmod
1486 | 9 13 cc
1487 | 9 14 conj
1488 | 14 15 advmod
1489 | 1 16 punct
1490 |
1491 | 0 4 ROOT
1492 | 4 1 nsubjpass
1493 | 4 2 auxpass
1494 | 4 3 advmod
1495 | 13 5 mark
1496 | 7 6 det
1497 | 13 7 nsubj
1498 | 11 8 case
1499 | 11 9 det
1500 | 11 10 compound
1501 | 7 11 nmod
1502 | 13 12 cop
1503 | 4 13 ccomp
1504 | 4 14 punct
1505 |
1506 | 0 7 ROOT
1507 | 3 1 det
1508 | 3 2 compound
1509 | 7 3 nsubj
1510 | 7 4 aux
1511 | 7 5 cop
1512 | 7 6 advmod
1513 | 7 8 punct
1514 | 12 9 nsubj
1515 | 12 10 aux
1516 | 12 11 neg
1517 | 7 12 ccomp
1518 | 16 13 mark
1519 | 16 14 nsubj
1520 | 16 15 advmod
1521 | 12 16 advcl
1522 | 19 17 case
1523 | 19 18 nummod
1524 | 16 19 nmod
1525 | 16 20 punct
1526 | 16 21 dobj
1527 | 7 22 punct
1528 |
1529 | 0 4 ROOT
1530 | 4 1 nsubj
1531 | 4 2 cop
1532 | 4 3 advmod
1533 | 4 5 dep
1534 | 7 6 det
1535 | 5 7 root
1536 | 10 8 advmod
1537 | 10 9 advmod
1538 | 7 10 dep
1539 | 10 11 cc
1540 | 15 12 mark
1541 | 14 13 det
1542 | 15 14 nsubj
1543 | 10 15 conj
1544 | 18 16 det
1545 | 18 17 compound
1546 | 15 18 dobj
1547 | 7 19 punct
1548 |
1549 | 0 3 ROOT
1550 | 3 1 advmod
1551 | 3 2 nsubj
1552 | 6 4 nummod
1553 | 6 5 compound
1554 | 3 6 dobj
1555 | 3 7 cc
1556 | 9 8 nsubj
1557 | 3 9 conj
1558 | 9 10 compound:prt
1559 | 12 11 det
1560 | 9 12 nmod:tmod
1561 | 14 13 mark
1562 | 9 14 advcl
1563 | 3 15 punct
1564 |
1565 | 0 2 ROOT
1566 | 2 1 nsubj
1567 | 2 3 advmod
1568 | 6 4 dobj
1569 | 6 5 nsubj
1570 | 2 6 ccomp
1571 | 2 7 punct
1572 | 2 8 cc
1573 | 10 9 nsubj
1574 | 2 10 conj
1575 | 14 11 dep
1576 | 14 12 det
1577 | 14 13 amod
1578 | 10 14 dobj
1579 | 14 15 cc
1580 | 14 16 conj
1581 | 2 17 punct
1582 |
1583 | 0 3 ROOT
1584 | 3 1 aux
1585 | 3 2 nsubj
1586 | 5 4 det
1587 | 3 5 dobj
1588 | 7 6 nsubj
1589 | 5 7 acl:relcl
1590 | 9 8 det
1591 | 7 9 dobj
1592 | 12 10 case
1593 | 12 11 det
1594 | 9 12 nmod
1595 | 3 13 punct
1596 |
1597 | 0 10 ROOT
1598 | 2 1 nsubj
1599 | 10 2 ccomp
1600 | 2 3 xcomp
1601 | 10 4 punct
1602 | 6 5 det
1603 | 10 6 nsubj
1604 | 10 7 cop
1605 | 10 8 neg
1606 | 10 9 advmod
1607 | 10 11 punct
1608 |
1609 | 0 2 ROOT
1610 | 2 1 mark
1611 | 2 3 dobj
1612 | 7 4 mark
1613 | 7 5 advmod
1614 | 7 6 nsubj
1615 | 3 7 ccomp
1616 | 7 8 punct
1617 | 7 9 cc
1618 | 13 10 det
1619 | 13 11 amod
1620 | 13 12 compound
1621 | 14 13 nsubj
1622 | 7 14 conj
1623 | 2 15 punct
1624 |
1625 | 0 4 ROOT
1626 | 4 1 nsubj
1627 | 4 2 cop
1628 | 4 3 advmod
1629 | 6 5 mark
1630 | 4 6 xcomp
1631 | 6 7 advmod
1632 | 10 8 case
1633 | 10 9 det
1634 | 6 10 nmod
1635 | 4 11 punct
1636 |
1637 | 0 8 ROOT
1638 | 8 1 aux
1639 | 8 2 nsubj
1640 | 4 3 nsubj
1641 | 2 4 acl:relcl
1642 | 4 5 dobj
1643 | 4 6 nmod
1644 | 8 7 punct
1645 | 12 9 det
1646 | 12 10 amod
1647 | 12 11 compound
1648 | 8 12 dobj
1649 | 8 13 cc
1650 | 18 14 nsubj
1651 | 18 15 aux
1652 | 18 16 neg
1653 | 18 17 cop
1654 | 8 18 conj
1655 | 8 19 punct
1656 |
1657 | 0 2 ROOT
1658 | 2 1 amod
1659 | 2 3 cc
1660 | 2 4 conj
1661 | 2 5 punct
1662 |
1663 | 0 2 ROOT
1664 | 2 1 nsubj
1665 | 4 3 compound
1666 | 2 4 dobj
1667 | 4 5 punct
1668 | 7 6 compound
1669 | 4 7 conj
1670 | 4 8 punct
1671 | 4 9 cc
1672 | 4 10 conj
1673 | 10 11 nummod
1674 | 10 12 advmod
1675 | 2 13 punct
1676 |
1677 | 0 3 ROOT
1678 | 3 1 nsubj
1679 | 3 2 aux
1680 | 6 4 det
1681 | 6 5 compound
1682 | 3 6 dobj
1683 | 6 7 cc
1684 | 6 8 conj
1685 |
1686 | 0 2 ROOT
1687 | 2 1 case
1688 | 5 3 mark
1689 | 5 4 nsubj
1690 | 2 5 ccomp
1691 | 7 6 amod
1692 | 5 7 dobj
1693 | 10 8 advmod
1694 | 10 9 advmod
1695 | 12 10 nsubj
1696 | 12 11 aux
1697 | 5 12 ccomp
1698 | 12 13 iobj
1699 | 12 14 dobj
1700 | 17 15 case
1701 | 17 16 nmod:poss
1702 | 14 17 nmod
1703 | 2 18 punct
1704 |
1705 | 0 6 ROOT
1706 | 6 1 nsubj
1707 | 6 2 aux
1708 | 6 3 cop
1709 | 6 4 det
1710 | 6 5 compound
1711 | 11 7 case
1712 | 11 8 det
1713 | 10 9 advmod
1714 | 11 10 amod
1715 | 6 11 nmod
1716 | 17 12 advmod
1717 | 12 13 cc
1718 | 12 14 conj
1719 | 17 15 aux
1720 | 17 16 auxpass
1721 | 6 17 acl
1722 | 21 18 case
1723 | 21 19 det
1724 | 21 20 amod
1725 | 17 21 nmod
1726 | 6 22 punct
1727 |
1728 | 0 9 ROOT
1729 | 3 1 nummod
1730 | 3 2 amod
1731 | 9 3 dep
1732 | 9 4 punct
1733 | 6 5 det
1734 | 9 6 nsubj
1735 | 9 7 aux
1736 | 9 8 neg
1737 | 11 10 mark
1738 | 9 11 advcl
1739 | 11 12 dobj
1740 | 11 13 cc
1741 | 17 14 nsubj
1742 | 17 15 cop
1743 | 17 16 neg
1744 | 11 17 conj
1745 | 21 18 mark
1746 | 21 19 nsubj
1747 | 21 20 aux
1748 | 17 21 advcl
1749 | 23 22 nsubj
1750 | 21 23 ccomp
1751 | 26 24 case
1752 | 26 25 det
1753 | 23 26 nmod
1754 | 29 27 mark
1755 | 29 28 nsubj
1756 | 23 29 advcl
1757 | 32 30 case
1758 | 32 31 amod
1759 | 29 32 nmod
1760 | 32 33 case
1761 | 11 34 punct
1762 | 40 35 advmod
1763 | 38 36 det
1764 | 38 37 amod
1765 | 40 38 nsubjpass
1766 | 40 39 auxpass
1767 | 11 40 dep
1768 | 45 41 case
1769 | 45 42 det
1770 | 45 43 amod
1771 | 45 44 amod
1772 | 40 45 nmod
1773 | 50 46 case
1774 | 50 47 det
1775 | 50 48 compound
1776 | 50 49 compound
1777 | 40 50 nmod
1778 | 9 51 punct
1779 |
1780 | 0 2 ROOT
1781 | 2 1 nsubj
1782 | 2 3 dobj
1783 | 6 4 case
1784 | 6 5 nmod:poss
1785 | 2 6 nmod
1786 | 8 7 nsubj
1787 | 6 8 acl:relcl
1788 | 8 9 dobj
1789 | 12 10 case
1790 | 12 11 amod
1791 | 8 12 nmod
1792 | 2 13 punct
1793 |
1794 | 0 3 ROOT
1795 | 3 1 nsubj
1796 | 3 2 neg
1797 | 6 4 det
1798 | 6 5 amod
1799 | 3 6 dobj
1800 | 9 7 case
1801 | 9 8 det
1802 | 3 9 nmod
1803 | 3 10 punct
1804 |
1805 | 0 5 ROOT
1806 | 3 1 det
1807 | 3 2 amod
1808 | 5 3 nsubj
1809 | 5 4 cop
1810 | 8 6 case
1811 | 8 7 nmod:poss
1812 | 5 8 nmod
1813 | 8 9 dep
1814 | 5 10 punct
1815 |
1816 | 0 3 ROOT
1817 | 3 1 nsubj
1818 | 3 2 aux
1819 | 5 4 neg
1820 | 3 5 dobj
1821 | 3 6 punct
1822 | 3 7 advmod
1823 | 9 8 mark
1824 | 7 9 advcl
1825 | 3 10 punct
1826 |
1827 | 0 3 ROOT
1828 | 3 1 nsubj
1829 | 3 2 aux
1830 | 3 4 xcomp
1831 | 3 5 punct
1832 |
1833 | 0 1 ROOT
1834 | 3 2 mark
1835 | 1 3 ccomp
1836 | 3 4 dobj
1837 | 3 5 cc
1838 | 7 6 advmod
1839 | 3 7 conj
1840 | 10 8 nmod:poss
1841 | 10 9 amod
1842 | 7 10 dobj
1843 | 1 11 punct
1844 |
1845 | 0 16 ROOT
1846 | 3 1 det
1847 | 3 2 compound
1848 | 16 3 nsubj
1849 | 3 4 punct
1850 | 6 5 amod
1851 | 3 6 appos
1852 | 6 7 punct
1853 | 6 8 dep
1854 | 8 9 cc
1855 | 12 10 det
1856 | 12 11 compound
1857 | 8 12 conj
1858 | 16 13 cop
1859 | 16 14 dep
1860 | 16 15 nsubj
1861 | 18 17 mark
1862 | 16 18 xcomp
1863 | 21 19 det
1864 | 21 20 amod
1865 | 22 21 nsubj
1866 | 18 22 xcomp
1867 | 16 23 punct
1868 |
1869 | 0 2 ROOT
1870 | 2 1 nsubj
1871 | 5 3 det:predet
1872 | 5 4 det
1873 | 2 5 dobj
1874 | 8 6 dobj
1875 | 8 7 nsubj
1876 | 5 8 acl:relcl
1877 | 5 9 cc
1878 | 11 10 det
1879 | 13 11 nsubj
1880 | 13 12 cop
1881 | 5 13 conj
1882 | 13 14 punct
1883 | 13 15 xcomp
1884 | 15 16 advmod
1885 | 18 17 advmod
1886 | 15 18 advmod
1887 | 2 19 punct
1888 |
1889 | 0 1 ROOT
1890 | 1 2 nsubj
1891 | 5 3 case
1892 | 5 4 det
1893 | 2 5 nmod
1894 | 5 6 cc
1895 | 5 7 conj
1896 | 2 8 cc
1897 | 10 9 advmod
1898 | 2 10 conj
1899 | 13 11 det
1900 | 13 12 amod
1901 | 10 13 dobj
1902 | 15 14 mark
1903 | 13 15 acl
1904 | 19 16 case
1905 | 19 17 punct
1906 | 19 18 neg
1907 | 15 19 nmod
1908 | 1 20 punct
1909 |
1910 | 0 5 ROOT
1911 | 4 1 det
1912 | 4 2 amod
1913 | 4 3 compound
1914 | 5 4 nsubj
1915 | 7 6 det
1916 | 5 7 dobj
1917 | 11 8 case
1918 | 11 9 det
1919 | 11 10 amod
1920 | 5 11 nmod
1921 | 5 12 punct
1922 |
1923 | 0 1 ROOT
1924 | 3 2 mark
1925 | 1 3 xcomp
1926 | 3 4 compound:prt
1927 | 3 5 cc
1928 | 3 6 conj
1929 | 1 7 punct
1930 |
1931 | 0 4 ROOT
1932 | 4 1 nsubj
1933 | 4 2 aux
1934 | 4 3 neg
1935 | 6 5 advmod
1936 | 10 6 dep
1937 | 9 7 det
1938 | 9 8 amod
1939 | 10 9 nsubj
1940 | 4 10 ccomp
1941 | 10 11 cc
1942 | 13 12 advmod
1943 | 16 13 advmod
1944 | 15 14 det
1945 | 16 15 nsubj
1946 | 10 16 conj
1947 | 16 17 advmod
1948 | 4 18 punct
1949 |
1950 | 0 4 ROOT
1951 | 3 1 det
1952 | 3 2 amod
1953 | 4 3 nsubj
1954 | 7 5 advmod
1955 | 7 6 nsubj
1956 | 4 7 advcl
1957 | 7 8 xcomp
1958 | 11 9 case
1959 | 11 10 det
1960 | 8 11 nmod
1961 | 4 12 punct
1962 |
1963 | 0 2 ROOT
1964 | 2 1 nsubj
1965 | 7 3 nsubj
1966 | 7 4 cop
1967 | 7 5 advmod
1968 | 7 6 advmod
1969 | 2 7 ccomp
1970 | 10 8 mark
1971 | 10 9 nsubj
1972 | 7 10 advcl
1973 | 12 11 case
1974 | 10 12 nmod
1975 | 12 13 amod
1976 | 17 14 case
1977 | 17 15 det
1978 | 17 16 compound
1979 | 13 17 nmod
1980 | 2 18 punct
1981 |
1982 | 0 2 ROOT
1983 | 2 1 nsubj
1984 | 7 3 advmod
1985 | 7 4 dobj
1986 | 7 5 nsubj
1987 | 7 6 aux
1988 | 2 7 ccomp
1989 | 7 8 nmod
1990 | 7 9 cc
1991 | 11 10 nsubj
1992 | 7 11 conj
1993 | 11 12 advmod
1994 | 2 13 punct
1995 |
1996 | 0 2 ROOT
1997 | 2 1 nsubj
1998 | 2 3 xcomp
1999 | 2 4 punct
2000 | 2 5 cc
2001 | 8 6 det:predet
2002 | 8 7 det
2003 | 9 8 nsubj
2004 | 2 9 conj
2005 | 11 10 mark
2006 | 9 11 xcomp
2007 | 13 12 advmod
2008 | 11 13 advmod
2009 | 2 14 punct
2010 |
2011 | 0 3 ROOT
2012 | 3 1 nsubj
2013 | 3 2 aux
2014 | 5 4 det
2015 | 3 5 dobj
2016 | 8 6 case
2017 | 8 7 compound
2018 | 3 8 nmod
2019 | 8 9 punct
2020 | 11 10 compound
2021 | 8 11 conj
2022 | 8 12 punct
2023 | 14 13 det
2024 | 8 14 conj
2025 | 8 15 punct
2026 | 8 16 cc
2027 | 19 17 nsubj
2028 | 19 18 aux
2029 | 8 19 conj
2030 | 19 20 dobj
2031 | 3 21 punct
2032 |
2033 | 0 5 ROOT
2034 | 2 1 nmod:poss
2035 | 5 2 nsubj
2036 | 5 3 cop
2037 | 5 4 advmod
2038 | 7 6 mark
2039 | 5 7 xcomp
2040 | 9 8 det
2041 | 7 9 dobj
2042 | 5 10 punct
2043 | 5 11 cc
2044 | 13 12 advmod
2045 | 5 13 conj
2046 | 15 14 mark
2047 | 13 15 xcomp
2048 | 20 16 mark
2049 | 20 17 nsubj
2050 | 20 18 aux
2051 | 20 19 neg
2052 | 15 20 ccomp
2053 | 23 21 mark
2054 | 23 22 nsubj
2055 | 20 23 advcl
2056 | 5 24 punct
2057 |
2058 | 0 2 ROOT
2059 | 2 1 nsubj
2060 | 4 3 det
2061 | 2 4 dobj
2062 | 9 5 nsubj
2063 | 9 6 cop
2064 | 9 7 advmod
2065 | 9 8 punct
2066 | 4 9 acl:relcl
2067 | 2 10 punct
2068 | 2 11 cc
2069 | 13 12 nsubj
2070 | 2 13 conj
2071 | 16 14 amod
2072 | 16 15 amod
2073 | 13 16 dobj
2074 | 2 17 punct
2075 |
2076 | 0 8 ROOT
2077 | 3 1 compound
2078 | 3 2 punct
2079 | 8 3 nsubj
2080 | 3 4 punct
2081 | 8 5 cop
2082 | 8 6 det
2083 | 8 7 amod
2084 | 8 9 punct
2085 |
2086 | 0 5 ROOT
2087 | 5 1 nsubj
2088 | 5 2 advmod
2089 | 5 3 aux
2090 | 5 4 neg
2091 | 5 6 punct
2092 |
2093 | 0 5 ROOT
2094 | 5 1 nsubj
2095 | 5 2 cop
2096 | 5 3 case
2097 | 5 4 amod
2098 | 5 6 punct
2099 | 5 7 acl
2100 | 9 8 mark
2101 | 7 9 xcomp
2102 | 9 10 dobj
2103 | 5 11 punct
2104 |
2105 | 0 7 ROOT
2106 | 3 1 det
2107 | 3 2 amod
2108 | 7 3 nsubj
2109 | 7 4 cop
2110 | 6 5 det
2111 | 7 6 nmod:npmod
2112 | 11 8 case
2113 | 11 9 det
2114 | 11 10 amod
2115 | 7 11 nmod
2116 | 7 12 punct
2117 |
2118 | 0 7 ROOT
2119 | 3 1 nmod:poss
2120 | 3 2 amod
2121 | 7 3 nsubj
2122 | 7 4 cop
2123 | 7 5 det
2124 | 7 6 amod
2125 | 9 8 case
2126 | 7 9 nmod
2127 | 11 10 nsubj
2128 | 9 11 acl:relcl
2129 | 15 12 case
2130 | 15 13 det
2131 | 15 14 compound
2132 | 11 15 nmod
2133 | 7 16 punct
2134 |
2135 | 0 3 ROOT
2136 | 3 1 det
2137 | 3 2 amod
2138 | 10 4 mark
2139 | 6 5 det
2140 | 10 6 nsubj
2141 | 10 7 aux
2142 | 10 8 neg
2143 | 10 9 cop
2144 | 3 10 ccomp
2145 | 10 11 advmod
2146 | 15 12 mark
2147 | 15 13 aux
2148 | 15 14 cop
2149 | 10 15 advcl
2150 | 3 16 punct
2151 | 23 17 advmod
2152 | 22 18 det
2153 | 22 19 amod
2154 | 22 20 compound
2155 | 22 21 nummod
2156 | 23 22 nsubj
2157 | 3 23 acl:relcl
2158 | 23 24 advmod
2159 | 27 25 case
2160 | 27 26 amod
2161 | 23 27 nmod
2162 | 3 28 punct
2163 |
2164 | 0 2 ROOT
2165 | 2 1 nsubj
2166 | 2 3 dobj
2167 | 2 4 advmod
2168 | 9 5 case
2169 | 9 6 amod
2170 | 9 7 compound
2171 | 9 8 compound
2172 | 2 9 nmod
2173 | 9 10 punct
2174 | 9 11 conj
2175 | 9 12 punct
2176 | 15 13 compound
2177 | 15 14 compound
2178 | 9 15 conj
2179 | 9 16 cc
2180 | 18 17 nmod:poss
2181 | 9 18 conj
2182 | 2 19 punct
2183 |
2184 | 0 3 ROOT
2185 | 3 1 nsubj
2186 | 3 2 cop
2187 | 3 4 cc
2188 | 3 5 conj
2189 | 3 6 punct
2190 |
2191 | 0 4 ROOT
2192 | 3 1 det
2193 | 3 2 amod
2194 | 4 3 nsubj
2195 | 6 5 mark
2196 | 4 6 xcomp
2197 | 8 7 det
2198 | 6 8 dobj
2199 | 6 9 compound:prt
2200 | 6 10 punct
2201 | 6 11 dep
2202 | 4 12 punct
2203 |
2204 | 0 9 ROOT
2205 | 3 1 mark
2206 | 3 2 nsubj
2207 | 9 3 csubj
2208 | 5 4 amod
2209 | 3 5 dobj
2210 | 7 6 case
2211 | 5 7 nmod
2212 | 7 8 dep
2213 | 9 10 ccomp
2214 | 12 11 case
2215 | 10 12 nmod
2216 | 10 13 cc
2217 | 10 14 conj
2218 | 18 15 case
2219 | 18 16 det
2220 | 18 17 compound
2221 | 14 18 nmod
2222 | 21 19 case
2223 | 21 20 amod
2224 | 14 21 nmod
2225 | 24 22 case
2226 | 24 23 det
2227 | 21 24 nmod
2228 | 9 25 punct
2229 |
2230 | 0 3 ROOT
2231 | 3 1 nsubj
2232 | 3 2 cop
2233 | 3 4 punct
2234 | 8 5 case
2235 | 8 6 det
2236 | 8 7 amod
2237 | 3 8 nmod
2238 | 12 9 case
2239 | 12 10 det
2240 | 12 11 compound
2241 | 8 12 nmod
2242 | 3 13 punct
2243 |
2244 | 0 3 ROOT
2245 | 3 1 nsubj
2246 | 3 2 cop
2247 | 3 4 cc
2248 | 3 5 conj
2249 | 7 6 mark
2250 | 3 7 ccomp
2251 | 3 8 punct
2252 |
2253 | 0 1 ROOT
2254 | 3 2 det
2255 | 1 3 dep
2256 | 6 4 mark
2257 | 6 5 nsubj
2258 | 3 6 ccomp
2259 | 10 7 case
2260 | 10 8 det
2261 | 10 9 compound
2262 | 6 10 nmod
2263 | 12 11 advmod
2264 | 10 12 acl
2265 | 12 13 dobj
2266 | 12 14 compound:prt
2267 |
2268 | 0 4 ROOT
2269 | 4 1 nsubj
2270 | 4 2 aux
2271 | 4 3 cop
2272 | 6 5 case
2273 | 4 6 nmod
2274 | 4 7 cc
2275 | 9 8 det
2276 | 11 9 nsubj
2277 | 11 10 cop
2278 | 4 11 conj
2279 | 11 12 cc
2280 | 14 13 nmod:poss
2281 | 20 14 nsubj
2282 | 16 15 case
2283 | 14 16 nmod
2284 | 20 17 cop
2285 | 20 18 det
2286 | 20 19 amod
2287 | 11 20 conj
2288 |
2289 | 0 5 ROOT
2290 | 2 1 amod
2291 | 5 2 nmod:tmod
2292 | 5 3 punct
2293 | 5 4 nsubj
2294 | 5 6 advmod
2295 | 5 7 punct
2296 |
2297 | 0 3 ROOT
2298 | 3 1 nsubjpass
2299 | 3 2 auxpass
2300 | 9 4 mark
2301 | 6 5 advmod
2302 | 9 6 advmod
2303 | 8 7 det
2304 | 9 8 nsubj
2305 | 3 9 advcl
2306 | 3 10 punct
2307 |
2308 | 0 3 ROOT
2309 | 3 1 nsubj
2310 | 3 2 aux
2311 | 5 4 case
2312 | 3 5 nmod
2313 | 7 6 amod
2314 | 3 7 dobj
2315 | 7 8 cc
2316 | 10 9 amod
2317 | 7 10 conj
2318 | 3 11 punct
2319 |
2320 | 0 4 ROOT
2321 | 2 1 det
2322 | 4 2 nsubjpass
2323 | 4 3 auxpass
2324 | 4 5 cc
2325 | 9 6 nsubj
2326 | 9 7 cop
2327 | 9 8 neg
2328 | 4 9 conj
2329 | 12 10 mark
2330 | 12 11 auxpass
2331 | 9 12 xcomp
2332 | 4 13 punct
2333 |
2334 | 0 2 ROOT
2335 | 2 1 nsubj
2336 | 8 3 nsubj
2337 | 3 4 cc
2338 | 3 5 conj
2339 | 8 6 cop
2340 | 8 7 advmod
2341 | 2 8 ccomp
2342 | 10 9 nsubj
2343 | 8 10 ccomp
2344 | 10 11 dobj
2345 | 2 12 punct
2346 |
2347 | 0 1 ROOT
2348 | 4 2 case
2349 | 4 3 det
2350 | 1 4 nmod
2351 | 4 5 cc
2352 | 4 6 conj
2353 | 8 7 case
2354 | 4 8 nmod
2355 | 11 9 case
2356 | 11 10 det
2357 | 8 11 nmod
2358 | 11 12 punct
2359 | 11 13 appos
2360 | 15 14 case
2361 | 13 15 nmod
2362 | 1 16 punct
2363 |
2364 | 0 3 ROOT
2365 | 3 1 nsubj
2366 | 3 2 advmod
2367 | 6 4 nmod:poss
2368 | 6 5 amod
2369 | 11 6 nsubj
2370 | 6 7 cc
2371 | 9 8 det
2372 | 6 9 conj
2373 | 11 10 cop
2374 | 3 11 ccomp
2375 | 3 12 punct
2376 |
2377 | 0 5 ROOT
2378 | 2 1 compound
2379 | 5 2 nsubj
2380 | 5 3 cop
2381 | 5 4 advmod
2382 | 8 6 case
2383 | 8 7 amod
2384 | 5 8 nmod
2385 | 5 9 punct
2386 |
2387 | 0 1 ROOT
2388 | 5 2 case
2389 | 5 3 det
2390 | 5 4 amod
2391 | 1 5 nmod
2392 | 7 6 case
2393 | 1 7 nmod
2394 | 1 8 punct
2395 |
2396 | 0 4 ROOT
2397 | 4 1 nsubj
2398 | 4 2 cop
2399 | 4 3 advmod
2400 | 6 5 mark
2401 | 4 6 xcomp
2402 | 4 7 cc
2403 | 10 8 nsubj
2404 | 10 9 cop
2405 | 4 10 conj
2406 | 12 11 mark
2407 | 10 12 xcomp
2408 | 12 13 compound:prt
2409 | 4 14 punct
2410 |
2411 | 0 3 ROOT
2412 | 3 1 nsubj
2413 | 3 2 aux
2414 | 6 4 case
2415 | 6 5 det
2416 | 3 6 nmod
2417 | 6 7 cc
2418 | 9 8 amod
2419 | 6 9 conj
2420 | 9 10 cc
2421 | 9 11 conj
2422 | 3 12 punct
2423 |
2424 | 0 6 ROOT
2425 | 2 1 compound
2426 | 6 2 nsubj
2427 | 6 3 cop
2428 | 6 4 advmod
2429 | 6 5 advmod
2430 | 10 7 case
2431 | 10 8 det
2432 | 10 9 compound
2433 | 6 10 nmod
2434 | 6 11 punct
2435 |
2436 | 0 4 ROOT
2437 | 4 1 cc
2438 | 4 2 conj
2439 | 4 3 aux
2440 | 4 5 cc
2441 | 7 6 nsubj
2442 | 4 7 conj
2443 | 7 8 dobj
2444 | 12 9 case
2445 | 12 10 case
2446 | 12 11 amod
2447 | 7 12 nmod
2448 | 4 13 punct
2449 |
2450 | 0 5 ROOT
2451 | 5 1 nsubj
2452 | 5 2 cop
2453 | 5 3 advmod
2454 | 5 4 punct
2455 | 5 6 punct
2456 | 5 7 cc
2457 | 9 8 cop
2458 | 5 9 conj
2459 | 12 10 case
2460 | 12 11 compound
2461 | 9 12 nmod
2462 | 12 13 punct
2463 | 12 14 appos
2464 | 14 15 dep
2465 | 17 16 advmod
2466 | 19 17 advmod
2467 | 19 18 nsubj
2468 | 15 19 advcl
2469 | 19 20 dobj
2470 | 24 21 case
2471 | 24 22 det
2472 | 24 23 amod
2473 | 19 24 nmod
2474 | 5 25 punct
2475 |
2476 | 0 3 ROOT
2477 | 2 1 det
2478 | 3 2 nsubj
2479 | 7 4 mark
2480 | 6 5 amod
2481 | 7 6 nsubj
2482 | 3 7 advcl
2483 | 7 8 cc
2484 | 7 9 conj
2485 | 11 10 nummod
2486 | 12 11 nmod:npmod
2487 | 7 12 advmod
2488 | 3 13 punct
2489 | 3 14 cc
2490 | 18 15 det
2491 | 18 16 amod
2492 | 18 17 compound
2493 | 21 18 nsubj
2494 | 21 19 cop
2495 | 21 20 advmod
2496 | 3 21 conj
2497 | 3 22 punct
2498 |
2499 | 0 1 ROOT
2500 | 5 2 case
2501 | 5 3 det
2502 | 5 4 amod
2503 | 1 5 nmod
2504 | 5 6 cc
2505 | 8 7 amod
2506 | 5 8 conj
2507 | 1 9 punct
2508 |
2509 | 0 1 ROOT
2510 | 3 2 amod
2511 | 1 3 nsubj
2512 | 3 4 punct
2513 | 3 5 conj
2514 | 3 6 punct
2515 | 3 7 conj
2516 | 3 8 cc
2517 | 10 9 compound
2518 | 3 10 conj
2519 | 1 11 punct
2520 |
2521 | 0 6 ROOT
2522 | 3 1 det
2523 | 3 2 amod
2524 | 6 3 nsubj
2525 | 6 4 cop
2526 | 6 5 det
2527 | 10 7 case
2528 | 7 8 mwe
2529 | 10 9 compound
2530 | 6 10 nmod
2531 | 6 11 punct
2532 |
2533 | 0 2 ROOT
2534 | 2 1 nsubj
2535 | 5 3 det
2536 | 5 4 compound
2537 | 2 5 dobj
2538 | 2 6 punct
2539 |
2540 | 0 5 ROOT
2541 | 2 1 case
2542 | 5 2 nmod
2543 | 5 3 nsubj
2544 | 5 4 advmod
2545 | 7 6 compound
2546 | 8 7 nsubj
2547 | 5 8 ccomp
2548 | 8 9 dobj
2549 | 9 10 punct
2550 | 12 11 compound
2551 | 9 12 conj
2552 | 9 13 punct
2553 | 9 14 conj
2554 | 9 15 punct
2555 | 18 16 compound
2556 | 18 17 compound
2557 | 9 18 conj
2558 | 5 19 punct
2559 |
2560 | 0 1 ROOT
2561 | 5 2 case
2562 | 5 3 det
2563 | 5 4 compound
2564 | 1 5 nmod
2565 | 1 6 punct
2566 |
2567 | 0 2 ROOT
2568 | 2 1 nsubj
2569 | 5 3 det
2570 | 5 4 compound
2571 | 2 5 dobj
2572 | 2 6 punct
2573 |
2574 | 0 5 ROOT
2575 | 5 1 nsubj
2576 | 5 2 cop
2577 | 5 3 advmod
2578 | 5 4 mark
2579 | 7 6 det
2580 | 5 7 dobj
2581 | 5 8 punct
2582 |
2583 | 0 2 ROOT
2584 | 2 1 advmod
2585 | 6 3 advmod
2586 | 3 4 cc
2587 | 3 5 conj
2588 | 2 6 dep
2589 | 2 7 punct
2590 |
2591 | 0 4 ROOT
2592 | 4 1 advmod
2593 | 4 2 punct
2594 | 4 3 nsubj
2595 | 4 5 dobj
2596 | 5 6 cc
2597 | 5 7 conj
2598 | 9 8 aux
2599 | 4 9 dep
2600 | 9 10 dobj
2601 | 12 11 case
2602 | 9 12 nmod
2603 | 4 13 punct
2604 |
2605 | 0 7 ROOT
2606 | 2 1 advmod
2607 | 7 2 advmod
2608 | 7 3 punct
2609 | 7 4 nsubj
2610 | 7 5 advmod
2611 | 7 6 aux
2612 | 9 8 nmod:poss
2613 | 7 9 dobj
2614 | 7 10 punct
2615 | 12 11 advmod
2616 | 7 12 parataxis
2617 | 15 13 case
2618 | 15 14 det
2619 | 12 15 nmod
2620 | 19 16 nsubj
2621 | 19 17 cop
2622 | 19 18 advmod
2623 | 15 19 acl:relcl
2624 | 21 20 mark
2625 | 19 21 xcomp
2626 | 21 22 advmod
2627 | 12 23 punct
2628 | 12 24 cc
2629 | 27 25 det
2630 | 27 26 amod
2631 | 29 27 nsubj
2632 | 29 28 cop
2633 | 12 29 conj
2634 | 12 30 punct
2635 | 35 31 advmod
2636 | 33 32 det
2637 | 35 33 nsubj
2638 | 35 34 cop
2639 | 12 35 parataxis
2640 | 38 36 case
2641 | 38 37 det
2642 | 35 38 nmod
2643 | 40 39 dep
2644 | 35 40 acl
2645 | 40 41 nmod
2646 | 7 42 punct
2647 |
2648 | 0 3 ROOT
2649 | 3 1 nsubjpass
2650 | 3 2 auxpass
2651 | 7 4 case
2652 | 7 5 amod
2653 | 7 6 amod
2654 | 3 7 nmod
2655 | 3 8 cc
2656 | 13 9 mark
2657 | 13 10 nsubj
2658 | 13 11 aux
2659 | 13 12 neg
2660 | 3 13 conj
2661 | 15 14 det
2662 | 13 15 dobj
2663 | 15 16 acl
2664 | 18 17 nmod:poss
2665 | 16 18 dobj
2666 | 13 19 dep
2667 | 22 20 nsubjpass
2668 | 22 21 auxpass
2669 | 19 22 ccomp
2670 | 25 23 case
2671 | 25 24 det
2672 | 22 25 nmod
2673 | 27 26 det
2674 | 25 27 nmod:npmod
2675 | 3 28 punct
2676 |
2677 | 0 4 ROOT
2678 | 2 1 advmod
2679 | 4 2 amod
2680 | 4 3 amod
2681 | 4 5 punct
2682 |
2683 | 0 7 ROOT
2684 | 3 1 det
2685 | 3 2 compound
2686 | 7 3 nsubj
2687 | 7 4 cop
2688 | 7 5 advmod
2689 | 7 6 punct
2690 | 7 8 punct
2691 | 7 9 cc
2692 | 7 10 conj
2693 | 10 11 xcomp
2694 | 11 12 dep
2695 | 12 13 cc
2696 | 12 14 conj
2697 | 7 15 punct
2698 |
2699 | 0 3 ROOT
2700 | 3 1 nsubj
2701 | 3 2 cop
2702 | 3 4 cc
2703 | 7 5 aux
2704 | 7 6 neg
2705 | 3 7 conj
2706 | 10 8 det
2707 | 10 9 compound
2708 | 7 10 dobj
2709 | 15 11 dobj
2710 | 15 12 nsubj
2711 | 15 13 aux
2712 | 15 14 neg
2713 | 10 15 acl:relcl
2714 | 19 16 mark
2715 | 19 17 mark
2716 | 19 18 nsubj
2717 | 15 19 advcl
2718 | 19 20 dobj
2719 | 3 21 punct
2720 |
2721 | 0 2 ROOT
2722 | 2 1 expl
2723 | 5 3 neg
2724 | 5 4 compound
2725 | 2 5 nsubj
2726 | 2 6 punct
2727 | 2 7 cc
2728 | 2 8 conj
2729 | 8 9 expl
2730 | 13 10 det
2731 | 13 11 compound
2732 | 13 12 compound
2733 | 8 13 nsubj
2734 | 13 14 amod
2735 | 18 15 advmod
2736 | 18 16 case
2737 | 18 17 det
2738 | 14 18 nmod
2739 | 2 19 punct
2740 |
2741 | 0 2 ROOT
2742 | 2 1 nsubj
2743 | 4 3 case
2744 | 2 4 nmod
2745 | 4 5 amod
2746 | 2 6 cc
2747 | 2 7 conj
2748 | 7 8 advmod
2749 | 2 9 punct
2750 |
2751 | 0 4 ROOT
2752 | 4 1 nsubj
2753 | 4 2 aux
2754 | 4 3 neg
2755 | 6 5 case
2756 | 11 6 nmod
2757 | 11 7 punct
2758 | 11 8 nsubj
2759 | 11 9 aux
2760 | 11 10 neg
2761 | 4 11 ccomp
2762 | 14 12 det
2763 | 14 13 compound
2764 | 11 14 dobj
2765 | 14 15 amod
2766 | 15 16 cc
2767 | 15 17 conj
2768 | 4 18 punct
2769 |
2770 | 0 5 ROOT
2771 | 2 1 nmod:poss
2772 | 5 2 nsubj
2773 | 5 3 aux
2774 | 5 4 neg
2775 | 5 6 punct
2776 |
2777 | 0 2 ROOT
2778 | 2 1 nsubj
2779 | 2 3 cc
2780 | 2 4 conj
2781 | 2 5 xcomp
2782 | 2 6 punct
2783 | 10 7 case
2784 | 10 8 det
2785 | 10 9 amod
2786 | 2 10 nmod
2787 | 2 11 punct
2788 |
2789 | 0 3 ROOT
2790 | 3 1 nsubj
2791 | 3 2 cop
2792 | 5 4 advmod
2793 | 3 5 advcl
2794 | 7 6 case
2795 | 5 7 nmod
2796 | 9 8 advmod
2797 | 5 9 advcl
2798 | 11 10 mark
2799 | 9 11 xcomp
2800 | 11 12 xcomp
2801 | 14 13 case
2802 | 12 14 nmod
2803 | 17 15 advmod
2804 | 17 16 nsubj
2805 | 12 17 advcl
2806 | 19 18 det
2807 | 17 19 dobj
2808 | 3 20 punct
2809 |
2810 | 0 8 ROOT
2811 | 8 1 advmod
2812 | 3 2 det
2813 | 8 3 nsubj
2814 | 8 4 aux
2815 | 6 5 amod
2816 | 8 6 nsubj
2817 | 8 7 neg
2818 | 8 9 compound:prt
2819 | 13 10 case
2820 | 13 11 det
2821 | 13 12 amod
2822 | 8 13 nmod
2823 | 8 14 punct
2824 |
2825 | 0 4 ROOT
2826 | 2 1 det
2827 | 4 2 nsubj
2828 | 4 3 cop
2829 | 7 5 case
2830 | 7 6 nmod:poss
2831 | 4 7 nmod
2832 | 4 8 punct
2833 |
2834 | 0 6 ROOT
2835 | 6 1 cc
2836 | 3 2 det
2837 | 6 3 nsubj
2838 | 6 4 cop
2839 | 6 5 det
2840 | 6 7 advmod
2841 | 6 8 punct
2842 |
2843 | 0 6 ROOT
2844 | 3 1 nmod:poss
2845 | 3 2 det
2846 | 6 3 dep
2847 | 6 4 punct
2848 | 6 5 nsubj
2849 | 8 7 det
2850 | 6 8 dobj
2851 | 8 9 punct
2852 | 8 10 amod
2853 | 10 11 xcomp
2854 | 13 12 mark
2855 | 11 13 xcomp
2856 | 13 14 cc
2857 | 13 15 conj
2858 | 13 16 dobj
2859 | 19 17 nsubj
2860 | 19 18 aux
2861 | 16 19 acl:relcl
2862 | 22 20 case
2863 | 22 21 det
2864 | 19 22 nmod
2865 | 13 23 punct
2866 | 6 24 punct
2867 | 6 25 punct
2868 |
2869 | 0 4 ROOT
2870 | 2 1 compound
2871 | 4 2 nsubj
2872 | 4 3 cop
2873 | 4 5 cc
2874 | 4 6 conj
2875 | 4 7 punct
2876 |
2877 | 0 4 ROOT
2878 | 4 1 nsubj
2879 | 4 2 cop
2880 | 4 3 advmod
2881 | 7 5 case
2882 | 7 6 det
2883 | 4 7 nmod
2884 | 7 8 cc
2885 | 10 9 det
2886 | 7 10 conj
2887 | 13 11 case
2888 | 13 12 det
2889 | 10 13 nmod
2890 | 4 14 punct
2891 |
2892 | 0 6 ROOT
2893 | 2 1 det
2894 | 6 2 nsubj
2895 | 6 3 cop
2896 | 6 4 det
2897 | 6 5 amod
2898 | 6 7 punct
2899 | 9 8 nsubj
2900 | 6 9 acl:relcl
2901 | 9 10 advmod
2902 | 6 11 punct
2903 | 14 12 nsubj
2904 | 14 13 cop
2905 | 6 14 parataxis
2906 | 16 15 mark
2907 | 14 16 ccomp
2908 | 16 17 cc
2909 | 16 18 conj
2910 | 6 19 punct
2911 |
2912 | 0 4 ROOT
2913 | 4 1 nsubj
2914 | 4 2 aux
2915 | 4 3 advmod
2916 | 10 5 case
2917 | 10 6 compound
2918 | 10 7 nummod
2919 | 10 8 compound
2920 | 10 9 compound
2921 | 4 10 nmod
2922 | 4 11 punct
2923 | 13 12 advmod
2924 | 14 13 amod
2925 | 4 14 dobj
2926 | 14 15 cc
2927 | 18 16 amod
2928 | 18 17 compound
2929 | 14 18 conj
2930 | 4 19 punct
2931 |
2932 | 0 1 ROOT
2933 | 1 2 amod
2934 | 1 3 cc
2935 | 10 4 aux
2936 | 6 5 det
2937 | 10 6 nsubj
2938 | 10 7 punct
2939 | 10 8 aux
2940 | 10 9 neg
2941 | 1 10 conj
2942 | 12 11 case
2943 | 10 12 nmod
2944 | 1 13 punct
2945 |
2946 | 0 3 ROOT
2947 | 3 1 nsubj
2948 | 3 2 cop
2949 | 3 4 cc
2950 | 3 5 conj
2951 | 9 6 det
2952 | 8 7 advmod
2953 | 9 8 amod
2954 | 5 9 dobj
2955 | 12 10 case
2956 | 12 11 nmod:poss
2957 | 9 12 nmod
2958 | 3 13 punct
2959 |
2960 | 0 5 ROOT
2961 | 2 1 det
2962 | 5 2 nsubj
2963 | 5 3 cop
2964 | 5 4 advmod
2965 | 5 6 punct
2966 |
2967 | 0 2 ROOT
2968 | 2 1 nsubj
2969 | 6 3 det
2970 | 5 4 advmod
2971 | 6 5 amod
2972 | 2 6 dobj
2973 | 2 7 punct
2974 | 2 8 dep
2975 | 8 9 dep
2976 | 9 10 root
2977 | 10 11 nsubj
2978 | 11 12 acl
2979 | 12 13 nmod
2980 | 10 14 punct
2981 |
2982 | 0 3 ROOT
2983 | 3 1 nsubj
2984 | 3 2 cop
2985 | 6 4 case
2986 | 6 5 det
2987 | 3 6 nmod
2988 | 3 7 cc
2989 | 12 8 nsubj
2990 | 12 9 cop
2991 | 12 10 cc:preconj
2992 | 12 11 amod
2993 | 3 12 conj
2994 | 12 13 cc
2995 | 16 14 det
2996 | 16 15 amod
2997 | 12 16 conj
2998 | 12 17 acl
2999 | 22 18 det
3000 | 22 19 compound
3001 | 22 20 nummod
3002 | 22 21 compound
3003 | 17 22 dobj
3004 | 3 23 punct
3005 |
3006 | 0 3 ROOT
3007 | 3 1 compound
3008 | 3 2 compound
3009 | 6 4 advmod
3010 | 6 5 mark
3011 | 3 6 acl
3012 | 8 7 case
3013 | 6 8 nmod
3014 | 3 9 punct
3015 |
3016 | 0 3 ROOT
3017 | 3 1 nsubj
3018 | 3 2 advmod
3019 | 7 4 det
3020 | 7 5 amod
3021 | 7 6 compound
3022 | 18 7 nsubj
3023 | 7 8 punct
3024 | 12 9 det
3025 | 12 10 nummod
3026 | 12 11 punct
3027 | 7 12 appos
3028 | 7 13 punct
3029 | 7 14 cc
3030 | 7 15 conj
3031 | 18 16 aux
3032 | 18 17 neg
3033 | 3 18 ccomp
3034 | 20 19 nsubj
3035 | 18 20 ccomp
3036 | 20 21 xcomp
3037 | 24 22 det
3038 | 24 23 amod
3039 | 21 24 dobj
3040 | 26 25 case
3041 | 21 26 nmod
3042 | 28 27 mark
3043 | 26 28 acl
3044 | 28 29 dobj
3045 | 3 30 punct
3046 |
3047 | 0 2 ROOT
3048 | 2 1 nsubj
3049 | 4 3 case
3050 | 2 4 nmod
3051 | 7 5 case
3052 | 7 6 det
3053 | 4 7 nmod
3054 | 10 8 advmod
3055 | 10 9 nsubj
3056 | 7 10 acl:relcl
3057 | 12 11 det
3058 | 10 12 dobj
3059 | 2 13 punct
3060 |
3061 | 0 5 ROOT
3062 | 5 1 nsubj
3063 | 5 2 cop
3064 | 5 3 det
3065 | 5 4 amod
3066 | 5 6 cc
3067 | 5 7 conj
3068 | 9 8 case
3069 | 5 9 nmod
3070 | 5 10 punct
3071 |
3072 | 0 5 ROOT
3073 | 3 1 det
3074 | 3 2 compound
3075 | 5 3 nsubj
3076 | 5 4 cop
3077 | 5 6 punct
3078 |
3079 | 0 10 ROOT
3080 | 3 1 nmod:poss
3081 | 3 2 amod
3082 | 10 3 nsubj
3083 | 10 4 cop
3084 | 10 5 det
3085 | 10 6 nummod
3086 | 10 7 punct
3087 | 10 8 compound
3088 | 10 9 compound
3089 | 10 11 punct
3090 | 13 12 nsubj
3091 | 10 13 acl:relcl
3092 | 13 14 advmod
3093 | 10 15 punct
3094 | 10 16 cc
3095 | 19 17 aux
3096 | 19 18 advmod
3097 | 32 19 advcl
3098 | 22 20 nmod:poss
3099 | 22 21 amod
3100 | 19 22 dobj
3101 | 19 23 cc
3102 | 25 24 advmod
3103 | 19 25 conj
3104 | 29 26 case
3105 | 29 27 det
3106 | 29 28 compound
3107 | 25 29 nmod
3108 | 32 30 punct
3109 | 32 31 nsubj
3110 | 10 32 conj
3111 | 34 33 nsubj
3112 | 32 34 dep
3113 | 34 35 dobj
3114 | 35 36 amod
3115 | 36 37 cc
3116 | 36 38 conj
3117 | 36 39 dep
3118 | 34 40 punct
3119 | 43 41 neg
3120 | 43 42 mark
3121 | 34 43 dep
3122 | 50 44 mark
3123 | 46 45 det
3124 | 50 46 nsubj
3125 | 50 47 aux
3126 | 50 48 aux
3127 | 50 49 advmod
3128 | 43 50 ccomp
3129 | 50 51 dobj
3130 | 55 52 advmod
3131 | 55 53 mark
3132 | 55 54 nsubj
3133 | 50 55 advcl
3134 | 55 56 dobj
3135 | 58 57 advmod
3136 | 59 58 nummod
3137 | 60 59 nmod:npmod
3138 | 55 60 advmod
3139 | 10 61 punct
3140 |
3141 | 0 6 ROOT
3142 | 6 1 advmod
3143 | 6 2 punct
3144 | 6 3 nsubj
3145 | 6 4 aux
3146 | 6 5 neg
3147 | 8 7 det
3148 | 6 8 dobj
3149 | 6 9 punct
3150 | 13 10 nummod
3151 | 13 11 compound
3152 | 13 12 compound
3153 | 6 13 conj
3154 | 13 14 nummod
3155 | 16 15 advmod
3156 | 17 16 amod
3157 | 13 17 dep
3158 | 6 18 punct
3159 | 6 19 cc
3160 | 22 20 det:predet
3161 | 22 21 det
3162 | 23 22 nsubj
3163 | 6 23 conj
3164 | 26 24 mark
3165 | 26 25 aux
3166 | 23 26 xcomp
3167 | 26 27 advmod
3168 | 6 28 punct
3169 |
3170 | 0 2 ROOT
3171 | 2 1 mark
3172 | 2 3 advmod
3173 | 6 4 case
3174 | 6 5 det
3175 | 2 6 nmod
3176 | 8 7 nsubj
3177 | 2 8 ccomp
3178 | 8 9 xcomp
3179 | 11 10 det
3180 | 9 11 dobj
3181 | 14 12 case
3182 | 14 13 det
3183 | 9 14 nmod
3184 | 8 15 cc
3185 | 8 16 conj
3186 | 18 17 det
3187 | 16 18 dobj
3188 | 20 19 case
3189 | 18 20 nmod
3190 | 22 21 mark
3191 | 16 22 advcl
3192 | 24 23 det
3193 | 22 24 dobj
3194 | 2 25 punct
3195 |
3196 | 0 8 ROOT
3197 | 8 1 cc
3198 | 7 2 case
3199 | 7 3 case
3200 | 7 4 det:predet
3201 | 7 5 det
3202 | 7 6 amod
3203 | 8 7 nmod
3204 | 8 9 punct
3205 | 12 10 nsubj
3206 | 12 11 advmod
3207 | 8 12 parataxis
3208 | 12 13 compound:prt
3209 | 12 14 cc
3210 | 12 15 conj
3211 | 17 16 det
3212 | 15 17 dobj
3213 | 8 18 punct
3214 |
3215 | 0 2 ROOT
3216 | 2 1 nsubj
3217 | 4 3 det
3218 | 2 4 dobj
3219 | 7 5 case
3220 | 7 6 amod
3221 | 2 7 nmod
3222 | 7 8 punct
3223 | 11 9 det
3224 | 11 10 amod
3225 | 7 11 conj
3226 | 7 12 cc
3227 | 17 13 advmod
3228 | 17 14 mark
3229 | 17 15 nsubj
3230 | 17 16 aux
3231 | 7 17 conj
3232 | 20 18 case
3233 | 20 19 det
3234 | 17 20 nmod
3235 | 2 21 punct
3236 |
3237 | 0 1 ROOT
3238 | 3 2 mark
3239 | 1 3 advcl
3240 | 3 4 advmod
3241 | 6 5 mark
3242 | 3 6 xcomp
3243 | 6 7 dobj
3244 | 3 8 cc
3245 | 10 9 det
3246 | 12 10 nsubj
3247 | 12 11 aux
3248 | 3 12 conj
3249 | 15 13 case
3250 | 15 14 compound
3251 | 12 15 nmod
3252 | 23 16 case
3253 | 23 17 case
3254 | 23 18 nmod:poss
3255 | 23 19 amod
3256 | 19 20 compound
3257 | 23 21 amod
3258 | 23 22 compound
3259 | 12 23 advcl
3260 | 1 24 punct
3261 |
3262 | 0 1 ROOT
3263 | 3 2 advmod
3264 | 1 3 advmod
3265 | 3 4 cc
3266 | 3 5 conj
3267 | 1 6 punct
3268 |
3269 | 0 5 ROOT
3270 | 3 1 det
3271 | 3 2 compound
3272 | 5 3 nsubj
3273 | 5 4 cop
3274 | 5 6 punct
3275 |
3276 | 0 5 ROOT
3277 | 2 1 det
3278 | 5 2 nsubj
3279 | 5 3 cop
3280 | 5 4 advmod
3281 | 8 6 case
3282 | 8 7 det
3283 | 5 8 nmod
3284 | 12 9 advmod
3285 | 12 10 aux
3286 | 12 11 advmod
3287 | 8 12 acl
3288 | 14 13 nmod:poss
3289 | 12 14 dobj
3290 | 12 15 nmod
3291 | 5 16 punct
3292 |
3293 | 0 3 ROOT
3294 | 3 1 nsubj
3295 | 3 2 cop
3296 | 6 4 case
3297 | 6 5 compound
3298 | 3 6 nmod
3299 | 6 7 cc
3300 | 10 8 amod
3301 | 10 9 compound
3302 | 6 10 conj
3303 | 3 11 punct
3304 |
3305 | 0 1 ROOT
3306 | 4 2 case
3307 | 4 3 det
3308 | 1 4 nmod
3309 | 4 5 cc
3310 | 7 6 nmod:poss
3311 | 4 7 conj
3312 | 4 8 cc
3313 | 10 9 expl
3314 | 4 10 conj
3315 | 12 11 compound
3316 | 13 12 nsubj
3317 | 10 13 ccomp
3318 | 13 14 dobj
3319 | 17 15 mark
3320 | 17 16 auxpass
3321 | 14 17 acl
3322 | 1 18 punct
3323 |
3324 | 0 2 ROOT
3325 | 2 1 expl
3326 | 4 3 amod
3327 | 2 4 nsubj
3328 | 7 5 case
3329 | 7 6 compound
3330 | 4 7 nmod
3331 | 2 8 punct
3332 |
3333 | 0 1 ROOT
3334 | 1 2 dep
3335 | 5 3 nsubj
3336 | 5 4 aux
3337 | 2 5 ccomp
3338 | 8 6 compound
3339 | 8 7 compound
3340 | 5 8 dobj
3341 | 11 9 case
3342 | 11 10 det
3343 | 8 11 nmod
3344 | 11 12 acl
3345 | 16 13 nsubj
3346 | 16 14 cop
3347 | 16 15 nmod:poss
3348 | 12 16 ccomp
3349 | 18 17 mark
3350 | 16 18 acl
3351 | 20 19 det
3352 | 18 20 dobj
3353 | 20 21 nummod
3354 | 29 22 mark
3355 | 29 23 nsubj
3356 | 29 24 aux
3357 | 29 25 cop
3358 | 29 26 det
3359 | 29 27 amod
3360 | 29 28 compound
3361 | 18 29 advcl
3362 | 1 30 punct
3363 |
3364 | 0 2 ROOT
3365 | 2 1 nsubj
3366 | 6 3 nmod:poss
3367 | 6 4 nummod
3368 | 6 5 compound
3369 | 2 6 dobj
3370 | 9 7 mark
3371 | 9 8 auxpass
3372 | 2 9 advcl
3373 | 11 10 case
3374 | 9 11 nmod
3375 | 17 12 case
3376 | 17 13 det
3377 | 17 14 amod
3378 | 17 15 nummod
3379 | 17 16 punct
3380 | 11 17 nmod
3381 | 2 18 punct
3382 |
3383 | 0 3 ROOT
3384 | 3 1 nsubj
3385 | 3 2 advmod
3386 | 3 4 cc
3387 | 8 5 nsubj
3388 | 8 6 cop
3389 | 8 7 advmod
3390 | 3 8 conj
3391 | 8 9 amod
3392 | 3 10 punct
3393 | 14 11 advmod
3394 | 14 12 nsubj
3395 | 14 13 aux
3396 | 3 14 advcl
3397 | 14 15 dobj
3398 | 19 16 case
3399 | 19 17 amod
3400 | 19 18 compound
3401 | 14 19 nmod
3402 | 14 20 cc
3403 | 23 21 advmod
3404 | 23 22 case
3405 | 14 23 conj
3406 | 27 24 nsubj
3407 | 27 25 aux
3408 | 27 26 advmod
3409 | 23 27 acl:relcl
3410 | 32 28 case
3411 | 32 29 det
3412 | 31 30 advmod
3413 | 32 31 amod
3414 | 27 32 nmod
3415 | 34 33 mark
3416 | 32 34 acl
3417 | 34 35 dobj
3418 |
3419 | 0 4 ROOT
3420 | 4 1 nmod:poss
3421 | 4 2 amod
3422 | 4 3 punct
3423 | 6 5 mark
3424 | 4 6 dep
3425 | 6 7 cc
3426 | 9 8 nsubj
3427 | 6 9 conj
3428 | 9 10 xcomp
3429 | 4 11 punct
3430 |
3431 | 0 6 ROOT
3432 | 6 1 nsubjpass
3433 | 1 2 cc
3434 | 1 3 conj
3435 | 6 4 aux
3436 | 6 5 auxpass
3437 | 11 7 dep
3438 | 11 8 mark
3439 | 11 9 nsubj
3440 | 11 10 advmod
3441 | 6 11 advcl
3442 | 13 12 det
3443 | 11 13 dobj
3444 | 11 14 advmod
3445 | 14 15 dep
3446 | 15 16 dep
3447 | 18 17 case
3448 | 16 18 root
3449 | 18 19 dep
3450 | 21 20 det
3451 | 19 21 dobj
3452 | 18 22 punct
3453 |
3454 | 0 4 ROOT
3455 | 4 1 nsubj
3456 | 4 2 aux
3457 | 4 3 neg
3458 | 6 5 det
3459 | 4 6 dobj
3460 | 9 7 case
3461 | 9 8 compound
3462 | 4 9 nmod
3463 | 4 10 punct
3464 | 14 11 advmod
3465 | 14 12 case
3466 | 14 13 det
3467 | 4 14 nmod
3468 | 4 15 punct
3469 |
3470 | 0 5 ROOT
3471 | 5 1 nsubj
3472 | 5 2 cop
3473 | 4 3 advmod
3474 | 5 4 advmod
3475 | 8 6 case
3476 | 8 7 det
3477 | 5 8 nmod
3478 | 5 9 punct
3479 | 13 10 advmod
3480 | 13 11 mark
3481 | 13 12 nsubj
3482 | 5 13 advcl
3483 | 15 14 det
3484 | 13 15 dobj
3485 | 17 16 case
3486 | 13 17 nmod
3487 | 5 18 punct
3488 |
3489 | 0 1 ROOT
3490 | 1 2 punct
3491 | 2 3 root
3492 | 3 4 punct
3493 | 14 5 mark
3494 | 9 6 det
3495 | 9 7 amod
3496 | 9 8 compound
3497 | 14 9 nsubj
3498 | 11 10 nsubj
3499 | 9 11 acl:relcl
3500 | 14 12 aux
3501 | 14 13 neg
3502 | 4 14 root
3503 | 19 15 case
3504 | 19 16 det
3505 | 19 17 compound
3506 | 19 18 nummod
3507 | 14 19 nmod
3508 | 14 20 punct
3509 |
3510 | 0 8 ROOT
3511 | 3 1 det
3512 | 3 2 amod
3513 | 8 3 nsubj
3514 | 5 4 compound
3515 | 3 5 dep
3516 | 8 6 cop
3517 | 8 7 det
3518 | 8 9 punct
3519 | 8 10 appos
3520 | 13 11 case
3521 | 13 12 det
3522 | 10 13 nmod
3523 | 15 14 nmod:poss
3524 | 21 15 nsubj
3525 | 15 16 cc
3526 | 15 17 conj
3527 | 20 18 compound
3528 | 20 19 compound
3529 | 15 20 dep
3530 | 13 21 acl:relcl
3531 | 27 22 nsubj
3532 | 27 23 advmod
3533 | 27 24 punct
3534 | 27 25 nsubj
3535 | 27 26 aux
3536 | 21 27 ccomp
3537 | 27 28 xcomp
3538 | 30 29 det
3539 | 28 30 dobj
3540 | 32 31 case
3541 | 28 32 nmod
3542 | 8 33 punct
3543 |
3544 | 0 6 ROOT
3545 | 3 1 det
3546 | 3 2 compound
3547 | 6 3 nsubj
3548 | 6 4 cop
3549 | 6 5 case
3550 | 6 7 amod
3551 | 11 8 case
3552 | 11 9 nmod:poss
3553 | 11 10 amod
3554 | 7 11 nmod
3555 | 16 12 nsubj
3556 | 16 13 cop
3557 | 16 14 det
3558 | 16 15 amod
3559 | 11 16 acl:relcl
3560 | 16 17 cc
3561 | 16 18 conj
3562 | 20 19 advmod
3563 | 27 20 dep
3564 | 27 21 punct
3565 | 27 22 advmod
3566 | 24 23 det
3567 | 27 24 nsubj
3568 | 27 25 cop
3569 | 27 26 neg
3570 | 18 27 xcomp
3571 | 31 28 case
3572 | 31 29 nmod:poss
3573 | 31 30 amod
3574 | 27 31 nmod
3575 |
3576 | 0 9 ROOT
3577 | 9 1 advmod
3578 | 5 2 det
3579 | 5 3 compound
3580 | 5 4 compound
3581 | 9 5 nsubj
3582 | 9 6 cop
3583 | 9 7 neg
3584 | 9 8 det
3585 | 11 10 nsubj
3586 | 9 11 acl:relcl
3587 | 13 12 mark
3588 | 11 13 xcomp
3589 | 9 14 punct
3590 |
3591 | 0 2 ROOT
3592 | 2 1 nsubj
3593 | 4 3 det
3594 | 7 4 nsubj
3595 | 7 5 aux
3596 | 7 6 cop
3597 | 2 7 ccomp
3598 | 9 8 case
3599 | 7 9 nmod
3600 | 7 10 cc
3601 | 12 11 aux
3602 | 7 12 conj
3603 | 14 13 det
3604 | 12 14 dobj
3605 | 17 15 mark
3606 | 17 16 advmod
3607 | 12 17 advcl
3608 | 17 18 dobj
3609 | 23 19 case
3610 | 23 20 det
3611 | 23 21 amod
3612 | 23 22 compound
3613 | 17 23 nmod
3614 | 2 24 punct
3615 |
3616 | 0 4 ROOT
3617 | 4 1 nsubj
3618 | 4 2 cop
3619 | 4 3 advmod
3620 | 4 5 cc
3621 | 4 6 conj
3622 | 8 7 det
3623 | 4 8 dep
3624 | 4 9 punct
3625 |
3626 | 0 3 ROOT
3627 | 3 1 nsubj
3628 | 3 2 aux
3629 | 7 4 case
3630 | 7 5 det
3631 | 7 6 compound
3632 | 3 7 nmod
3633 | 9 8 case
3634 | 7 9 nmod
3635 | 3 10 punct
3636 | 14 11 case
3637 | 14 12 case
3638 | 14 13 det
3639 | 3 14 nmod
3640 | 14 15 amod
3641 | 14 16 punct
3642 | 14 17 dep
3643 | 17 18 cc
3644 | 17 19 conj
3645 | 3 20 punct
3646 |
3647 | 0 1 ROOT
3648 | 7 2 case
3649 | 7 3 det:predet
3650 | 7 4 nmod:poss
3651 | 7 5 amod
3652 | 7 6 compound
3653 | 1 7 nmod
3654 | 10 8 advmod
3655 | 10 9 aux
3656 | 7 10 acl
3657 | 12 11 det
3658 | 10 12 nmod:tmod
3659 | 14 13 case
3660 | 10 14 nmod
3661 | 1 15 punct
3662 | 1 16 punct
3663 | 1 17 dep
3664 |
3665 | 0 5 ROOT
3666 | 5 1 nsubj
3667 | 5 2 aux
3668 | 5 3 neg
3669 | 5 4 aux
3670 | 7 6 det
3671 | 5 7 dobj
3672 | 5 8 advmod
3673 | 5 9 punct
3674 |
3675 | 0 4 ROOT
3676 | 2 1 det
3677 | 4 2 nsubj
3678 | 4 3 cop
3679 | 4 5 cc
3680 | 4 6 conj
3681 | 4 7 punct
3682 | 10 8 nsubjpass
3683 | 10 9 auxpass
3684 | 4 10 conj
3685 | 4 11 cc
3686 | 15 12 nsubj
3687 | 15 13 cop
3688 | 15 14 advmod
3689 | 4 15 conj
3690 | 4 16 punct
3691 |
3692 | 0 3 ROOT
3693 | 3 1 nsubj
3694 | 3 2 cop
3695 | 5 4 mark
3696 | 3 5 xcomp
3697 | 3 6 punct
3698 |
3699 | 0 3 ROOT
3700 | 3 1 punct
3701 | 3 2 advmod
3702 | 3 4 punct
3703 | 3 5 punct
3704 | 3 6 dep
3705 | 6 7 punct
3706 | 3 8 punct
3707 | 3 9 advmod
3708 | 3 10 punct
3709 | 3 11 punct
3710 | 14 12 advmod
3711 | 14 13 nsubj
3712 | 3 14 parataxis
3713 | 16 15 nmod:poss
3714 | 14 16 dobj
3715 | 3 17 punct
3716 |
3717 | 0 11 ROOT
3718 | 11 1 advmod
3719 | 11 2 punct
3720 | 4 3 mark
3721 | 11 4 advcl
3722 | 7 5 det
3723 | 7 6 amod
3724 | 4 7 dobj
3725 | 11 8 punct
3726 | 10 9 nmod:poss
3727 | 11 10 nsubj
3728 | 14 12 case
3729 | 14 13 nmod:poss
3730 | 11 14 nmod
3731 | 11 15 dobj
3732 | 19 16 case
3733 | 19 17 det
3734 | 19 18 amod
3735 | 15 19 nmod
3736 | 26 20 case
3737 | 26 21 det
3738 | 26 22 punct
3739 | 26 23 compound
3740 | 26 24 compound
3741 | 26 25 punct
3742 | 19 26 nmod
3743 | 11 27 punct
3744 |
3745 | 0 2 ROOT
3746 | 2 1 nsubj
3747 | 5 3 det
3748 | 5 4 amod
3749 | 2 5 dobj
3750 | 2 6 advmod
3751 | 2 7 punct
3752 |
3753 | 0 5 ROOT
3754 | 5 1 nsubj
3755 | 5 2 advmod
3756 | 5 3 aux
3757 | 5 4 neg
3758 | 7 6 det
3759 | 5 7 dobj
3760 | 11 8 case
3761 | 11 9 det
3762 | 11 10 compound
3763 | 7 11 nmod
3764 | 5 12 punct
3765 |
3766 | 0 4 ROOT
3767 | 4 1 advmod
3768 | 3 2 nmod:poss
3769 | 4 3 nsubj
3770 | 6 5 mark
3771 | 4 6 xcomp
3772 | 6 7 advmod
3773 | 4 8 punct
3774 |
3775 | 0 4 ROOT
3776 | 4 1 nsubjpass
3777 | 4 2 auxpass
3778 | 4 3 advmod
3779 | 4 5 punct
3780 |
3781 | 0 2 ROOT
3782 | 2 1 advmod
3783 | 4 3 det
3784 | 2 4 nsubj
3785 | 6 5 nsubj
3786 | 4 6 acl:relcl
3787 | 8 7 nsubj
3788 | 6 8 xcomp
3789 | 11 9 case
3790 | 11 10 nmod:poss
3791 | 8 11 nmod
3792 | 4 12 punct
3793 | 4 13 dep
3794 | 13 14 dobj
3795 | 13 15 punct
3796 | 13 16 advmod
3797 | 13 17 punct
3798 | 21 18 nsubj
3799 | 21 19 aux
3800 | 21 20 neg
3801 | 13 21 parataxis
3802 | 24 22 det
3803 | 24 23 amod
3804 | 21 24 dobj
3805 | 2 25 punct
3806 |
3807 | 0 2 ROOT
3808 | 2 1 nsubj
3809 | 4 3 advmod
3810 | 5 4 amod
3811 | 2 5 dobj
3812 | 7 6 case
3813 | 5 7 nmod
3814 | 2 8 punct
3815 |
3816 | 0 2 ROOT
3817 | 2 1 nsubj
3818 | 4 3 det
3819 | 2 4 dobj
3820 | 7 5 case
3821 | 7 6 det
3822 | 4 7 nmod
3823 | 2 8 punct
3824 | 11 9 case
3825 | 11 10 det
3826 | 2 11 nmod
3827 | 14 12 case
3828 | 14 13 det
3829 | 11 14 nmod
3830 | 2 15 cc
3831 | 17 16 nsubj
3832 | 2 17 conj
3833 | 21 18 advmod
3834 | 20 19 det
3835 | 21 20 nsubj
3836 | 17 21 advcl
3837 | 23 22 case
3838 | 21 23 nmod
3839 | 2 24 punct
3840 |
3841 | 0 4 ROOT
3842 | 2 1 compound
3843 | 4 2 nsubj
3844 | 4 3 aux
3845 | 4 5 punct
3846 |
3847 | 0 6 ROOT
3848 | 3 1 det
3849 | 3 2 amod
3850 | 6 3 nsubj
3851 | 6 4 cop
3852 | 6 5 neg
3853 | 6 7 advmod
3854 | 7 8 mwe
3855 | 6 9 punct
3856 |
3857 | 0 2 ROOT
3858 | 2 1 nsubj
3859 | 5 3 det
3860 | 5 4 amod
3861 | 2 5 dobj
3862 | 2 6 cc
3863 | 8 7 expl
3864 | 2 8 conj
3865 | 10 9 neg
3866 | 8 10 nsubj
3867 | 2 11 punct
3868 |
3869 | 0 1 ROOT
3870 | 1 2 dep
3871 | 2 3 acl
3872 | 5 4 amod
3873 | 3 5 xcomp
3874 | 1 6 punct
3875 |
3876 | 0 8 ROOT
3877 | 8 1 auxpass
3878 | 3 2 case
3879 | 1 3 nmod
3880 | 3 4 cc
3881 | 6 5 compound
3882 | 3 6 conj
3883 | 8 7 advmod
3884 | 8 9 punct
3885 |
3886 | 0 4 ROOT
3887 | 2 1 compound
3888 | 4 2 nsubj
3889 | 4 3 advmod
3890 | 4 5 punct
3891 | 4 6 conj
3892 | 4 7 punct
3893 | 4 8 cc
3894 | 4 9 conj
3895 | 9 10 advmod
3896 | 9 11 cc
3897 | 9 12 conj
3898 | 4 13 punct
3899 |
3900 | 0 4 ROOT
3901 | 2 1 det
3902 | 4 2 nsubj
3903 | 4 3 cop
3904 | 6 5 advmod
3905 | 8 6 advmod
3906 | 8 7 nsubj
3907 | 4 8 advcl
3908 | 8 9 compound:prt
3909 | 11 10 det
3910 | 15 11 nsubj
3911 | 15 12 advmod
3912 | 15 13 det
3913 | 15 14 amod
3914 | 8 15 ccomp
3915 | 4 16 punct
3916 |
3917 | 0 6 ROOT
3918 | 2 1 compound
3919 | 6 2 nsubj
3920 | 6 3 cop
3921 | 6 4 det
3922 | 6 5 amod
3923 | 6 7 punct
3924 | 6 8 punct
3925 | 10 9 advmod
3926 | 6 10 dep
3927 | 10 11 cc
3928 | 10 12 conj
3929 | 6 13 punct
3930 |
3931 | 0 17 ROOT
3932 | 2 1 aux
3933 | 17 2 advcl
3934 | 4 3 case
3935 | 2 4 nmod
3936 | 4 5 cc
3937 | 4 6 conj
3938 | 13 7 mark
3939 | 9 8 advmod
3940 | 13 9 dep
3941 | 12 10 det
3942 | 12 11 compound
3943 | 13 12 nsubj
3944 | 2 13 advcl
3945 | 17 14 punct
3946 | 17 15 nsubj
3947 | 17 16 neg
3948 | 19 18 mark
3949 | 17 19 xcomp
3950 | 22 20 det
3951 | 22 21 compound
3952 | 19 22 dobj
3953 | 26 23 case
3954 | 26 24 det
3955 | 26 25 amod
3956 | 19 26 nmod
3957 | 17 27 punct
3958 | 17 28 cc
3959 | 30 29 nsubj
3960 | 17 30 conj
3961 | 17 31 punct
3962 |
3963 | 0 8 ROOT
3964 | 3 1 det
3965 | 3 2 compound
3966 | 6 3 nmod:poss
3967 | 3 4 case
3968 | 6 5 amod
3969 | 8 6 nsubj
3970 | 8 7 cop
3971 | 8 9 punct
3972 |
3973 | 0 7 ROOT
3974 | 2 1 det
3975 | 4 2 nmod:poss
3976 | 2 3 case
3977 | 7 4 nsubj
3978 | 7 5 aux
3979 | 7 6 neg
3980 | 10 8 case
3981 | 10 9 amod
3982 | 7 10 nmod
3983 | 7 11 punct
3984 | 7 12 cc
3985 | 16 13 case
3986 | 16 14 det
3987 | 16 15 amod
3988 | 7 16 conj
3989 | 7 17 punct
3990 |
3991 | 0 6 ROOT
3992 | 3 1 det
3993 | 3 2 amod
3994 | 6 3 nsubj
3995 | 6 4 advmod
3996 | 6 5 aux
3997 | 8 7 advmod
3998 | 9 8 advmod
3999 | 6 9 xcomp
4000 | 14 10 case
4001 | 14 11 nmod:poss
4002 | 11 12 case
4003 | 14 13 amod
4004 | 9 14 nmod
4005 | 6 15 punct
4006 |
4007 | 0 3 ROOT
4008 | 2 1 compound
4009 | 3 2 nsubj
4010 | 3 4 dobj
4011 | 3 5 compound:prt
4012 | 3 6 advmod
4013 | 3 7 punct
4014 |
4015 | 0 1 ROOT
4016 | 4 2 det
4017 | 4 3 compound
4018 | 1 4 dobj
4019 | 1 5 dep
4020 | 5 6 dobj
4021 | 6 7 cc
4022 | 9 8 compound
4023 | 6 9 conj
4024 | 12 10 case
4025 | 12 11 nmod:poss
4026 | 6 12 nmod
4027 | 1 13 punct
4028 |
4029 | 0 2 ROOT
4030 | 2 1 nummod
4031 | 4 3 case
4032 | 2 4 nmod
4033 | 2 5 punct
4034 |
4035 | 0 6 ROOT
4036 | 3 1 det:predet
4037 | 3 2 det
4038 | 6 3 nsubjpass
4039 | 6 4 auxpass
4040 | 6 5 advmod
4041 | 12 7 mark
4042 | 12 8 nsubj
4043 | 12 9 cop
4044 | 12 10 nmod:poss
4045 | 12 11 amod
4046 | 6 12 advcl
4047 | 6 13 punct
4048 |
4049 | 0 14 ROOT
4050 | 4 1 det
4051 | 4 2 amod
4052 | 4 3 amod
4053 | 14 4 nsubj
4054 | 8 5 case
4055 | 8 6 det
4056 | 8 7 compound
4057 | 4 8 nmod
4058 | 10 9 case
4059 | 8 10 nmod
4060 | 14 11 cop
4061 | 14 12 case
4062 | 14 13 dep
4063 | 14 15 cc
4064 | 14 16 conj
4065 | 14 17 advmod
4066 | 14 18 punct
4067 | 14 19 cc
4068 | 26 20 nsubj
4069 | 26 21 cop
4070 | 26 22 det
4071 | 26 23 amod
4072 | 26 24 nummod
4073 | 26 25 compound
4074 | 14 26 conj
4075 | 26 27 nummod
4076 | 26 28 amod
4077 | 34 29 case
4078 | 34 30 det
4079 | 34 31 amod
4080 | 31 32 compound
4081 | 34 33 compound
4082 | 28 34 nmod
4083 | 14 35 punct
4084 | 44 36 case
4085 | 44 37 compound
4086 | 44 38 compound
4087 | 44 39 compound
4088 | 44 40 nummod
4089 | 44 41 compound
4090 | 44 42 amod
4091 | 44 43 compound
4092 | 48 44 nmod
4093 | 46 45 det
4094 | 48 46 nsubjpass
4095 | 48 47 auxpass
4096 | 14 48 conj
4097 | 52 49 compound
4098 | 52 50 nummod
4099 | 52 51 compound
4100 | 48 52 dobj
4101 | 14 53 punct
4102 | 14 54 cc
4103 | 57 55 nsubj
4104 | 57 56 advmod
4105 | 14 57 conj
4106 | 61 58 case
4107 | 61 59 amod
4108 | 61 60 compound
4109 | 57 61 nmod
4110 | 61 62 cc
4111 | 61 63 conj
4112 | 57 64 punct
4113 | 66 65 dep
4114 | 57 66 dep
4115 | 66 67 advmod
4116 | 72 68 case
4117 | 72 69 amod
4118 | 69 70 cc
4119 | 69 71 conj
4120 | 66 72 nmod
4121 | 14 73 punct
4122 |
4123 | 0 1 ROOT
4124 | 3 2 det
4125 | 1 3 dobj
4126 | 5 4 case
4127 | 1 5 nmod
4128 | 1 6 cc
4129 | 10 7 cop
4130 | 9 8 det
4131 | 10 9 nmod:npmod
4132 | 1 10 conj
4133 | 10 11 cc
4134 | 16 12 nsubj
4135 | 16 13 cop
4136 | 16 14 nmod:poss
4137 | 16 15 amod
4138 | 10 16 conj
4139 | 1 17 punct
4140 |
4141 | 0 7 ROOT
4142 | 4 1 case
4143 | 4 2 det
4144 | 4 3 amod
4145 | 7 4 nmod
4146 | 7 5 nsubj
4147 | 7 6 aux
4148 | 9 8 det
4149 | 7 9 dobj
4150 | 7 10 advmod
4151 | 7 11 punct
4152 |
4153 | 0 4 ROOT
4154 | 2 1 det
4155 | 4 2 nsubj
4156 | 4 3 aux
4157 | 8 5 case
4158 | 8 6 det
4159 | 8 7 amod
4160 | 4 8 nmod
4161 | 8 9 cc
4162 | 8 10 conj
4163 | 12 11 aux
4164 | 4 12 xcomp
4165 | 12 13 dobj
4166 | 15 14 advmod
4167 | 12 15 advmod
4168 | 12 16 advcl
4169 | 18 17 det
4170 | 20 18 nsubj
4171 | 20 19 aux
4172 | 16 20 ccomp
4173 | 20 21 xcomp
4174 | 25 22 case
4175 | 25 23 det
4176 | 25 24 amod
4177 | 20 25 nmod
4178 | 27 26 compound
4179 | 25 27 dep
4180 | 29 28 mark
4181 | 27 29 acl
4182 | 31 30 det
4183 | 29 31 dobj
4184 | 4 32 punct
4185 |
4186 | 0 9 ROOT
4187 | 6 1 det
4188 | 6 2 nummod
4189 | 6 3 nummod
4190 | 6 4 amod
4191 | 6 5 compound
4192 | 9 6 nsubj
4193 | 8 7 advmod
4194 | 9 8 advmod
4195 | 9 10 dobj
4196 | 9 11 punct
4197 |
4198 | 0 4 ROOT
4199 | 4 1 nsubj
4200 | 4 2 cop
4201 | 4 3 advmod
4202 | 4 5 cc
4203 | 4 6 conj
4204 | 6 7 compound:prt
4205 | 11 8 case
4206 | 10 9 advmod
4207 | 11 10 nummod
4208 | 6 11 nmod
4209 | 14 12 nsubj
4210 | 14 13 cop
4211 | 11 14 acl:relcl
4212 | 14 15 advmod
4213 | 17 16 case
4214 | 14 17 nmod
4215 | 4 18 punct
4216 |
4217 | 0 2 ROOT
4218 | 2 1 neg
4219 | 4 3 case
4220 | 2 4 nmod
4221 | 4 5 nummod
4222 | 2 6 punct
4223 |
4224 | 0 4 ROOT
4225 | 3 1 nmod:poss
4226 | 3 2 nummod
4227 | 4 3 nsubj
4228 | 7 5 mark
4229 | 7 6 expl
4230 | 4 7 ccomp
4231 | 11 8 neg
4232 | 11 9 amod
4233 | 11 10 compound
4234 | 7 11 nsubj
4235 | 4 12 punct
4236 |
4237 | 0 4 ROOT
4238 | 3 1 det
4239 | 3 2 amod
4240 | 4 3 nsubj
4241 | 7 5 neg
4242 | 7 6 amod
4243 | 4 7 dobj
4244 | 7 8 cc
4245 | 10 9 compound
4246 | 7 10 conj
4247 | 4 11 punct
4248 |
4249 | 0 3 ROOT
4250 | 3 1 nsubj
4251 | 3 2 aux
4252 | 6 4 det
4253 | 6 5 advmod
4254 | 3 6 dobj
4255 | 6 7 acl
4256 | 13 8 case
4257 | 13 9 det
4258 | 13 10 nummod
4259 | 13 11 punct
4260 | 13 12 compound
4261 | 7 13 nmod
4262 | 13 14 cc
4263 | 23 15 det
4264 | 23 16 compound
4265 | 23 17 amod
4266 | 23 18 nummod
4267 | 23 19 punct
4268 | 23 20 dep
4269 | 23 21 det
4270 | 23 22 compound
4271 | 13 23 conj
4272 | 3 24 punct
4273 | 3 25 dep
4274 | 30 26 case
4275 | 30 27 nummod
4276 | 30 28 amod
4277 | 28 29 dep
4278 | 25 30 nmod
4279 |
4280 | 0 3 ROOT
4281 | 3 1 neg
4282 | 3 2 advmod
4283 | 3 4 cc
4284 | 3 5 conj
4285 | 7 6 amod
4286 | 5 7 dobj
4287 | 10 8 case
4288 | 10 9 amod
4289 | 7 10 nmod
4290 | 10 11 cc
4291 | 13 12 amod
4292 | 10 13 conj
4293 | 3 14 punct
4294 |
4295 | 0 6 ROOT
4296 | 3 1 det
4297 | 3 2 amod
4298 | 6 3 nsubj
4299 | 6 4 cop
4300 | 6 5 advmod
4301 | 6 7 cc
4302 | 9 8 advmod
4303 | 6 9 conj
4304 | 12 10 case
4305 | 12 11 det
4306 | 6 12 nmod
4307 | 16 13 case
4308 | 16 14 det
4309 | 16 15 compound
4310 | 12 16 nmod
4311 | 6 17 punct
4312 |
4313 | 0 3 ROOT
4314 | 3 1 nsubj
4315 | 3 2 cop
4316 | 3 4 cc
4317 | 7 5 det
4318 | 7 6 amod
4319 | 3 7 conj
4320 | 9 8 mark
4321 | 7 9 acl
4322 | 11 10 case
4323 | 9 11 nmod
4324 | 3 12 punct
4325 |
4326 | 0 3 ROOT
4327 | 3 1 nsubjpass
4328 | 3 2 auxpass
4329 | 5 4 det
4330 | 3 5 dobj
4331 | 7 6 case
4332 | 5 7 nmod
4333 | 7 8 nummod
4334 | 3 9 punct
4335 |
4336 | 0 4 ROOT
4337 | 2 1 det
4338 | 4 2 nsubj
4339 | 4 3 cop
4340 | 4 5 dep
4341 | 8 6 amod
4342 | 8 7 amod
4343 | 5 8 dobj
4344 | 4 9 punct
4345 |
4346 | 0 2 ROOT
4347 | 2 1 nsubj
4348 | 7 3 mark
4349 | 7 4 nsubj
4350 | 7 5 aux
4351 | 7 6 cop
4352 | 2 7 ccomp
4353 | 7 8 punct
4354 | 11 9 mark
4355 | 11 10 nsubj
4356 | 7 11 advcl
4357 | 13 12 det
4358 | 11 13 dobj
4359 | 2 14 punct
4360 |
4361 | 0 1 ROOT
4362 | 3 2 advmod
4363 | 1 3 advmod
4364 | 1 4 punct
4365 |
4366 | 0 2 ROOT
4367 | 2 1 nsubj
4368 | 2 3 advmod
4369 | 9 4 mark
4370 | 7 5 det
4371 | 7 6 compound
4372 | 9 7 nsubjpass
4373 | 9 8 auxpass
4374 | 2 9 advcl
4375 | 2 10 punct
4376 |
4377 | 0 5 ROOT
4378 | 2 1 advmod
4379 | 5 2 dep
4380 | 5 3 nsubj
4381 | 5 4 aux
4382 | 5 6 xcomp
4383 | 9 7 det
4384 | 9 8 compound
4385 | 6 9 dobj
4386 | 12 10 mark
4387 | 12 11 nsubj
4388 | 6 12 advcl
4389 | 14 13 nsubj
4390 | 12 14 ccomp
4391 | 16 15 det
4392 | 14 16 dobj
4393 | 14 17 cc
4394 | 14 18 conj
4395 | 18 19 compound:prt
4396 | 22 20 case
4397 | 22 21 det
4398 | 18 22 nmod
4399 | 5 23 punct
4400 |
4401 | 0 3 ROOT
4402 | 3 1 nsubj
4403 | 3 2 cop
4404 | 7 4 mark
4405 | 7 5 nsubj
4406 | 7 6 cop
4407 | 3 7 ccomp
4408 | 7 8 xcomp
4409 | 10 9 mark
4410 | 8 10 xcomp
4411 | 10 11 dobj
4412 | 14 12 case
4413 | 14 13 det
4414 | 11 14 nmod
4415 | 16 15 nsubj
4416 | 14 16 acl:relcl
4417 | 19 17 case
4418 | 19 18 det
4419 | 16 19 nmod
4420 | 19 20 punct
4421 | 22 21 compound
4422 | 19 22 conj
4423 | 19 23 punct
4424 | 25 24 amod
4425 | 19 25 conj
4426 | 19 26 cc
4427 | 19 27 conj
4428 | 3 28 punct
4429 |
4430 | 0 1 ROOT
4431 | 5 2 case
4432 | 5 3 det
4433 | 5 4 amod
4434 | 8 5 nmod
4435 | 8 6 nsubj
4436 | 8 7 cop
4437 | 1 8 nmod
4438 | 10 9 case
4439 | 8 10 nmod
4440 | 8 11 cc
4441 | 8 12 conj
4442 | 8 13 punct
4443 | 8 14 cc
4444 | 17 15 nsubj
4445 | 17 16 aux
4446 | 37 17 csubj
4447 | 20 18 mark
4448 | 20 19 nsubj
4449 | 17 20 advcl
4450 | 20 21 cc
4451 | 23 22 aux
4452 | 20 23 conj
4453 | 25 24 det
4454 | 23 25 dobj
4455 | 27 26 mark
4456 | 23 27 advcl
4457 | 30 28 advmod
4458 | 30 29 mark
4459 | 27 30 ccomp
4460 | 30 31 dobj
4461 | 34 32 case
4462 | 34 33 det
4463 | 31 34 nmod
4464 | 34 35 dep
4465 | 37 36 cop
4466 | 8 37 conj
4467 | 41 38 case
4468 | 41 39 case
4469 | 41 40 det
4470 | 37 41 nmod
4471 | 45 42 advmod
4472 | 45 43 det
4473 | 45 44 amod
4474 | 37 45 nmod
4475 | 47 46 compound
4476 | 45 47 dep
4477 | 1 48 punct
4478 |
4479 | 0 3 ROOT
4480 | 3 1 nsubj
4481 | 3 2 amod
4482 | 3 4 punct
4483 |
4484 | 0 1 ROOT
4485 | 5 2 det
4486 | 5 3 amod
4487 | 5 4 compound
4488 | 1 5 dobj
4489 | 1 6 cc
4490 | 9 7 nsubj
4491 | 9 8 aux
4492 | 1 9 conj
4493 | 11 10 det
4494 | 9 11 dobj
4495 | 1 12 punct
4496 |
4497 | 0 15 ROOT
4498 | 8 1 mark
4499 | 3 2 det
4500 | 6 3 nmod:poss
4501 | 3 4 case
4502 | 6 5 amod
4503 | 8 6 nsubj
4504 | 8 7 cop
4505 | 15 8 advcl
4506 | 10 9 case
4507 | 8 10 nmod
4508 | 15 11 punct
4509 | 15 12 nsubj
4510 | 15 13 cop
4511 | 15 14 det
4512 | 15 16 punct
4513 |
4514 | 0 2 ROOT
4515 | 2 1 compound
4516 | 2 3 punct
4517 | 5 4 compound
4518 | 2 5 conj
4519 | 2 6 cc
4520 | 9 7 amod
4521 | 9 8 compound
4522 | 2 9 conj
4523 | 11 10 mark
4524 | 2 11 acl
4525 | 11 12 dobj
4526 | 12 13 cc
4527 | 12 14 conj
4528 | 2 15 acl
4529 | 15 16 nsubj
4530 | 19 17 nsubjpass
4531 | 19 18 auxpass
4532 | 16 19 acl:relcl
4533 | 2 20 punct
4534 |
4535 | 0 7 ROOT
4536 | 4 1 det
4537 | 4 2 compound
4538 | 4 3 compound
4539 | 7 4 nsubjpass
4540 | 7 5 auxpass
4541 | 7 6 advmod
4542 | 7 8 cc
4543 | 14 9 advmod
4544 | 14 10 case
4545 | 14 11 nummod
4546 | 14 12 det
4547 | 14 13 compound
4548 | 21 14 nsubjpass
4549 | 17 15 case
4550 | 17 16 det
4551 | 14 17 nmod
4552 | 21 18 aux
4553 | 21 19 neg
4554 | 21 20 auxpass
4555 | 7 21 conj
4556 | 21 22 advmod
4557 | 7 23 punct
4558 | 28 24 mark
4559 | 28 25 nsubj
4560 | 28 26 cop
4561 | 28 27 det
4562 | 7 28 advcl
4563 | 30 29 amod
4564 | 28 30 dobj
4565 | 36 31 nsubj
4566 | 36 32 aux
4567 | 36 33 neg
4568 | 36 34 cop
4569 | 36 35 det
4570 | 30 36 acl:relcl
4571 | 7 37 punct
4572 | 7 38 cc
4573 | 40 39 nmod:poss
4574 | 7 40 conj
4575 | 7 41 punct
4576 |
4577 | 0 2 ROOT
4578 | 2 1 amod
4579 | 2 3 punct
4580 | 5 4 amod
4581 | 2 5 conj
4582 | 9 6 case
4583 | 9 7 compound
4584 | 9 8 compound
4585 | 5 9 nmod
4586 | 2 10 cc
4587 | 12 11 neg
4588 | 2 12 conj
4589 | 2 13 punct
4590 |
4591 | 0 9 ROOT
4592 | 2 1 amod
4593 | 9 2 nsubj
4594 | 2 3 punct
4595 | 5 4 amod
4596 | 2 5 conj
4597 | 2 6 cc
4598 | 8 7 amod
4599 | 2 8 conj
4600 | 12 10 det
4601 | 12 11 compound
4602 | 9 12 dobj
4603 | 17 13 advmod
4604 | 17 14 det
4605 | 17 15 amod
4606 | 17 16 compound
4607 | 9 17 advmod
4608 | 17 18 punct
4609 | 20 19 amod
4610 | 22 20 compound
4611 | 22 21 amod
4612 | 17 22 appos
4613 | 9 23 punct
4614 |
4615 | 0 1 ROOT
4616 | 3 2 case
4617 | 1 3 nmod
4618 | 8 4 advmod
4619 | 8 5 nsubj
4620 | 8 6 cop
4621 | 8 7 det
4622 | 3 8 acl:relcl
4623 | 8 9 cc
4624 | 11 10 nsubj
4625 | 8 11 conj
4626 | 15 12 det
4627 | 15 13 amod
4628 | 15 14 amod
4629 | 11 15 dobj
4630 | 1 16 punct
4631 |
4632 | 0 10 ROOT
4633 | 4 1 mark
4634 | 4 2 nsubjpass
4635 | 4 3 auxpass
4636 | 10 4 advcl
4637 | 4 5 cc
4638 | 4 6 conj
4639 | 10 7 punct
4640 | 10 8 nsubj
4641 | 10 9 advmod
4642 | 10 11 dobj
4643 | 14 12 amod
4644 | 12 13 compound
4645 | 11 14 dobj
4646 | 16 15 mark
4647 | 14 16 acl
4648 | 18 17 nummod
4649 | 16 18 dobj
4650 | 20 19 case
4651 | 18 20 nmod
4652 | 14 21 punct
4653 | 27 22 nummod
4654 | 27 23 compound
4655 | 27 24 compound
4656 | 27 25 compound
4657 | 27 26 compound
4658 | 14 27 conj
4659 | 14 28 cc
4660 | 32 29 det
4661 | 32 30 compound
4662 | 32 31 amod
4663 | 14 32 conj
4664 | 34 33 mark
4665 | 14 34 acl
4666 | 38 35 det
4667 | 38 36 amod
4668 | 38 37 amod
4669 | 34 38 dobj
4670 | 10 39 punct
4671 |
4672 | 0 5 ROOT
4673 | 3 1 det
4674 | 3 2 compound
4675 | 5 3 nsubj
4676 | 5 4 cop
4677 | 5 6 cc
4678 | 5 7 conj
4679 | 10 8 det
4680 | 10 9 compound
4681 | 7 10 dobj
4682 | 13 11 case
4683 | 13 12 det
4684 | 10 13 nmod
4685 | 10 14 punct
4686 | 17 15 dobj
4687 | 17 16 nsubj
4688 | 10 17 acl:relcl
4689 | 21 18 case
4690 | 21 19 nmod:poss
4691 | 21 20 compound
4692 | 17 21 nmod
4693 | 5 22 punct
4694 |
4695 | 0 3 ROOT
4696 | 3 1 nsubjpass
4697 | 3 2 auxpass
4698 | 13 4 mark
4699 | 6 5 det
4700 | 13 6 nsubj
4701 | 11 7 case
4702 | 11 8 det
4703 | 11 9 amod
4704 | 11 10 amod
4705 | 6 11 nmod
4706 | 13 12 aux
4707 | 3 13 ccomp
4708 | 16 14 nsubj
4709 | 16 15 advmod
4710 | 13 16 xcomp
4711 | 13 17 cc
4712 | 22 18 nsubj
4713 | 22 19 aux
4714 | 22 20 neg
4715 | 22 21 aux
4716 | 13 22 conj
4717 | 3 23 punct
4718 | 25 24 det
4719 | 29 25 nsubj
4720 | 29 26 cop
4721 | 29 27 det
4722 | 29 28 amod
4723 | 3 29 parataxis
4724 | 32 30 aux
4725 | 32 31 advmod
4726 | 29 32 acl
4727 | 3 33 punct
4728 |
4729 | 0 4 ROOT
4730 | 2 1 det
4731 | 4 2 nsubj
4732 | 4 3 advmod
4733 | 7 5 det
4734 | 7 6 compound
4735 | 4 7 dobj
4736 | 4 8 punct
4737 |
4738 | 0 3 ROOT
4739 | 3 1 neg
4740 | 3 2 compound
4741 | 3 4 punct
4742 |
4743 | 0 2 ROOT
4744 | 2 1 nsubj
4745 | 4 3 mark
4746 | 2 4 xcomp
4747 | 6 5 compound
4748 | 4 6 dobj
4749 | 2 7 cc
4750 | 9 8 nsubj
4751 | 2 9 conj
4752 | 13 10 det
4753 | 13 11 amod
4754 | 13 12 nummod
4755 | 9 13 dobj
4756 | 2 14 punct
4757 |
4758 | 0 5 ROOT
4759 | 5 1 nsubj
4760 | 3 2 case
4761 | 1 3 nmod
4762 | 5 4 cop
4763 | 5 6 punct
4764 |
4765 | 0 3 ROOT
4766 | 3 1 nsubj
4767 | 3 2 aux
4768 | 5 4 det
4769 | 3 5 dobj
4770 | 9 6 case
4771 | 9 7 det
4772 | 9 8 compound
4773 | 3 9 nmod
4774 | 3 10 advmod
4775 | 3 11 punct
4776 |
4777 | 0 15 ROOT
4778 | 4 1 cc
4779 | 3 2 nummod
4780 | 4 3 nsubj
4781 | 15 4 csubj
4782 | 7 5 case
4783 | 7 6 det
4784 | 4 7 nmod
4785 | 12 8 case
4786 | 12 9 det
4787 | 12 10 amod
4788 | 12 11 compound
4789 | 7 12 nmod
4790 | 15 13 cop
4791 | 15 14 advmod
4792 | 15 16 punct
4793 |
4794 | 0 4 ROOT
4795 | 2 1 det
4796 | 4 2 nsubj
4797 | 4 3 cop
4798 | 4 5 punct
4799 |
4800 | 0 4 ROOT
4801 | 3 1 det
4802 | 3 2 compound
4803 | 4 3 nsubj
4804 | 7 5 mark
4805 | 7 6 auxpass
4806 | 4 7 xcomp
4807 | 11 8 case
4808 | 11 9 det
4809 | 11 10 compound
4810 | 7 11 nmod
4811 | 13 12 mark
4812 | 7 13 xcomp
4813 | 15 14 det
4814 | 13 15 dobj
4815 | 4 16 cc
4816 | 19 17 aux
4817 | 19 18 neg
4818 | 4 19 conj
4819 | 19 20 dep
4820 | 4 21 punct
4821 |
4822 | 0 5 ROOT
4823 | 2 1 det
4824 | 5 2 nsubj
4825 | 5 3 cop
4826 | 5 4 advmod
4827 | 5 6 punct
4828 | 8 7 case
4829 | 11 8 nmod
4830 | 11 9 nsubj
4831 | 11 10 aux
4832 | 5 11 dep
4833 | 17 12 case
4834 | 17 13 det
4835 | 17 14 compound
4836 | 17 15 compound
4837 | 17 16 amod
4838 | 11 17 nmod
4839 | 20 18 case
4840 | 20 19 det
4841 | 17 20 nmod
4842 | 5 21 punct
4843 |
4844 | 0 8 ROOT
4845 | 8 1 mark
4846 | 8 2 nsubj
4847 | 4 3 case
4848 | 2 4 nmod
4849 | 8 5 nsubj
4850 | 8 6 cop
4851 | 8 7 det
4852 | 10 9 case
4853 | 8 10 nmod
4854 | 8 11 punct
4855 |
4856 | 0 4 ROOT
4857 | 2 1 det
4858 | 4 2 nsubj
4859 | 4 3 cop
4860 | 4 5 punct
4861 |
4862 | 0 2 ROOT
4863 | 2 1 nsubj
4864 | 4 3 amod
4865 | 2 4 dobj
4866 | 4 5 cc
4867 | 8 6 amod
4868 | 8 7 compound
4869 | 4 8 conj
4870 | 2 9 cc
4871 | 13 10 nsubj
4872 | 13 11 cop
4873 | 13 12 det
4874 | 2 13 conj
4875 | 15 14 det
4876 | 13 15 nmod:tmod
4877 | 2 16 punct
4878 |
4879 | 0 1 ROOT
4880 | 1 2 punct
4881 | 7 3 nsubj
4882 | 7 4 cop
4883 | 6 5 det
4884 | 7 6 nmod:npmod
4885 | 1 7 dep
4886 | 7 8 punct
4887 | 10 9 compound
4888 | 7 10 appos
4889 | 1 11 punct
4890 |
4891 | 0 3 ROOT
4892 | 2 1 det
4893 | 3 2 nsubj
4894 | 8 4 nsubj
4895 | 8 5 cop
4896 | 8 6 det
4897 | 8 7 amod
4898 | 3 8 ccomp
4899 | 11 9 mark
4900 | 11 10 nsubj
4901 | 8 11 advcl
4902 | 14 12 det
4903 | 14 13 amod
4904 | 11 14 dobj
4905 | 3 15 punct
4906 |
4907 | 0 3 ROOT
4908 | 3 1 nsubj
4909 | 3 2 advmod
4910 | 8 4 mark
4911 | 8 5 advmod
4912 | 8 6 nsubj
4913 | 8 7 aux
4914 | 3 8 advcl
4915 | 10 9 det
4916 | 8 10 dobj
4917 | 3 11 punct
4918 |
4919 | 0 5 ROOT
4920 | 5 1 dobj
4921 | 5 2 nsubj
4922 | 5 3 aux
4923 | 5 4 neg
4924 | 11 6 punct
4925 | 8 7 advmod
4926 | 11 8 nummod
4927 | 11 9 compound
4928 | 11 10 nummod
4929 | 5 11 dobj
4930 | 5 12 punct
4931 |
4932 | 0 6 ROOT
4933 | 3 1 neg
4934 | 3 2 compound
4935 | 6 3 nsubjpass
4936 | 6 4 auxpass
4937 | 6 5 neg
4938 | 6 7 cc
4939 | 12 8 nsubj
4940 | 12 9 aux
4941 | 12 10 cop
4942 | 12 11 nmod:poss
4943 | 6 12 conj
4944 | 6 13 punct
4945 |
4946 | 0 2 ROOT
4947 | 2 1 expl
4948 | 7 3 neg
4949 | 7 4 punct
4950 | 7 5 compound
4951 | 7 6 punct
4952 | 2 7 nsubj
4953 | 2 8 punct
4954 |
4955 | 0 4 ROOT
4956 | 4 1 nsubj
4957 | 4 2 cop
4958 | 4 3 advmod
4959 | 4 5 cc
4960 | 4 6 conj
4961 | 6 7 dobj
4962 | 10 8 mark
4963 | 10 9 nsubj
4964 | 7 10 ccomp
4965 | 10 11 xcomp
4966 | 15 12 case
4967 | 15 13 det
4968 | 15 14 compound
4969 | 11 15 nmod
4970 | 4 16 punct
4971 |
4972 | 0 1 ROOT
4973 | 4 2 det
4974 | 4 3 compound
4975 | 1 4 dobj
4976 | 4 5 cc
4977 | 9 6 det
4978 | 9 7 amod
4979 | 9 8 compound
4980 | 4 9 conj
4981 | 1 10 punct
4982 |
4983 | 0 6 ROOT
4984 | 2 1 nmod:poss
4985 | 6 2 nsubj
4986 | 4 3 case
4987 | 2 4 nmod
4988 | 4 5 nummod
4989 | 6 7 cc
4990 | 11 8 nsubj
4991 | 11 9 aux
4992 | 11 10 neg
4993 | 6 11 conj
4994 | 11 12 dobj
4995 | 12 13 nummod
4996 | 6 14 punct
4997 |
4998 | 0 1 ROOT
4999 | 3 2 mark
5000 | 1 3 xcomp
5001 | 5 4 advmod
5002 | 3 5 xcomp
5003 | 7 6 mark
5004 | 5 7 xcomp
5005 | 7 8 advmod
5006 | 11 9 case
5007 | 11 10 nmod:poss
5008 | 7 11 nmod
5009 | 14 12 advmod
5010 | 14 13 advmod
5011 | 7 14 xcomp
5012 | 16 15 mark
5013 | 14 16 xcomp
5014 | 19 17 case
5015 | 19 18 amod
5016 | 16 19 nmod
5017 | 19 20 cc
5018 | 19 21 conj
5019 | 1 22 punct
5020 |
5021 | 0 2 ROOT
5022 | 2 1 amod
5023 | 2 3 punct
5024 |
5025 | 0 2 ROOT
5026 | 2 1 nsubj
5027 | 4 3 case
5028 | 2 4 nmod
5029 | 7 5 mark
5030 | 7 6 nsubj
5031 | 2 7 advcl
5032 | 7 8 dobj
5033 | 8 9 amod
5034 | 2 10 punct
5035 | 14 11 advmod
5036 | 14 12 mark
5037 | 14 13 nsubj
5038 | 2 14 advcl
5039 | 14 15 dobj
5040 | 2 16 punct
5041 |
5042 | 0 4 ROOT
5043 | 4 1 nsubj
5044 | 3 2 neg
5045 | 4 3 advmod
5046 | 16 5 advmod
5047 | 8 6 case
5048 | 8 7 det
5049 | 5 8 nmod
5050 | 5 9 cc
5051 | 5 10 conj
5052 | 16 11 punct
5053 | 16 12 nsubj
5054 | 16 13 aux
5055 | 16 14 neg
5056 | 16 15 advmod
5057 | 4 16 ccomp
5058 | 16 17 dobj
5059 | 16 18 advmod
5060 | 4 19 punct
5061 |
5062 | 0 3 ROOT
5063 | 3 1 nsubj
5064 | 3 2 advmod
5065 | 3 4 dobj
5066 | 4 5 nummod
5067 | 4 6 punct
5068 | 10 7 dobj
5069 | 9 8 det
5070 | 10 9 nsubj
5071 | 4 10 acl:relcl
5072 | 3 11 punct
5073 |
5074 | 0 5 ROOT
5075 | 2 1 case
5076 | 5 2 advmod
5077 | 5 3 punct
5078 | 5 4 nsubj
5079 | 8 6 det
5080 | 8 7 amod
5081 | 9 8 nmod:npmod
5082 | 5 9 xcomp
5083 | 11 10 mark
5084 | 5 11 advcl
5085 | 14 12 det
5086 | 14 13 compound
5087 | 11 14 dobj
5088 | 5 15 punct
5089 |
5090 | 0 3 ROOT
5091 | 3 1 nsubjpass
5092 | 3 2 auxpass
5093 | 5 4 case
5094 | 3 5 nmod
5095 | 7 6 case
5096 | 3 7 nmod
5097 | 9 8 advmod
5098 | 11 9 nsubj
5099 | 11 10 aux
5100 | 3 11 parataxis
5101 | 14 12 det
5102 | 14 13 amod
5103 | 11 14 dobj
5104 | 14 15 acl
5105 | 17 16 nmod:poss
5106 | 15 17 dobj
5107 | 17 18 advmod
5108 | 3 19 punct
5109 |
5110 | 0 3 ROOT
5111 | 3 1 nsubj
5112 | 3 2 advmod
5113 | 7 4 case
5114 | 7 5 case
5115 | 7 6 det
5116 | 3 7 nmod
5117 | 3 8 cc
5118 | 10 9 nsubj
5119 | 3 10 conj
5120 | 12 11 det
5121 | 10 12 dobj
5122 | 15 13 case
5123 | 15 14 amod
5124 | 12 15 nmod
5125 | 12 16 acl
5126 | 19 17 case
5127 | 19 18 det
5128 | 16 19 nmod
5129 | 3 20 punct
5130 |
5131 | 0 1 ROOT
5132 | 3 2 mark
5133 | 9 3 advcl
5134 | 5 4 case
5135 | 3 5 nmod
5136 | 3 6 punct
5137 | 9 7 punct
5138 | 9 8 nsubj
5139 | 1 9 root
5140 | 9 10 xcomp
5141 | 9 11 cc
5142 | 15 12 cop
5143 | 15 13 neg
5144 | 15 14 det
5145 | 9 15 conj
5146 | 17 16 case
5147 | 15 17 nmod
5148 | 9 18 punct
5149 |
5150 | 0 1 ROOT
5151 | 4 2 case
5152 | 4 3 det
5153 | 1 4 nmod
5154 | 1 5 punct
5155 |
5156 | 0 2 ROOT
5157 | 2 1 amod
5158 | 5 3 case
5159 | 5 4 det
5160 | 2 5 nmod
5161 | 2 6 amod
5162 | 8 7 case
5163 | 6 8 nmod
5164 | 8 9 amod
5165 | 2 10 punct
5166 |
5167 | 0 5 ROOT
5168 | 5 1 nsubj
5169 | 5 2 cop
5170 | 4 3 det
5171 | 5 4 nmod:npmod
5172 | 9 6 case
5173 | 9 7 det
5174 | 9 8 compound
5175 | 5 9 nmod
5176 | 12 10 case
5177 | 12 11 case
5178 | 9 12 nmod
5179 | 5 13 punct
5180 | 5 14 cc
5181 | 17 15 nsubj
5182 | 17 16 aux
5183 | 5 17 conj
5184 | 21 18 nsubj
5185 | 21 19 dep
5186 | 21 20 mark
5187 | 17 21 xcomp
5188 | 21 22 nmod
5189 | 5 23 punct
5190 |
5191 | 0 9 ROOT
5192 | 2 1 det
5193 | 9 2 nsubj
5194 | 4 3 advmod
5195 | 2 4 acl
5196 | 4 5 advmod
5197 | 9 6 cop
5198 | 9 7 neg
5199 | 9 8 det
5200 | 9 10 punct
5201 | 9 11 cc
5202 | 15 12 nsubjpass
5203 | 15 13 aux
5204 | 15 14 auxpass
5205 | 9 15 conj
5206 | 17 16 case
5207 | 15 17 nmod
5208 | 9 18 punct
5209 |
5210 | 0 6 ROOT
5211 | 6 1 advmod
5212 | 6 2 punct
5213 | 4 3 det
5214 | 6 4 nsubj
5215 | 6 5 cop
5216 | 12 7 mark
5217 | 9 8 det
5218 | 12 9 nsubj
5219 | 12 10 aux
5220 | 12 11 neg
5221 | 6 12 advcl
5222 | 12 13 xcomp
5223 | 12 14 cc
5224 | 16 15 det
5225 | 19 16 nsubj
5226 | 19 17 aux
5227 | 19 18 advmod
5228 | 12 19 conj
5229 | 19 20 cc
5230 | 19 21 conj
5231 | 6 22 punct
5232 |
5233 | 0 12 ROOT
5234 | 3 1 mark
5235 | 3 2 nsubj
5236 | 12 3 advcl
5237 | 3 4 dobj
5238 | 12 5 punct
5239 | 8 6 case
5240 | 8 7 det
5241 | 12 8 nmod
5242 | 12 9 nsubjpass
5243 | 12 10 aux
5244 | 12 11 auxpass
5245 | 12 13 punct
5246 |
5247 | 0 6 ROOT
5248 | 2 1 det
5249 | 6 2 nsubj
5250 | 6 3 cop
5251 | 6 4 neg
5252 | 6 5 mark
5253 | 10 7 case
5254 | 10 8 det
5255 | 10 9 compound
5256 | 6 10 nmod
5257 | 6 11 punct
5258 |
5259 | 0 3 ROOT
5260 | 3 1 nsubj
5261 | 3 2 cop
5262 | 3 4 cc
5263 | 6 5 compound
5264 | 9 6 nsubj
5265 | 9 7 cop
5266 | 9 8 det
5267 | 3 9 conj
5268 | 9 10 dep
5269 | 10 11 cc
5270 | 13 12 advmod
5271 | 14 13 advmod
5272 | 10 14 conj
5273 | 3 15 punct
5274 |
5275 | 0 5 ROOT
5276 | 4 1 det
5277 | 4 2 compound
5278 | 4 3 compound
5279 | 5 4 nsubj
5280 | 7 6 compound
5281 | 8 7 nsubj
5282 | 5 8 ccomp
5283 | 10 9 nmod:npmod
5284 | 12 10 advmod
5285 | 12 11 nsubj
5286 | 8 12 ccomp
5287 | 5 13 punct
5288 |
5289 | 0 8 ROOT
5290 | 3 1 det
5291 | 3 2 compound
5292 | 8 3 nsubj
5293 | 8 4 cop
5294 | 8 5 advmod
5295 | 8 6 det
5296 | 8 7 amod
5297 | 11 9 case
5298 | 11 10 amod
5299 | 8 11 nmod
5300 | 13 12 case
5301 | 8 13 advmod
5302 | 8 14 punct
5303 |
5304 | 0 5 ROOT
5305 | 5 1 advmod
5306 | 5 2 punct
5307 | 5 3 nsubj
5308 | 5 4 aux
5309 | 5 6 advmod
5310 | 10 7 case
5311 | 10 8 det
5312 | 10 9 amod
5313 | 5 10 nmod
5314 | 10 11 cc
5315 | 13 12 compound
5316 | 10 13 conj
5317 | 16 14 case
5318 | 16 15 det
5319 | 10 16 nmod
5320 | 16 17 nmod:npmod
5321 | 21 18 advmod
5322 | 21 19 nsubj
5323 | 21 20 aux
5324 | 5 21 ccomp
5325 | 24 22 det
5326 | 24 23 amod
5327 | 21 24 dobj
5328 | 26 25 case
5329 | 24 26 nmod
5330 | 26 27 dep
5331 | 29 28 mark
5332 | 27 29 acl
5333 | 5 30 punct
5334 |
5335 | 0 3 ROOT
5336 | 2 1 amod
5337 | 3 2 nsubj
5338 | 3 4 dobj
5339 | 4 5 nummod
5340 | 4 6 punct
5341 | 8 7 compound
5342 | 4 8 conj
5343 | 4 9 punct
5344 | 11 10 compound
5345 | 4 11 conj
5346 | 4 12 punct
5347 | 14 13 compound
5348 | 4 14 conj
5349 | 3 15 punct
5350 |
5351 | 0 4 ROOT
5352 | 4 1 advmod
5353 | 4 2 punct
5354 | 4 3 expl
5355 | 8 5 advmod
5356 | 8 6 det
5357 | 8 7 compound
5358 | 4 8 nsubj
5359 | 11 9 case
5360 | 11 10 det
5361 | 8 11 nmod
5362 | 4 12 punct
5363 | 4 13 cc
5364 | 17 14 advmod
5365 | 17 15 det
5366 | 17 16 amod
5367 | 4 17 conj
5368 | 19 18 mark
5369 | 17 19 acl
5370 | 21 20 nsubj
5371 | 19 21 ccomp
5372 | 25 22 amod
5373 | 25 23 compound
5374 | 25 24 compound
5375 | 26 25 nsubj
5376 | 21 26 ccomp
5377 | 28 27 det
5378 | 26 28 dobj
5379 | 30 29 mark
5380 | 26 30 xcomp
5381 | 33 31 advmod
5382 | 33 32 advmod
5383 | 30 33 xcomp
5384 | 4 34 punct
5385 | 4 35 cc
5386 | 37 36 case
5387 | 38 37 nmod
5388 | 4 38 conj
5389 | 40 39 mark
5390 | 38 40 xcomp
5391 | 42 41 case
5392 | 40 42 nmod
5393 | 4 43 punct
5394 |
5395 | 0 8 ROOT
5396 | 8 1 advcl
5397 | 4 2 case
5398 | 4 3 compound
5399 | 1 4 nmod
5400 | 8 5 punct
5401 | 8 6 nsubjpass
5402 | 8 7 auxpass
5403 | 10 9 det
5404 | 12 10 nsubjpass
5405 | 12 11 auxpass
5406 | 8 12 ccomp
5407 | 15 13 amod
5408 | 15 14 compound
5409 | 12 15 dobj
5410 | 8 16 punct
5411 |
5412 | 0 6 ROOT
5413 | 6 1 nsubjpass
5414 | 6 2 auxpass
5415 | 6 3 cop
5416 | 6 4 det
5417 | 6 5 compound
5418 | 13 7 mark
5419 | 9 8 det
5420 | 13 9 nsubj
5421 | 9 10 cc
5422 | 9 11 conj
5423 | 13 12 cop
5424 | 6 13 advcl
5425 | 15 14 mark
5426 | 13 15 acl
5427 | 18 16 det
5428 | 18 17 amod
5429 | 15 18 dobj
5430 | 13 19 dep
5431 | 19 20 cc
5432 | 23 21 nsubjpass
5433 | 23 22 auxpass
5434 | 19 23 conj
5435 | 23 24 advmod
5436 | 6 25 punct
5437 |
5438 | 0 3 ROOT
5439 | 2 1 amod
5440 | 3 2 nsubj
5441 | 7 4 case
5442 | 7 5 det
5443 | 7 6 amod
5444 | 3 7 nmod
5445 | 3 8 punct
5446 | 3 9 cc
5447 | 12 10 nsubj
5448 | 12 11 cop
5449 | 3 12 conj
5450 | 16 13 mark
5451 | 16 14 nsubj
5452 | 16 15 mark
5453 | 12 16 advcl
5454 | 16 17 dobj
5455 | 16 18 advmod
5456 | 3 19 punct
5457 |
5458 | 0 5 ROOT
5459 | 5 1 advmod
5460 | 5 2 mark
5461 | 5 3 nsubj
5462 | 5 4 aux
5463 | 8 6 case
5464 | 8 7 compound
5465 | 5 8 nmod
5466 | 10 9 advmod
5467 | 11 10 advmod
5468 | 8 11 dep
5469 | 14 12 case
5470 | 14 13 compound
5471 | 11 14 nmod
5472 | 5 15 cc
5473 | 18 16 aux
5474 | 18 17 advmod
5475 | 5 18 conj
5476 | 20 19 det
5477 | 18 20 dobj
5478 | 22 21 case
5479 | 20 22 nmod
5480 | 5 23 punct
5481 |
5482 | 0 3 ROOT
5483 | 3 1 compound
5484 | 3 2 amod
5485 | 3 4 cc
5486 | 3 5 conj
5487 | 3 6 punct
5488 |
5489 | 0 3 ROOT
5490 | 3 1 cc
5491 | 3 2 nsubj
5492 | 5 4 det
5493 | 3 5 dobj
5494 | 9 6 case
5495 | 9 7 det
5496 | 9 8 amod
5497 | 3 9 nmod
5498 | 3 10 punct
5499 |
5500 | 0 25 ROOT
5501 | 5 1 nsubj
5502 | 5 2 cop
5503 | 5 3 det
5504 | 5 4 amod
5505 | 25 5 csubj
5506 | 7 6 case
5507 | 5 7 nmod
5508 | 12 8 mark
5509 | 12 9 nsubj
5510 | 12 10 aux
5511 | 12 11 neg
5512 | 5 12 advcl
5513 | 14 13 det
5514 | 12 14 dobj
5515 | 16 15 case
5516 | 14 16 nmod
5517 | 18 17 case
5518 | 16 18 nmod
5519 | 18 19 cc
5520 | 18 20 conj
5521 | 25 21 aux
5522 | 25 22 advmod
5523 | 25 23 cop
5524 | 25 24 amod
5525 | 27 26 case
5526 | 25 27 nmod
5527 | 30 28 case
5528 | 30 29 amod
5529 | 25 30 nmod
5530 | 32 31 mark
5531 | 30 32 acl
5532 | 32 33 dobj
5533 | 25 34 punct
5534 |
5535 | 0 10 ROOT
5536 | 4 1 mark
5537 | 3 2 det
5538 | 4 3 nsubj
5539 | 10 4 csubj
5540 | 6 5 det
5541 | 4 6 dobj
5542 | 9 7 case
5543 | 9 8 amod
5544 | 6 9 nmod
5545 | 13 11 nsubj
5546 | 13 12 advmod
5547 | 10 13 xcomp
5548 | 10 14 punct
5549 |
5550 | 0 1 ROOT
5551 | 3 2 advmod
5552 | 1 3 dep
5553 | 5 4 mark
5554 | 3 5 xcomp
5555 | 11 6 compound
5556 | 6 7 cc
5557 | 6 8 conj
5558 | 11 9 punct
5559 | 11 10 compound
5560 | 5 11 dobj
5561 | 11 12 punct
5562 | 1 13 punct
5563 |
5564 | 0 3 ROOT
5565 | 3 1 nsubj
5566 | 3 2 aux
5567 | 3 4 advcl
5568 | 6 5 det
5569 | 4 6 dobj
5570 | 3 7 punct
5571 | 3 8 cc
5572 | 10 9 case
5573 | 14 10 nmod
5574 | 14 11 punct
5575 | 13 12 det
5576 | 14 13 nsubj
5577 | 3 14 conj
5578 | 18 15 det
5579 | 17 16 advmod
5580 | 18 17 amod
5581 | 14 18 dobj
5582 | 14 19 cc
5583 | 23 20 cop
5584 | 22 21 advmod
5585 | 23 22 advmod
5586 | 14 23 conj
5587 | 14 24 punct
5588 | 32 25 advmod
5589 | 32 26 mark
5590 | 32 27 nsubj
5591 | 32 28 cop
5592 | 30 29 advmod
5593 | 31 30 nummod
5594 | 32 31 nmod:npmod
5595 | 14 32 advcl
5596 | 32 33 cc
5597 | 35 34 nummod
5598 | 36 35 nmod:npmod
5599 | 32 36 conj
5600 | 3 37 punct
5601 |
5602 | 0 25 ROOT
5603 | 2 1 case
5604 | 18 2 nmod
5605 | 2 3 cc
5606 | 5 4 det
5607 | 2 5 conj
5608 | 8 6 nsubjpass
5609 | 8 7 auxpass
5610 | 5 8 acl:relcl
5611 | 15 9 case
5612 | 15 10 det
5613 | 15 11 amod
5614 | 15 12 nummod
5615 | 15 13 punct
5616 | 15 14 compound
5617 | 8 15 nmod
5618 | 18 16 punct
5619 | 18 17 advmod
5620 | 25 18 csubj
5621 | 21 19 case
5622 | 21 20 compound
5623 | 18 21 nmod
5624 | 25 22 cop
5625 | 25 23 neg
5626 | 25 24 advmod
5627 | 25 26 punct
5628 |
5629 | 0 2 ROOT
5630 | 2 1 advmod
5631 | 5 3 neg
5632 | 5 4 compound
5633 | 2 5 dobj
5634 | 9 6 advmod
5635 | 9 7 advmod
5636 | 9 8 nsubj
5637 | 2 9 advcl
5638 | 11 10 mark
5639 | 9 11 xcomp
5640 | 13 12 compound
5641 | 11 13 dobj
5642 | 9 14 dep
5643 | 17 15 case
5644 | 17 16 compound
5645 | 14 17 nmod
5646 | 14 18 dep
5647 | 18 19 neg
5648 | 2 20 punct
5649 |
5650 | 0 2 ROOT
5651 | 2 1 nsubj
5652 | 2 3 dobj
5653 | 6 4 mark
5654 | 6 5 cop
5655 | 2 6 xcomp
5656 | 8 7 mark
5657 | 6 8 xcomp
5658 | 13 9 det
5659 | 13 10 amod
5660 | 13 11 punct
5661 | 13 12 amod
5662 | 8 13 dobj
5663 | 16 14 case
5664 | 16 15 nmod:poss
5665 | 8 16 nmod
5666 | 2 17 cc
5667 | 19 18 auxpass
5668 | 2 19 conj
5669 | 21 20 mark
5670 | 19 21 xcomp
5671 | 21 22 dobj
5672 | 22 23 dep
5673 | 23 24 acl
5674 | 26 25 mark
5675 | 24 26 xcomp
5676 | 30 27 det
5677 | 30 28 compound
5678 | 30 29 compound
5679 | 26 30 dobj
5680 | 32 31 mark
5681 | 26 32 advcl
5682 | 32 33 advmod
5683 | 2 34 punct
5684 |
5685 | 0 2 ROOT
5686 | 2 1 det
5687 | 6 3 case
5688 | 6 4 compound
5689 | 6 5 amod
5690 | 2 6 nmod
5691 | 10 7 case
5692 | 10 8 det
5693 | 10 9 amod
5694 | 2 10 nmod
5695 | 10 11 nummod
5696 | 10 12 punct
5697 | 16 13 compound
5698 | 16 14 compound
5699 | 16 15 punct
5700 | 10 16 dep
5701 | 16 17 punct
5702 | 23 18 neg
5703 | 23 19 det
5704 | 23 20 amod
5705 | 23 21 amod
5706 | 23 22 compound
5707 | 16 23 appos
5708 | 26 24 det
5709 | 26 25 nummod
5710 | 27 26 nsubj
5711 | 23 27 acl:relcl
5712 | 2 28 punct
5713 |
5714 | 0 3 ROOT
5715 | 3 1 neg
5716 | 3 2 compound
5717 | 7 4 case
5718 | 7 5 det
5719 | 7 6 amod
5720 | 3 7 nmod
5721 | 9 8 expl
5722 | 3 9 acl:relcl
5723 | 11 10 neg
5724 | 21 11 nsubj
5725 | 14 12 nsubj
5726 | 14 13 aux
5727 | 11 14 acl:relcl
5728 | 16 15 det
5729 | 14 16 dobj
5730 | 18 17 case
5731 | 16 18 nmod
5732 | 20 19 case
5733 | 14 20 nmod
5734 | 9 21 ccomp
5735 | 23 22 amod
5736 | 21 23 dobj
5737 | 27 24 case
5738 | 27 25 det
5739 | 27 26 amod
5740 | 23 27 nmod
5741 | 3 28 punct
5742 |
5743 | 0 5 ROOT
5744 | 2 1 det
5745 | 5 2 nsubj
5746 | 5 3 cop
5747 | 5 4 advmod
5748 | 7 6 case
5749 | 5 7 advcl
5750 | 10 8 case
5751 | 10 9 det
5752 | 5 10 nmod
5753 | 14 11 neg
5754 | 14 12 compound
5755 | 14 13 compound
5756 | 10 14 dep
5757 | 18 15 mark
5758 | 18 16 cop
5759 | 18 17 advmod
5760 | 14 18 acl
5761 | 20 19 nsubj
5762 | 18 20 ccomp
5763 | 20 21 dobj
5764 | 24 22 case
5765 | 24 23 det
5766 | 21 24 nmod
5767 |
5768 | 0 1 ROOT
5769 | 1 2 dep
5770 | 4 3 neg
5771 | 2 4 amod
5772 | 7 5 mark
5773 | 7 6 advmod
5774 | 4 7 xcomp
5775 | 7 8 dobj
5776 | 10 9 case
5777 | 7 10 nmod
5778 | 13 11 advmod
5779 | 13 12 compound
5780 | 7 13 dep
5781 | 13 14 xcomp
5782 | 16 15 det
5783 | 14 16 dobj
5784 | 18 17 mark
5785 | 16 18 acl
5786 | 18 19 dobj
5787 | 1 20 punct
5788 |
5789 | 0 2 ROOT
5790 | 2 1 nsubj
5791 | 2 3 dobj
5792 | 7 4 case
5793 | 7 5 det
5794 | 7 6 amod
5795 | 2 7 nmod
5796 | 9 8 case
5797 | 7 9 nmod
5798 | 2 10 cc
5799 | 12 11 nsubj
5800 | 2 12 conj
5801 | 15 13 advmod
5802 | 15 14 case
5803 | 12 15 nmod
5804 | 2 16 punct
5805 |
5806 | 0 14 ROOT
5807 | 2 1 det
5808 | 14 2 nmod:tmod
5809 | 4 3 compound
5810 | 2 4 dep
5811 | 7 5 case
5812 | 7 6 det
5813 | 4 7 nmod
5814 | 11 8 case
5815 | 11 9 det
5816 | 11 10 amod
5817 | 2 11 nmod
5818 | 14 12 punct
5819 | 14 13 expl
5820 | 18 15 det
5821 | 17 16 advmod
5822 | 18 17 amod
5823 | 14 18 nsubj
5824 | 21 19 dobj
5825 | 21 20 nsubj
5826 | 18 21 acl:relcl
5827 | 23 22 mark
5828 | 21 23 xcomp
5829 | 23 24 advmod
5830 | 14 25 punct
5831 |
5832 | 0 17 ROOT
5833 | 13 1 dep
5834 | 3 2 det
5835 | 1 3 dobj
5836 | 1 4 cc
5837 | 1 5 conj
5838 | 10 6 det
5839 | 9 7 compound
5840 | 9 8 punct
5841 | 10 9 amod
5842 | 5 10 dobj
5843 | 13 11 punct
5844 | 13 12 nsubj
5845 | 17 13 csubj
5846 | 13 14 compound:prt
5847 | 17 15 cop
5848 | 17 16 advmod
5849 | 17 18 punct
5850 |
5851 | 0 6 ROOT
5852 | 2 1 det
5853 | 6 2 nsubj
5854 | 6 3 cop
5855 | 6 4 advmod
5856 | 6 5 advmod
5857 | 10 7 case
5858 | 10 8 det
5859 | 10 9 amod
5860 | 6 10 nmod
5861 | 6 11 punct
5862 | 6 12 cc
5863 | 14 13 det
5864 | 20 14 nsubj
5865 | 19 15 case
5866 | 19 16 det
5867 | 19 17 amod
5868 | 19 18 compound
5869 | 14 19 nmod
5870 | 6 20 conj
5871 | 23 21 det
5872 | 23 22 amod
5873 | 20 23 dobj
5874 | 25 24 case
5875 | 23 25 nmod
5876 | 6 26 punct
5877 |
5878 | 0 2 ROOT
5879 | 2 1 nsubj
5880 | 4 3 mark
5881 | 2 4 xcomp
5882 | 7 5 det
5883 | 7 6 amod
5884 | 4 7 dobj
5885 | 9 8 mark
5886 | 4 9 advcl
5887 | 11 10 case
5888 | 9 11 nmod
5889 | 2 12 punct
5890 | 17 13 mark
5891 | 17 14 nsubj
5892 | 17 15 cop
5893 | 17 16 amod
5894 | 2 17 advcl
5895 | 17 18 cc
5896 | 17 19 conj
5897 | 21 20 det
5898 | 19 21 dobj
5899 | 2 22 punct
5900 | 2 23 cc
5901 | 2 24 conj
5902 | 33 25 mark
5903 | 28 26 mark
5904 | 28 27 nsubj
5905 | 33 28 advcl
5906 | 28 29 dobj
5907 | 33 30 punct
5908 | 33 31 advmod
5909 | 33 32 nsubj
5910 | 24 33 ccomp
5911 | 33 34 advmod
5912 | 33 35 punct
5913 | 33 36 dep
5914 | 38 37 mark
5915 | 36 38 xcomp
5916 | 38 39 xcomp
5917 | 43 40 case
5918 | 43 41 det
5919 | 43 42 amod
5920 | 39 43 nmod
5921 | 39 44 punct
5922 | 48 45 mark
5923 | 48 46 nsubjpass
5924 | 48 47 auxpass
5925 | 39 48 advcl
5926 | 52 49 case
5927 | 52 50 det
5928 | 52 51 amod
5929 | 48 52 nmod
5930 | 52 53 cc
5931 | 52 54 conj
5932 | 2 55 punct
5933 |
5934 | 0 6 ROOT
5935 | 2 1 advmod
5936 | 6 2 advcl
5937 | 4 3 det
5938 | 2 4 dobj
5939 | 6 5 punct
5940 | 10 7 case
5941 | 10 8 det
5942 | 10 9 amod
5943 | 6 10 nmod
5944 | 12 11 case
5945 | 10 12 nmod
5946 | 10 13 cc
5947 | 19 14 neg
5948 | 19 15 advmod
5949 | 19 16 det
5950 | 19 17 amod
5951 | 19 18 compound
5952 | 10 19 conj
5953 | 6 20 punct
5954 |
5955 | 0 1 ROOT
5956 | 5 2 det:predet
5957 | 5 3 det
5958 | 5 4 amod
5959 | 1 5 dobj
5960 | 5 6 dep
5961 | 6 7 acl
5962 | 11 8 case
5963 | 11 9 det
5964 | 11 10 compound
5965 | 7 11 nmod
5966 | 11 12 punct
5967 | 11 13 conj
5968 | 11 14 punct
5969 | 11 15 conj
5970 | 11 16 cc
5971 | 19 17 nummod
5972 | 19 18 compound
5973 | 11 19 conj
5974 | 1 20 punct
5975 |
5976 | 0 12 ROOT
5977 | 3 1 det
5978 | 3 2 amod
5979 | 12 3 nsubj
5980 | 5 4 compound
5981 | 3 5 dep
5982 | 8 6 case
5983 | 8 7 det
5984 | 5 8 nmod
5985 | 12 9 cop
5986 | 12 10 det
5987 | 12 11 compound
5988 | 12 13 acl
5989 | 16 14 case
5990 | 16 15 det
5991 | 13 16 nmod
5992 | 19 17 case
5993 | 19 18 det
5994 | 16 19 nmod
5995 | 21 20 case
5996 | 13 21 nmod
5997 | 12 22 punct
5998 |
5999 | 0 5 ROOT
6000 | 5 1 nsubj
6001 | 5 2 cop
6002 | 5 3 det
6003 | 5 4 amod
6004 | 8 6 case
6005 | 8 7 det
6006 | 5 8 nmod
6007 | 5 9 cc
6008 | 13 10 advmod
6009 | 13 11 nsubj
6010 | 13 12 aux
6011 | 5 13 conj
6012 | 16 14 det
6013 | 16 15 amod
6014 | 13 16 nmod:tmod
6015 | 5 17 punct
6016 |
6017 | 0 8 ROOT
6018 | 3 1 det
6019 | 3 2 amod
6020 | 8 3 nsubj
6021 | 8 4 cop
6022 | 8 5 det
6023 | 8 6 amod
6024 | 8 7 compound
6025 | 8 9 cc
6026 | 12 10 compound
6027 | 12 11 amod
6028 | 8 12 conj
6029 | 8 13 punct
6030 |
6031 | 0 3 ROOT
6032 | 3 1 nsubj
6033 | 3 2 aux
6034 | 5 4 det
6035 | 3 5 dobj
6036 | 5 6 nmod:npmod
6037 | 3 7 advmod
6038 | 16 8 mark
6039 | 11 9 case
6040 | 11 10 det
6041 | 16 11 nmod
6042 | 16 12 nsubj
6043 | 16 13 aux
6044 | 16 14 nsubj
6045 | 16 15 aux
6046 | 3 16 advcl
6047 | 16 17 dobj
6048 | 16 18 advmod
6049 | 20 19 advmod
6050 | 16 20 advmod
6051 | 3 21 punct
6052 |
6053 | 0 5 ROOT
6054 | 2 1 det
6055 | 5 2 nsubj
6056 | 5 3 cop
6057 | 5 4 nummod
6058 | 5 6 advmod
6059 | 5 7 punct
6060 |
6061 | 0 6 ROOT
6062 | 3 1 det
6063 | 3 2 compound
6064 | 6 3 nsubj
6065 | 6 4 aux
6066 | 6 5 neg
6067 | 9 7 det
6068 | 9 8 amod
6069 | 6 9 dobj
6070 | 6 10 punct
6071 | 6 11 cc
6072 | 14 12 nsubj
6073 | 14 13 aux
6074 | 6 14 conj
6075 | 22 15 mark
6076 | 18 16 nmod:poss
6077 | 18 17 compound
6078 | 22 18 nsubj
6079 | 18 19 nummod
6080 | 22 20 aux
6081 | 22 21 neg
6082 | 14 22 ccomp
6083 | 24 23 amod
6084 | 22 24 dobj
6085 | 22 25 advmod
6086 | 6 26 punct
6087 |
6088 | 0 2 ROOT
6089 | 2 1 det
6090 | 4 3 case
6091 | 2 4 nmod
6092 | 4 5 cc
6093 | 4 6 conj
6094 | 9 7 case
6095 | 9 8 det
6096 | 2 9 nmod
6097 | 14 10 mark
6098 | 14 11 nsubjpass
6099 | 14 12 auxpass
6100 | 14 13 neg
6101 | 9 14 ccomp
6102 | 19 15 case
6103 | 19 16 case
6104 | 19 17 det
6105 | 19 18 amod
6106 | 14 19 nmod
6107 | 2 20 punct
6108 |
6109 | 0 3 ROOT
6110 | 3 1 cop
6111 | 3 2 neg
6112 | 7 4 mark
6113 | 7 5 nsubj
6114 | 7 6 aux
6115 | 3 7 advcl
6116 | 9 8 mark
6117 | 7 9 xcomp
6118 | 9 10 iobj
6119 | 12 11 advmod
6120 | 13 12 advmod
6121 | 9 13 xcomp
6122 | 13 14 dobj
6123 | 17 15 advmod
6124 | 17 16 nsubj
6125 | 13 17 dep
6126 | 22 18 case
6127 | 22 19 det
6128 | 22 20 amod
6129 | 22 21 amod
6130 | 17 22 nmod
6131 | 17 23 cc
6132 | 17 24 conj
6133 | 24 25 compound:prt
6134 | 29 26 case
6135 | 29 27 det
6136 | 29 28 compound
6137 | 24 29 nmod
6138 | 33 30 case
6139 | 33 31 det
6140 | 33 32 compound
6141 | 24 33 nmod
6142 | 17 34 cc
6143 | 38 35 nsubj
6144 | 38 36 cop
6145 | 38 37 advmod
6146 | 17 38 conj
6147 | 38 39 cc
6148 | 38 40 conj
6149 | 3 41 punct
6150 |
6151 | 0 2 ROOT
6152 | 2 1 nsubj
6153 | 4 3 amod
6154 | 2 4 dobj
6155 | 4 5 cc
6156 | 7 6 det
6157 | 9 7 nsubj
6158 | 9 8 cop
6159 | 4 9 conj
6160 | 2 10 punct
6161 |
6162 | 0 7 ROOT
6163 | 7 1 csubj
6164 | 3 2 case
6165 | 1 3 nmod
6166 | 7 4 cop
6167 | 6 5 advmod
6168 | 7 6 advmod
6169 | 7 8 punct
6170 | 10 9 advmod
6171 | 12 10 amod
6172 | 12 11 amod
6173 | 7 12 appos
6174 | 7 13 punct
6175 |
6176 | 0 1 ROOT
6177 | 4 2 case
6178 | 4 3 det
6179 | 1 4 nmod
6180 | 4 5 cc
6181 | 4 6 conj
6182 | 1 7 punct
6183 |
6184 | 0 1 ROOT
6185 | 3 2 advmod
6186 | 1 3 dep
6187 | 5 4 det
6188 | 6 5 nsubj
6189 | 3 6 ccomp
6190 | 9 7 det
6191 | 9 8 amod
6192 | 6 9 dobj
6193 | 9 10 acl
6194 | 10 11 nmod
6195 | 1 12 punct
6196 |
6197 | 0 6 ROOT
6198 | 3 1 det
6199 | 3 2 amod
6200 | 6 3 nsubj
6201 | 5 4 nsubj
6202 | 3 5 acl:relcl
6203 | 12 7 mark
6204 | 11 8 nmod:poss
6205 | 11 9 amod
6206 | 11 10 compound
6207 | 12 11 nsubj
6208 | 6 12 ccomp
6209 | 14 13 amod
6210 | 12 14 dobj
6211 | 6 15 punct
6212 |
6213 | 0 5 ROOT
6214 | 3 1 det
6215 | 3 2 amod
6216 | 5 3 nsubj
6217 | 5 4 compound
6218 | 12 6 cop
6219 | 12 7 det
6220 | 12 8 punct
6221 | 12 9 compound
6222 | 12 10 punct
6223 | 12 11 compound
6224 | 5 12 ccomp
6225 | 12 13 cc
6226 | 15 14 amod
6227 | 12 15 conj
6228 | 18 16 mark
6229 | 18 17 nsubj
6230 | 15 18 ccomp
6231 | 18 19 xcomp
6232 | 23 20 case
6233 | 23 21 case
6234 | 23 22 advmod
6235 | 19 23 advcl
6236 | 5 24 punct
6237 |
6238 | 0 9 ROOT
6239 | 2 1 advmod
6240 | 9 2 ccomp
6241 | 5 3 case
6242 | 5 4 det
6243 | 2 5 nmod
6244 | 9 6 punct
6245 | 9 7 nsubj
6246 | 9 8 advmod
6247 | 11 10 nsubj
6248 | 9 11 ccomp
6249 | 14 12 case
6250 | 14 13 compound
6251 | 11 14 nmod
6252 | 9 15 punct
6253 |
6254 | 0 2 ROOT
6255 | 2 1 nsubj
6256 | 4 3 advmod
6257 | 2 4 dobj
6258 | 4 5 cc
6259 | 4 6 conj
6260 | 8 7 mark
6261 | 4 8 acl
6262 | 10 9 nmod:poss
6263 | 8 10 dobj
6264 | 14 11 case
6265 | 14 12 det:predet
6266 | 14 13 det
6267 | 8 14 nmod
6268 | 16 15 nsubj
6269 | 14 16 acl:relcl
6270 | 19 17 case
6271 | 19 18 det
6272 | 16 19 nmod
6273 | 2 20 punct
6274 |
6275 | 0 2 ROOT
6276 | 2 1 det
6277 | 2 3 punct
6278 | 5 4 det
6279 | 2 5 appos
6280 | 2 6 punct
6281 | 8 7 det
6282 | 2 8 appos
6283 | 2 9 punct
6284 | 2 10 punct
6285 | 13 11 nsubj
6286 | 13 12 cop
6287 | 2 13 parataxis
6288 | 13 14 advmod
6289 | 17 15 case
6290 | 17 16 det
6291 | 14 17 nmod
6292 | 21 18 nsubj
6293 | 21 19 aux
6294 | 21 20 advmod
6295 | 13 21 ccomp
6296 | 2 22 punct
6297 |
6298 | 0 5 ROOT
6299 | 3 1 det
6300 | 3 2 compound
6301 | 5 3 nsubj
6302 | 5 4 cop
6303 | 10 6 punct
6304 | 8 7 det
6305 | 10 8 nsubj
6306 | 10 9 cop
6307 | 5 10 dep
6308 | 10 11 punct
6309 | 5 12 cc
6310 | 14 13 amod
6311 | 17 14 nsubj
6312 | 17 15 cop
6313 | 17 16 det
6314 | 5 17 conj
6315 | 5 18 punct
6316 |
6317 | 0 2 ROOT
6318 | 2 1 det
6319 | 2 3 punct
6320 | 5 4 det
6321 | 2 5 conj
6322 | 2 6 cc
6323 | 8 7 det
6324 | 2 8 conj
6325 | 12 9 case
6326 | 12 10 det
6327 | 12 11 compound
6328 | 8 12 nmod
6329 | 2 13 punct
6330 |
6331 | 0 3 ROOT
6332 | 3 1 nsubj
6333 | 3 2 aux
6334 | 3 4 iobj
6335 | 7 5 det
6336 | 7 6 amod
6337 | 3 7 dobj
6338 | 13 8 case
6339 | 13 9 advmod
6340 | 13 10 det
6341 | 13 11 amod
6342 | 13 12 compound
6343 | 7 13 nmod
6344 | 13 14 acl
6345 | 14 15 dobj
6346 | 3 16 punct
6347 |
6348 | 0 3 ROOT
6349 | 3 1 nsubj
6350 | 3 2 advmod
6351 | 7 4 det
6352 | 7 5 compound
6353 | 7 6 compound
6354 | 3 7 dobj
6355 | 3 8 punct
6356 |
6357 | 0 10 ROOT
6358 | 10 1 cc
6359 | 4 2 case
6360 | 4 3 det
6361 | 10 4 nmod
6362 | 10 5 punct
6363 | 7 6 det
6364 | 10 7 nsubj
6365 | 10 8 cop
6366 | 10 9 advmod
6367 | 10 11 cc
6368 | 13 12 det
6369 | 14 13 nsubj
6370 | 10 14 conj
6371 | 14 15 compound:prt
6372 | 14 16 xcomp
6373 | 16 17 xcomp
6374 | 10 18 punct
6375 |
6376 | 0 6 ROOT
6377 | 3 1 det
6378 | 3 2 amod
6379 | 6 3 nsubj
6380 | 6 4 cop
6381 | 6 5 advmod
6382 | 6 7 punct
6383 | 11 8 nsubj
6384 | 11 9 cop
6385 | 11 10 advmod
6386 | 6 11 conj
6387 | 6 12 cc
6388 | 6 13 conj
6389 | 16 14 case
6390 | 16 15 det
6391 | 13 16 nmod
6392 | 6 17 punct
6393 |
6394 | 0 3 ROOT
6395 | 3 1 nsubj
6396 | 3 2 aux
6397 | 6 4 det
6398 | 6 5 amod
6399 | 3 6 dobj
6400 | 3 7 advmod
6401 | 3 8 punct
6402 |
6403 | 0 22 ROOT
6404 | 6 1 advmod
6405 | 6 2 punct
6406 | 5 3 det
6407 | 5 4 compound
6408 | 6 5 nsubj
6409 | 22 6 csubj
6410 | 10 7 det
6411 | 10 8 amod
6412 | 10 9 compound
6413 | 6 10 dobj
6414 | 12 11 nummod
6415 | 10 12 dep
6416 | 17 13 case
6417 | 17 14 det
6418 | 17 15 compound
6419 | 17 16 compound
6420 | 12 17 nmod
6421 | 10 18 cc
6422 | 20 19 det
6423 | 10 20 conj
6424 | 22 21 cop
6425 | 22 23 punct
6426 |
6427 | 0 3 ROOT
6428 | 3 1 mark
6429 | 3 2 nsubj
6430 | 8 4 case
6431 | 8 5 det
6432 | 7 6 advmod
6433 | 8 7 amod
6434 | 3 8 nmod
6435 | 8 9 cc
6436 | 8 10 conj
6437 | 13 11 mark
6438 | 13 12 nsubj
6439 | 3 13 advcl
6440 | 16 14 det
6441 | 16 15 compound
6442 | 19 16 nsubj
6443 | 19 17 aux
6444 | 19 18 advmod
6445 | 13 19 ccomp
6446 | 19 20 compound:prt
6447 | 3 21 punct
6448 |
6449 | 0 7 ROOT
6450 | 3 1 nmod:poss
6451 | 3 2 amod
6452 | 7 3 nsubj
6453 | 7 4 aux
6454 | 7 5 cop
6455 | 7 6 det
6456 | 9 8 mark
6457 | 7 9 acl
6458 | 11 10 amod
6459 | 9 11 dobj
6460 | 7 12 punct
6461 |
6462 | 0 1 ROOT
6463 | 4 2 mark
6464 | 4 3 nsubj
6465 | 1 4 advcl
6466 | 7 5 det
6467 | 7 6 compound
6468 | 4 7 dobj
6469 | 1 8 punct
6470 |
6471 | 0 13 ROOT
6472 | 2 1 advmod
6473 | 13 2 advmod
6474 | 5 3 case
6475 | 5 4 compound
6476 | 13 5 nmod
6477 | 5 6 punct
6478 | 5 7 dep
6479 | 13 8 nsubj
6480 | 10 9 case
6481 | 8 10 nmod
6482 | 12 11 case
6483 | 10 12 nmod
6484 | 15 14 compound
6485 | 13 15 dobj
6486 | 17 16 case
6487 | 13 17 nmod
6488 | 13 18 punct
6489 | 13 19 cc
6490 | 13 20 conj
6491 | 20 21 advmod
6492 | 13 22 punct
6493 |
6494 | 0 1 ROOT
6495 | 7 2 amod
6496 | 7 3 det
6497 | 7 4 punct
6498 | 7 5 compound
6499 | 7 6 punct
6500 | 1 7 dep
6501 | 7 8 advmod
6502 | 7 9 cc
6503 | 7 10 conj
6504 | 14 11 det
6505 | 14 12 amod
6506 | 14 13 compound
6507 | 10 14 dobj
6508 | 1 15 punct
6509 |
6510 | 0 4 ROOT
6511 | 4 1 nsubj
6512 | 3 2 case
6513 | 4 3 advmod
6514 | 6 5 det
6515 | 4 6 dobj
6516 | 4 7 compound:prt
6517 | 10 8 case
6518 | 10 9 det
6519 | 4 10 nmod
6520 | 12 11 nmod:tmod
6521 | 4 12 dep
6522 | 12 13 advmod
6523 | 12 14 punct
6524 | 16 15 compound
6525 | 17 16 nsubj
6526 | 12 17 parataxis
6527 | 4 18 punct
6528 |
6529 | 0 4 ROOT
6530 | 2 1 det
6531 | 4 2 nsubj
6532 | 4 3 cop
6533 | 4 5 punct
6534 | 4 6 cc
6535 | 4 7 conj
6536 | 9 8 det
6537 | 7 9 dobj
6538 | 11 10 case
6539 | 9 11 nmod
6540 | 4 12 punct
6541 |
6542 | 0 2 ROOT
6543 | 2 1 nsubj
6544 | 4 3 mark
6545 | 2 4 xcomp
6546 | 7 5 compound
6547 | 7 6 compound
6548 | 4 7 dobj
6549 | 9 8 mark
6550 | 4 9 advcl
6551 | 11 10 det
6552 | 9 11 dobj
6553 | 2 12 punct
6554 |
6555 | 0 5 ROOT
6556 | 3 1 det
6557 | 3 2 amod
6558 | 5 3 nsubj
6559 | 5 4 cop
6560 | 5 6 nummod
6561 | 11 7 nsubj
6562 | 10 8 case
6563 | 10 9 nmod:poss
6564 | 11 10 nmod
6565 | 5 11 ccomp
6566 | 15 12 advmod
6567 | 15 13 advmod
6568 | 15 14 compound
6569 | 11 15 dobj
6570 | 17 16 case
6571 | 15 17 nmod
6572 | 26 18 nsubjpass
6573 | 22 19 case
6574 | 22 20 compound
6575 | 22 21 nummod
6576 | 18 22 nmod
6577 | 26 23 auxpass
6578 | 26 24 neg
6579 | 26 25 advmod
6580 | 11 26 parataxis
6581 | 30 27 case
6582 | 30 28 det
6583 | 30 29 compound
6584 | 32 30 nmod
6585 | 32 31 nsubj
6586 | 26 32 ccomp
6587 | 35 33 nsubj
6588 | 35 34 cop
6589 | 32 35 ccomp
6590 | 38 36 case
6591 | 38 37 amod
6592 | 35 38 nmod
6593 | 40 39 case
6594 | 38 40 nmod
6595 | 40 41 nummod
6596 | 5 42 punct
6597 |
6598 | 0 1 ROOT
6599 | 3 2 case
6600 | 1 3 nmod
6601 | 3 4 punct
6602 | 7 5 det
6603 | 7 6 amod
6604 | 4 7 root
6605 | 7 8 punct
6606 | 12 9 det
6607 | 12 10 amod
6608 | 12 11 compound
6609 | 7 12 appos
6610 | 7 13 punct
6611 | 18 14 det:predet
6612 | 18 15 det
6613 | 18 16 amod
6614 | 18 17 compound
6615 | 7 18 appos
6616 | 7 19 punct
6617 |
6618 | 0 5 ROOT
6619 | 3 1 det
6620 | 3 2 amod
6621 | 5 3 nsubj
6622 | 3 4 dep
6623 | 8 6 det
6624 | 8 7 compound
6625 | 5 8 dobj
6626 | 10 9 nsubj
6627 | 8 10 acl:relcl
6628 | 13 11 det:predet
6629 | 13 12 det
6630 | 10 13 nmod:tmod
6631 | 8 14 cc
6632 | 16 15 det
6633 | 8 16 conj
6634 | 18 17 case
6635 | 16 18 nmod
6636 | 20 19 mark
6637 | 5 20 advcl
6638 | 22 21 det
6639 | 20 22 dobj
6640 | 5 23 punct
6641 |
6642 | 0 16 ROOT
6643 | 2 1 mark
6644 | 16 2 advcl
6645 | 6 3 det
6646 | 6 4 amod
6647 | 6 5 amod
6648 | 2 6 dobj
6649 | 10 7 case
6650 | 10 8 det
6651 | 10 9 compound
6652 | 2 10 nmod
6653 | 16 11 punct
6654 | 13 12 nmod:poss
6655 | 16 13 nsubj
6656 | 16 14 aux
6657 | 16 15 advmod
6658 | 16 17 punct
6659 |
6660 | 0 2 ROOT
6661 | 2 1 nsubj
6662 | 4 3 det
6663 | 5 4 nsubj
6664 | 2 5 ccomp
6665 | 8 6 case
6666 | 8 7 compound
6667 | 5 8 nmod
6668 | 2 9 cc
6669 | 11 10 nsubj
6670 | 2 11 conj
6671 | 14 12 det
6672 | 14 13 compound
6673 | 11 14 dobj
6674 | 16 15 mark
6675 | 11 16 advcl
6676 | 18 17 det
6677 | 16 18 dobj
6678 | 2 19 punct
6679 |
6680 | 0 3 ROOT
6681 | 3 1 case
6682 | 3 2 det
6683 | 8 4 case
6684 | 8 5 det
6685 | 8 6 amod
6686 | 8 7 compound
6687 | 3 8 nmod
6688 | 10 9 advmod
6689 | 3 10 acl
6690 | 12 11 mark
6691 | 10 12 xcomp
6692 | 12 13 compound:prt
6693 | 17 14 case
6694 | 17 15 det
6695 | 17 16 amod
6696 | 12 17 nmod
6697 | 3 18 punct
6698 |
6699 | 0 5 ROOT
6700 | 5 1 nsubj
6701 | 1 2 cc
6702 | 1 3 conj
6703 | 5 4 cop
6704 |
6705 | 0 1 ROOT
6706 | 1 2 acl
6707 | 2 3 xcomp
6708 | 6 4 advmod
6709 | 6 5 nsubj
6710 | 2 6 advcl
6711 | 6 7 iobj
6712 | 10 8 det
6713 | 10 9 amod
6714 | 6 10 dobj
6715 | 6 11 cc
6716 | 13 12 nsubj
6717 | 6 13 conj
6718 | 13 14 dobj
6719 | 18 15 mark
6720 | 18 16 nsubj
6721 | 18 17 aux
6722 | 13 18 ccomp
6723 | 18 19 dobj
6724 | 23 20 case
6725 | 23 21 det
6726 | 23 22 compound
6727 | 19 23 nmod
6728 | 1 24 punct
6729 |
6730 | 0 16 ROOT
6731 | 2 1 nsubj
6732 | 16 2 csubjpass
6733 | 5 3 mark
6734 | 5 4 aux
6735 | 2 5 xcomp
6736 | 7 6 compound
6737 | 5 7 dobj
6738 | 11 8 case
6739 | 11 9 det
6740 | 11 10 compound
6741 | 5 11 nmod
6742 | 14 12 case
6743 | 14 13 det
6744 | 11 14 nmod
6745 | 16 15 auxpass
6746 | 18 17 case
6747 | 16 18 advcl
6748 | 18 19 punct
6749 | 18 20 amod
6750 | 24 21 case
6751 | 24 22 det
6752 | 24 23 compound
6753 | 20 24 nmod
6754 | 16 25 punct
6755 |
6756 | 0 3 ROOT
6757 | 3 1 nsubj
6758 | 3 2 advmod
6759 | 5 4 case
6760 | 3 5 nmod
6761 | 7 6 case
6762 | 5 7 nmod
6763 | 3 8 cc
6764 | 13 9 mark
6765 | 13 10 nsubj
6766 | 13 11 cop
6767 | 13 12 case
6768 | 3 13 conj
6769 | 15 14 nsubj
6770 | 13 15 acl:relcl
6771 | 17 16 mark
6772 | 15 17 xcomp
6773 | 17 18 dobj
6774 | 17 19 compound:prt
6775 | 23 20 advmod
6776 | 23 21 nsubj
6777 | 23 22 advmod
6778 | 17 23 dep
6779 | 23 24 dobj
6780 | 26 25 case
6781 | 24 26 nmod
6782 | 28 27 case
6783 | 23 28 nmod
6784 | 23 29 cc
6785 | 33 30 nsubj
6786 | 33 31 aux
6787 | 33 32 neg
6788 | 23 33 conj
6789 | 35 34 det
6790 | 33 35 dobj
6791 | 37 36 nsubj
6792 | 35 37 acl:relcl
6793 | 37 38 advmod
6794 | 3 39 punct
6795 |
6796 | 0 5 ROOT
6797 | 2 1 det
6798 | 5 2 nsubj
6799 | 5 3 cop
6800 | 5 4 advmod
6801 | 8 6 case
6802 | 8 7 nmod:poss
6803 | 5 8 nmod
6804 | 11 9 amod
6805 | 11 10 compound
6806 | 8 11 dep
6807 | 5 12 punct
6808 |
6809 | 0 1 ROOT
6810 | 1 2 dep
6811 | 2 3 advmod
6812 | 5 4 mark
6813 | 3 5 advcl
6814 | 8 6 case
6815 | 8 7 amod
6816 | 5 8 nmod
6817 | 1 9 punct
6818 |
6819 | 0 6 ROOT
6820 | 6 1 advmod
6821 | 6 2 punct
6822 | 6 3 punct
6823 | 6 4 amod
6824 | 6 5 compound
6825 | 6 7 punct
6826 | 6 8 dep
6827 | 8 9 cc
6828 | 8 10 conj
6829 | 13 11 case
6830 | 13 12 amod
6831 | 8 13 nmod
6832 | 6 14 punct
6833 | 16 15 amod
6834 | 6 16 dep
6835 | 19 17 case
6836 | 19 18 det
6837 | 16 19 nmod
6838 | 6 20 punct
6839 | 22 21 det
6840 | 28 22 nsubj
6841 | 22 23 acl
6842 | 26 24 case
6843 | 26 25 det
6844 | 23 26 nmod
6845 | 28 27 cop
6846 | 6 28 parataxis
6847 | 32 29 case
6848 | 32 30 det
6849 | 32 31 amod
6850 | 28 32 nmod
6851 | 6 33 punct
6852 |
6853 | 0 3 ROOT
6854 | 2 1 det
6855 | 3 2 nsubj
6856 | 6 4 mark
6857 | 6 5 cop
6858 | 3 6 xcomp
6859 | 9 7 case
6860 | 9 8 det
6861 | 6 9 nmod
6862 | 16 10 case
6863 | 16 11 mark
6864 | 16 12 nsubj
6865 | 16 13 cop
6866 | 16 14 neg
6867 | 16 15 dep
6868 | 9 16 nmod
6869 | 16 17 cc
6870 | 16 18 conj
6871 | 3 19 punct
6872 |
6873 | 0 3 ROOT
6874 | 3 1 nsubj
6875 | 3 2 aux
6876 | 3 4 dobj
6877 | 8 5 case
6878 | 7 6 advmod
6879 | 8 7 nummod
6880 | 3 8 nmod
6881 | 3 9 advmod
6882 | 3 10 cc
6883 | 3 11 conj
6884 | 13 12 neg
6885 | 11 13 dobj
6886 | 15 14 case
6887 | 13 15 nmod
6888 | 15 16 cc
6889 | 15 17 conj
6890 | 3 18 punct
6891 |
6892 | 0 6 ROOT
6893 | 3 1 det
6894 | 3 2 amod
6895 | 6 3 nsubj
6896 | 6 4 aux
6897 | 6 5 neg
6898 | 9 7 det
6899 | 9 8 compound
6900 | 6 9 dobj
6901 | 6 10 punct
6902 |
6903 | 0 1 ROOT
6904 | 1 2 punct
6905 | 7 3 mark
6906 | 5 4 det
6907 | 7 5 nsubj
6908 | 7 6 aux
6909 | 1 7 dep
6910 | 10 8 advmod
6911 | 10 9 amod
6912 | 7 10 dobj
6913 | 13 11 dobj
6914 | 13 12 nsubj
6915 | 10 13 acl:relcl
6916 | 15 14 cop
6917 | 13 15 ccomp
6918 | 1 16 punct
6919 |
6920 |
--------------------------------------------------------------------------------
/data/V2/Laptops/vocab_pol.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Laptops/vocab_pol.vocab
--------------------------------------------------------------------------------
/data/V2/Laptops/vocab_tok.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Laptops/vocab_tok.vocab
--------------------------------------------------------------------------------
/data/V2/MAMS/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/MAMS/.DS_Store
--------------------------------------------------------------------------------
/data/V2/MAMS/dep_type.json:
--------------------------------------------------------------------------------
1 | ["root", "det", "nsubjpass", "nmod:poss", "ccomp", "compound:prt", "advcl", "compound", "acl", "ROOT", "auxpass", "acl:relcl", "nmod:tmod", "csubjpass", "parataxis", "nummod", "mark", "aux", "dep", "expl", "det:predet", "amod", "cc:preconj", "cop", "case", "xcomp", "iobj", "discourse", "conj", "punct", "nsubj", "nmod:npmod", "mwe", "csubj", "advmod", "neg", "cc", "appos", "nmod", "dobj"]
--------------------------------------------------------------------------------
/data/V2/MAMS/vocab_pol.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/MAMS/vocab_pol.vocab
--------------------------------------------------------------------------------
/data/V2/MAMS/vocab_tok.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/MAMS/vocab_tok.vocab
--------------------------------------------------------------------------------
/data/V2/Restaurants/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Restaurants/.DS_Store
--------------------------------------------------------------------------------
/data/V2/Restaurants/dep_type.json:
--------------------------------------------------------------------------------
1 | ["advcl", "acl", "nmod:npmod", "dobj", "discourse", "aux", "ROOT", "det", "appos", "nmod:poss", "csubj", "cc", "nsubj", "cc:preconj", "nmod", "conj", "auxpass", "compound:prt", "advmod", "neg", "punct", "cop", "xcomp", "det:predet", "ccomp", "iobj", "expl", "amod", "nmod:tmod", "parataxis", "acl:relcl", "mark", "compound", "nsubjpass", "csubjpass", "dep", "nummod", "root", "mwe", "case"]
--------------------------------------------------------------------------------
/data/V2/Restaurants/vocab_pol.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Restaurants/vocab_pol.vocab
--------------------------------------------------------------------------------
/data/V2/Restaurants/vocab_tok.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Restaurants/vocab_tok.vocab
--------------------------------------------------------------------------------
/data/V2/Tweets/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Tweets/.DS_Store
--------------------------------------------------------------------------------
/data/V2/Tweets/dep_type.json:
--------------------------------------------------------------------------------
1 | ["xcomp", "nmod:poss", "root", "acl", "nsubj", "cc", "compound:prt", "ROOT", "case", "appos", "advcl", "nsubjpass", "compound", "nmod", "discourse", "det", "nmod:npmod", "neg", "nummod", "expl", "dep", "cc:preconj", "mwe", "dobj", "aux", "punct", "acl:relcl", "nmod:tmod", "parataxis", "amod", "conj", "auxpass", "cop", "mark", "advmod", "ccomp", "csubj", "det:predet", "iobj", "csubjpass"]
--------------------------------------------------------------------------------
/data/V2/Tweets/vocab_pol.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Tweets/vocab_pol.vocab
--------------------------------------------------------------------------------
/data/V2/Tweets/vocab_tok.vocab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/Tweets/vocab_tok.vocab
--------------------------------------------------------------------------------
/data/V2/corenlp.py:
--------------------------------------------------------------------------------
1 | # _*_coding:utf-8_*_
2 | from __future__ import print_function
3 |
4 | import glob
5 | import json
6 | import logging
7 | import os
8 | import re
9 | import socket
10 | import subprocess
11 | import sys
12 | import time
13 |
14 | import psutil
15 |
16 | try:
17 | from urlparse import urlparse
18 | except ImportError:
19 | from urllib.parse import urlparse
20 |
21 | import requests
22 |
23 |
class StanfordCoreNLP:
    """Client for a Stanford CoreNLP server.

    Connects to an already-running HTTP server when ``path_or_host`` starts
    with 'http'; otherwise launches a local Java server from the CoreNLP
    distribution directory at ``path_or_host``.
    """

    def __init__(self, path_or_host, port=None, memory='4g', lang='en', timeout=1500, quiet=True,
                 logging_level=logging.WARNING, max_retries=5):
        """
        :param path_or_host: CoreNLP install dir, or an 'http...' host of a running server.
        :param port: server port; auto-selected for a local server when None.
        :param memory: JVM heap size such as '4g' (validated in _check_args).
        :param lang: one of en/zh/ar/fr/de/es.
        :param timeout: stored on the instance; NOTE(review): it is never passed
            to the HTTP requests or the server command line — confirm intent.
        :param quiet: when True, discard the local server's stdout/stderr.
        :param logging_level: level handed to logging.basicConfig.
        :param max_retries: connection attempts while waiting for a local server.
        """
        self.path_or_host = path_or_host
        self.port = port
        self.memory = memory
        self.lang = lang
        self.timeout = timeout
        self.quiet = quiet
        self.logging_level = logging_level

        logging.basicConfig(level=self.logging_level)

        # Validate lang and memory before doing anything expensive.
        self._check_args()

        if path_or_host.startswith('http'):
            self.url = path_or_host + ':' + str(port)
            logging.info('Using an existing server {}'.format(self.url))
        else:

            # Check Java is on PATH; `java -version` exits 0 when present.
            if not subprocess.call(['java', '-version'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) == 0:
                raise RuntimeError('Java not found.')

            # Check if the dir exists
            if not os.path.isdir(self.path_or_host):
                raise IOError(str(self.path_or_host) + ' is not a directory.')
            directory = os.path.normpath(self.path_or_host) + os.sep
            self.class_path_dir = directory

            # Check if the language-specific model jar exists in the distribution dir.
            switcher = {
                'en': 'stanford-corenlp-[0-9].[0-9].[0-9]-models.jar',
                'zh': 'stanford-chinese-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
                'ar': 'stanford-arabic-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
                'fr': 'stanford-french-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
                'de': 'stanford-german-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
                'es': 'stanford-spanish-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar'
            }
            jars = {
                'en': 'stanford-corenlp-x.x.x-models.jar',
                'zh': 'stanford-chinese-corenlp-yyyy-MM-dd-models.jar',
                'ar': 'stanford-arabic-corenlp-yyyy-MM-dd-models.jar',
                'fr': 'stanford-french-corenlp-yyyy-MM-dd-models.jar',
                'de': 'stanford-german-corenlp-yyyy-MM-dd-models.jar',
                'es': 'stanford-spanish-corenlp-yyyy-MM-dd-models.jar'
            }
            if len(glob.glob(directory + switcher.get(self.lang))) <= 0:
                raise IOError(jars.get(
                    self.lang) + ' not exists. You should download and place it in the ' + directory + ' first.')

            # If port not set, auto select the first free port >= 9000.
            if self.port is None:
                for port_candidate in range(9000, 65535):
                    if port_candidate not in [conn.laddr[1] for conn in psutil.net_connections()]:
                        self.port = port_candidate
                        break

            # Check if the (possibly user-supplied) port is in use.
            if self.port in [conn.laddr[1] for conn in psutil.net_connections()]:
                raise IOError('Port ' + str(self.port) + ' is already in use.')

            # Start native server
            logging.info('Initializing native server...')
            cmd = "java"
            java_args = "-Xmx{}".format(self.memory)
            java_class = "edu.stanford.nlp.pipeline.StanfordCoreNLPServer"
            class_path = '"{}*"'.format(directory)

            args = [cmd, java_args, '-cp', class_path, java_class, '-port', str(self.port)]

            args = ' '.join(args)

            logging.info(args)

            # Silence the server output when quiet=True.
            with open(os.devnull, 'w') as null_file:
                out_file = None
                if self.quiet:
                    out_file = null_file

                self.p = subprocess.Popen(args, shell=True, stdout=out_file, stderr=subprocess.STDOUT)
                logging.info('Server shell PID: {}'.format(self.p.pid))

            self.url = 'http://localhost:' + str(self.port)

            # Wait (polling once per second) until the server accepts connections.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            host_name = urlparse(self.url).hostname
            time.sleep(1)  # OSX, not tested
            trial = 1
            while sock.connect_ex((host_name, self.port)):
                if trial > max_retries:
                    raise ValueError('Corenlp server is not available')
                logging.info('Waiting until the server is available.')
                trial += 1
                time.sleep(1)
            logging.info('The server is available.')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        """Kill the locally-spawned server process tree (no-op for remote servers)."""
        logging.info('Cleanup...')
        if hasattr(self, 'p'):
            try:
                parent = psutil.Process(self.p.pid)
            except psutil.NoSuchProcess:
                logging.info('No process: {}'.format(self.p.pid))
                return

            # Safety check: only kill a process whose command line references
            # the CoreNLP directory we started it from.
            if self.class_path_dir not in ' '.join(parent.cmdline()):
                logging.info('Process not in: {}'.format(parent.cmdline()))
                return

            children = parent.children(recursive=True)
            for process in children:
                logging.info('Killing pid: {}, cmdline: {}'.format(process.pid, process.cmdline()))
                # process.send_signal(signal.SIGTERM)
                process.kill()

            logging.info('Killing shell pid: {}, cmdline: {}'.format(parent.pid, parent.cmdline()))
            # parent.send_signal(signal.SIGTERM)
            parent.kill()

    def annotate(self, text, properties=None):
        """POST raw text to the server with a CoreNLP properties dict and
        return the decoded JSON response."""
        if sys.version_info.major >= 3:
            text = text.encode('utf-8')

        r = requests.post(self.url, params={'properties': str(properties)}, data=text,
                          headers={'Connection': 'close'})
        r_dict = json.loads(r.text)
        return r_dict

    def tregex(self, sentence, pattern):
        """Run a Tregex pattern over *sentence* via the /tregex endpoint."""
        tregex_url = self.url + '/tregex'
        r_dict = self._request(tregex_url, "tokenize,ssplit,depparse,parse", sentence, pattern=pattern)
        return r_dict

    def tokensregex(self, sentence, pattern):
        """Run a TokensRegex pattern over *sentence* via /tokensregex."""
        tokensregex_url = self.url + '/tokensregex'
        r_dict = self._request(tokensregex_url, "tokenize,ssplit,depparse", sentence, pattern=pattern)
        return r_dict

    def semgrex(self, sentence, pattern):
        """Run a Semgrex pattern over *sentence* via /semgrex."""
        semgrex_url = self.url + '/semgrex'
        r_dict = self._request(semgrex_url, "tokenize,ssplit,depparse", sentence, pattern=pattern)
        return r_dict

    def word_tokenize(self, sentence, span=False):
        """Tokenize *sentence*; optionally also return character-offset spans."""
        r_dict = self._request('ssplit,tokenize', sentence)
        tokens = [token['originalText'] for s in r_dict['sentences'] for token in s['tokens']]

        # Whether return token span
        if span:
            spans = [(token['characterOffsetBegin'], token['characterOffsetEnd']) for s in r_dict['sentences'] for token
                     in s['tokens']]
            return tokens, spans
        else:
            return tokens

    def pos_tag(self, sentence):
        """Return [(word, POS tag), ...] for *sentence*."""
        r_dict = self._request(self.url, 'pos', sentence)
        words = []
        tags = []
        for s in r_dict['sentences']:
            for token in s['tokens']:
                words.append(token['originalText'])
                tags.append(token['pos'])
        return list(zip(words, tags))

    def ner(self, sentence):
        """Return [(word, NER tag), ...] for *sentence*."""
        r_dict = self._request(self.url, 'ner', sentence)
        words = []
        ner_tags = []
        for s in r_dict['sentences']:
            for token in s['tokens']:
                words.append(token['originalText'])
                ner_tags.append(token['ner'])
        return list(zip(words, ner_tags))

    def parse(self, sentence):
        """Return the constituency parse string of each sentence."""
        r_dict = self._request(self.url, 'pos,parse', sentence)
        return [s['parse'] for s in r_dict['sentences']]

    def dependency_parse(self, sentence):
        """Return [(relation, governor index, dependent index), ...] from the
        basic dependencies of each sentence."""
        r_dict = self._request(self.url, 'depparse', sentence)
        return [(dep['dep'], dep['governor'], dep['dependent']) for s in r_dict['sentences'] for dep in
                s['basicDependencies']]

    def coref(self, text):
        """Return coreference chains as lists of (sentNum, start, end, text)."""
        r_dict = self._request('coref', text)

        corefs = []
        for k, mentions in r_dict['corefs'].items():
            simplified_mentions = []
            for m in mentions:
                simplified_mentions.append((m['sentNum'], m['startIndex'], m['endIndex'], m['text']))
            corefs.append(simplified_mentions)
        return corefs

    def switch_language(self, language="en"):
        """Switch the pipeline language used by subsequent requests."""
        self._check_language(language)
        self.lang = language

    def request(self, annotators=None, data=None, *args, **kwargs):
        """Public request helper; posts *data* to the server root URL with the
        given annotators (and parse.kbest=3)."""
        if sys.version_info.major >= 3:
            data = data.encode('utf-8')

        properties = {'annotators': annotators, 'outputFormat': 'json'}
        params = {'properties': str(properties), 'pipelineLanguage': self.lang, 'parse.kbest': 3}
        if 'pattern' in kwargs:
            params = {"pattern": kwargs['pattern'], 'properties': str(properties), 'pipelineLanguage': self.lang}

        logging.info(params)
        r = requests.post(self.url, params=params, data=data, headers={'Connection': 'close'})
        r_dict = json.loads(r.text)

        return r_dict

    def _request(self, *args, **kwargs):
        """Internal POST helper used by the convenience methods.

        Bug fix: this method did not exist — every convenience method called
        ``self._request`` and raised AttributeError. The call sites use two
        conventions, both supported here:

        * ``_request(url, annotators, data, ...)`` — e.g. pos_tag, tregex
          (the first positional argument is an 'http...' URL);
        * ``_request(annotators, data)`` — e.g. word_tokenize, coref
          (posts to ``self.url``).
        """
        if args and args[0].startswith('http'):
            url, annotators, data = args[0], args[1], args[2]
        else:
            url = self.url
            annotators, data = args[0], args[1]

        if sys.version_info.major >= 3:
            data = data.encode('utf-8')

        properties = {'annotators': annotators, 'outputFormat': 'json'}
        params = {'properties': str(properties), 'pipelineLanguage': self.lang}
        if 'pattern' in kwargs:
            params['pattern'] = kwargs['pattern']

        logging.info(params)
        r = requests.post(url, params=params, data=data, headers={'Connection': 'close'})
        return json.loads(r.text)

    def _check_args(self):
        """Validate constructor arguments; raises ValueError on bad lang/memory."""
        self._check_language(self.lang)
        # raw string: '\d' in a plain literal is an invalid escape sequence
        if not re.match(r'\dg', self.memory):
            raise ValueError('memory=' + self.memory + ' not supported. Use 4g, 6g, 8g and etc. ')

    def _check_language(self, lang):
        """Raise ValueError unless *lang* is one of the supported codes."""
        if lang not in ['en', 'zh', 'ar', 'fr', 'de', 'es']:
            raise ValueError('lang=' + self.lang + ' not supported. Use English(en), Chinese(zh), Arabic(ar), '
                                                   'French(fr), German(de), Spanish(es).')
257 |
--------------------------------------------------------------------------------
/data/V2/preprocess_dependency.py:
--------------------------------------------------------------------------------
1 | import os
2 | import argparse
3 | import json
4 | from corenlp import StanfordCoreNLP
5 | from tqdm import tqdm
6 |
7 | FULL_MODEL='./stanford-corenlp-full-2018-10-05'
8 |
def request_features_from_stanford(data_dir, flag):
    """Dependency-parse one data split with Stanford CoreNLP.

    Reads ``<flag>_con_new.json`` from *data_dir*, parses each (lower-cased,
    whitespace-joined) token sequence, and writes one
    ``governor<TAB>dependent<TAB>relation`` line per arc to ``<flag>.txt.dep``,
    with a blank line separating sentences. Silently returns (with a message)
    when the input file does not exist.
    """
    data_path = os.path.join(data_dir, flag + '_con_new.json')
    if not os.path.exists(data_path):
        print("{} not exist".format(data_path))
        return

    token_str = []
    with open(data_path, 'r', encoding='utf-8') as f:
        raw_data = json.load(f)
        for d in raw_data:
            # lower-case to match the tokenization used elsewhere in the project
            token_str.append([t.lower() for t in d['token']])

    # Hoisted out of the loop: the request properties are identical for
    # every sentence, so there is no need to rebuild the dict per iteration.
    props = {'timeout': '5000000', 'annotators': 'pos, parse, depparse', 'tokenize.whitespace': 'true',
             'ssplit.eolonly': 'true', 'pipelineLanguage': 'en', 'outputFormat': 'json'}

    all_data = []
    with StanfordCoreNLP(FULL_MODEL, lang='en') as nlp:
        for sentence in tqdm(token_str):
            all_data.append(nlp.annotate(' '.join(sentence), properties=props))

    with open(os.path.join(data_dir, flag + '.txt.dep'), 'w', encoding='utf8') as fout_dep:
        for data in all_data:
            # tokenize.whitespace + ssplit.eolonly keep each input on a single
            # sentence, hence only sentences[0] is consumed here.
            for dep_info in data["sentences"][0]["basicDependencies"]:
                fout_dep.write("{}\t{}\t{}\n".format(dep_info["governor"], dep_info["dependent"], dep_info["dep"]))
            fout_dep.write("\n")
36 |
37 |
def get_dep_type_dict(data_dir):
    """Build the dependency-relation vocabulary for a dataset directory.

    Scans the train/valid/test ``.txt.dep`` files (skipping splits that do
    not exist), collects every distinct relation label, and saves them to
    ``dep_type.json`` in *data_dir*.
    """
    dep_type_set = set()
    for flag in ["train", "valid", "test"]:
        data_path = os.path.join(data_dir, flag + '.txt.dep')
        if not os.path.exists(data_path):
            continue
        with open(data_path, 'r', encoding='utf-8') as f:
            for line in f:
                if line == "\n":
                    # blank separator line between sentences
                    continue
                governor, dependent, dep_type = line.strip().split("\t")
                dep_type_set.add(dep_type)

    save_path = os.path.join(data_dir, "dep_type.json")
    with open(save_path, 'w', encoding='utf-8') as f:
        # sorted(): plain list(set) made the label order — and hence any label
        # ids derived from this file — non-deterministic across runs.
        json.dump(sorted(dep_type_set), f, ensure_ascii=False)
54 |
55 |
def get_args(argv=None):
    """Parse command-line options.

    :param argv: optional list of argument strings; defaults to sys.argv[1:].
        Added (backward-compatibly) so the parser can be driven from tests.
    :return: argparse.Namespace with a ``data_path`` attribute.
    """
    parser = argparse.ArgumentParser()
    # Previously this option had required=True together with a default, which
    # made the default dead code; it is now genuinely optional.
    parser.add_argument("--data_path", default="./Tweets/", type=str)
    args = parser.parse_args(argv)

    return args
62 |
if __name__=='__main__':
    # Parse dependencies for every split, then build the relation-type
    # vocabulary from whichever .txt.dep files now exist under data_path.
    args=get_args()
    for flag in ["train","valid","test"]:
        request_features_from_stanford(args.data_path,flag)
    get_dep_type_dict(args.data_path)
68 |
--------------------------------------------------------------------------------
/data/V2/stanford-corenlp/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/data/V2/stanford-corenlp/.DS_Store
--------------------------------------------------------------------------------
/dataloader.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import torch
4 | import numpy as np
5 | from transformers import BertTokenizer
6 |
7 | import copy
8 | import random
9 | import itertools
10 | from itertools import chain
11 |
12 | from torch.utils.data import Dataset, DataLoader
13 | from torch.utils.data.dataloader import _SingleProcessDataLoaderIter, _MultiProcessingDataLoaderIter
14 | from spans import *
15 | import pickle
16 | from dep_parser import DepInstanceParser
17 |
18 |
19 |
20 |
class ABSA_Dataset(Dataset):
    """Aspect-based sentiment analysis dataset.

    Loads the parsed JSON for one split, pairs each sentence with its
    dependency arcs from the matching ``<flag>.txt.dep`` file, and
    pre-computes the per-sentence / per-aspect features consumed by
    ``ABSA_collate_fn``.
    """

    def __init__(self, args, file_name, vocab, tokenizer,flag):
        # args: experiment config — this class reads need_preprocess, data_dir,
        #     max_len, lower, special_token, con_dep_conditional, max_num_spans.
        # file_name: path to the split's JSON file.
        # vocab: dict with 'token' and 'polarity' vocab objects.
        # tokenizer: BERT tokenizer (tokenize / convert_tokens_to_ids).
        # flag: split name ('train'/'valid'/'test'), selects the .txt.dep file.
        super().__init__()

        # load raw data
        with open(file_name,'r',encoding='utf-8') as f:
            raw_data = json.load(f)

        if args.need_preprocess:
            # NOTE(review): process_raw is a stub (returns None); this branch
            # would dump null — presumably never taken with the provided data.
            raw_data = self.process_raw(raw_data)
            new_file_name = file_name.replace('.json','_con.json')
            with open(new_file_name, 'w', encoding='utf-8') as f:
                json.dump(raw_data,f)
            print('Saving to:', new_file_name)

        # Load the dependency-graph information for this split.
        all_dep_info = load_depfile(os.path.join(args.data_dir,'{}.txt.dep'.format(flag)))

        dependency_type_dict=prepare_type_dict(args.data_dir)


        self.data = self.process(raw_data, vocab, args, tokenizer,all_dep_info,dependency_type_dict)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.data[index]


    def process_raw(self, data):
        # get parserd data
        # we already provide here
        pass


    def process(self, data, vocab, args, tokenizer,all_dep_info,dependency_type_dict):
        """Convert raw sentences into model-ready tuples.

        Returns one tuple per sentence; aspect-level fields are lists with one
        entry per aspect of that sentence (flattened later by the collate fn).
        """
        token_vocab = vocab['token']
        pol_vocab = vocab['polarity']

        processed = []
        max_len = args.max_len
        CLS_id = tokenizer.convert_tokens_to_ids(["[CLS]"])
        SEP_id = tokenizer.convert_tokens_to_ids(["[SEP]"])
        sub_len = len(args.special_token)

        span_num = []

        i=0
        for d,dep_info in zip(data,all_dep_info):
            graph_id=i
            tok = list(d['token'])
            if args.lower:
                tok = [t.lower() for t in tok]

            # Map words to BERT sub-word ids; word_mapback[j] is the word
            # index that produced sub-word j.
            text_raw_bert_indices, word_mapback, _ = text2bert_id(tok, tokenizer)

            text_raw_bert_indices = text_raw_bert_indices[:max_len]
            word_mapback = word_mapback[:max_len]

            # Number of whole words that survive the sub-word truncation.
            length = word_mapback[-1] + 1

            # tok = tok[:length]
            bert_length = len(word_mapback)

            dep_head = list(d['dep_head'])[:length]

            # map2id
            # tok = [token_vocab.stoi.get(t, token_vocab.unk_index) for t in tok]

            # Constituency-tree features (helpers imported from spans.py).
            con_head = d['con_head']
            con_mapnode = d['con_mapnode']
            con_path_dict, con_children = get_path_and_children_dict(con_head)
            # Indices of leaf nodes, i.e. entries whose name does not end with
            # the special non-terminal suffix (e.g. '[N]').
            mapback = [ idx for idx ,word in enumerate(con_mapnode) if word[-sub_len: ]!= args.special_token]

            layers, influence_range, node2layerid = form_layers_and_influence_range(con_path_dict, mapback)

            spans = form_spans(layers, influence_range, length, con_mapnode)

            # Dependency adjacency matrix (one-hot) for the truncated sentence.
            adj_i_oneshot = head_to_adj_oneshot(dep_head, length, d['aspects'])

            cd_adj = np.ones((length,length))
            if args.con_dep_conditional:
                # When the root S node directly dominates more than one S
                # clause, mask dependency edges across clause boundaries.
                father = 1
                if father in con_children and [con_mapnode[node] for node in con_children[father]].count('S[N]') > 1 and con_mapnode[father] == 'S[N]':
                    cd_span = spans[node2layerid[father]+1]
                    cd_adj = get_conditional_adj(father, length, cd_span, con_children, con_mapnode)

                adj_i_oneshot = adj_i_oneshot * cd_adj

            # First-order dependency-relation label matrix for this sentence.
            dep_instance_parser=DepInstanceParser(basicDependencies=dep_info,tokens=tok)
            dep_type_matrix=dep_instance_parser.get_first_order()


            # aspect-specific
            bert_sequence_list = []
            bert_segments_ids_list = []
            label_list = []
            aspect_indi_list = []
            aspect_token_list = []
            select_spans_list = []

            for aspect in d['aspects']:
                asp = list(aspect['term'])
                asp_bert_ids, _, _ = text2bert_id(asp, tokenizer)
                # Sentence-pair input: [CLS] sentence [SEP] aspect [SEP]
                bert_sequence = CLS_id + text_raw_bert_indices + SEP_id + asp_bert_ids + SEP_id
                bert_segments_ids = [0] * (bert_length + 2) + [1] * (len(asp_bert_ids ) +1)

                # +3 accounts for the [CLS] and the two [SEP] tokens.
                bert_sequence = bert_sequence[:max_len+3]
                bert_segments_ids = bert_segments_ids[:max_len+3]

                label = aspect['polarity']

                # 0/1 word-level mask marking the aspect-term positions.
                aspect_indi = [0] * length

                for pidx in range(aspect['from'], aspect['to']):
                    aspect_indi[pidx] = 1

                label = pol_vocab.stoi.get(label)

                # Constituency-tree node ids covering the aspect words.
                aspect_range = list(range(mapback[aspect['from']], mapback[aspect['to']-1] + 1))

                con_lca = find_inner_LCA(con_path_dict, aspect_range)


                select_spans, span_indications = form_aspect_related_spans(con_lca, spans, con_mapnode, node2layerid, con_path_dict)

                # span_num.append(len(select_spans))
                #
                # averge_layers=np.array(span_num).sum()/len(span_num)

                # Pad/trim to a fixed number of spans per aspect.
                select_spans = select_func(select_spans, args.max_num_spans, length)

                # Shift span ids by 1 so 0 can serve as padding downstream.
                select_spans = [[ x+ 1 for x in span] for span in select_spans]


                label_list.append(label)
                aspect_indi_list.append(aspect_indi)
                aspect_token_list.append(asp_bert_ids)
                bert_sequence_list.append(bert_sequence)
                bert_segments_ids_list.append(bert_segments_ids)

                select_spans_list.append(select_spans)




            processed += [
                (
                    length, bert_length, word_mapback,
                    adj_i_oneshot,
                    # aspect-specific
                    bert_sequence_list, bert_segments_ids_list, aspect_indi_list, aspect_token_list,select_spans_list,
                    # label
                    label_list,
                    #dep_type_matrix
                    dep_type_matrix,
                    #dependency_type_dict
                    dependency_type_dict
                )
            ]
            i=i+1


        return processed
188 |
189 |
def ABSA_collate_fn(batch):
    """Collate ABSA_Dataset items into padded batch tensors.

    Sentence-level tensors are sized by the batch; aspect-level tensors are
    flattened across all aspects of all sentences (``as_batch_size`` rows),
    with ``map_AS`` mapping each aspect row back to its sentence index.
    """
    batch_size = len(batch)
    # Transpose: list of per-sample tuples -> tuple of per-field lists.
    batch = list(zip(*batch))

    lens = batch[0]

    (length_, bert_length_, word_mapback_,
     adj_i_oneshot_,
     bert_sequence_list_, bert_segments_ids_list_,
     aspect_indi_list_, aspect_token_list_,select_spans_list_,
     label_list_,dep_type_matrix_,dependency_type_dict_) = batch

    max_lens = max(lens)
    # The relation->id map is identical for every sample; take the first.
    dep_label_map=dependency_type_dict_[0]

    #str=dep_type_matrix_[0][0][3]


    length = torch.LongTensor(length_)
    bert_length = torch.LongTensor(bert_length_)
    word_mapback = get_long_tensor(word_mapback_, batch_size)

    # Zero-pad each sentence's adjacency matrix to the batch max length.
    adj_oneshot = np.zeros((batch_size, max_lens, max_lens), dtype=np.float32)

    for idx in range(batch_size):
        mlen = adj_i_oneshot_[idx].shape[0]
        adj_oneshot[idx,:mlen,:mlen] = adj_i_oneshot_[idx]


    adj_oneshot = torch.FloatTensor(adj_oneshot)

    # map_AS[k] = sentence index of flattened aspect row k;
    # map_AS_idx[k] = position of that aspect within its sentence.
    map_AS = [[idx] * len(a_i) for idx, a_i in enumerate(bert_sequence_list_)]
    map_AS_idx = [range(len(a_i)) for a_i in bert_sequence_list_]

    # add_pre = np.array([0] + [len(m) for m in map_AS[:-1]]).cumsum()

    map_AS = torch.LongTensor([m for m_list in map_AS for m in m_list])
    map_AS_idx = torch.LongTensor([m for m_list in map_AS_idx for m in m_list])

    # Total number of aspects in the batch.
    as_batch_size = len(map_AS)

    bert_sequence = [p for p_list in bert_sequence_list_ for p in p_list]
    bert_sequence = get_long_tensor(bert_sequence, as_batch_size)

    bert_segments_ids = [p for p_list in bert_segments_ids_list_ for p in p_list]
    bert_segments_ids = get_long_tensor(bert_segments_ids, as_batch_size)

    aspect_indi = [p for p_list in aspect_indi_list_ for p in p_list]
    aspect_indi = get_long_tensor(aspect_indi, as_batch_size)

    # aspect_token_list
    aspect_token_list = [p for p_list in aspect_token_list_ for p in p_list]
    aspect_token_list = get_long_tensor(aspect_token_list, as_batch_size)

    # Pad the per-aspect constituency spans along the token dimension.
    # NOTE(review): assumes every aspect has exactly max_num_spans span rows
    # (guaranteed upstream by select_func) — otherwise this assignment breaks.
    con_spans_list = [p for p_list in select_spans_list_ for p in p_list]
    max_num_spans = max([len(p) for p in con_spans_list])
    con_spans = np.zeros((as_batch_size, max_num_spans, max_lens), dtype=np.int64)
    for idx in range(as_batch_size):
        mlen = len(con_spans_list[idx][0])
        con_spans[idx,:,:mlen] = con_spans_list[idx]

    con_spans = torch.LongTensor(con_spans)

    # label
    # isinstance(sl, int) drops aspects whose polarity was missing from the
    # vocab (pol_vocab.stoi.get returned None).
    label = torch.LongTensor([sl for sl_list in label_list_ for sl in sl_list if isinstance(sl, int)])




    def get_adj_with_value_matrix(dep_type_matrix):
        # Replace each relation-label string with its integer id, padding the
        # matrix to (batch_size, max_lens, max_lens).
        final_dep_type_matrix=np.zeros((batch_size, max_lens, max_lens), dtype=int)
        for idx in range(batch_size):
            mlen = len(dep_type_matrix[idx])
            for pi in range(mlen):
                for pj in range(mlen):
                    final_dep_type_matrix[idx][pi][pj] = dep_label_map[dep_type_matrix[idx][pi][pj]]

        return final_dep_type_matrix

    dep_type_matrix=get_adj_with_value_matrix(dep_type_matrix_)
    dep_type_matrix=torch.IntTensor(dep_type_matrix)





    return (
        length, bert_length, word_mapback, adj_oneshot,
        map_AS, map_AS_idx,
        bert_sequence, bert_segments_ids,
        aspect_indi, aspect_token_list,con_spans,
        dep_type_matrix,
        label
    )
284 |
285 |
def text2bert_id(token, tokenizer):
    """Tokenize each word into sub-word pieces and record the piece->word mapping.

    Returns (piece ids, word index of each piece, number of pieces per word).
    """
    pieces_per_word = [tokenizer.tokenize(word) for word in token]
    word_split_len = [len(pieces) for pieces in pieces_per_word]
    word_mapback = [idx for idx, pieces in enumerate(pieces_per_word) for _ in pieces]
    re_token = [piece for pieces in pieces_per_word for piece in pieces]
    re_id = tokenizer.convert_tokens_to_ids(re_token)
    return re_id, word_mapback, word_split_len
297 |
class ABSA_DataLoader(DataLoader):
    """DataLoader that re-sorts the dataset into length-sorted blocks each epoch,
    so every batch contains sequences of similar length (less padding waste)."""

    def __init__(self, dataset, sort_key, sort_bs_num=None, is_shuffle=True, **kwargs):
        '''
        :param dataset: Dataset object
        :param sort_key: sort_function applied to each sample
        :param sort_bs_num: sort range; default is None(sort for all sequence)
        :param is_shuffle: shuffle chunk , default if True
        :return:
        '''
        assert isinstance(dataset.data, list)
        super().__init__(dataset,**kwargs)
        self.sort_key = sort_key
        self.sort_bs_num = sort_bs_num
        self.is_shuffle = is_shuffle

    def __iter__(self):
        # re-shuffle (in length-sorted blocks) at the start of every epoch
        if self.is_shuffle:
            self.dataset.data = self.block_shuffle(self.dataset.data, self.batch_size, self.sort_bs_num, self.sort_key, self.is_shuffle)

        # NOTE(review): these are private torch.utils.data internals and may
        # break across torch versions -- verify against the pinned torch release
        if self.num_workers == 0:
            return _SingleProcessDataLoaderIter(self)
        else:
            return _MultiProcessingDataLoaderIter(self)

    @staticmethod
    def block_shuffle(data, batch_size, sort_bs_num, sort_key, is_shuffle):
        # sort by length first, then shuffle whole batches (not individual samples)
        random.shuffle(data)
        data = sorted(data, key = sort_key)  # sort by length first
        batch_data = [data[i : i + batch_size] for i in range(0,len(data),batch_size)]
        batch_data = [sorted(batch, key = sort_key) for batch in batch_data]
        if is_shuffle:
            random.shuffle(batch_data)
        batch_data = list(chain(*batch_data))
        return batch_data
333 |
def get_long_tensor(tokens_list, batch_size):
    """ Convert list of list of tokens to a padded LongTensor. """
    max_len = max(len(seq) for seq in tokens_list)
    padded = torch.zeros(batch_size, max_len, dtype=torch.long)
    for row, seq in enumerate(tokens_list):
        padded[row, :len(seq)] = torch.as_tensor(seq, dtype=torch.long)
    return padded
341 |
342 |
def get_float_tensor(tokens_list, batch_size):
    """ Convert list of list of tokens to a padded FloatTensor. """
    max_len = max(len(seq) for seq in tokens_list)
    padded = torch.zeros(batch_size, max_len, dtype=torch.float)
    for row, seq in enumerate(tokens_list):
        padded[row, :len(seq)] = torch.as_tensor(seq, dtype=torch.float)
    return padded
350 |
351 |
def sort_all(batch, lens):
    """ Sort all fields by descending order of lens, and return the original indices. """
    # zip each record with (length, original index) so tuple sort keys on length
    keyed = list(zip(lens, range(len(lens)), *batch))
    keyed.sort(reverse=True)
    columns = [list(col) for col in zip(*keyed)]
    return columns[2:], columns[1]
357 |
358 |
def get_dep_labels(data_dir,direct=False):
    """Load dependency labels from data_dir/dep_type.json, prepending 'self_loop'.

    When direct is True, every label is split into directed '<label>_in' /
    '<label>_out' variants.
    """
    with open(os.path.join(data_dir, "dep_type.json"), 'r') as f:
        raw_labels = json.load(f)

    dep_labels = ["self_loop"]
    if direct:
        for label in raw_labels:
            dep_labels.append("{}_in".format(label))
            dep_labels.append("{}_out".format(label))
    else:
        dep_labels.extend(raw_labels)
    return dep_labels
371 |
372 |
def prepare_type_dict(data_dir):
    """Build a dependency-label -> integer-id dict, reserving 0 for 'none'."""
    label_to_id = {"none": 0}
    for label in get_dep_labels(data_dir):
        # id is the current dict size, so ids stay dense even on duplicates
        label_to_id[label] = len(label_to_id)
    return label_to_id
380 |
381 |
382 |
def load_depfile(filename):
    """Parse a CoNLL-style .dep file into a list of sentences.

    Sentences are separated by blank lines; each arc line is
    'governor<TAB>dependent<TAB>relation'.
    """
    sentences = []
    current = []
    with open(filename, 'r') as f:
        for raw in f:
            stripped = raw.strip()
            if stripped:
                fields = stripped.split("\t")
                current.append({
                    "governor": int(fields[0]),
                    "dependent": int(fields[1]),
                    "dep": fields[2],
                })
            elif current:
                # blank line closes the current sentence
                sentences.append(current)
                current = []
    # flush a trailing sentence that has no terminating blank line
    if current:
        sentences.append(current)

    return sentences
405 |
406 |
--------------------------------------------------------------------------------
/dep_parser.py:
--------------------------------------------------------------------------------
1 | import copy
2 |
class DepInstanceParser():
    """Turn 1-based dependency arcs into per-token head info and label matrices."""

    def __init__(self,basicDependencies,tokens=[]):
        self.basicDependencies=basicDependencies
        self.tokens=tokens
        self.words=[]
        self.dep_governed_info=[]
        self.dep_parsing()

    def dep_parsing(self):
        """Record, for every token position, its 0-based governor and relation."""
        if self.tokens:
            self.words = list(self.tokens)
            governed = [{"word": w} for w in self.words]
        else:
            # placeholders only; every slot is replaced by the arc loop below
            governed = [{}] * len(self.basicDependencies)

        for arc in self.basicDependencies:
            governed[arc['dependent'] - 1] = {
                "governor": arc['governor'] - 1,  # 0-based; root becomes -1
                "dep": arc['dep'],
            }
        self.dep_governed_info = governed

    def get_init_dep_matrix(self):
        """A len(words) x len(words) label matrix: 'self_loop' diagonal, 'none' elsewhere."""
        n = len(self.words)
        matrix = [["none"] * n for _ in range(n)]
        for i in range(n):
            matrix[i][i] = "self_loop"
        return matrix

    def get_first_order(self,direct=False):
        """Fill the label matrix with first-order arcs, optionally direction-marked."""
        matrix = self.get_init_dep_matrix()

        for i, info in enumerate(self.dep_governed_info):
            head = info["governor"]
            rel = info["dep"]
            if direct:
                matrix[i][head] = "{}_in".format(rel)
                matrix[head][i] = "{}_out".format(rel)
            else:
                matrix[i][head] = rel
                matrix[head][i] = rel

        return matrix
54 |
--------------------------------------------------------------------------------
/gcn.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import math
3 | import torch
4 | import numpy as np
5 | import torch.nn as nn
6 | import torch.nn.functional as F
7 |
class LayerNorm(nn.Module):
    """Layer normalization over the last dimension with learnable scale/shift."""

    def __init__(self,features,eps=1e-6):
        super(LayerNorm,self).__init__()
        # parameter names a_2/b_2 kept for state-dict compatibility
        self.a_2=nn.Parameter(torch.ones(features))
        self.b_2=nn.Parameter(torch.zeros(features))
        self.eps=eps

    def forward(self,x):
        mu = x.mean(-1, keepdim=True)
        sigma = x.std(-1, keepdim=True)
        normed = (x - mu) / (sigma + self.eps)
        return self.a_2 * normed + self.b_2
20 |
21 |
22 | class GCN_for_segment_aware_graph(nn.Module):
    def __init__(self,args):
        """GCN propagating over a segment-aware (attention-derived) graph.

        :param args: namespace providing gcn_layers, attention_heads, hidden_dim,
            bert_dropout and gcn_dropout.
        """
        super(GCN_for_segment_aware_graph,self).__init__()
        self.opt=args
        self.layers=args.gcn_layers
        self.attention_heads=args.attention_heads
        self.hidden_dim=args.hidden_dim
        self.bert_drop=nn.Dropout(args.bert_dropout)
        self.gcn_drop=nn.Dropout(args.gcn_dropout)
        self.layernorm=LayerNorm(args.hidden_dim)

        # GCN width is fixed at 300; Wxx projects hidden_dim down to it,
        # W is the per-layer graph-convolution weight
        self.gcn_dim=300
        self.W=nn.Linear(self.gcn_dim,self.gcn_dim)
        self.Wxx=nn.Linear(self.hidden_dim,self.gcn_dim)
36 |
37 |
38 | def forward(self,bert_input,attn_score,layer_mask,is_multi_head):
39 | batch=bert_input.size(0)
40 | len=bert_input.size(1)
41 |
42 | bert_input=self.layernorm(bert_input)
43 | bert_input=self.bert_drop(bert_input)
44 | gcn_input=self.Wxx(bert_input)
45 |
46 |
47 | if layer_mask is not None:
48 | weight_adj = attn_score * layer_mask.transpose(0, 1)
49 | else:
50 | weight_adj=attn_score
51 |
52 | gcn_output=gcn_input
53 | layer_list=[gcn_input]
54 |
55 | for i in range(self.layers):
56 | if is_multi_head is True:
57 | gcn_output = gcn_output.unsqueeze(1).expand(batch, self.attention_heads, len, self.gcn_dim)
58 | else:
59 | gcn_output=gcn_output
60 |
61 |
62 | Ax=torch.matmul(weight_adj,gcn_output)
63 |
64 | if is_multi_head is True:
65 | Ax = Ax.mean(dim=1)
66 | Ax = self.W(Ax)
67 | else:
68 | Ax = self.W(Ax)
69 |
70 |
71 | weight_gcn_output=F.relu(Ax)
72 |
73 | gcn_output=weight_gcn_output
74 | layer_list.append(gcn_output)
75 | gcn_output=self.gcn_drop(gcn_output) if i= (lengths.unsqueeze(1))
248 | return a
249 |
250 |
def get_span_matrix_4D(span_list, rm_loop=False, max_len=None):
    '''
    span_list: [N,B,L]
    return span:[N,B,L,L]
    '''
    N, B, L = span_list.shape
    # flatten the first two dims, reuse the 3D builder, then restore them
    flat = span_list.contiguous().view(-1, L)
    span = get_span_matrix_3D(flat, rm_loop, max_len)
    return span.contiguous().view(N, B, L, L)
260 |
261 |
def get_span_matrix_3D(span_list, rm_loop=False, max_len=None):
    """Expand span ids [N,L] into pairwise same-span matrices [N,L,L].

    Entry (i,j) keeps the span id when tokens i and j share a span, else 0;
    rm_loop zeroes the diagonal, max_len truncates the sequence dimension.
    """
    origin_dim = len(span_list.shape)
    if origin_dim == 1:  # promote a single [L] row to [1,L]
        span_list = span_list.unsqueeze(dim=0)
    N, L = span_list.shape
    if max_len is not None:
        L = min(L, max_len)
        span_list = span_list[:, :L]

    expanded = span_list.unsqueeze(dim=-1).repeat(1, 1, L)
    span = expanded * (expanded.transpose(-1, -2) == expanded)
    if rm_loop:
        off_diag = (~torch.eye(L).bool()).unsqueeze(dim=0).repeat(N, 1, 1)
        span = span * off_diag
    if origin_dim == 1:
        span = span.squeeze(dim=0)
    return span
277 |
278 |
279 |
class LayerNorm(nn.Module):
    """Last-dimension layer normalization with learnable affine parameters."""

    def __init__(self, features, eps=1e-6):
        super(LayerNorm, self).__init__()
        # names a_2/b_2 preserved so existing checkpoints still load
        self.a_2 = nn.Parameter(torch.ones(features))
        self.b_2 = nn.Parameter(torch.zeros(features))
        self.eps = eps

    def forward(self, x):
        centered = x - x.mean(-1, keepdim=True)
        scale = x.std(-1, keepdim=True) + self.eps
        return self.a_2 * centered / scale + self.b_2
292 |
--------------------------------------------------------------------------------
/module_interaction.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 | import numpy as np
5 |
class LayerNorm(nn.Module):
    """
    Layer Normalization
    """

    def __init__(self, features, eps=1e-6):
        super().__init__()
        # gamma/beta names preserved for state-dict compatibility
        self.gamma = nn.Parameter(torch.ones(features))
        self.beta = nn.Parameter(torch.zeros(features))
        self.eps = eps

    def forward(self, x):
        mu = x.mean(-1, keepdim=True)
        sigma = x.std(-1, keepdim=True)
        return self.gamma * (x - mu) / (sigma + self.eps) + self.beta
21 |
22 |
23 |
class MyMultiHeadAttention(nn.Module):
    """Transformer sublayer pair: multi-head self-attention + position-wise FFN."""

    def __init__(self,args,n_head,d_model,d_kv,dropout=0.1):
        super(MyMultiHeadAttention,self).__init__()
        self.args=args
        # keys and values share the same per-head width d_kv
        self.slf_attn=MultiHeadAttention(args,n_head,d_model,d_kv,d_kv,dropout=dropout)
        self.pos_ffn=PositionwiseFeedForward(args,d_model,d_model,dropout=dropout)

    def forward(self,q,k,v,mask=None):
        attn_out, p_attn = self.slf_attn(q, k, v, mask)
        return self.pos_ffn(attn_out), p_attn
37 |
38 |
39 |
class MultiHeadAttention(nn.Module):
    """Multi-head attention with residual connection and layer normalization."""

    def __init__(self,args,n_head,d_model,d_k,d_v,dropout=0.1):
        super().__init__()
        self.args = args

        self.n_head=n_head
        self.d_k=d_k  # per-head key/query width
        self.d_v=d_v  # per-head value width

        # joint projections for all heads at once
        self.w_q=nn.Linear(d_model,n_head*d_k)
        self.w_k=nn.Linear(d_model,n_head*d_k)
        self.w_v=nn.Linear(d_model,n_head*d_v)
        nn.init.normal_(self.w_q.weight,mean=0,std=np.sqrt(2.0/(d_model+d_k)))
        nn.init.normal_(self.w_k.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
        nn.init.normal_(self.w_v.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_v)))

        # scores are scaled by sqrt(d_k) inside the attention module
        self.attention=ScaledDotProductAttention(temperature=np.power(d_k,0.5))
        self.layer_norm=LayerNorm(d_model)


        self.fc=nn.Linear(n_head*d_v,d_model)
        nn.init.xavier_normal_(self.fc.weight)
        self.dropout=nn.Dropout(dropout)


    def forward(self,q,k,v,mask=None):
        # q/k/v: (batch, seq_len, d_model)
        d_k,d_v,n_head=self.d_k,self.d_v,self.n_head

        sz_b,len_q,_=q.size()
        sz_b,len_k,_=k.size()
        sz_b,len_v,_=v.size()

        residual=q  # saved for the residual connection

        # project, then split the last dim into (n_head, d)
        q=self.w_q(q).view(sz_b,len_q,n_head,d_k)
        k=self.w_k(k).view(sz_b,len_k,n_head,d_k)
        v=self.w_v(v).view(sz_b,len_v,n_head,d_v)

        # fold heads into the batch dimension for one big bmm
        q=q.permute(2,0,1,3).contiguous().view(-1,len_q,d_k) #n_head*batch_size,seq_len,model_dim
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k) # n_head*batch_size,seq_len,model_dim
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v) # n_head*batch_size,seq_len,model_dim

        output,attn=self.attention(q,k,v,mask=mask,n_head=n_head)

        # unfold heads and concatenate along the feature dimension
        output=output.view(n_head,sz_b,len_q,d_v)

        output=output.permute(1,2,0,3).contiguous()
        output=output.view(sz_b,len_q,-1)


        output = self.dropout(self.fc(output))
        output = self.layer_norm(output + residual)

        return output,attn
95 |
class ScaledDotProductAttention(nn.Module):
    """Scaled dot-product attention with an additive padding mask."""

    def __init__(self,temperature,attn_dropout=0.1):
        super().__init__()
        self.temperature=temperature  # typically sqrt(d_k)
        self.dropout=nn.Dropout(attn_dropout)
        self.softmax=nn.Softmax(dim=2)

    def forward(self,q,k,v,mask=None,n_head=None):
        # raw scores: (n_head*batch, len_q, len_k)
        attn=torch.bmm(q,k.transpose(1,2))
        attn=attn/self.temperature

        if mask is not None:
            # expand the (batch, len) mask to per-head (batch*n_head, len, len)
            attn_mask = mask.unsqueeze(-1).repeat(1, 1, q.shape[1])
            attn_mask = attn_mask.unsqueeze(1).expand(mask.size(0), n_head, q.size(1), q.size(1))
            attn_mask=attn_mask.reshape(mask.size(0)*n_head,q.size(1),q.size(1))

            # NOTE(review): this assumes padded positions carry the value -10000
            # in `mask`; those entries are replaced by 1e9 and SUBTRACTED from the
            # scores (rows here, columns via the transpose below), pushing padded
            # positions toward -inf before the softmax -- confirm mask encoding.
            attn = attn - attn_mask.masked_fill(attn_mask == -10000, 1e9)
            attn_mask_T = torch.transpose(attn_mask, -2, -1)
            attn_mask_T = attn_mask_T.masked_fill(attn_mask_T == -10000, 1e9)
            attn = attn - attn_mask_T

        attn=self.softmax(attn)
        attn=self.dropout(attn)
        output=torch.bmm(attn,v)

        return output,attn
122 |
123 |
class PositionwiseFeedForward(nn.Module):
    """Two-layer position-wise feed-forward block with residual + layer norm."""

    def __init__(self,args,d_in,d_hid,dropout=0.1):
        super().__init__()
        self.args=args
        self.w_1=nn.Linear(d_in,d_hid)
        self.w_2=nn.Linear(d_hid,d_in)
        self.layer_norm=LayerNorm(d_in)
        self.dropout=nn.Dropout(dropout)

    def forward(self,x):
        hidden = F.relu(self.w_1(x))
        projected = self.dropout(self.w_2(hidden))
        # residual connection around the whole FFN, then normalize
        return self.layer_norm(projected + x)
144 |
--------------------------------------------------------------------------------
/parse_tree.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | from collections import Counter
4 | import itertools
5 |
6 |
7 |
8 |
def GetTree_heads(t):
    """Walk a production list depth-first and record each production's parent.

    :param t: productions of a parse tree (as from nltk's tree.productions())
    :return: (heads, mapnode) where heads[i] is the parent production index of
        production i (-1 for the root) and mapnode[i] is its LHS symbol, or the
        terminal word itself when the LHS prints as '_' (apparently a
        preterminal marker -- confirm against the constituency parser output).
    """
    heads = [0] * len(t)
    mapnode = [0] * len(t)

    def Findheads(cidx, t, headidx):
        # returns the index of the first production after this subtree
        if (cidx >= len(t)):
            return cidx
        mapnode[cidx] = t[cidx].lhs()
        heads[cidx] = headidx

        if t[cidx].lhs().__str__() == '_':
            # leaf production: store the terminal word instead of the symbol
            mapnode[cidx] = t[cidx].rhs()[0]

            return cidx + 1

        # recurse into one subtree per RHS child, threading the cursor through
        nidx = cidx + 1
        for r in t[cidx].rhs():
            nidx = Findheads(nidx, t, cidx)

        return nidx

    Findheads(0, t, -1)
    return heads, mapnode
32 |
33 |
34 |
35 |
def get_path_and_children_dict(heads):
    """For each node, build its chain of ancestors up to the root marker -1,
    and return it together with a parent -> children mapping."""
    path_dict = {}
    remain_nodes = list(range(len(heads)))

    while remain_nodes:
        finished = []
        for node in remain_nodes:
            if node not in path_dict:
                # first visit: seed the path with the direct head (node itself excluded)
                path_dict[node] = [heads[node]]
                if heads[node] == -1:
                    finished.append(node)
                continue
            tail = path_dict[node][-1]
            if tail in remain_nodes:
                # tail's own path is not finalized yet; extend one step
                path_dict[node].append(heads[tail])
            else:
                # tail is resolved, so splice in its complete path
                path_dict[node].extend(path_dict[tail])
                finished.append(node)
        for node in finished:
            remain_nodes.remove(node)

    children_dict = {}
    for node, path in path_dict.items():
        parent = path[0]
        if parent == -1:
            continue  # root has no parent entry
        children_dict.setdefault(parent, []).append(node)

    return path_dict, children_dict
71 |
72 |
def find_inner_LCA(path_dict, aspect_range):
    """Lowest common ancestor of all nodes in aspect_range (paths end at -1)."""
    paths = sorted(([node] + path_dict[node] for node in aspect_range), key=len)

    # scan the shortest path bottom-up; the first node present in every other
    # path is the lowest common ancestor
    for candidate in paths[0]:
        if all(candidate in other for other in paths[1:]):
            LCA_node = candidate
            break  # already found
    return LCA_node
88 |
89 | # get_word_range
# get_word_range
def find_LCA_and_PATH(A, B):
    """Walk both ancestor paths in lockstep and return
    (LCA, A's prefix below it, B's prefix below it); (-1, ...) when disjoint."""
    for idx in range(min(len(A), len(B))):
        a_node, b_node = A[idx], B[idx]
        if a_node in B:
            return a_node, A[:idx], B[:B.index(a_node)]
        if b_node in A:
            return b_node, A[:A.index(b_node)], B[:idx]
    return -1, A[:-1], B[:-1]
97 |
def FindS(l, children, mapback):
    """Descend to the leftmost and rightmost leaves under the node list l and
    map both to their word positions via mapback."""
    def descend(nodes, side):
        # follow the first (side=0) or last (side=-1) child until hitting a leaf
        node = nodes[side]
        while node in children:
            node = children[node][side]
        return node

    return mapback.index(descend(l, 0)), mapback.index(descend(l, -1))
106 |
def get_word_range(lca_A, lca_B, path_dict, children, mapback, default_range):
    """Word range of the constituents lying strictly between two aspects' LCA
    nodes; falls back to default_range when nothing lies between them."""
    LCA, pathA, pathB = find_LCA_and_PATH([lca_A] + path_dict[lca_A], [lca_B] + path_dict[lca_B])
    if len(pathA) and len(pathB):
        siblings = children[LCA]
        # children of the shared ancestor strictly between the two branches
        inner_node_LCA = siblings[siblings.index(pathA[-1]) + 1: siblings.index(pathB[-1])]
    else:
        inner_node_LCA = []
    return FindS(inner_node_LCA, children, mapback) if len(inner_node_LCA) > 0 else default_range
115 |
116 |
117 |
118 |
119 |
def preprocess_file(file_name, dep_parser=None, con_parser=None, special_token='[N]'):
    """Annotate an ABSA json file with dependency heads, constituency heads and
    aspect-pair word ranges, writing the result to *_new.json.

    :param file_name: json list of samples with 'token' and 'aspects' fields
    :param dep_parser: supar dependency parser (lazily loaded when None)
    :param con_parser: supar constituency parser (lazily loaded when None)
    :param special_token: suffix marking non-terminal nodes in con_mapnode
    """


    print('Processing:',file_name)
    from tqdm import tqdm
    from supar import Parser
    if dep_parser is None:
        dep_parser = Parser.load('biaffine-dep-en')
    if con_parser is None:
        con_parser = Parser.load('crf-con-en')


    sub_len = len(special_token)

    with open(file_name,'r',encoding='utf-8') as f:
        data = json.load(f)

    for d in tqdm(data):
        token = d['token']
        # strip non-breaking spaces that confuse the parsers
        token = [tok.replace(u'\xa0', u'') for tok in token]
        d['token'] = token

        # dependency parsing
        dataset = dep_parser.predict(token, verbose=False)
        dep_head = dataset.arcs[0]
        d['dep_head'] = [x-1 for x in dep_head]  # 0-based heads; root becomes -1


        # constituent parsing; < and > stand in for parentheses so the
        # bracketed tree output stays unambiguous
        parser_inputs = ' '.join(token).replace('(', '<').replace(')', '>').split(' ') # [ver1]
        # parser_inputs = ' '.join(token).replace('(','<').replace(')','>').replace(r"'s",'is').replace(r"n't",'not').split(' ') #[ver2]
        dataset = con_parser.predict(parser_inputs, verbose=False)
        t = dataset.trees[0]
        con_head, con_mapnode = GetTree_heads(t.productions())
        d['con_head'] = con_head


        # non-terminal symbols get the special_token suffix; terminal words stay plain
        con_mapnode = [x if isinstance(x, str) else x.__str__() + special_token for x in con_mapnode]
        d['con_mapnode'] = con_mapnode


        d['aspects'].sort(key=lambda x:(x['to'],x['from']))

        con_path_dict,con_children = get_path_and_children_dict(d['con_head'])

        # indices of 'S'-labelled constituency nodes (computed but unused below)
        mapS = [
            idx for idx, word in enumerate(con_mapnode) if word[-sub_len:] == special_token and word[:-sub_len] == 'S'
        ]

        # mapback[i] = production index of the i-th terminal word
        mapback = [ idx for idx,word in enumerate(con_mapnode) if word[-sub_len:]!=special_token]

        for aspect_info in d['aspects']:
            aspect_range = list(range(mapback[aspect_info['from']],mapback[aspect_info['to']-1]+1))

            # lowest constituency node covering the whole aspect span
            con_lca = find_inner_LCA(con_path_dict, aspect_range)
            aspect_info['con_lca'] = con_lca


        # for every aspect pair, record the word range of the material between
        # their LCA nodes plus the pair's polarity labels
        choice_list = itertools.combinations(list(range(len(d['aspects']))),2)
        aa_choice = []
        for first,second in choice_list:
            temp = {'select_idx':(first,second)}
            A_asp = d['aspects'][first]
            B_asp = d['aspects'][second]

            default_range = (A_asp['to'],B_asp['from']-1)

            word_range = get_word_range(A_asp['con_lca'],
                                        B_asp['con_lca'],
                                        con_path_dict,
                                        con_children,mapback,
                                        default_range)


            assert(word_range[0] < len(token) and word_range[1] < len(token))

            temp['word_range'] = word_range
            temp['polarity_pair'] = (A_asp['polarity'],B_asp['polarity'])

            aa_choice.append(temp)

        d['aa_choice'] = aa_choice

    with open(file_name.replace('.json','_new.json'), 'w', encoding='utf-8') as f:
        json.dump(data,f)

    print('Done!')
207 |
if __name__ == '__main__':
    # annotate every split of every dataset in place
    data_dir = 'data/V2'
    for data_set in ['Laptops', 'MAMS', 'Restaurants', 'Tweets']:
        for file_type in ['train', 'valid', 'test']:
            target = '{}/{}/{}_con_new.json'.format(data_dir, data_set, file_type)
            preprocess_file(target)
--------------------------------------------------------------------------------
/pretrain_model/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/pretrain_model/.DS_Store
--------------------------------------------------------------------------------
/pretrain_model/bert-base-uncased/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/pretrain_model/bert-base-uncased/.DS_Store
--------------------------------------------------------------------------------
/run_bash/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ouy7han/S2GSL/df018ce8f32de6a83f46832de6ed535e8c354eef/run_bash/.DS_Store
--------------------------------------------------------------------------------
/run_bash/start.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Train S2GSL on the four ABSA benchmarks sequentially, tee-ing each run's
# stdout/stderr into a timestamped log under log/result.
exp_path=log/result

if [ ! -d "$exp_path" ]; then
  echo "making new dir.."
  mkdir -p "$exp_path"
fi


# Laptops (alpha/beta are the per-dataset loss-balance weights)
DATE=$(date +%Y-%m-%d-%H_%M_%S)
CUDA_VISIBLE_DEVICES=0 python3 -u ../train.py \
	--data_dir ../data/V2/Laptops \
	--vocab_dir ../data/V2/Laptops \
	--data_name laptop \
	--batch_size 16 \
	--alpha 0.65 \
	--beta 0.6 \
	--input_dropout 0.2 \
	--layer_dropout 0.1 \
	--gcn_dropout 0.1 \
	--max_len 100 \
	--lr 2e-5 \
	--seed 1 \
	--attention_heads 4 \
	--max_num_spans 4 \
	--num_epoch 20 2>&1 | tee $exp_path/laptop_$DATE.log

# Restaurants
DATE=$(date +%Y-%m-%d-%H_%M_%S)
CUDA_VISIBLE_DEVICES=0 python3 -u ../train.py \
	--data_dir ../data/V2/Restaurants \
	--vocab_dir ../data/V2/Restaurants \
	--data_name restaurant \
	--batch_size 16 \
	--alpha 0.3 \
	--beta 0.3 \
	--input_dropout 0.2 \
	--layer_dropout 0.1 \
	--gcn_dropout 0.1 \
	--max_len 100 \
	--lr 2e-5 \
	--seed 1 \
	--attention_heads 4 \
	--max_num_spans 4 \
	--num_epoch 20 2>&1 | tee $exp_path/restaurant_$DATE.log

# Tweets
DATE=$(date +%Y-%m-%d-%H_%M_%S)
CUDA_VISIBLE_DEVICES=0 python3 -u ../train.py \
	--data_dir ../data/V2/Tweets \
	--vocab_dir ../data/V2/Tweets \
	--data_name twitter \
	--batch_size 16 \
	--alpha 0.035 \
	--beta 0.95 \
	--input_dropout 0.2 \
	--layer_dropout 0.1 \
	--gcn_dropout 0.1 \
	--max_len 100 \
	--lr 2e-5 \
	--seed 1 \
	--attention_heads 4 \
	--max_num_spans 4 \
	--num_epoch 20 2>&1 | tee $exp_path/twitter_$DATE.log

# MAMS
DATE=$(date +%Y-%m-%d-%H_%M_%S)
CUDA_VISIBLE_DEVICES=0 python3 -u ../train.py \
	--data_dir ../data/V2/MAMS \
	--vocab_dir ../data/V2/MAMS \
	--data_name mams \
	--batch_size 16 \
	--alpha 0.7 \
	--beta 0.4 \
	--input_dropout 0.2 \
	--layer_dropout 0.1 \
	--gcn_dropout 0.1 \
	--max_len 100 \
	--lr 2e-5 \
	--seed 1 \
	--attention_heads 4 \
	--max_num_spans 4 \
	--num_epoch 20 2>&1 | tee $exp_path/mams_$DATE.log
--------------------------------------------------------------------------------
/snippet.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import six
3 | from vocab import Vocab
4 | from dataloader import ABSA_Dataset, ABSA_DataLoader, ABSA_collate_fn
5 |
6 | from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
7 |
def get_parameter():
    """Parse and return all command-line arguments for training.

    Fixes: the --best_model_dir default contained a stray ']' inside the path
    ('best_mod]el_checkpoints'), and --dep_type_size (a dict length, used as a
    count) was declared type=float instead of type=int.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--device', type=str, default='cuda:0')
    parser.add_argument('--data_dir', type=str, default='./data/V2/MAMS')
    parser.add_argument('--vocab_dir', type=str, default='./data/V2/MAMS')
    parser.add_argument('--data_name', type=str, default='mams')
    # was './best_mod]el_checkpoints/best_bert/' -- stray ']' removed
    parser.add_argument('--best_model_dir', type=str, default='./best_model_checkpoints/best_bert/')

    parser.add_argument('--num_class', type=int, default=3, help='Num of sentiment class.')

    parser.add_argument('--bert_hidden_dim', type=int, default=768, help='bert dim.')
    parser.add_argument('--lstm_dim', type=int, default=300, help="dimension of bi-lstm")
    parser.add_argument('--hidden_dim', type=int, default=300)
    parser.add_argument("--gamma", default=0.1, type=float, help="The balance of adaptive loss.")
    parser.add_argument("--alpha", default=0.15, type=float, help="The balance of span loss.")
    parser.add_argument("--beta", default=0.7, type=float, help="The balance of root loss.")
    # was type=float; this is the size of the dependency-type dict, an integer count
    parser.add_argument("--dep_type_size", default=42, type=int, help="The length of dependency type dict.")
    parser.add_argument("--dep_embed_dim", default=25, type=int, help="The dimension of dependency type .")


    parser.add_argument('--input_dropout', type=float, default=0.2, help='input dropout rate.')
    parser.add_argument('--layer_dropout', type=float, default=0.1, help='layer dropout rate.')
    parser.add_argument('--attn_dropout', type=float, default=0.0, help='self-attention layer dropout rate.')
    parser.add_argument('--bert_dropout',type=float,default=0.1,help='bert dropout rate.')
    parser.add_argument('--gcn_dropout', type=float, default=0.1, help='gcn dropout rate.')
    parser.add_argument('--gcn_layers', type=int, default=3, help='gcn num layers.')
    parser.add_argument('--num_layers', type=int, default=2, help='module num layers.')


    parser.add_argument('--lower', default=True, help = 'lowercase all words.')
    parser.add_argument('--need_preprocess', default=False, help = 'need parse data.')

    parser.add_argument('--lr', type=float, default=2e-5, help='learning rate.')
    parser.add_argument('--bert_lr', type=float, default=2e-5, help='learning rate for bert.')
    parser.add_argument('--l2', type=float, default=1e-5, help='weight decay rate.')

    parser.add_argument('--num_encoder_layer', type=int, default=3, help='Number of graph layers.')
    parser.add_argument('--num_epoch', type=int, default=20, help='Number of total training epochs.')
    parser.add_argument('--max_patience', type=int, default=20, help='max patience in training')
    parser.add_argument('--batch_size', type=int, default=16, help='Training batch size.')
    parser.add_argument('--log_step', type=int, default=16, help='Print log every k steps.')

    parser.add_argument('--seed', type=int, default= 1)
    parser.add_argument('--max_len', type=int, default=100)

    parser.add_argument('--average_mapback', default=False, action='store_true')

    parser.add_argument('--leaf2root', default=False, action='store_true')
    parser.add_argument('--root2leaf', default=False, action='store_true')

    parser.add_argument('--con_dep_version', default='wo_dep', type=str)
    parser.add_argument('--losstype', type=str, default='None')
    parser.add_argument('--con_dep_conditional', default=False, action = 'store_true')

    parser.add_argument('--dynamic_tree_attn_head', type=int, default=4)
    parser.add_argument('--fusion_attention_heads', type=int, default=6)
    parser.add_argument('--attention_heads', type=int, default=4)
    parser.add_argument('--max_num_spans', type=int, default=4)
    parser.add_argument('--special_token', default='[N]')
    parser.add_argument('--adj_span_version', type=int, default=0)

    parser.add_argument('--sort_key_idx', default=0, help='sort idx')



    args = parser.parse_args()

    return args
76 |
77 |
78 |
79 |
80 |
81 |
def print_arguments(args):
    """Pretty-print every parsed argument, sorted by name."""
    print('----------- Configuration Arguments -----------')
    # vars(args).items() replaces six.iteritems: this codebase targets Python 3 only
    for arg, value in sorted(vars(args).items()):
        print('%s: %s' % (arg, value))
    print('------------------------------------------------')
87 |
def totally_parameters(model):
    """Total number of scalar parameters in the model."""
    return sum(p.nelement() for p in model.parameters())
91 |
def load_vocab(args):
    """Load token and polarity vocabularies from args.vocab_dir.

    Side effect: sets args.tok_size to the token vocabulary size.
    """
    print('Loading vocab...')

    vocab = {
        'token': Vocab.load_vocab(args.vocab_dir + '/vocab_tok.vocab'),
        'polarity': Vocab.load_vocab(args.vocab_dir + '/vocab_pol.vocab')
    }

    print(
        'token_vocab: {}, polarity_vocab:{}'.format(len(vocab['token']), len(vocab['polarity']))
    )

    args.tok_size = len(vocab['token'])
    return vocab
106 |
107 |
def load_one_data(args, file_name, vocab, tokenizer, block_shuffle = True, is_shuffle=True,flag="train"):
    """Build a dataloader for one dataset split.

    Uses the block-shuffling ABSA_DataLoader (batches of similar lengths) when
    both block_shuffle and is_shuffle are set; otherwise a plain DataLoader
    with a random or sequential sampler.
    """
    print('Loading data from {} with batch size {}...'.format(file_name, args.batch_size))
    one_dataset = ABSA_Dataset(args, file_name, vocab, tokenizer,flag)

    if block_shuffle and is_shuffle:
        one_dataloader = ABSA_DataLoader(one_dataset,
                                         sort_key = lambda x: x[args.sort_key_idx],
                                         is_shuffle = is_shuffle,
                                         batch_size = args.batch_size,
                                         collate_fn = ABSA_collate_fn
                                         )
    else:
        one_sampler = RandomSampler(one_dataset) if is_shuffle else SequentialSampler(one_dataset)

        one_dataloader = DataLoader(one_dataset,
                                    sampler=one_sampler,
                                    batch_size=args.batch_size,
                                    collate_fn = ABSA_collate_fn)
    return one_dataloader
127 |
def load_data(args, vocab, tokenizer=None):
    """Build the train/valid/test dataloaders from *_con_new.json in args.data_dir.

    Only the training split is shuffled.
    """
    loaders = {}
    for split, shuffle in (('train', True), ('valid', False), ('test', False)):
        loaders[split] = load_one_data(
            args,
            file_name='{}/{}_con_new.json'.format(args.data_dir, split),
            vocab=vocab,
            tokenizer=tokenizer,
            is_shuffle=shuffle,
            flag=split,
        )

    return loaders['train'], loaders['valid'], loaders['test']
139 |
140 |
--------------------------------------------------------------------------------
/spans.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
def find_inner_LCA(path_dict, aspect_range):
    """Return the lowest common ancestor of the nodes in *aspect_range*.

    path_dict maps each node id to its ancestor list ordered from parent up
    to the root sentinel (-1), as produced by get_path_and_children_dict.

    Raises:
        ValueError: if the nodes share no common ancestor (malformed tree).
        Previously this path crashed with an UnboundLocalError on LCA_node.
    """
    # Node-to-root paths including the node itself; sort so the shortest
    # path comes first — its entries are scanned deepest-ancestor-first.
    path_range = [[x] + path_dict[x] for x in aspect_range]
    path_range.sort(key=len)

    for candidate in path_range[0]:
        # The first entry present in every other path is the LCA, because
        # entries are ordered from the deepest node up to the root.
        if all(candidate in path_range[pid] for pid in range(1, len(path_range))):
            return candidate

    raise ValueError('no common ancestor found for nodes %r' % (aspect_range,))
18 |
def get_path_and_children_dict(heads):
    """Compute each node's ancestor path and a parent-to-children map.

    heads[i] is the parent of node i (-1 marks the root).  Returns
    (path_dict, children_dict): path_dict[i] lists i's ancestors from its
    parent up to -1; children_dict maps a node id to its children in
    ascending index order.
    """
    path_dict = {}
    pending = list(range(len(heads)))

    # Grow every node's ancestor chain one hop per sweep; a node is done
    # once its chain can be completed by splicing in a finished node's path.
    while pending:
        finished = []
        for node in pending:
            if node not in path_dict:
                path_dict[node] = [heads[node]]  # chain starts at the parent
                if heads[node] == -1:
                    finished.append(node)  # root is complete immediately
            else:
                tail = path_dict[node][-1]
                if tail not in pending:
                    # Tail's path is already complete — splice and finish.
                    path_dict[node].extend(path_dict[tail])
                    finished.append(node)
                else:
                    path_dict[node].append(heads[tail])
        for node in finished:
            pending.remove(node)

    # Invert the parent relation, skipping the root (parent -1).
    children_dict = {}
    for node, path in path_dict.items():
        parent = path[0]
        if parent == -1:
            continue
        children_dict.setdefault(parent, []).append(node)

    return path_dict, children_dict
54 |
def form_layers_and_influence_range(path_dict, mapback):
    """Group constituent nodes by depth and compute each node's token span.

    mapback lists leaf node ids in token order.  Returns
    (layers, influence_range, node2layerid): layers is a list of
    (depth, sorted node ids); influence_range maps node id to its
    [begin, end) token range; node2layerid maps node id to its depth.
    """
    # Each leaf covers exactly one token.
    influence_range = {node: [pos, pos + 1] for pos, node in enumerate(mapback)}
    layers = {}
    node2layerid = {}

    # Process nodes from the deepest upward so every child's span is known
    # before its parent's span is accumulated.
    by_depth = sorted(path_dict.items(), key=lambda item: len(item[1]))
    for node, path in reversed(by_depth):
        depth = len(path) - 1
        layers.setdefault(depth, []).append(node)
        node2layerid[node] = depth

        parent = path[0]
        assert parent not in mapback  # a leaf can never be another node's parent
        if parent not in influence_range:
            influence_range[parent] = list(influence_range[node])  # copy, not alias
        else:
            span = influence_range[parent]
            span[0] = min(span[0], influence_range[node][0])
            span[1] = max(span[1], influence_range[node][1])

    layers = [(depth, sorted(nodes)) for depth, nodes in sorted(layers.items())]

    return layers, influence_range, node2layerid
83 |
def form_spans(layers, influence_range, token_len, con_mapnode, special_token = '[N]'):
    """Build one span-id row per constituent layer.

    For each (depth, nodes) entry of `layers`, emits a list of length
    `token_len` in which tokens covered by the same constituent node share an
    integer id; tokens not covered by any node at this layer inherit
    (re-numbered) ids from the previous layer's row.  Returns the list of
    rows in layer order.
    """
    spans = []
    sub_len = len(special_token)

    for _, nodes in layers:

        pointer = 0   # first token position not yet filled in this row
        add_pre = 0   # next fresh span id to assign
        temp = [0] * token_len
        temp_indi = ['-'] * token_len  # NOTE(review): written below but never read or returned — confirm it is dead

        for node_idx in nodes:
            begin,end = influence_range[node_idx]

            # Record the constituent label (tag stripped of special_token)
            # for tokens covered by labelled nodes.
            if con_mapnode[node_idx][-sub_len:] == special_token:
                temp_indi[begin:end] = [con_mapnode[node_idx][:-sub_len]] * (end-begin)

            if(begin != pointer):
                # Gap before this node: copy the previous layer's ids,
                # shifted so they remain distinct from ids already emitted.
                sub_pre = spans[-1][pointer]
                temp[pointer:begin] = [x + add_pre-sub_pre for x in spans[-1][pointer:begin]] #
                add_pre = temp[begin-1] + 1
            temp[begin:end] = [add_pre] * (end-begin)

            add_pre += 1
            pointer = end
        if pointer != token_len:
            # Tail after the last node: same shifted copy of the previous row.
            sub_pre = spans[-1][pointer]
            temp[pointer:token_len] = [x + add_pre-sub_pre for x in spans[-1][pointer:token_len]]
            # NOTE(review): `begin` here leaks from the inner loop and the
            # resulting add_pre is never used again — confirm this is intentional.
            add_pre = temp[begin-1] + 1
        spans.append(temp)

    return spans
116 |
def head_to_adj_oneshot(heads, sent_len, aspect_dict,
                        leaf2root=True, root2leaf=True, self_loop=True):
    """Convert a sequence of head indexes into a 0/1 adjacency matrix.

    heads[i] is the parent of token i (-1 for the root).  Tokens inside each
    aspect term are fully connected to one another; dependency edges are
    added in either or both directions per the flags, plus optional
    self-loops.  Returns a float32 (sent_len, sent_len) numpy array.
    """
    adj = np.zeros((sent_len, sent_len), dtype=np.float32)

    heads = heads[:sent_len]

    # Fully connect all tokens belonging to the same aspect term.
    for aspect in aspect_dict:
        lo, hi = aspect['from'], aspect['to']
        adj[lo:hi, lo:hi] = 1

    for child, parent in enumerate(heads):
        if parent != -1:
            if leaf2root:
                adj[parent, child] = 1
            if root2leaf:
                adj[child, parent] = 1
        if self_loop:
            adj[child, child] = 1

    return adj
147 |
def get_conditional_adj(father, length, cd_span,
                        con_children, con_mapnode):
    """Build a (length, length) mask disconnecting sibling S-clauses.

    Among `father`'s children, every pair of distinct 'S[N]' constituents has
    the cross entries between their token ranges (positions of the child
    index within cd_span) zeroed; all other entries stay 1.

    Bug fix: the inner loop previously started at ``range(idx + 1, ...)``
    where ``idx`` is a child *position value*, not an index into s_slist —
    so sibling pairs were silently skipped whenever a non-S child preceded
    an S child.  It now enumerates all pairs of S-children.
    """
    s_slist = [idx for idx, node in enumerate(con_children[father]) if con_mapnode[node] == 'S[N]']
    st_adj = np.ones((length, length))

    for i in range(len(s_slist) - 1):
        idx = s_slist[i]
        begin_idx = cd_span.index(idx)
        end_idx = len(cd_span) - cd_span[::-1].index(idx)

        for j in range(i + 1, len(s_slist)):  # every later S-sibling of s_slist[i]
            jdx = s_slist[j]
            begin_jdx = cd_span.index(jdx)
            end_jdx = len(cd_span) - cd_span[::-1].index(jdx)
            for w_i in range(begin_idx, end_idx):
                for w_j in range(begin_jdx, end_jdx):
                    st_adj[w_i][w_j] = 0
                    st_adj[w_j][w_i] = 0
    return st_adj
166 |
167 |
def form_aspect_related_spans(aspect_node_idx, spans, mapnode, node2layerid, path_dict,select_N = ['ROOT','TOP','S','NP','VP'], special_token = '[N]'):
    """Collect the span rows of selected ancestors of an aspect node.

    Walks the aspect node's ancestor path (dropping the root sentinel) and,
    for each ancestor whose constituent label is in select_N, records that
    ancestor's layer span row — skipping consecutive duplicates — together
    with the bare label.  Returns (spans_range, span_indications).
    """
    tag_len = len(special_token)
    spans_range = []
    span_indications = []

    for ancestor in path_dict[aspect_node_idx][:-1]:
        label = mapnode[ancestor][:-tag_len]
        if label not in select_N:
            continue

        candidate = spans[node2layerid[ancestor]]
        # Keep the row only when it differs from the last one kept.
        if not spans_range or candidate != spans_range[-1]:
            spans_range.append(candidate)
            span_indications.append(label)

    return spans_range, span_indications
183 |
184 |
185 |
186 |
187 |
def select_func(spans, max_num_spans, length):
    """Pick exactly max_num_spans rows from `spans`.

    Too few rows: pad by repeating the last row (or an all-zero row of
    `length` when spans is empty).  Too many: sample at an even stride and
    always keep the last row.

    NOTE(review): when max_num_spans == 1 and spans holds more than one row,
    the original returns a single flat row rather than a list of rows; that
    behaviour is preserved here — confirm callers depend on it.
    """
    n = len(spans)

    if n <= max_num_spans:
        filler = spans[-1] if n > 0 else [0] * length
        return spans + [filler] * (max_num_spans - n)

    if max_num_spans == 1:
        # n > 1 on this path, so the empty-spans fallback is unreachable.
        return spans[0] if n > 0 else [0] * length

    stride = n // (max_num_spans - 1)
    return [spans[stride * k] for k in range(max_num_spans - 1)] + [spans[-1]]
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 |
4 | os.environ['CUDA_LAUNCH_BLOCKING'] = '1'
5 |
6 | import torch
7 | import random
8 | import numpy as np
9 | import time
10 |
11 | from sklearn import metrics
12 | import torch.nn.functional as F
13 |
14 | from transformers import BertTokenizer
15 | from snippet import *
16 | from model import S2GSL
17 |
18 |
19 |
def set_random_seed(args):
    """Seed every RNG in use (hash, numpy, torch, python, CUDA) from args.seed."""
    os.environ['PYTHONHASHSEED'] = str(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    random.seed(args.seed)

    if args.device == 'cuda':
        # Seed all visible GPUs and force deterministic cuDNN kernels.
        torch.cuda.manual_seed(args.seed)
        torch.cuda.manual_seed_all(args.seed)
        torch.backends.cudnn.benchmark = False
        torch.backends.cudnn.deterministic = True
36 |
37 |
def evaluate(model, dataloader, args, vocab):
    """Run the model over *dataloader* in eval mode and aggregate metrics.

    Args:
        model: called as model(inputs); expected to return
            (logits, span_loss, loss_root, graph_balance) — the three
            auxiliary losses are ignored at evaluation time.
        dataloader: yields batches whose last element is the label tensor.
        args: provides .device.
        vocab: unused here; kept so the signature matches callers.

    Returns:
        (mean cross-entropy loss over batches, accuracy in percent,
         macro-averaged F1).

    Fixes vs. original: dead locals (token_vocab, polarity_vocab, initial
    val_acc) removed; val_loss accumulated via loss.item() instead of the
    deprecated .data (which kept a tensor alive across batches); eval mode
    and no_grad hoisted out of the batch loop.
    """
    predictions, labels = [], []
    val_loss = 0.0

    model.eval()
    with torch.no_grad():
        for batch in dataloader:
            batch = [b.to(args.device) for b in batch]
            inputs = batch[:-1]
            label = batch[-1]

            logits, span_loss, loss_root, graph_balance = model(inputs)
            loss = F.cross_entropy(logits, label, reduction='mean')
            val_loss += loss.item()

            predictions += np.argmax(logits.cpu().numpy(), axis=1).tolist()
            labels += label.cpu().numpy().tolist()

    val_acc = metrics.accuracy_score(labels, predictions) * 100.0
    f1_score = metrics.f1_score(labels, predictions, average='macro')

    return val_loss / len(dataloader), val_acc, f1_score
66 |
67 |
def train(args, vocab, tokenizer, train_dataloader, valid_dataloader, test_dataloader, model, optimizer):
    """Main training loop.

    Trains for args.num_epoch epochs, evaluating on the validation split
    after each epoch.  Whenever validation accuracy or macro-F1 improves on
    its running best (or on epoch 1), the test split is evaluated and the
    scores recorded.  At the end, reports both the best recorded test scores
    and the final model's test scores.
    """
    ############################################################
    # train
    print("Training Set: {}".format(len(train_dataloader)))
    print("Valid Set: {}".format(len(valid_dataloader)))
    print("Test Set: {}".format(len(test_dataloader)))

    # Histories are seeded with 0.0 so max() is defined on the first epoch.
    train_acc_history, train_loss_history = [0.0], [0.0]
    val_acc_history, val_history, val_f1_score_history = [0.0], [0.0], [0.0]

    in_test_epoch, in_test_acc, in_test_f1 = 0, 0.0, 0.0
    patience = 0  # epochs since the last validation improvement

    in_test_best = list()  # (epoch, test_acc, test_f1) at each improvement
    for epoch in range(1, args.num_epoch + 1):
        begin_time = time.time()

        print("Epoch {}".format(epoch) + "-" * 60)

        train_loss, train_acc, train_step = 0.0, 0.0, 0

        train_all_predict = 0
        train_all_correct = 0

        for i, batch in enumerate(train_dataloader):
            model.train()
            optimizer.zero_grad()

            batch = [b.to(args.device) for b in batch]
            inputs = batch[:-1]
            label = batch[-1]  # label is always the last element of a batch

            # Total loss = classification CE + weighted auxiliary losses.
            logits, span_loss, loss_root,graph_balance = model(inputs)
            loss = F.cross_entropy(logits, label, reduction='mean')
            loss = loss + args.alpha * span_loss + args.beta * loss_root+ 0.1*graph_balance
            loss.backward()
            optimizer.step()

            train_loss += loss.item()
            corrects = (torch.max(logits, 1)[1].view(label.size()).data == label.data).sum()

            train_all_predict += label.size()[0]
            train_all_correct += corrects.item()

            train_step += 1
            if train_step % args.log_step == 0:
                print('{}/{} train_loss:{:.6f}, train_acc:{:.4f}'.format(
                    i, len(train_dataloader), train_loss / train_step, 100.0 * train_all_correct / train_all_predict
                ))

        train_acc = 100.0 * train_all_correct / train_all_predict
        val_loss, val_acc, val_f1 = evaluate(model, valid_dataloader, args, vocab)

        print(
            "[{:.2f}s] Pass!\nEnd of {} train_loss: {:.4f}, train_acc: {:.4f}, val_loss: {:.4f}, val_acc: {:.4f}, f1_score: {:.4f}".format(
                time.time() - begin_time, epoch, train_loss / train_step, train_acc, val_loss, val_acc, val_f1
            )
        )

        train_acc_history.append(train_acc)
        train_loss_history.append(train_loss / train_step)

        # Run the test split only when validation acc OR f1 improves.
        if epoch == 1 or float(val_acc) > max(val_acc_history) or float(val_f1) > max(val_f1_score_history):
            patience = 0
            test_loss, test_acc, test_f1 = evaluate(model, test_dataloader, args, vocab)

            in_test_epoch = epoch
            in_test_acc = test_acc
            in_test_f1 = test_f1
            in_test_best.append((epoch, test_acc, test_f1))

            print('-->In test: patience:{}, test_acc:{}, test_f1:{}'.format(patience, test_acc, test_f1))
        else:
            patience += 1

        val_acc_history.append(float(val_acc))
        val_f1_score_history.append(val_f1)

        # NOTE(review): there is no `break` here, so exceeding max_patience
        # only re-selects the best recorded scores by F1 and training runs
        # for the full num_epoch — confirm early stopping was intended.
        if patience >= args.max_patience:
            print('Exceeding max patience', patience)

            max_f1 = 0
            for item in in_test_best:
                if item[-1] > max_f1:
                    max_f1 = item[-1]
                    in_test_epoch = item[0]
                    in_test_acc = item[1]
                    in_test_f1 = item[-1]

    print('Training ended with {} epoches.'.format(epoch))
    _, last_test_acc, last_test_f1 = evaluate(model, test_dataloader, args, vocab)

    print('In Results: test_epoch:{}, test_acc:{}, test_f1:{}'.format(in_test_epoch, in_test_acc, in_test_f1))
    print('Last In Results: test_epoch:{}, test_acc:{}, test_f1:{}'.format(epoch, last_test_acc, last_test_f1))
162 |
163 |
164 |
def run(args, vocab, tokenizer):
    """Wire up data, model and optimizer, then launch training."""
    print_arguments(args)

    ###########################################################
    # data
    train_dataloader, valid_dataloader, test_dataloader = load_data(args, vocab, tokenizer=tokenizer)

    ###########################################################
    # model
    model = S2GSL(args, tokenizer).to(device=args.device)
    print(model)
    print('# parameters:', totally_parameters(model))

    ###########################################################
    # optimizer: the BERT encoder gets its own learning rate (args.bert_lr),
    # every other trainable parameter uses args.lr.
    bert_model = model.dual_graph_learning_module.context_encoder
    bert_param_ids = {id(p) for p in bert_model.parameters()}

    base_params = (p for p in model.parameters() if id(p) not in bert_param_ids)
    param_groups = [
        {"params": [p for p in base_params if p.requires_grad]},
        {"params": [p for p in bert_model.parameters() if p.requires_grad], "lr": args.bert_lr},
    ]

    optimizer = torch.optim.Adam(param_groups, lr=args.lr, weight_decay=args.l2)

    train(args, vocab, tokenizer, train_dataloader, valid_dataloader, test_dataloader, model, optimizer)
193 |
194 |
if __name__ == '__main__':
    # Load the BERT tokenizer from a local checkpoint directory
    # (relative path — run from the repo's source directory).
    bert_tokenizer = BertTokenizer.from_pretrained('../pretrain_model/bert-base-uncased')
    args = get_parameter()
    set_random_seed(args)

    vocab = load_vocab(args)  # also sets args.tok_size as a side effect

    run(args, vocab, tokenizer=bert_tokenizer)
--------------------------------------------------------------------------------
/vocab.py:
--------------------------------------------------------------------------------
1 | import pickle
2 |
3 |
class Vocab(object):
    """Token vocabulary with index<->string mappings built from a Counter.

    NOTE(review): the default specials ["", ""] look like '<pad>'/'<unk>'
    placeholders whose angle-bracket text was lost somewhere — confirm
    against the saved .vocab files.
    """

    def __init__(self, counter, specials=["", ""]):
        # Reserved index conventions: 0 = padding, 1 = unknown.
        self.pad_index = 0
        self.unk_index = 1
        counter = counter.copy()
        self.itos = list(specials)
        # Special symbols must not also appear as regular tokens.
        # (collections.Counter.__delitem__ ignores missing keys.)
        for special in specials:
            del counter[special]

        # Order tokens by frequency descending, ties broken alphabetically:
        # sort alphabetically first, then stably by frequency.
        ordered = sorted(counter.items(), key=lambda kv: kv[0])
        ordered.sort(key=lambda kv: kv[1], reverse=True)
        self.itos.extend(word for word, _ in ordered)

        # Reverse lookup: token -> index (later duplicates win).
        self.stoi = {token: index for index, token in enumerate(self.itos)}

    def __eq__(self, other):
        """Vocabs are equal when both mappings match exactly."""
        return self.stoi == other.stoi and self.itos == other.itos

    def __len__(self):
        return len(self.itos)

    def extend(self, v):
        """Append every token of vocab *v* not already present; return self."""
        for token in v.itos:
            if token not in self.stoi:
                self.stoi[token] = len(self.itos)
                self.itos.append(token)
        return self

    @staticmethod
    def load_vocab(vocab_path: str):
        """Unpickle a Vocab from *vocab_path* (only use with trusted files)."""
        with open(vocab_path, "rb") as f:
            print('Loading vocab from:', vocab_path)
            return pickle.load(f)

    def save_vocab(self, vocab_path):
        """Pickle this Vocab to *vocab_path*."""
        with open(vocab_path, "wb") as f:
            print('Saving vocab to:', vocab_path)
            pickle.dump(self, f)
--------------------------------------------------------------------------------