├── README.md
├── code base
└── parquet_playground
│ ├── .gitignore
│ ├── pom.xml
│ └── src
│ └── main
│ ├── java
│ └── org
│ │ └── apache
│ │ └── parquet
│ │ ├── filter2
│ │ ├── compat
│ │ │ ├── RowGroupFilter.java
│ │ │ └── RowGroupFilter.java.orig.txt
│ │ └── recordlevel
│ │ │ ├── FilteringRecordMaterializer.java
│ │ │ ├── FilteringRecordMaterializer.java.orig.txt
│ │ │ ├── IncrementallyUpdatedFilterPredicateBuilder.java
│ │ │ └── IncrementallyUpdatedFilterPredicateBuilder.java.orig.txt
│ │ ├── hadoop
│ │ ├── InternalParquetRecordReader.java
│ │ ├── InternalParquetRecordReader.java.orig.txt
│ │ ├── ParquetInputFormat.java
│ │ ├── ParquetInputFormat.java.orig.txt
│ │ ├── ParquetReader.java
│ │ ├── ParquetReader.java.orig.txt
│ │ ├── ParquetRecordReader.java
│ │ └── ParquetRecordReader.java.orig.txt
│ │ └── io
│ │ ├── ColumnIOFactory.java
│ │ ├── ColumnIOFactory.java.orig.txt
│ │ ├── MessageColumnIO.java
│ │ └── MessageColumnIO.java.orig.txt
│ └── scala
│ ├── com
│ └── gdt
│ │ └── parquet
│ │ └── playground
│ │ ├── PlayWithColumnIOFactoryVisit.scala
│ │ └── util
│ │ └── TempFileUtil.scala
│ └── orig
│ ├── DataSourceStrategy.scala
│ ├── DataSourceStrategy.scala.orig.txt
│ ├── ParquetRelation.scala
│ └── ParquetRelation.scala.orig.txt
├── parquet_logo.png
└── unstable
└── jira&pr
└── parquet_class_structure.png
/README.md:
--------------------------------------------------------------------------------
1 | # 酷玩 Parquet (alpha)
2 |
3 | Welcome to Coolplay Parquet!
4 |
5 | 
6 |
7 | 目前只是 alpha 版,只会放一些 parquet 周边的小代码。
8 |
9 | 1.0 版会放出最新 Parquet 的完整源代码解析,敬请期待!
10 |
--------------------------------------------------------------------------------
/code base/parquet_playground/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | target
3 | *.iml
--------------------------------------------------------------------------------
/code base/parquet_playground/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | com.gdt.parquet
8 | parquet_playground
9 | 1.0-SNAPSHOT
10 |
11 |
12 | UTF-8
13 |
14 |
15 |
16 |
17 | org.apache.spark
18 | spark-sql_2.10
19 | 1.5.1
20 |
21 |
22 | junit
23 | junit
24 | 4.12
25 | test
26 |
27 |
28 | org.scala-lang
29 | scala-library
30 | 2.10.4
31 |
34 |
35 |
36 |
37 |
38 | src/main/java
39 |
40 |
41 | net.alchim31.maven
42 | scala-maven-plugin
43 | 3.2.0
44 |
45 |
46 | scala-compile
47 |
48 | compile
49 |
50 |
51 |
52 | scala-test-compile
53 |
54 | testCompile
55 |
56 |
57 |
58 |
59 | incremental
60 |
61 | -target:jvm-1.7
62 | -encoding
63 | UTF-8
64 |
65 |
66 | -source
67 | 1.7
68 | -target
69 | 1.7
70 |
71 |
72 |
73 |
74 | maven-compiler-plugin
75 |
76 |
77 | default-compile
78 | none
79 |
80 |
81 | default-testCompile
82 | none
83 |
84 |
85 |
86 |
87 | org.apache.maven.plugins
88 | maven-assembly-plugin
89 | 2.2-beta-5
90 |
91 |
92 | jar-with-dependencies
93 |
94 |
95 |
96 |
97 | package
98 |
99 | single
100 |
101 |
102 |
103 |
104 |
105 | org.apache.maven.plugins
106 | maven-compiler-plugin
107 |
108 | 1.6
109 | 1.6
110 |
111 |
112 |
113 |
114 |
115 | src/main/resources
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.filter2.compat;
20 |
21 | import java.util.ArrayList;
22 | import java.util.List;
23 |
24 | import org.apache.parquet.filter2.compat.FilterCompat.Filter;
25 | import org.apache.parquet.filter2.compat.FilterCompat.NoOpFilter;
26 | import org.apache.parquet.filter2.compat.FilterCompat.Visitor;
27 | import org.apache.parquet.filter2.predicate.FilterPredicate;
28 | import org.apache.parquet.filter2.predicate.SchemaCompatibilityValidator;
29 | import org.apache.parquet.filter2.statisticslevel.StatisticsFilter;
30 | import org.apache.parquet.hadoop.metadata.BlockMetaData;
31 | import org.apache.parquet.schema.MessageType;
32 |
33 | import static org.apache.parquet.Preconditions.checkNotNull;
34 |
35 | /**
36 | * Given a {@link Filter} applies it to a list of BlockMetaData (row groups)
37 | * If the Filter is an {@link org.apache.parquet.filter.UnboundRecordFilter} or the no op filter,
38 | * no filtering will be performed.
39 | */
40 | public class RowGroupFilter implements Visitor> {
41 | private final List blocks;
42 | private final MessageType schema;
43 |
44 | public static List filterRowGroups(Filter filter, List blocks, MessageType schema) {
45 | checkNotNull(filter, "filter");
46 | return filter.accept(new RowGroupFilter(blocks, schema));
47 | }
48 |
49 | private RowGroupFilter(List blocks, MessageType schema) {
50 | this.blocks = checkNotNull(blocks, "blocks");
51 | this.schema = checkNotNull(schema, "schema");
52 | }
53 |
54 | @Override
55 | public List visit(FilterCompat.FilterPredicateCompat filterPredicateCompat) {
56 | FilterPredicate filterPredicate = filterPredicateCompat.getFilterPredicate();
57 |
58 | // check that the schema of the filter matches the schema of the file
59 | SchemaCompatibilityValidator.validate(filterPredicate, schema);
60 |
61 | List filteredBlocks = new ArrayList();
62 |
63 | for (BlockMetaData block : blocks) {
64 | if (!StatisticsFilter.canDrop(filterPredicate, block.getColumns())) {
65 | filteredBlocks.add(block);
66 | }
67 | }
68 |
69 | return filteredBlocks;
70 | }
71 |
72 | @Override
73 | public List visit(FilterCompat.UnboundRecordFilterCompat unboundRecordFilterCompat) {
74 | return blocks;
75 | }
76 |
77 | @Override
78 | public List visit(NoOpFilter noOpFilter) {
79 | return blocks;
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java.orig.txt:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.filter2.compat;
20 |
21 | import java.util.ArrayList;
22 | import java.util.List;
23 |
24 | import org.apache.parquet.filter2.compat.FilterCompat.Filter;
25 | import org.apache.parquet.filter2.compat.FilterCompat.NoOpFilter;
26 | import org.apache.parquet.filter2.compat.FilterCompat.Visitor;
27 | import org.apache.parquet.filter2.predicate.FilterPredicate;
28 | import org.apache.parquet.filter2.predicate.SchemaCompatibilityValidator;
29 | import org.apache.parquet.filter2.statisticslevel.StatisticsFilter;
30 | import org.apache.parquet.hadoop.metadata.BlockMetaData;
31 | import org.apache.parquet.schema.MessageType;
32 |
33 | import static org.apache.parquet.Preconditions.checkNotNull;
34 |
35 | /**
36 | * Given a {@link Filter} applies it to a list of BlockMetaData (row groups)
37 | * If the Filter is an {@link org.apache.parquet.filter.UnboundRecordFilter} or the no op filter,
38 | * no filtering will be performed.
39 | */
40 | public class RowGroupFilter implements Visitor> {
41 | private final List blocks;
42 | private final MessageType schema;
43 |
44 | public static List filterRowGroups(Filter filter, List blocks, MessageType schema) {
45 | checkNotNull(filter, "filter");
46 | return filter.accept(new RowGroupFilter(blocks, schema));
47 | }
48 |
49 | private RowGroupFilter(List blocks, MessageType schema) {
50 | this.blocks = checkNotNull(blocks, "blocks");
51 | this.schema = checkNotNull(schema, "schema");
52 | }
53 |
54 | @Override
55 | public List visit(FilterCompat.FilterPredicateCompat filterPredicateCompat) {
56 | FilterPredicate filterPredicate = filterPredicateCompat.getFilterPredicate();
57 |
58 | // check that the schema of the filter matches the schema of the file
59 | SchemaCompatibilityValidator.validate(filterPredicate, schema);
60 |
61 | List filteredBlocks = new ArrayList();
62 |
63 | for (BlockMetaData block : blocks) {
64 | if (!StatisticsFilter.canDrop(filterPredicate, block.getColumns())) {
65 | filteredBlocks.add(block);
66 | }
67 | }
68 |
69 | return filteredBlocks;
70 | }
71 |
72 | @Override
73 | public List visit(FilterCompat.UnboundRecordFilterCompat unboundRecordFilterCompat) {
74 | return blocks;
75 | }
76 |
77 | @Override
78 | public List visit(NoOpFilter noOpFilter) {
79 | return blocks;
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.filter2.recordlevel;
20 |
21 | import java.util.ArrayList;
22 | import java.util.Collections;
23 | import java.util.HashMap;
24 | import java.util.List;
25 | import java.util.Map;
26 |
27 | import org.apache.parquet.hadoop.metadata.ColumnPath;
28 | import org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
29 | import org.apache.parquet.io.PrimitiveColumnIO;
30 | import org.apache.parquet.io.api.GroupConverter;
31 | import org.apache.parquet.io.api.RecordMaterializer;
32 |
33 | import static org.apache.parquet.Preconditions.checkNotNull;
34 |
35 | /**
36 | * A pass-through proxy for a {@link RecordMaterializer} that updates a {@link IncrementallyUpdatedFilterPredicate}
37 | * as it receives concrete values for the current record. If, after the record assembly signals that
38 | * there are no more values, the predicate indicates that this record should be dropped, {@link #getCurrentRecord()}
39 | * returns null to signal that this record is being skipped.
40 | * Otherwise, the record is retrieved from the delegate.
41 | */
42 | public class FilteringRecordMaterializer extends RecordMaterializer {
43 | // the real record materializer
44 | private final RecordMaterializer delegate;
45 |
46 | // the proxied root converter
47 | private final FilteringGroupConverter rootConverter;
48 |
49 | // the predicate
50 | private final IncrementallyUpdatedFilterPredicate filterPredicate;
51 |
52 | public FilteringRecordMaterializer(
53 | RecordMaterializer delegate,
54 | List columnIOs,
55 | Map> valueInspectorsByColumn,
56 | IncrementallyUpdatedFilterPredicate filterPredicate) {
57 |
58 | checkNotNull(columnIOs, "columnIOs");
59 | checkNotNull(valueInspectorsByColumn, "valueInspectorsByColumn");
60 | this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
61 | this.delegate = checkNotNull(delegate, "delegate");
62 |
63 | // keep track of which path of indices leads to which primitive column
64 | Map, PrimitiveColumnIO> columnIOsByIndexFieldPath = new HashMap, PrimitiveColumnIO>();
65 |
66 | for (PrimitiveColumnIO c : columnIOs) {
67 | columnIOsByIndexFieldPath.put(getIndexFieldPathList(c), c);
68 | }
69 |
70 | // create a proxy for the delegate's root converter
71 | this.rootConverter = new FilteringGroupConverter(
72 | delegate.getRootConverter(), Collections.emptyList(), valueInspectorsByColumn, columnIOsByIndexFieldPath);
73 | }
74 |
75 | public static List getIndexFieldPathList(PrimitiveColumnIO c) {
76 | return intArrayToList(c.getIndexFieldPath());
77 | }
78 |
79 | public static List intArrayToList(int[] arr) {
80 | List list = new ArrayList(arr.length);
81 | for (int i : arr) {
82 | list.add(i);
83 | }
84 | return list;
85 | }
86 |
87 |
88 |
89 | @Override
90 | public T getCurrentRecord() {
91 |
92 | // find out if the predicate thinks we should keep this record
93 | boolean keep = IncrementallyUpdatedFilterPredicateEvaluator.evaluate(filterPredicate);
94 |
95 | // reset the stateful predicate no matter what
96 | IncrementallyUpdatedFilterPredicateResetter.reset(filterPredicate);
97 |
98 | if (keep) { T t= delegate.getCurrentRecord();
99 | return t;
100 | } else {
101 | System.out.println();
102 | return null;
103 | }
104 | }
105 |
106 | @Override
107 | public void skipCurrentRecord() {
108 | delegate.skipCurrentRecord();
109 | }
110 |
111 | @Override
112 | public GroupConverter getRootConverter() {
113 | return rootConverter;
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java.orig.txt:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.filter2.recordlevel;
20 |
21 | import java.util.ArrayList;
22 | import java.util.Collections;
23 | import java.util.HashMap;
24 | import java.util.List;
25 | import java.util.Map;
26 |
27 | import org.apache.parquet.hadoop.metadata.ColumnPath;
28 | import org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
29 | import org.apache.parquet.io.PrimitiveColumnIO;
30 | import org.apache.parquet.io.api.GroupConverter;
31 | import org.apache.parquet.io.api.RecordMaterializer;
32 |
33 | import static org.apache.parquet.Preconditions.checkNotNull;
34 |
35 | /**
36 | * A pass-through proxy for a {@link RecordMaterializer} that updates a {@link IncrementallyUpdatedFilterPredicate}
37 | * as it receives concrete values for the current record. If, after the record assembly signals that
38 | * there are no more values, the predicate indicates that this record should be dropped, {@link #getCurrentRecord()}
39 | * returns null to signal that this record is being skipped.
40 | * Otherwise, the record is retrieved from the delegate.
41 | */
42 | public class FilteringRecordMaterializer extends RecordMaterializer {
43 | // the real record materializer
44 | private final RecordMaterializer delegate;
45 |
46 | // the proxied root converter
47 | private final FilteringGroupConverter rootConverter;
48 |
49 | // the predicate
50 | private final IncrementallyUpdatedFilterPredicate filterPredicate;
51 |
52 | public FilteringRecordMaterializer(
53 | RecordMaterializer delegate,
54 | List columnIOs,
55 | Map> valueInspectorsByColumn,
56 | IncrementallyUpdatedFilterPredicate filterPredicate) {
57 |
58 | checkNotNull(columnIOs, "columnIOs");
59 | checkNotNull(valueInspectorsByColumn, "valueInspectorsByColumn");
60 | this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
61 | this.delegate = checkNotNull(delegate, "delegate");
62 |
63 | // keep track of which path of indices leads to which primitive column
64 | Map, PrimitiveColumnIO> columnIOsByIndexFieldPath = new HashMap, PrimitiveColumnIO>();
65 |
66 | for (PrimitiveColumnIO c : columnIOs) {
67 | columnIOsByIndexFieldPath.put(getIndexFieldPathList(c), c);
68 | }
69 |
70 | // create a proxy for the delegate's root converter
71 | this.rootConverter = new FilteringGroupConverter(
72 | delegate.getRootConverter(), Collections.emptyList(), valueInspectorsByColumn, columnIOsByIndexFieldPath);
73 | }
74 |
75 | public static List getIndexFieldPathList(PrimitiveColumnIO c) {
76 | return intArrayToList(c.getIndexFieldPath());
77 | }
78 |
79 | public static List intArrayToList(int[] arr) {
80 | List list = new ArrayList(arr.length);
81 | for (int i : arr) {
82 | list.add(i);
83 | }
84 | return list;
85 | }
86 |
87 |
88 |
89 | @Override
90 | public T getCurrentRecord() {
91 |
92 | // find out if the predicate thinks we should keep this record
93 | boolean keep = IncrementallyUpdatedFilterPredicateEvaluator.evaluate(filterPredicate);
94 |
95 | // reset the stateful predicate no matter what
96 | IncrementallyUpdatedFilterPredicateResetter.reset(filterPredicate);
97 |
98 | if (keep) {
99 | return delegate.getCurrentRecord();
100 | } else {
101 | // signals a skip
102 | return null;
103 | }
104 | }
105 |
106 | @Override
107 | public void skipCurrentRecord() {
108 | delegate.skipCurrentRecord();
109 | }
110 |
111 | @Override
112 | public GroupConverter getRootConverter() {
113 | return rootConverter;
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateBuilder.java:
--------------------------------------------------------------------------------
1 | package org.apache.parquet.filter2.recordlevel;
2 |
3 | import org.apache.parquet.hadoop.metadata.ColumnPath;
4 | import org.apache.parquet.filter2.predicate.Operators.Eq;
5 | import org.apache.parquet.filter2.predicate.Operators.Gt;
6 | import org.apache.parquet.filter2.predicate.Operators.GtEq;
7 | import org.apache.parquet.filter2.predicate.Operators.LogicalNotUserDefined;
8 | import org.apache.parquet.filter2.predicate.Operators.Lt;
9 | import org.apache.parquet.filter2.predicate.Operators.LtEq;
10 | import org.apache.parquet.filter2.predicate.Operators.NotEq;
11 | import org.apache.parquet.filter2.predicate.Operators.UserDefined;
12 | import org.apache.parquet.filter2.predicate.UserDefinedPredicate;
13 | import org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
14 | import org.apache.parquet.io.api.Binary;
15 |
16 | /**
17 | * This class is auto-generated by {@link parquet.filter2.IncrementallyUpdatedFilterPredicateGenerator}
18 | * Do not manually edit!
19 | * See {@link IncrementallyUpdatedFilterPredicateBuilderBase}
20 | */
21 | public class IncrementallyUpdatedFilterPredicateBuilder extends IncrementallyUpdatedFilterPredicateBuilderBase {
22 |
23 | @Override
24 | public > IncrementallyUpdatedFilterPredicate visit(Eq pred) {
25 | ColumnPath columnPath = pred.getColumn().getColumnPath();
26 | Class clazz = pred.getColumn().getColumnType();
27 |
28 | ValueInspector valueInspector = null;
29 |
30 | if (clazz.equals(Integer.class)) {
31 | if (pred.getValue() == null) {
32 | valueInspector = new ValueInspector() {
33 | @Override
34 | public void updateNull() {
35 | setResult(true);
36 | }
37 |
38 | @Override
39 | public void update(int value) {
40 | setResult(false);
41 | }
42 | };
43 | } else {
44 | final int target = (Integer) (Object) pred.getValue();
45 |
46 | valueInspector = new ValueInspector() {
47 | @Override
48 | public void updateNull() {
49 | setResult(false);
50 | }
51 |
52 | @Override
53 | public void update(int value) {
54 | setResult(value == target);
55 | }
56 | };
57 | }
58 | }
59 |
60 | if (clazz.equals(Long.class)) {
61 | if (pred.getValue() == null) {
62 | valueInspector = new ValueInspector() {
63 | @Override
64 | public void updateNull() {
65 | setResult(true);
66 | }
67 |
68 | @Override
69 | public void update(long value) {
70 | setResult(false);
71 | }
72 | };
73 | } else {
74 | final long target = (Long) (Object) pred.getValue();
75 |
76 | valueInspector = new ValueInspector() {
77 | @Override
78 | public void updateNull() {
79 | setResult(false);
80 | }
81 |
82 | @Override
83 | public void update(long value) {
84 | setResult(value == target);
85 | }
86 | };
87 | }
88 | }
89 |
90 | if (clazz.equals(Boolean.class)) {
91 | if (pred.getValue() == null) {
92 | valueInspector = new ValueInspector() {
93 | @Override
94 | public void updateNull() {
95 | setResult(true);
96 | }
97 |
98 | @Override
99 | public void update(boolean value) {
100 | setResult(false);
101 | }
102 | };
103 | } else {
104 | final boolean target = (Boolean) (Object) pred.getValue();
105 |
106 | valueInspector = new ValueInspector() {
107 | @Override
108 | public void updateNull() {
109 | setResult(false);
110 | }
111 |
112 | @Override
113 | public void update(boolean value) {
114 | setResult(value == target);
115 | }
116 | };
117 | }
118 | }
119 |
120 | if (clazz.equals(Float.class)) {
121 | if (pred.getValue() == null) {
122 | valueInspector = new ValueInspector() {
123 | @Override
124 | public void updateNull() {
125 | setResult(true);
126 | }
127 |
128 | @Override
129 | public void update(float value) {
130 | setResult(false);
131 | }
132 | };
133 | } else {
134 | final float target = (Float) (Object) pred.getValue();
135 |
136 | valueInspector = new ValueInspector() {
137 | @Override
138 | public void updateNull() {
139 | setResult(false);
140 | }
141 |
142 | @Override
143 | public void update(float value) {
144 | setResult(value == target);
145 | }
146 | };
147 | }
148 | }
149 |
150 | if (clazz.equals(Double.class)) {
151 | if (pred.getValue() == null) {
152 | valueInspector = new ValueInspector() {
153 | @Override
154 | public void updateNull() {
155 | setResult(true);
156 | }
157 |
158 | @Override
159 | public void update(double value) {
160 | setResult(false);
161 | }
162 | };
163 | } else {
164 | final double target = (Double) (Object) pred.getValue();
165 |
166 | valueInspector = new ValueInspector() {
167 | @Override
168 | public void updateNull() {
169 | setResult(false);
170 | }
171 |
172 | @Override
173 | public void update(double value) {
174 | setResult(value == target);
175 | }
176 | };
177 | }
178 | }
179 |
180 | if (clazz.equals(Binary.class)) {
181 | if (pred.getValue() == null) {
182 | valueInspector = new ValueInspector() {
183 | @Override
184 | public void updateNull() {
185 | setResult(true);
186 | }
187 |
188 | @Override
189 | public void update(Binary value) {
190 | setResult(false);
191 | }
192 | };
193 | } else {
194 | final Binary target = (Binary) (Object) pred.getValue();
195 |
196 | valueInspector = new ValueInspector() {
197 | @Override
198 | public void updateNull() {
199 | setResult(false);
200 | }
201 |
202 | @Override
203 | public void update(Binary value) {
204 | setResult(value.compareTo(target) == 0 );
205 | }
206 | };
207 | }
208 | }
209 |
210 | if (valueInspector == null) {
211 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
212 | }
213 |
214 | addValueInspector(columnPath, valueInspector);
215 | return valueInspector;
216 | }
217 |
218 | @Override
219 | public > IncrementallyUpdatedFilterPredicate visit(NotEq pred) {
220 | ColumnPath columnPath = pred.getColumn().getColumnPath();
221 | Class clazz = pred.getColumn().getColumnType();
222 |
223 | ValueInspector valueInspector = null;
224 |
225 | if (clazz.equals(Integer.class)) {
226 | if (pred.getValue() == null) {
227 | valueInspector = new ValueInspector() {
228 | @Override
229 | public void updateNull() {
230 | setResult(false);
231 | }
232 |
233 | @Override
234 | public void update(int value) {
235 | setResult(true);
236 | }
237 | };
238 | } else {
239 | final int target = (Integer) (Object) pred.getValue();
240 |
241 | valueInspector = new ValueInspector() {
242 | @Override
243 | public void updateNull() {
244 | setResult(true);
245 | }
246 |
247 | @Override
248 | public void update(int value) {
249 | setResult(value != target);
250 | }
251 | };
252 | }
253 | }
254 |
255 | if (clazz.equals(Long.class)) {
256 | if (pred.getValue() == null) {
257 | valueInspector = new ValueInspector() {
258 | @Override
259 | public void updateNull() {
260 | setResult(false);
261 | }
262 |
263 | @Override
264 | public void update(long value) {
265 | setResult(true);
266 | }
267 | };
268 | } else {
269 | final long target = (Long) (Object) pred.getValue();
270 |
271 | valueInspector = new ValueInspector() {
272 | @Override
273 | public void updateNull() {
274 | setResult(true);
275 | }
276 |
277 | @Override
278 | public void update(long value) {
279 | setResult(value != target);
280 | }
281 | };
282 | }
283 | }
284 |
285 | if (clazz.equals(Boolean.class)) {
286 | if (pred.getValue() == null) {
287 | valueInspector = new ValueInspector() {
288 | @Override
289 | public void updateNull() {
290 | setResult(false);
291 | }
292 |
293 | @Override
294 | public void update(boolean value) {
295 | setResult(true);
296 | }
297 | };
298 | } else {
299 | final boolean target = (Boolean) (Object) pred.getValue();
300 |
301 | valueInspector = new ValueInspector() {
302 | @Override
303 | public void updateNull() {
304 | setResult(true);
305 | }
306 |
307 | @Override
308 | public void update(boolean value) {
309 | setResult(value != target);
310 | }
311 | };
312 | }
313 | }
314 |
315 | if (clazz.equals(Float.class)) {
316 | if (pred.getValue() == null) {
317 | valueInspector = new ValueInspector() {
318 | @Override
319 | public void updateNull() {
320 | setResult(false);
321 | }
322 |
323 | @Override
324 | public void update(float value) {
325 | setResult(true);
326 | }
327 | };
328 | } else {
329 | final float target = (Float) (Object) pred.getValue();
330 |
331 | valueInspector = new ValueInspector() {
332 | @Override
333 | public void updateNull() {
334 | setResult(true);
335 | }
336 |
337 | @Override
338 | public void update(float value) {
339 | setResult(value != target);
340 | }
341 | };
342 | }
343 | }
344 |
345 | if (clazz.equals(Double.class)) {
346 | if (pred.getValue() == null) {
347 | valueInspector = new ValueInspector() {
348 | @Override
349 | public void updateNull() {
350 | setResult(false);
351 | }
352 |
353 | @Override
354 | public void update(double value) {
355 | setResult(true);
356 | }
357 | };
358 | } else {
359 | final double target = (Double) (Object) pred.getValue();
360 |
361 | valueInspector = new ValueInspector() {
362 | @Override
363 | public void updateNull() {
364 | setResult(true);
365 | }
366 |
367 | @Override
368 | public void update(double value) {
369 | setResult(value != target);
370 | }
371 | };
372 | }
373 | }
374 |
375 | if (clazz.equals(Binary.class)) {
376 | if (pred.getValue() == null) {
377 | valueInspector = new ValueInspector() {
378 | @Override
379 | public void updateNull() {
380 | setResult(false);
381 | }
382 |
383 | @Override
384 | public void update(Binary value) {
385 | setResult(true);
386 | }
387 | };
388 | } else {
389 | final Binary target = (Binary) (Object) pred.getValue();
390 |
391 | valueInspector = new ValueInspector() {
392 | @Override
393 | public void updateNull() {
394 | setResult(true);
395 | }
396 |
397 | @Override
398 | public void update(Binary value) {
399 | setResult(value.compareTo(target) != 0);
400 | }
401 | };
402 | }
403 | }
404 |
405 | if (valueInspector == null) {
406 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
407 | }
408 |
409 | addValueInspector(columnPath, valueInspector);
410 | return valueInspector;
411 | }
412 |
413 | @Override
414 | public > IncrementallyUpdatedFilterPredicate visit(Lt pred) {
415 | ColumnPath columnPath = pred.getColumn().getColumnPath();
416 | Class clazz = pred.getColumn().getColumnType();
417 |
418 | ValueInspector valueInspector = null;
419 |
420 | if (clazz.equals(Integer.class)) {
421 | final int target = (Integer) (Object) pred.getValue();
422 |
423 | valueInspector = new ValueInspector() {
424 | @Override
425 | public void updateNull() {
426 | setResult(false);
427 | }
428 |
429 | @Override
430 | public void update(int value) {
431 | setResult(value < target);
432 | }
433 | };
434 | }
435 |
436 | if (clazz.equals(Long.class)) {
437 | final long target = (Long) (Object) pred.getValue();
438 |
439 | valueInspector = new ValueInspector() {
440 | @Override
441 | public void updateNull() {
442 | setResult(false);
443 | }
444 |
445 | @Override
446 | public void update(long value) {
447 | setResult(value < target);
448 | }
449 | };
450 | }
451 |
452 | if (clazz.equals(Boolean.class)) {
453 | throw new IllegalArgumentException("Operator < not supported for Boolean");
454 | }
455 |
456 | if (clazz.equals(Float.class)) {
457 | final float target = (Float) (Object) pred.getValue();
458 |
459 | valueInspector = new ValueInspector() {
460 | @Override
461 | public void updateNull() {
462 | setResult(false);
463 | }
464 |
465 | @Override
466 | public void update(float value) {
467 | setResult(value < target);
468 | }
469 | };
470 | }
471 |
472 | if (clazz.equals(Double.class)) {
473 | final double target = (Double) (Object) pred.getValue();
474 |
475 | valueInspector = new ValueInspector() {
476 | @Override
477 | public void updateNull() {
478 | setResult(false);
479 | }
480 |
481 | @Override
482 | public void update(double value) {
483 | setResult(value < target);
484 | }
485 | };
486 | }
487 |
488 | if (clazz.equals(Binary.class)) {
489 | final Binary target = (Binary) (Object) pred.getValue();
490 |
491 | valueInspector = new ValueInspector() {
492 | @Override
493 | public void updateNull() {
494 | setResult(false);
495 | }
496 |
497 | @Override
498 | public void update(Binary value) {
499 | setResult(value.compareTo(target) < 0);
500 | }
501 | };
502 | }
503 |
504 | if (valueInspector == null) {
505 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
506 | }
507 |
508 | addValueInspector(columnPath, valueInspector);
509 | return valueInspector;
510 | }
511 |
512 | @Override
513 | public > IncrementallyUpdatedFilterPredicate visit(LtEq pred) {
514 | ColumnPath columnPath = pred.getColumn().getColumnPath();
515 | Class clazz = pred.getColumn().getColumnType();
516 |
517 | ValueInspector valueInspector = null;
518 |
519 | if (clazz.equals(Integer.class)) {
520 | final int target = (Integer) (Object) pred.getValue();
521 |
522 | valueInspector = new ValueInspector() {
523 | @Override
524 | public void updateNull() {
525 | setResult(false);
526 | }
527 |
528 | @Override
529 | public void update(int value) {
530 | setResult(value <= target);
531 | }
532 | };
533 | }
534 |
535 | if (clazz.equals(Long.class)) {
536 | final long target = (Long) (Object) pred.getValue();
537 |
538 | valueInspector = new ValueInspector() {
539 | @Override
540 | public void updateNull() {
541 | setResult(false);
542 | }
543 |
544 | @Override
545 | public void update(long value) {
546 | setResult(value <= target);
547 | }
548 | };
549 | }
550 |
551 | if (clazz.equals(Boolean.class)) {
552 | throw new IllegalArgumentException("Operator <= not supported for Boolean");
553 | }
554 |
555 | if (clazz.equals(Float.class)) {
556 | final float target = (Float) (Object) pred.getValue();
557 |
558 | valueInspector = new ValueInspector() {
559 | @Override
560 | public void updateNull() {
561 | setResult(false);
562 | }
563 |
564 | @Override
565 | public void update(float value) {
566 | setResult(value <= target);
567 | }
568 | };
569 | }
570 |
571 | if (clazz.equals(Double.class)) {
572 | final double target = (Double) (Object) pred.getValue();
573 |
574 | valueInspector = new ValueInspector() {
575 | @Override
576 | public void updateNull() {
577 | setResult(false);
578 | }
579 |
580 | @Override
581 | public void update(double value) {
582 | setResult(value <= target);
583 | }
584 | };
585 | }
586 |
587 | if (clazz.equals(Binary.class)) {
588 | final Binary target = (Binary) (Object) pred.getValue();
589 |
590 | valueInspector = new ValueInspector() {
591 | @Override
592 | public void updateNull() {
593 | setResult(false);
594 | }
595 |
596 | @Override
597 | public void update(Binary value) {
598 | setResult(value.compareTo(target) <= 0);
599 | }
600 | };
601 | }
602 |
603 | if (valueInspector == null) {
604 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
605 | }
606 |
607 | addValueInspector(columnPath, valueInspector);
608 | return valueInspector;
609 | }
610 |
611 | @Override
612 | public > IncrementallyUpdatedFilterPredicate visit(Gt pred) {
613 | ColumnPath columnPath = pred.getColumn().getColumnPath();
614 | Class clazz = pred.getColumn().getColumnType();
615 |
616 | ValueInspector valueInspector = null;
617 |
618 | if (clazz.equals(Integer.class)) {
619 | final int target = (Integer) (Object) pred.getValue();
620 |
621 | valueInspector = new ValueInspector() {
622 | @Override
623 | public void updateNull() {
624 | setResult(false);
625 | }
626 |
627 | @Override
628 | public void update(int value) {
629 | setResult(value > target);
630 | }
631 | };
632 | }
633 |
634 | if (clazz.equals(Long.class)) {
635 | final long target = (Long) (Object) pred.getValue();
636 |
637 | valueInspector = new ValueInspector() {
638 | @Override
639 | public void updateNull() {
640 | setResult(false);
641 | }
642 |
643 | @Override
644 | public void update(long value) {
645 | setResult(value > target);
646 | }
647 | };
648 | }
649 |
650 | if (clazz.equals(Boolean.class)) {
651 | throw new IllegalArgumentException("Operator > not supported for Boolean");
652 | }
653 |
654 | if (clazz.equals(Float.class)) {
655 | final float target = (Float) (Object) pred.getValue();
656 |
657 | valueInspector = new ValueInspector() {
658 | @Override
659 | public void updateNull() {
660 | setResult(false);
661 | }
662 |
663 | @Override
664 | public void update(float value) {
665 | setResult(value > target);
666 | }
667 | };
668 | }
669 |
670 | if (clazz.equals(Double.class)) {
671 | final double target = (Double) (Object) pred.getValue();
672 |
673 | valueInspector = new ValueInspector() {
674 | @Override
675 | public void updateNull() {
676 | setResult(false);
677 | }
678 |
679 | @Override
680 | public void update(double value) {
681 | setResult(value > target);
682 | }
683 | };
684 | }
685 |
686 | if (clazz.equals(Binary.class)) {
687 | final Binary target = (Binary) (Object) pred.getValue();
688 |
689 | valueInspector = new ValueInspector() {
690 | @Override
691 | public void updateNull() {
692 | setResult(false);
693 | }
694 |
695 | @Override
696 | public void update(Binary value) {
697 | setResult(value.compareTo(target) > 0);
698 | }
699 | };
700 | }
701 |
702 | if (valueInspector == null) {
703 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
704 | }
705 |
706 | addValueInspector(columnPath, valueInspector);
707 | return valueInspector;
708 | }
709 |
710 | @Override
711 | public > IncrementallyUpdatedFilterPredicate visit(GtEq pred) {
712 | ColumnPath columnPath = pred.getColumn().getColumnPath();
713 | Class clazz = pred.getColumn().getColumnType();
714 |
715 | ValueInspector valueInspector = null;
716 |
717 | if (clazz.equals(Integer.class)) {
718 | final int target = (Integer) (Object) pred.getValue();
719 |
720 | valueInspector = new ValueInspector() {
721 | @Override
722 | public void updateNull() {
723 | setResult(false);
724 | }
725 |
726 | @Override
727 | public void update(int value) {
728 | setResult(value >= target);
729 | }
730 | };
731 | }
732 |
733 | if (clazz.equals(Long.class)) {
734 | final long target = (Long) (Object) pred.getValue();
735 |
736 | valueInspector = new ValueInspector() {
737 | @Override
738 | public void updateNull() {
739 | setResult(false);
740 | }
741 |
742 | @Override
743 | public void update(long value) {
744 | setResult(value >= target);
745 | }
746 | };
747 | }
748 |
749 | if (clazz.equals(Boolean.class)) {
750 | throw new IllegalArgumentException("Operator >= not supported for Boolean");
751 | }
752 |
753 | if (clazz.equals(Float.class)) {
754 | final float target = (Float) (Object) pred.getValue();
755 |
756 | valueInspector = new ValueInspector() {
757 | @Override
758 | public void updateNull() {
759 | setResult(false);
760 | }
761 |
762 | @Override
763 | public void update(float value) {
764 | setResult(value >= target);
765 | }
766 | };
767 | }
768 |
769 | if (clazz.equals(Double.class)) {
770 | final double target = (Double) (Object) pred.getValue();
771 |
772 | valueInspector = new ValueInspector() {
773 | @Override
774 | public void updateNull() {
775 | setResult(false);
776 | }
777 |
778 | @Override
779 | public void update(double value) {
780 | setResult(value >= target);
781 | }
782 | };
783 | }
784 |
785 | if (clazz.equals(Binary.class)) {
786 | final Binary target = (Binary) (Object) pred.getValue();
787 |
788 | valueInspector = new ValueInspector() {
789 | @Override
790 | public void updateNull() {
791 | setResult(false);
792 | }
793 |
794 | @Override
795 | public void update(Binary value) {
796 | setResult(value.compareTo(target) >= 0);
797 | }
798 | };
799 | }
800 |
801 | if (valueInspector == null) {
802 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
803 | }
804 |
805 | addValueInspector(columnPath, valueInspector);
806 | return valueInspector;
807 | }
808 |
809 | @Override
810 | public , U extends UserDefinedPredicate> IncrementallyUpdatedFilterPredicate visit(UserDefined pred) {
811 | ColumnPath columnPath = pred.getColumn().getColumnPath();
812 | Class clazz = pred.getColumn().getColumnType();
813 |
814 | ValueInspector valueInspector = null;
815 |
816 | final U udp = pred.getUserDefinedPredicate();
817 |
818 | if (clazz.equals(Integer.class)) {
819 | valueInspector = new ValueInspector() {
820 | @Override
821 | public void updateNull() {
822 | setResult(udp.keep(null));
823 | }
824 |
825 | @SuppressWarnings("unchecked")
826 | @Override
827 | public void update(int value) {
828 | setResult(udp.keep((T) (Object) value));
829 | }
830 | };
831 | }
832 |
833 | if (clazz.equals(Long.class)) {
834 | valueInspector = new ValueInspector() {
835 | @Override
836 | public void updateNull() {
837 | setResult(udp.keep(null));
838 | }
839 |
840 | @SuppressWarnings("unchecked")
841 | @Override
842 | public void update(long value) {
843 | setResult(udp.keep((T) (Object) value));
844 | }
845 | };
846 | }
847 |
848 | if (clazz.equals(Boolean.class)) {
849 | valueInspector = new ValueInspector() {
850 | @Override
851 | public void updateNull() {
852 | setResult(udp.keep(null));
853 | }
854 |
855 | @SuppressWarnings("unchecked")
856 | @Override
857 | public void update(boolean value) {
858 | setResult(udp.keep((T) (Object) value));
859 | }
860 | };
861 | }
862 |
863 | if (clazz.equals(Float.class)) {
864 | valueInspector = new ValueInspector() {
865 | @Override
866 | public void updateNull() {
867 | setResult(udp.keep(null));
868 | }
869 |
870 | @SuppressWarnings("unchecked")
871 | @Override
872 | public void update(float value) {
873 | setResult(udp.keep((T) (Object) value));
874 | }
875 | };
876 | }
877 |
878 | if (clazz.equals(Double.class)) {
879 | valueInspector = new ValueInspector() {
880 | @Override
881 | public void updateNull() {
882 | setResult(udp.keep(null));
883 | }
884 |
885 | @SuppressWarnings("unchecked")
886 | @Override
887 | public void update(double value) {
888 | setResult(udp.keep((T) (Object) value));
889 | }
890 | };
891 | }
892 |
893 | if (clazz.equals(Binary.class)) {
894 | valueInspector = new ValueInspector() {
895 | @Override
896 | public void updateNull() {
897 | setResult(udp.keep(null));
898 | }
899 |
900 | @SuppressWarnings("unchecked")
901 | @Override
902 | public void update(Binary value) {
903 | setResult(udp.keep((T) (Object) value));
904 | }
905 | };
906 | }
907 |
908 | if (valueInspector == null) {
909 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
910 | }
911 |
912 | addValueInspector(columnPath, valueInspector);
913 | return valueInspector;
914 | }
915 |
916 | @Override
917 | public , U extends UserDefinedPredicate> IncrementallyUpdatedFilterPredicate visit(LogicalNotUserDefined notPred) {
918 | UserDefined pred = notPred.getUserDefined();
919 | ColumnPath columnPath = pred.getColumn().getColumnPath();
920 | Class clazz = pred.getColumn().getColumnType();
921 |
922 | ValueInspector valueInspector = null;
923 |
924 | final U udp = pred.getUserDefinedPredicate();
925 |
926 | if (clazz.equals(Integer.class)) {
927 | valueInspector = new ValueInspector() {
928 | @Override
929 | public void updateNull() {
930 | setResult(!udp.keep(null));
931 | }
932 |
933 | @SuppressWarnings("unchecked")
934 | @Override
935 | public void update(int value) {
936 | setResult(!udp.keep((T) (Object) value));
937 | }
938 | };
939 | }
940 |
941 | if (clazz.equals(Long.class)) {
942 | valueInspector = new ValueInspector() {
943 | @Override
944 | public void updateNull() {
945 | setResult(!udp.keep(null));
946 | }
947 |
948 | @SuppressWarnings("unchecked")
949 | @Override
950 | public void update(long value) {
951 | setResult(!udp.keep((T) (Object) value));
952 | }
953 | };
954 | }
955 |
956 | if (clazz.equals(Boolean.class)) {
957 | valueInspector = new ValueInspector() {
958 | @Override
959 | public void updateNull() {
960 | setResult(!udp.keep(null));
961 | }
962 |
963 | @SuppressWarnings("unchecked")
964 | @Override
965 | public void update(boolean value) {
966 | setResult(!udp.keep((T) (Object) value));
967 | }
968 | };
969 | }
970 |
971 | if (clazz.equals(Float.class)) {
972 | valueInspector = new ValueInspector() {
973 | @Override
974 | public void updateNull() {
975 | setResult(!udp.keep(null));
976 | }
977 |
978 | @SuppressWarnings("unchecked")
979 | @Override
980 | public void update(float value) {
981 | setResult(!udp.keep((T) (Object) value));
982 | }
983 | };
984 | }
985 |
986 | if (clazz.equals(Double.class)) {
987 | valueInspector = new ValueInspector() {
988 | @Override
989 | public void updateNull() {
990 | setResult(!udp.keep(null));
991 | }
992 |
993 | @SuppressWarnings("unchecked")
994 | @Override
995 | public void update(double value) {
996 | setResult(!udp.keep((T) (Object) value));
997 | }
998 | };
999 | }
1000 |
1001 | if (clazz.equals(Binary.class)) {
1002 | valueInspector = new ValueInspector() {
1003 | @Override
1004 | public void updateNull() {
1005 | setResult(!udp.keep(null));
1006 | }
1007 |
1008 | @SuppressWarnings("unchecked")
1009 | @Override
1010 | public void update(Binary value) {
1011 | setResult(!udp.keep((T) (Object) value));
1012 | }
1013 | };
1014 | }
1015 |
1016 | if (valueInspector == null) {
1017 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
1018 | }
1019 |
1020 | addValueInspector(columnPath, valueInspector);
1021 | return valueInspector;
1022 | }
1023 |
1024 | }
1025 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateBuilder.java.orig.txt:
--------------------------------------------------------------------------------
1 | package org.apache.parquet.filter2.recordlevel;
2 |
3 | import org.apache.parquet.hadoop.metadata.ColumnPath;
4 | import org.apache.parquet.filter2.predicate.Operators.Eq;
5 | import org.apache.parquet.filter2.predicate.Operators.Gt;
6 | import org.apache.parquet.filter2.predicate.Operators.GtEq;
7 | import org.apache.parquet.filter2.predicate.Operators.LogicalNotUserDefined;
8 | import org.apache.parquet.filter2.predicate.Operators.Lt;
9 | import org.apache.parquet.filter2.predicate.Operators.LtEq;
10 | import org.apache.parquet.filter2.predicate.Operators.NotEq;
11 | import org.apache.parquet.filter2.predicate.Operators.UserDefined;
12 | import org.apache.parquet.filter2.predicate.UserDefinedPredicate;
13 | import org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
14 | import org.apache.parquet.io.api.Binary;
15 |
16 | /**
17 | * This class is auto-generated by {@link parquet.filter2.IncrementallyUpdatedFilterPredicateGenerator}
18 | * Do not manually edit!
19 | * See {@link IncrementallyUpdatedFilterPredicateBuilderBase}
20 | */
21 | public class IncrementallyUpdatedFilterPredicateBuilder extends IncrementallyUpdatedFilterPredicateBuilderBase {
22 |
23 | @Override
24 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(Eq<T> pred) {
25 | ColumnPath columnPath = pred.getColumn().getColumnPath();
26 | Class clazz = pred.getColumn().getColumnType();
27 |
28 | ValueInspector valueInspector = null;
29 |
30 | if (clazz.equals(Integer.class)) {
31 | if (pred.getValue() == null) {
32 | valueInspector = new ValueInspector() {
33 | @Override
34 | public void updateNull() {
35 | setResult(true);
36 | }
37 |
38 | @Override
39 | public void update(int value) {
40 | setResult(false);
41 | }
42 | };
43 | } else {
44 | final int target = (Integer) (Object) pred.getValue();
45 |
46 | valueInspector = new ValueInspector() {
47 | @Override
48 | public void updateNull() {
49 | setResult(false);
50 | }
51 |
52 | @Override
53 | public void update(int value) {
54 | setResult(value == target);
55 | }
56 | };
57 | }
58 | }
59 |
60 | if (clazz.equals(Long.class)) {
61 | if (pred.getValue() == null) {
62 | valueInspector = new ValueInspector() {
63 | @Override
64 | public void updateNull() {
65 | setResult(true);
66 | }
67 |
68 | @Override
69 | public void update(long value) {
70 | setResult(false);
71 | }
72 | };
73 | } else {
74 | final long target = (Long) (Object) pred.getValue();
75 |
76 | valueInspector = new ValueInspector() {
77 | @Override
78 | public void updateNull() {
79 | setResult(false);
80 | }
81 |
82 | @Override
83 | public void update(long value) {
84 | setResult(value == target);
85 | }
86 | };
87 | }
88 | }
89 |
90 | if (clazz.equals(Boolean.class)) {
91 | if (pred.getValue() == null) {
92 | valueInspector = new ValueInspector() {
93 | @Override
94 | public void updateNull() {
95 | setResult(true);
96 | }
97 |
98 | @Override
99 | public void update(boolean value) {
100 | setResult(false);
101 | }
102 | };
103 | } else {
104 | final boolean target = (Boolean) (Object) pred.getValue();
105 |
106 | valueInspector = new ValueInspector() {
107 | @Override
108 | public void updateNull() {
109 | setResult(false);
110 | }
111 |
112 | @Override
113 | public void update(boolean value) {
114 | setResult(value == target);
115 | }
116 | };
117 | }
118 | }
119 |
120 | if (clazz.equals(Float.class)) {
121 | if (pred.getValue() == null) {
122 | valueInspector = new ValueInspector() {
123 | @Override
124 | public void updateNull() {
125 | setResult(true);
126 | }
127 |
128 | @Override
129 | public void update(float value) {
130 | setResult(false);
131 | }
132 | };
133 | } else {
134 | final float target = (Float) (Object) pred.getValue();
135 |
136 | valueInspector = new ValueInspector() {
137 | @Override
138 | public void updateNull() {
139 | setResult(false);
140 | }
141 |
142 | @Override
143 | public void update(float value) {
144 | setResult(value == target);
145 | }
146 | };
147 | }
148 | }
149 |
150 | if (clazz.equals(Double.class)) {
151 | if (pred.getValue() == null) {
152 | valueInspector = new ValueInspector() {
153 | @Override
154 | public void updateNull() {
155 | setResult(true);
156 | }
157 |
158 | @Override
159 | public void update(double value) {
160 | setResult(false);
161 | }
162 | };
163 | } else {
164 | final double target = (Double) (Object) pred.getValue();
165 |
166 | valueInspector = new ValueInspector() {
167 | @Override
168 | public void updateNull() {
169 | setResult(false);
170 | }
171 |
172 | @Override
173 | public void update(double value) {
174 | setResult(value == target);
175 | }
176 | };
177 | }
178 | }
179 |
180 | if (clazz.equals(Binary.class)) {
181 | if (pred.getValue() == null) {
182 | valueInspector = new ValueInspector() {
183 | @Override
184 | public void updateNull() {
185 | setResult(true);
186 | }
187 |
188 | @Override
189 | public void update(Binary value) {
190 | setResult(false);
191 | }
192 | };
193 | } else {
194 | final Binary target = (Binary) (Object) pred.getValue();
195 |
196 | valueInspector = new ValueInspector() {
197 | @Override
198 | public void updateNull() {
199 | setResult(false);
200 | }
201 |
202 | @Override
203 | public void update(Binary value) {
204 | setResult(value.compareTo(target) == 0 );
205 | }
206 | };
207 | }
208 | }
209 |
210 | if (valueInspector == null) {
211 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
212 | }
213 |
214 | addValueInspector(columnPath, valueInspector);
215 | return valueInspector;
216 | }
217 |
218 | @Override
219 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(NotEq<T> pred) {
220 | ColumnPath columnPath = pred.getColumn().getColumnPath();
221 | Class clazz = pred.getColumn().getColumnType();
222 |
223 | ValueInspector valueInspector = null;
224 |
225 | if (clazz.equals(Integer.class)) {
226 | if (pred.getValue() == null) {
227 | valueInspector = new ValueInspector() {
228 | @Override
229 | public void updateNull() {
230 | setResult(false);
231 | }
232 |
233 | @Override
234 | public void update(int value) {
235 | setResult(true);
236 | }
237 | };
238 | } else {
239 | final int target = (Integer) (Object) pred.getValue();
240 |
241 | valueInspector = new ValueInspector() {
242 | @Override
243 | public void updateNull() {
244 | setResult(true);
245 | }
246 |
247 | @Override
248 | public void update(int value) {
249 | setResult(value != target);
250 | }
251 | };
252 | }
253 | }
254 |
255 | if (clazz.equals(Long.class)) {
256 | if (pred.getValue() == null) {
257 | valueInspector = new ValueInspector() {
258 | @Override
259 | public void updateNull() {
260 | setResult(false);
261 | }
262 |
263 | @Override
264 | public void update(long value) {
265 | setResult(true);
266 | }
267 | };
268 | } else {
269 | final long target = (Long) (Object) pred.getValue();
270 |
271 | valueInspector = new ValueInspector() {
272 | @Override
273 | public void updateNull() {
274 | setResult(true);
275 | }
276 |
277 | @Override
278 | public void update(long value) {
279 | setResult(value != target);
280 | }
281 | };
282 | }
283 | }
284 |
285 | if (clazz.equals(Boolean.class)) {
286 | if (pred.getValue() == null) {
287 | valueInspector = new ValueInspector() {
288 | @Override
289 | public void updateNull() {
290 | setResult(false);
291 | }
292 |
293 | @Override
294 | public void update(boolean value) {
295 | setResult(true);
296 | }
297 | };
298 | } else {
299 | final boolean target = (Boolean) (Object) pred.getValue();
300 |
301 | valueInspector = new ValueInspector() {
302 | @Override
303 | public void updateNull() {
304 | setResult(true);
305 | }
306 |
307 | @Override
308 | public void update(boolean value) {
309 | setResult(value != target);
310 | }
311 | };
312 | }
313 | }
314 |
315 | if (clazz.equals(Float.class)) {
316 | if (pred.getValue() == null) {
317 | valueInspector = new ValueInspector() {
318 | @Override
319 | public void updateNull() {
320 | setResult(false);
321 | }
322 |
323 | @Override
324 | public void update(float value) {
325 | setResult(true);
326 | }
327 | };
328 | } else {
329 | final float target = (Float) (Object) pred.getValue();
330 |
331 | valueInspector = new ValueInspector() {
332 | @Override
333 | public void updateNull() {
334 | setResult(true);
335 | }
336 |
337 | @Override
338 | public void update(float value) {
339 | setResult(value != target);
340 | }
341 | };
342 | }
343 | }
344 |
345 | if (clazz.equals(Double.class)) {
346 | if (pred.getValue() == null) {
347 | valueInspector = new ValueInspector() {
348 | @Override
349 | public void updateNull() {
350 | setResult(false);
351 | }
352 |
353 | @Override
354 | public void update(double value) {
355 | setResult(true);
356 | }
357 | };
358 | } else {
359 | final double target = (Double) (Object) pred.getValue();
360 |
361 | valueInspector = new ValueInspector() {
362 | @Override
363 | public void updateNull() {
364 | setResult(true);
365 | }
366 |
367 | @Override
368 | public void update(double value) {
369 | setResult(value != target);
370 | }
371 | };
372 | }
373 | }
374 |
375 | if (clazz.equals(Binary.class)) {
376 | if (pred.getValue() == null) {
377 | valueInspector = new ValueInspector() {
378 | @Override
379 | public void updateNull() {
380 | setResult(false);
381 | }
382 |
383 | @Override
384 | public void update(Binary value) {
385 | setResult(true);
386 | }
387 | };
388 | } else {
389 | final Binary target = (Binary) (Object) pred.getValue();
390 |
391 | valueInspector = new ValueInspector() {
392 | @Override
393 | public void updateNull() {
394 | setResult(true);
395 | }
396 |
397 | @Override
398 | public void update(Binary value) {
399 | setResult(value.compareTo(target) != 0);
400 | }
401 | };
402 | }
403 | }
404 |
405 | if (valueInspector == null) {
406 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
407 | }
408 |
409 | addValueInspector(columnPath, valueInspector);
410 | return valueInspector;
411 | }
412 |
413 | @Override
414 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(Lt<T> pred) {
415 | ColumnPath columnPath = pred.getColumn().getColumnPath();
416 | Class clazz = pred.getColumn().getColumnType();
417 |
418 | ValueInspector valueInspector = null;
419 |
420 | if (clazz.equals(Integer.class)) {
421 | final int target = (Integer) (Object) pred.getValue();
422 |
423 | valueInspector = new ValueInspector() {
424 | @Override
425 | public void updateNull() {
426 | setResult(false);
427 | }
428 |
429 | @Override
430 | public void update(int value) {
431 | setResult(value < target);
432 | }
433 | };
434 | }
435 |
436 | if (clazz.equals(Long.class)) {
437 | final long target = (Long) (Object) pred.getValue();
438 |
439 | valueInspector = new ValueInspector() {
440 | @Override
441 | public void updateNull() {
442 | setResult(false);
443 | }
444 |
445 | @Override
446 | public void update(long value) {
447 | setResult(value < target);
448 | }
449 | };
450 | }
451 |
452 | if (clazz.equals(Boolean.class)) {
453 | throw new IllegalArgumentException("Operator < not supported for Boolean");
454 | }
455 |
456 | if (clazz.equals(Float.class)) {
457 | final float target = (Float) (Object) pred.getValue();
458 |
459 | valueInspector = new ValueInspector() {
460 | @Override
461 | public void updateNull() {
462 | setResult(false);
463 | }
464 |
465 | @Override
466 | public void update(float value) {
467 | setResult(value < target);
468 | }
469 | };
470 | }
471 |
472 | if (clazz.equals(Double.class)) {
473 | final double target = (Double) (Object) pred.getValue();
474 |
475 | valueInspector = new ValueInspector() {
476 | @Override
477 | public void updateNull() {
478 | setResult(false);
479 | }
480 |
481 | @Override
482 | public void update(double value) {
483 | setResult(value < target);
484 | }
485 | };
486 | }
487 |
488 | if (clazz.equals(Binary.class)) {
489 | final Binary target = (Binary) (Object) pred.getValue();
490 |
491 | valueInspector = new ValueInspector() {
492 | @Override
493 | public void updateNull() {
494 | setResult(false);
495 | }
496 |
497 | @Override
498 | public void update(Binary value) {
499 | setResult(value.compareTo(target) < 0);
500 | }
501 | };
502 | }
503 |
504 | if (valueInspector == null) {
505 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
506 | }
507 |
508 | addValueInspector(columnPath, valueInspector);
509 | return valueInspector;
510 | }
511 |
512 | @Override
513 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(LtEq<T> pred) {
514 | ColumnPath columnPath = pred.getColumn().getColumnPath();
515 | Class clazz = pred.getColumn().getColumnType();
516 |
517 | ValueInspector valueInspector = null;
518 |
519 | if (clazz.equals(Integer.class)) {
520 | final int target = (Integer) (Object) pred.getValue();
521 |
522 | valueInspector = new ValueInspector() {
523 | @Override
524 | public void updateNull() {
525 | setResult(false);
526 | }
527 |
528 | @Override
529 | public void update(int value) {
530 | setResult(value <= target);
531 | }
532 | };
533 | }
534 |
535 | if (clazz.equals(Long.class)) {
536 | final long target = (Long) (Object) pred.getValue();
537 |
538 | valueInspector = new ValueInspector() {
539 | @Override
540 | public void updateNull() {
541 | setResult(false);
542 | }
543 |
544 | @Override
545 | public void update(long value) {
546 | setResult(value <= target);
547 | }
548 | };
549 | }
550 |
551 | if (clazz.equals(Boolean.class)) {
552 | throw new IllegalArgumentException("Operator <= not supported for Boolean");
553 | }
554 |
555 | if (clazz.equals(Float.class)) {
556 | final float target = (Float) (Object) pred.getValue();
557 |
558 | valueInspector = new ValueInspector() {
559 | @Override
560 | public void updateNull() {
561 | setResult(false);
562 | }
563 |
564 | @Override
565 | public void update(float value) {
566 | setResult(value <= target);
567 | }
568 | };
569 | }
570 |
571 | if (clazz.equals(Double.class)) {
572 | final double target = (Double) (Object) pred.getValue();
573 |
574 | valueInspector = new ValueInspector() {
575 | @Override
576 | public void updateNull() {
577 | setResult(false);
578 | }
579 |
580 | @Override
581 | public void update(double value) {
582 | setResult(value <= target);
583 | }
584 | };
585 | }
586 |
587 | if (clazz.equals(Binary.class)) {
588 | final Binary target = (Binary) (Object) pred.getValue();
589 |
590 | valueInspector = new ValueInspector() {
591 | @Override
592 | public void updateNull() {
593 | setResult(false);
594 | }
595 |
596 | @Override
597 | public void update(Binary value) {
598 | setResult(value.compareTo(target) <= 0);
599 | }
600 | };
601 | }
602 |
603 | if (valueInspector == null) {
604 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
605 | }
606 |
607 | addValueInspector(columnPath, valueInspector);
608 | return valueInspector;
609 | }
610 |
611 | @Override
612 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(Gt<T> pred) {
613 | ColumnPath columnPath = pred.getColumn().getColumnPath();
614 | Class clazz = pred.getColumn().getColumnType();
615 |
616 | ValueInspector valueInspector = null;
617 |
618 | if (clazz.equals(Integer.class)) {
619 | final int target = (Integer) (Object) pred.getValue();
620 |
621 | valueInspector = new ValueInspector() {
622 | @Override
623 | public void updateNull() {
624 | setResult(false);
625 | }
626 |
627 | @Override
628 | public void update(int value) {
629 | setResult(value > target);
630 | }
631 | };
632 | }
633 |
634 | if (clazz.equals(Long.class)) {
635 | final long target = (Long) (Object) pred.getValue();
636 |
637 | valueInspector = new ValueInspector() {
638 | @Override
639 | public void updateNull() {
640 | setResult(false);
641 | }
642 |
643 | @Override
644 | public void update(long value) {
645 | setResult(value > target);
646 | }
647 | };
648 | }
649 |
650 | if (clazz.equals(Boolean.class)) {
651 | throw new IllegalArgumentException("Operator > not supported for Boolean");
652 | }
653 |
654 | if (clazz.equals(Float.class)) {
655 | final float target = (Float) (Object) pred.getValue();
656 |
657 | valueInspector = new ValueInspector() {
658 | @Override
659 | public void updateNull() {
660 | setResult(false);
661 | }
662 |
663 | @Override
664 | public void update(float value) {
665 | setResult(value > target);
666 | }
667 | };
668 | }
669 |
670 | if (clazz.equals(Double.class)) {
671 | final double target = (Double) (Object) pred.getValue();
672 |
673 | valueInspector = new ValueInspector() {
674 | @Override
675 | public void updateNull() {
676 | setResult(false);
677 | }
678 |
679 | @Override
680 | public void update(double value) {
681 | setResult(value > target);
682 | }
683 | };
684 | }
685 |
686 | if (clazz.equals(Binary.class)) {
687 | final Binary target = (Binary) (Object) pred.getValue();
688 |
689 | valueInspector = new ValueInspector() {
690 | @Override
691 | public void updateNull() {
692 | setResult(false);
693 | }
694 |
695 | @Override
696 | public void update(Binary value) {
697 | setResult(value.compareTo(target) > 0);
698 | }
699 | };
700 | }
701 |
702 | if (valueInspector == null) {
703 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
704 | }
705 |
706 | addValueInspector(columnPath, valueInspector);
707 | return valueInspector;
708 | }
709 |
710 | @Override
711 | public <T extends Comparable<T>> IncrementallyUpdatedFilterPredicate visit(GtEq<T> pred) {
712 | ColumnPath columnPath = pred.getColumn().getColumnPath();
713 | Class clazz = pred.getColumn().getColumnType();
714 |
715 | ValueInspector valueInspector = null;
716 |
717 | if (clazz.equals(Integer.class)) {
718 | final int target = (Integer) (Object) pred.getValue();
719 |
720 | valueInspector = new ValueInspector() {
721 | @Override
722 | public void updateNull() {
723 | setResult(false);
724 | }
725 |
726 | @Override
727 | public void update(int value) {
728 | setResult(value >= target);
729 | }
730 | };
731 | }
732 |
733 | if (clazz.equals(Long.class)) {
734 | final long target = (Long) (Object) pred.getValue();
735 |
736 | valueInspector = new ValueInspector() {
737 | @Override
738 | public void updateNull() {
739 | setResult(false);
740 | }
741 |
742 | @Override
743 | public void update(long value) {
744 | setResult(value >= target);
745 | }
746 | };
747 | }
748 |
749 | if (clazz.equals(Boolean.class)) {
750 | throw new IllegalArgumentException("Operator >= not supported for Boolean");
751 | }
752 |
753 | if (clazz.equals(Float.class)) {
754 | final float target = (Float) (Object) pred.getValue();
755 |
756 | valueInspector = new ValueInspector() {
757 | @Override
758 | public void updateNull() {
759 | setResult(false);
760 | }
761 |
762 | @Override
763 | public void update(float value) {
764 | setResult(value >= target);
765 | }
766 | };
767 | }
768 |
769 | if (clazz.equals(Double.class)) {
770 | final double target = (Double) (Object) pred.getValue();
771 |
772 | valueInspector = new ValueInspector() {
773 | @Override
774 | public void updateNull() {
775 | setResult(false);
776 | }
777 |
778 | @Override
779 | public void update(double value) {
780 | setResult(value >= target);
781 | }
782 | };
783 | }
784 |
785 | if (clazz.equals(Binary.class)) {
786 | final Binary target = (Binary) (Object) pred.getValue();
787 |
788 | valueInspector = new ValueInspector() {
789 | @Override
790 | public void updateNull() {
791 | setResult(false);
792 | }
793 |
794 | @Override
795 | public void update(Binary value) {
796 | setResult(value.compareTo(target) >= 0);
797 | }
798 | };
799 | }
800 |
801 | if (valueInspector == null) {
802 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
803 | }
804 |
805 | addValueInspector(columnPath, valueInspector);
806 | return valueInspector;
807 | }
808 |
809 | @Override
810 | public , U extends UserDefinedPredicate> IncrementallyUpdatedFilterPredicate visit(UserDefined pred) {
811 | ColumnPath columnPath = pred.getColumn().getColumnPath();
812 | Class clazz = pred.getColumn().getColumnType();
813 |
814 | ValueInspector valueInspector = null;
815 |
816 | final U udp = pred.getUserDefinedPredicate();
817 |
818 | if (clazz.equals(Integer.class)) {
819 | valueInspector = new ValueInspector() {
820 | @Override
821 | public void updateNull() {
822 | setResult(udp.keep(null));
823 | }
824 |
825 | @SuppressWarnings("unchecked")
826 | @Override
827 | public void update(int value) {
828 | setResult(udp.keep((T) (Object) value));
829 | }
830 | };
831 | }
832 |
833 | if (clazz.equals(Long.class)) {
834 | valueInspector = new ValueInspector() {
835 | @Override
836 | public void updateNull() {
837 | setResult(udp.keep(null));
838 | }
839 |
840 | @SuppressWarnings("unchecked")
841 | @Override
842 | public void update(long value) {
843 | setResult(udp.keep((T) (Object) value));
844 | }
845 | };
846 | }
847 |
848 | if (clazz.equals(Boolean.class)) {
849 | valueInspector = new ValueInspector() {
850 | @Override
851 | public void updateNull() {
852 | setResult(udp.keep(null));
853 | }
854 |
855 | @SuppressWarnings("unchecked")
856 | @Override
857 | public void update(boolean value) {
858 | setResult(udp.keep((T) (Object) value));
859 | }
860 | };
861 | }
862 |
863 | if (clazz.equals(Float.class)) {
864 | valueInspector = new ValueInspector() {
865 | @Override
866 | public void updateNull() {
867 | setResult(udp.keep(null));
868 | }
869 |
870 | @SuppressWarnings("unchecked")
871 | @Override
872 | public void update(float value) {
873 | setResult(udp.keep((T) (Object) value));
874 | }
875 | };
876 | }
877 |
878 | if (clazz.equals(Double.class)) {
879 | valueInspector = new ValueInspector() {
880 | @Override
881 | public void updateNull() {
882 | setResult(udp.keep(null));
883 | }
884 |
885 | @SuppressWarnings("unchecked")
886 | @Override
887 | public void update(double value) {
888 | setResult(udp.keep((T) (Object) value));
889 | }
890 | };
891 | }
892 |
893 | if (clazz.equals(Binary.class)) {
894 | valueInspector = new ValueInspector() {
895 | @Override
896 | public void updateNull() {
897 | setResult(udp.keep(null));
898 | }
899 |
900 | @SuppressWarnings("unchecked")
901 | @Override
902 | public void update(Binary value) {
903 | setResult(udp.keep((T) (Object) value));
904 | }
905 | };
906 | }
907 |
908 | if (valueInspector == null) {
909 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
910 | }
911 |
912 | addValueInspector(columnPath, valueInspector);
913 | return valueInspector;
914 | }
915 |
916 | @Override
917 | public , U extends UserDefinedPredicate> IncrementallyUpdatedFilterPredicate visit(LogicalNotUserDefined notPred) {
918 | UserDefined pred = notPred.getUserDefined();
919 | ColumnPath columnPath = pred.getColumn().getColumnPath();
920 | Class clazz = pred.getColumn().getColumnType();
921 |
922 | ValueInspector valueInspector = null;
923 |
924 | final U udp = pred.getUserDefinedPredicate();
925 |
926 | if (clazz.equals(Integer.class)) {
927 | valueInspector = new ValueInspector() {
928 | @Override
929 | public void updateNull() {
930 | setResult(!udp.keep(null));
931 | }
932 |
933 | @SuppressWarnings("unchecked")
934 | @Override
935 | public void update(int value) {
936 | setResult(!udp.keep((T) (Object) value));
937 | }
938 | };
939 | }
940 |
941 | if (clazz.equals(Long.class)) {
942 | valueInspector = new ValueInspector() {
943 | @Override
944 | public void updateNull() {
945 | setResult(!udp.keep(null));
946 | }
947 |
948 | @SuppressWarnings("unchecked")
949 | @Override
950 | public void update(long value) {
951 | setResult(!udp.keep((T) (Object) value));
952 | }
953 | };
954 | }
955 |
956 | if (clazz.equals(Boolean.class)) {
957 | valueInspector = new ValueInspector() {
958 | @Override
959 | public void updateNull() {
960 | setResult(!udp.keep(null));
961 | }
962 |
963 | @SuppressWarnings("unchecked")
964 | @Override
965 | public void update(boolean value) {
966 | setResult(!udp.keep((T) (Object) value));
967 | }
968 | };
969 | }
970 |
971 | if (clazz.equals(Float.class)) {
972 | valueInspector = new ValueInspector() {
973 | @Override
974 | public void updateNull() {
975 | setResult(!udp.keep(null));
976 | }
977 |
978 | @SuppressWarnings("unchecked")
979 | @Override
980 | public void update(float value) {
981 | setResult(!udp.keep((T) (Object) value));
982 | }
983 | };
984 | }
985 |
986 | if (clazz.equals(Double.class)) {
987 | valueInspector = new ValueInspector() {
988 | @Override
989 | public void updateNull() {
990 | setResult(!udp.keep(null));
991 | }
992 |
993 | @SuppressWarnings("unchecked")
994 | @Override
995 | public void update(double value) {
996 | setResult(!udp.keep((T) (Object) value));
997 | }
998 | };
999 | }
1000 |
1001 | if (clazz.equals(Binary.class)) {
1002 | valueInspector = new ValueInspector() {
1003 | @Override
1004 | public void updateNull() {
1005 | setResult(!udp.keep(null));
1006 | }
1007 |
1008 | @SuppressWarnings("unchecked")
1009 | @Override
1010 | public void update(Binary value) {
1011 | setResult(!udp.keep((T) (Object) value));
1012 | }
1013 | };
1014 | }
1015 |
1016 | if (valueInspector == null) {
1017 | throw new IllegalArgumentException("Encountered unknown type " + clazz);
1018 | }
1019 |
1020 | addValueInspector(columnPath, valueInspector);
1021 | return valueInspector;
1022 | }
1023 |
1024 | }
1025 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.hadoop;
20 |
21 | import java.io.IOException;
22 | import java.util.Collections;
23 | import java.util.HashMap;
24 | import java.util.HashSet;
25 | import java.util.List;
26 | import java.util.Map;
27 |
28 | import java.util.Set;
29 | import org.apache.hadoop.conf.Configuration;
30 | import org.apache.hadoop.fs.Path;
31 |
32 | import org.apache.parquet.Log;
33 | import org.apache.parquet.column.ColumnDescriptor;
34 | import org.apache.parquet.column.page.PageReadStore;
35 | import org.apache.parquet.filter.UnboundRecordFilter;
36 | import org.apache.parquet.filter2.compat.FilterCompat;
37 | import org.apache.parquet.filter2.compat.FilterCompat.Filter;
38 | import org.apache.parquet.hadoop.api.InitContext;
39 | import org.apache.parquet.hadoop.api.ReadSupport;
40 | import org.apache.parquet.hadoop.metadata.BlockMetaData;
41 | import org.apache.parquet.hadoop.util.counters.BenchmarkCounter;
42 | import org.apache.parquet.io.ColumnIOFactory;
43 | import org.apache.parquet.io.MessageColumnIO;
44 | import org.apache.parquet.io.ParquetDecodingException;
45 | import org.apache.parquet.io.api.RecordMaterializer;
46 | import org.apache.parquet.schema.GroupType;
47 | import org.apache.parquet.schema.MessageType;
48 | import org.apache.parquet.schema.Type;
49 |
50 | import static java.lang.String.format;
51 | import static org.apache.parquet.Log.DEBUG;
52 | import static org.apache.parquet.Preconditions.checkNotNull;
53 | import static org.apache.parquet.hadoop.ParquetInputFormat.STRICT_TYPE_CHECKING;
54 | /** Assembles records of type T from a Parquet file's row groups, materializing them via the supplied ReadSupport. */
55 | class InternalParquetRecordReader<T> {
56 |   private static final Log LOG = Log.getLog(InternalParquetRecordReader.class);
57 | 
58 |   private final ColumnIOFactory columnIOFactory = new ColumnIOFactory();
59 |   private final Filter filter;
60 | 
61 |   private MessageType requestedSchema;
62 |   private MessageType fileSchema;
63 |   private int columnCount;
64 |   private final ReadSupport<T> readSupport;
65 | 
66 |   private RecordMaterializer<T> recordConverter;
67 | 
68 |   private T currentValue;
69 |   private long total;
70 |   private long current = 0;
71 |   private int currentBlock = -1;
72 |   private ParquetFileReader reader;
73 |   private org.apache.parquet.io.RecordReader<T> recordReader;
74 |   private boolean strictTypeChecking;
75 | 
76 |   private long totalTimeSpentReadingBytes;
77 |   private long totalTimeSpentProcessingRecords;
78 |   private long startedAssemblingCurrentBlockAt;
79 | 
80 |   private long totalCountLoadedSoFar = 0;
81 | 
82 |   private Path file;
83 | 
84 |   /**
85 |    * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
86 |    * @param filter for filtering individual records
87 |    */
88 |   public InternalParquetRecordReader(ReadSupport<T> readSupport, Filter filter) {
89 |     this.readSupport = readSupport;
90 |     this.filter = checkNotNull(filter, "filter");
91 |   }
92 | 
93 |   /**
94 |    * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
95 |    */
96 |   public InternalParquetRecordReader(ReadSupport<T> readSupport) {
97 |     this(readSupport, FilterCompat.NOOP);
98 |   }
99 | 
100 |   /**
101 |    * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
102 |    * @param filter Optional filter for only returning matching records.
103 |    * @deprecated use {@link #InternalParquetRecordReader(ReadSupport, Filter)}
104 |    */
105 |   @Deprecated
106 |   public InternalParquetRecordReader(ReadSupport<T> readSupport, UnboundRecordFilter filter) {
107 |     this(readSupport, FilterCompat.get(filter));
108 |   }
109 | 
110 |   private void checkRead() throws IOException {
111 |     if (current == totalCountLoadedSoFar) { // current row group exhausted: read the next one
112 |       if (current != 0) {
113 |         totalTimeSpentProcessingRecords += (System.currentTimeMillis() - startedAssemblingCurrentBlockAt);
114 |         if (Log.INFO) {
115 |           LOG.info("Assembled and processed " + totalCountLoadedSoFar + " records from " + columnCount + " columns in " + totalTimeSpentProcessingRecords + " ms: "+((float)totalCountLoadedSoFar / totalTimeSpentProcessingRecords) + " rec/ms, " + ((float)totalCountLoadedSoFar * columnCount / totalTimeSpentProcessingRecords) + " cell/ms");
116 |           final long totalTime = totalTimeSpentProcessingRecords + totalTimeSpentReadingBytes;
117 |           if (totalTime != 0) {
118 |             final long percentReading = 100 * totalTimeSpentReadingBytes / totalTime;
119 |             final long percentProcessing = 100 * totalTimeSpentProcessingRecords / totalTime;
120 |             LOG.info("time spent so far " + percentReading + "% reading ("+totalTimeSpentReadingBytes+" ms) and " + percentProcessing + "% processing ("+totalTimeSpentProcessingRecords+" ms)");
121 |           }
122 |         }
123 |       }
124 | 
125 |       LOG.info("at row " + current + ". reading next block");
126 |       long t0 = System.currentTimeMillis();
127 |       PageReadStore pages = reader.readNextRowGroup();
128 |       if (pages == null) {
129 |         throw new IOException("expecting more rows but reached last block. Read " + current + " out of " + total);
130 |       }
131 |       long timeSpentReading = System.currentTimeMillis() - t0;
132 |       totalTimeSpentReadingBytes += timeSpentReading;
133 |       BenchmarkCounter.incrementTime(timeSpentReading);
134 |       if (Log.INFO) LOG.info("block read in memory in " + timeSpentReading + " ms. row count = " + pages.getRowCount());
135 |       if (Log.DEBUG) LOG.debug("initializing Record assembly with requested schema " + requestedSchema);
136 |       MessageColumnIO columnIO = columnIOFactory.getColumnIO(requestedSchema, fileSchema, strictTypeChecking);
137 |       recordReader = columnIO.getRecordReader(pages, recordConverter, filter);
138 |       startedAssemblingCurrentBlockAt = System.currentTimeMillis();
139 |       totalCountLoadedSoFar += pages.getRowCount();
140 |       ++ currentBlock;
141 |     }
142 |   }
143 | 
144 |   public void close() throws IOException {
145 |     if (reader != null) {
146 |       reader.close();
147 |     }
148 |   }
149 | 
150 |   public Void getCurrentKey() throws IOException, InterruptedException {
151 |     return null;
152 |   }
153 | 
154 |   public T getCurrentValue() throws IOException,
155 |       InterruptedException {
156 |     return currentValue;
157 |   }
158 | 
159 |   public float getProgress() throws IOException, InterruptedException {
160 |     return (float) current / total;
161 |   }
162 | 
163 |   public void initialize(MessageType fileSchema,
164 |       Map<String, String> fileMetadata,
165 |       Path file, List<BlockMetaData> blocks, Configuration configuration)
166 |       throws IOException {
167 |     // initialize a ReadContext for this file
168 |     ReadSupport.ReadContext readContext = readSupport.init(new InitContext(
169 |         configuration, toSetMultiMap(fileMetadata), fileSchema));
170 |     this.requestedSchema = readContext.getRequestedSchema();
171 |     this.fileSchema = fileSchema;
172 |     this.file = file;
173 |     this.columnCount = requestedSchema.getPaths().size();
174 |     this.recordConverter = readSupport.prepareForRead(
175 |         configuration, fileMetadata, fileSchema, readContext);
176 |     this.strictTypeChecking = configuration.getBoolean(STRICT_TYPE_CHECKING, true);
177 |     List<ColumnDescriptor> columns = requestedSchema.getColumns();
178 |     reader = new ParquetFileReader(configuration, file, blocks, columns);
179 |     for (BlockMetaData block : blocks) {
180 |       total += block.getRowCount();
181 |     }
182 |     LOG.info("RecordReader initialized will read a total of " + total + " records.");
183 |   }
184 | 
185 |   private boolean contains(GroupType group, String[] path, int index) {
186 |     if (index == path.length) {
187 |       return false;
188 |     }
189 |     if (group.containsField(path[index])) {
190 |       Type type = group.getType(path[index]);
191 |       if (type.isPrimitive()) {
192 |         return index + 1 == path.length;
193 |       } else {
194 |         return contains(type.asGroupType(), path, index + 1);
195 |       }
196 |     }
197 |     return false;
198 |   }
199 | 
200 |   public boolean nextKeyValue() throws IOException, InterruptedException {
201 |     boolean recordFound = false;
202 | 
203 |     while (!recordFound) {
204 |       // no more records left
205 |       if (current >= total) { return false; }
206 | 
207 |       try {
208 |         checkRead();
209 |         currentValue = recordReader.read();
210 |         current ++;
211 |         if (recordReader.shouldSkipCurrentRecord()) {
212 |           // this record is being filtered via the filter2 package
213 |           if (DEBUG) LOG.debug("skipping record");
214 |           continue;
215 |         }
216 | 
217 |         if (currentValue == null) {
218 |           // only happens with FilteredRecordReader at end of block
219 |           current = totalCountLoadedSoFar;
220 |           if (DEBUG) LOG.debug("filtered record reader reached end of block");
221 |           continue;
222 |         }
223 | 
224 |         recordFound = true;
225 | 
226 |         if (DEBUG) LOG.debug("read value: " + currentValue);
227 |       } catch (RuntimeException e) {
228 |         throw new ParquetDecodingException(format("Can not read value at %d in block %d in file %s", current, currentBlock, file), e);
229 |       }
230 |     }
231 |     return true;
232 |   }
233 | 
234 |   private static Map<String, Set<String>> toSetMultiMap(Map<String, String> map) {
235 |     Map<String, Set<String>> setMultiMap = new HashMap<String, Set<String>>();
236 |     for (Map.Entry<String, String> entry : map.entrySet()) {
237 |       Set<String> set = new HashSet<String>();
238 |       set.add(entry.getValue());
239 |       setMultiMap.put(entry.getKey(), Collections.unmodifiableSet(set));
240 |     }
241 |     return Collections.unmodifiableMap(setMultiMap);
242 |   }
243 | 
244 | }
245 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java.orig.txt:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.hadoop;
20 |
21 | import java.io.IOException;
22 | import java.util.Collections;
23 | import java.util.HashMap;
24 | import java.util.HashSet;
25 | import java.util.List;
26 | import java.util.Map;
27 |
28 | import java.util.Set;
29 | import org.apache.hadoop.conf.Configuration;
30 | import org.apache.hadoop.fs.Path;
31 |
32 | import org.apache.parquet.Log;
33 | import org.apache.parquet.column.ColumnDescriptor;
34 | import org.apache.parquet.column.page.PageReadStore;
35 | import org.apache.parquet.filter.UnboundRecordFilter;
36 | import org.apache.parquet.filter2.compat.FilterCompat;
37 | import org.apache.parquet.filter2.compat.FilterCompat.Filter;
38 | import org.apache.parquet.hadoop.api.InitContext;
39 | import org.apache.parquet.hadoop.api.ReadSupport;
40 | import org.apache.parquet.hadoop.metadata.BlockMetaData;
41 | import org.apache.parquet.hadoop.util.counters.BenchmarkCounter;
42 | import org.apache.parquet.io.ColumnIOFactory;
43 | import org.apache.parquet.io.MessageColumnIO;
44 | import org.apache.parquet.io.ParquetDecodingException;
45 | import org.apache.parquet.io.api.RecordMaterializer;
46 | import org.apache.parquet.schema.GroupType;
47 | import org.apache.parquet.schema.MessageType;
48 | import org.apache.parquet.schema.Type;
49 |
50 | import static java.lang.String.format;
51 | import static org.apache.parquet.Log.DEBUG;
52 | import static org.apache.parquet.Preconditions.checkNotNull;
53 | import static org.apache.parquet.hadoop.ParquetInputFormat.STRICT_TYPE_CHECKING;
54 |
55 | class InternalParquetRecordReader {
56 | private static final Log LOG = Log.getLog(InternalParquetRecordReader.class);
57 |
58 | private final ColumnIOFactory columnIOFactory = new ColumnIOFactory();
59 | private final Filter filter;
60 |
61 | private MessageType requestedSchema;
62 | private MessageType fileSchema;
63 | private int columnCount;
64 | private final ReadSupport readSupport;
65 |
66 | private RecordMaterializer recordConverter;
67 |
68 | private T currentValue;
69 | private long total;
70 | private long current = 0;
71 | private int currentBlock = -1;
72 | private ParquetFileReader reader;
73 | private org.apache.parquet.io.RecordReader recordReader;
74 | private boolean strictTypeChecking;
75 |
76 | private long totalTimeSpentReadingBytes;
77 | private long totalTimeSpentProcessingRecords;
78 | private long startedAssemblingCurrentBlockAt;
79 |
80 | private long totalCountLoadedSoFar = 0;
81 |
82 | private Path file;
83 |
84 | /**
85 | * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
86 | * @param filter for filtering individual records
87 | */
88 | public InternalParquetRecordReader(ReadSupport readSupport, Filter filter) {
89 | this.readSupport = readSupport;
90 | this.filter = checkNotNull(filter, "filter");
91 | }
92 |
93 | /**
94 | * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
95 | */
96 | public InternalParquetRecordReader(ReadSupport readSupport) {
97 | this(readSupport, FilterCompat.NOOP);
98 | }
99 |
100 | /**
101 | * @param readSupport Object which helps reads files of the given type, e.g. Thrift, Avro.
102 | * @param filter Optional filter for only returning matching records.
103 | * @deprecated use {@link #InternalParquetRecordReader(ReadSupport, Filter)}
104 | */
105 | @Deprecated
106 | public InternalParquetRecordReader(ReadSupport readSupport, UnboundRecordFilter filter) {
107 | this(readSupport, FilterCompat.get(filter));
108 | }
109 |
110 | private void checkRead() throws IOException {
111 | if (current == totalCountLoadedSoFar) {
112 | if (current != 0) {
113 | totalTimeSpentProcessingRecords += (System.currentTimeMillis() - startedAssemblingCurrentBlockAt);
114 | if (Log.INFO) {
115 | LOG.info("Assembled and processed " + totalCountLoadedSoFar + " records from " + columnCount + " columns in " + totalTimeSpentProcessingRecords + " ms: "+((float)totalCountLoadedSoFar / totalTimeSpentProcessingRecords) + " rec/ms, " + ((float)totalCountLoadedSoFar * columnCount / totalTimeSpentProcessingRecords) + " cell/ms");
116 | final long totalTime = totalTimeSpentProcessingRecords + totalTimeSpentReadingBytes;
117 | if (totalTime != 0) {
118 | final long percentReading = 100 * totalTimeSpentReadingBytes / totalTime;
119 | final long percentProcessing = 100 * totalTimeSpentProcessingRecords / totalTime;
120 | LOG.info("time spent so far " + percentReading + "% reading ("+totalTimeSpentReadingBytes+" ms) and " + percentProcessing + "% processing ("+totalTimeSpentProcessingRecords+" ms)");
121 | }
122 | }
123 | }
124 |
125 | LOG.info("at row " + current + ". reading next block");
126 | long t0 = System.currentTimeMillis();
127 | PageReadStore pages = reader.readNextRowGroup();
128 | if (pages == null) {
129 | throw new IOException("expecting more rows but reached last block. Read " + current + " out of " + total);
130 | }
131 | long timeSpentReading = System.currentTimeMillis() - t0;
132 | totalTimeSpentReadingBytes += timeSpentReading;
133 | BenchmarkCounter.incrementTime(timeSpentReading);
134 | if (Log.INFO) LOG.info("block read in memory in " + timeSpentReading + " ms. row count = " + pages.getRowCount());
135 | if (Log.DEBUG) LOG.debug("initializing Record assembly with requested schema " + requestedSchema);
136 | MessageColumnIO columnIO = columnIOFactory.getColumnIO(requestedSchema, fileSchema, strictTypeChecking);
137 | recordReader = columnIO.getRecordReader(pages, recordConverter, filter);
138 | startedAssemblingCurrentBlockAt = System.currentTimeMillis();
139 | totalCountLoadedSoFar += pages.getRowCount();
140 | ++ currentBlock;
141 | }
142 | }
143 |
144 | public void close() throws IOException {
145 | if (reader != null) {
146 | reader.close();
147 | }
148 | }
149 |
150 | public Void getCurrentKey() throws IOException, InterruptedException {
151 | return null;
152 | }
153 |
154 | public T getCurrentValue() throws IOException,
155 | InterruptedException {
156 | return currentValue;
157 | }
158 |
159 | public float getProgress() throws IOException, InterruptedException {
160 | return (float) current / total;
161 | }
162 |
163 | public void initialize(MessageType fileSchema,
164 | Map fileMetadata,
165 | Path file, List blocks, Configuration configuration)
166 | throws IOException {
167 | // initialize a ReadContext for this file
168 | ReadSupport.ReadContext readContext = readSupport.init(new InitContext(
169 | configuration, toSetMultiMap(fileMetadata), fileSchema));
170 | this.requestedSchema = readContext.getRequestedSchema();
171 | this.fileSchema = fileSchema;
172 | this.file = file;
173 | this.columnCount = requestedSchema.getPaths().size();
174 | this.recordConverter = readSupport.prepareForRead(
175 | configuration, fileMetadata, fileSchema, readContext);
176 | this.strictTypeChecking = configuration.getBoolean(STRICT_TYPE_CHECKING, true);
177 | List columns = requestedSchema.getColumns();
178 | reader = new ParquetFileReader(configuration, file, blocks, columns);
179 | for (BlockMetaData block : blocks) {
180 | total += block.getRowCount();
181 | }
182 | LOG.info("RecordReader initialized will read a total of " + total + " records.");
183 | }
184 |
185 | private boolean contains(GroupType group, String[] path, int index) {
186 | if (index == path.length) {
187 | return false;
188 | }
189 | if (group.containsField(path[index])) {
190 | Type type = group.getType(path[index]);
191 | if (type.isPrimitive()) {
192 | return index + 1 == path.length;
193 | } else {
194 | return contains(type.asGroupType(), path, index + 1);
195 | }
196 | }
197 | return false;
198 | }
199 |
200 | public boolean nextKeyValue() throws IOException, InterruptedException {
201 | boolean recordFound = false;
202 |
203 | while (!recordFound) {
204 | // no more records left
205 | if (current >= total) { return false; }
206 |
207 | try {
208 | checkRead();
209 | currentValue = recordReader.read();
210 | current ++;
211 | if (recordReader.shouldSkipCurrentRecord()) {
212 | // this record is being filtered via the filter2 package
213 | if (DEBUG) LOG.debug("skipping record");
214 | continue;
215 | }
216 |
217 | if (currentValue == null) {
218 | // only happens with FilteredRecordReader at end of block
219 | current = totalCountLoadedSoFar;
220 | if (DEBUG) LOG.debug("filtered record reader reached end of block");
221 | continue;
222 | }
223 |
224 | recordFound = true;
225 |
226 | if (DEBUG) LOG.debug("read value: " + currentValue);
227 | } catch (RuntimeException e) {
228 | throw new ParquetDecodingException(format("Can not read value at %d in block %d in file %s", current, currentBlock, file), e);
229 | }
230 | }
231 | return true;
232 | }
233 |
234 | private static Map> toSetMultiMap(Map map) {
235 | Map> setMultiMap = new HashMap>();
236 | for (Map.Entry entry : map.entrySet()) {
237 | Set set = new HashSet();
238 | set.add(entry.getValue());
239 | setMultiMap.put(entry.getKey(), Collections.unmodifiableSet(set));
240 | }
241 | return Collections.unmodifiableMap(setMultiMap);
242 | }
243 |
244 | }
245 |
--------------------------------------------------------------------------------
/code base/parquet_playground/src/main/java/org/apache/parquet/hadoop/ParquetReader.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing,
13 | * software distributed under the License is distributed on an
14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | * KIND, either express or implied. See the License for the
16 | * specific language governing permissions and limitations
17 | * under the License.
18 | */
19 | package org.apache.parquet.hadoop;
20 |
21 | import static org.apache.parquet.Preconditions.checkNotNull;
22 |
23 | import java.io.Closeable;
24 | import java.io.IOException;
25 | import java.util.Arrays;
26 | import java.util.Iterator;
27 | import java.util.List;
28 |
29 | import org.apache.hadoop.conf.Configuration;
30 | import org.apache.hadoop.fs.FileStatus;
31 | import org.apache.hadoop.fs.FileSystem;
32 | import org.apache.hadoop.fs.Path;
33 |
34 | import org.apache.parquet.filter.UnboundRecordFilter;
35 | import org.apache.parquet.filter2.compat.FilterCompat;
36 | import org.apache.parquet.filter2.compat.FilterCompat.Filter;
37 | import org.apache.parquet.filter2.compat.RowGroupFilter;
38 | import org.apache.parquet.hadoop.api.ReadSupport;
39 | import org.apache.parquet.hadoop.metadata.BlockMetaData;
40 | import org.apache.parquet.hadoop.util.HiddenFileFilter;
41 | import org.apache.parquet.schema.MessageType;
42 |
43 | /**
44 | * Read records from a Parquet file.
45 | * TODO: too many constructors (https://issues.apache.org/jira/browse/PARQUET-39)
46 | */
47 | public class ParquetReader implements Closeable {
48 |
49 | private final ReadSupport readSupport;
50 | private final Configuration conf;
51 | private final Iterator