Skip to content

Commit 8043508

Browse files
committed
updated api usage
1 parent e9a966f commit 8043508

40 files changed

Lines changed: 71 additions & 79 deletions

hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMultiOutputFormat.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -374,7 +374,7 @@ private List<String> getTableData(String table, String database) throws Exception
374374
Hive hive = Hive.get(conf);
375375
org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
376376
FetchWork work;
377-
if (!tbl.getSupportedPartCols().isEmpty()) {
377+
if (!tbl.getPartCols().isEmpty()) {
378378
List<Partition> partitions = hive.getPartitions(tbl);
379379
List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
380380
List<Path> partLocs = new ArrayList<Path>();

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/AlterTableUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ public static boolean isSchemaEvolutionEnabled(Table table, Configuration conf)
7575
}
7676

7777
public static boolean isFullPartitionSpec(Table table, Map<String, String> partitionSpec) {
78-
for (FieldSchema partitionCol : table.getNativePartCols()) {
78+
for (FieldSchema partitionCol : table.getPartCols()) {
7979
if (partitionSpec.get(partitionCol.getName()) == null) {
8080
return false;
8181
}

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ private Table createViewLikeTable(Table oldTable) throws HiveException {
100100
setUserSpecifiedLocation(table);
101101

102102
table.setFields(oldTable.getCols());
103-
table.setPartCols(oldTable.getNativePartCols());
103+
table.setPartCols(oldTable.getPartCols());
104104

105105
if (desc.getDefaultSerdeProps() != null) {
106106
for (Map.Entry<String, String> e : desc.getDefaultSerdeProps().entrySet()) {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ private void getColumnsNoColumnPath(Table table, Partition partition, List<Field
131131
cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ?
132132
table.getCols() : partition.getCols());
133133
if (!desc.isFormatted()) {
134-
cols.addAll(table.getNativePartCols());
134+
cols.addAll(table.getPartCols());
135135
}
136136

137137
// Fetch partition statistics only for describe extended or formatted.

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/formatter/JsonShowTableStatusFormatter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ private Map<String, Object> makeOneTableStatus(Table table, Hive db, HiveConf co
6969

7070
builder.put("partitioned", table.isPartitioned());
7171
if (table.isPartitioned()) {
72-
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getSupportedPartCols(),
72+
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getPartCols(),
7373
Collections.emptyList()));
7474
}
7575

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/status/formatter/TextShowTableStatusFormatter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ private void writeStorageInfo(DataOutputStream out, Partition partition, Table t
7373
private void writeColumnsInfo(DataOutputStream out, Table table) throws IOException, UnsupportedEncodingException {
7474
String columns = MetaStoreUtils.getDDLFromFieldSchema("columns", table.getCols());
7575
String partitionColumns = table.isPartitioned() ?
76-
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getSupportedPartCols()) : "";
76+
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getPartCols()) : "";
7777

7878
out.write(Utilities.newLineCode);
7979
out.write(("columns:" + columns).getBytes(StandardCharsets.UTF_8));

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/PartitionUtils.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import java.util.List;
2424
import java.util.Map;
2525
import java.util.Set;
26-
import java.util.Map.Entry;
2726

2827
import org.apache.hadoop.hive.conf.HiveConf;
2928
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -150,7 +149,7 @@ public static List<Partition> getPartitionsWithSpecs(Hive db, Table table, GetPa
150149
}
151150

152151
private static String tablePartitionColNames(Table table) {
153-
List<FieldSchema> partCols = table.getNativePartCols();
152+
List<FieldSchema> partCols = table.getPartCols();
154153
return String.join("/", partCols.toString());
155154
}
156155

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/exchange/AlterTableExchangePartitionAnalyzer.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
8484
if (AcidUtils.isTransactionalTable(sourceTable) || AcidUtils.isTransactionalTable(destTable)) {
8585
throw new SemanticException(ErrorMsg.EXCHANGE_PARTITION_NOT_ALLOWED_WITH_TRANSACTIONAL_TABLES.getMsg());
8686
}
87-
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getNativePartCols(), partitionSpecs);
87+
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(), partitionSpecs);
8888

8989
// check if source partition exists
9090
GetPartitionsFilterSpec sourcePartitionsFilterSpec = new GetPartitionsFilterSpec();
@@ -106,7 +106,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
106106
throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partitionSpecs.toString()));
107107
}
108108

109-
List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getNativePartCols(), partitionSpecs);
109+
List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(), partitionSpecs);
110110

111111
// check if dest partition exists
112112
GetPartitionsFilterSpec getDestPartitionsFilterSpec = new GetPartitionsFilterSpec();

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/show/ShowPartitionAnalyzer.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws Semant
102102
if (astChild.getType() == HiveParser.TOK_WHERE) {
103103
RowResolver rwsch = new RowResolver();
104104
Map<String, String> colTypes = new HashMap<String, String>();
105-
for (FieldSchema fs : table.getSupportedPartCols()) {
105+
for (FieldSchema fs : table.getPartCols()) {
106106
rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(),
107107
TypeInfoFactory.stringTypeInfo, null, true));
108108
colTypes.put(fs.getName().toLowerCase(), fs.getType());
@@ -202,8 +202,8 @@ private String getShowPartitionsOrder(Table table, ASTNode command) throws Seman
202202
if (astChild.getType() == HiveParser.TOK_ORDERBY) {
203203
Map<String, Integer> poses = new HashMap<String, Integer>();
204204
RowResolver rwsch = new RowResolver();
205-
for (int i = 0; i < table.getSupportedPartCols().size(); i++) {
206-
FieldSchema fs = table.getSupportedPartCols().get(i);
205+
for (int i = 0; i < table.getPartCols().size(); i++) {
206+
FieldSchema fs = table.getPartCols().get(i);
207207
rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(),
208208
TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()), null, true));
209209
poses.put(fs.getName().toLowerCase(), i);

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/storage/archive/AlterTableArchiveOperation.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ private Path getOriginalDir(Table table, PartSpecInfo partitionSpecInfo, List<Pa
129129
// in full partition specification case we allow custom locations to keep backward compatibility
130130
if (partitions.isEmpty()) {
131131
throw new HiveException("No partition matches the specification");
132-
} else if (partitionSpecInfo.values.size() != table.getNativePartCols().size()) {
132+
} else if (partitionSpecInfo.values.size() != table.getPartCols().size()) {
133133
// for partial specifications we need partitions to follow the scheme
134134
for (Partition partition : partitions) {
135135
if (AlterTableArchiveUtils.partitionInCustomLocation(table, partition)) {

0 commit comments

Comments (0)