diff --git a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeyInfo.java b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeyInfo.java
new file mode 100644
index 000000000..b8900d125
--- /dev/null
+++ b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeyInfo.java
@@ -0,0 +1,25 @@
+package com.netflix.metacat.common.server.connectors.model;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Key Info.
+ */
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@EqualsAndHashCode(callSuper = false)
+public class KeyInfo implements Serializable {
+    private static final long serialVersionUID = 7254898853779135216L;
+
+    private String name;
+    private List<String> fields;
+}
diff --git a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeySetInfo.java b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeySetInfo.java
new file mode 100644
index 000000000..26aae1ff0
--- /dev/null
+++ b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/KeySetInfo.java
@@ -0,0 +1,88 @@
+package com.netflix.metacat.common.server.connectors.model;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * KeySet Info.
+ */
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@EqualsAndHashCode(callSuper = false)
+public class KeySetInfo implements Serializable {
+    private static final long serialVersionUID = 3659843901964058788L;
+
+    private static final String PARTITION_KEY_DEFAULT_NAME = "partition";
+    private static final String PRIMARY_KEY_DEFAULT_NAME = "primary";
+    private static final String SORT_KEY_DEFAULT_NAME = "sort";
+    private static final String INDEX_KEY_DEFAULT_NAME = "index";
+
+    private List<KeyInfo> partition;
+    private List<KeyInfo> primary;
+    private List<KeyInfo> sort;
+    private List<KeyInfo> index;
+
+    /**
+     * builds a keyset from fieldInfo list.
+     *
+     * @param fields list of fieldInfo
+     * @return keyset
+     */
+    public static KeySetInfo buildKeySet(final List<FieldInfo> fields) {
+        return buildKeySet(fields, null);
+    }
+
+    /**
+     * builds a keyset from fieldInfo list and primary key list.
+     *
+     * @param fields list of fieldInfo
+     * @param primary list of primary keys
+     * @return keyset
+     */
+    public static KeySetInfo buildKeySet(final List<FieldInfo> fields, final List<KeyInfo> primary) {
+        if (fields == null) {
+            return null;
+        } else if (fields.isEmpty()) {
+            return new KeySetInfo();
+        }
+
+        final List<String> partitionKeys = new LinkedList<>();
+        final List<String> sortKeys = new LinkedList<>();
+        final List<String> indexKeys = new LinkedList<>();
+        for (FieldInfo field : fields) {
+            if (field.isPartitionKey()) {
+                partitionKeys.add(field.getName());
+            }
+            if (field.getIsSortKey() != null && field.getIsSortKey()) {
+                sortKeys.add(field.getName());
+            }
+            if (field.getIsIndexKey() != null && field.getIsIndexKey()) {
+                indexKeys.add(field.getName());
+            }
+        }
+
+        final KeySetInfo keySetInfo = new KeySetInfo();
+        keySetInfo.partition = partitionKeys.isEmpty() ? Collections.emptyList()
+            : Arrays.asList(
+                KeyInfo.builder().name(PARTITION_KEY_DEFAULT_NAME).fields(partitionKeys).build());
+        keySetInfo.sort = sortKeys.isEmpty() ? Collections.emptyList()
+            : Arrays.asList(KeyInfo.builder().name(SORT_KEY_DEFAULT_NAME).fields(sortKeys).build());
+        keySetInfo.index = indexKeys.isEmpty() ? Collections.emptyList()
+            : Arrays.asList(KeyInfo.builder().name(INDEX_KEY_DEFAULT_NAME).fields(indexKeys).build());
+        keySetInfo.primary = (primary == null || primary.isEmpty()) ? Collections.emptyList()
+            : primary;
+
+        return keySetInfo;
+    }
+}
diff --git a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/TableInfo.java b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/TableInfo.java
index b0f28a194..a61bc33b3 100644
--- a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/TableInfo.java
+++ b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/connectors/model/TableInfo.java
@@ -40,6 +40,7 @@ public class TableInfo extends BaseInfo {
     private List<FieldInfo> fields;
     private StorageInfo serde;
     private ViewInfo view;
+    private KeySetInfo keys;
 
     /**
      * Constructor.
@@ -57,11 +58,35 @@ private TableInfo(
         final Map<String, String> metadata,
         final List<FieldInfo> fields,
         final StorageInfo serde,
-        final ViewInfo view
+        final ViewInfo view,
+        final KeySetInfo keys
     ) {
         super(name, auditInfo, metadata);
         this.fields = fields;
        this.serde = serde;
         this.view = view;
+        this.keys = keys;
+    }
+
+    /**
+     * builds key set info from the fields.
+     *
+     * @return key set
+     */
+    public KeySetInfo getKeys() {
+        if (this.keys != null) {
+            return keys;
+        }
+        keys = KeySetInfo.buildKeySet(this.fields);
+        return keys;
+    }
+
+    /**
+     * sets the keyset.
+     *
+     * @param keys keyset to be set
+     */
+    public void setKeys(final KeySetInfo keys) {
+        this.keys = keys;
     }
 }
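For orientation, a small usage sketch of the model classes above (not part of the patch). It assumes FieldInfo exposes a Lombok builder with name/partitionKey setters, consistent with the accessors buildKeySet calls but not shown in this diff; the column names are illustrative.

    import java.util.Arrays;
    import java.util.List;

    final List<FieldInfo> fields = Arrays.asList(
        FieldInfo.builder().name("dateint").partitionKey(true).build(),
        FieldInfo.builder().name("esn").partitionKey(false).build());
    final KeySetInfo keySet = KeySetInfo.buildKeySet(fields);
    // keySet.getPartition() -> a single KeyInfo named "partition" with fields ["dateint"]
    // keySet.getSort(), getIndex(), getPrimary() -> empty lists (no sort/index flags, no primary keys passed)
    // TableInfo.getKeys() performs the same buildKeySet call lazily on first access.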
diff --git a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/converter/ConverterUtil.java b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/converter/ConverterUtil.java
index 05690c312..a1c0ce232 100644
--- a/metacat-common-server/src/main/java/com/netflix/metacat/common/server/converter/ConverterUtil.java
+++ b/metacat-common-server/src/main/java/com/netflix/metacat/common/server/converter/ConverterUtil.java
@@ -25,6 +25,8 @@
 import com.netflix.metacat.common.dto.DatabaseDto;
 import com.netflix.metacat.common.dto.FieldDto;
 import com.netflix.metacat.common.dto.GetPartitionsRequestDto;
+import com.netflix.metacat.common.dto.KeyDto;
+import com.netflix.metacat.common.dto.KeySetDto;
 import com.netflix.metacat.common.dto.ViewDto;
 import com.netflix.metacat.common.dto.Pageable;
 import com.netflix.metacat.common.dto.PartitionDto;
@@ -39,6 +41,8 @@
 import com.netflix.metacat.common.server.connectors.model.ClusterInfo;
 import com.netflix.metacat.common.server.connectors.model.DatabaseInfo;
 import com.netflix.metacat.common.server.connectors.model.FieldInfo;
+import com.netflix.metacat.common.server.connectors.model.KeyInfo;
+import com.netflix.metacat.common.server.connectors.model.KeySetInfo;
 import com.netflix.metacat.common.server.connectors.model.ViewInfo;
 import com.netflix.metacat.common.server.connectors.model.PartitionInfo;
 import com.netflix.metacat.common.server.connectors.model.PartitionListRequest;
@@ -101,6 +105,8 @@ protected void configure() {
                 mapping(AuditDto.class, AuditInfo.class);
                 mapping(ViewDto.class, ViewInfo.class);
                 mapping(StorageDto.class, StorageInfo.class);
+                mapping(KeySetDto.class, KeySetInfo.class);
+                mapping(KeyDto.class, KeyInfo.class);
             }
         };
         dozerBeanMapper.addMapping(builder);
diff --git a/metacat-common-server/src/test/groovy/com/netflix/metacat/common/server/converter/ConverterUtilSpec.groovy b/metacat-common-server/src/test/groovy/com/netflix/metacat/common/server/converter/ConverterUtilSpec.groovy
index 4dc456628..5eed8f9b9 100644
--- a/metacat-common-server/src/test/groovy/com/netflix/metacat/common/server/converter/ConverterUtilSpec.groovy
+++ b/metacat-common-server/src/test/groovy/com/netflix/metacat/common/server/converter/ConverterUtilSpec.groovy
@@ -50,8 +50,13 @@ class ConverterUtilSpec extends Specification {
         given:
         def dto = new TableDto(name: QualifiedName.ofTable('prodhive', 'amajumdar', 'part'),
             audit: new AuditDto('test', new Date(), 'test', new Date()),
-            fields: [FieldDto.builder().name('esn').type('string').source_type('string').jsonType(new TextNode('string')).pos(0).build()] ,
-            serde: new StorageDto(owner: 'test'))
+            fields: [FieldDto.builder().name('esn').type('string').source_type('string')
+                         .jsonType(new TextNode('string')).pos(0).build()] ,
+            serde: new StorageDto(owner: 'test'),
+            keys: KeySetDto.builder()
+                .partition(Arrays.asList(KeyDto.builder().name('partition').fields(Arrays.asList('esn')).build()))
+                .build()
+        )
         when:
         def info = converter.fromTableDto(dto)
         def resultDto = converter.toTableDto(info)
@@ -65,7 +70,11 @@ class ConverterUtilSpec extends Specification {
             audit: new AuditDto('test', new Date(), 'test', new Date()),
             fields: [FieldDto.builder().name('esn').type('string').source_type('string').jsonType(new TextNode('string')).pos(0).build()],
             serde: new StorageDto(owner: 'test'),
-            view: new ViewDto("select test", "select test2"))
+            view: new ViewDto("select test", "select test2"),
+            keys: KeySetDto.builder()
+                .partition(Arrays.asList(KeyDto.builder().name('partition').fields(Arrays.asList('esn')).build()))
+                .build()
+        )
         when:
         def info = converter.fromTableDto(dto)
         def resultDto = converter.toTableDto(info)
diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/dto/ClusterDto.java b/metacat-common/src/main/java/com/netflix/metacat/common/dto/ClusterDto.java
index 11ec016ec..ffa51cceb 100644
--- a/metacat-common/src/main/java/com/netflix/metacat/common/dto/ClusterDto.java
+++ b/metacat-common/src/main/java/com/netflix/metacat/common/dto/ClusterDto.java
@@ -1,3 +1,20 @@
+/*
+ *
+ * Copyright 2016 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
 package com.netflix.metacat.common.dto;
 
 import io.swagger.annotations.ApiModel;
diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeyDto.java b/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeyDto.java
new file mode 100644
index 000000000..8d454f1c2
--- /dev/null
+++ b/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeyDto.java
@@ -0,0 +1,41 @@
+/*
+ *
+ * Copyright 2016 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package com.netflix.metacat.common.dto;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.NoArgsConstructor;
+
+import java.util.List;
+
+/**
+ * Key DTO.
+ */
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@EqualsAndHashCode(callSuper = false)
+public class KeyDto extends BaseDto {
+    private static final long serialVersionUID = 5511551575484406779L;
+
+    private String name;
+    private List<String> fields;
+}
diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeySetDto.java b/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeySetDto.java
new file mode 100644
index 000000000..caaae2314
--- /dev/null
+++ b/metacat-common/src/main/java/com/netflix/metacat/common/dto/KeySetDto.java
@@ -0,0 +1,43 @@
+/*
+ *
+ * Copyright 2016 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package com.netflix.metacat.common.dto;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.NoArgsConstructor;
+
+import java.util.List;
+
+/**
+ * KeySet DTO.
+ */
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@EqualsAndHashCode(callSuper = false)
+public class KeySetDto extends BaseDto {
+    private static final long serialVersionUID = 6250093794701822334L;
+
+    private List<KeyDto> partition;
+    private List<KeyDto> primary;
+    private List<KeyDto> sort;
+    private List<KeyDto> index;
+}
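As a point of reference, a sketch of how the two DTOs compose (not part of the patch; the builder methods come from Lombok's @Builder above, and the key and column names are made up):

    import java.util.Arrays;
    import java.util.Collections;

    final KeyDto partitionKey = KeyDto.builder()
        .name("partition")
        .fields(Arrays.asList("dateint", "hour"))
        .build();
    final KeySetDto keys = KeySetDto.builder()
        .partition(Collections.singletonList(partitionKey))
        .build();
    // Dozer maps KeySetDto <-> KeySetInfo and KeyDto <-> KeyInfo via the ConverterUtil entries above;
    // once keys is set on a TableDto, getPartition_keys() (changed below) answers ["dateint", "hour"].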
diff --git a/metacat-common/src/main/java/com/netflix/metacat/common/dto/TableDto.java b/metacat-common/src/main/java/com/netflix/metacat/common/dto/TableDto.java
index c00d3d79f..c4f36f92a 100644
--- a/metacat-common/src/main/java/com/netflix/metacat/common/dto/TableDto.java
+++ b/metacat-common/src/main/java/com/netflix/metacat/common/dto/TableDto.java
@@ -72,6 +72,10 @@ public class TableDto extends BaseDto implements HasDataMetadata, HasDefinitionM
     //Naming as view required by dozer mapping
     private ViewDto view;
 
+    @ApiModelProperty(value = "keys defined in the table")
+    @JsonProperty
+    private KeySetDto keys;
+
     @Nonnull
     @Override
     @JsonIgnore
@@ -97,19 +101,25 @@ public QualifiedName getDefinitionName() {
     @JsonProperty
     @SuppressWarnings("checkstyle:methodname")
     public List<String> getPartition_keys() {
-        if (fields == null) {
-            return null;
-        } else if (fields.isEmpty()) {
-            return Collections.emptyList();
-        }
+        if (this.keys == null) {
+            if (fields == null) {
+                return null;
+            } else if (fields.isEmpty()) {
+                return Collections.emptyList();
+            }
 
-        final List<String> keys = new LinkedList<>();
-        for (FieldDto field : fields) {
-            if (field.isPartition_key()) {
-                keys.add(field.getName());
+            final List<String> partitionKeys = new LinkedList<>();
+            for (FieldDto field : fields) {
+                if (field.isPartition_key()) {
+                    partitionKeys.add(field.getName());
+                }
             }
+            return partitionKeys;
         }
-        return keys;
+
+        return this.keys.getPartition().isEmpty()
+            ? Collections.EMPTY_LIST
+            : this.keys.getPartition().get(0).getFields();
     }
 
     /**
@@ -120,6 +130,19 @@ public List<String> getPartition_keys() {
     public void setPartition_keys(final List<String> ignored) {
     }
 
+    @JsonIgnore
+    public KeySetDto getKeys() {
+        return keys;
+    }
+
+    /**
+     * Sets the keyset.
+     * @param keys keyset
+     */
+    public void setKeys(final KeySetDto keys) {
+        this.keys = keys;
+    }
+
     @Override
     @JsonProperty
     public boolean isDataExternal() {
diff --git a/metacat-connector-jdbc/src/main/java/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableService.java b/metacat-connector-jdbc/src/main/java/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableService.java
index 1b95d0c84..bd154aa0c 100644
--- a/metacat-connector-jdbc/src/main/java/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableService.java
+++ b/metacat-connector-jdbc/src/main/java/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableService.java
@@ -28,6 +28,8 @@
 import com.netflix.metacat.common.server.connectors.exception.ConnectorException;
 import com.netflix.metacat.common.server.connectors.exception.TableNotFoundException;
 import com.netflix.metacat.common.server.connectors.model.FieldInfo;
+import com.netflix.metacat.common.server.connectors.model.KeyInfo;
+import com.netflix.metacat.common.server.connectors.model.KeySetInfo;
 import com.netflix.metacat.common.server.connectors.model.TableInfo;
 import com.netflix.metacat.connector.jdbc.JdbcExceptionMapper;
 import com.netflix.metacat.connector.jdbc.JdbcTypeConverter;
@@ -45,8 +47,11 @@
 import java.sql.ResultSet;
 import java.sql.SQLDataException;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 /**
  * Generic JDBC implementation of the ConnectorTableService.
@@ -140,9 +145,37 @@ public TableInfo get(@Nonnull final ConnectorRequestContext context, @Nonnull fi
             if (fieldInfos.isEmpty() && !exists(context, name)) {
                 throw new TableNotFoundException(name);
             }
+
+            final Map<String, List<String>> pkMap = new HashMap<>();
+            try (ResultSet primaryKeys = this.getPrimaryKeys(connection, name)) {
+                while (primaryKeys.next()) {
+                    final String tableName = primaryKeys.getString("TABLE_NAME");
+                    if (!tableName.equals(name.getTableName())) {
+                        continue;
+                    }
+                    final String pkName = primaryKeys.getString("PK_NAME");
+                    final String columnName = primaryKeys.getString("COLUMN_NAME");
+                    final int keySeq = Integer.parseInt(primaryKeys.getString("KEY_SEQ"));
+                    if (pkMap.containsKey(pkName)) {
+                        pkMap.get(pkName).add(keySeq, columnName);
+                    } else {
+                        final List<String> columnArray = new ArrayList<>();
+                        columnArray.add(columnName);
+                        pkMap.put(pkName, columnArray);
+                    }
+                }
+            }
+
+            final List<KeyInfo> primaryKeys = new ArrayList<>();
+            for (Map.Entry<String, List<String>> entry : pkMap.entrySet()) {
+                primaryKeys.add(KeyInfo.builder().name(entry.getKey()).fields(entry.getValue()).build());
+            }
+            final KeySetInfo keySetInfo = KeySetInfo.buildKeySet(fieldInfos, primaryKeys);
+
             // Set table details
             final TableInfo result = TableInfo.builder().name(name).fields(fields.build()).build();
             setTableInfoDetails(connection, result);
+            result.setKeys(keySetInfo);
             log.debug("Finished getting table metadata for qualified name {} for request {}", name, context);
             return result;
         } catch (final SQLException se) {
@@ -340,7 +373,18 @@ protected ResultSet getColumns(
         );
     }
 
-    /**
+    protected ResultSet getPrimaryKeys(
+        @Nonnull @NonNull final Connection connection,
+        @Nonnull @NonNull final QualifiedName name) throws SQLException {
+        final String database = name.getDatabaseName();
+        final DatabaseMetaData metaData = connection.getMetaData();
+        return metaData.getPrimaryKeys(
+            database,
+            database,
+            name.getTableName());
+    }
+
+    /**
      * Rebuild a source type definition.
     *
      * @param type The base type e.g. VARCHAR
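One JDBC detail that matters for the block above: java.sql.DatabaseMetaData.getPrimaryKeys returns one row per key column, ordered by COLUMN_NAME, with a 1-based KEY_SEQ. The loop seeds each list with whatever column it sees first at index 0 and then calls add(keySeq, columnName) for later rows, so a composite key whose later row carries KEY_SEQ 2 targets index 2 of a one-element list and throws IndexOutOfBoundsException; even when it does not throw, the first column lands at index 0 regardless of its sequence number. Below is a sketch of an ordering-safe variant; the helper name readPrimaryKeys and its standalone shape are illustrative, not part of the patch.

    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    // Groups key columns by PK_NAME and orders them by KEY_SEQ, so result-set row order does not matter.
    static List<KeyInfo> readPrimaryKeys(final ResultSet rs, final String tableName) throws SQLException {
        final Map<String, TreeMap<Integer, String>> byPkName = new HashMap<>();
        while (rs.next()) {
            if (!tableName.equals(rs.getString("TABLE_NAME"))) {
                continue;
            }
            byPkName.computeIfAbsent(rs.getString("PK_NAME"), key -> new TreeMap<>())
                .put(rs.getInt("KEY_SEQ"), rs.getString("COLUMN_NAME"));
        }
        final List<KeyInfo> keys = new ArrayList<>();
        byPkName.forEach((pkName, columns) ->
            keys.add(KeyInfo.builder().name(pkName).fields(new ArrayList<>(columns.values())).build()));
        return keys;
    }

For what it is worth, the Spock expectations below (KEY_SEQ "1", "0", "0" yielding ["column2", "column1"] for PK1 and ["column3"] for PK2) would also hold under this KEY_SEQ ordering.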
diff --git a/metacat-connector-jdbc/src/test/groovy/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableServiceSpec.groovy b/metacat-connector-jdbc/src/test/groovy/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableServiceSpec.groovy
index ce9c6cbc4..3621953ab 100644
--- a/metacat-connector-jdbc/src/test/groovy/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableServiceSpec.groovy
+++ b/metacat-connector-jdbc/src/test/groovy/com/netflix/metacat/connector/jdbc/services/JdbcConnectorTableServiceSpec.groovy
@@ -99,13 +99,14 @@ class JdbcConnectorTableServiceSpec extends Specification {
         def connection = Mock(Connection)
         def metadata = Mock(DatabaseMetaData)
         def columnResultSet = Mock(ResultSet)
+        def primaryKeyResultSet = Mock(ResultSet)
 
         when:
         def tableInfo = this.service.get(this.context, qName)
 
         then:
         1 * this.dataSource.getConnection() >> connection
-        1 * connection.getMetaData() >> metadata
+        2 * connection.getMetaData() >> metadata
         1 * metadata.getColumns(database, database, table, JdbcConnectorUtils.MULTI_CHARACTER_SEARCH) >> columnResultSet
         4 * columnResultSet.next() >>> [true, true, true, false]
         3 * columnResultSet.getString("REMARKS") >>> ["comment1", "comment2", "comment3"]
@@ -125,8 +126,23 @@ class JdbcConnectorTableServiceSpec extends Specification {
 
         tableInfo.getFields().get(0).getName() == "column1"
         tableInfo.getFields().get(1).getName() == "column2"
         tableInfo.getFields().get(2).getName() == "column3"
-    }
 
+        1 * metadata.getPrimaryKeys(database, database, table) >> primaryKeyResultSet
+        4 * primaryKeyResultSet.next() >>> [true, true, true, false]
+        3 * primaryKeyResultSet.getString("TABLE_NAME") >>> [table, table, table]
+        3 * primaryKeyResultSet.getString("PK_NAME") >>> ["PK1", "PK1", "PK2"]
+        3 * primaryKeyResultSet.getString("COLUMN_NAME") >>> ["column1", "column2", "column3"]
+        3 * primaryKeyResultSet.getString("KEY_SEQ") >>> ["1", "0", "0"]
+
+        tableInfo.keys.primary.size() == 2
+        def pk1 = tableInfo.keys.primary.find {keyInfo -> keyInfo.name == "PK1"}
+        pk1.fields.size() == 2
+        pk1.fields.get(0) == "column2"
+        pk1.fields.get(1) == "column1"
+        def pk2 = tableInfo.keys.primary.find {keyInfo -> keyInfo.name == "PK2"}
+        pk2.fields.size() == 1
+        pk2.fields.get(0) == "column3"
+    }
 
     def "Cannot get table metadata"() {
         def catalog = UUID.randomUUID().toString()
diff --git a/metacat-connector-snowflake/src/test/groovy/com/netflix/metacat/connector/snowflake/SnowflakeConnectorTableServiceSpec.groovy b/metacat-connector-snowflake/src/test/groovy/com/netflix/metacat/connector/snowflake/SnowflakeConnectorTableServiceSpec.groovy
index fad1ba320..9dbc93dbf 100644
--- a/metacat-connector-snowflake/src/test/groovy/com/netflix/metacat/connector/snowflake/SnowflakeConnectorTableServiceSpec.groovy
+++ b/metacat-connector-snowflake/src/test/groovy/com/netflix/metacat/connector/snowflake/SnowflakeConnectorTableServiceSpec.groovy
@@ -98,6 +98,7 @@ class SnowflakeConnectorTableServiceSpec extends Specification {
         def metadata = Mock(DatabaseMetaData)
         def resultset = Mock(ResultSet)
         def resultset1 = Mock(ResultSet)
+        def primaryKeyResultSet = Mock(ResultSet)
         dataSource.getConnection() >> connection
         connection.getMetaData() >> metadata
         def database = UUID.randomUUID().toString().toUpperCase()
@@ -113,6 +114,8 @@ class SnowflakeConnectorTableServiceSpec extends Specification {
         1 * resultset.next() >> false
         1 * metadata.getTables(_,_,_,_) >> resultset1
         1 * resultset1.next() >> true
+        1 * metadata.getPrimaryKeys(_,_,_) >> primaryKeyResultSet
+        1 * primaryKeyResultSet.next() >> false
 
         when:
         this.service.get(context, qName)
diff --git a/metacat-testdata-provider/src/main/groovy/com/netflix/metacat/testdata/provider/DataDtoProvider.groovy b/metacat-testdata-provider/src/main/groovy/com/netflix/metacat/testdata/provider/DataDtoProvider.groovy
index 060769569..4b470f2dc 100644
--- a/metacat-testdata-provider/src/main/groovy/com/netflix/metacat/testdata/provider/DataDtoProvider.groovy
+++ b/metacat-testdata-provider/src/main/groovy/com/netflix/metacat/testdata/provider/DataDtoProvider.groovy
@@ -39,54 +39,60 @@ class DataDtoProvider {
             uri = String.format("s3://wh/%s.db/%s", databaseName, tableName);
         }
         return new TableDto(
-                name: QualifiedName.ofTable(sourceName, databaseName, tableName),
-                serde: new StorageDto(
-                    owner: owner,
-                    inputFormat: 'org.apache.hadoop.mapred.TextInputFormat',
-                    outputFormat: 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',
-                    serializationLib: 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',
-                    serdeInfoParameters: [
-                        'serialization.format': '1'
-                    ],
-                    uri: uri
+            name: QualifiedName.ofTable(sourceName, databaseName, tableName),
+            serde: new StorageDto(
+                owner: owner,
+                inputFormat: 'org.apache.hadoop.mapred.TextInputFormat',
+                outputFormat: 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',
+                serializationLib: 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',
+                serdeInfoParameters: [
+                    'serialization.format': '1'
+                ],
+                uri: uri
+            ),
+            audit: new AuditDto(
+                createdBy: owner,
+                createdDate: new Date(),
+                lastModifiedBy: owner,
+                lastModifiedDate: new Date()
+            ),
+            fields: [
+                new FieldDto(
+                    comment: 'added 1st - partition key',
+                    name: 'field1',
+                    pos: 0,
+                    type: 'string',
+                    partition_key: true
                 ),
-                audit: new AuditDto(
-                    createdBy: owner,
-                    createdDate: new Date(),
-                    lastModifiedBy: owner,
-                    lastModifiedDate: new Date()
+                new FieldDto(
+                    comment: 'added 2st',
+                    name: 'field2',
+                    pos: 1,
+                    type: 'string',
+                    partition_key: false
                 ),
-                fields: [
-                    new FieldDto(
-                        comment: 'added 1st - partition key',
-                        name: 'field1',
-                        pos: 0,
-                        type: 'string',
-                        partition_key: true
-                    ),
-                    new FieldDto(
-                        comment: 'added 2st',
-                        name: 'field2',
-                        pos: 1,
-                        type: 'string',
-                        partition_key: false
-                    ),
-                    new FieldDto(
-                        comment: 'added 3st - partition key',
-                        name: 'field3',
-                        pos: 2,
-                        type: 'string',
-                        partition_key: true
-                    ),
-                    new FieldDto(
-                        comment: 'added 4st',
-                        name: 'field4',
-                        pos: 3,
-                        type: 'string',
-                        partition_key: false
-                    )
-                ],
-                definitionMetadata: getDefinitionMetadata(owner)
+                new FieldDto(
+                    comment: 'added 3st - partition key',
+                    name: 'field3',
+                    pos: 2,
+                    type: 'string',
+                    partition_key: true
+                ),
+                new FieldDto(
+                    comment: 'added 4st',
+                    name: 'field4',
+                    pos: 3,
+                    type: 'string',
+                    partition_key: false
+                )
+            ],
+            definitionMetadata: getDefinitionMetadata(owner),
+            keys: new KeySetDto(
+                partition: [ new KeyDto(
+                    name: "primary",
+                    fields: ["field1", "field3"]
+                )]
+            )
         )
     }
 