SQOOP-489. Cannot define partition keys for Hive tables created through Sqoop.

(Cheolsoo Park via Jarek Jarcec Cecho)


git-svn-id: https://svn.apache.org/repos/asf/sqoop/trunk@1344429 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jarek Jarcec Cecho 2012-05-30 19:30:10 +00:00
Parent b0de5ca47c
Commit 4505205588
2 changed files with 88 additions and 2 deletions

View file

@@ -155,7 +155,13 @@ public class TableDefWriter {
}
boolean first = true;
String partitionKey = options.getHivePartitionKey();
for (String col : colNames) {
if (col.equals(partitionKey)) {
throw new IllegalArgumentException("Partition key " + col + " cannot "
+ "be a column to import.");
}
if (!first) {
sb.append(", ");
}
@@ -188,9 +194,9 @@ public class TableDefWriter {
sb.append("COMMENT 'Imported by sqoop on " + curDateStr + "' ");
}
if (options.getHivePartitionKey() != null) {
if (partitionKey != null) {
sb.append("PARTITIONED BY (")
.append(options.getHivePartitionKey())
.append(partitionKey)
.append(" STRING) ");
}

View file

@@ -108,6 +108,28 @@ public class TestHiveImport extends ImportJobTestCase {
return args.toArray(new String[0]);
}
/**
 * Builds the command-line arguments for a create-table only job for Hive
 * imports: any caller-supplied extras first, then the table name and the
 * HSQLDB test connect string.
 *
 * @param includeHadoopFlags currently not consulted here — presumably kept
 *     for signature parity with getArgv(); TODO confirm
 * @param moreArgs extra arguments to place at the front, or null for none
 * @return the argv to supply to a create-table only job for Hive imports.
 */
protected String [] getCreateTableArgv(boolean includeHadoopFlags,
    String [] moreArgs) {
  ArrayList<String> argList = new ArrayList<String>();
  if (moreArgs != null) {
    for (int i = 0; i < moreArgs.length; i++) {
      argList.add(moreArgs[i]);
    }
  }
  argList.add("--table");
  argList.add(getTableName());
  argList.add("--connect");
  argList.add(HsqldbTestServer.getUrl());
  return argList.toArray(new String[0]);
}
/**
* @return the argv to supply to a code-gen only job for Hive imports.
*/
@ -452,4 +474,62 @@ public class TestHiveImport extends ImportJobTestCase {
getArgv(false, moreArgs), new ImportTool());
}
/**
 * If partition key is set to one of importing columns, we should get an
 * IOException.
 */
@Test
public void testImportWithBadPartitionKey() {
  final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
  LOG.info("Doing import of single row into " + TABLE_NAME + " table");
  setCurTableName(TABLE_NAME);
  setNumCols(3);

  String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
  String[] vals = { "'key'", "42", "'I am a row in a partition'", };

  // Use the first column as the (illegal) partition key in every scenario.
  String partitionKey = getColNames()[0];

  // Specify 1st column as partition key and import every column of the
  // table by default (i.e. no --columns option).
  String[] partitionOnlyArgs = {
      "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
      partitionKey,
  };

  // Specify 1st column as both partition key and importing column.
  String[] partitionAndColumnArgs = {
      "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
      partitionKey,
      "--" + BaseSqoopTool.COLUMNS_ARG,
      partitionKey,
  };

  // Test hive-import with the 1st args.
  try {
    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
        getArgv(false, partitionOnlyArgs), new ImportTool());
    fail(TABLE_NAME + " test should have thrown IOException");
  } catch (IOException expected) {
    // expected; ok.
  }

  // Test hive-import with the 2nd args.
  try {
    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
        getArgv(false, partitionAndColumnArgs), new ImportTool());
    fail(TABLE_NAME + " test should have thrown IOException");
  } catch (IOException expected) {
    // expected; ok.
  }

  // Test create-hive-table with the 1st args.
  try {
    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
        getCreateTableArgv(false, partitionOnlyArgs),
        new CreateHiveTableTool());
    fail(TABLE_NAME + " test should have thrown IOException");
  } catch (IOException expected) {
    // expected; ok.
  }
}
}