org.apache.hive.hcatalog.data.schema.HCatFieldSchema source code examples

Listed below are example usages of org.apache.hive.hcatalog.data.schema.HCatFieldSchema. Each example links to its project on GitHub, where the full source file can be viewed.
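Before the examples, here is a minimal orientation sketch (not taken from any of the projects below; the class name HCatFieldSchemaSketch is made up for illustration) showing how an HCatSchema is assembled from HCatFieldSchema instances and how field names and types are read back. It assumes the classic primitive-type constructor HCatFieldSchema(String, Type, String):

import java.util.Arrays;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatFieldSchemaSketch {
  public static void main(String[] args) throws HCatException {
    // Build a two-column schema; (name, Type, comment) declares primitive fields.
    HCatFieldSchema id = new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "row id");
    HCatFieldSchema msg = new HCatFieldSchema("msg", HCatFieldSchema.Type.STRING, "payload");
    HCatSchema schema = new HCatSchema(Arrays.asList(id, msg));

    // Inspect the schema the same way the examples below do.
    for (String name : schema.getFieldNames()) {
      HCatFieldSchema field = schema.get(name);
      System.out.println(schema.getPosition(name) + ": " + field.getName()
          + " -> " + field.getTypeString() + " (complex=" + field.isComplex() + ")");
    }
  }
}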

Example 1  Project: Flink-CEPplus   File: HCatInputFormatBase.java
/**
 * Specifies that the InputFormat returns Flink tuples instead of
 * {@link org.apache.hive.hcatalog.data.HCatRecord}.
 *
 * <p>Note: Flink tuples might only support a limited number of fields (depending on the API).
 *
 * @return This InputFormat.
 * @throws org.apache.hive.hcatalog.common.HCatException
 */
public HCatInputFormatBase<T> asFlinkTuples() throws HCatException {

	// build type information
	int numFields = outputSchema.getFields().size();
	if (numFields > this.getMaxFlinkTupleSize()) {
		throw new IllegalArgumentException("Only up to " + this.getMaxFlinkTupleSize() +
				" fields can be returned as Flink tuples.");
	}

	TypeInformation[] fieldTypes = new TypeInformation[numFields];
	fieldNames = new String[numFields];
	for (String fieldName : outputSchema.getFieldNames()) {
		HCatFieldSchema field = outputSchema.get(fieldName);

		int fieldPos = outputSchema.getPosition(fieldName);
		TypeInformation fieldType = getFieldType(field);

		fieldTypes[fieldPos] = fieldType;
		fieldNames[fieldPos] = fieldName;

	}
	this.resultType = new TupleTypeInfo(fieldTypes);

	return this;
}
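The getFieldType(field) helper called above belongs to HCatInputFormatBase and is not part of this excerpt. As a rough sketch only, not Flink's actual implementation and covering just a few primitive types, such a mapping from HCatFieldSchema to Flink TypeInformation could look like this (the class and method names are invented for the sketch):

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

class FieldTypeSketch {
  // Maps a primitive HCatFieldSchema to a Flink TypeInformation; complex types omitted.
  static TypeInformation<?> toFlinkType(HCatFieldSchema field) {
    switch (field.getType()) {
      case BOOLEAN: return BasicTypeInfo.BOOLEAN_TYPE_INFO;
      case INT:     return BasicTypeInfo.INT_TYPE_INFO;
      case BIGINT:  return BasicTypeInfo.LONG_TYPE_INFO;
      case FLOAT:   return BasicTypeInfo.FLOAT_TYPE_INFO;
      case DOUBLE:  return BasicTypeInfo.DOUBLE_TYPE_INFO;
      case STRING:  return BasicTypeInfo.STRING_TYPE_INFO;
      case BINARY:  return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
      default:
        throw new IllegalArgumentException(
            "Unsupported HCatalog type: " + field.getTypeString());
    }
  }
}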
 
Example 2  Project: ES-Fastloader   File: NormalTransformer.java
public NormalTransformer(Reducer.Context context, IndexInfo indexInfo) throws Exception {
    Map<String, String> esTypeMap = indexInfo.getTypeMap();

    HCatSchema hCatSchema = HCatInputFormat.getTableSchema(context.getConfiguration());
    List<HCatFieldSchema> hCatFieldSchemas = hCatSchema.getFields();
    for(HCatFieldSchema hCatFieldSchema : hCatFieldSchemas) {
        String fieldName = hCatFieldSchema.getName();
        String hiveType = hCatFieldSchema.getTypeString();

        if(esTypeMap.containsKey(fieldName)) {
            String esType = esTypeMap.get(fieldName);
            typeList.add(Type.matchESType(fieldName, esType, indexInfo));
        } else {
            typeList.add(Type.matchHiveType(fieldName, hiveType, indexInfo));
        }
    }
}
 
Example 3  Project: flink   File: HCatInputFormatBase.java
/**
 * Specifies that the InputFormat returns Flink tuples instead of
 * {@link org.apache.hive.hcatalog.data.HCatRecord}.
 *
 * <p>Note: Flink tuples might only support a limited number of fields (depending on the API).
 *
 * @return This InputFormat.
 * @throws org.apache.hive.hcatalog.common.HCatException
 */
public HCatInputFormatBase<T> asFlinkTuples() throws HCatException {

	// build type information
	int numFields = outputSchema.getFields().size();
	if (numFields > this.getMaxFlinkTupleSize()) {
		throw new IllegalArgumentException("Only up to " + this.getMaxFlinkTupleSize() +
				" fields can be returned as Flink tuples.");
	}

	TypeInformation[] fieldTypes = new TypeInformation[numFields];
	fieldNames = new String[numFields];
	for (String fieldName : outputSchema.getFieldNames()) {
		HCatFieldSchema field = outputSchema.get(fieldName);

		int fieldPos = outputSchema.getPosition(fieldName);
		TypeInformation fieldType = getFieldType(field);

		fieldTypes[fieldPos] = fieldType;
		fieldNames[fieldPos] = fieldName;

	}
	this.resultType = new TupleTypeInfo(fieldTypes);

	return this;
}
 
public void validateHCatTableFieldTypes() throws IOException {
  StringBuilder sb = new StringBuilder();
  boolean hasComplexFields = false;
  for (HCatFieldSchema hfs : projectedSchema.getFields()) {
    if (hfs.isComplex()) {
      sb.append('.').append(hfs.getName());
      hasComplexFields = true;
    }
  }

  if (hasComplexFields) {
    String unsupportedFields = sb.substring(1);
    throw new IOException("The HCatalog table provided "
      + getQualifiedHCatTableName() + " has complex field types ("
      + unsupportedFields + ").  They are currently not supported");
  }

}
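isComplex() returns true for ARRAY, MAP, and STRUCT fields. As a hypothetical illustration of the kind of column the check above rejects (the class and method names here are invented for this sketch), a struct field is declared by passing a nested HCatSchema:

import java.util.Arrays;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

class ComplexFieldSketch {
  // Declares a struct<lat:double,lon:double> column; isComplex() returns true for it,
  // so validateHCatTableFieldTypes() above would report it as unsupported.
  static HCatFieldSchema locationStruct() throws HCatException {
    HCatSchema structSchema = new HCatSchema(Arrays.asList(
        new HCatFieldSchema("lat", HCatFieldSchema.Type.DOUBLE, null),
        new HCatFieldSchema("lon", HCatFieldSchema.Type.DOUBLE, null)));
    return new HCatFieldSchema("location", HCatFieldSchema.Type.STRUCT, structSchema,
        "example complex column");
  }
}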
 
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  String s = cr.isExternal() ? cr.toString() : cr.getData();

  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
 
public void testIntTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN, 0, 0,
      Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
    // Netezza does not have tinyint
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "smallint", Types.SMALLINT, HCatFieldSchema.Type.INT, 0, 0, 100,
      (short) 100, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "int", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 1000,
      1000, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 0, 0, 10000L,
      10000L, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
 
public void testIntTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN, 0, 0,
      Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
    // Netezza does not have tinyint
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 100,
      100, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "int", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 1000,
      1000, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 0, 0, 10000L,
      10000L, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  super.runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testIntTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN, 0, 0,
      Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "tinyint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 10,
      10, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 100,
      100, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "int", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 1000,
      1000, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4),
      "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 0, 0, 10000L,
      10000L, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testFloatTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "float", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0, 10.0F,
      10.F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "real", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0, 20.0F,
      20.0F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "double", Types.DOUBLE, HCatFieldSchema.Type.DOUBLE, 0, 0, 30.0D,
      30.0D, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
        HiveDecimal.create(new BigDecimal("2000")),
        new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testDateTypesToBigInt() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  long offset = TimeZone.getDefault().getRawOffset();
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "date", Types.DATE, HCatFieldSchema.Type.BIGINT, 0, 0, 0 - offset,
      new Date(70, 0, 1), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "time", Types.TIME, HCatFieldSchema.Type.BIGINT, 0, 0,
      36672000L - offset, new Time(10, 11, 12), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.BIGINT, 0, 0,
      36672000L - offset, new Timestamp(70, 0, 1, 10, 11, 12, 0),
      KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--map-column-hive");
  addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
        new HiveChar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
        new HiveVarchar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testBinaryTypes() throws Exception {
  ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 1, 2 });
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "binary(10)", Types.BINARY, HCatFieldSchema.Type.BINARY, 0, 0,
      bb.array(), bb.array(), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "longvarbinary", Types.BINARY, HCatFieldSchema.Type.BINARY, 0, 0,
      bb.array(), bb.array(), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testColumnProjection() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      null, null, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  List<String> cfgParams = new ArrayList<String>();
  cfgParams.add("-D");
  cfgParams.add(SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP
    + "=true");
  setConfigParams(cfgParams);
  String[] colNames = new String[] { "ID", "MSG" };
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, colNames);
}
 
public void testColumnProjectionMissingPartKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      null, null, KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  List<String> cfgParams = new ArrayList<String>();
  cfgParams.add("-D");
  cfgParams.add(SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP
    + "=true");
  setConfigParams(cfgParams);
  String[] colNames = new String[] { "ID", "MSG" };
  try {
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, colNames);
    fail("Column projection with missing dynamic partition keys must fail");
  } catch (Throwable t) {
    LOG.info("Job fails as expected : " + t);
    StringWriter sw = new StringWriter();
    t.printStackTrace(new PrintWriter(sw));
    LOG.info("Exception stack trace = " + sw);
  }
}
 
public void testStaticPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testStaticPartitioningWithMultipleKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testStaticAndDynamicPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testMultipleStaticKeysAndDynamicPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "3", "3", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
/**
 * Test other file formats.
 */
public void testSequenceFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
          "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
          "1", "1", KeyType.STATIC_KEY),
      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
          "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
          "2", "2", KeyType.DYNAMIC_KEY), };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testTextFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_TEXT);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testTableCreation() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
    null, true, false);
}
 
public void testTableCreationWithPartition() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
     new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
     new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
public void testTableCreationWithMultipleStaticPartKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
public void testTableCreationWithStorageStanza() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  addlArgsArray.add("--hcatalog-storage-stanza");
  addlArgsArray.add(HCatalogTestUtils.STORED_AS_TEXT);
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
public void testHiveDropDelims() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-drop-import-delims");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testHiveDelimsReplacement() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "^^^Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "^^^Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-delims-replacement");
  addlArgsArray.add("^");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
public void testCreateTableWithPreExistingTable() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY), };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  try {
    // Precreate table
    utils.createHCatTable(CreateMode.CREATE, TOTAL_RECORDS, table, cols);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
      null, true, false);
    fail("HCatalog job with --create-hcatalog-table and pre-existing"
      + " table should fail");
  } catch (Exception e) {
    LOG.debug("Caught expected exception while running "
      + " create-hcatalog-table with pre-existing table test", e);
  }
}
 
public void testIntTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN, 0, 0,
      Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "tinyint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 10,
      10, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 100,
      100, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "int", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0, 1000,
      1000, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4),
      "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 0, 0, 10000L,
      10000L, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
 
public void testFloatTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "float", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0, 10.0F,
      10.F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "real", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0, 20.0F,
      20.0F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "double", Types.DOUBLE, HCatFieldSchema.Type.DOUBLE, 0, 0, 30.0D,
      30.0D, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}