// Example usages of the org.apache.spark.sql.types.FloatType API; follow the link to view the full source on GitHub.
/**
 * Converts a Spark SQL schema into the equivalent IndexR {@link SegmentSchema}.
 *
 * @param sparkSchema the Spark fields to translate, in order
 * @param isIndexed   callback deciding, by column name, whether a column is indexed
 * @return a SegmentSchema containing one ColumnSchema per Spark field
 * @throws IllegalStateException if a field has a Spark type with no IndexR mapping
 */
public static SegmentSchema sparkSchemaToIndexRSchema(List<StructField> sparkSchema, IsIndexed isIndexed) {
    List<ColumnSchema> indexrColumns = new ArrayList<>();
    for (StructField field : sparkSchema) {
        DataType sparkType = field.dataType();
        SQLType indexrType;
        // Map each supported Spark type onto its IndexR SQL counterpart.
        if (sparkType instanceof IntegerType) {
            indexrType = SQLType.INT;
        } else if (sparkType instanceof LongType) {
            indexrType = SQLType.BIGINT;
        } else if (sparkType instanceof FloatType) {
            indexrType = SQLType.FLOAT;
        } else if (sparkType instanceof DoubleType) {
            indexrType = SQLType.DOUBLE;
        } else if (sparkType instanceof StringType) {
            indexrType = SQLType.VARCHAR;
        } else if (sparkType instanceof DateType) {
            indexrType = SQLType.DATE;
        } else if (sparkType instanceof TimestampType) {
            indexrType = SQLType.DATETIME;
        } else {
            // Decimal, binary, nested types etc. have no IndexR equivalent.
            throw new IllegalStateException("Unsupported type: " + sparkType);
        }
        indexrColumns.add(new ColumnSchema(field.name(), indexrType, isIndexed.apply(field.name())));
    }
    return new SegmentSchema(indexrColumns);
}
/**
 * Generic accessor: reads the value at {@code ordinal} using the getter that
 * matches the requested Spark {@code dataType}.
 *
 * @param ordinal  zero-based position of the field within this row
 * @param dataType expected Spark type of the field, used to pick the typed getter
 * @return the value boxed appropriately for the type, or {@code null} if the
 *         type is not one of the handled Spark types
 */
@Override
@SuppressWarnings("checkstyle:CyclomaticComplexity")
public Object get(int ordinal, DataType dataType) {
    // Spark DataType subclasses are disjoint, so flat if-return dispatch is safe.
    if (dataType instanceof IntegerType) {
        return getInt(ordinal);
    }
    if (dataType instanceof LongType) {
        return getLong(ordinal);
    }
    if (dataType instanceof StringType) {
        return getUTF8String(ordinal);
    }
    if (dataType instanceof FloatType) {
        return getFloat(ordinal);
    }
    if (dataType instanceof DoubleType) {
        return getDouble(ordinal);
    }
    if (dataType instanceof DecimalType) {
        // Decimal needs precision/scale carried over from the declared type.
        DecimalType dt = (DecimalType) dataType;
        return getDecimal(ordinal, dt.precision(), dt.scale());
    }
    if (dataType instanceof BinaryType) {
        return getBinary(ordinal);
    }
    if (dataType instanceof StructType) {
        return getStruct(ordinal, ((StructType) dataType).size());
    }
    if (dataType instanceof ArrayType) {
        return getArray(ordinal);
    }
    if (dataType instanceof MapType) {
        return getMap(ordinal);
    }
    if (dataType instanceof BooleanType) {
        return getBoolean(ordinal);
    }
    if (dataType instanceof ByteType) {
        return getByte(ordinal);
    }
    if (dataType instanceof ShortType) {
        return getShort(ordinal);
    }
    // Unhandled types (e.g. NullType) fall through to null, matching Spark's contract.
    return null;
}
/**
 * Converts an IndexR {@link SegmentSchema} into the equivalent list of Spark
 * {@link StructField}s. This is the inverse of {@code sparkSchemaToIndexRSchema}.
 *
 * <p>All resulting fields are marked non-nullable with empty metadata, mirroring
 * the original implementation.
 *
 * @param schema the IndexR segment schema to translate
 * @return Spark fields in the same order as the IndexR columns
 * @throws IllegalStateException if a column has a SQL type with no Spark mapping
 */
public static List<StructField> indexrSchemaToSparkSchema(SegmentSchema schema) {
    List<StructField> fields = new ArrayList<>();
    for (ColumnSchema cs : schema.getColumns()) {
        DataType dataType;
        switch (cs.getSqlType()) {
            case INT:
                dataType = DataTypes.IntegerType;
                break;
            case BIGINT:
                dataType = DataTypes.LongType;
                break;
            case FLOAT:
                dataType = DataTypes.FloatType;
                break;
            case DOUBLE:
                dataType = DataTypes.DoubleType;
                break;
            case VARCHAR:
                dataType = DataTypes.StringType;
                break;
            case DATE:
                dataType = DataTypes.DateType;
                break;
            case DATETIME:
                dataType = DataTypes.TimestampType;
                break;
            default:
                throw new IllegalStateException("Unsupported type: " + cs.getSqlType());
        }
        // StructField's nullable parameter is a primitive boolean from Java;
        // passing `false` directly avoids the pointless scala.Boolean.box/unbox round trip.
        fields.add(new StructField(cs.getName(), dataType, false, Metadata.empty()));
    }
    return fields;
}