The following lists example code showing how to use the org.apache.spark.sql.sources.BaseRelation API class; you can also click the link to view the source code on GitHub.
@Override
public BaseRelation createRelation(SQLContext arg0, Map<String, String> arg1) {
  log.debug("-> createRelation()");

  // arg1 is the Scala map of options passed to the DataFrame reader;
  // convert it to a java.util.Map so it can be iterated with a for-each loop.
  java.util.Map<String, String> javaMap =
      scala.collection.JavaConverters.mapAsJavaMapConverter(arg1).asJava();

  SubStringCounterRelation br = new SubStringCounterRelation();
  br.setSqlContext(arg0);

  // Map the reader options onto the relation: K.PATH selects the input file,
  // and every key starting with K.COUNT adds a substring to count.
  for (java.util.Map.Entry<String, String> entry : javaMap.entrySet()) {
    String key = entry.getKey();
    String value = entry.getValue();
    log.debug("[{}] --> [{}]", key, value);
    if (key.compareTo(K.PATH) == 0) {
      br.setFilename(value);
    } else if (key.startsWith(K.COUNT)) {
      br.addCriteria(value);
    }
  }
  return br;
}
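For context, a relation provider like the one above is normally wired in through the DataFrame reader, with the reader options becoming the map passed to createRelation(). The following is a minimal usage sketch, not taken from the original project: the fully qualified provider class name "x.y.SubStringCounterDataSource" and the option keys "path", "count0", "count1" are assumptions standing in for the real class and the K.PATH / K.COUNT constants.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SubStringCounterUsage {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("substring-counter")
        .master("local[*]")
        .getOrCreate();

    // Hypothetical provider class name; each option key/value pair ends up
    // in the Scala map handled by createRelation() above.
    Dataset<Row> df = spark.read()
        .format("x.y.SubStringCounterDataSource")
        .option("path", "data/books.csv")   // mapped via K.PATH -> setFilename()
        .option("count0", "car")            // keys starting with K.COUNT -> addCriteria()
        .option("count1", "bicycle")
        .load();

    df.show();
  }
}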
@Override
public Dataset<Row> baseRelationToDataFrame(final BaseRelation baseRelation) {
  // Determine whether this conversion was triggered by the user, delegate to
  // the parent implementation, and record the flag before returning the
  // wrapped result.
  final boolean userTriggered = initializeFunction(baseRelation);
  final Dataset<Row> result = Dataset.from(super.baseRelationToDataFrame(baseRelation));
  this.setIsUserTriggered(userTriggered);
  return result;
}
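The override above wraps the stock baseRelationToDataFrame() call, which turns a BaseRelation into a Dataset&lt;Row&gt;. For comparison, here is a minimal sketch of the plain SQLContext call, assuming the SubStringCounterRelation from the first example is on the classpath; the file name and criteria values are purely illustrative.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

public class BaseRelationToDataFrameExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("base-relation-demo")
        .master("local[*]")
        .getOrCreate();
    SQLContext sqlContext = spark.sqlContext();

    // Assumption: reuse the relation from the first example; any concrete
    // BaseRelation implementation would work the same way here.
    SubStringCounterRelation relation = new SubStringCounterRelation();
    relation.setSqlContext(sqlContext);
    relation.setFilename("data/books.csv");
    relation.addCriteria("car");

    // The standard SQLContext#baseRelationToDataFrame wraps the relation in a
    // logical plan and exposes it as a Dataset<Row>.
    Dataset<Row> df = sqlContext.baseRelationToDataFrame(relation);
    df.show();
  }
}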