The following lists example code showing how to use the weka.core.WeightedInstancesHandler API, or click the link to view the source code on GitHub.
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the clusterer
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances clusterer...");
  // a clusterer advertises weight support by implementing the marker interface
  boolean handled = m_Clusterer instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the associator
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances associator...");
  // an associator advertises weight support by implementing the marker interface
  boolean handled = m_Associator instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the scheme under test
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances scheme...");
  // the test object advertises weight support by implementing the marker interface
  boolean handled = getTestObject() instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the classifier
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances classifier...");
  // a classifier advertises weight support by implementing the marker interface
  boolean handled = m_Classifier instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the kernel
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances kernel...");
  // a kernel advertises weight support by implementing the marker interface
  boolean handled = m_Kernel instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * Tests whether the scheme declares that it can handle instance weights.
 *
 * @return an array whose first element is true if the estimator
 *         implements WeightedInstancesHandler
 */
protected boolean[] weightedInstancesHandler() {
  boolean[] flags = new boolean[2];
  print("weighted instances estimator...");
  // an estimator advertises weight support by implementing the marker interface
  boolean handled = m_Estimator instanceof WeightedInstancesHandler;
  println(handled ? "yes" : "no");
  flags[0] = handled;
  return flags;
}
/**
 * The standard collective classifier accepts only nominal, binary classes,
 * otherwise an exception is thrown. Additionally, all base classifiers must
 * be able to handle weighted instances.
 *
 * @throws Exception if the data doesn't have a nominal, binary class, or if
 *           any base classifier cannot handle weighted instances
 */
@Override
protected void checkRestrictions() throws Exception {
  super.checkRestrictions();

  // do all base classifiers implement WeightedInstancesHandler?
  // (hoist the array once instead of calling getClassifiers() per iteration,
  // and accumulate with StringBuilder rather than repeated String concatenation)
  Object[] classifiers = getClassifiers();
  StringBuilder nonWeighted = new StringBuilder();
  for (Object classifier : classifiers) {
    if (!(classifier instanceof WeightedInstancesHandler)) {
      if (nonWeighted.length() > 0)
        nonWeighted.append(", ");
      nonWeighted.append(classifier.getClass().getName());
    }
  }

  if (nonWeighted.length() > 0)
    throw new Exception(
      "The following classifier(s) cannot handle weighted instances:\n"
      + nonWeighted);
}
/**
 * Build the classifier on the filtered data.
 *
 * @param data the training data
 * @throws Exception if no base classifier has been set, the data fails the
 *           capabilities check, or the classifier could not be built
 */
public void buildClassifier(Instances data) throws Exception {
if (m_Classifier == null) {
throw new Exception("No base classifier has been set!");
}
// verify the base classifier can actually process this data
getCapabilities().testWithFail(data);
// guard: getRandomNumberGenerator() needs at least one instance,
// otherwise fall back to a plain seeded Random
Random r = (data.numInstances() > 0) ? data.getRandomNumberGenerator(getSeed()) : new Random(getSeed());
data = setUp(data, r);
// if weights vary and the base classifier can't use them, simulate them by resampling
if (!data.allInstanceWeightsIdentical() && !(m_Classifier instanceof WeightedInstancesHandler)) {
data = data.resampleWithWeights(r); // The filter may have assigned weights.
}
// same idea for attribute weights when the classifier can't handle those
if (!data.allAttributeWeightsIdentical() && !(m_Classifier instanceof WeightedAttributesHandler)) {
data = resampleAttributes(data, false, r);
}
// propagate a derived seed so repeated builds are reproducible
if (m_Classifier instanceof Randomizable) {
((Randomizable)m_Classifier).setSeed(r.nextInt());
}
m_Classifier.buildClassifier(data);
}
/**
 * Builds the committee of randomizable classifiers.
 *
 * @param data the training data to be used for generating the
 * bagged classifier.
 * @exception Exception if the data fails the capabilities check or the
 * classifiers could not be built successfully
 * @throws IllegalArgumentException if the base learner is not Randomizable
 */
public void buildClassifier(Instances data) throws Exception {
// can classifier handle the data?
getCapabilities().testWithFail(data);
// remove instances with missing class (work on a copy so the caller's data is untouched)
m_data = new Instances(data);
m_data.deleteWithMissingClass();
super.buildClassifier(m_data);
// every committee member gets a distinct seed, so the base learner must support seeding
if (!(m_Classifier instanceof Randomizable)) {
throw new IllegalArgumentException("Base learner must implement Randomizable!");
}
m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_NumIterations);
Random random = m_data.getRandomNumberGenerator(m_Seed);
// Resample data based on weights if base learner can't handle weights
if (!(m_Classifier instanceof WeightedInstancesHandler)) {
m_data = m_data.resampleWithWeights(random);
}
for (int j = 0; j < m_Classifiers.length; j++) {
// Set the random number seed for the current classifier.
((Randomizable) m_Classifiers[j]).setSeed(random.nextInt());
// NOTE(review): actual training is deliberately deferred to buildClassifiers()
// below; the commented-out direct call is intentionally dead code.
// m_Classifiers[j].buildClassifier(m_data);
}
buildClassifiers();
// save memory
m_data = null;
}
/**
 * Build the classifier on the dimensionally reduced data.
 *
 * @param data the training data
 * @throws Exception if no base classifier, evaluator, or search method has
 *           been set, or if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
if (m_Classifier == null) {
throw new Exception("No base classifier has been set!");
}
if (m_Evaluator == null) {
throw new Exception("No attribute evaluator has been set!");
}
if (m_Search == null) {
throw new Exception("No search method has been set!");
}
// can classifier handle the data?
getCapabilities().testWithFail(data);
// remove instances with missing class (copy first; caller's data stays untouched)
Instances newData = new Instances(data);
newData.deleteWithMissingClass();
// degenerate case: nothing left to select attributes from, just build and bail
if (newData.numInstances() == 0) {
m_Classifier.buildClassifier(newData);
return;
}
if (newData.classAttribute().isNominal()) {
m_numClasses = newData.classAttribute().numValues();
} else {
// numeric class: treated as a single "class"
m_numClasses = 1;
}
Instances resampledData = null;
// check to see if training data has all equal weights
double weight = newData.instance(0).weight();
// 'ok' is true when the instance weights are NOT all equal
boolean ok = false;
for (int i = 1; i < newData.numInstances(); i++) {
if (newData.instance(i).weight() != weight) {
ok = true;
break;
}
}
if (ok) {
// weights vary: if either component can't use weights directly, simulate
// them by resampling with weights (fixed seed, warmed up, for reproducibility).
// If BOTH handle weights, resampledData intentionally stays null — every
// later use is then guarded to pick newData instead.
if (!(m_Evaluator instanceof WeightedInstancesHandler) ||
!(m_Classifier instanceof WeightedInstancesHandler)) {
Random r = new Random(1);
for (int i = 0; i < 10; i++) {
r.nextDouble();
}
resampledData = newData.resampleWithWeights(r);
}
} else {
// all equal weights in the training data so just use as is
resampledData = newData;
}
m_AttributeSelection = new AttributeSelection();
m_AttributeSelection.setEvaluator(m_Evaluator);
m_AttributeSelection.setSearch(m_Search);
long start = System.currentTimeMillis();
// evaluator gets the weighted data only if it can handle weights
m_AttributeSelection.
SelectAttributes((m_Evaluator instanceof WeightedInstancesHandler)
? newData
: resampledData);
long end = System.currentTimeMillis();
// same choice for the classifier: weighted data if supported, resampled otherwise
if (m_Classifier instanceof WeightedInstancesHandler) {
newData = m_AttributeSelection.reduceDimensionality(newData);
m_Classifier.buildClassifier(newData);
} else {
resampledData = m_AttributeSelection.reduceDimensionality(resampledData);
m_Classifier.buildClassifier(resampledData);
}
long end2 = System.currentTimeMillis();
m_numAttributesSelected = m_AttributeSelection.numberAttributesSelected();
// keep a zero-instance header of the reduced format for later filtering
m_ReducedHeader =
new Instances((m_Classifier instanceof WeightedInstancesHandler) ?
newData
: resampledData, 0);
// timings in milliseconds: selection phase vs. selection + build
m_selectionTime = (double)(end - start);
m_totalTime = (double)(end2 - start);
}