Source Code Examples for the org.apache.hadoop.io.BooleanWritable Class

The examples below show how the org.apache.hadoop.io.BooleanWritable API is used in practice. Each snippet is taken from an open-source project; the full source of each file can be viewed on GitHub in the project named in its header.
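
As a quick orientation before the project examples, here is a minimal, self-contained sketch of the BooleanWritable API itself (construction, set/get, and a write/readFields round-trip). It assumes only the JDK and hadoop-common on the classpath:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableDemo {
  public static void main(String[] args) throws Exception {
    // BooleanWritable is a mutable box around a boolean.
    BooleanWritable bw = new BooleanWritable(true);
    bw.set(false);
    System.out.println(bw.get());            // false

    // Serialize with Writable.write(DataOutput) ...
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    bw.write(new DataOutputStream(out));

    // ... and deserialize with readFields(DataInput).
    BooleanWritable copy = new BooleanWritable();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(out.toByteArray())));
    System.out.println(bw.equals(copy));     // true
    System.out.println(bw.compareTo(copy));  // 0; it is also a WritableComparable
  }
}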

Example 1  Project: CloverETL-Engine   File: HadoopCloverConvert.java
@SuppressWarnings("rawtypes")
public static DataFieldType hadoopType2Clover( Class data) throws IOException{
	if (data == BooleanWritable.class){
		return DataFieldType.BOOLEAN;
	}else if (data == BytesWritable.class){
		return DataFieldType.BYTE;
	}else if (data == LongWritable.class){
		return DataFieldType.LONG;
	}else if (data == IntWritable.class){
		return DataFieldType.INTEGER;
	}else if (data == DoubleWritable.class){
		return DataFieldType.NUMBER;
	}else if (data == Text.class){
		return DataFieldType.STRING;
	}else{
		throw new IOException(String.format("Unsupported Hadoop data/Class type \"%s\" in conversion from Hadoop to Clover.",data.getName()));
		
	}
}
 
Example 2  Project: RDFS   File: TestTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
 
Example 3  Project: pxf   File: RecordkeyAdapterTest.java
/**
 * Test convertKeyValue for several calls of the same type
 */
@Test
public void convertKeyValueManyCalls() {
    Boolean key = true;
    mockLog();
    initRecordkeyAdapter();
    runConvertKeyValue(key, new BooleanWritable(key));
    verifyLog("converter initialized for type " + key.getClass() +
            " (key value: " + key + ")");

    for (int i = 0; i < 5; ++i) {
        key = (i % 2) == 0;
        runConvertKeyValue(key, new BooleanWritable(key));
    }
    verifyLogOnlyOnce();
}
 
Example 4  Project: hadoop   File: TestTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
 
Example 5  Project: CloverETL-Engine   File: HadoopCloverConvert.java
@SuppressWarnings("rawtypes")
public static Class cloverType2Hadoop(DataFieldMetadata field) throws IOException{
	switch (field.getDataType()){
	case BOOLEAN:
		return BooleanWritable.class;
	case BYTE:
	case CBYTE:
		return BytesWritable.class;
	case DATE:
		return LongWritable.class;
	case INTEGER:
		return IntWritable.class;
	case LONG:
		return LongWritable.class;
	case NUMBER:
		return DoubleWritable.class;
	case STRING:
		return Text.class;
	default:
		throw new IOException(String.format("Unsupported CloverDX data type \"%s\" of field \"%s\" in conversion to Hadoop.",field.getDataType().getName(),field.getName()));
		
	}
}
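
Note that examples 1 and 5 are not exact inverses: cloverType2Hadoop maps both DATE and LONG to LongWritable, and both BYTE and CBYTE to BytesWritable, so converting back through hadoopType2Clover yields LONG and BYTE respectively. A fragment illustrating this, assuming the CloverETL-Engine classes above are on the classpath and dateField is a DataFieldMetadata whose type is DATE:

// DATE is stored as a LongWritable, so the reverse mapping cannot
// distinguish it from LONG and the round trip prints LONG, not DATE.
Class hadoopClass = HadoopCloverConvert.cloverType2Hadoop(dateField); // LongWritable.class
DataFieldType roundTripped = HadoopCloverConvert.hadoopType2Clover(hadoopClass);
System.out.println(roundTripped); // LONG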
 
Example 6  Project: hadoop   File: TestJoinTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
 
Example 7  Project: hadoop   File: TestJoinTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
 
Example 8  Project: hadoop   File: TestJoinTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
 
Example 9  Project: hadoop   File: TestJoinTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
 
Example 10  Project: laser   File: AdmmReducerContextWritable.java
public void write(DataOutput out) throws IOException {
	new BooleanWritable(null != context.getXUpdated()).write(out);
	if (null != context.getXUpdated()) {
		writer.set(context.getXUpdated());
		writer.write(out);
	}

	new BooleanWritable(null != context.getUInitial()).write(out);
	if (null != context.getUInitial()) {
		writer.set(context.getUInitial());
		writer.write(out);
	}

	new BooleanWritable(null != context.getZUpdated()).write(out);
	if (null != context.getZUpdated()) {
		writer.set(context.getZUpdated());
		writer.write(out);
	}

	new DoubleWritable(context.getRho()).write(out);
	new DoubleWritable(context.getLambdaValue()).write(out);
	new DoubleWritable(context.getPrimalObjectiveValue()).write(out);
	new LongWritable(context.getCount()).write(out);
}
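
The write method above prefixes each optional payload with a BooleanWritable presence flag, so nullable fields survive serialization. The reading side must consume the flag first and only then the payload. A generic sketch of the same pattern (the class and field names here are ours, not the laser project's):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

/** A nullable Text field serialized behind a BooleanWritable presence flag. */
public class OptionalTextWritable implements Writable {
  private Text payload; // null means "absent"

  @Override
  public void write(DataOutput out) throws IOException {
    new BooleanWritable(payload != null).write(out); // flag first
    if (payload != null) {
      payload.write(out);                            // payload only if present
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    BooleanWritable present = new BooleanWritable();
    present.readFields(in);                          // read the flag first
    if (present.get()) {
      payload = new Text();
      payload.readFields(in);
    } else {
      payload = null;
    }
  }
}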
 
Example 11  Project: RDFS   File: TestTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
 
Example 12  Project: pentaho-hadoop-shims   File: CommonHadoopShim.java
@Override
public Class<? extends Writable> getHadoopWritableCompatibleClass( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
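
A mapping like this returns a Class, not an instance; callers typically instantiate the returned class reflectively. A minimal sketch (the helper name newWritable is ours):

import org.apache.hadoop.io.Writable;

public final class WritableFactory {
  /** Reflectively instantiates a Writable class chosen by a type mapping. */
  public static Writable newWritable(Class<? extends Writable> clazz) throws Exception {
    // Works for Text, LongWritable, DoubleWritable, BooleanWritable and
    // BytesWritable, which all have public no-arg constructors. NullWritable
    // is a singleton and must be obtained via NullWritable.get() instead.
    return clazz.getDeclaredConstructor().newInstance();
  }
}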
 
Example 13  Project: RDFS   File: TestTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
 
Example 14  Project: hadoop-gpu   File: TestTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
 
Example 15  Project: big-c   File: TestTupleWritable.java
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
 
Example 16  Project: big-c   File: TestJoinTupleWritable.java
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
 
Example 17  Project: big-c   File: TestJoinTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
 
Example 18  Project: big-c   File: TestJoinTupleWritable.java
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
 
Example 19
/**
 * Determine the Hadoop Writable type to use when passing a Kettle value back to Hadoop.
 *
 * @param kettleType Kettle value metadata to map; may be null.
 * @return Writable class to convert {@code kettleType} values to when sending data back to Hadoop.
 */
public static Class<? extends Writable> getWritableForKettleType( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
 
Example 20  Project: hadoop-gpu   File: TestTupleWritable.java
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
 
Example 21  Project: parquet-mr   File: DataWritableWriter.java
private void writePrimitive(final Writable value) {
  if (value == null) {
    return;
  }
  if (value instanceof DoubleWritable) {
    recordConsumer.addDouble(((DoubleWritable) value).get());
  } else if (value instanceof BooleanWritable) {
    recordConsumer.addBoolean(((BooleanWritable) value).get());
  } else if (value instanceof FloatWritable) {
    recordConsumer.addFloat(((FloatWritable) value).get());
  } else if (value instanceof IntWritable) {
    recordConsumer.addInteger(((IntWritable) value).get());
  } else if (value instanceof LongWritable) {
    recordConsumer.addLong(((LongWritable) value).get());
  } else if (value instanceof ShortWritable) {
    recordConsumer.addInteger(((ShortWritable) value).get());
  } else if (value instanceof ByteWritable) {
    recordConsumer.addInteger(((ByteWritable) value).get());
  } else if (value instanceof BigDecimalWritable) {
    throw new UnsupportedOperationException("BigDecimal writing not implemented");
  } else if (value instanceof BinaryWritable) {
    recordConsumer.addBinary(((BinaryWritable) value).getBinary());
  } else {
    throw new IllegalArgumentException("Unknown value type: " + value + " " + value.getClass());
  }
}
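
Example 21 dispatches on the concrete Writable subtype to feed a Parquet record consumer. The same instanceof pattern can unwrap a Writable into a plain boxed Java value; a minimal sketch using only hadoop-common types (the helper name unwrap is ours):

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public final class Writables {
  /** Unwraps common hadoop-common Writables into boxed Java values. */
  public static Object unwrap(Writable value) {
    if (value instanceof BooleanWritable) {
      return ((BooleanWritable) value).get(); // Boolean
    } else if (value instanceof IntWritable) {
      return ((IntWritable) value).get();     // Integer
    } else if (value instanceof LongWritable) {
      return ((LongWritable) value).get();    // Long
    } else if (value instanceof DoubleWritable) {
      return ((DoubleWritable) value).get();  // Double
    } else if (value instanceof Text) {
      return value.toString();                // String
    }
    throw new IllegalArgumentException("Unknown value type: " + value.getClass());
  }
}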
 
Example 22  Project: incubator-hivemall   File: UDAFToOrderedList.java
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    QueueAggregationBuffer myagg = (QueueAggregationBuffer) agg;

    Pair<List<Object>, List<Object>> tuples = myagg.drainQueue();
    if (tuples == null) {
        return null;
    }
    List<Object> keyList = tuples.getKey();
    List<Object> valueList = tuples.getValue();

    Object[] partialResult = new Object[outKV || outVK ? 5 : 4];
    partialResult[0] = valueList;
    partialResult[1] = keyList;
    partialResult[2] = new IntWritable(myagg.size);
    partialResult[3] = new BooleanWritable(myagg.reverseOrder);
    if (myagg.outKV) {
        partialResult[4] = new BooleanWritable(true);
    } else if (myagg.outVK) {
        partialResult[4] = new BooleanWritable(true);
    }
    return partialResult;
}
 
Example 23
@Override
protected void reduce(Text rowId, Iterable<NullWritable> nothing,
    Reducer<Text, NullWritable, Text, BooleanWritable>.Context context) throws IOException, InterruptedException {
  if (_matcher == null) {
    _matcher = getMergeSortRowIdMatcher(rowId, context);
  }
  if (_writer == null) {
    _writer = getRowIdWriter(rowId, context);
  }
  _writer.append(rowId, NullWritable.get());
  _rowIds.increment(1);
  if (_action == null) {
    _action = new Action() {
      @Override
      public void found(Text rowId) throws IOException {
        _rowIdsToUpdate.increment(1);
        try {
          context.write(rowId, new BooleanWritable(true));
        } catch (InterruptedException e) {
          throw new IOException(e);
        }
      }
    };
  }
  _matcher.lookup(rowId, _action);
}
 
Example 24  Project: spork   File: SequenceFileLoader.java
protected Object translateWritableToPigDataType(Writable w, byte dataType) {
  switch(dataType) {
    case DataType.CHARARRAY: return ((Text) w).toString();
    case DataType.BYTEARRAY:
          BytesWritable bw = (BytesWritable) w;
          // Make a copy
          return new DataByteArray(bw.getBytes(), 0, bw.getLength());
    case DataType.BOOLEAN: return ((BooleanWritable) w).get();
    case DataType.INTEGER: return ((IntWritable) w).get();
    case DataType.LONG: return ((LongWritable) w).get();
    case DataType.FLOAT: return ((FloatWritable) w).get();
    case DataType.DOUBLE: return ((DoubleWritable) w).get();
    case DataType.BYTE: return ((ByteWritable) w).get();
    case DataType.DATETIME: return ((DateTimeWritable) w).get();
  }

  return null;
}
 
Example 25
@Test
public void testPushAll() throws Exception {

  // push all configs; parser, enrichment, indexing, etc
  pushAllConfigs();

  // validate
  final Set<String> sensorsInZookeeper = new HashSet<>();
  final BooleanWritable foundGlobal = new BooleanWritable(false);
  ConfigurationsUtils.visitConfigs(client, new ConfigurationsUtils.ConfigurationVisitor() {
    @Override
    public void visit(ConfigurationType configurationType, String name, String data) {
      assertTrue(data.length() > 0);
      validateConfig(name, configurationType, data);
      if(configurationType == GLOBAL) {
        validateConfig(name, configurationType, data);
        foundGlobal.set(true);
      }
      else {
        sensorsInZookeeper.add(name);
      }
    }
  });
  assertTrue(foundGlobal.get());
  assertEquals(sensorsInZookeeper, sensors);
}
 
Example 26  Project: Cubert   File: CompactWritablesDeserializer.java
private static final WritableComparable<?> createWritable(DataType type)
{
    switch (type)
    {
    case BOOLEAN:
        return new BooleanWritable();
    case BYTE:
        return new ByteWritable();
    case INT:
        return new IntWritable();
    case LONG:
        return new LongWritable();
    case FLOAT:
        return new FloatWritable();
    case DOUBLE:
        return new DoubleWritable();
    case STRING:
        return new Text();
    default:
        return null;
    }
}
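
The factory above hands back an empty Writable box that the deserializer then fills via set(). A hedged usage fragment against hadoop-common only (DataType.BOOLEAN stands in for Cubert's enum constant):

// Typical use: create the box once, then refill and reuse it row by row.
WritableComparable<?> w = createWritable(DataType.BOOLEAN);
if (w instanceof BooleanWritable) {
    ((BooleanWritable) w).set(true);          // fill the box in place
    boolean v = ((BooleanWritable) w).get();  // read it back
}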
 
Example 27
@SuppressWarnings("unchecked")
private<T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
	if(hadoopType == LongWritable.class ) {
		return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.Text.class) {
		return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.IntWritable.class) {
		return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.FloatWritable.class) {
		return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
		return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
		return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.ByteWritable.class) {
		return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
	}

	throw new RuntimeException("Unable to convert Stratosphere type ("+stratosphereType.getClass().getCanonicalName()+") to Hadoop.");
}
 
Example 28  Project: spatial-framework-for-hadoop   File: ST_Is3D.java
public BooleanWritable evaluate(BytesWritable geomref) {
	if (geomref == null || geomref.getLength() == 0) {
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}

	OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
	if (ogcGeometry == null) {
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}

	resultBoolean.set(ogcGeometry.is3D());
	return resultBoolean;
}
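
Note the design choice in example 28: resultBoolean is a single BooleanWritable field that the UDF mutates with set() and returns on every call, avoiding a per-row allocation. This reuse pattern is common in Hive UDFs; a minimal sketch of the same pattern (the class IsPositive is hypothetical):

import org.apache.hadoop.io.BooleanWritable;

public class IsPositive {
  private final BooleanWritable result = new BooleanWritable(); // reused across calls

  public BooleanWritable evaluate(Long value) {
    if (value == null) {
      return null;                // propagate SQL NULL, as ST_Is3D does
    }
    result.set(value > 0);        // mutate the shared instance
    return result;                // callers must copy if they retain the value
  }
}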
 
Example 29  Project: pxf   File: RecordkeyAdapterTest.java
/**
 * Test convertKeyValue for boolean type and then string type - negative
 * test
 */
@Test
public void convertKeyValueBadSecondValue() {
    boolean key = true;
    initRecordkeyAdapter();
    runConvertKeyValue(key, new BooleanWritable(key));
    String badKey = "bad";
    try {
        recordkeyAdapter.convertKeyValue(badKey);
        fail("conversion of string to boolean should fail");
    } catch (ClassCastException e) {
        assertEquals(e.getMessage(),
                "java.lang.String cannot be cast to java.lang.Boolean");
    }
}
 
Example 30
public BooleanWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2)
{
	if (geometryref1 == null || geometryref2 == null ||
	    geometryref1.getLength() == 0 || geometryref2.getLength() == 0) {
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}
	if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
		LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
		return null;
	}

	OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
	OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
	if (ogcGeom1 == null || ogcGeom2 == null){
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}

	Geometry geometry1 = ogcGeom1.getEsriGeometry();
	Geometry geometry2 = ogcGeom2.getEsriGeometry();
	Envelope env1 = new Envelope(), env2 = new Envelope();
	geometry1.queryEnvelope(env1);
	geometry2.queryEnvelope(env2);

	resultBoolean.set(env1.isIntersecting(env2));
	return resultBoolean;
}
 