java.io.ByteArrayInputStream#close() Code Examples

Listed below are code examples that use java.io.ByteArrayInputStream#close(). Each snippet is taken from an open-source project; the project and file names are given so the full source can be looked up on GitHub.
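
Before the examples, a note on what close() actually does here: ByteArrayInputStream is backed entirely by an in-memory byte array, and its close() is documented as having no effect, so no resources are released and the stream can even be read after closing. Calling close(), as most of the snippets below do, is therefore harmless but optional; the more idiomatic form today is try-with-resources. A minimal standalone sketch, not taken from any of the projects below:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class CloseDemo {
    public static void main(String[] args) throws IOException {
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        // try-with-resources closes the stream automatically; for
        // ByteArrayInputStream close() is a no-op, but the pattern keeps the
        // code uniform with streams that really do hold resources.
        try (ByteArrayInputStream in = new ByteArrayInputStream(data)) {
            int b;
            while ((b = in.read()) != -1) {
                System.out.print((char) b);
            }
        }
        System.out.println();
    }
}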

Example 1 - Project: flutter-intellij, File: InspectorService.java
@NotNull
private Screenshot getScreenshotFromJson(JsonObject result) {
  final String imageString = result.getAsJsonPrimitive("image").getAsString();
  // create a buffered image
  final Base64.Decoder decoder = Base64.getDecoder();
  final byte[] imageBytes = decoder.decode(imageString);
  final ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(imageBytes);
  BufferedImage image = null;
  try {
    image = ImageIO.read(byteArrayInputStream);
    byteArrayInputStream.close();
  }
  catch (IOException e) {
    throw new RuntimeException("Error decoding image: " + e.getMessage());
  }

  final TransformedRect transformedRect = new TransformedRect(result.getAsJsonObject("transformedRect"));
  return new Screenshot(image, transformedRect);
}
 
Example 2 - Project: Ngram-Graphs, File: INSECTCompressedMemoryDB.java
@Override
public TObjectType loadObject(String sObjectName, String sObjectCategory) {
    ByteArrayInputStream bIn = new ByteArrayInputStream((byte[])ObjectMap.get(
            getObjectName(sObjectName, sObjectCategory)));
    TObjectType tObj = null;
    try {
        GZIPInputStream gzIn = new GZIPInputStream(bIn);
        ObjectInputStream oIn = new ObjectInputStream(gzIn);
        tObj = (TObjectType) oIn.readObject();
        oIn.close();
        gzIn.close();
        bIn.close();
    } catch (IOException iOException) {
        System.err.println("Cannot load object from memory. Reason:");
        iOException.printStackTrace(System.err);
    } catch (ClassNotFoundException classNotFoundException) {
        System.err.println("Cannot load object from memory. Reason:");
        classNotFoundException.printStackTrace(System.err);
    }
    
    return tObj;        
}
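
Example 2 closes its three nested streams (oIn, gzIn, bIn) one by one inside the try block, so the close() calls are skipped if readObject() throws. The same chain can be written with try-with-resources, which closes the streams in reverse declaration order even on failure. A self-contained sketch of that pattern; the gzip-plus-ObjectOutputStream round trip and the sample payload are illustrative, not code from the Ngram-Graphs project:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipObjectRoundTrip {
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        // Serialize and gzip a sample object into memory.
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        try (GZIPOutputStream gzOut = new GZIPOutputStream(bOut);
             ObjectOutputStream oOut = new ObjectOutputStream(gzOut)) {
            oOut.writeObject("sample payload");
        }

        // Read it back; the three streams close automatically in reverse
        // declaration order, mirroring the explicit oIn/gzIn/bIn closes above.
        try (ByteArrayInputStream bIn = new ByteArrayInputStream(bOut.toByteArray());
             GZIPInputStream gzIn = new GZIPInputStream(bIn);
             ObjectInputStream oIn = new ObjectInputStream(gzIn)) {
            String restored = (String) oIn.readObject();
            System.out.println(restored);
        }
    }
}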
 
Example 3 - Project: Extractor, File: QueryNodeGzip.java
@Override
public byte[] repackData(byte[] context, byte[] data) {
    byte[] result = "noData".getBytes();
    try { //gzip encode the payload
        ByteArrayInputStream textis = new ByteArrayInputStream(data);
        ByteArrayOutputStream fos = new ByteArrayOutputStream();
        GZIPOutputStream gzipOS = new GZIPOutputStream(fos);
        byte[] buffer = new byte[data.length];
        int len;
        while((len=textis.read(buffer)) != -1){
            gzipOS.write(buffer, 0, len);
        }

        gzipOS.close();
        textis.close();

        result = fos.toByteArray();
        fos.close();
    } catch (IOException e) {
        Globals.callbacks.printError("gzip packing error: ");
        Globals.callbacks.printError(e.getMessage());
    }
    return result;    
}
 
Example 4 - Project: PDD, File: BSBFDeDuplicatorSerializerTest.java
@Test
public void testWriteToReadFromVersion2() throws IOException {
    final ProbabilisticDeDuplicatorSerializer<BSBFDeDuplicator> serializer =
            BSBFDeDuplicatorSerializers.VERSION_2;
    final BSBFDeDuplicator deDuplicator = new BSBFDeDuplicator(64L, 1);
    final ByteBuffer byteBuffer = ByteBuffer.allocate(64);
    byteBuffer.putLong(1L);
    assertTrue(deDuplicator.classifyDistinct(byteBuffer.array()));
    byteBuffer.clear();
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    serializer.writeTo(deDuplicator, out);
    out.close();
    final ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    final BSBFDeDuplicator serialized = serializer.readFrom(in);
    in.close();
    assertEquals(deDuplicator, serialized);
}
 
Example 5 - Project: snowplow-android-tracker, File: Util.java
/**
 * Converts a byte array back into an
 * event map for sending.
 *
 * @param bytes the bytes to be converted
 * @return the Map or null
 */
@SuppressWarnings("unchecked")
public static Map<String, String> deserializer(byte[] bytes) {
    Map<String, String> newMap = null;
    try {
        ByteArrayInputStream mem_in = new ByteArrayInputStream(bytes);
        ObjectInputStream in = new ObjectInputStream(mem_in);
        Map<String, String> map = (HashMap<String, String>) in.readObject();
        in.close();
        mem_in.close();
        newMap = map;
    } catch (NullPointerException | ClassNotFoundException | IOException e) {
        e.printStackTrace();
    }
    return newMap;
}
 
Example 6 - Project: cyberduck, File: B2LargeUploadWriteFeatureTest.java
@Test
public void testWrite() throws Exception {
    final B2FileidProvider fileid = new B2FileidProvider(session).withCache(cache);
    final B2LargeUploadWriteFeature feature = new B2LargeUploadWriteFeature(session, fileid);
    final Path container = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final TransferStatus status = new TransferStatus();
    status.setLength(-1L);
    status.setTimestamp(1503654614004L);
    final Path file = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final OutputStream out = feature.write(file, status, new DisabledConnectionCallback());
    final byte[] content = new RandomStringGenerator.Builder().build().generate(6 * 1024 * 1024).getBytes(StandardCharsets.UTF_8);
    final ByteArrayInputStream in = new ByteArrayInputStream(content);
    assertEquals(content.length, IOUtils.copy(in, out));
    in.close();
    out.close();
    assertTrue(new B2FindFeature(session, fileid).find(file));
    final byte[] compare = new byte[content.length];
    final InputStream stream = new B2ReadFeature(session, fileid).read(file, new TransferStatus().length(content.length), new DisabledConnectionCallback());
    IOUtils.readFully(stream, compare);
    stream.close();
    assertArrayEquals(content, compare);
    assertEquals(1503654614004L, new B2AttributesFinderFeature(session, fileid).find(file).getModificationDate());
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
 
Example 7 - Project: zabbixj, File: ActiveThread.java
private JSONObject getResponse(byte[] responseBytes) throws Exception {
	byte[] sizeBuffer = new byte[8];
	int index = 0;
	for(int i = 12; i > 4; i--) {
		sizeBuffer[index++] = responseBytes[i];
	}
	ByteArrayInputStream bais = new ByteArrayInputStream(sizeBuffer);
	DataInputStream dis = new DataInputStream(bais);
	long size = dis.readLong();
	dis.close();
	bais.close();

	byte[] jsonBuffer = new byte[responseBytes.length - 13];
	if(jsonBuffer.length != size) {
		throw new ZabbixException("Reported and actual buffer sizes differ!");
	}
	
	index = 0;
	for(int i = 13; i < responseBytes.length; i++) {
		jsonBuffer[index++] = responseBytes[i];
	}
	
	JSONObject response = new JSONObject(new String(jsonBuffer));
	
	return response;
}
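
Example 7 reverses the eight length bytes by hand so that the big-endian DataInputStream.readLong() returns the little-endian value stored in responseBytes[5..12]. The same conversion can be done without any streams using ByteBuffer; a self-contained sketch, with the offset taken from the loop bounds above and an illustrative dummy response:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianLength {
    // Reads the 8-byte little-endian length that Example 7 extracts from
    // responseBytes[5..12] by copying the bytes in reverse order.
    static long dataLength(byte[] responseBytes) {
        return ByteBuffer.wrap(responseBytes, 5, 8)
                .order(ByteOrder.LITTLE_ENDIAN)
                .getLong();
    }

    public static void main(String[] args) {
        byte[] response = new byte[13 + 2]; // 13-byte header plus a 2-byte payload
        response[5] = 2;                    // length field = 2, least significant byte first
        System.out.println(dataLength(response)); // prints 2
    }
}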
 
Example 8 - Project: Flashtool, File: USBFlash.java
public static S1Packet writeS1(S1Packet p) throws IOException,X10FlashException {
	write(p.getHeaderWithChecksum());
	if (p.getDataLength()>0) {
		long totalread=0;
		ByteArrayInputStream in = new ByteArrayInputStream(p.getDataArray());
		while (totalread<p.getDataLength()) {
			long remaining = p.getDataLength()-totalread;
			long bufsize=(remaining<buffersize)?remaining:buffersize;
			byte[] buf = new byte[(int)bufsize];
			int read = in.read(buf);
			write(buf);
			totalread+=read;
		}
		in.close();
	}
	write(p.getCRC32());
	return readS1Reply();
}
 
Example 9 - Project: hottub, File: WriteOutputStream.java
public static void main(String[] args) throws Exception {
    RIFFWriter writer = null;
    RIFFReader reader = null;
    File tempfile = File.createTempFile("test",".riff");
    try
    {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writer = new RIFFWriter(baos, "TEST");
        RIFFWriter chunk = writer.writeChunk("TSCH");
        chunk.write((byte)33);
        writer.close();
        writer = null;
        ByteArrayInputStream fis = new ByteArrayInputStream(baos.toByteArray());
        reader = new RIFFReader(fis);
        assertEquals(reader.getFormat(), "RIFF");
        assertEquals(reader.getType(), "TEST");
        RIFFReader readchunk = reader.nextChunk();
        assertEquals(readchunk.getFormat(), "TSCH");
        assertEquals(readchunk.read(), 33);
        fis.close();
        reader = null;


    }
    finally
    {
        if(writer != null)
            writer.close();
        if(reader != null)
            reader.close();

        if(tempfile.exists())
            if(!tempfile.delete())
                tempfile.deleteOnExit();
    }
}
 
Example 10 - Project: symja_android_library, File: SerializableTest.java
private void equalsStringCopy(Object original) {
	try {

		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		ObjectOutputStream oos = new ObjectOutputStream(baos);

		long start0 = System.currentTimeMillis();
		oos.writeObject(original);
		byte[] bArray = baos.toByteArray();
		baos.close();
		oos.close();

		long start1 = System.currentTimeMillis();
		ByteArrayInputStream bais = new ByteArrayInputStream(bArray);
		ObjectInputStream ois = new ObjectInputStream(bais);
		Object copy = ois.readObject();
		bais.close();
		ois.close();
		long end = System.currentTimeMillis();
		long temp = start1 - start0;
		System.out.println(Long.valueOf(temp).toString());
		temp = end - start1;
		System.out.println(Long.valueOf(temp).toString());
		assertEquals(original.toString(), copy.toString());

	} catch (ClassNotFoundException cnfe) {
		cnfe.printStackTrace();
		assertEquals("", cnfe.toString());
	} catch (IOException ioe) {
		ioe.printStackTrace();
		assertEquals("", ioe.toString());
	}
}
 
Example 11 - Project: hadoop, File: TestWasbUriAndConfiguration.java
@Test
public void testConnectUsingSASReadonly() throws Exception {
  // Create the test account with SAS credentials.
  testAccount = AzureBlobStorageTestAccount.create("", EnumSet.of(
      CreateOptions.UseSas, CreateOptions.CreateContainer,
      CreateOptions.Readonly));
  assumeNotNull(testAccount);

  // Create a blob in there
  final String blobKey = "blobForReadonly";
  CloudBlobContainer container = testAccount.getRealContainer();
  CloudBlockBlob blob = container.getBlockBlobReference(blobKey);
  ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[] { 1,
      2, 3 });
  blob.upload(inputStream, 3);
  inputStream.close();

  // Make sure we can read it from the file system
  Path filePath = new Path("/" + blobKey);
  FileSystem fs = testAccount.getFileSystem();
  assertTrue(fs.exists(filePath));
  byte[] obtained = new byte[3];
  DataInputStream obtainedInputStream = fs.open(filePath);
  obtainedInputStream.readFully(obtained);
  obtainedInputStream.close();
  assertEquals(3, obtained[2]);
}
 
Example 12 - Project: cyberduck, File: GraphBufferWriteFeatureTest.java
@Test
public void testWrite() throws Exception {
    final GraphBufferWriteFeature feature = new GraphBufferWriteFeature(session);
    final Path container = new OneDriveHomeFinderService(session).find();
    final byte[] content = RandomUtils.nextBytes(5 * 1024);
    final TransferStatus status = new TransferStatus();
    status.setLength(content.length);
    final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final HttpResponseOutputStream<Void> out = feature.write(file, status, new DisabledConnectionCallback());
    final ByteArrayInputStream in = new ByteArrayInputStream(content);
    new StreamCopier(status, status).transfer(in, out);
    in.close();
    out.flush();
    assertEquals(content.length, status.getOffset());
    assertEquals(content.length, status.getLength());
    out.close();
    assertEquals(content.length, status.getOffset());
    assertEquals(content.length, status.getLength());
    assertNull(out.getStatus());
    assertTrue(new DefaultFindFeature(session).find(file));
    final byte[] compare = new byte[content.length];
    final InputStream stream = new GraphReadFeature(session).read(file, new TransferStatus().length(content.length), new DisabledConnectionCallback());
    IOUtils.readFully(stream, compare);
    stream.close();
    assertArrayEquals(content, compare);
    new GraphDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
 
Example 13 - Project: zeno, File: BasicSerializationExample.java
public FastBlobStateEngine deserializeLatestData(byte snapshot[], byte delta[]) {
    /// now we are on the client.  We need to create a state engine, and again
    /// tell it about our data model.
    FastBlobStateEngine stateEngine = new FastBlobStateEngine(new ExampleSerializerFactory());

    /// we need to create a FastBlobReader, which is responsible for reading
    /// serialized blobs.
    FastBlobReader reader = new FastBlobReader(stateEngine);

    /// get a stream from the snapshot file location
    ByteArrayInputStream snapshotStream = new ByteArrayInputStream(snapshot);
    /// get a stream from the delta file location
    ByteArrayInputStream deltaStream = new ByteArrayInputStream(delta);


    try {
        /// first read the snapshot
        reader.readSnapshot(snapshotStream);
        /// then apply the delta
        reader.readDelta(deltaStream);
    } catch (IOException e) {
        /// either of these methods throws an exception if the FastBlobReader
        /// is unable to read from the provided stream.
    } finally {
        /// it is your responsibility to close the streams.  The FastBlobReader will not do this.
        try {
            snapshotStream.close();
            deltaStream.close();
        } catch (IOException ignore) { }
    }

    return stateEngine;
}
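
The comments in Example 13 point out that closing the snapshot and delta streams is the caller's job. A sketch of the same method using try-with-resources, so both streams are closed even if readSnapshot or readDelta throws; it assumes the same zeno classes (FastBlobStateEngine, FastBlobReader, ExampleSerializerFactory) shown above:

/// Sketch only: same zeno types as Example 13, with try-with-resources
/// handling the stream cleanup that the original does in the finally block.
public FastBlobStateEngine deserializeLatestData(byte snapshot[], byte delta[]) {
    FastBlobStateEngine stateEngine = new FastBlobStateEngine(new ExampleSerializerFactory());
    FastBlobReader reader = new FastBlobReader(stateEngine);

    /// both ByteArrayInputStreams are closed automatically when the block
    /// exits, whether or not readSnapshot/readDelta throws
    try (ByteArrayInputStream snapshotStream = new ByteArrayInputStream(snapshot);
         ByteArrayInputStream deltaStream = new ByteArrayInputStream(delta)) {
        reader.readSnapshot(snapshotStream);
        reader.readDelta(deltaStream);
    } catch (IOException e) {
        /// thrown if the FastBlobReader is unable to read from the provided streams
    }

    return stateEngine;
}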
 
Example 14 - Project: netty-4.1.22, File: Base64Test.java
private static X509Certificate certFromString(String string) throws Exception {
    CertificateFactory factory = CertificateFactory.getInstance("X.509");
    ByteArrayInputStream bin = new ByteArrayInputStream(string.getBytes(CharsetUtil.US_ASCII));
    try {
        return (X509Certificate) factory.generateCertificate(bin);
    } finally {
        bin.close();
    }
}
 
Example 15
@Test
public void testStreamNormal() throws Exception {
  String value = "abc";
  ByteArrayOutputStream os = new ByteArrayOutputStream();

  pp.encodeResponse(os, value);
  Assert.assertEquals(value, os.toString(StandardCharsets.UTF_8.name()));

  ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
  Object result = pp.decodeResponse(is, stringType);
  Assert.assertEquals(value, result);

  os.close();
  is.close();
}
 
Example 16 - Project: nextreports-server, File: ReportUtil.java
public static void copyImages(String directoryName, List<JcrFile> images) throws Exception {
    if ((images == null) || (images.size() == 0)) {
        return;
    }
    File folder = new File(directoryName);
    folder.mkdir();
    for (JcrFile image : images) {
        String name = image.getName();

        byte[] xml = image.getDataProvider().getBytes();
        String fileName = directoryName + File.separator + name;
        File f = new File(fileName);
        if (f.exists()) {
            continue;
        }
        f.createNewFile();
        ByteArrayInputStream bais = new ByteArrayInputStream(xml);
        FileOutputStream fos = new FileOutputStream(f);
        try {
            int n = 0;
            byte[] b = new byte[1024];
            while ((n = bais.read(b)) >= 0) {
                fos.write(b, 0, n);
            }
        } finally {
            fos.close();
            bais.close();
        }
    }
}
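
Example 16 copies each in-memory byte[] to disk through a ByteArrayInputStream and a 1 KB buffer. Since the data is already fully in memory, java.nio.file.Files.write can do the same in a single call with nothing to close by hand. A minimal standalone sketch; the directory, file name, and payload are illustrative:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class WriteImageBytes {
    public static void main(String[] args) throws IOException {
        byte[] imageBytes = {(byte) 0x89, 'P', 'N', 'G'}; // illustrative payload
        Path target = Paths.get("images", "logo.png");    // illustrative location
        Files.createDirectories(target.getParent());
        // One call writes (and creates) the file; no streams to close by hand.
        Files.write(target, imageBytes);
        System.out.println("wrote " + target.toAbsolutePath());
    }
}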
 
Example 17 - Project: dragonwell8_jdk, File: WriteOutputStream.java
public static void main(String[] args) throws Exception {
    RIFFWriter writer = null;
    RIFFReader reader = null;
    File tempfile = File.createTempFile("test",".riff");
    try
    {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writer = new RIFFWriter(baos, "TEST");
        RIFFWriter chunk = writer.writeChunk("TSCH");
        chunk.write((byte)33);
        writer.close();
        writer = null;
        ByteArrayInputStream fis = new ByteArrayInputStream(baos.toByteArray());
        reader = new RIFFReader(fis);
        assertEquals(reader.getFormat(), "RIFF");
        assertEquals(reader.getType(), "TEST");
        RIFFReader readchunk = reader.nextChunk();
        assertEquals(readchunk.getFormat(), "TSCH");
        assertEquals(readchunk.read(), 33);
        fis.close();
        reader = null;


    }
    finally
    {
        if(writer != null)
            writer.close();
        if(reader != null)
            reader.close();

        if(tempfile.exists())
            if(!tempfile.delete())
                tempfile.deleteOnExit();
    }
}
 
Example 18 - Project: mrgeo, File: AccumuloMrsPyramidMetadataReader.java
private MrsPyramidMetadata loadMetadata() throws IOException
{

  Properties mrgeoAccProps = AccumuloConnector.getAccumuloProperties();
  String authsString = mrgeoAccProps.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_AUTHS);
  Properties p1 = null;
  Properties p2 = null;

  if (dataProvider == null)
  {
    //log.info("no data provider used");
    mrgeoAccProps = AccumuloConnector.getAccumuloProperties();
  }
  else
  {

    // get authorizations from dataProvider
    p1 = AccumuloUtils.providerPropertiesToProperties(dataProvider.getProviderProperties());
    p2 = dataProvider.getQueryProperties();
    if (p1 != null)
    {
      mrgeoAccProps.putAll(p1);
    }
    if (p2 != null)
    {
      mrgeoAccProps.putAll(p2);
    }

  }

  if (p1 != null)
  {
    if (p1.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_AUTHS) != null &&
        p1.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_AUTHS).length() > 0)
    {
      authsString = p1.getProperty(MrGeoAccumuloConstants.MRGEO_ACC_KEY_AUTHS);
    }
  }
  if (mrgeoAccProps.getProperty(DataProviderFactory.PROVIDER_PROPERTY_USER_ROLES) != null &&
      mrgeoAccProps.getProperty(DataProviderFactory.PROVIDER_PROPERTY_USER_ROLES).length() > 0)
  {
    authsString = mrgeoAccProps.getProperty(DataProviderFactory.PROVIDER_PROPERTY_USER_ROLES);
  }

  auths = AccumuloUtils.createAuthorizationsFromDelimitedString(authsString);

  if (conn == null)
  {
    try
    {
      conn = AccumuloConnector.getConnector(mrgeoAccProps);
    }
    catch (DataProviderException dpe)
    {
      throw new IOException("No connection to Accumulo!", dpe);
    }
  }

  if (name == null || name.length() == 0)
  {
    throw new IOException("Can not load metadata, resource name is empty!");
  }

  Scanner scan = null;
  try
  {
    scan = conn.createScanner(name, auths);
  }
  catch (Exception e)
  {
    throw new IOException("Can not connect to table " + name + " with auths " + auths + " - " + e.getMessage(), e);
  }

  MrsPyramidMetadata retMeta = null;
  Range range = new Range(MrGeoAccumuloConstants.MRGEO_ACC_METADATA, MrGeoAccumuloConstants.MRGEO_ACC_METADATA + " ");
  scan.setRange(range);
  scan.fetchColumn(new Text(MrGeoAccumuloConstants.MRGEO_ACC_METADATA),
      new Text(MrGeoAccumuloConstants.MRGEO_ACC_CQALL));
  for (Entry<Key, Value> entry : scan)
  {
    ByteArrayInputStream bis = new ByteArrayInputStream(entry.getValue().get());
    retMeta = MrsPyramidMetadata.load(bis);
    bis.close();
    break;
  }

  return retMeta;
}
 
Example 19 - Project: hottub, File: SupplementaryChars.java
@Test(dataProvider = "unsupported", expectedExceptions = SAXParseException.class)
public void testInvalid(String xml) throws Exception {
    ByteArrayInputStream stream = new ByteArrayInputStream(xml.getBytes("UTF-8"));
    getParser().parse(stream, new DefaultHandler());
    stream.close();
}
 
Example 20 - Project: nifi, File: TestJdbcCommon.java
@Test
public void testBlob() throws Exception {
    try (final Statement stmt = con.createStatement()) {
        stmt.executeUpdate("CREATE TABLE blobtest (id INT, b BLOB(64 K))");
        stmt.execute("INSERT INTO blobtest VALUES (41, NULL)");
        PreparedStatement ps = con.prepareStatement("INSERT INTO blobtest VALUES (?, ?)");
        ps.setInt(1, 42);
        final byte[] buffer = new byte[4002];
        IntStream.range(0, 4002).forEach((i) -> buffer[i] = (byte) ((i % 10) + 65));
        // Put a zero-byte in to test the buffer building logic
        buffer[1] = 0;
        ByteArrayInputStream bais = new ByteArrayInputStream(buffer);

        // - set the value of the input parameter to the input stream
        ps.setBlob(2, bais, 4002);
        ps.execute();
        bais.close();

        final ResultSet resultSet = stmt.executeQuery("select * from blobtest");

        final ByteArrayOutputStream outStream = new ByteArrayOutputStream();
        JdbcCommon.convertToAvroStream(resultSet, outStream, false);

        final byte[] serializedBytes = outStream.toByteArray();
        assertNotNull(serializedBytes);

        // Deserialize bytes to records
        final InputStream instream = new ByteArrayInputStream(serializedBytes);

        final DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
        try (final DataFileStream<GenericRecord> dataFileReader = new DataFileStream<>(instream, datumReader)) {
            GenericRecord record = null;
            while (dataFileReader.hasNext()) {
                // Reuse record object by passing it to next(). This saves us from
                // allocating and garbage collecting many objects for files with
                // many items.
                record = dataFileReader.next(record);
                Integer id = (Integer) record.get("ID");
                Object o = record.get("B");
                if (id == 41) {
                    assertNull(o);
                } else {
                    assertNotNull(o);
                    assertTrue(o instanceof ByteBuffer);
                    final byte[] blob = ((ByteBuffer) o).array();
                    assertEquals(4002, blob.length);
                    // Third byte should be 67 ('C')
                    assertEquals('C', blob[2]);
                }
            }
        }
    }
}