The following examples show how to use com.google.common.io.ByteStreams#copy(). They are taken from open-source projects on GitHub.
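For reference, ByteStreams.copy(from, to) copies all bytes from the source to the sink and returns the number of bytes copied; it closes neither stream, so the caller is responsible for cleanup. A minimal sketch (file names are illustrative):

// Minimal usage: copy() leaves both streams open, so try-with-resources
// handles the close; the return value is the number of bytes copied.
try (InputStream in = new FileInputStream("input.bin");
    OutputStream out = new FileOutputStream("output.bin")) {
  long bytesCopied = ByteStreams.copy(in, out);
}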
/**
* Helper to generate files for testing.
*
* @param filePath The path to the file to write.
* @param lines The lines to write.
* @param compression The compression type of the file.
* @return The file written.
* @throws IOException If an error occurs while creating or writing the file.
*/
public static ResourceId writeToFile(
String filePath, List<String> lines, Compression compression) throws IOException {
String fileContents = String.join(System.lineSeparator(), lines);
ResourceId resourceId = FileSystems.matchNewResource(filePath, false);
String mimeType =
compression == Compression.UNCOMPRESSED ? MimeTypes.TEXT : MimeTypes.BINARY;
// Write the file contents to the channel and close.
try (ReadableByteChannel readChannel =
        Channels.newChannel(new ByteArrayInputStream(fileContents.getBytes(StandardCharsets.UTF_8)))) {
try (WritableByteChannel writeChannel =
compression.writeCompressed(FileSystems.create(resourceId, mimeType))) {
ByteStreams.copy(readChannel, writeChannel);
}
}
return resourceId;
}
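A hypothetical call site for the helper above; the path, lines, and compression are illustrative:

// Hypothetical usage of writeToFile: two lines, written uncompressed.
ResourceId written =
    writeToFile("/tmp/example.txt", Arrays.asList("line 1", "line 2"), Compression.UNCOMPRESSED);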
private void copyExternalInput(Digest digest, CancellableOutputStream out)
throws IOException, InterruptedException {
logger.log(Level.FINE, format("downloading %s", DigestUtil.toString(digest)));
    try (InputStream in = newExternalInput(digest, /* offset= */ 0)) {
      ByteStreams.copy(in, out);
} catch (IOException e) {
out.cancel();
logger.log(
Level.WARNING,
format("error downloading %s", DigestUtil.toString(digest)),
e); // prevent burial by early end of stream during close
throw e;
}
logger.log(Level.FINE, format("download of %s complete", DigestUtil.toString(digest)));
}
public Result addFile ( final String targetPath, final InputStream stream, final Consumer<CpioArchiveEntry> customizer ) throws IOException
{
final Path tmpFile = Files.createTempFile ( "rpm-payload-", null );
try
{
try ( OutputStream os = Files.newOutputStream ( tmpFile ) )
{
ByteStreams.copy ( stream, os );
}
return addFile ( targetPath, tmpFile, customizer );
}
finally
{
Files.deleteIfExists ( tmpFile );
}
}
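As an aside, the stream-to-temp-file copy in the body above can be done with java.nio alone; a minimal sketch, assuming the same stream and tmpFile:

// Alternative without Guava: Files.copy drains the stream into the temp file.
// REPLACE_EXISTING is required because createTempFile already created the file.
Files.copy ( stream, tmpFile, StandardCopyOption.REPLACE_EXISTING );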
static void loadNativeLibrary() {
try {
final String osName = System.getProperty("os.name");
final String arch = System.getProperty("os.arch");
final String resourcePath = "/native/" + osName + "-" + arch + "/libvarint.so.1.0.1";
      final InputStream is = NativeDocIdStream.class.getResourceAsStream(resourcePath);
      if (is == null) {
        throw new FileNotFoundException("unable to find libvarint.so.1.0.1 at resource path " + resourcePath);
      }
      final File tempFile = File.createTempFile("libvarint", ".so");
      // try-with-resources closes both streams even if the copy fails
      try (InputStream in = is;
          OutputStream os = new FileOutputStream(tempFile)) {
        ByteStreams.copy(in, os);
      }
      System.load(tempFile.getAbsolutePath());
// noinspection ResultOfMethodCallIgnored
tempFile.delete();
} catch (Throwable e) {
log.warn("unable to load libvarint using class loader, looking in java.library.path", e);
System.loadLibrary("varint"); // if this fails it throws UnsatisfiedLinkError
}
}
@Test
public void testConditionalGet() throws Exception {
assumeTrue(!blobStoreType.equals("b2"));
String blobName = "blob-name";
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(BYTE_SOURCE.size());
PutObjectResult result = client.putObject(containerName, blobName,
BYTE_SOURCE.openStream(), metadata);
S3Object object = client.getObject(
new GetObjectRequest(containerName, blobName)
.withMatchingETagConstraint(result.getETag()));
try (InputStream is = object.getObjectContent()) {
assertThat(is).isNotNull();
ByteStreams.copy(is, ByteStreams.nullOutputStream());
}
object = client.getObject(
new GetObjectRequest(containerName, blobName)
.withNonmatchingETagConstraint(result.getETag()));
assertThat(object).isNull();
}
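When the body is read only to drain it, Guava's ByteStreams.exhaust is an equivalent shortcut; a sketch under the same test setup:

// Equivalent drain: exhaust() reads and discards the stream and returns the byte count.
try (InputStream is = object.getObjectContent()) {
  assertThat(is).isNotNull();
  long drained = ByteStreams.exhaust(is);
}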
private Map<String, byte[]> extractContents(InputStream inputStream)
throws IOException {
    Map<String, byte[]> contents = new HashMap<>();
    // assumption: the zip is non-empty
    try (ZipInputStream zip = new ZipInputStream(inputStream)) {
      ZipEntry entry;
      while ((entry = zip.getNextEntry()) != null) {
        if (entry.isDirectory()) continue;
        ByteArrayOutputStream contentStream = new ByteArrayOutputStream();
        ByteStreams.copy(zip, contentStream);
        contents.put(entry.getName(), contentStream.toByteArray());
      }
    }
return contents;
}
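A hypothetical inverse of the method above, writing such a map back out as a zip using the same copy idiom:

// Hypothetical counterpart: serialize the extracted map back into a zip stream.
private void writeContents(Map<String, byte[]> contents, OutputStream outputStream)
    throws IOException {
  try (ZipOutputStream zip = new ZipOutputStream(outputStream)) {
    for (Map.Entry<String, byte[]> entry : contents.entrySet()) {
      zip.putNextEntry(new ZipEntry(entry.getKey()));
      ByteStreams.copy(new ByteArrayInputStream(entry.getValue()), zip);
      zip.closeEntry();
    }
  }
}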
private boolean copyResource(String resource, String destination, String description) {
try (
InputStream in = MarkdownDoclet.class.getResourceAsStream(resource);
OutputStream out = new FileOutputStream(new File(options.getDestinationDir(), destination))
)
    {
      // Guard against a missing resource; getResourceAsStream returns null in that case.
      if (in == null) {
        printError("Error writing " + description + ": resource not found: " + resource);
        return false;
      }
      ByteStreams.copy(in, out);
      return true;
}
catch ( IOException e ) {
printError("Error writing " + description + ": " + e.getLocalizedMessage());
return false;
}
}
@Override
public void generateFile(String fileName, String outputCfgName, InputStream content) throws RuntimeIOException {
try {
URI uri = getURI(fileName, outputCfgName);
try (OutputStream out = converter.createOutputStream(uri)) {
ByteStreams.copy(beforeWrite.beforeWrite(uri, outputCfgName, content), out);
}
} catch (IOException t) {
throw new RuntimeIOException(t);
}
}
/** Archive and compress 'pathsToIncludeInArchive' into 'out', using tar+zstandard. */
@VisibleForTesting
static long compress(
ProjectFilesystem projectFilesystem, Collection<Path> pathsToIncludeInArchive, Path out)
throws IOException {
long fullSize = 0L;
try (OutputStream o = new BufferedOutputStream(Files.newOutputStream(out));
OutputStream z = new ZstdCompressorOutputStream(o);
TarArchiveOutputStream archive = new TarArchiveOutputStream(z)) {
archive.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
for (Path path : pathsToIncludeInArchive) {
boolean isRegularFile = !projectFilesystem.isDirectory(path);
// Add a file entry.
TarArchiveEntry e = new TarArchiveEntry(path.toString() + (isRegularFile ? "" : "/"));
int mode = (int) projectFilesystem.getPosixFileMode(path);
        // If the owner cannot read or write, widen the mode to u+rw and g+r.
        e.setMode((mode & 0600) == 0 ? (mode | 0640) : mode);
e.setModTime((long) ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP * 1000);
if (isRegularFile) {
long pathSize = projectFilesystem.getFileSize(path);
e.setSize(pathSize);
fullSize += pathSize;
archive.putArchiveEntry(e);
try (InputStream input = projectFilesystem.newFileInputStream(path)) {
ByteStreams.copy(input, archive);
}
} else {
archive.putArchiveEntry(e);
}
archive.closeArchiveEntry();
}
archive.finish();
}
return fullSize;
}
@Override
public StepExecutionResult execute(ExecutionContext context) throws IOException {
try (InputStream in = filesystem.newFileInputStream(sourceFile);
OutputStream out = filesystem.newFileOutputStream(outputPath);
ZstdCompressorOutputStream zstdOut =
new ZstdCompressorOutputStream(out, DEFAULT_COMPRESSION_LEVEL, false, true)) {
ByteStreams.copy(in, zstdOut);
} finally {
filesystem.deleteFileAtPath(sourceFile);
}
return StepExecutionResults.SUCCESS;
}
private static File copyResource(String name) throws Exception {
    String tempDir = System.getProperty("java.io.tmpdir");
    File tmp = new File(tempDir, name);
    tmp.deleteOnExit();
    // Close both streams deterministically with try-with-resources.
    try (InputStream is = URLClassLoader.getSystemResourceAsStream(name);
        OutputStream os = new FileOutputStream(tmp)) {
      ByteStreams.copy(is, os);
    }
    return tmp;
}
private static ByteArrayInputStream openFile(String filePath) throws IOException {
try (FileInputStream fileStream = new FileInputStream(filePath)) {
// Read data fully to memory to be able to close the file stream.
ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
ByteStreams.copy(fileStream, byteOutputStream);
return new ByteArrayInputStream(byteOutputStream.toByteArray());
}
}
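The same read-fully pattern can be written more compactly with Guava's ByteStreams.toByteArray, which performs the copy internally; a sketch of an equivalent method (the name openFileCompact is illustrative):

// Equivalent: toByteArray() copies the whole stream into a byte[] in one call.
private static ByteArrayInputStream openFileCompact(String filePath) throws IOException {
  try (FileInputStream fileStream = new FileInputStream(filePath)) {
    return new ByteArrayInputStream(ByteStreams.toByteArray(fileStream));
  }
}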
private boolean recordLog(OutputStream output, String userId, long start, long end, JobId id, String submissionId) {
    try {
final String startTime = JodaDateUtility.formatTimeStampMilli.print(start - PRE_TIME_BUFFER_MS);
final String endTime = JodaDateUtility.formatTimeStampMilli.print(end + POST_TIME_BUFFER_MS);
final SqlQuery query = JobRequestUtil.createSqlQuery(
String.format(LOG_QUERY, SqlUtils.quoteIdentifier(submissionId), startTime, endTime, "%" + id.getId() + "%"),
Collections.singletonList(LOGS_STORAGE_PLUGIN), userId);
final CompletionListener completionListener = new CompletionListener();
jobsService.get().submitJob(SubmitJobRequest.newBuilder().setSqlQuery(query).setQueryType(QueryType.UI_INTERNAL_RUN).build(), completionListener);
try {
completionListener.await(TIMEOUT_IN_SECONDS, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore the interrupt status before failing
        throw new RuntimeException("Log search took more than " + TIMEOUT_IN_SECONDS + " seconds to complete.");
      }
if (!completionListener.isCompleted()) {
throw new RuntimeException("Log search was cancelled or failed.");
}
Path outputFile = supportPath.resolve(submissionId).resolve("0_0_0.json");
try (FileInputStream fis = new FileInputStream(outputFile.toFile())) {
ByteStreams.copy(fis, output);
}
return true;
} catch (Exception ex) {
logger.warn("Failure while attempting to query log files for support submission.", ex);
PrintWriter writer = new PrintWriter(output);
writer.write(String.format("{\"message\": \"Log searching failed with exception %s.\"}", ex.getMessage()));
writer.flush();
return false;
}
}
/**
* Write to cloud storage using the FileSystems API. See https://stackoverflow.com/a/50050583.
*/
private void writeToStorage(String path, byte[] data) throws Exception {
ResourceId resourceId = FileSystems.matchNewResource(path, false);
try (ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
ReadableByteChannel readerChannel = Channels.newChannel(inputStream);
WritableByteChannel writerChannel = FileSystems.create(resourceId, MimeTypes.BINARY)) {
ByteStreams.copy(readerChannel, writerChannel);
}
}
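A hypothetical read counterpart using the same Beam FileSystems API; matchSingleFileSpec resolves an existing file to a ResourceId:

// Hypothetical counterpart: read the bytes back from cloud storage.
private byte[] readFromStorage(String path) throws IOException {
  MatchResult.Metadata metadata = FileSystems.matchSingleFileSpec(path);
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  try (ReadableByteChannel readerChannel = FileSystems.open(metadata.resourceId());
      WritableByteChannel writerChannel = Channels.newChannel(bytes)) {
    ByteStreams.copy(readerChannel, writerChannel);
  }
  return bytes.toByteArray();
}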
/**
* Decompresses the inputFile using the specified compression and outputs to the main output of
 * the {@link Decompress} doFn. Files output to the destination are first written as temp files
 * named "{extension}-temp-{filename}" within the output directory. If a file fails decompression, the
* filename and the associated error will be output to the dead-letter.
*
* @param inputFile The inputFile to decompress.
* @return A {@link ResourceId} which points to the resulting file from the decompression.
*/
private ResourceId decompress(ResourceId inputFile) throws IOException {
// Remove the compressed extension from the file. Example: demo.txt.gz -> demo.txt
String outputFilename = Files.getNameWithoutExtension(inputFile.toString());
// Resolve the necessary resources to perform the transfer.
ResourceId outputDir = FileSystems.matchNewResource(destinationLocation.get(), true);
ResourceId outputFile =
outputDir.resolve(outputFilename, StandardResolveOptions.RESOLVE_FILE);
ResourceId tempFile =
outputDir.resolve(Files.getFileExtension(inputFile.toString())
+ "-temp-" + outputFilename, StandardResolveOptions.RESOLVE_FILE);
// Resolve the compression
Compression compression = Compression.detect(inputFile.toString());
// Perform the copy of the decompressed channel into the destination.
try (ReadableByteChannel readerChannel =
compression.readDecompressed(FileSystems.open(inputFile))) {
try (WritableByteChannel writerChannel = FileSystems.create(tempFile, MimeTypes.TEXT)) {
ByteStreams.copy(readerChannel, writerChannel);
}
// Rename the temp file to the output file.
FileSystems.rename(
ImmutableList.of(tempFile),
ImmutableList.of(outputFile),
MoveOptions.StandardMoveOptions.IGNORE_MISSING_FILES);
} catch (IOException e) {
String msg = e.getMessage();
LOG.error("Error occurred during decompression of {}", inputFile.toString(), e);
throw new IOException(sanitizeDecompressionErrorMsg(msg, inputFile, compression));
}
return outputFile;
}
@ProcessElement
public void processElement(ProcessContext context) {
ResourceId inputFile = context.element().resourceId();
Compression compression = compressionValue.get();
// Add the compression extension to the output filename. Example: demo.txt -> demo.txt.gz
String outputFilename = inputFile.getFilename() + compression.getSuggestedSuffix();
// Resolve the necessary resources to perform the transfer
ResourceId outputDir = FileSystems.matchNewResource(destinationLocation.get(), true);
ResourceId outputFile =
outputDir.resolve(outputFilename, StandardResolveOptions.RESOLVE_FILE);
ResourceId tempFile =
outputDir.resolve("temp-" + outputFilename, StandardResolveOptions.RESOLVE_FILE);
// Perform the copy of the compressed channel to the destination.
try (ReadableByteChannel readerChannel = FileSystems.open(inputFile)) {
try (WritableByteChannel writerChannel =
compression.writeCompressed(FileSystems.create(tempFile, MimeTypes.BINARY))) {
// Execute the copy to the temporary file
ByteStreams.copy(readerChannel, writerChannel);
}
// Rename the temporary file to the output file
FileSystems.rename(ImmutableList.of(tempFile), ImmutableList.of(outputFile));
      // Output the path to the compressed file
context.output(outputFile.toString());
} catch (IOException e) {
LOG.error("Error occurred during compression of {}", inputFile.toString(), e);
context.output(DEADLETTER_TAG, KV.of(inputFile.toString(), e.getMessage()));
}
}
public static byte[] gzipdecompress(byte[] contentBytes) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (GZIPInputStream in = new GZIPInputStream(new ByteArrayInputStream(contentBytes))) {
      ByteStreams.copy(in, out);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
return out.toByteArray();
}
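A hypothetical compression counterpart using the same pattern in reverse:

// Hypothetical counterpart: gzip-compress a byte array.
public static byte[] gzipCompress(byte[] contentBytes) {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  try (GZIPOutputStream gzip = new GZIPOutputStream(out)) {
    ByteStreams.copy(new ByteArrayInputStream(contentBytes), gzip);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return out.toByteArray();
}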
private boolean shouldUpload ( final MavenReference ref ) throws Exception
{
System.out.format ( "baseline validation: %s%n", ref );
final String group = ref.getGroupId ().replace ( '.', '/' );
final String uri = String.format ( "http://central.maven.org/maven2/%s/%s/%s/%s", group, ref.getArtifactId (), ref.getVersion (), ref.toFileName () );
final URL url = new URL ( uri );
final HttpURLConnection con = openConnection ( url );
con.setAllowUserInteraction ( false );
con.setConnectTimeout ( getInteger ( "maven.central.connectTimeout", getInteger ( "maven.central.timeout", 0 ) ) );
con.setReadTimeout ( getInteger ( "maven.central.readTimeout", getInteger ( "maven.central.timeout", 0 ) ) );
con.connect ();
try
{
final int rc = con.getResponseCode ();
System.out.format ( "\t%s -> %s%n", url, rc );
if ( rc == 404 )
{
// file is not there ... upload
return true;
}
final Path tmp = Files.createTempFile ( null, ".jar" );
try
{
try ( final InputStream in = con.getInputStream ();
final OutputStream out = Files.newOutputStream ( tmp ) )
{
ByteStreams.copy ( in, out );
}
performBaselineCheck ( makeJarFile ( makeVersionBase ( ref ), ref ), tmp );
}
finally
{
Files.deleteIfExists ( tmp );
}
}
finally
{
con.disconnect ();
}
// don't upload, since the bundle is already there
return false;
}
/** Unzip the contents into the specified destination directory. */
public static IStatus unzip(File zip, File destination, IProgressMonitor monitor) {
SubMonitor progress = SubMonitor.convert(monitor);
if (!destination.exists()) {
if (!destination.mkdirs()) {
return StatusUtil.error(ZipUtil.class, "Unable to create destination: " + destination);
}
} else if (!destination.isDirectory()) {
return StatusUtil.error(ZipUtil.class, "Destination is not a directory: " + destination);
}
try (ZipFile zipFile = new ZipFile(zip)) {
String canonicalDestination = destination.getCanonicalPath();
progress.setWorkRemaining(zipFile.size());
for (Enumeration<? extends ZipEntry> entries = zipFile.entries(); entries.hasMoreElements()
&& !progress.isCanceled();) {
ZipEntry entry = entries.nextElement();
File entryLocation = new File(destination, entry.getName());
if (!entryLocation.getCanonicalPath().startsWith(canonicalDestination + File.separator)) {
return StatusUtil.error(
ZipUtil.class, "Blocked unzipping file outside the destination: " + entry.getName());
}
if (entry.isDirectory()) {
if (!entryLocation.exists()) {
if (!entryLocation.mkdirs()) {
return StatusUtil.error(ZipUtil.class,
"Unable to create destination: " + entryLocation);
}
} else if (!entryLocation.isDirectory()) {
return StatusUtil.error(ZipUtil.class,
"Destination is not a directory: " + entryLocation);
}
} else {
try (InputStream input = zipFile.getInputStream(entry);
FileOutputStream output = new FileOutputStream(entryLocation)) {
ByteStreams.copy(input, output);
}
}
progress.worked(1);
}
return Status.OK_STATUS;
} catch (IOException ex) {
return StatusUtil.error(ZipUtil.class, "Unable to extract zip file: " + zip, ex);
}
}
public static void main(String[] args) throws Exception {
String pprofCmd = args.length == 0 ? "/bin/pprof" : args[0];
if (!new File(pprofCmd).exists()) {
throw new AssertionError("no pprof command: " + pprofCmd);
}
// This test will fail during profiling of the Java tests
// because a process (the JVM) can have only one profiler.
// That's ok; just ignore it.
// Start writing profile to temporary file.
File profile = java.io.File.createTempFile("pprof", ".gz", null);
OutputStream prof = new FileOutputStream(profile);
Starlark.startCpuProfile(prof, Duration.ofMillis(10));
// This program consumes about 3s of CPU.
ParserInput input =
ParserInput.fromLines(
"x = [0]", //
"",
"def f():",
" for i in range(10000):",
" g()",
"",
"def g():",
" list(range(10000))",
" int(3)",
" sorted(range(10000))",
"",
"f()");
// Execute the workload.
Module module = Module.create();
try (Mutability mu = Mutability.create("test")) {
StarlarkThread thread = new StarlarkThread(mu, StarlarkSemantics.DEFAULT);
EvalUtils.exec(input, FileOptions.DEFAULT, module, thread);
}
    Starlark.stopCpuProfile();
    prof.close(); // release the temporary profile file before pprof reads it
// Run pprof -top. Typical output (may vary by pprof release):
//
// Type: CPU
// Time: Jan 21, 2020 at 11:08am (PST)
// Duration: 3.26s, Total samples = 2640ms (80.97%)
// Showing nodes accounting for 2640ms, 100% of 2640ms total
// flat flat% sum% cum cum%
// 1390ms 52.65% 52.65% 1390ms 52.65% sorted
// 960ms 36.36% 89.02% 960ms 36.36% list
// 220ms 8.33% 97.35% 220ms 8.33% range
// 70ms 2.65% 100% 70ms 2.65% int
// 0 0% 100% 2640ms 100% <unknown>
// 0 0% 100% 2640ms 100% f
// 0 0% 100% 2640ms 100% g
// Runtime.exec is deprecated at Google but its open-source replacement is not yet available.
@SuppressWarnings("RuntimeExec")
Process pprof =
Runtime.getRuntime()
.exec(pprofCmd + " -top " + profile, /*envp=*/ new String[0], /*dir=*/ null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
ByteStreams.copy(pprof.getInputStream(), out);
String got = out.toString(); // encoding undefined but unimportant---result is ASCII
// We'll assert that a few key substrings are present.
boolean ok = true;
for (String want : new String[] {"flat%", "sorted", "range"}) {
if (!got.contains(want)) {
System.err.println("pprof output does not contain substring: " + got);
ok = false;
}
}
if (!ok) {
System.err.println("pprof output:\n" + out);
System.exit(1);
}
}