Source code examples for class org.apache.hadoop.fs.FsUrlStreamHandlerFactory

The examples below show how the org.apache.hadoop.fs.FsUrlStreamHandlerFactory API is used in real projects; you can also follow the links through to GitHub to read the full source.
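
Before the project excerpts, here is a minimal, self-contained sketch of the core pattern: register FsUrlStreamHandlerFactory once, and java.net.URL can then open hdfs:// (and other Hadoop FileSystem) URLs. The NameNode address and file path are placeholders, not taken from any of the projects below.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public class HdfsUrlDemo {
    public static void main(String[] args) throws Exception {
        // Register Hadoop's handler factory. This can be done at most once
        // per JVM; a second call to setURLStreamHandlerFactory throws an Error.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());

        // Placeholder NameNode address and path -- substitute your own.
        URL url = new URL("hdfs://namenode:8020/user/demo/hello.txt");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}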

Example 1  Project: rheem   File: HadoopFileSystem.java
/**
 * Make sure that this instance is initialized. This is particularly required to use HDFS {@link URL}s.
 */
public void ensureInitialized() {
    if (this.isInitialized) return;

    // Add handler for HDFS URL for java.net.URL
    LoggerFactory.getLogger(HadoopFileSystem.class).info("Adding handler for HDFS URLs.");
    try {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    } catch (Throwable t) {
        LoggerFactory.getLogger(HadoopFileSystem.class).error(
                "Could not set URL stream handler factory.", t
        );
    } finally {
        this.isInitialized = true;
    }
}
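
Note the catch (Throwable t): URL.setURLStreamHandlerFactory throws a java.lang.Error, not an Exception, when a factory has already been installed in the JVM, so catching a mere Exception would not survive a second registration attempt. The finally block marks the instance initialized either way, ensuring the registration is tried only once per HadoopFileSystem instance.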
 
Example 2  Project: geowave   File: SparkIngestDriver.java
public static void setHdfsURLStreamHandlerFactory() throws NoSuchFieldException,
    SecurityException, IllegalArgumentException, IllegalAccessException {
  final Field factoryField = URL.class.getDeclaredField("factory");
  factoryField.setAccessible(true);
  // HP Fortify "Access Control" false positive: the accessibility change
  // here is necessary, and has been reviewed and judged to be safe.

  final URLStreamHandlerFactory urlStreamHandlerFactory =
      (URLStreamHandlerFactory) factoryField.get(null);

  if (urlStreamHandlerFactory == null) {
    URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
  } else {
    try {
      // The field was already made accessible above; overwrite the
      // installed factory with Hadoop's (another Fortify false positive).
      factoryField.set(null, new FsUrlStreamHandlerFactory());
    } catch (final IllegalAccessException e1) {
      LOGGER.error("Could not access URLStreamHandler factory field on URL class", e1);
      throw new RuntimeException(
          "Could not access URLStreamHandler factory field on URL class",
          e1);
    }
  }
}
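
This reflective overwrite exists because URL.setURLStreamHandlerFactory may only be called once per JVM: if Spark or another library got there first, the only way to install Hadoop's factory is to replace java.net.URL's private static "factory" field. A gentler variant (a sketch, not taken from the geowave source; HdfsUrlSetup and install are hypothetical names) first checks whether Hadoop's factory is already in place:

import java.lang.reflect.Field;
import java.net.URL;
import java.net.URLStreamHandlerFactory;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

public final class HdfsUrlSetup {

    private HdfsUrlSetup() {}

    // Idempotent variant: installs Hadoop's factory only if needed.
    // Relies on java.net.URL's private static "factory" field, so it can
    // fail on JDK 16+ where java.base is strongly encapsulated, unless
    // the module is opened (--add-opens java.base/java.net=ALL-UNNAMED).
    public static void install() throws ReflectiveOperationException {
        Field factoryField = URL.class.getDeclaredField("factory");
        factoryField.setAccessible(true);
        URLStreamHandlerFactory current =
                (URLStreamHandlerFactory) factoryField.get(null);
        if (current == null) {
            // Nothing installed yet: use the supported public API.
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        } else if (!(current instanceof FsUrlStreamHandlerFactory)) {
            // Something else got there first: overwrite reflectively.
            factoryField.set(null, new FsUrlStreamHandlerFactory());
        }
        // else: Hadoop's factory is already installed; nothing to do.
    }
}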
 
Example 3  Project: RDFS   File: TestUrlStreamHandler.java
/**
 * Test opening and reading from an InputStream through a hdfs:// URL.
 * <p>
 * First generate a file with some content through the FileSystem API, then
 * try to open and read the file through the URL stream API.
 * 
 * @throws IOException
 */
public void testDfsUrls() throws IOException {

  Configuration conf = new Configuration();
  MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
  FileSystem fs = cluster.getFileSystem();

  // Set up our own factory.
  // setURLStreamHandlerFactory can be called at most once per JVM,
  // so the new URLStreamHandler is valid for all test cases
  // in TestUrlStreamHandler.
  FsUrlStreamHandlerFactory factory =
      new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
  java.net.URL.setURLStreamHandlerFactory(factory);

  Path filePath = new Path("/thefile");

  try {
    byte[] fileContent = new byte[1024];
    for (int i = 0; i < fileContent.length; ++i)
      fileContent[i] = (byte) i;

    // First create the file through the FileSystem API
    OutputStream os = fs.create(filePath);
    os.write(fileContent);
    os.close();

    // Second, open and read the file content through the URL API
    URI uri = fs.getUri();
    URL fileURL =
        new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath
            .toString());

    InputStream is = fileURL.openStream();
    assertNotNull(is);

    byte[] bytes = new byte[4096];
    assertEquals(1024, is.read(bytes));
    is.close();

    for (int i = 0; i < fileContent.length; ++i)
      assertEquals(fileContent[i], bytes[i]);

    // Cleanup: delete the file
    fs.delete(filePath, false);

  } finally {
    fs.close();
    cluster.shutdown();
  }

}
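
One caveat in this test: it relies on a single is.read(bytes) call returning all 1024 bytes. HDFS typically satisfies that for a small file, but InputStream.read makes no such guarantee in general. A more robust reader would loop until the expected byte count arrives, for instance with DataInputStream.readFully (a sketch; readExactly is a hypothetical helper, not part of the test):

import java.io.DataInputStream;
import java.io.IOException;
import java.net.URL;

// Reads exactly n bytes from the URL, looping internally via readFully;
// throws EOFException if the stream ends before n bytes arrive.
static byte[] readExactly(URL url, int n) throws IOException {
    byte[] buf = new byte[n];
    try (DataInputStream in = new DataInputStream(url.openStream())) {
        in.readFully(buf);
    }
    return buf;
}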
 
Example 4  Project: hadoop-gpu   File: TestUrlStreamHandler.java

The hadoop-gpu fork carries the same TestUrlStreamHandler#testDfsUrls test as Example 3 above, verbatim; see Example 3 for the code.
 