org.apache.commons.io.Charsets#UTF_8 ( )源码实例Demo

下面列出了org.apache.commons.io.Charsets#UTF_8 ( ) 实例代码,或者点击链接到github查看源代码,也可以在右侧发表评论。

源代码1 项目: big-c   文件: GraphiteSink.java
/**
 * Opens the TCP connection to the configured Graphite server and wraps its
 * output stream in a UTF-8 writer, storing both in instance fields.
 *
 * @throws MetricsException if already connected, or if the connection
 *         attempt fails (the failure is counted first so the retry limit
 *         can take effect).
 */
public void connect() {
  if (isConnected()) {
    throw new MetricsException("Already connected to Graphite");
  }
  if (tooManyConnectionFailures()) {
    // return silently (there was ERROR in logs when we reached limit for the first time)
    return;
  }
  try {
    // Open a connection to Graphite server.
    socket = new Socket(serverHost, serverPort);
    writer = new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8);
  } catch (Exception e) {
    // Record the failure before rethrowing so the limit check above works.
    connectionFailures++;
    if (tooManyConnectionFailures()) {
      // first time when connection limit reached, report to logs
      LOG.error("Too many connection failures, would not try to connect again.");
    }
    throw new MetricsException("Error creating connection, "
        + serverHost + ":" + serverPort, e);
  }
}
 
源代码2 项目: elastic-rabbitmq   文件: DocumentService.java
/**
 * Runs a tenant-scoped search against {@code index}/{@code type} and maps the
 * response body to an {@link ESQueryResponse}.
 *
 * @param index  Elasticsearch index name
 * @param type   document type under the index
 * @param params query-string parameters; the tenant id is appended here
 * @param body   JSON search body, sent UTF-8 encoded
 * @return the parsed response, or null if the request failed with an IOException
 * @throws ElasticAPIException if the server answers with a status code above 299
 */
public ESQueryResponse query(String index, String type, Map<String, String> params, String body) {
    params = addTenantId2Param(params);

    HttpEntity requestBody = new StringEntity(body, Charsets.UTF_8);
    try {
        Response response = client.performRequest(
                "GET",
                index + "/" + type + "/_search",
                params,
                requestBody);

        int statusCode = response.getStatusLine().getStatusCode();
        if (statusCode > 299) {
            // FIX: the original concatenated onto a "{}" placeholder, so the
            // reason phrase was glued to a literal "{}"; pass it as an SLF4J
            // argument. Also corrected the copy-pasted "indexing" wording.
            logger.warn("Problem while querying a document: {}", response.getStatusLine().getReasonPhrase());
            throw new ElasticAPIException("Could not query a document, status code is " + statusCode);
        }

        // FIX: charset-less IOUtils.toString is deprecated and platform
        // dependent; decode the entity explicitly as UTF-8.
        ESQueryResponse esQueryResponse = gson.fromJson(
                IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8),
                ESQueryResponse.class);

        return esQueryResponse;
    } catch (IOException e) {
        logger.error("Failed to query document with type [" + type + "] body [" + body + "]: ", e);
    }
    return null;
}
源代码3 文件: SNOMEDUKMockValidationSupport.java（原文缺少此条标题，项目名不详）
/**
 * Loads a Bundle of FHIR StructureDefinitions from a classpath resource and
 * indexes them into {@code theCodeSystems} keyed by their canonical URL.
 *
 * @param theContext     FHIR context used to obtain an XML parser
 * @param theCodeSystems map populated with url -> StructureDefinition entries
 * @param theClasspath   classpath location of the bundle XML
 */
private void loadStructureDefinitions(FhirContext theContext, Map<String, StructureDefinition> theCodeSystems, String theClasspath) {
  logD("SNOMEDMOCK Loading structure definitions from classpath: "+ theClasspath);
  InputStream valuesetText = SNOMEDUKMockValidationSupport.class.getResourceAsStream(theClasspath);
  if (valuesetText == null) {
    log.warn("Unable to load resource: {}", theClasspath);
    return;
  }
  // FIX: the original never closed the stream/reader; try-with-resources
  // releases it even if parsing throws.
  try (InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8)) {
    Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
    for (BundleEntryComponent next : bundle.getEntry()) {
      if (next.getResource() instanceof StructureDefinition) {
        StructureDefinition nextSd = (StructureDefinition) next.getResource();
        // Drop the narrative text; only the structural definition is needed.
        nextSd.getText().setDivAsString("");
        String system = nextSd.getUrl();
        if (isNotBlank(system)) {
          theCodeSystems.put(system, nextSd);
        }
      }
    }
  } catch (IOException e) {
    // Closing the classpath stream failed; nothing useful the caller can do.
    log.warn("Unable to load resource: {}", theClasspath);
  }
}
 
源代码4 项目: hadoop   文件: FSDirMkdirOp.java
/**
 * For a given absolute path, create all ancestors as directories along the
 * path. All ancestors inherit their parent's permission plus an implicit
 * u+wx permission. This is used by create() and addSymlink() for
 * implicitly creating all directories along the path.
 *
 * For example, path="/foo/bar/spam", "/foo" is an existing directory,
 * "/foo/bar" is not existing yet, the function will create directory bar.
 *
 * @return a tuple which contains both the new INodesInPath (with all the
 * existing and newly created directories) and the last component in the
 * relative path. Or return null if there are errors.
 */
static Map.Entry<INodesInPath, String> createAncestorDirectories(
    FSDirectory fsd, INodesInPath iip, PermissionStatus permission)
    throws IOException {
  // Final path component, decoded from its raw byte representation.
  final String last = new String(iip.getLastLocalName(), Charsets.UTF_8);
  INodesInPath existing = iip.getExistingINodes();
  // Components of the requested path that do not exist yet.
  List<String> children = iip.getPath(existing.length(),
      iip.length() - existing.length());
  int size = children.size();
  if (size > 1) { // otherwise all ancestors have been created
    // Everything except the last missing component is an ancestor directory.
    List<String> directories = children.subList(0, size - 1);
    INode parentINode = existing.getLastINode();
    // Ensure that the user can traversal the path by adding implicit
    // u+wx permission to all ancestor directories
    existing = createChildrenDirectories(fsd, existing, directories,
        addImplicitUwx(parentINode.getPermissionStatus(), permission));
    if (existing == null) {
      // Creation failed; per the javadoc, errors are signalled with null.
      return null;
    }
  }
  return new AbstractMap.SimpleImmutableEntry<>(existing, last);
}
 
源代码5 项目: big-c   文件: LdapGroupsMapping.java
/**
 * Reads the LDAP bind password from the given file.
 *
 * @param pwFile path to the password file; an empty string means no file is
 *               configured and an anonymous bind should be used
 * @return the file contents with surrounding whitespace trimmed (editors
 *         commonly leave a trailing newline), or "" when no file is set
 * @throws RuntimeException if the file cannot be read
 */
String extractPassword(String pwFile) {
  if (pwFile.isEmpty()) {
    // If there is no password file defined, we'll assume that we should do
    // an anonymous bind
    return "";
  }

  StringBuilder password = new StringBuilder();
  // FIX: use the stdlib StandardCharsets.UTF_8 constant instead of the
  // deprecated Charsets.UTF_8; behavior is identical.
  try (Reader reader = new InputStreamReader(
      new FileInputStream(pwFile), java.nio.charset.StandardCharsets.UTF_8)) {
    int c;
    while ((c = reader.read()) > -1) {
      password.append((char) c);
    }
    return password.toString().trim();
  } catch (IOException ioe) {
    throw new RuntimeException("Could not read password file: " + pwFile, ioe);
  }
}
 
源代码6 项目: carbon-identity   文件: EndpointUtil.java
/**
 * Extracts the username and password info from the HTTP Authorization Header.
 *
 * @param authorizationHeader "Basic " + base64encode(username + ":" + password)
 * @return String array with client id and client secret.
 * @throws OAuthClientException if the header is not two space-delimited parts
 *         or the base64 payload cannot be decoded.
 */
public static String[] extractCredentialsFromAuthzHeader(String authorizationHeader)
        throws OAuthClientException {
    String[] parts = authorizationHeader.trim().split(" ");
    if (parts.length == 2) {
        byte[] decoded = Base64Utils.decode(parts[1].trim());
        if (decoded != null) {
            // "user:pass" decoded as UTF-8, split into its two components.
            return new String(decoded, Charsets.UTF_8).split(":");
        }
    }
    String errMsg = "Error decoding authorization header. Space delimited \"<authMethod> <base64Hash>\" format violated.";
    throw new OAuthClientException(errMsg);
}
 
源代码7 项目: griffin   文件: FSUtil.java
/**
 * Opens an HDFS file and returns an InputStream over at most
 * SAMPLE_ROW_COUNT of its leading lines (each terminated with '\n').
 *
 * @param path HDFS path to sample
 * @return a UTF-8 stream over the sampled lines
 * @throws IOException if HDFS access fails
 * @throws GriffinException.NotFoundException if the file does not exist
 */
public static InputStream getSampleInputStream(String path)
    throws IOException {
    checkHDFSConf();
    if (!isFileExist(path)) {
        // FIX: the original message had no "{}" anchor, so 'path' was
        // silently dropped from the log output.
        LOGGER.warn("HDFS file does not exist: {}", path);
        throw new GriffinException.NotFoundException(HDFS_FILE_NOT_EXIST);
    }
    FSDataInputStream missingData = fileSystem.open(new Path(path));
    // try-with-resources replaces the manual try/finally close.
    try (BufferedReader bufReader = new BufferedReader(
        new InputStreamReader(missingData, Charsets.UTF_8))) {
        String line;
        int rowCnt = 0;
        StringBuilder output = new StringBuilder(1024);

        while ((line = bufReader.readLine()) != null) {
            if (rowCnt >= SAMPLE_ROW_COUNT) {
                // Sample is full; stop instead of scanning the rest of the
                // file (the original kept reading only to count lines).
                break;
            }
            output.append(line);
            output.append("\n");
            rowCnt++;
        }

        return IOUtils.toInputStream(output, Charsets.UTF_8);
    }
}
 
源代码8 项目: hadoop   文件: BZip2Codec.java
/**
 * Optionally strips the "BZ" magic header from the front of the compressed
 * stream, and in BYBLOCK read mode also the following sub-header bytes, so
 * the decompressor sees only raw data. Streams without the header are
 * accepted unchanged.
 *
 * @return the (possibly advanced) buffered input stream
 * @throws IOException if the underlying buffered stream is missing
 */
private BufferedInputStream readStreamHeader() throws IOException {
  // We are flexible enough to allow the compressed stream not to
  // start with the header of BZ. So it works fine either we have
  // the header or not.
  if (super.in != null) {
    // Mark first so we can rewind if the leading bytes are not "BZ".
    bufferedIn.mark(HEADER_LEN);
    byte[] headerBytes = new byte[HEADER_LEN];
    int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
    if (actualRead != -1) {
      String header = new String(headerBytes, Charsets.UTF_8);
      if (header.compareTo(HEADER) != 0) {
        // Not a BZ header: rewind so these bytes are treated as data.
        bufferedIn.reset();
      } else {
        this.isHeaderStripped = true;
        // In case of BYBLOCK mode, we also want to strip off
        // remaining two character of the header.
        if (this.readMode == READ_MODE.BYBLOCK) {
          actualRead = bufferedIn.read(headerBytes, 0,
              SUB_HEADER_LEN);
          if (actualRead != -1) {
            this.isSubHeaderStripped = true;
          }
        }
      }
    }
  }

  if (bufferedIn == null) {
    throw new IOException("Failed to read bzip2 stream.");
  }

  return bufferedIn;

}
 
源代码9 项目: hadoop   文件: FSDirStatAndListingOp.java
/**
 * Resolves {@code srcArg}, enforces permissions, and returns a partial
 * directory listing resuming after the {@code startAfter} entry.
 *
 * @param fsd          the directory tree to list from
 * @param srcArg       path as supplied by the client (may be a reserved path)
 * @param startAfter   raw bytes of the last entry returned previously
 * @param needLocation whether block locations should be included
 * @return the listing for the resolved path
 * @throws DirectoryListingStartAfterNotFoundException if a reserved-path
 *         startAfter can no longer be resolved (e.g. the inode was deleted)
 * @throws IOException on resolution or permission failures
 */
static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
    byte[] startAfter, boolean needLocation) throws IOException {
  FSPermissionChecker pc = fsd.getPermissionChecker();
  byte[][] pathComponents = FSDirectory
      .getPathComponentsForReservedPath(srcArg);
  final String startAfterString = new String(startAfter, Charsets.UTF_8);
  final String src = fsd.resolvePath(pc, srcArg, pathComponents);
  final INodesInPath iip = fsd.getINodesInPath(src, true);

  // Get file name when startAfter is an INodePath
  if (FSDirectory.isReservedName(startAfterString)) {
    byte[][] startAfterComponents = FSDirectory
        .getPathComponentsForReservedPath(startAfterString);
    try {
      String tmp = FSDirectory.resolvePath(src, startAfterComponents, fsd);
      byte[][] regularPath = INode.getPathComponents(tmp);
      // Only the final component is needed as the resume point.
      startAfter = regularPath[regularPath.length - 1];
    } catch (IOException e) {
      // Possibly the inode is deleted
      throw new DirectoryListingStartAfterNotFoundException(
          "Can't find startAfter " + startAfterString);
    }
  }

  boolean isSuperUser = true;
  if (fsd.isPermissionEnabled()) {
    // Listing a directory needs READ_EXECUTE on it; for a file (or a
    // missing last inode) only traversal of the path is checked.
    if (iip.getLastINode() != null && iip.getLastINode().isDirectory()) {
      fsd.checkPathAccess(pc, iip, FsAction.READ_EXECUTE);
    } else {
      fsd.checkTraverse(pc, iip);
    }
    isSuperUser = pc.isSuperUser();
  }
  return getListing(fsd, iip, src, startAfter, needLocation, isSuperUser);
}
 
源代码10 项目: hadoop   文件: StreamPumper.java
/**
 * Drains the wrapped stream line by line, forwarding each line to the log:
 * stdout lines at INFO level, anything else at WARN level.
 *
 * @throws IOException if reading from the stream fails
 */
protected void pump() throws IOException {
  BufferedReader reader = new BufferedReader(
      new InputStreamReader(stream, Charsets.UTF_8));
  String line;
  while ((line = reader.readLine()) != null) {
    String message = logPrefix + ": " + line;
    if (type == StreamType.STDOUT) {
      log.info(message);
    } else {
      log.warn(message);
    }
  }
}
 
源代码11 项目: big-c   文件: FileBasedIPList.java
/**
 * Reads the lines in a file.
 *
 * @param fileName path of the file to read; may be null
 * @return lines in a String array; null if the file does not exist or if the
 *         file name is null
 * @throws IOException if reading the file fails (logged before rethrowing)
 */
private static String[] readLines(String fileName) throws IOException {
  if (fileName == null) {
    return null;
  }
  File file = new File(fileName);
  if (!file.exists()) {
    LOG.debug("Missing ip list file : "+ fileName);
    return null;
  }
  try (
      Reader fileReader = new InputStreamReader(
          new FileInputStream(file), Charsets.UTF_8);
      BufferedReader bufferedReader = new BufferedReader(fileReader)) {
    List<String> lines = new ArrayList<String>();
    for (String line = bufferedReader.readLine(); line != null;
        line = bufferedReader.readLine()) {
      lines.add(line);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Loaded IP list of size = " + lines.size() +
          " from file = " + fileName);
    }
    return lines.toArray(new String[lines.size()]);
  } catch (IOException ioe) {
    // Record the failure before propagating it to the caller.
    LOG.error(ioe);
    throw ioe;
  }
}
 
源代码12 项目: hadoop   文件: DefaultStringifier.java
@Override
public String toString(T obj) throws IOException {
  // Serialize into the reusable buffer, trim it to the written length,
  // then Base64-encode and render the result as a UTF-8 string.
  outBuf.reset();
  serializer.serialize(obj);
  int length = outBuf.getLength();
  byte[] copy = new byte[length];
  System.arraycopy(outBuf.getData(), 0, copy, 0, length);
  return new String(Base64.encodeBase64(copy), Charsets.UTF_8);
}
 
源代码13 项目: lams   文件: ReversedLinesFileReader.java
/**
 * Creates a ReversedLinesFileReader with the given block size and encoding.
 *
 * @param file
 *            the file to be read
 * @param blockSize
 *            size of the internal buffer (for ideal performance this should
 *            match with the block size of the underlying file system).
 * @param encoding
 *            the encoding of the file
 * @throws IOException  if an I/O error occurs
 * @throws UnsupportedEncodingException if the charset's multibyte layout
 *            could allow a newline byte inside a sequence (unsupported)
 * @since 2.3
 */
@SuppressWarnings("deprecation") // unavoidable until Java 7
public ReversedLinesFileReader(final File file, final int blockSize, final Charset encoding) throws IOException {
    this.blockSize = blockSize;
    this.encoding = encoding;

    // --- check & prepare encoding ---
    // byteDecrement is how far the backwards scan may step per iteration
    // without risking landing inside a multibyte character sequence.
    final Charset charset = Charsets.toCharset(encoding);
    final CharsetEncoder charsetEncoder = charset.newEncoder();
    final float maxBytesPerChar = charsetEncoder.maxBytesPerChar();
    if (maxBytesPerChar == 1f) {
        // all one byte encodings are no problem
        byteDecrement = 1;
    } else if (charset == Charsets.UTF_8) {
        // UTF-8 works fine out of the box, for multibyte sequences a second UTF-8 byte can never be a newline byte
        // http://en.wikipedia.org/wiki/UTF-8
        byteDecrement = 1;
    } else if(charset == Charset.forName("Shift_JIS") || // Same as for UTF-8
            // http://www.herongyang.com/Unicode/JIS-Shift-JIS-Encoding.html
            charset == Charset.forName("windows-31j") || // Windows code page 932 (Japanese)
            charset == Charset.forName("x-windows-949") || // Windows code page 949 (Korean)
            charset == Charset.forName("gbk") || // Windows code page 936 (Simplified Chinese)
            charset == Charset.forName("x-windows-950")) { // Windows code page 950 (Traditional Chinese)
        byteDecrement = 1;
    } else if (charset == Charsets.UTF_16BE || charset == Charsets.UTF_16LE) {
        // UTF-16 new line sequences are not allowed as second tuple of four byte sequences,
        // however byte order has to be specified
        byteDecrement = 2;
    } else if (charset == Charsets.UTF_16) {
        throw new UnsupportedEncodingException("For UTF-16, you need to specify the byte order (use UTF-16BE or " +
                "UTF-16LE)");
    } else {
        throw new UnsupportedEncodingException("Encoding " + encoding + " is not supported yet (feel free to " +
                "submit a patch)");
    }

    // NOTE: The new line sequences are matched in the order given, so it is important that \r\n is BEFORE \n
    newLineSequences = new byte[][] { "\r\n".getBytes(encoding), "\n".getBytes(encoding), "\r".getBytes(encoding) };

    avoidNewlineSplitBufferSize = newLineSequences[0].length;

    // Open file
    randomAccessFile = new RandomAccessFile(file, "r");
    totalByteLength = randomAccessFile.length();
    // The file is processed backwards in fixed-size blocks; the first block
    // read (the file's tail) may be shorter than blockSize.
    int lastBlockLength = (int) (totalByteLength % blockSize);
    if (lastBlockLength > 0) {
        totalBlockCount = totalByteLength / blockSize + 1;
    } else {
        totalBlockCount = totalByteLength / blockSize;
        if (totalByteLength > 0) {
            lastBlockLength = blockSize;
        }
    }
    currentFilePart = new FilePart(totalBlockCount, lastBlockLength, null);

}
 
源代码14 项目: Nicobar   文件: JarScriptArchive.java
/**
 * Build the {@link JarScriptArchive}.
 *
 * If no module spec was supplied to the builder, one is read from a spec
 * file inside the jar (the configured name, or the default); failing that,
 * a default spec is derived from the jar's base file name.
 *
 * @return the assembled archive
 * @throws IOException if the jar or its spec entry cannot be read
 */
public JarScriptArchive build() throws IOException {
    ScriptModuleSpec buildModuleSpec = moduleSpec;
    String moduleSpecEntry = null;

    if (buildModuleSpec == null){
        String buildSpecFileName = specFileName != null ? specFileName : DEFAULT_MODULE_SPEC_FILE_NAME;
        // attempt to find a module spec in the jar file
        JarFile jarFile = new JarFile(jarPath.toFile());
        try {
            ZipEntry zipEntry = jarFile.getEntry(buildSpecFileName);
            if (zipEntry != null) {
                moduleSpecEntry = buildSpecFileName;
                InputStream inputStream = jarFile.getInputStream(zipEntry);
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                IOUtils.copy(inputStream, outputStream);
                byte[] bytes = outputStream.toByteArray();
                if (bytes != null && bytes.length > 0) {
                    // Deserialize the spec JSON with the configured (or default) serializer.
                    String json = new String(bytes, Charsets.UTF_8);
                    ScriptModuleSpecSerializer buildSpecSerializer = specSerializer != null  ? specSerializer :
                        DEFAULT_SPEC_SERIALIZER;
                    buildModuleSpec = buildSpecSerializer.deserialize(json);
                }
            }
        } finally {
            IOUtils.closeQuietly(jarFile);
        }
        // create a default module spec
        if (buildModuleSpec == null) {
            String jarFileName = this.jarPath.getFileName().toString();
            // Strip the ".jar" suffix so the module id is just the base name.
            if (jarFileName.endsWith(JAR_FILE_SUFFIX)) {
              jarFileName = jarFileName.substring(0, jarFileName.lastIndexOf(JAR_FILE_SUFFIX));
            }

            ModuleId moduleId = ModuleId.fromString(jarFileName);
            buildModuleSpec = new ScriptModuleSpec.Builder(moduleId).build();
        }
    }
    // Fall back to the jar's modification time if no create time was given.
    long buildCreateTime = createTime;
    if (buildCreateTime <= 0) {
        buildCreateTime = Files.getLastModifiedTime(jarPath).toMillis();
    }
    return new JarScriptArchive(buildModuleSpec, jarPath, moduleSpecEntry, buildCreateTime);
}
 
源代码15 项目: hadoop   文件: XDR.java
/** Reads a length-prefixed opaque byte sequence and decodes it as UTF-8 text. */
public String readString() {
  byte[] raw = readVariableOpaque();
  return new String(raw, Charsets.UTF_8);
}
 
源代码16 项目: hadoop   文件: ShellBasedIdMapping.java
/**
 * Parses a static uid/gid mapping file into a {@link StaticMapping}.
 * Each mapping line has the form "[uid|gid] [remote id] [local id]";
 * blank lines and comments are skipped, unparseable lines are logged
 * and ignored.
 *
 * @param staticMapFile the UTF-8 mapping file to read
 * @return the parsed uid and gid mappings, keyed by local id
 * @throws IOException if the file cannot be read
 */
static StaticMapping parseStaticMap(File staticMapFile)
    throws IOException {
  
  Map<Integer, Integer> uidMapping = new HashMap<Integer, Integer>();
  Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
  
  // IMPROVED: try-with-resources replaces the manual try/finally close.
  try (BufferedReader in = new BufferedReader(new InputStreamReader(
      new FileInputStream(staticMapFile), Charsets.UTF_8))) {
    String line;
    while ((line = in.readLine()) != null) {
      // Skip entirely empty and comment lines.
      if (EMPTY_LINE.matcher(line).matches() ||
          COMMENT_LINE.matcher(line).matches()) {
        continue;
      }
      
      Matcher lineMatcher = MAPPING_LINE.matcher(line);
      if (!lineMatcher.matches()) {
        LOG.warn("Could not parse line '" + line + "'. Lines should be of " +
            "the form '[uid|gid] [remote id] [local id]'. Blank lines and " +
            "everything following a '#' on a line will be ignored.");
        continue;
      }
      
      // We know the line is fine to parse without error checking like this
      // since it matched the regex above.
      String firstComponent = lineMatcher.group(1);
      int remoteId = parseId(lineMatcher.group(2));
      int localId = parseId(lineMatcher.group(3));
      // Note: maps are keyed by the LOCAL id, pointing back to the remote id.
      if (firstComponent.equals("uid")) {
        uidMapping.put(localId, remoteId);
      } else {
        gidMapping.put(localId, remoteId);
      }
    }
  }
  
  return new StaticMapping(uidMapping, gidMapping);
}
 
源代码17 项目: hadoop   文件: SaslRpcServer.java
/** Base64-encodes the raw identifier bytes and renders them as UTF-8 text. */
static String encodeIdentifier(byte[] identifier) {
  byte[] encoded = Base64.encodeBase64(identifier);
  return new String(encoded, Charsets.UTF_8);
}
 
源代码18 项目: SI   文件: CoapMessage.java
/**
 * Captures a CoAP exchange for display: records direction, type, message id
 * and token, renders the option set as "name: v1, v2 - name2: v3", and keeps
 * the payload as text when it is printable ASCII, otherwise as a hex dump.
 */
private CoapMessage(boolean incoming, Type type, int mId, String token, OptionSet options, byte[] payload) {
    this.incoming = incoming;
    this.timestamp = System.currentTimeMillis();
    this.type = type.toString();
    this.mId = mId;
    this.token = token;

    if (options != null) {
        List<Option> sorted = options.asSortedList();
        if (!sorted.isEmpty()) {
            // Group the option values under their registry name.
            Map<String, List<String>> byName = new HashMap<>();
            for (Option option : sorted) {
                String name = OptionNumberRegistry.toString(option.getNumber());
                List<String> bucket = byName.get(name);
                if (bucket == null) {
                    bucket = new ArrayList<>();
                    byName.put(name, bucket);
                }
                bucket.add(option.toValueString());
            }

            StringBuilder rendered = new StringBuilder();
            for (Entry<String, List<String>> entry : byName.entrySet()) {
                if (rendered.length() > 0) {
                    rendered.append(" - ");
                }
                rendered.append(entry.getKey()).append(": ")
                        .append(StringUtils.join(entry.getValue(), ", "));
            }
            this.options = rendered.toString();

        }
    }
    if (payload != null && payload.length > 0) {
        String text = new String(payload, Charsets.UTF_8);
        // Printable ASCII is shown verbatim; binary payloads as hex.
        this.payload = StringUtils.isAsciiPrintable(text)
                ? text
                : "Hex:" + Hex.encodeHexString(payload);
    }
}
 
源代码19 项目: hadoop   文件: KMSClientProvider.java
/** Pretty-prints {@code map} as JSON onto {@code os} through a UTF-8 writer. */
private static void writeJson(Map map, OutputStream os) throws IOException {
  ObjectMapper jsonMapper = new ObjectMapper();
  Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
  jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
}
 
源代码20 项目: big-c   文件: KMSClientProvider.java
/** Serializes {@code map} to {@code os} as pretty-printed UTF-8 JSON. */
private static void writeJson(Map map, OutputStream os) throws IOException {
  Writer out = new OutputStreamWriter(os, Charsets.UTF_8);
  new ObjectMapper().writerWithDefaultPrettyPrinter().writeValue(out, map);
}
 
 同类方法