下面列出了 org.apache.commons.lang3.ArrayUtils#addAll() 的示例代码;也可以点击链接到 GitHub 查看源代码,或在右侧发表评论。
/**
 * Signs a ConfigUpdateEnvelope and returns a copy carrying the new signature.
 *
 * The added ConfigSignature consists of a SignatureHeader and a Signature,
 * where the Signature covers the concatenation of the SignatureHeader bytes
 * and the envelope's ConfigUpdate bytes.
 *
 * @param originalEnvelope the envelope to sign; the input object is not modified
 * @param signer           provides the signature header and signs the payload
 * @return a new ConfigUpdateEnvelope with the additional ConfigSignature appended
 * @deprecated use of the ILocalSigner interface is scheduled for removal in
 *             favour of the standard ISigningIdentity interface
 */
@Deprecated
public static Configtx.ConfigUpdateEnvelope signConfigUpdateEnvelope(Configtx.ConfigUpdateEnvelope originalEnvelope,
        ILocalSigner signer) {
    // Copy the original envelope so the new signature can be appended to it.
    Configtx.ConfigUpdateEnvelope.Builder envelopeBuilder = Configtx.ConfigUpdateEnvelope.newBuilder(originalEnvelope);
    // Build the ConfigSignature from a fresh SignatureHeader.
    Configtx.ConfigSignature.Builder configSignatureBuilder = Configtx.ConfigSignature.newBuilder();
    Common.SignatureHeader signatureHeader = signer.newSignatureHeader();
    // Signing payload = SignatureHeader bytes followed by ConfigUpdate bytes.
    byte[] original = ArrayUtils.addAll(signatureHeader.toByteArray(), originalEnvelope.getConfigUpdate().toByteArray());
    byte[] signature = signer.sign(original);
    configSignatureBuilder.setSignatureHeader(signatureHeader.toByteString());
    configSignatureBuilder.setSignature(ByteString.copyFrom(signature));
    Configtx.ConfigSignature configSignature = configSignatureBuilder.build();
    // A ConfigUpdateEnvelope holds one ConfigUpdate plus any number of
    // ConfigSignatures; append this one.
    envelopeBuilder.addSignatures(configSignature);
    return envelopeBuilder.build();
}
@Override
public byte[] serializeContent() {
    // To avoid repeatedly reallocating and copying a growing result array,
    // first serialize every child into a list and track the total size, then
    // concatenate everything exactly once.
    List<byte[]> elements = new LinkedList<byte[]>();
    int length = 0;
    for (NBTElement element : content.values()) {
        // Each child is its serializing prefix followed by its payload.
        byte[] serialized = ArrayUtils.addAll(element.serializingPrefix(), element.serializeContent());
        elements.add(serialized);
        length += serialized.length;
    }
    // One extra byte for the trailing end tag.
    byte[] res = new byte[length + 1];
    int index = 0;
    for (byte[] chunk : elements) {
        // System.arraycopy instead of a hand-rolled byte loop.
        System.arraycopy(chunk, 0, res, index, chunk.length);
        index += chunk.length;
    }
    // end tag
    res[res.length - 1] = 0;
    return res;
}
/**
 * Reads the next ciphertext chunk from the proxied stream, decrypts it and
 * exposes the plaintext through {@code buffer}.
 *
 * @return the number of plaintext bytes made available, or {@link IOUtils#EOF}
 *         when a previous invocation already processed the final chunk
 * @throws IOException if reading fails or the crypto layer reports an error
 */
private int readNextChunk() throws IOException {
// Read up to one chunk of ciphertext into a fixed-size buffer.
final ByteBuffer ciphertextBuf = ByteBuffer.allocate(SDSSession.DEFAULT_CHUNKSIZE);
final int read = IOUtils.read(proxy, ciphertextBuf.array());
// NOTE(review): lastread is a field updated at the very end of this method; a
// zero recorded by the PREVIOUS call means the final (empty) chunk was already
// handled, so this call reports EOF — confirm this check-before-use ordering
// is intentional.
if(lastread == 0) {
return IOUtils.EOF;
}
ciphertextBuf.position(read);
ciphertextBuf.flip();
try {
final PlainDataContainer pDataContainer;
if(read == 0) {
// End of stream: drain any bytes still buffered in the cipher, then finalize
// with the stored tag (presumably an authentication tag — verify against the
// cipher implementation) and concatenate both plaintext parts.
final PlainDataContainer c1 = cipher.processBytes(createEncryptedDataContainer(ciphertextBuf.array(), read, null));
final PlainDataContainer c2 = cipher.doFinal(new EncryptedDataContainer(null, tag));
pDataContainer = new PlainDataContainer(ArrayUtils.addAll(c1.getContent(), c2.getContent()));
}
else {
// Regular chunk: decrypt in streaming mode.
pDataContainer = cipher.processBytes(createEncryptedDataContainer(ciphertextBuf.array(), read, null));
}
// Publish the plaintext through the instance read buffer.
final byte[] content = pDataContainer.getContent();
buffer = ByteBuffer.allocate(content.length);
buffer.put(content);
buffer.flip();
// Record how much ciphertext was consumed so the next call can detect EOF.
lastread = read;
return content.length;
}
catch(CryptoException e) {
// Preserve the crypto failure as the cause of the IOException.
throw new IOException(e);
}
}
private String[] expandServerStartupCommandList(Map<String, String> commandMap) {
    // Nothing to expand without any configured commands.
    if (commandMap == null || commandMap.size() == 0) {
        return null;
    }
    // Temporarily pull the raw argument string out of the map so it is not
    // rendered as a key=value parameter; it is split on whitespace instead.
    String cmdArg = null;
    String[] cmdParaArray = null;
    if (commandMap.containsKey(CMD_ARG)) {
        cmdArg = commandMap.get(CMD_ARG);
        cmdParaArray = cmdArg.trim().split("\\s+");
        commandMap.remove(CMD_ARG);
    }
    // Render every remaining entry as "key" (empty value) or "key=value".
    String[] parameterArray = new String[commandMap.size()];
    int idx = 0;
    for (Map.Entry<String, String> entry : commandMap.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        parameterArray[idx++] = (value == null || value.isEmpty()) ? key : key + "=" + value;
    }
    // Put the removed entry back so the caller's map is left unchanged.
    if (cmdArg != null) {
        commandMap.put(CMD_ARG, cmdArg);
    }
    // Append the split argument tokens, if any, after the key=value parameters.
    if (cmdParaArray == null || cmdParaArray.length == 0) {
        return parameterArray;
    }
    return ArrayUtils.addAll(parameterArray, cmdParaArray);
}
private SignedSDSData getSDSData(SignedScDepSpec.SignedSmartContractDeploymentSpec ssds) throws JulongChainException {
    if (ssds == null) {
        log.error("Null sds");
        return null;
    }
    // Parse the embedded deployment spec; any parse failure is rethrown as a
    // JulongChainException with the original cause preserved.
    SmartContractPackage.SmartContractDeploymentSpec sds;
    try {
        sds = SmartContractPackage.SmartContractDeploymentSpec.parseFrom(ssds.getSmartContractDeploymentSpec());
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new JulongChainException(e);
    }
    // Assemble the SignedSDSData from the parsed spec.
    SignedSDSData result = new SignedSDSData();
    result.setCodeHash(sds.getCodePackage().toByteArray());
    // Metadata hash input is the UTF-8 contract name immediately followed by
    // the UTF-8 contract version.
    byte[] nameBytes = sds.getSmartContractSpec().getSmartContractId().getName().getBytes(StandardCharsets.UTF_8);
    byte[] versionBytes = sds.getSmartContractSpec().getSmartContractId().getVersion().getBytes(StandardCharsets.UTF_8);
    result.setMetaDataHash(ArrayUtils.addAll(nameBytes, versionBytes));
    if (ssds.getInstantiationPolicy() == null) {
        log.error(String.format("instantiation policy can not be null for smartcontract (%s:%s)",
                sds.getSmartContractSpec().getSmartContractId().getName(),
                sds.getSmartContractSpec().getSmartContractId().getVersion()
        ));
        return null;
    }
    // Concatenate every owner endorser identity into one byte array.
    byte[] signatureHash = new byte[0];
    for (ProposalResponsePackage.Endorsement endorsement : ssds.getOwnerEndorsementsList()) {
        signatureHash = ArrayUtils.addAll(signatureHash, endorsement.getEndorser().toByteArray());
    }
    result.setSignatureHash(signatureHash);
    return result;
}
@Override
public String[] extractPartitions(Message message) throws Exception {
    // The payload must be a JSON object; anything else is a hard failure.
    JSONObject json = (JSONObject) JSONValue.parse(message.getPayload());
    if (json == null) {
        throw new RuntimeException("Failed to parse message as Json object");
    }
    String eventType = extractEventType(json);
    long timestampMillis = extractTimestampMillis(json);
    String[] timePartitions = generatePartitions(timestampMillis, mUsingHourly, mUsingMinutely);
    // The event type is the leading partition component, followed by the
    // time-based components.
    return (String[]) ArrayUtils.addAll(new String[]{eventType}, timePartitions);
}
/**
 * Get the list of allowed locales as controlled by config params for {@value #SAKAI_LOCALES_KEY} and {@value #SAKAI_LOCALES_MORE}
 * @return an array of all allowed Locales for this installation
 */
public Locale[] getSakaiLocales() {
    String localesStr = getString(SAKAI_LOCALES_KEY, SakaiLocales.SAKAI_LOCALES_DEFAULT);
    if (localesStr == null) {
        // an explicit empty "locales=" setting: keep only the default locale
        localesStr = "";
    } else if (StringUtils.isBlank(localesStr)) {
        // missing or whitespace-only: fall back to the full default list
        localesStr = SakaiLocales.SAKAI_LOCALES_DEFAULT;
    }
    // Entries are trimmed later by getLocaleFromString, so a plain split is fine.
    String[] locales = StringUtils.split(localesStr, ',');
    String[] extraLocales = getStrings(SAKAI_LOCALES_MORE);
    locales = (String[]) ArrayUtils.addAll(locales, extraLocales);
    HashSet<Locale> localeSet = new HashSet<Locale>();
    // The platform default locale is always allowed.
    localeSet.add(Locale.getDefault());
    if (!ArrayUtils.isEmpty(locales)) {
        // Convert each configured string into a Locale; the set removes duplicates.
        for (String localeString : locales) {
            localeSet.add(getLocaleFromString(localeString));
        }
    }
    // Sort for a stable, comparator-defined ordering.
    Locale[] localesArray = localeSet.toArray(new Locale[localeSet.size()]);
    Arrays.sort(localesArray, new LocaleComparator());
    return localesArray;
}
@Test(dataProvider = "dataSiteFrequency", groups = "createsTempFiles")
public void testHDF5AndTSVOutput(final String[] testArguments, final File expectedHDF5File, final File expectedTSVFile) {
    // Reserve fresh output paths and delete them so the tool creates the files.
    final File allelicPoNHDF5File = createTempFile("create-allelic-pon-test", ".pon");
    allelicPoNHDF5File.delete();
    final File allelicPoNTSVFile = createTempFile("create-allelic-pon-test", ".tsv");
    allelicPoNTSVFile.delete();
    // Common arguments: pulldown inputs plus both output destinations.
    final String[] commonArguments = ArrayUtils.addAll(PULLDOWN_FILE_ARGUMENTS,
            "--" + StandardArgumentDefinitions.OUTPUT_LONG_NAME, allelicPoNHDF5File.getAbsolutePath(),
            "--" + TSV_OUTPUT_FILE_LONG_NAME, allelicPoNTSVFile.getAbsolutePath());
    final String[] arguments = ArrayUtils.addAll(commonArguments, testArguments);
    runCommandLine(arguments);
    // First run: both outputs must match the expected panels of normals.
    final AllelicPanelOfNormals resultHDF5 = AllelicPanelOfNormals.read(allelicPoNHDF5File);
    final AllelicPanelOfNormals expectedHDF5 = AllelicPanelOfNormals.read(expectedHDF5File);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(resultHDF5, expectedHDF5);
    final AllelicPanelOfNormals resultTSV = AllelicPanelOfNormals.read(allelicPoNTSVFile);
    final AllelicPanelOfNormals expectedTSV = AllelicPanelOfNormals.read(expectedTSVFile);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(resultTSV, expectedTSV);
    // Second run overwrites the existing outputs; the results must be unchanged.
    runCommandLine(arguments);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(AllelicPanelOfNormals.read(allelicPoNHDF5File), expectedHDF5);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(AllelicPanelOfNormals.read(allelicPoNTSVFile), expectedTSV);
}
/**
 * Builds a batched matrix-multiply op over pairs of matrices.
 *
 * The {@code matrices} array is expected as [A_1..A_n, B_1..B_n]; all A
 * matrices must share one shape and all B matrices another.
 *
 * @param sameDiff   owning SameDiff instance
 * @param matrices   2*n input matrices: n left-hand sides followed by n right-hand sides
 * @param transposeA whether to transpose the A matrices
 * @param transposeB whether to transpose the B matrices
 */
public BatchMmul(SameDiff sameDiff,
                 SDVariable[] matrices,
                 boolean transposeA,
                 boolean transposeB) {
    // Prepend per-pair alpha (1.0) and beta (0.0) scaling vectors to the op inputs.
    super(null, sameDiff, ArrayUtils.addAll(
            new SDVariable[]{
                    sameDiff.var(Nd4j.ones(matrices[0].dataType(), matrices.length / 2)), // alphas
                    sameDiff.var(Nd4j.zeros(matrices[1].dataType(), matrices.length / 2))}, // betas
            matrices));
    // BUGFIX: the original message concatenation produced "needsto be" — a
    // space was missing between the two string literals.
    Preconditions.checkState(matrices.length % 2 == 0, "The number of provided matrices needs " +
            "to be divisible by two.");
    this.batchSize = matrices.length / 2;
    // All A matrices must have the same shape as the first one.
    SDVariable firstMatrix = matrices[0];
    long[] firstShape = firstMatrix.getShape();
    for (int i = 0; i < batchSize; i++) {
        Preconditions.checkState(Arrays.equals(firstShape, matrices[i].getShape()));
    }
    // All B matrices must have the same shape as the last one.
    SDVariable lastMatrix = matrices[2 * batchSize - 1];
    long[] lastShape = lastMatrix.getShape();
    for (int i = batchSize; i < 2 * batchSize; i++) {
        Preconditions.checkState(Arrays.equals(lastShape, matrices[i].getShape()));
    }
    this.transposeA = transposeA ? 1 : 0;
    this.transposeB = transposeB ? 1 : 0;
    // Output dimensions depend on whether the inputs are transposed.
    this.M = transposeA ? (int) firstShape[1] : (int) firstShape[0];
    this.N = transposeA ? (int) firstShape[0] : (int) firstShape[1];
    this.K = transposeB ? (int) lastShape[0] : (int) lastShape[1];
    addArgs();
}
private String[] getDataArray()
{
    // Fixed metadata columns first, then the variable-length attribute list.
    // Optional fields serialize as null when unset.
    String[] fixedFields = new String[] {
            METADATA_VERSION.toString(),
            type.toString(),
            filesize != null ? filesize.toString() : null,
            creationtime != null ? creationtime.toString() : null,
            modifytime != null ? modifytime.toString() : null,
            accesstime != null ? accesstime.toString() : null,
            checksum };
    return ArrayUtils.addAll(fixedFields, attributes);
}
public Parse[] getAllChildren(Parse[] parseAr){
    // Depth-first collection: the input nodes themselves come first, followed
    // by the recursively gathered descendants of each node in order.
    Parse[] collected = parseAr;
    for (Parse parse : parseAr) {
        Parse[] descendants = getAllChildren(parse.getChildren());
        collected = ArrayUtils.addAll(collected, descendants);
    }
    return collected;
}
@Test
public void testMultiLayerIndex(){
    // Convert a layered SVGZ with both the layer (-l) and index (-i) options
    // enabled; the application must exit successfully.
    Svg2Vector_IS app = new Svg2Vector_IS();
    String[] extraArgs = {
            "-f", "src/test/resources/svg-files/time-interval-based.svgz",
            "-d", OUT_DIR_PREFIX + "layers-index",
            "-l", "-i"
    };
    String[] args = ArrayUtils.addAll(STD_OPTIONS, extraArgs);
    assertEquals(0, app.executeApplication(args));
}
@Override
protected String[] getExcludeConfigurations() {
    // Extend the parent's exclusions with the JTA auto-configuration class.
    String[] inherited = super.getExcludeConfigurations();
    return ArrayUtils.addAll(inherited,
            "org.springframework.boot.autoconfigure.transaction.jta.JtaAutoConfiguration");
}
protected String[] addArgs(String[] cmds) {
    // Append the deployment definition's arguments after the given commands.
    return ArrayUtils.addAll(cmds, deployDefinition.getArgs());
}
@Override
public DrillBuf[] getBuffers(boolean clear) {
    // Offset buffers come first, then the buffers owned by the parent vector.
    DrillBuf[] offsetBuffers = offsets.getBuffers(clear);
    DrillBuf[] parentBuffers = super.getBuffers(clear);
    return ArrayUtils.addAll(offsetBuffers, parentBuffers);
}
private String[] mergerArrays(String[] array1, String[] array2) {
    // Concatenate the two arrays; ArrayUtils.addAll copes with null inputs by
    // cloning the non-null one.
    String[] merged = ArrayUtils.addAll(array1, array2);
    return merged;
}
/**
 * Streams the given events to the HTTP response as a CSV attachment
 * (events.csv). The header is the reflective EventDecorator header plus the
 * caller-supplied additional columns; each additional column is filled from
 * the event's JSON payload.
 *
 * @param response          target response; content type and disposition headers are set here
 * @param additionalHeaders extra column names resolved against the flattened payload JSON
 */
public void download(List<EventDecorator> data, HttpServletResponse response, List<String> additionalHeaders) throws IOException, SecurityException, NoSuchMethodException {
    String headerKey = "Content-Disposition";
    String headerValue = String.format("attachment; filename=\"%s\"", "events.csv");
    response.setContentType("application/csv");
    response.setHeader(headerKey, headerValue);
    PrintWriter writer = response.getWriter();
    String[] header = createHeader(EventDecorator.class);
    String[] additionalHeader = additionalHeaders.toArray(new String[0]);
    header = ArrayUtils.addAll(header, additionalHeader);
    // BUGFIX: String.join handles an empty header array; the previous
    // deleteCharAt(lastIndexOf(",")) threw StringIndexOutOfBoundsException
    // when no columns were present.
    writer.println(String.join(",", header));
    // Reuse one ObjectMapper instead of allocating a new one per event.
    ObjectMapper mapper = new ObjectMapper();
    for (EventDecorator event : data) {
        // Flatten the payload JSON into column-name -> value entries.
        Map<String, String> jsonMap = new LinkedHashMap<>();
        jsonToMap("", mapper.readTree(event.getPayload()), jsonMap);
        // Build the additional-column portion of the row. StringBuilder over
        // StringBuffer (no shared access), separator-first so an empty column
        // list yields an empty string instead of throwing on deleteCharAt.
        StringBuilder bufferJson = new StringBuilder();
        for (String column : additionalHeader) {
            String jsonMapValue = handleSpecialCharacters(jsonMap.get(column));
            if (bufferJson.length() > 0) {
                bufferJson.append(",");
            }
            bufferJson.append(Optional.ofNullable(jsonMapValue).orElse(""));
        }
        writer.println(event.getTimestampFormated() + "," +
                event.getTimestamp() + "," +
                Optional.ofNullable(event.getIncoming()).orElse(EventActor.builder().tenantDomain("").build()).getTenantDomain() + "," +
                Optional.ofNullable(event.getIncoming()).orElse(EventActor.builder().applicationName("").build()).getApplicationName() + "," +
                Optional.ofNullable(event.getIncoming()).orElse(EventActor.builder().deviceGuid("").build()).getDeviceGuid() + "," +
                Optional.ofNullable(event.getIncoming()).orElse(EventActor.builder().deviceId("").build()).getDeviceId() + "," +
                Optional.ofNullable(event.getIncoming()).orElse(EventActor.builder().channel("").build()).getChannel() + "," +
                bufferJson.toString());
    }
    writer.flush();
    writer.close();
}
@Inject(method = "<init>", at = @At("RETURN"))
private void init(Minecraft minecraft, File file, CallbackInfo ci) {
// Mixin callback invoked at the end of the target class's constructor.
ClientProxy.registerKeyBindings();
// Append this mod's key bindings to the existing binding array before
// loadOptions() runs — presumably so saved key settings can be applied to
// them; confirm against the injected class's loadOptions behavior.
keyBindings = ArrayUtils.addAll(keyBindings, KeyListener.keyBindings());
this.loadOptions();
}
private String[] mergerArrays(String[] array1, String[] array2) {
    // Concatenate via Commons Lang and cast the result back to String[].
    String[] combined = (String[]) ArrayUtils.addAll(array1, array2);
    return combined;
}
/**
 * Get all Resources.
 *
 * @param container
 *        The container for this resource.
 * @param softFilter
 *        an optional software filter
 * @param sqlFilter
 *        an optional conditional for select statement
 * @param asc
 *        true means ascending
 * @param pager
 *        an optional range of elements to return inclusive
 * @param bindVariables
 *        an optional list of bind variables
 * @return The list (Resource) of all Resources.
 */
public List getAllResources(Entity container, Filter softFilter, String sqlFilter, boolean asc, PagingPosition pager, List <Object> bindVariables)
{
    pager = fixPagingPosition(softFilter, pager);
    // Get the orders and get the ORDER BY clause
    Order[] orders = null;
    if (softFilter instanceof Search)
    {
        orders = ((Search) softFilter).getOrders();
    }
    String orderString = doubleStorageSql.getOrderClause(orders, m_resourceTableOrderField, asc);
    // Turn the search string into a WHERE clause if we can
    int searchFieldCount = 0;
    String searchString = null;
    if (m_resourceTableSearchFields != null && softFilter instanceof SearchFilter)
    {
        searchString = ((SearchFilter) softFilter).getSearchString();
        if (searchString != null && searchString.length() > 0)
        {
            String searchWhere = doubleStorageSql.getSearchWhereClause(m_resourceTableSearchFields);
            if (searchWhere != null && searchWhere.length() > 0)
            {
                // Combine the caller-provided filter with the generated search clause.
                if (sqlFilter == null)
                {
                    sqlFilter = searchWhere;
                }
                else
                {
                    sqlFilter = sqlFilter + " and " + searchWhere;
                }
                searchFieldCount = m_resourceTableSearchFields.length;
            }
        }
    }
    String sql = doubleStorageSql.getSelectXml5filterSql(m_resourceTableName,
            m_resourceTableContainerIdField, orderString, sqlFilter);
    // Add Paging to the Search if requested
    // TODO: Someday make this think Filter and emulate PagingPosition
    boolean pagedInSql = false;
    if (pager != null)
    {
        String limitedSql = doubleStorageSql.addLimitToQuery(sql, pager.getFirst() - 1, pager.getLast() - 1);
        if (limitedSql != null)
        {
            pagedInSql = true;
            sql = limitedSql;
        } else {
            // We don't subtract 1 because TOP is a count, not zero based like LIMIT
            String topSql = doubleStorageSql.addTopToQuery(sql, pager.getLast());
            if (topSql != null)
            {
                sql = topSql;
            }
        }
    }
    // Bind the container reference first, then one wildcard-wrapped copy of the
    // search string per searchable field.
    Object[] fields = new Object[1 + searchFieldCount];
    fields[0] = container.getReference();
    for (int i = 0; i < searchFieldCount; i++) fields[i + 1] = "%" + searchString + "%";
    if (bindVariables != null && bindVariables.size() > 0)
    {
        // Add the bind variables to the fields to substitute in the prepared statement.
        // BUGFIX: pass a zero-length array so toArray returns exactly
        // bindVariables.size() elements. The previous new Object[fields.length]
        // padded the returned array with nulls whenever fields.length exceeded
        // bindVariables.size(), appending spurious null bind values to the query.
        fields = ArrayUtils.addAll(fields, bindVariables.toArray(new Object[0]));
    }
    // If we are paged in SQL - then do not pass in the pager
    List all = m_sql.dbRead(sql, fields, new SearchFilterReader(container, softFilter, pagedInSql ? null : pager, false));
    return all;
}