Listed below are example usages of java.util.HashSet#clear(). You can follow the link to view the full source on GitHub, or leave a comment in the panel on the right.
/**
 * Verifies that forEach on an IdentityHashMap and on its key, value and
 * entry views visits every mapping exactly once.
 */
public void test_forEach() throws Exception {
    IdentityHashMap<String, String> map = new IdentityHashMap<>();
    map.put("one", "1");
    map.put("two", "2");
    map.put("three", "3");

    // Copy the map through Map.forEach and compare with the source.
    IdentityHashMap<String, String> output = new IdentityHashMap<>();
    map.forEach(output::put);
    assertEquals(map, output);

    // Key view.
    HashSet<String> setOutput = new HashSet<>();
    map.keySet().forEach(setOutput::add);
    assertEquals(map.keySet(), setOutput);

    // Value view (reuses the same collector set).
    setOutput.clear();
    map.values().forEach(setOutput::add);
    assertEquals(new HashSet<>(map.values()), setOutput);

    // Entry view.
    HashSet<Map.Entry<String, String>> entrySetOutput = new HashSet<>();
    map.entrySet().forEach(entrySetOutput::add);
    assertEquals(map.entrySet(), entrySetOutput);
}
/**
 * Uses reflection to rewrite the data model's cross-locale search
 * configuration: snapshots the current property and data-type sets (into
 * oldCrossLocaleProperties / oldCrossLocaleTypes for later restore), then
 * empties both and enables cross-locale search for PROP_TITLE only.
 */
private void changeCrossLocaleEnabledFields() throws NoSuchFieldException, IllegalAccessException {
    AlfrescoSolrDataModel model = AlfrescoSolrDataModel.getInstance();

    Field propertiesField = model.getClass().getDeclaredField("crossLocaleSearchProperties");
    propertiesField.setAccessible(true);
    Field typesField = model.getClass().getDeclaredField("crossLocaleSearchDataTypes");
    typesField.setAccessible(true);

    HashSet<QName> crossLocaleProperties = (HashSet<QName>) propertiesField.get(model);
    HashSet<QName> crossLocaleTypes = (HashSet<QName>) typesField.get(model);

    // Snapshot current state so a teardown step can restore it afterwards.
    oldCrossLocaleTypes = (HashSet<QName>) crossLocaleTypes.clone();
    oldCrossLocaleProperties = (HashSet<QName>) crossLocaleProperties.clone();

    // Restrict cross-locale search to the title property only.
    crossLocaleTypes.clear();
    crossLocaleProperties.clear();
    crossLocaleProperties.add(PROP_TITLE);
}
/**
 * Returns labels ("C" + catch type) for the catch blocks whose try range
 * covers the given pc, keeping only the tightest enclosing try block(s)
 * whose size does not exceed maxTryBlockSize; ties on size all contribute.
 *
 * @throws IllegalStateException if no Code attribute is being visited
 */
public Set<String> getSurroundingCaughtExceptions(int pc, int maxTryBlockSize) {
    HashSet<String> result = new HashSet<>();
    if (code == null) {
        throw new IllegalStateException("Not visiting Code");
    }
    if (code.getExceptionTable() == null) {
        return result;
    }
    int smallest = maxTryBlockSize;
    for (CodeException handler : code.getExceptionTable()) {
        int start = handler.getStartPC();
        int end = handler.getEndPC();
        if (pc < start || pc > end) {
            continue; // pc not guarded by this handler
        }
        int span = end - start;
        if (span < smallest) {
            // Strictly tighter try block: discard the looser matches.
            result.clear();
            smallest = span;
            result.add("C" + handler.getCatchType());
        } else if (span == smallest) {
            result.add("C" + handler.getCatchType());
        }
    }
    return result;
}
// Computes, for the given statement subtree, the sets of statements that an
// unlabeled break (index 0) or continue (index 1) could bind to, and clears
// the "labeled" flag on explicit edges whose label is redundant because the
// nearest loop/switch already catches them.
private static HashSet<Statement>[] processStatementLabel(Statement stat) {
HashSet<Statement> setBreak = new HashSet<>();
HashSet<Statement> setContinue = new HashSet<>();
if (stat.getExprents() == null) {
// Aggregate break/continue candidates from all child statements.
for (Statement st : stat.getStats()) {
HashSet<Statement>[] arr = processStatementLabel(st);
setBreak.addAll(arr[0]);
setContinue.addAll(arr[1]);
}
// Loops and switches "shield" their contents: an edge out of a shielded
// source already binds here without a label, so the label is dropped.
boolean shieldType = (stat.type == Statement.TYPE_DO || stat.type == Statement.TYPE_SWITCH);
if (shieldType) {
for (StatEdge edge : stat.getLabelEdges()) {
if (edge.explicit && ((edge.getType() == StatEdge.TYPE_BREAK && setBreak.contains(edge.getSource()))
|| (edge.getType() == StatEdge.TYPE_CONTINUE && setContinue.contains(edge.getSource())))) {
edge.labeled = false;
}
}
}
// NOTE: intentional fall-through — a loop (TYPE_DO) consumes both the
// continue and break candidate sets; a switch consumes break only.
switch (stat.type) {
case Statement.TYPE_DO:
setContinue.clear();
case Statement.TYPE_SWITCH:
setBreak.clear();
}
}
// This statement itself becomes a candidate target for its ancestors.
setBreak.add(stat);
setContinue.add(stat);
return new HashSet[] { setBreak, setContinue };
}
/**
 * Test if changing the pythonpath has the desired effects in the python model.
 */
public void testPythonpathChanges() throws Exception {
final HashSet<String> pythonPathSet = new HashSet<String>();
pythonPathSet.add(TestDependent.TEST_PYSRC_NAVIGATOR_LOC + "projroot/source");
pythonPathSet.add("invalid");
PythonNature nature = createNature(pythonPathSet);
project = new ProjectStub(new File(TestDependent.TEST_PYSRC_NAVIGATOR_LOC + "projroot"), nature, true);
provider = new PythonModelProvider();
// "source" is on the pythonpath, so the project child is a source folder.
Object[] children1 = provider.getChildren(project);
assertTrue(children1[0] instanceof PythonSourceFolder);
//no changes in the pythonpath
provider.internalDoNotifyPythonPathRebuilt(project, new ArrayList<String>(pythonPathSet));//still the same
Object[] children2 = provider.getChildren(project);
assertEquals(1, children1.length);
assertEquals(1, children2.length);
// An unchanged pythonpath must not recreate the source-folder wrapper.
assertSame(children1[0], children2[0]);
//changed pythonpath (source folders should be removed)
pythonPathSet.clear();
pythonPathSet.add(TestDependent.TEST_PYSRC_NAVIGATOR_LOC + "projroot/source/python");
provider.internalDoNotifyPythonPathRebuilt(project, new ArrayList<String>(pythonPathSet));
Object[] children3 = provider.getChildren(project);
assertFalse(children3[0] instanceof PythonSourceFolder);
//restore initial
pythonPathSet.clear();
pythonPathSet.add(TestDependent.TEST_PYSRC_NAVIGATOR_LOC + "projroot/source");
// NOTE(review): unlike the two changes above, there is no
// internalDoNotifyPythonPathRebuilt call here — confirm whether
// getChildren alone is expected to pick up the restored pythonpath.
Object[] children4 = provider.getChildren(project);
assertTrue(children4[0] instanceof PythonSourceFolder);
assertNotSame(children1[0], children4[0]); //because it was removed
}
/**
 * Executes the given SQL and maps each result row to a
 * {@link ReportQueryParamItem}, keeping only the first row seen for each
 * distinct (trimmed) name.
 *
 * @param sqlText query expected to yield "name" and "text" columns
 * @return parameter items in result-set order, de-duplicated by name
 * @throws RuntimeException wrapping any SQLException from the query
 */
public List<ReportQueryParamItem> parseQueryParamItems(final String sqlText) {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    final HashSet<String> seenNames = new HashSet<>();
    final List<ReportQueryParamItem> rows = new ArrayList<>();
    try {
        this.logger.debug(sqlText);
        conn = this.getJdbcConnection();
        stmt = conn.createStatement();
        rs = stmt.executeQuery(sqlText);
        while (rs.next()) {
            String name = rs.getString("name");
            String text = rs.getString("text");
            // Null-safe trim: absent columns become empty strings.
            name = (name == null) ? "" : name.trim();
            text = (text == null) ? "" : text.trim();
            // BUGFIX: the original populated the name set but still added
            // every row, so duplicates slipped through. Set.add returns
            // false on a repeat, which skips the duplicate row here.
            if (seenNames.add(name)) {
                rows.add(new ReportQueryParamItem(name, text));
            }
        }
    } catch (final SQLException ex) {
        throw new RuntimeException(ex);
    } finally {
        // Always release JDBC resources, even on failure.
        JdbcUtils.releaseJdbcResource(conn, stmt, rs);
    }
    return rows;
}
@Test
public void testTAzureStorageQueueOutputProperties() {
    TAzureStorageQueueOutputProperties properties = new TAzureStorageQueueOutputProperties("test");
    properties.setupProperties();

    // Output direction (true): only the flow connector is exposed.
    HashSet<PropertyPathConnector> expected = new HashSet<>();
    expected.add(properties.FLOW_CONNECTOR);
    assertEquals(expected, properties.getAllSchemaPropertiesConnectors(true));

    // Input direction (false): only the main connector is exposed.
    expected.clear();
    expected.add(properties.MAIN_CONNECTOR);
    assertEquals(expected, properties.getAllSchemaPropertiesConnectors(false));
}
/**
 * Verifies that AddIndexDDL can create both a non-unique and a unique
 * single-column index, and that each indexed column is reported correctly.
 */
public void testAddIndex() throws Exception {
    String tablename = "mytable";
    String pkname = "id";
    String colname = "col";
    String ixname = "col_ix";
    createBasicTable(tablename, pkname);
    addBasicColumn(tablename, colname, Types.VARCHAR, 255);
    AddIndexDDL ddl = new AddIndexDDL(getSpecification(), getSchema(),
        fixIdentifier(tablename));
    // Parameterized set (the original used a raw HashSet).
    HashSet<String> cols = new HashSet<>();
    cols.add(fixIdentifier(colname));
    // Non-unique index on the first column.
    boolean wasException = ddl.execute(ixname, false, cols);
    assertFalse(wasException);
    assertTrue(columnInIndex(tablename, colname, ixname));

    // Second column: unique index this time.
    colname = "col2";
    ixname = "col2_ix";
    addBasicColumn(tablename, colname, Types.VARCHAR, 255);
    cols.clear();
    cols.add(fixIdentifier(colname));
    wasException = ddl.execute(ixname, true, cols);
    assertFalse(wasException);
    assertTrue(columnInIndex(tablename, colname, ixname));
    assertTrue(indexIsUnique(tablename, ixname));
}
/**
 * Empties (but keeps) the node set tracked for the given application
 * attempt, holding the write lock for the duration of the update.
 */
public void clearNodeSetForAttempt(ApplicationAttemptId attemptId) {
    super.writeLock.lock();
    try {
        HashSet<NodeId> nodes = this.appAttemptToNodeKeyMap.get(attemptId);
        if (nodes == null) {
            return; // nothing tracked for this attempt
        }
        LOG.info("Clear node set for " + attemptId);
        nodes.clear();
    } finally {
        super.writeLock.unlock();
    }
}
/**
 * Rebuilds the per-file set of selected CSG triangles from the currently
 * selected CSG bodies, converting each body's polygons to LDraw triangles.
 */
public static void rebuildSelection(DatFile df) {
    final Composite3D c3d = df.getLastSelectedComposite();
    if (c3d == null || df.getLastSelectedComposite().isDisposed()) return;
    // BUGFIX: Map.putIfAbsent returns the PREVIOUS mapping (null when the
    // key was absent), so the original dereferenced null on the first call
    // for a file. Insert the empty set first, then fetch the live mapping.
    selectedTrianglesMap.putIfAbsent(df, new HashSet<GData3>());
    final HashSet<GData3> selectedTriangles = selectedTrianglesMap.get(df);
    final HashSet<GDataCSG> selectedBodies = selectedBodyMap.get(df);
    selectedTriangles.clear();
    if (selectedBodies != null) {
        try {
            for (GDataCSG c : selectedBodies) {
                if (c == null) {
                    // Inconsistent body list: reset both selections.
                    selectedTriangles.clear();
                    selectedBodies.clear();
                    return;
                }
                if (c.dataCSG == null) {
                    selectedTriangles.clear();
                } else {
                    for (Polygon p : c.dataCSG.getPolygons()) {
                        Matrix4f id = new Matrix4f();
                        Matrix4f.setIdentity(id);
                        GData1 g1 = new GData1(-1, .5f, .5f, .5f, 1f, id, View.ACCURATE_ID, new ArrayList<String>(), null, null, 1, false, id, View.ACCURATE_ID, null, View.DUMMY_REFERENCE, true, false,
                            new HashSet<String>(), View.DUMMY_REFERENCE);
                        selectedTriangles.addAll(p.toLDrawTriangles(g1).keySet());
                    }
                }
            }
            if (selectedTriangles.isEmpty()) {
                selectedBodies.clear();
            }
        } catch (ConcurrentModificationException consumed) {
            // Rebuild raced a concurrent edit; a later call will rebuild
            // from a consistent state, so this is deliberately swallowed.
        }
    }
}
/**
 * Demonstrates emptying a HashSet with clear() and checking it with
 * isEmpty().
 */
public static void main(String[] args) {
    // Parameterized set; the original used a raw HashSet and the deprecated
    // new Integer(String) constructor (removed in recent JDKs).
    HashSet<Integer> hSet = new HashSet<>();
    hSet.add(Integer.valueOf(1));
    hSet.add(Integer.valueOf(2));
    hSet.add(Integer.valueOf(3));
    System.out.println("HashSet before removal : " + hSet);
    // clear() removes all elements from the HashSet.
    hSet.clear();
    System.out.println("HashSet after removal : " + hSet);
    // isEmpty() returns true once the set holds no elements.
    System.out.println("Is HashSet empty ? " + hSet.isEmpty());
    // Note: removeAll(collection) can also remove elements, but clear() is
    // the idiomatic way to empty a set.
}
/**
 * Returns whether the taxonomy is currently consistent or not.
 * Consistency requires every node to have a name and a rank, and every
 * node's parent chain to reach the root without cycles or dangling links.
 * On failure, mReason records the first problem found.
 * @return is taxonomy complete.
 */
public boolean isConsistent() {
// check taxonomy is consistent - all nodes filled out, only a single root
final TaxonNode root = getRoot();
if (root == null) {
mReason = "No root node.";
return false;
}
final HashSet<TaxonNode> seen = new HashSet<>(); // to detect loops
for (final TaxonNode node : mNodes.values()) {
if (node.getName() == null) {
mReason = "Node " + node.getId() + " does not have a name.";
return false;
}
if (node.getRank() == null) {
mReason = "Node " + node.getId() + " does not have a rank.";
return false;
}
// search to root of taxonomy
TaxonNode x = node;
seen.clear(); // one set reused per node rather than reallocating
while (x != null && x != root && !seen.contains(x)) {
seen.add(x);
x = x.getParent();
}
// The walk stopped at null (dangling parent) or a revisited node
// (cycle) rather than at root: either way the node is disconnected.
if (x != root) {
mReason = "Node " + node.getId() + " does not link to root.";
return false;
}
}
return true;
}
/**
 * When scanning notifications, column filtering is done on the client side. This test ensures
 * that filtering works correctly.
 */
@Test
public void testColumnFiltering() {
Data data = new Data();
data.putNtfy("r001", "f8", "q2");
data.putNtfy("r001", "f9", "q1");
data.putNtfy("r002", "f8", "q2");
data.putNtfy("r002", "f8", "q3");
data.putNtfy("r004", "f9", "q3");
data.putNtfy("r004", "f9", "q4");
// Every notification above, as (row, column, empty-value) triples.
HashSet<RowColumnValue> expected = new HashSet<>();
expected.add(new RowColumnValue("r001", new Column("f8", "q2"), ""));
expected.add(new RowColumnValue("r001", new Column("f9", "q1"), ""));
expected.add(new RowColumnValue("r002", new Column("f8", "q2"), ""));
expected.add(new RowColumnValue("r002", new Column("f8", "q3"), ""));
expected.add(new RowColumnValue("r004", new Column("f9", "q3"), ""));
expected.add(new RowColumnValue("r004", new Column("f9", "q4"), ""));
// No column filter: the scanner must surface every notification.
NotificationScanner scanner = new NotificationScanner(data, Collections.emptySet());
HashSet<RowColumnValue> actual = new HashSet<>();
scanner.forEach(actual::add);
Assert.assertEquals(expected, actual);
// Family-only filter ("f9"): expect just the f9 notifications.
scanner = new NotificationScanner(data, Arrays.asList(new Column("f9")));
actual.clear();
scanner.forEach(actual::add);
HashSet<RowColumnValue> expected2 = new HashSet<>();
expected.stream().filter(rcv -> rcv.getColumn().getsFamily().equals("f9"))
.forEach(expected2::add);
Assert.assertEquals(expected2, actual);
// Both families listed: equivalent to no filtering at all.
scanner = new NotificationScanner(data, Arrays.asList(new Column("f9"), new Column("f8")));
actual.clear();
scanner.forEach(actual::add);
Assert.assertEquals(expected, actual);
// Fully-qualified column filter: only the exact (f9, q1) cell matches.
scanner = new NotificationScanner(data, Arrays.asList(new Column("f9", "q1")));
actual.clear();
scanner.forEach(actual::add);
expected2.clear();
expected2.add(new RowColumnValue("r001", new Column("f9", "q1"), ""));
Assert.assertEquals(expected2, actual);
// Mixed filter: one exact column plus a whole family.
scanner =
new NotificationScanner(data, Arrays.asList(new Column("f9", "q1"), new Column("f8")));
actual.clear();
scanner.forEach(actual::add);
expected2.clear();
expected2.add(new RowColumnValue("r001", new Column("f9", "q1"), ""));
expected2.add(new RowColumnValue("r001", new Column("f8", "q2"), ""));
expected2.add(new RowColumnValue("r002", new Column("f8", "q2"), ""));
expected2.add(new RowColumnValue("r002", new Column("f8", "q3"), ""));
Assert.assertEquals(expected2, actual);
}
/**
 * Collects one factory instance per implementation class registered under
 * the given JCA/JCE service name across all installed security providers.
 *
 * @param serviceName provider-property prefix to match
 * @return an unmodifiable set of factory objects; empty for a null, empty,
 *         or "."-terminated service name
 */
private static Set<Object> getFactories(String serviceName) {
    HashSet<Object> result = new HashSet<Object>();
    if ((serviceName == null) || (serviceName.length() == 0) ||
        (serviceName.endsWith("."))) {
        return result;
    }
    // Tracks implementation classes already tried for the current provider.
    HashSet<String> classes = new HashSet<String>();
    for (Provider provider : Security.getProviders()) {
        classes.clear();
        // Check the keys for each provider.
        for (Enumeration<Object> e = provider.keys(); e.hasMoreElements(); ) {
            String currentKey = (String) e.nextElement();
            if (!currentKey.startsWith(serviceName)) {
                continue;
            }
            // Keys containing a whitespace carry attributes for an
            // algorithm's implementation; only attribute-free keys lead to
            // implementation classes.
            if (currentKey.indexOf(" ") >= 0) {
                continue;
            }
            String className = provider.getProperty(currentKey);
            // Set.add returns false for a duplicate, replacing the original
            // contains-then-add pair; each class is instantiated once.
            if (classes.add(className)) {
                try {
                    Object fac = loadFactory(provider, className);
                    if (fac != null) {
                        result.add(fac);
                    }
                } catch (Exception ignored) {
                    // A factory that fails to load is skipped deliberately.
                }
            }
        }
    }
    return Collections.unmodifiableSet(result);
}
/**
 * Tests for scenes that a task fails for its own error, in which case the
 * region containing the failed task and its consumer regions should be restarted.
 * <pre>
 * (v1) -+-> (v4)
 * x
 * (v2) -+-> (v5)
 *
 * (v3) -+-> (v6)
 *
 * ^
 * |
 * (blocking)
 * </pre>
 * Each vertex is in an individual region.
 */
@Test
public void testRegionFailoverForRegionInternalErrors() throws Exception {
TestFailoverTopology.Builder topologyBuilder = new TestFailoverTopology.Builder();
TestFailoverTopology.TestFailoverVertex v1 = topologyBuilder.newVertex();
TestFailoverTopology.TestFailoverVertex v2 = topologyBuilder.newVertex();
TestFailoverTopology.TestFailoverVertex v3 = topologyBuilder.newVertex();
TestFailoverTopology.TestFailoverVertex v4 = topologyBuilder.newVertex();
TestFailoverTopology.TestFailoverVertex v5 = topologyBuilder.newVertex();
TestFailoverTopology.TestFailoverVertex v6 = topologyBuilder.newVertex();
// All edges are BLOCKING, so every vertex forms its own failover region.
topologyBuilder.connect(v1, v4, ResultPartitionType.BLOCKING);
topologyBuilder.connect(v1, v5, ResultPartitionType.BLOCKING);
topologyBuilder.connect(v2, v4, ResultPartitionType.BLOCKING);
topologyBuilder.connect(v2, v5, ResultPartitionType.BLOCKING);
topologyBuilder.connect(v3, v6, ResultPartitionType.BLOCKING);
FailoverTopology topology = topologyBuilder.build();
RestartPipelinedRegionStrategy strategy = new RestartPipelinedRegionStrategy(topology);
// when v1 fails, {v1,v4,v5} should be restarted
// (v1's own region plus both downstream consumer regions)
HashSet<ExecutionVertexID> expectedResult = new HashSet<>();
expectedResult.add(v1.getExecutionVertexID());
expectedResult.add(v4.getExecutionVertexID());
expectedResult.add(v5.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v1.getExecutionVertexID(), new Exception("Test failure")));
// when v2 fails, {v2,v4,v5} should be restarted
expectedResult.clear();
expectedResult.add(v2.getExecutionVertexID());
expectedResult.add(v4.getExecutionVertexID());
expectedResult.add(v5.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v2.getExecutionVertexID(), new Exception("Test failure")));
// when v3 fails, {v3,v6} should be restarted
expectedResult.clear();
expectedResult.add(v3.getExecutionVertexID());
expectedResult.add(v6.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v3.getExecutionVertexID(), new Exception("Test failure")));
// when v4 fails, {v4} should be restarted
// (sink vertices have no consumers, so only their own region restarts)
expectedResult.clear();
expectedResult.add(v4.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v4.getExecutionVertexID(), new Exception("Test failure")));
// when v5 fails, {v5} should be restarted
expectedResult.clear();
expectedResult.add(v5.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v5.getExecutionVertexID(), new Exception("Test failure")));
// when v6 fails, {v6} should be restarted
expectedResult.clear();
expectedResult.add(v6.getExecutionVertexID());
assertEquals(expectedResult,
strategy.getTasksNeedingRestart(v6.getExecutionVertexID(), new Exception("Test failure")));
}
@Test
public void testAddMultiple() throws Exception {
// add a statement to an index
HashSet<Statement> added = new HashSet<>();
HashSet<Statement> removed = new HashSet<>();
added.add(statement11);
added.add(statement12);
added.add(statement21);
added.add(statement22);
index.begin();
index.addRemoveStatements(added, removed);
index.commit();
// check that it arrived properly: one document per distinct subject
long count = client.prepareSearch(index.getIndexName())
.setTypes(index.getTypes())
.get()
.getHits()
.getTotalHits();
assertEquals(2, count);
// check the documents
SearchDocument document = index.getDocuments(subject).iterator().next();
assertEquals(subject.toString(), document.getResource());
assertStatement(statement11, document);
assertStatement(statement12, document);
document = index.getDocuments(subject2).iterator().next();
assertEquals(subject2.toString(), document.getResource());
assertStatement(statement21, document);
assertStatement(statement22, document);
// check if the text field stores all added string values
Set<String> texts = new HashSet<>();
texts.add("cats");
texts.add("dogs");
// FIXME
// assertTexts(texts, document);
// add/remove one: the same sets are reused for an incremental update
added.clear();
removed.clear();
added.add(statement23);
removed.add(statement22);
index.begin();
index.addRemoveStatements(added, removed);
index.commit();
// check doc 2: statement23 added, statement22 gone, statement21 untouched
document = index.getDocuments(subject2).iterator().next();
assertEquals(subject2.toString(), document.getResource());
assertStatement(statement21, document);
assertStatement(statement23, document);
assertNoStatement(statement22, document);
// check if the text field stores all added and no deleted string values
texts.remove("dogs");
texts.add("chicken");
// FIXME
// assertTexts(texts, document);
// TODO: check deletion of the rest
}
/**
 * Collects one factory instance per implementation class registered under
 * the given JCA/JCE service name across all installed security providers.
 *
 * @param serviceName provider-property prefix to match
 * @return an unmodifiable set of factory objects; empty for a null, empty,
 *         or "."-terminated service name
 */
private static Set<Object> getFactories(String serviceName) {
    HashSet<Object> result = new HashSet<Object>();
    if ((serviceName == null) || (serviceName.length() == 0) ||
        (serviceName.endsWith("."))) {
        return result;
    }
    // Tracks implementation classes already tried for the current provider.
    HashSet<String> classes = new HashSet<String>();
    for (Provider provider : Security.getProviders()) {
        classes.clear();
        // Check the keys for each provider.
        for (Enumeration<Object> e = provider.keys(); e.hasMoreElements(); ) {
            String currentKey = (String) e.nextElement();
            if (!currentKey.startsWith(serviceName)) {
                continue;
            }
            // Keys containing a whitespace carry attributes for an
            // algorithm's implementation; only attribute-free keys lead to
            // implementation classes.
            if (currentKey.indexOf(" ") >= 0) {
                continue;
            }
            String className = provider.getProperty(currentKey);
            // Set.add returns false for a duplicate, replacing the original
            // contains-then-add pair; each class is instantiated once.
            if (classes.add(className)) {
                try {
                    Object fac = loadFactory(provider, className);
                    if (fac != null) {
                        result.add(fac);
                    }
                } catch (Exception ignored) {
                    // A factory that fails to load is skipped deliberately.
                }
            }
        }
    }
    return Collections.unmodifiableSet(result);
}
@Test
public void testAddMultiple() throws Exception {
// add a statement to an index
HashSet<Statement> added = new HashSet<>();
HashSet<Statement> removed = new HashSet<>();
added.add(statement11);
added.add(statement12);
added.add(statement21);
added.add(statement22);
index.begin();
index.addRemoveStatements(added, removed);
index.commit();
// check that it arrived properly: one document per distinct subject
long count = client.query(new SolrQuery("*:*").setRows(0)).getResults().getNumFound();
assertEquals(2, count);
// check the documents
SearchDocument document = index.getDocuments(subject).iterator().next();
assertEquals(subject.toString(), document.getResource());
assertStatement(statement11, document);
assertStatement(statement12, document);
document = index.getDocuments(subject2).iterator().next();
assertEquals(subject2.toString(), document.getResource());
assertStatement(statement21, document);
assertStatement(statement22, document);
// check if the text field stores all added string values
Set<String> texts = new HashSet<>();
texts.add("cats");
texts.add("dogs");
// FIXME
// assertTexts(texts, document);
// add/remove one: the same sets are reused for an incremental update
added.clear();
removed.clear();
added.add(statement23);
removed.add(statement22);
index.begin();
index.addRemoveStatements(added, removed);
index.commit();
// check doc 2: statement23 added, statement22 gone, statement21 untouched
document = index.getDocuments(subject2).iterator().next();
assertEquals(subject2.toString(), document.getResource());
assertStatement(statement21, document);
assertStatement(statement23, document);
assertNoStatement(statement22, document);
// check if the text field stores all added and no deleted string values
texts.remove("dogs");
texts.add("chicken");
// FIXME
// assertTexts(texts, document);
// TODO: check deletion of the rest
}
// Builds the fixture: guards against a leaked transaction from a previous
// test, configures default admin/guest users, and creates six
// TestAuthenticationServiceImpl instances with varying user populations.
@Override
protected void setUp() throws Exception
{
if (AlfrescoTransactionSupport.getTransactionReadState() != TxnReadState.TXN_NONE)
{
throw new AlfrescoRuntimeException(
"A previous tests did not clean up transaction: " +
AlfrescoTransactionSupport.getTransactionId());
}
AuthenticationUtil authUtil = new AuthenticationUtil();
authUtil.setDefaultAdminUserName("admin");
authUtil.setDefaultGuestUserName("guest");
authUtil.afterPropertiesSet();
// Service 1: single enabled user "andy".
service1 = new TestAuthenticationServiceImpl(ALFRESCO, true, true, true, false);
service1.createAuthentication("andy", "andy".toCharArray());
HashMap<String, String> up = new HashMap<String, String>();
HashSet<String> disabled = new HashSet<String>();
up.put("lone", "lone");
service2 = new TestAuthenticationServiceImpl(LONELY_ENABLED, false, false, false, true, up, disabled);
// NOTE(review): the same `up`/`disabled` instances are cleared and refilled
// between service constructions — this assumes TestAuthenticationServiceImpl
// copies the collections rather than keeping the references; confirm.
up.clear();
disabled.clear();
// Service 3: one user, and that user is disabled.
up.put("ranger", "ranger");
disabled.add("ranger");
service3 = new TestAuthenticationServiceImpl(LONELY_DISABLE, false, false, false, false, up, disabled);
// Service 4: no users at all.
service4 = new TestAuthenticationServiceImpl(EMPTY, true, true, true, false);
up.clear();
disabled.clear();
// Service 5: five users A-E.
up.put("A", "A");
up.put("B", "B");
up.put("C", "C");
up.put("D", "D");
up.put("E", "E");
service5 = new TestAuthenticationServiceImpl(FIVE, false, false, false, false, up, disabled);
up.clear();
disabled.clear();
// Service 6: eleven users A-K with lowercase passwords.
up.put("A", "a");
up.put("B", "b");
up.put("C", "c");
up.put("D", "d");
up.put("E", "e");
up.put("F", "f");
up.put("G", "g");
up.put("H", "h");
up.put("I", "i");
up.put("J", "j");
up.put("K", "k");
service6 = new TestAuthenticationServiceImpl(FIVE_AND_MORE, false, false, false, false, up, disabled);
}
/**
 * Client authorization configuration.
 *
 * <p>Lazily builds and caches a {@code ClientAuthConfig} from the backing
 * config data, applying defaults: auth required, JWT auth type,
 * RS256/RS384/RS512 algorithms, empty issuer/audience, 2-minute clock skew.
 *
 * @return the cached or newly built client auth configuration
 * @throws InvalidConfigurationException if the underlying config is invalid
 */
public IClientAuthConfig getClientAuthConfig() throws InvalidConfigurationException {
    if (this.clientAuthConfig != null) return this.clientAuthConfig;
    // Default to True unless explicitly disabled
    Boolean authRequired = !data.hasPath(AUTH_REQUIRED_KEY)
        || data.getString(AUTH_REQUIRED_KEY).isEmpty()
        || data.getBool(AUTH_REQUIRED_KEY);
    String authServiceUrl = data.getString(AUTH_WEB_SERVICE_URL_KEY);
    // Default to JWT
    String authType = "JWT";
    // BUGFIX: the original tested AUTH_REQUIRED_KEY here, so a config that
    // set only the auth type was ignored, and one that set only
    // AUTH_REQUIRED_KEY read the absent type key.
    if (data.hasPath(AUTH_TYPE_KEY)) {
        authType = data.getString(AUTH_TYPE_KEY);
    }
    // Default to RS256, RS384, RS512
    HashSet<String> jwtAllowedAlgos = new HashSet<>();
    jwtAllowedAlgos.add("RS256");
    jwtAllowedAlgos.add("RS384");
    jwtAllowedAlgos.add("RS512");
    if (data.hasPath(JWT_ALGOS_KEY)) {
        // An explicit algorithm list fully replaces the defaults.
        jwtAllowedAlgos.clear();
        Collections.addAll(
            jwtAllowedAlgos,
            data.getString(JWT_ALGOS_KEY).split(","));
    }
    // Default to empty, no issuer
    String jwtIssuer = "";
    if (data.hasPath(JWT_ISSUER_KEY)) {
        jwtIssuer = data.getString(JWT_ISSUER_KEY);
    }
    // Default to empty, no audience
    String jwtAudience = "";
    if (data.hasPath(JWT_AUDIENCE_KEY)) {
        jwtAudience = data.getString(JWT_AUDIENCE_KEY);
    }
    // Default to 2 minutes
    Duration jwtClockSkew = Duration.ofSeconds(120);
    // BUGFIX: the original tested JWT_AUDIENCE_KEY here, reading a possibly
    // absent clock-skew value whenever an audience was configured.
    if (data.hasPath(JWT_CLOCK_SKEW_KEY)) {
        jwtClockSkew = data.getDuration(JWT_CLOCK_SKEW_KEY);
    }
    this.clientAuthConfig = new ClientAuthConfig(
        authRequired,
        authServiceUrl,
        authType,
        jwtAllowedAlgos,
        jwtIssuer,
        jwtAudience,
        jwtClockSkew);
    return this.clientAuthConfig;
}