Listed below are example usages of java.util.LinkedHashSet#contains(); follow the link to view the source code on GitHub, or leave a comment on the right.
/**
 * Returns the registered Kryo types.
 *
 * <p>If Kryo is forced ({@code isForceKryoEnabled()}), the result additionally
 * contains every type that was previously registered only as a POJO type,
 * keeping registration order: Kryo types first, then POJO types.
 *
 * @return the registered Kryo types; when Kryo is not forced this is the
 *         internal set itself, not a defensive copy
 */
public LinkedHashSet<Class<?>> getRegisteredKryoTypes() {
    if (isForceKryoEnabled()) {
        // if we force kryo, we must also return all the types that
        // were previously only registered as POJO
        LinkedHashSet<Class<?>> result = new LinkedHashSet<>();
        result.addAll(registeredKryoTypes);
        // addAll is sufficient: a LinkedHashSet silently ignores re-added
        // duplicates and keeps first-insertion order, so the per-element
        // contains() guard of the previous revision was redundant.
        result.addAll(registeredPojoTypes);
        return result;
    } else {
        return registeredKryoTypes;
    }
}
/**
 * Returns the registered Kryo types.
 *
 * <p>When Kryo is forced, the returned set additionally contains every type
 * that was previously registered only as a POJO type; otherwise the internal
 * set is returned directly (not a defensive copy).
 *
 * @return the registered Kryo types, possibly merged with the POJO-only types
 */
public LinkedHashSet<Class<?>> getRegisteredKryoTypes() {
if (isForceKryoEnabled()) {
// if we force kryo, we must also return all the types that
// were previously only registered as POJO
LinkedHashSet<Class<?>> result = new LinkedHashSet<>();
result.addAll(registeredKryoTypes);
for(Class<?> t : registeredPojoTypes) {
// the contains() guard is redundant for a Set (add() ignores duplicates),
// but is kept here to leave the code byte-identical
if (!result.contains(t)) {
result.add(t);
}
}
return result;
} else {
return registeredKryoTypes;
}
}
/**
 * Recursively collects {@code generatedType} and its super types into
 * {@code orderedTypes}, visiting super types first so they end up before
 * their subtypes in the output set.
 *
 * @param generatedType the type currently being visited
 * @param orderedTypes  output set, filled supers-first
 * @param typeMap       lookup from type name to its generated type
 * @param typeHierarchy the chain of type names on the current recursion path,
 *                      used to detect cycles in the super-type graph
 */
private static void collectType(
GeneratedType generatedType, LinkedHashSet<GeneratedType> orderedTypes,
Map<String, GeneratedType> typeMap, LinkedHashSet<String> typeHierarchy) {
// mark this type as "on the current recursion path" for cycle detection
typeHierarchy.add(generatedType.getTypeName());
for (String superType : generatedType.getSuperTypes()) {
GeneratedType requiredType = typeMap.get(superType);
if (requiredType != null) {
// the super type is already on the current path -> cycle in the hierarchy
if (typeHierarchy.contains(superType)) {
ErrorUtil.error("Duplicate type name found in "
+ typeHierarchy.stream().collect(Collectors.joining("->")) + "->" + superType);
// NOTE(review): this early return skips the remove() below, leaving
// generatedType in typeHierarchy after an error — confirm that callers
// abandon the traversal at this point
return;
}
collectType(requiredType, orderedTypes, typeMap, typeHierarchy);
}
}
// done with this subtree: take the type off the active path, then emit it
// AFTER all of its supers (supers-first ordering)
typeHierarchy.remove(generatedType.getTypeName());
orderedTypes.add(generatedType);
}
/**
 * Reads the column list configured under {@code configKey} and returns it as
 * an ordered, duplicate-free set.
 *
 * @param configKey    configuration key holding the multi-valued column list
 * @param validColumns allowed column names, or {@code null} to skip validation
 * @return the configured columns, trimmed, in configuration order
 * @throws InvalidConfigurationException if any name is unknown or duplicated;
 *         all problems are collected and reported in a single message
 */
private static ImmutableSet<String> makeColumnSet(String configKey, Set<String> validColumns) {
    List<String> columnList = Configuration.getMultiValue(configKey, Collections.emptyList(),
            Configuration.STRING_PARSER).get();
    LinkedHashSet<String> columnSet = new LinkedHashSet<>();
    List<String> errors = new ArrayList<>();
    for (String name : columnList) {
        name = name.trim();
        if (validColumns != null && !validColumns.contains(name)) {
            errors.add("invalid: " + name);
        } else if (!columnSet.add(name)) {
            // add() returns false when the name was already present
            errors.add("duplicate: " + name);
        }
    }
    if (!errors.isEmpty()) {
        throw new InvalidConfigurationException(configKey + " errors: " + errors);
    }
    return ImmutableSet.copyOf(columnSet);
}
/**
 * Returns true if {@code child} closes a dependency cycle with any member of
 * {@code dependencyAncestors}; the stringified cycle is then added to
 * {@code cycles}.
 *
 * @param dependencyAncestors insertion-ordered chain of ancestor names
 * @param cycles              output collection of rendered cycle strings
 * @param child               the node being checked against the chain
 * @return true when a cycle was detected (and recorded), false otherwise
 */
private boolean detectCycle(LinkedHashSet<String> dependencyAncestors, Set<String> cycles, final String child) {
    if (!dependencyAncestors.contains(child)) {
        return false;
    }
    // Locate the first occurrence of child in the insertion-ordered ancestors.
    int firstSeen = 0;
    for (String ancestor : dependencyAncestors) {
        if (ancestor.equals(child)) {
            break;
        }
        firstSeen++;
    }
    // The cycle starts from the first time the child is seen in the ancestors
    // list, continues to the end of the list, and closes back on the child.
    cycles.add(Joiner.on(" -> ").join(Iterables.skip(dependencyAncestors, firstSeen)) + " -> " + child);
    return true;
}
/**
 * Rebuilds {@code polygon} from its outline, dropping repeated vertices.
 *
 * <p>Walks the flattened path of the polygon's {@link Area} and keeps each
 * distinct (integer-truncated) point exactly once, in outline order.
 *
 * @param polygon the polygon to filter
 * @return a new polygon containing each distinct vertex once
 */
private static Polygon filterPolygon(Polygon polygon) {
    Area area = new Area(polygon);
    Polygon newPoly = new Polygon();
    // flatness 0: iterate the exact (already polygonal) outline
    PathIterator it = area.getPathIterator(AffineTransform.getTranslateInstance(0, 0), 0);
    float[] coords = new float[6];
    LinkedHashSet<String> seen = new LinkedHashSet<String>();
    while (!it.isDone()) {
        // NOTE(review): SEG_CLOSE segments do not fill coords, so the stale
        // previous point is revisited; the seen-set below filters it out.
        it.currentSegment(coords);
        Point v = new Point((int) coords[0], (int) coords[1]);
        // Set.add returns false for an already-seen vertex — replaces the
        // previous contains()+add() pair with a single call
        if (seen.add(v.toString())) {
            newPoly.addPoint(v.x, v.y);
        }
        it.next();
    }
    return newPoly;
}
/**
 * Adds all super-interfaces of pivot to acceptor except those listed in exclude.
 *
 * @param acceptor
 *            list to add to
 * @param exclude
 *            exclusion set
 * @param pivot
 *            interface providing super-interfaces
 * @param castGuard
 *            runtime type used to verify a super classifier's declared type
 *            really is a {@code T} before the unchecked cast
 */
private <T extends TClassifier> void includeAdditionsSuperInterfaces2(LinkedHashSet<T> acceptor,
        LinkedHashSet<T> exclude, T pivot, Class<T> castGuard) {
    for (ParameterizedTypeRef superApiClassifier : pivot.getSuperClassifierRefs()) {
        Type superApiDeclaredType = superApiClassifier.getDeclaredType();
        if (castGuard.isAssignableFrom(superApiDeclaredType.getClass())) {
            // cast is safe: guarded by the isAssignableFrom check above;
            // reuse the already-fetched declared type instead of calling
            // getDeclaredType() a second time
            @SuppressWarnings("unchecked")
            T superInterface = (T) superApiDeclaredType;
            if (!exclude.contains(superInterface)) {
                acceptor.add(superInterface);
            }
        } else {
            // should we handle this or gracefully skip for broken models?
            if (logger.isDebugEnabled()) {
                // "Oopss" typo fixed in the log message
                logger.debug("Oops ... Casting could not be performed Guard = " + castGuard.getName()
                        + " DeclaredType of superApiClassifier '" + superApiDeclaredType.getClass().getName()
                        + "' ");
            }
        }
    }
}
/**
 * Stores the listed object under the specified hash key in map. Unlike a
 * standard map, the listed object will not replace any object already at
 * the appropriate Map location, but rather will be appended to a List
 * stored in that location.
 *
 * @param hashed the key to file the object under
 * @param listed the object to append to that key's ordered set
 * @param map    the key-to-ordered-set index being built
 */
private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) {
    // computeIfAbsent replaces the get/null-check/put dance, and Set.add is
    // already a no-op for duplicates, so the contains() guard is unnecessary.
    map.computeIfAbsent(hashed, k -> new LinkedHashSet<>(1)).add(listed);
}
/**
 * Stores the listed object under the specified hash key in map. Unlike a
 * standard map, the listed object will not replace any object already at
 * the appropriate Map location, but rather will be appended to a List
 * stored in that location.
 *
 * @param hashed the key to file the object under
 * @param listed the object to append to that key's ordered set
 * @param map    the key-to-ordered-set index being built
 */
private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) {
    // computeIfAbsent replaces the get/null-check/put dance, and Set.add is
    // already a no-op for duplicates, so the contains() guard is unnecessary.
    map.computeIfAbsent(hashed, k -> new LinkedHashSet<>(1)).add(listed);
}
/**
 * Recursively collects into {@code referedTables} every table reachable from
 * {@code table} through its refered-element list, depth-first so the farthest
 * tables are added first.
 *
 * @param referedTables output set of related tables ({@code table} itself is excluded)
 * @param table         the table whose related tables are collected
 */
private void getReferedTables(final LinkedHashSet<ERTable> referedTables, final ERTable table) {
for (final NodeElement nodeElement : table.getReferedElementList()) {
// only table nodes are of interest; skip other node elements
if (nodeElement instanceof ERTable) {
// ignore self-references
if (nodeElement != table) {
final ERTable referedTable = (ERTable) nodeElement;
if (!referedTables.contains(referedTable)) {
// NOTE(review): the table is added only AFTER recursing, so a reference
// cycle (A <-> B) whose members are not yet in the set could recurse
// without bound — confirm the model forbids such cycles
getReferedTables(referedTables, referedTable);
referedTables.add(referedTable);
}
}
}
}
}
/**
 * Returns whether array elements of type {@code t} can be assigned values of
 * type {@code u}, i.e. whether {@code t} occurs in the flattened hierarchy
 * of {@code u}.
 *
 * @param repo repository used to resolve class information; not null
 * @param t    target element type; must be an object (non-primitive, non-array) type
 * @param u    source type; must be an object (non-primitive, non-array) type
 * @return true if {@code u}'s flattened hierarchy contains {@code t}'s internal name
 */
private static boolean isArrayElementTypeAssignableFrom(ClassInformationRepository repo, Type t, Type u) {
Validate.notNull(repo);
Validate.notNull(t);
Validate.notNull(u);
Validate.isTrue(t.getSort() == Type.OBJECT);
Validate.isTrue(u.getSort() == Type.OBJECT);
// precondition: t itself must be known to the repository
ClassInformation ci = repo.getInformation(t.getInternalName());
Validate.isTrue(ci != null, "Unable to find class information for %s", t);
// presumably u's internal name plus all transitive supertypes/interfaces —
// see flattenHierarchy for the exact contents
LinkedHashSet<String> hierarchy = flattenHierarchy(repo, u.getInternalName());
return hierarchy.contains(t.getInternalName());
}
/**
 * Stores the listed object under the specified hash key in map. Unlike a
 * standard map, the listed object will not replace any object already at
 * the appropriate Map location, but rather will be appended to a List
 * stored in that location.
 *
 * @param hashed the key to file the object under
 * @param listed the object to append to that key's ordered set
 * @param map    the key-to-ordered-set index being built
 */
private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) {
    // computeIfAbsent replaces the get/null-check/put dance, and Set.add is
    // already a no-op for duplicates, so the contains() guard is unnecessary.
    map.computeIfAbsent(hashed, k -> new LinkedHashSet<>(1)).add(listed);
}
/**
 * Removes duplicates from {@code srcInstances}, preferring the instance that
 * must be compiled when two equal instances are present.
 *
 * @param srcInstances source instances, possibly containing duplicates
 * @return the de-duplicated instances in first-occurrence order, except that a
 *         later mustCompile duplicate replaces (and is moved after) its twin
 */
private ArrayList<SourceCodeInstance> filterSrcInstances(ArrayList<SourceCodeInstance> srcInstances){
//remove dupes and choose instance which must be compiled if there is a choice between them
LinkedHashSet<SourceCodeInstance> filtered = new LinkedHashSet<SourceCodeInstance>();
for(SourceCodeInstance srci : srcInstances) {
if(filtered.contains(srci)) {
if(srci.mustCompile) {
// add() alone would be a no-op for an equal element already in the set;
// remove-then-add swaps in THIS instance (the mustCompile one). Note this
// also moves the element to the end of the LinkedHashSet's iteration order.
filtered.remove(srci);
filtered.add(srci);
}
}else {
filtered.add(srci);
}
}
return new ArrayList<SourceCodeInstance>(filtered);
}
/**
 * Stores the listed object under the specified hash key in map. Unlike a
 * standard map, the listed object will not replace any object already at
 * the appropriate Map location, but rather will be appended to a List
 * stored in that location.
 *
 * @param hashed the key to file the object under
 * @param listed the object to append to that key's ordered set
 * @param map    the key-to-ordered-set index being built
 */
private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) {
    // computeIfAbsent replaces the get/null-check/put dance, and Set.add is
    // already a no-op for duplicates, so the contains() guard is unnecessary.
    map.computeIfAbsent(hashed, k -> new LinkedHashSet<>(1)).add(listed);
}
/**
 * Generates a super set of fields from both old and new schema.
 *
 * <p>Old-schema fields come first, in declared order, followed by any
 * new-schema fields that are not already present and are not metadata fields.
 *
 * @param oldSchema the previous schema version
 * @param newSchema the current schema version
 * @return the combined fields to write, old-schema fields first
 */
private static LinkedHashSet<Field> getCombinedFieldsToWrite(Schema oldSchema, Schema newSchema) {
    LinkedHashSet<Field> allFields = new LinkedHashSet<>(oldSchema.getFields());
    for (Schema.Field f : newSchema.getFields()) {
        // Set.add already rejects duplicates, so only the metadata check is needed
        if (!isMetadataField(f.name())) {
            allFields.add(f);
        }
    }
    return allFields;
}
/**
 * Stores the listed object under the specified hash key in map. Unlike a
 * standard map, the listed object will not replace any object already at
 * the appropriate Map location, but rather will be appended to a List
 * stored in that location.
 *
 * @param hashed the key to file the object under
 * @param listed the object to append to that key's ordered set
 * @param map    the key-to-ordered-set index being built
 */
private <H, L> void store(H hashed, L listed, Map<H, LinkedHashSet<L>> map) {
    // computeIfAbsent replaces the get/null-check/put dance, and Set.add is
    // already a no-op for duplicates, so the contains() guard is unnecessary.
    map.computeIfAbsent(hashed, k -> new LinkedHashSet<>(1)).add(listed);
}
/**
 * Starts cycle checking for the given (class, name) dependency node: pushes
 * the pair onto the cycle-detection stack, failing fast if it is already there.
 *
 * @param clazz the class being resolved
 * @param name  the qualifying name of the dependency
 * @throws CyclicDependencyException if the pair is already on the stack,
 *         i.e. the dependency graph loops back on itself
 */
@Override
public void checkCyclesStart(Class clazz, String name) {
final Pair pair = new Pair(clazz, name);
// cycleDetectionStack.get() — presumably one stack per thread (ThreadLocal-style);
// TODO confirm against the declaration of cycleDetectionStack
final LinkedHashSet<Pair> linkedHashSet = cycleDetectionStack.get();
if (linkedHashSet.contains(pair)) {
// the pair is already on the active resolution path: report the whole chain
throw new CyclicDependencyException(Pair.getClassList(linkedHashSet), clazz);
}
linkedHashSet.add(pair);
}
/**
 * Returns the platform (native) format names that the given flavor maps to:
 * the flavor's own MIME type when its representation class needs no
 * translation, encodable image-writer MIME types for the image flavor, and
 * one entry per standard encoding for charset text flavors.
 *
 * @param df the flavor to map; may be null
 * @return the native format names in mapping order; empty when {@code df} is null
 */
public LinkedHashSet<String> getPlatformMappingsForFlavor(DataFlavor df) {
    LinkedHashSet<String> natives = new LinkedHashSet<>(1);
    if (df == null) {
        return natives;
    }
    String charset = df.getParameter("charset");
    String baseType = df.getPrimaryType() + "/" + df.getSubType();
    String mimeType = baseType;
    if (charset != null && DataTransferer.isFlavorCharsetTextType(df)) {
        mimeType += ";charset=" + charset;
    }
    // Add a mapping to the MIME native whenever the representation class
    // doesn't require translation.
    if (df.getRepresentationClass() != null &&
        (df.isRepresentationClassInputStream() ||
         df.isRepresentationClassByteBuffer() ||
         byte[].class.equals(df.getRepresentationClass()))) {
        natives.add(mimeType);
    }
    if (DataFlavor.imageFlavor.equals(df)) {
        String[] mimeTypes = ImageIO.getWriterMIMETypes();
        if (mimeTypes != null) {
            // enhanced-for and a typed Iterator replace the raw, index-based
            // iteration, matching the newer revision of this method
            for (String mime : mimeTypes) {
                Iterator<ImageWriter> writers = ImageIO.getImageWritersByMIMEType(mime);
                while (writers.hasNext()) {
                    ImageWriter imageWriter = writers.next();
                    ImageWriterSpi writerSpi = imageWriter.getOriginatingProvider();
                    if (writerSpi != null &&
                        writerSpi.canEncodeImage(getDefaultImageTypeSpecifier())) {
                        natives.add(mime);
                        break;
                    }
                }
            }
        }
    } else if (DataTransferer.isFlavorCharsetTextType(df)) {
        // stringFlavor is semantically equivalent to the standard
        // "text/plain" MIME type.
        if (DataFlavor.stringFlavor.equals(df)) {
            baseType = "text/plain";
        }
        for (String encoding : DataTransferer.standardEncodings()) {
            if (!encoding.equals(charset)) {
                natives.add(baseType + ";charset=" + encoding);
            }
        }
        // Add a MIME format without specified charset.
        if (!natives.contains(baseType)) {
            natives.add(baseType);
        }
    }
    return natives;
}
/**
 * Returns the platform (native) format names that the given flavor maps to:
 * the flavor's own MIME type when its representation class needs no
 * translation, encodable image-writer MIME types for the image flavor, and
 * one entry per standard encoding for charset text flavors.
 *
 * @param df the flavor to map; may be null
 * @return the native format names in mapping order; empty when {@code df} is null
 */
@Override
public LinkedHashSet<String> getPlatformMappingsForFlavor(DataFlavor df) {
LinkedHashSet<String> natives = new LinkedHashSet<>(1);
if (df == null) {
return natives;
}
String charset = df.getParameter("charset");
String baseType = df.getPrimaryType() + "/" + df.getSubType();
String mimeType = baseType;
if (charset != null && DataFlavorUtil.isFlavorCharsetTextType(df)) {
mimeType += ";charset=" + charset;
}
// Add a mapping to the MIME native whenever the representation class
// doesn't require translation.
if (df.getRepresentationClass() != null &&
(df.isRepresentationClassInputStream() ||
df.isRepresentationClassByteBuffer() ||
byte[].class.equals(df.getRepresentationClass()))) {
natives.add(mimeType);
}
if (DataFlavor.imageFlavor.equals(df)) {
String[] mimeTypes = ImageIO.getWriterMIMETypes();
if (mimeTypes != null) {
for (String mime : mimeTypes) {
Iterator<ImageWriter> writers = ImageIO.getImageWritersByMIMEType(mime);
while (writers.hasNext()) {
ImageWriter imageWriter = writers.next();
ImageWriterSpi writerSpi = imageWriter.getOriginatingProvider();
// only advertise MIME types that at least one writer can actually encode
if (writerSpi != null &&
writerSpi.canEncodeImage(getDefaultImageTypeSpecifier())) {
natives.add(mime);
break;
}
}
}
}
} else if (DataFlavorUtil.isFlavorCharsetTextType(df)) {
// stringFlavor is semantically equivalent to the standard
// "text/plain" MIME type.
if (DataFlavor.stringFlavor.equals(df)) {
baseType = "text/plain";
}
// one native per standard encoding, skipping the flavor's own charset
// (that exact combination was already added above when applicable)
for (String encoding : DataFlavorUtil.standardEncodings()) {
if (!encoding.equals(charset)) {
natives.add(baseType + ";charset=" + encoding);
}
}
// Add a MIME format without specified charset.
if (!natives.contains(baseType)) {
natives.add(baseType);
}
}
return natives;
}
/**
 * If a cycle is detected, then cycleParticipants contains all the paths of projects involved in this cycle (directly and indirectly),
 * no cycle if the set is empty (and started empty)
 * @param prereqChain ArrayList - chain of project paths from the traversal root down to this project
 * @param cycleParticipants LinkedHashSet - accumulates the paths of all projects found to be in a cycle
 * @param workspaceRoot IWorkspaceRoot - used to resolve prerequisite project paths to resources
 * @param traversed HashSet - paths already visited, so shared prerequisites are not re-walked
 * @param preferredClasspaths Map - optional project-to-resolved-classpath override; falls back to getResolvedClasspath()
 */
public void updateCycleParticipants(
ArrayList prereqChain,
LinkedHashSet cycleParticipants,
IWorkspaceRoot workspaceRoot,
HashSet traversed,
Map preferredClasspaths){
IPath path = getPath();
// push this project onto the active chain and mark it globally visited
prereqChain.add(path);
traversed.add(path);
try {
IClasspathEntry[] classpath = null;
if (preferredClasspaths != null) classpath = (IClasspathEntry[])preferredClasspaths.get(this);
if (classpath == null) classpath = getResolvedClasspath();
for (int i = 0, length = classpath.length; i < length; i++) {
IClasspathEntry entry = classpath[i];
// only project entries can participate in a project cycle
if (entry.getEntryKind() == IClasspathEntry.CPE_PROJECT){
IPath prereqProjectPath = entry.getPath();
// index 0 when the prerequisite is already a known cycle participant
// (the whole current chain then joins the cycle); otherwise its
// position in the current chain, or -1 if it is not on the chain
int index = cycleParticipants.contains(prereqProjectPath) ? 0 : prereqChain.indexOf(prereqProjectPath);
if (index >= 0) { // refer to cycle, or in cycle itself
// everything from that position to the end of the chain is in the cycle
for (int size = prereqChain.size(); index < size; index++) {
cycleParticipants.add(prereqChain.get(index));
}
} else {
if (!traversed.contains(prereqProjectPath)) {
IResource member = workspaceRoot.findMember(prereqProjectPath);
if (member != null && member.getType() == IResource.PROJECT){
JavaProject javaProject = (JavaProject)JavaCore.create((IProject)member);
// depth-first descent into the prerequisite project
javaProject.updateCycleParticipants(prereqChain, cycleParticipants, workspaceRoot, traversed, preferredClasspaths);
}
}
}
}
}
} catch(JavaModelException e){
// project doesn't exist: ignore
}
// backtrack: this project is no longer on the active chain
prereqChain.remove(path);
}