下面列出了 com.fasterxml.jackson.core.filter.TokenFilter API 类的实例代码及用法；也可以点击链接到 GitHub 查看源代码。
@Override
public void writeFieldName(String name) throws IOException {
    // Record the name on the current filter context; the returned filter
    // is the one governing the enclosing object scope.
    TokenFilter scopeFilter = filterContext.setFieldName(name);
    if (scopeFilter == null) {
        // Enclosing scope is filtered out entirely; suppress this name.
        itemFilter = null;
        return;
    }
    if (scopeFilter == TokenFilter.INCLUDE_ALL) {
        // Scope passes everything through: write the name unconditionally.
        itemFilter = scopeFilter;
        delegate.writeFieldName(name);
        return;
    }
    // Ask the filter whether this specific property is included; a null
    // result means the property (and its value) must be dropped.
    TokenFilter propertyFilter = scopeFilter.includeProperty(name);
    itemFilter = propertyFilter;
    if (propertyFilter != null) {
        delegate.writeFieldName(name);
    }
}
@Override
public void writeFieldName(SerializableString name) throws IOException {
    // Same logic as the plain-String overload, but keeps the
    // SerializableString form when delegating (avoids re-encoding).
    TokenFilter scopeFilter = filterContext.setFieldName(name.getValue());
    if (scopeFilter == null) {
        // Enclosing scope is filtered out; suppress this name.
        itemFilter = null;
        return;
    }
    if (scopeFilter == TokenFilter.INCLUDE_ALL) {
        itemFilter = scopeFilter;
        delegate.writeFieldName(name);
        return;
    }
    // Per-property decision: null means the property is excluded.
    TokenFilter propertyFilter = scopeFilter.includeProperty(name.getValue());
    itemFilter = propertyFilter;
    if (propertyFilter != null) {
        delegate.writeFieldName(name);
    }
}
@Override
public void writeFieldId(long id) throws IOException {
    // Filters match on names, so the numeric id is evaluated via its
    // decimal String form; the delegate still receives the raw long.
    String asName = Long.toString(id);
    TokenFilter scopeFilter = filterContext.setFieldName(asName);
    if (scopeFilter == null) {
        // Enclosing scope is filtered out; suppress this field id.
        itemFilter = null;
        return;
    }
    if (scopeFilter == TokenFilter.INCLUDE_ALL) {
        itemFilter = scopeFilter;
        delegate.writeFieldId(id);
        return;
    }
    TokenFilter propertyFilter = scopeFilter.includeProperty(asName);
    itemFilter = propertyFilter;
    if (propertyFilter != null) {
        delegate.writeFieldId(id);
    }
}
/**
 * Copy constructor that clones an existing reader while attaching a
 * {@link TokenFilter}; all other state is carried over unchanged.
 */
protected ObjectReader(ObjectReader base, TokenFilter filter) {
    _filter = filter;
    // Configuration and factory state, copied verbatim from the base reader.
    _config = base._config;
    _context = base._context;
    _parserFactory = base._parserFactory;
    _dataFormatReaders = base._dataFormatReaders;
    // Deserialization state, copied verbatim.
    _rootDeserializers = base._rootDeserializers;
    _rootDeserializer = base._rootDeserializer;
    _valueType = base._valueType;
    _valueToUpdate = base._valueToUpdate;
    _schema = base._schema;
    _injectableValues = base._injectableValues;
    _unwrapRoot = base._unwrapRoot;
}
/**
 * Evaluates if a property name matches one of the given filter paths.
 *
 * @param name    property name being tested
 * @param filters candidate filter paths (may be {@code null})
 * @return {@code MATCHING} on a complete match, {@code NO_MATCHING} when
 *         nothing matches, or a narrowed filter for partial matches
 */
private TokenFilter evaluate(String name, FilterPath[] filters) {
    if (filters == null) {
        return NO_MATCHING;
    }
    List<FilterPath> partialMatches = null;
    for (FilterPath path : filters) {
        FilterPath advanced = path.matchProperty(name);
        if (advanced == null) {
            continue;
        }
        if (advanced.matches()) {
            // Full path matched: property is unconditionally included.
            return MATCHING;
        }
        if (partialMatches == null) {
            partialMatches = new ArrayList<>();
        }
        // A double-wildcard segment can still match deeper properties,
        // so the original path is retained alongside the advanced one.
        if (path.isDoubleWildcard()) {
            partialMatches.add(path);
        }
        partialMatches.add(advanced);
    }
    if (partialMatches != null && !partialMatches.isEmpty()) {
        return new FilterPathBasedFilter(partialMatches.toArray(new FilterPath[0]));
    }
    return NO_MATCHING;
}
@Override
public TokenFilter includeProperty(String name) {
    // Map the sentinel results of evaluate() onto the TokenFilter contract:
    // MATCHING -> include everything below, NO_MATCHING -> drop the property,
    // anything else -> keep filtering with the narrowed filter.
    TokenFilter result = evaluate(name, filters);
    if (result == MATCHING) {
        return TokenFilter.INCLUDE_ALL;
    }
    return (result == NO_MATCHING) ? null : result;
}
@Override
public TokenFilter includeProperty(String name) {
    // An explicit include list (without "*") acts as a whitelist: anything
    // not on it is dropped.
    boolean whitelisted = includedProperties.isEmpty()
            || includedProperties.contains("*")
            || includedProperties.contains(name);
    if (!whitelisted) {
        return null;
    }
    // Exclusions override inclusions.
    if (excludedProperties.contains("*") || excludedProperties.contains(name)) {
        return null;
    }
    // Nested filters: an exact name entry wins over the wildcard entry.
    // containsKey is used (not get != null) in case a mapping holds null.
    if (nestedProperties.containsKey(name)) {
        return nestedProperties.get(name);
    }
    if (nestedProperties.containsKey("*")) {
        return nestedProperties.get("*");
    }
    return TokenFilter.INCLUDE_ALL;
}
/**
 * Creates a filtering context for one nesting level.
 *
 * @param type         one of the TYPE_* scope constants
 * @param parent       enclosing context, or {@code null} for the root
 * @param filter       filter governing this scope
 * @param startHandled whether the scope's start marker was already emitted
 */
protected PropertyFilteringTokenContext(int type, PropertyFilteringTokenContext parent, TokenFilter filter, boolean startHandled) {
    _type = type;
    _parent = parent;
    _filter = filter;
    _startHandled = startHandled;
    // -1 means "no entries written yet" for this scope.
    _index = -1;
}
/**
 * Re-initializes this instance for a new scope so child contexts can be
 * recycled instead of reallocated.
 */
protected PropertyFilteringTokenContext reset(int type, TokenFilter filter, boolean startWritten) {
    _type = type;
    _filter = filter;
    _startHandled = startWritten;
    // Clear per-scope bookkeeping.
    _currentName = null;
    _index = -1;
    return this;
}
/**
 * Returns an array-scope child context, recycling the previously created
 * child instance when one exists.
 */
public PropertyFilteringTokenContext createChildArrayContext(TokenFilter filter, boolean writeStart) {
    PropertyFilteringTokenContext child = _child;
    if (child != null) {
        return child.reset(TYPE_ARRAY, filter, writeStart);
    }
    child = new PropertyFilteringTokenContext(TYPE_ARRAY, this, filter, writeStart);
    _child = child;
    return child;
}
/**
 * Returns an object-scope child context, recycling the previously created
 * child instance when one exists.
 */
public PropertyFilteringTokenContext createChildObjectContext(TokenFilter filter, boolean writeStart) {
    PropertyFilteringTokenContext child = _child;
    if (child != null) {
        return child.reset(TYPE_OBJECT, filter, writeStart);
    }
    child = new PropertyFilteringTokenContext(TYPE_OBJECT, this, filter, writeStart);
    _child = child;
    return child;
}
/**
 * Evaluates if a property name matches one of the given filter paths.
 *
 * @param name    property name being tested
 * @param filters candidate filter paths (may be {@code null})
 * @return {@code MATCHING} on a complete match, {@code NO_MATCHING} when
 *         nothing matches, or a narrowed filter (carrying this instance's
 *         {@code inclusive} flag) for partial matches
 */
private TokenFilter evaluate(String name, FilterPath[] filters) {
    if (filters == null) {
        return NO_MATCHING;
    }
    List<FilterPath> partialMatches = null;
    for (FilterPath path : filters) {
        FilterPath advanced = path.matchProperty(name);
        if (advanced == null) {
            continue;
        }
        if (advanced.matches()) {
            // Full path matched.
            return MATCHING;
        }
        if (partialMatches == null) {
            partialMatches = new ArrayList<>();
        }
        // Keep double-wildcard paths alive for deeper levels as well.
        if (path.isDoubleWildcard()) {
            partialMatches.add(path);
        }
        partialMatches.add(advanced);
    }
    if (partialMatches != null && !partialMatches.isEmpty()) {
        return new FilterPathBasedFilter(partialMatches.toArray(new FilterPath[0]), inclusive);
    }
    return NO_MATCHING;
}
@Override
public TokenFilter includeProperty(String name) {
    // In inclusive mode a match keeps the property; in exclusive mode a
    // match drops it (and vice versa for no match). Partial matches keep
    // filtering with the narrowed filter.
    TokenFilter result = evaluate(name, filters);
    if (result == MATCHING) {
        return inclusive ? TokenFilter.INCLUDE_ALL : null;
    }
    if (result == NO_MATCHING) {
        return inclusive ? null : TokenFilter.INCLUDE_ALL;
    }
    return result;
}
@Override
public TokenFilter includeProperty(String name) {
    // Pure delegation: the wrapped filter makes the inclusion decision.
    return filter.includeProperty(name);
}
/**
 * Creates the root-level context with the given filter.
 * The root has no parent and no surrounding start/end markers.
 */
public static PropertyFilteringTokenContext createRootContext(TokenFilter filter) {
    // true -> since we have no start/end marker, consider start handled
    return new PropertyFilteringTokenContext(TYPE_ROOT, null, filter, true);
}
/**
 * Records the current property name for this scope and returns the
 * filter governing the scope (may be {@code null} when filtered out).
 */
public TokenFilter setFieldName(String name) {
    _currentName = name;
    return _filter;
}
/** Returns the filter governing this scope, or {@code null} if none. */
public TokenFilter getFilter() {
    return _filter;
}