The following lists example code for java.util.HashMap#get(); click the link to view the source on GitHub, or leave a comment on the right.
/**
 * Gets the document parameters and returns a URL-encoded string in the form
 * {@code %26param1%3Dvalue1%26param2%3Dvalue2 ...} (i.e. "&amp;name=value" pairs
 * with '&amp;' encoded as %26 and '=' encoded as %3D).
 *
 * @param _drillParametersMap map of parameter name to drill parameter; may be null
 * @return the encoded parameter string; empty when the map is null or has no usable entries
 */
public String getDocument_Parameters(HashMap<String, DrillParameter> _drillParametersMap) {
    logger.debug("IN");
    // StringBuilder avoids O(n^2) string concatenation in the loop
    StringBuilder documentParameter = new StringBuilder();
    if (_drillParametersMap != null) {
        for (String name : _drillParametersMap.keySet()) {
            DrillParameter drillPar = _drillParametersMap.get(name);
            String value = drillPar.getValue();
            // skip entries with missing names or values
            if (name != null && !name.equals("") && value != null && !value.equals("")) {
                documentParameter.append("%26").append(name).append("%3D").append(value);
            }
        }
    }
    logger.debug("OUT");
    return documentParameter.toString();
}
/**
 * Recursively walks the B-tree rooted at {@code p}, counting leaf pages per
 * {@link PageOperationHandler} into {@code map} and incrementing the total
 * leaf counter for every leaf found.
 *
 * @param p root of the subtree to scan
 * @param map handler -&gt; leaf-page count, updated in place
 * @param leafPageCount running total of leaf pages, incremented per leaf
 */
private void findLeafPage(BTreePage p, HashMap<PageOperationHandler, Integer> map, AtomicLong leafPageCount) {
    // only internal nodes have children to walk
    if (!p.isNode()) {
        return;
    }
    for (PageReference childRef : p.getChildren()) {
        BTreePage childPage = childRef.getPage();
        if (!childPage.isLeaf()) {
            findLeafPage(childPage, map, leafPageCount);
            continue;
        }
        PageOperationHandler owner = childPage.getHandler();
        Integer previous = map.get(owner);
        map.put(owner, previous == null ? 1 : previous + 1);
        leafPageCount.incrementAndGet();
    }
}
/**
 * The following task is used for client Q conflation tests. Similar to the
 * checkForConflation task, but this task fails if the individual client events
 * are not conflated. (checkForConflation validation passes if there is at least
 * one event conflated in the total test.)
 *
 * @throws TestException when no conflation was recorded for this client proxy
 */
public static void checkClientConflation() {
    PoolImpl pool = HAClientQueue.mypool;
    ClientProxyMembershipID cpm = pool.getProxyID();
    String proxyIdStr = cpm.toString();
    Log.getLogWriter().info("Proxy String : " + proxyIdStr);
    HashMap mp = (HashMap) HAClientQueueBB.getBB().getSharedMap().get(proxyIdStr);
    // "eventsPut" and "numVoidRemovals" were read but never used; removed.
    Long numOfConflated = (Long) mp.get("eventsConflated");
    Log.getLogWriter().info("value of numOfConflated : " + numOfConflated);
    long totalConflation = HAClientQueueBB.getBB().getSharedCounters().read(
        HAClientQueueBB.NUM_GLOBAL_CONFLATE);
    Log.getLogWriter().info("Total Conflation : " + totalConflation);
    // compare the unboxed value; the deprecated new Long(0) allocation is unnecessary
    if (numOfConflated.longValue() == 0L) {
        throw new TestException("No conflation done - Test Issue - needs tuning ");
    }
}
/**
 * Filters each UID's report list so that only the first report per remote
 * host address is kept, preserving the original report order.
 *
 * @return map of UID to its de-duplicated report list
 */
private static HashMap<Integer, List<Report>> filterReports() {
    HashMap<Integer, List<Report>> filteredReportsByApp = new HashMap<>();
    HashSet<String> seenAddresses = new HashSet<>();
    for (int uid : mUidReportMap.keySet()) {
        ArrayList<Report> deduped = new ArrayList<Report>();
        filteredReportsByApp.put(uid, deduped);
        ArrayList<Report> reports = (ArrayList<Report>) mUidReportMap.get(uid);
        // reuse one scratch set across UIDs; reset it per list
        seenAddresses.clear();
        for (Report report : reports) {
            String hostAddress = report.remoteAdd.getHostAddress();
            // Set.add returns false when the address was already seen
            if (seenAddresses.add(hostAddress)) {
                deduped.add(report);
            }
        }
    }
    return filteredReportsByApp;
}
@RunAsClient
@Test(groups = TEST_GROUP_CDI_JSON,
description = "Verify that the injected iat claim is as expected from Token2")
public void verifyInjectedIssuedAt2() throws Exception {
    Reporter.log("Begin verifyInjectedIssuedAt2\n");
    HashMap<String, Long> timeClaims = new HashMap<>();
    String token2 = TokenUtils.generateTokenString("/Token2.json", null, timeClaims);
    // BUG FIX: the iat claim was previously read with the auth_time key, so the
    // endpoint was handed the auth_time value for both query parameters.
    Long iatClaim = timeClaims.get(Claims.iat.name());
    Long authTimeClaim = timeClaims.get(Claims.auth_time.name());
    String uri = baseURL.toExternalForm() + "endp/verifyInjectedIssuedAt";
    WebTarget echoEndpointTarget = ClientBuilder.newClient()
        .target(uri)
        .queryParam(Claims.iat.name(), iatClaim)
        .queryParam(Claims.auth_time.name(), authTimeClaim);
    Response response = echoEndpointTarget.request(MediaType.APPLICATION_JSON).header(HttpHeaders.AUTHORIZATION, "Bearer " + token2).get();
    Assert.assertEquals(response.getStatus(), HttpURLConnection.HTTP_OK);
    String replyString = response.readEntity(String.class);
    JsonReader jsonReader = Json.createReader(new StringReader(replyString));
    JsonObject reply = jsonReader.readObject();
    Reporter.log(reply.toString());
    Assert.assertTrue(reply.getBoolean("pass"), reply.getString("msg"));
}
/**
 * Checks that there are no duplicates and that all prior prefs have a value.
 * Blank/unselected entries are ignored; the remaining rows are combined across
 * the given columns ("col0|col1|...") and checked for duplicates.
 *
 * @param lst List of values; lst[0] drives the row count, further lists add columns
 * @return true if checks ok, false otherwise
 */
public boolean checkPrefs(List... lst) {
    // seen composite values; the original stored each value as both key and value,
    // so HashMap.put's previous-value return doubles as the duplicate check
    HashMap<String, String> seen = new HashMap<String, String>();
    for (int i = 0; i < lst[0].size(); i++) {
        String value = ((String) lst[0].get(i));
        // No selection made - ignore
        if (value == null || value.trim().equals(Preference.BLANK_PREF_VALUE)) {
            continue;
        }
        // Add additional columns, when present
        for (int j = 1; j < lst.length; j++) {
            value += "|" + ((String) lst[j].get(i));
        }
        // BUG FIX: lookups trimmed the value but insertions did not, so two rows
        // differing only in surrounding whitespace escaped duplicate detection.
        String trimmed = value.trim();
        if (seen.put(trimmed, trimmed) != null) {
            // Duplicate selection made
            return false;
        }
    }
    return true;
}
/**
 * Creates a UsernameIterator object.
 *
 * @param patterns The patterns to be evaluated as a String array.
 * @param userData The user information; the required fields are at least first name and last
 *        name.
 * @throws Exception if either argument is null, a required field is missing, or a name
 *         exceeds the maximum allowed length
 */
public UsernameIterator(String[] patterns, HashMap<String, String> userData) throws Exception {
    if (userData == null || patterns == null) {
        throw new Exception("The patterns and userData parameters can't be set as null.");
    }
    if (!userData.containsKey(UsernameManager.FIRST_NAME)
        || !userData.containsKey(UsernameManager.LAST_NAME)) {
        // message typo fixed: "cointain" -> "contain"
        throw new Exception(
            "The userData parameter must contain at least firstname and lastname fields.");
    }
    String firstname = userData.get(UsernameManager.FIRST_NAME);
    String lastname = userData.get(UsernameManager.LAST_NAME);
    if (firstname.length() > UsernameManager.MAX_NAME_LENGTH
        || lastname.length() > UsernameManager.MAX_NAME_LENGTH) {
        // message typo fixed: "exceds" -> "exceeds"
        throw new Exception("One of the fields exceeds the maximum length. 60 (firstname,lastname).");
    }
    this.patterns = patterns;
    this.userData = userData;
    // pattern evaluation starts at the first pattern with auto-numbering from 1
    autonumeric = 1;
    patternIndex = 0;
}
/**
 * Compares the categorical imputations produced by DML against the R baseline,
 * requiring more than 98% of cells to match exactly.
 */
private void testCategoricalOutput()
{
    HashMap<MatrixValue.CellIndex, Double> dmlfileC = readDMLMatrixFromHDFS("C");
    HashMap<MatrixValue.CellIndex, Double> rfileC = readRMatrixFromFS("C");
    // compare categorical imputations; exact Double equality is intentional
    // here since categorical codes are whole numbers
    int countTrue = 0;
    for (MatrixValue.CellIndex index : dmlfileC.keySet()) {
        Double v1 = dmlfileC.get(index);
        Double v2 = rfileC.get(index);
        if (v1.equals(v2))
            countTrue++;
    }
    // assert directly on the ratio instead of the assertTrue(true)/fail() anti-pattern
    double matchRatio = countTrue / (double) dmlfileC.size();
    Assert.assertTrue(matchRatio > 0.98);
}
/**
 * Checks whether the tokenizer configured in the field type's "analyzer" entry
 * matches the given tokenizer factory class, comparing class simple names
 * case-insensitively after stripping the "solr." shorthand prefix.
 */
@SuppressWarnings("unchecked")
private static boolean checkTokenizer(Class<? extends TokenizerFactory> tokenizerFactoryClass, Map<String, Object> fieldTypeInfoMap) {
    HashMap<String, Object> analyzerConfig = (HashMap<String, Object>) fieldTypeInfoMap.get("analyzer");
    HashMap<String, Object> tokenizerConfig = (HashMap<String, Object>) MapUtils.getObject(analyzerConfig, "tokenizer");
    // guard clauses: no tokenizer section, or no usable class name
    if (tokenizerConfig == null) {
        return false;
    }
    String configuredClass = (String) tokenizerConfig.get("class");
    if (StringUtils.isEmpty(configuredClass)) {
        return false;
    }
    String bareName = configuredClass.replace("solr.", "");
    return bareName.equalsIgnoreCase(tokenizerFactoryClass.getSimpleName());
}
/**
 * Returns the setter or getter requested. This utility function first consults
 * the propertyMapMap cache; on a miss it derives the Method from the
 * targetClass directly and stores it in the cache for next time.
 *
 * @param targetClass The Class on which the requested method should exist.
 * @param propertyMapMap The cache of setters/getters derived so far.
 * @param prefix "set" or "get", for the setter or getter.
 * @param valueType The type of parameter passed into the method (null for getter).
 * @return Method the method associated with mPropertyName.
 */
private Method setupSetterOrGetter(Class targetClass,
    HashMap<Class, HashMap<String, Method>> propertyMapMap,
    String prefix, Class valueType) {
    Method resolved = null;
    // Hold the write lock for the whole check-then-insert sequence so another
    // thread cannot race between our cache miss and our cache update.
    mPropertyMapLock.writeLock().lock();
    try {
        HashMap<String, Method> methodsForClass = propertyMapMap.get(targetClass);
        if (methodsForClass != null) {
            resolved = methodsForClass.get(mPropertyName);
        }
        if (resolved == null) {
            // cache miss: reflectively look the method up on the class
            resolved = getPropertyFunction(targetClass, prefix, valueType);
            if (methodsForClass == null) {
                methodsForClass = new HashMap<String, Method>();
                propertyMapMap.put(targetClass, methodsForClass);
            }
            methodsForClass.put(mPropertyName, resolved);
        }
    } finally {
        mPropertyMapLock.writeLock().unlock();
    }
    return resolved;
}
/**
 * Returns true when the field's category option combo UID appears in any of
 * the category combos registered for the field's data element.
 *
 * @param dataElementCategoryOptionRelation data element UID -&gt; its category combos
 * @param field the field whose category option combo is validated
 * @return true if the combo UID is known for the field's data element, false otherwise
 */
private static boolean isAValidCategoryOptionCombo(
    HashMap<String, List<CategoryCombo>> dataElementCategoryOptionRelation, Field field) {
    List<CategoryCombo> categoryCombos =
        dataElementCategoryOptionRelation.get(field.getDataElement());
    // ROBUSTNESS: an unknown data element previously caused an NPE in the
    // for-each below; treat it as "no valid combo" instead.
    if (categoryCombos == null) {
        return false;
    }
    for (CategoryCombo categoryCombo : categoryCombos) {
        for (String categoryOptionComboUId : categoryCombo.getCategoryOptionComboUIdList()) {
            if (field.getCategoryOptionCombo().equals(categoryOptionComboUId)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Applies the layout results computed for this node: moves the figure to the
 * bounds recorded in the graph node mapped to this edit part, then propagates
 * the results to every outgoing edge edit part.
 */
public void applyGraphResults(DirectedGraph graph, HashMap map) {
    Node layoutNode = (Node) map.get(this);
    Rectangle layoutBounds =
        new Rectangle(layoutNode.x, layoutNode.y, layoutNode.width, layoutNode.height);
    ((CFGNodeFigure) getFigure()).setBounds(layoutBounds);
    List outgoingConnections = getSourceConnections();
    for (int i = 0; i < outgoingConnections.size(); i++) {
        CFGEdgeEditPart edgePart = (CFGEdgeEditPart) outgoingConnections.get(i);
        edgePart.applyGraphResults(graph, map);
    }
}
/**
 * Reconstruct from an XML entry. Parses the known attributes out of
 * {@code ht_attributes}; missing required attributes are reported via
 * {@code xmlError} and keep their current value.
 */
public LayerSet(final Project project, final long id, final HashMap<String,String> ht_attributes, final HashMap<Displayable,String> ht_links) {
    super(project, id, ht_attributes, ht_links);
    String data;
    if (null != (data = ht_attributes.get("layer_width"))) this.layer_width = Float.parseFloat(data);
    else xmlError("layer_width", this.layer_width);
    if (null != (data = ht_attributes.get("layer_height"))) this.layer_height = Float.parseFloat(data);
    else xmlError("layer_height", this.layer_height);
    if (null != (data = ht_attributes.get("rot_x"))) this.rot_x = Double.parseDouble(data);
    else xmlError("rot_x", this.rot_x);
    if (null != (data = ht_attributes.get("rot_y"))) this.rot_y = Double.parseDouble(data);
    else xmlError("rot_y", this.rot_y);
    // BUG FIX: the parsed "rot_z" value was previously assigned to rot_y,
    // silently clobbering rot_y and leaving rot_z unset.
    if (null != (data = ht_attributes.get("rot_z"))) this.rot_z = Double.parseDouble(data);
    else xmlError("rot_z", this.rot_z);
    if (null != (data = ht_attributes.get("snapshots_quality"))) snapshots_quality = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("snapshots_mode"))) {
        // match the stored mode name against the known snapshot modes
        final String smode = data.trim();
        for (int i=0; i<snapshot_modes.length; i++) {
            if (smode.equals(snapshot_modes[i])) {
                snapshots_mode = i;
                break;
            }
        }
    }
    if (null != (data = ht_attributes.get("color_cues"))) color_cues = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("area_color_cues"))) area_color_cues = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("n_layers_color_cue"))) {
        n_layers_color_cue = Integer.parseInt(data.trim().toLowerCase());
        // clamp: -1 is the sentinel for "all layers"
        if (n_layers_color_cue < -1) n_layers_color_cue = -1;
    }
    if (null != (data = ht_attributes.get("avoid_color_cue_colors"))) {
        // If there's any error in the parsing, default to true for use_color_cue_colors:
        use_color_cue_colors = !Boolean.valueOf(data.trim().toLowerCase());
    }
    if (null != (data = ht_attributes.get("paint_arrows"))) paint_arrows = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("paint_tags"))) paint_tags = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("paint_edge_confidence_boxes"))) paint_edge_confidence_boxes = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("prepaint"))) prepaint = Boolean.valueOf(data.trim().toLowerCase());
    if (null != (data = ht_attributes.get("preload_ahead"))) preload_ahead = Integer.parseInt(data);
}
/**
 * Registers standard transliterators with the system. Called by
 * Transliterator during initialization. Scans all current targets
 * and registers those that are scripts T as Any-T/V.
 */
static void register() {
    // target -> variants already registered; a set per target (the old code
    // used one set but depended on enumeration order)
    HashMap<String, Set<String>> registeredVariants = new HashMap<String, Set<String>>();
    Enumeration<String> sources = Transliterator.getAvailableSources();
    while (sources.hasMoreElements()) {
        String source = sources.nextElement();
        // Ignore the "Any" source
        if (source.equalsIgnoreCase(ANY)) {
            continue;
        }
        Enumeration<String> targets = Transliterator.getAvailableTargets(source);
        while (targets.hasMoreElements()) {
            String target = targets.nextElement();
            // Get the script code for the target. If not a script, ignore.
            int targetScript = scriptNameToCode(target);
            if (targetScript == UScript.INVALID_CODE) {
                continue;
            }
            Set<String> variantsForTarget = registeredVariants.get(target);
            if (variantsForTarget == null) {
                variantsForTarget = new HashSet<String>();
                registeredVariants.put(target, variantsForTarget);
            }
            Enumeration<String> variants = Transliterator.getAvailableVariants(source, target);
            while (variants.hasMoreElements()) {
                String variant = variants.nextElement();
                // Only process each target/variant pair once; add() returns
                // false when the variant was already registered.
                if (!variantsForTarget.add(variant)) {
                    continue;
                }
                String id = TransliteratorIDParser.STVtoID(ANY, target, variant);
                AnyTransliterator trans =
                    new AnyTransliterator(id, target, variant, targetScript);
                Transliterator.registerInstance(trans);
                Transliterator.registerSpecialInverse(target, NULL_ID, false);
            }
        }
    }
}
/**
 * Computes the bounding box of each region within a 3D label image.
 *
 * @param image
 *            the input image containing label of particles
 * @param labels
 *            the array of labels within the image
 * @param calib
 *            the calibration of the image
 * @return an array of Box3D representing the calibrated coordinates of
 *         the bounding box of each region
 */
public Box3D[] analyzeRegions(ImageStack image, int[] labels, Calibration calib)
{
    // dimensions of the input image
    int width = image.getWidth();
    int height = image.getHeight();
    int depth = image.getSize();
    // spatial calibration (spacing and origin), identity when not provided
    double spacingX = 1, spacingY = 1, spacingZ = 1;
    double originX = 0, originY = 0, originZ = 0;
    if (calib != null)
    {
        spacingX = calib.pixelWidth;
        spacingY = calib.pixelHeight;
        spacingZ = calib.pixelDepth;
        originX = calib.xOrigin;
        originY = calib.yOrigin;
        originZ = calib.zOrigin;
    }
    // associative array mapping each label to its index in the labels array
    HashMap<Integer, Integer> labelIndices = LabelImages.mapLabelIndices(labels);
    // per-label extreme coordinates, initialized so any voxel updates them
    int nLabels = labels.length;
    double[] minXs = new double[nLabels];
    double[] maxXs = new double[nLabels];
    double[] minYs = new double[nLabels];
    double[] maxYs = new double[nLabels];
    double[] minZs = new double[nLabels];
    double[] maxZs = new double[nLabels];
    for (int i = 0; i < nLabels; i++)
    {
        minXs[i] = Double.POSITIVE_INFINITY;
        maxXs[i] = Double.NEGATIVE_INFINITY;
        minYs[i] = Double.POSITIVE_INFINITY;
        maxYs[i] = Double.NEGATIVE_INFINITY;
        minZs[i] = Double.POSITIVE_INFINITY;
        maxZs[i] = Double.NEGATIVE_INFINITY;
    }
    // scan the image, updating the extreme coordinates of each region
    fireStatusChanged(this, "Compute bounds");
    for (int z = 0; z < depth; z++)
    {
        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int label = (int) image.getVoxel(x, y, z);
                // skip background and labels absent from the input list
                if (label == 0 || !labelIndices.containsKey(label))
                {
                    continue;
                }
                int index = labelIndices.get(label);
                minXs[index] = Math.min(minXs[index], x);
                maxXs[index] = Math.max(maxXs[index], x + 1);
                minYs[index] = Math.min(minYs[index], y);
                maxYs[index] = Math.max(maxYs[index], y + 1);
                minZs[index] = Math.min(minZs[index], z);
                maxZs[index] = Math.max(maxZs[index], z + 1);
            }
        }
    }
    // convert the voxel-based extents into calibrated Box3D instances
    Box3D[] boxes = new Box3D[nLabels];
    for (int i = 0; i < nLabels; i++)
    {
        boxes[i] = new Box3D(
            minXs[i] * spacingX + originX, maxXs[i] * spacingX + originX,
            minYs[i] * spacingY + originY, maxYs[i] * spacingY + originY,
            minZs[i] * spacingZ + originZ, maxZs[i] * spacingZ + originZ);
    }
    return boxes;
}
/**
 * Computes the sky view factor map from the input elevation model and stores
 * it in {@code outSky}. Cells that are novalue in the elevation model, and a
 * two-cell border around the raster, are marked as novalue in the result.
 *
 * @throws Exception if the input elevation coverage is missing
 */
@Execute
public void process() throws Exception {
    checkNull(inElev);
    // extract some attributes of the map
    HashMap<String, Double> attribute = CoverageUtilities.getRegionParamsFromGridCoverage(inElev);
    double dx = attribute.get(CoverageUtilities.XRES);
    // (a second, redundant call to getRegionParamsFromGridCoverage whose result
    // was discarded has been removed)
    // extract the raster, replacing novalues with -9999.0
    RenderedImage pitTmpRI = inElev.getRenderedImage();
    WritableRaster pitWR = CoverageUtilities.replaceNovalue(pitTmpRI, -9999.0);
    pitTmpRI = null;
    minX = pitWR.getMinX();
    minY = pitWR.getMinY();
    rows = pitWR.getHeight();
    cols = pitWR.getWidth();
    WritableRaster skyWR = skyviewfactor(pitWR, dx);
    int maxY = minY + rows;
    int maxX = minX + cols;
    // propagate novalues from the elevation raster into the result
    for( int y = minY + 2; y < maxY - 2; y++ ) {
        for( int x = minX + 2; x < maxX - 2; x++ ) {
            if (pitWR.getSampleDouble(x, y, 0) == -9999.0) {
                skyWR.setSample(x, y, 0, doubleNovalue);
            }
        }
    }
    // mark the two-cell left/right border as novalue
    // NOTE(review): the column indices below use 0/1 and cols-2/cols-1 while the
    // loops above are minX/minY-based; this is only equivalent when minX == 0 and
    // minY == 0 -- confirm the rasters are always zero-based.
    for( int y = minY; y < maxY; y++ ) {
        skyWR.setSample(0, y, 0, doubleNovalue);
        skyWR.setSample(1, y, 0, doubleNovalue);
        skyWR.setSample(cols - 2, y, 0, doubleNovalue);
        skyWR.setSample(cols - 1, y, 0, doubleNovalue);
    }
    // mark the two-cell top/bottom border as novalue
    for( int x = minX + 2; x < maxX - 2; x++ ) {
        skyWR.setSample(x, 0, 0, doubleNovalue);
        skyWR.setSample(x, 1, 0, doubleNovalue);
        skyWR.setSample(x, rows - 2, 0, doubleNovalue);
        skyWR.setSample(x, rows - 1, 0, doubleNovalue);
    }
    outSky = CoverageUtilities.buildCoverage("skyview factor", skyWR, attribute, inElev.getCoordinateReferenceSystem());
}
/**
 * Loads all active model families from the database. For each family the
 * result carries its crisis ID, its nominal attribute with all labels, the
 * current model ID (when one exists) and the summed count of labeled training
 * examples across the family's labels.
 *
 * @return the list of active model families; empty when a SQL error occurs
 *         (the error is logged, not rethrown)
 */
public static ArrayList<ModelFamilyEC> getActiveModels() {
ArrayList<ModelFamilyEC> modelFamilies = new ArrayList<>();
Connection conn = null;
PreparedStatement sql = null;
ResultSet result = null;
try {
conn = getMySqlConnection();
// Raise the session's GROUP_CONCAT limit before running the main query.
Statement sql2 = conn.createStatement();
sql2.execute("SET group_concat_max_len = 10240");
sql2.close();
sql = conn
.prepareStatement(
"SELECT \n"
+ " fam.modelFamilyID, \n"
+ " fam.crisisID, \n"
+ " col.code AS crisisCode, \n"
+ " col.name AS crisisName, \n"
+ " fam.nominalAttributeID, \n"
+ " attr.code AS nominalAttributeCode, \n"
+ " attr.name AS nominalAttributeName, \n"
+ " attr.description AS nominalAttributeDescription, \n"
+ " mdl.modelID, \n"
+ " lbl.nominalLabelID,\n"
+ " lbl.nominalLabelCode,\n"
+ " lbl.name as nominalLabelName,\n"
+ " lbl.description as nominLabelDescription, \n"
+ " COUNT(DISTINCT dnl.documentID) AS labeledItemCount\n"
+ "FROM model_family fam \n"
+ "LEFT JOIN model mdl on mdl.modelFamilyID = fam.modelFamilyID \n"
+ "JOIN collection col on col.id = fam.crisisID and col.classifier_enabled = 1 \n"
+ "JOIN nominal_attribute attr ON attr.nominalAttributeID = fam.nominalAttributeID \n"
+ "JOIN nominal_label lbl ON lbl.nominalAttributeID = fam.nominalAttributeID \n"
+ "LEFT JOIN document doc ON doc.crisisID=fam.crisisID \n"
+ "LEFT JOIN document_nominal_label dnl ON dnl.documentID=doc.documentID AND dnl.nominalLabelID=lbl.nominalLabelID \n"
+ "WHERE fam.isActive AND (mdl.modelID IS NULL OR mdl.isCurrentModel) \n"
+ "GROUP BY crisisID, nominalAttributeID, nominalLabelID ");
result = sql.executeQuery();
ModelFamilyEC family = null;
NominalAttributeEC attribute = null;
HashMap<ModelFamilyEC, Integer> familyLabelCount = new HashMap<>();
// One result row per (family, label). A change in modelFamilyID starts a new
// family. NOTE(review): this relies on rows of the same family being
// contiguous, i.e. on the GROUP BY output order -- confirm for this database.
while (result.next()) {
if (family == null || family.getModelFamilyID() != result.getInt("modelFamilyID")) {
//create attribute
attribute = new NominalAttributeEC();
attribute.setNominalAttributeID(result.getInt("nominalAttributeID"));
attribute.setCode(result.getString("nominalAttributeCode"));
attribute.setDescription(result.getString("nominalAttributeDescription"));
attribute.setName(result.getString("nominalAttributeName"));
//create model family
family = new ModelFamilyEC();
family.setCrisisID(result.getInt("crisisID"));
// modelID is NULL for families without a current model (LEFT JOIN)
int tmpModelID = result.getInt("modelID");
if (!result.wasNull()) {
family.setCurrentModelID(tmpModelID);
}
family.setIsActive(true);
family.setModelFamilyID(result.getInt("modelFamilyID"));
family.setNominalAttribute(attribute);
familyLabelCount.put(family, 0);
modelFamilies.add(family);
}
//create label
NominalLabelEC label = new NominalLabelEC();
label.setDescription(result.getString("nominLabelDescription"));
label.setName(result.getString("nominalLabelName"));
label.setNominalAttribute(attribute);
label.setNominalLabelCode(result.getString("nominalLabelCode"));
label.setNominalLabelID(result.getInt("nominalLabelID"));
attribute.addNominalLabel(label);
// accumulate this label's item count into the family's running total
int count = familyLabelCount.get(family);
familyLabelCount.put(family, count + result.getInt("labeledItemCount"));
}
//sum training sample counts per attribute
for (Map.Entry<ModelFamilyEC, Integer> entry : familyLabelCount.entrySet()) {
entry.getKey().setTrainingExampleCount(entry.getValue());
}
} catch (SQLException e) {
logger.error("Exception when getting model state", e);
} finally {
close(result);
close(sql);
close(conn);
}
return modelFamilies;
}
/**
 * Persists the geopaparazzi project viewer preferences: stores the last
 * projects path from the given map into the user preferences node.
 *
 * @param prefsMap preferences map; the {@code LAST_GP_PROJECTS_PATH} entry may be absent
 */
@Override
public void setGeopaparazziProjectViewerPreferencesMap( HashMap<String, String> prefsMap ) {
    Preferences preferences = Preferences.userRoot().node(PreferencesHandler.PREFS_NODE_NAME);
    String lastPath = prefsMap.get(LAST_GP_PROJECTS_PATH);
    // ROBUSTNESS: Preferences.put throws NullPointerException on a null value;
    // skip persisting when the entry is missing from the map.
    if (lastPath != null) {
        preferences.put(LAST_GP_PROJECTS_PATH, lastPath);
    }
}
/**
 * Expand labels by a given factor. Each label region keeps its shape but is
 * shifted away from the origin proportionally to its centroid position,
 * producing a sparser label image whose dimensions are scaled accordingly.
 *
 * @param image input label image
 * @param ratio percentage of expansion (values between 0 and 100)
 * @return expanded image
 */
public static final ImageStack expandLabels(ImageStack image,
    float ratio)
{
    // size of input image
    int sizeX = image.getWidth();
    int sizeY = image.getHeight();
    int sizeZ = image.getSize();
    // size of result image, scaled by the expansion ratio
    int sizeX2 = (int) Math.round(sizeX * (1. + ratio / 100));
    int sizeY2 = (int) Math.round(sizeY * (1. + ratio / 100));
    int sizeZ2 = (int) Math.round(sizeZ * (1. + ratio / 100));
    // allocate memory for result, same bit depth as input
    int bitDepth = image.getBitDepth();
    ImageStack result = ImageStack.create(sizeX2, sizeY2, sizeZ2, bitDepth);
    // compute centroids of labels
    int[] labels = LabelImages.findAllLabels(image);
    double[][] centroids = Centroid3D.centroids(image, labels);
    // compute shift associated to each label
    int nLabels = labels.length;
    int[][] shifts = new int[nLabels][3];
    for (int i = 0; i < nLabels; i++)
    {
        shifts[i][0] = (int) Math.floor(centroids[i][0] * ratio / 100);
        shifts[i][1] = (int) Math.floor(centroids[i][1] * ratio / 100);
        shifts[i][2] = (int) Math.floor(centroids[i][2] * ratio / 100);
    }
    // create associative array to know index of each label
    // (CONSISTENCY: use the shared helper instead of building the map by hand)
    HashMap<Integer, Integer> labelIndices = LabelImages.mapLabelIndices(labels);
    // copy each foreground voxel to its shifted position in the result
    for (int z = 0; z < sizeZ; z++)
    {
        for (int y = 0; y < sizeY; y++)
        {
            for (int x = 0; x < sizeX; x++)
            {
                double label = image.getVoxel( x, y, z );
                if ( Double.compare( label, 0 ) == 0 )
                    continue;
                int index = labelIndices.get( (int) label );
                int x2 = x + shifts[index][0];
                int y2 = y + shifts[index][1];
                int z2 = z + shifts[index][2];
                result.setVoxel( x2, y2, z2, label );
            }
        }
    }
    return result;
}
/**
 * SAX callback for one opening XML tag of a TrakEM2 project file. Depending on
 * the (lowercased) tag name it creates the Project ("project"), layer-related
 * things ("t2_*"), coordinate transforms ("ict_transform*"/"iict_transform*")
 * or a generic project thing, and maintains the stack of currently open things
 * ({@code al_open}) so new things are attached as children of their parent.
 *
 * @param namespace_URI unused
 * @param local_name unused
 * @param qualified_name the tag name; matched after lowercasing
 * @param attributes XML attributes, copied into a lowercase-keyed map
 * @throws SAXException declared by the SAX interface; parse errors are caught,
 *         logged and flagged via {@code skip} instead
 */
public void startElement(String namespace_URI, String local_name, String qualified_name, Attributes attributes) throws SAXException {
if (null == loader) return;
//Utils.log2("startElement: " + qualified_name);
this.counter++;
if (0 == counter % 100) { // davi-experimenting: don't talk so much when you have > 600,000 patches to load!
Utils.showStatus("Loading " + counter, false);
}
try {
// failsafe:
qualified_name = qualified_name.toLowerCase();
// copy the SAX attributes into a plain map with lowercased keys
final HashMap<String,String> ht_attributes = new HashMap<String,String>();
for (int i=attributes.getLength() -1; i>-1; i--) {
ht_attributes.put(attributes.getQName(i).toLowerCase(), attributes.getValue(i));
}
// get the id, which whenever possible it's the id of the encapsulating Thing object. The encapsulated object id is the oid
// The type is specified by the qualified_name
Thing thing = null;
if (0 == qualified_name.indexOf("t2_")) {
if (qualified_name.equals("t2_display")) {
if (open_displays) al_displays.add(ht_attributes); // store for later, until the layers exist
} else {
// a Layer, LayerSet or Displayable object
thing = makeLayerThing(qualified_name, ht_attributes);
if (null != thing) {
// remember the first LayerSet thing as the layer tree root
if (null == root_lt && thing.getObject() instanceof LayerSet) {
root_lt = (LayerThing)thing;
}
}
}
} else if (qualified_name.equals("project")) {
if (null != this.root_pt) {
Utils.log("WARNING: more than one project definitions.");
return;
}
// Create the project
this.project = new Project(Long.parseLong(ht_attributes.remove("id")), ht_attributes.remove("title"));
this.project.setTempLoader(this.loader); // temp, but will be the same anyway
this.project.parseXMLOptions(ht_attributes);
this.project.addToDatabase(); // register id
String title = ht_attributes.get("title");
if (null != title) this.project.setTitle(title);
// Add all unique TemplateThing types to the project
for (Iterator<TemplateThing> it = root_tt.getUniqueTypes(new HashMap<String,TemplateThing>()).values().iterator(); it.hasNext(); ) {
this.project.addUniqueType(it.next());
}
this.project.addUniqueType(this.project_tt);
this.root_pt = new ProjectThing(this.project_tt, this.project, this.project);
// Add a project pointer to all template things
this.root_tt.addToDatabase(this.project);
thing = root_pt;
} else if (qualified_name.startsWith("ict_transform")||qualified_name.startsWith("iict_transform")) {
makeCoordinateTransform(qualified_name, ht_attributes);
} else if (!qualified_name.equals("trakem2")) {
// Any abstract object
thing = makeProjectThing(qualified_name, ht_attributes);
}
if (null != thing) {
// get the previously open thing and add this new_thing to it as a child
int size = al_open.size();
if (size > 0) {
Thing parent = al_open.get(size -1);
parent.addChild(thing);
//Utils.log2("Adding child " + thing + " to parent " + parent);
}
// add the new thing as open
al_open.add(thing);
}
} catch (Exception e) {
IJError.print(e);
skip = true;
}
}