下面列出了 org.apache.commons.collections4.BidiMap 的实例代码，也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
/**
 * Builds the {@link FunctionGraph} for the given function: creates one vertex per code
 * block, derives the edge set from block flow, and finally stamps each vertex with its
 * computed type once the complete graph is available.
 */
private static FunctionGraph createGraph(Function function, FGController controller,
        TaskMonitor monitor) throws CancelledException {
    BidiMap<CodeBlock, FGVertex> blockToVertexMap = createVertices(function, controller, monitor);
    Collection<FGEdge> graphEdges = createdEdges(blockToVertexMap, controller, monitor);
    FunctionGraphVertexAttributes vertexSettings =
        new FunctionGraphVertexAttributes(controller.getProgram());
    FunctionGraph functionGraph =
        new FunctionGraph(function, vertexSettings, blockToVertexMap.values(), graphEdges);
    // The vertex type (entry/exit/etc.) can only be determined with the full graph in hand.
    for (FGVertex v : blockToVertexMap.values()) {
        v.setVertexType(getVertexType(functionGraph, v));
    }
    return functionGraph;
}
/**
 * Collects the outgoing edges for the given start vertex. Destination blocks with no
 * vertex in the map are skipped — a missing vertex means the destination code block is
 * not part of the current function.
 */
private static Collection<FGEdge> getEdgesForStartVertex(
        BidiMap<CodeBlock, FGVertex> blockToVertexMap, FGVertex startVertex,
        FGController controller, TaskMonitor monitor) throws CancelledException {
    List<FGEdge> result = new ArrayList<>();
    CodeBlock sourceBlock = blockToVertexMap.getKey(startVertex);
    CodeBlockReferenceIterator destinationIterator = sourceBlock.getDestinations(monitor);
    while (destinationIterator.hasNext()) {
        CodeBlockReference ref = destinationIterator.next();
        CodeBlock destinationBlock = ref.getDestinationBlock();
        FGVertex destinationVertex = blockToVertexMap.get(destinationBlock);
        if (destinationVertex != null) {
            // only link blocks inside this function
            result.add(new FGEdgeImpl(startVertex, destinationVertex, ref.getFlowType(),
                controller.getFunctionGraphOptions()));
        }
    }
    return result;
}
/**
 * Getters.
 *
 * Reloads the text attribute -> position map ({@code tba}) from
 * {@code <folder>attributes/text.tsv} (tab-separated: name TAB index) and returns it.
 * Errors are printed and the (possibly partially-filled) map is returned, matching the
 * original best-effort behavior.
 *
 * FIX: the reader is now opened in try-with-resources; the original leaked the
 * BufferedReader whenever readLine/parseInt threw before close() was reached.
 * NOTE(review): FileReader uses the platform default charset — confirm the TSV files
 * are written in that charset.
 */
public BidiMap<String, Integer> getTextAttributes(){
    try{
        tba.clear();
        try (BufferedReader rdr = new BufferedReader(
                new FileReader(new File(folder + "attributes/text.tsv")))) {
            String inline;
            while ((inline = rdr.readLine()) != null) {
                String[] dic = inline.split("\\t");
                tba.put(dic[0], Integer.parseInt(dic[1]));
            }
        }
    } catch (Exception e){
        // best-effort: log and fall through, returning whatever was loaded
        e.printStackTrace();
    }
    return tba;
}
/**
 * Loads a genotype-to-expression (GTE) coupling file: each line is two tab-separated
 * identifiers, stored as key -> value in a bidirectional map.
 *
 * FIX: the original never closed the reader (neither on success nor when a malformed
 * line raised the RuntimeException); try-with-resources now guarantees closure.
 *
 * @param gtePath path of the UTF-8 encoded GTE file
 * @return bidirectional map of the first column to the second column
 * @throws IOException if the file cannot be opened or read
 * @throws RuntimeException if a line does not contain exactly two fields
 */
private static BidiMap<String, String> loadGte(String gtePath) throws IOException {
    BidiMap<String, String> gte = new DualHashBidiMap<String, String>();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(gtePath), "UTF-8"))) {
        String line;
        while ((line = reader.readLine()) != null) {
            String[] elements = StringUtils.split(line, '\t');
            if (elements.length != 2) {
                throw new RuntimeException("Error in GTE file line: " + line);
            }
            gte.put(elements[0], elements[1]);
        }
    }
    return gte;
}
/**
 * Processes PRELINK and Macho-O offsets in order to map files to their Mach-O offsets in the
 * providers.
 *
 * @param prelinkList The list of discovered {@link PrelinkMap}s.
 * @param machoHeaderOffsets The list of provider offsets where prelinked Mach-O headers start.
 * @param monitor A monitor
 * @throws IOException if an IO-related problem occurred.
 * @throws MachException if there was a problem parsing Mach-O headers.
 */
private void processPrelinkWithMacho(List<PrelinkMap> prelinkList,
List<Long> machoHeaderOffsets, TaskMonitor monitor) throws IOException, MachException {
monitor.setMessage("Processing PRELINK with found Mach-O headers...");
monitor.initialize(prelinkList.size());
// Pair each PrelinkMap with the provider offset of its Mach-O header.
BidiMap<PrelinkMap, Long> map = MachoPrelinkUtils.matchPrelinkToMachoHeaderOffsets(provider,
prelinkList, machoHeaderOffsets, monitor);
for (PrelinkMap info : map.keySet()) {
if (monitor.isCancelled()) {
break;
}
monitor.incrementProgress(1);
// Entries without a bundle path cannot be turned into files — skip them.
if (info.getPrelinkBundlePath() == null) {
continue;
}
// The following could end up being a directory once we discover it has a child...we'll
// handle that in storeFile()
GFileImpl file =
GFileImpl.fromPathString(this, root, info.getPrelinkBundlePath(), null, false, 0);
// A size of -1 presumably means "unknown" — only set the length when it is known.
if (info.getPrelinkExecutableSize() > -1) {
file.setLength(info.getPrelinkExecutableSize());
}
file = storeFile(file, info);
// Files under the system kext are not mapped to an offset of their own.
if (isChildOf(systemKextFile, file)) {
continue;
}
// Remember where this file's Mach-O header starts within the provider.
fileToMachoOffsetMap.put(file, map.get(info));
}
}
/** Builds the complete edge set by collecting the outgoing edges of every vertex. */
private static Collection<FGEdge> createdEdges(BidiMap<CodeBlock, FGVertex> vertices,
        FGController controller, TaskMonitor monitor) throws CancelledException {
    List<FGEdge> allEdges = new ArrayList<>();
    for (FGVertex vertex : vertices.values()) {
        allEdges.addAll(getEdgesForStartVertex(vertices, vertex, controller, monitor));
    }
    return allEdges;
}
/**
 * Creates one vertex per basic block of the function's body, keyed by its CodeBlock.
 * Progress is reported in addresses covered (not blocks processed), so large blocks
 * advance the monitor proportionally.
 */
private static BidiMap<CodeBlock, FGVertex> createVertices(Function function,
        final FGController controller, TaskMonitor monitor) throws CancelledException {
    BidiMap<CodeBlock, FGVertex> blockToVertexMap = new DualHashBidiMap<>();
    CodeBlockModel model = new BasicBlockModel(controller.getProgram());
    AddressSetView body = function.getBody();
    CodeBlockIterator blockIterator = model.getCodeBlocksContaining(body, monitor);
    monitor.initialize(body.getNumAddresses());
    while (blockIterator.hasNext()) {
        CodeBlock block = blockIterator.next();
        FlowType flow = block.getFlowType();
        // A block is an entry if the model says so, or if it starts at the function's
        // declared entry point.
        boolean entry = isEntry(block);
        Address blockStart = block.getFirstStartAddress();
        if (blockStart.equals(function.getEntryPoint())) {
            entry = true;
        }
        blockToVertexMap.put(block,
            new ListingFunctionGraphVertex(controller, block, flow, entry));
        // Advance progress by the number of addresses this block covers.
        monitor.setProgress(monitor.getProgress() + block.getNumAddresses());
    }
    return blockToVertexMap;
}
// Derives this.columnNames (name -> index, in column order) from the current record's
// values, using each value as a column name when unique. Duplicate names are replaced
// with INDEXED_COLUMN_PREFIX + index; if that synthetic name is itself already taken,
// the displaced column is renamed in turn (iteratively) until no clash remains.
protected void assignColumnNames()
{
// Bidirectional index <-> name map so clashes can be detected by reverse lookup.
BidiMap<Integer, String> indexColumns = new DualHashBidiMap<Integer, String>();
for (int i = 0; i < crtRecordColumnValues.size(); i++)
{
String name = crtRecordColumnValues.get(i);
Integer existingIdx = indexColumns.getKey(name);
if (existingIdx == null)
{
//use the name from the file if possible
indexColumns.put(i, name);
}
else
{
//the name is taken, force COLUMN_i for this column and recursively if COLUMN_x is already used
Integer forceIndex = i;
do
{
String indexName = INDEXED_COLUMN_PREFIX + forceIndex;
// If the synthetic name is occupied, its current holder gets displaced; the
// BidiMap.put evicts the old entry and we rename that column on the next pass.
Integer existingIndex = indexColumns.getKey(indexName);
indexColumns.put(forceIndex, indexName);
forceIndex = existingIndex;
}
while(forceIndex != null);
}
}
// Flip to name -> index, preserving column order via LinkedHashMap.
this.columnNames = new LinkedHashMap<String, Integer>();
for (int i = 0; i < crtRecordColumnValues.size(); i++)
{
String columnName = indexColumns.get(i);
this.columnNames.put(columnName, i);
}
}
/**
 * Demonstrates basic BidiMap usage by building a bidirectional dictionary and querying
 * it in both directions (get by key, getKey by value).
 *
 * FIX: the original instantiated the raw type {@code new DualHashBidiMap()}; the
 * diamond operator keeps the map properly parameterized and removes the unchecked
 * warning.
 *
 * NOTE(review): the variable is named russianWords but the words are Polish — left
 * as-is since output strings must not change.
 */
public static void main(String[] args) {
    String[] englishWords = {"one", "two", "three","ball","snow"};
    String[] russianWords = {"jeden", "dwa", "trzy", "kula", "snieg"};
    // Create Multiset
    BidiMap<String, String> biMap = new DualHashBidiMap<>();
    // Create Polish-English dictionary by pairing the arrays positionally
    int i = 0;
    for(String englishWord: englishWords) {
        biMap.put(englishWord, russianWords[i]);
        i++;
    }
    // Print count words
    System.out.println(biMap); // Print "{ball=kula, snow=snieg, one=jeden, two=dwa, three=trzy}" - in random orders
    // Print unique words
    System.out.println(biMap.keySet()); // print "[ball, snow, one, two, three]"- in random orders
    System.out.println(biMap.values()); // print "[kula, snieg, jeden, dwa, trzy]" - in random orders
    // Print translate by words
    System.out.println("one = " + biMap.get("one")); // print one = jeden
    System.out.println("two = " + biMap.get("two")); // print two = dwa
    System.out.println("kula = " + biMap.getKey("kula")); // print kula = ball
    System.out.println("snieg = " + biMap.getKey("snieg")); // print snieg = snow
    System.out.println("empty = " + biMap.get("empty")); // print empty = null
    // Print count word's pair
    System.out.println(biMap.size()); //print 5
}
/**
 * Demonstrates basic BidiMap usage by building a bidirectional dictionary and querying
 * it in both directions (comments translated from Russian).
 *
 * FIX: the original instantiated the raw type {@code new DualHashBidiMap()}; the
 * diamond operator keeps the map properly parameterized.
 */
public static void main(String[] args) {
    String[] englishWords = {"one", "two", "three","ball","snow"};
    String[] russianWords = {"jeden", "dwa", "trzy", "kula", "snieg"};
    // Create the bidirectional map (FIX: parameterized instead of raw type)
    BidiMap<String, String> biMap = new DualHashBidiMap<>();
    // Build the English-Polish dictionary by pairing the arrays positionally
    int i = 0;
    for(String englishWord: englishWords) {
        biMap.put(englishWord, russianWords[i]);
        i++;
    }
    // Print the whole map
    System.out.println(biMap); // prints {ball=kula, snow=snieg, one=jeden, two=dwa, three=trzy} - in arbitrary order
    // Print all unique keys and values
    System.out.println(biMap.keySet()); // prints [ball, snow, one, two, three] - in arbitrary order
    System.out.println(biMap.values()); // prints [kula, snieg, jeden, dwa, trzy] - in arbitrary order
    // Look up translations in both directions
    System.out.println("one = " + biMap.get("one")); // prints one = jeden
    System.out.println("two = " + biMap.get("two")); // prints two = dwa
    System.out.println("kula = " + biMap.getKey("kula")); // prints kula = ball
    System.out.println("snieg = " + biMap.getKey("snieg")); // prints snieg = snow
    System.out.println("empty = " + biMap.get("empty")); // prints empty = null
    // Print the total number of entries
    System.out.println(biMap.size()); // prints 5
}
/**
 * Reloads the feature attribute -> position map ({@code fba}) from
 * {@code <folder>attributes/feature.tsv} (tab-separated: name TAB index) and returns it.
 * Errors are printed and the (possibly partially-filled) map is returned, matching the
 * original best-effort behavior.
 *
 * FIX: the reader is now opened in try-with-resources; the original leaked the
 * BufferedReader whenever readLine/parseInt threw before close() was reached.
 */
public BidiMap<String, Integer> getFeatureAttributes(){
    try{
        fba.clear();
        try (BufferedReader rdr = new BufferedReader(
                new FileReader(new File(folder + "attributes/feature.tsv")))) {
            String inline;
            while ((inline = rdr.readLine()) != null) {
                String[] dic = inline.split("\\t");
                fba.put(dic[0], Integer.parseInt(dic[1]));
            }
        }
    } catch (Exception e){
        // best-effort: log and fall through, returning whatever was loaded
        e.printStackTrace();
    }
    return fba;
}
/**
 * Reloads the complex attribute -> position map ({@code cba}) from
 * {@code <folder>attributes/complex.tsv} (tab-separated: name TAB index) and returns it.
 * Errors are printed and the (possibly partially-filled) map is returned, matching the
 * original best-effort behavior.
 *
 * FIX: the reader is now opened in try-with-resources; the original leaked the
 * BufferedReader whenever readLine/parseInt threw before close() was reached.
 */
public BidiMap<String, Integer> getComplexAttributes(){
    try{
        cba.clear();
        try (BufferedReader rdr = new BufferedReader(
                new FileReader(new File(folder + "attributes/complex.tsv")))) {
            String inline;
            while ((inline = rdr.readLine()) != null) {
                String[] dic = inline.split("\\t");
                cba.put(dic[0], Integer.parseInt(dic[1]));
            }
        }
    } catch (Exception e){
        // best-effort: log and fall through, returning whatever was loaded
        e.printStackTrace();
    }
    return cba;
}
/**
 * Constructor of the class. "tba", "fba" and "cba" refer to the "attribute-->position"
 * relations.
 *
 * @param f  data folder for the classifier — presumably must end with a path separator,
 *           since other methods append e.g. "attributes/text.tsv" directly; TODO confirm
 * @param tb text attribute -> position map
 * @param fb feature attribute -> position map
 * @param cb complex attribute -> position map
 */
public PolarityClassifier(String f, BidiMap<String, Integer> tb, BidiMap<String, Integer> fb, BidiMap<String, Integer> cb){
folder = f;
initializeAttributes(tb, fb, cb);
// Two Instances slots each — presumably one per polarity class; verify against usage.
text = new Instances[2];
feature = new Instances[2];
complex = new Instances[2];
initialiseTextFilter();
initializeClassifiers();
}
/**
 * Initializes the BidiMaps by adopting the caller-supplied maps.
 *
 * FIX: the original allocated three fresh DualHashBidiMap instances and then
 * immediately overwrote the fields with the parameters — the allocations were dead
 * stores and have been removed. Behavior is unchanged: the fields alias the arguments.
 */
private void initializeAttributes(BidiMap<String, Integer> tb, BidiMap<String, Integer> fb, BidiMap<String, Integer> cb){
    tba = tb;
    fba = fb;
    cba = cb;
}
/** Verifies that put() adds distinct entries to the map. */
@Test
public void givenKeyValue_whenPut_thenAddEntryToMap() {
    BidiMap<String, String> map = new DualHashBidiMap<>();
    map.put("key1", "value1");
    map.put("key2", "value2");
    // FIX: JUnit's assertEquals takes (expected, actual); the original had the
    // arguments swapped, which yields misleading failure messages.
    assertEquals(2, map.size());
}
/** Verifies that inverseBidiMap() swaps key and value roles. */
@Test
public void whenInverseBidiMap_thenInverseKeyValue() {
    BidiMap<String, String> original = new DualHashBidiMap<>();
    original.put("key1", "value1");
    original.put("key2", "value2");
    // In the inverse view the former values act as keys.
    BidiMap<String, String> inverse = original.inverseBidiMap();
    assertTrue(inverse.containsKey("value1") && inverse.containsKey("value2"));
}
/** Verifies that removeValue() deletes the whole entry whose value matches. */
@Test
public void givenValue_whenRemoveValue_thenRemoveMatchingMapEntry() {
    BidiMap<String, String> bidi = new DualHashBidiMap<>();
    bidi.put("key1", "value1");
    bidi.put("key2", "value2");
    // Removing by value also removes the associated key.
    bidi.removeValue("value2");
    assertFalse(bidi.containsKey("key2"));
}
/** Verifies that putting an existing value under a new key evicts the old key. */
@Test
public void givenKeyValue_whenAddValue_thenReplaceFirstKey() {
    BidiMap<String, String> map = new DualHashBidiMap<>();
    map.put("key1", "value1");
    map.put("key2", "value1");
    // FIX: assertEquals takes (expected, actual); the original had them swapped.
    assertEquals(1, map.size());
    assertFalse(map.containsKey("key1"));
}
/** Verifies the reverse lookup: getKey(value) returns the mapped key. */
@Test
public void whenUsingBidiMap_shouldReturnKey() {
    BidiMap<String, String> capitals = new DualHashBidiMap<>();
    capitals.put("Berlin", "Germany");
    capitals.put("Cape Town", "South Africa");
    assertEquals("Berlin", capitals.getKey("Germany"));
}
/** Verifies that re-inserting an existing value under a new key evicts the old entry. */
@Test
public void whenUsingBidiMapAddDuplicateValue_shouldRemoveOldEntry() {
    BidiMap<String, String> capitals = new DualHashBidiMap<>();
    capitals.put("Berlin", "Germany");
    capitals.put("Cape Town", "South Africa");
    // "South Africa" was mapped from "Cape Town"; this put replaces that key.
    capitals.put("Pretoria", "South Africa");
    assertEquals("Pretoria", capitals.getKey("South Africa"));
}
/**
 * Constructs the analysis over the intersection of samples that have genotype,
 * expression and covariate data: genotype-to-expression (GTE) entries missing any of
 * the three data sources are pruned from the supplied map (the map is mutated).
 *
 * BUG FIX: the original called {@code it.remove()} once per failed check inside the
 * loop body, so an entry failing two or more checks triggered a second remove() on the
 * same iterator position, throwing IllegalStateException. The checks are now combined
 * so remove() runs at most once per entry (short-circuiting also avoids querying the
 * datasets for an already-rejected entry).
 */
public InteractionAnalysisDetermineDirection(RandomAccessGenotypeData genotypeData, DoubleMatrixDataset<String, String> expressionData, DoubleMatrixDataset<String, String> covariatesData, BidiMap<String, String> gte) {
    this.genotypeData = genotypeData;
    this.expressionData = expressionData;
    this.covariatesData = covariatesData;
    this.gte = gte;
    this.variantIdMap = genotypeData.getVariantIdMap();
    HashSet<String> genotypedSamples = new HashSet<String>();
    Collections.addAll(genotypedSamples, genotypeData.getSampleNames());
    // Keep only GTE entries whose sample has genotypes AND whose expression column
    // exists in both the expression and covariate datasets.
    for (Iterator<Map.Entry<String, String>> it = gte.entrySet().iterator(); it.hasNext();) {
        Map.Entry<String, String> gteEntry = it.next();
        if (!genotypedSamples.contains(gteEntry.getKey())
                || !expressionData.containsCol(gteEntry.getValue())
                || !covariatesData.containsCol(gteEntry.getValue())) {
            it.remove();
        }
    }
    System.out.println("Samples with: genotypes, expression & covariate data: " + gte.size());
}
// Builds a BlockGraph mirroring the current function's basic-block structure, with each
// PcodeBlock wrapping the corresponding vertex from the given jung graph. Edges follow
// intra-function flow only; call references are excluded (see comment below).
private BlockGraph buildCurrentFunctionGraph(Program program,
VisualGraph<FGVertex, FGEdge> jungGraph, TaskMonitor taskMonitor)
throws CancelledException {
CodeBlockModel blockModel = new BasicBlockModel(program);
AddressSetView addresses = function.getBody();
CodeBlockIterator iterator = blockModel.getCodeBlocksContaining(addresses, taskMonitor);
BlockGraph blockGraph = new BlockGraph();
// First pass: create one PcodeBlock per code block that has a matching vertex.
BidiMap<CodeBlock, PcodeBlock> bidiMap = new DualHashBidiMap<>();
for (; iterator.hasNext();) {
taskMonitor.checkCanceled();
CodeBlock codeBlock = iterator.next();
FGVertex vertex = getVertex(jungGraph, codeBlock.getMinAddress());
if (vertex == null) {
// this is unusual; can happen if the program is being changed while this is running
continue;
}
PcodeBlock pcodeBlock = new BlockCopy(vertex, codeBlock.getMinAddress());
bidiMap.put(codeBlock, pcodeBlock);
blockGraph.addBlock(pcodeBlock);
}
// Second pass: add edges between the blocks created above.
for (CodeBlock block : bidiMap.keySet()) {
taskMonitor.checkCanceled();
CodeBlockReferenceIterator destinations = block.getDestinations(taskMonitor);
while (destinations.hasNext()) {
taskMonitor.checkCanceled();
CodeBlockReference ref = destinations.next();
// We only want control flow that is internal to the function. Make sure to
// exclude the case where a function contains a (recursive) call to itself:
// The reference would be between addresses internal to the function, but the
// link doesn't represent internal flow. So we filter out ANY call reference.
if (ref.getFlowType().isCall()) {
continue;
}
CodeBlock destination = ref.getDestinationBlock();
PcodeBlock sourcePcodeBlock = bidiMap.get(block);
PcodeBlock destPcodeBlock = bidiMap.get(destination);
// Destinations outside the function (or skipped above) have no PcodeBlock.
if (destPcodeBlock == null) {
continue;
}
blockGraph.addEdge(sourcePcodeBlock, destPcodeBlock);
}
}
blockGraph.setIndices();
return blockGraph;
}
// Builds the filesystem/program view for a PRELINK kernelcache: handles the System.kext
// via super.build(), fixes up chained pointers, then locates and marks up every
// prelinked Mach-O embedded in the already-loaded memory segments.
@Override
protected void build() throws Exception {
// We want to handle the start of the Mach-O normally. It represents the System.kext.
super.build();
// Fixup any chained pointers
List<Address> fixedAddresses = fixupChainedPointers();
// The rest of the Mach-O's live in the memory segments that the System.kext already
// defined. Therefore, we really just want to go through and do additional markup on them
// since they are already loaded in.
List<Long> machoHeaderOffsets =
MachoPrelinkUtils.findPrelinkMachoHeaderOffsets(provider, monitor);
if (machoHeaderOffsets.isEmpty()) {
return;
}
// Match PRELINK information to the Mach-O's we've found
BidiMap<PrelinkMap, Long> map = MachoPrelinkUtils.matchPrelinkToMachoHeaderOffsets(provider,
prelinkList, machoHeaderOffsets, monitor);
// Determine the starting address of the PRELINK Mach-O's
long prelinkStart = MachoPrelinkUtils.getPrelinkStartAddr(machoHeader);
Address prelinkStartAddr = null;
if (prelinkStart == 0) {
// Probably iOS 12, which doesn't define a proper __PRELINK_TEXT segment.
// Assume the file offset is the same as the offset from image base.
prelinkStartAddr = program.getImageBase().add(machoHeaderOffsets.get(0));
}
else {
prelinkStartAddr = space.getAddress(prelinkStart);
}
// Create an "info" object for each PRELINK Mach-O, which will make processing them easier
List<PrelinkMachoInfo> prelinkMachoInfoList = new ArrayList<>();
for (Long machoHeaderOffset : machoHeaderOffsets) {
// Each Mach-O's address is its file-offset delta from the first header, rebased
// onto the PRELINK start address.
prelinkMachoInfoList.add(new PrelinkMachoInfo(provider, machoHeaderOffset,
prelinkStartAddr.add(machoHeaderOffset - machoHeaderOffsets.get(0)),
map.getKey(machoHeaderOffset)));
}
// Process each PRELINK Mach-O
monitor.initialize(prelinkMachoInfoList.size());
for (int i = 0; i < prelinkMachoInfoList.size(); i++) {
PrelinkMachoInfo info = prelinkMachoInfoList.get(i);
// The "next" entry (null for the last one) lets addToProgramTree bound this Mach-O.
PrelinkMachoInfo next = null;
if (i < prelinkMachoInfoList.size() - 1) {
next = prelinkMachoInfoList.get(i + 1);
}
info.processMemoryBlocks();
info.markupHeaders();
info.addToProgramTree(next);
monitor.incrementProgress(1);
}
// Create pointers at any fixed-up addresses
fixedAddresses.forEach(addr -> {
try {
DataUtilities.createData(program, addr, Pointer64DataType.dataType, -1, false,
DataUtilities.ClearDataMode.CLEAR_ALL_UNDEFINED_CONFLICT_DATA);
}
catch (CodeUnitInsertionException e) {
// No worries, something presumably more important was there already
}
});
}
/** Verifies the reverse lookup: getKey(value) returns the original key. */
@Test
public void givenValue_whenGetKey_thenMappedKey() {
    BidiMap<String, String> map = new DualHashBidiMap<>();
    map.put("key1", "value1");
    // FIX: assertEquals takes (expected, actual); the original had them swapped.
    assertEquals("key1", map.getKey("value1"));
}