java.io.PrintWriter#close() Source Code Examples

Listed below are example usages of java.io.PrintWriter#close(). Click a project link to view the full source on GitHub, or post a comment on the right.
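Most of the snippets below call close() explicitly, sometimes inside a finally block. For comparison, here is a minimal sketch (not taken from any of the projects listed below; the file name output.txt is just a placeholder) showing the same cleanup written with try-with-resources, which closes the PrintWriter automatically even when the body throws an exception:

import java.io.FileNotFoundException;
import java.io.PrintWriter;

public class PrintWriterCloseDemo {
    public static void main(String[] args) {
        // PrintWriter implements AutoCloseable, so try-with-resources
        // calls close() for us when the block exits, normally or not.
        try (PrintWriter out = new PrintWriter("output.txt")) {
            out.println("hello, PrintWriter");
            // close() flushes any buffered output, so an explicit flush() is optional here
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
}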

Example 1  Project: JAADAS   File: JimpleIDESolver.java
public void dumpResults() {
	try {
		PrintWriter out = new PrintWriter(new FileOutputStream("ideSolverDump"+System.currentTimeMillis()+".csv"));
		List<String> res = new ArrayList<String>();
		for(Cell<Unit, D, V> entry: val.cellSet()) {
			SootMethod methodOf = (SootMethod) icfg.getMethodOf(entry.getRowKey());
			PatchingChain<Unit> units = methodOf.getActiveBody().getUnits();
			int i=0;
			for (Unit unit : units) {
				if(unit==entry.getRowKey())
					break;
				i++;
			}

			res.add(methodOf+";"+entry.getRowKey()+"@"+i+";"+entry.getColumnKey()+";"+entry.getValue());
		}
		Collections.sort(res);
		for (String string : res) {
			out.println(string);
		}
		out.flush();
		out.close();
	} catch (FileNotFoundException e) {
		e.printStackTrace();
	}
}
 
Example 2  Project: netbeans   File: SimplifiedJspServlet.java
public final void create(Document doc, String virtualClassBody) {
    FileObject fileDummyJava = null;
    List<? extends CompletionItem> javaCompletionItems = null;

    try {
        FileSystem memFS = FileUtil.createMemoryFileSystem();
        fileDummyJava = memFS.getRoot().createData("SimplifiedJSPServlet", "java"); //NOI18N
        PrintWriter writer = new PrintWriter(fileDummyJava.getOutputStream());
        writer.print(virtualClassBody);
        writer.close();

        Source source = Source.create(fileDummyJava);
        process(fileDummyJava, source);
    } catch (IOException ex) {
        logger.log(Level.SEVERE, ex.getMessage(), ex);
    }
}
 
Example 3  Project: urule   File: RuleSetServletEditorHandler.java
@Override
public void execute(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
	String method=retriveMethod(req);
	if(method!=null){
		invokeMethod(method, req, resp);
	}else{
		VelocityContext context = new VelocityContext();
		context.put("contextPath", req.getContextPath());
		String file=req.getParameter("file");
		file=Utils.decodeURL(file);
		String project = buildProjectNameFromFile(file);
		if(project!=null){
			context.put("project", project);
		}
		resp.setContentType("text/html");
		resp.setCharacterEncoding("utf-8");
		Template template=ve.getTemplate("html/ruleset-editor.html","utf-8");
		PrintWriter writer=resp.getWriter();
		template.merge(context, writer);
		writer.close();
	}
}
 
Example 4  Project: anno4j   File: DiskBlobStore.java
protected void newBlobVersion(String version, File file) throws IOException {
	lock();
	try {
		File f = new File(journal, "index");
		PrintWriter index = new PrintWriter(openWriter(f, true));
		try {
			String jpath = journal.getAbsolutePath();
			String path = file.getAbsolutePath();
			if (path.startsWith(jpath) && path.charAt(jpath.length()) == File.separatorChar) {
				path = path.substring(jpath.length() + 1);
			} else {
				throw new AssertionError("Invalid version entry path: " + path);
			}
			index.print(path.replace(File.separatorChar, '/'));
			index.print(' ');
			index.println(version);
		} finally {
			index.close();
		}
	} finally {
		unlock();
	}
}
 
Example 5  Project: sctalk   File: HttpUtils.java
public static void setJsonBody(HttpServletResponse response,ResponseInfo info)
{
    Gson gson = new Gson();
    String bodyStr = gson.toJson(info);
    
    response.setCharacterEncoding("UTF-8");
    response.setContentType("application/json; charset=utf-8");
    PrintWriter out = null;
    try {
        out = response.getWriter();
        out.append(bodyStr);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (out != null) {
            out.close();
        }
    }
}
 
Example 6  Project: FEMultiPlayer-V2   File: FEServerFrame.java
private static void logError(Throwable e) {
	System.err.println("Exception occurred, writing to logs...");
	e.printStackTrace();
	try {
		File errLog = new File("error_log_server" + System.currentTimeMillis() % 100000000 + ".log");
		PrintWriter pw = new PrintWriter(errLog);
		e.printStackTrace(pw);
		pw.close();
	} catch (IOException e2) {
		e2.printStackTrace();
	}
	System.exit(-1);
}
 
Example 7  Project: coordination_oru   File: PathEditor.java
public void saveObstaclesToPoses(String fileName) {
       try {
           File file = new File(fileName);
           PrintWriter writer = new PrintWriter(file);
           for (int i = 0; i < obstacleCenters.size(); i++) {
           	writer.println(obstacleNames.get(i) + " " + obstacleCenters.get(i).getX() + " " + obstacleCenters.get(i).getY() + " " + obstacleCenters.get(i).getTheta());
           }
           writer.close();
       }
       catch (Exception e) { e.printStackTrace(); }
}
 
Example 8  Project: glowroot   File: PropertiesFiles.java
private static void writePropertiesFile(File propFile, List<String> newLines)
        throws FileNotFoundException {
    // properties files must be ISO_8859_1
    PrintWriter out = new PrintWriter(Files.newWriter(propFile, ISO_8859_1));
    try {
        for (String newLine : newLines) {
            out.println(newLine);
        }
    } finally {
        out.close();
    }
}
 
Example 9  Project: openbd-core   File: xmlCFML.java
public void writeTo(String name, File _dest) throws IOException {
	OutputStream fOut = cfEngine.thisPlatform.getFileIO().getFileOutputStream(_dest);
	OutputStreamWriter outW = new OutputStreamWriter(fOut, Localization.convertCharSetToCharEncoding("UTF-8"));
	PrintWriter outStream = new PrintWriter(outW);
	outStream.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
	printXML(name, outStream);
	outStream.flush();
	outStream.close();
}
 
Example 10  Project: SPADE   File: CompressedStorage.java
/**
 * Renames nodes by giving them IDs instead of hashes and produces a text file exactly like the input one, but with IDs in place of hashes.
 * @param textfile text file storage issued by SPADE
 * @throws FileNotFoundException
 * @throws UnsupportedEncodingException
 */
public static void renameNodes(String textfile) throws FileNotFoundException, UnsupportedEncodingException{
	String output = textfile + "_preproc.txt";
	File file = new File(textfile + ".txt");
	Scanner sc = new Scanner(file);
	Integer nextID = 0;
	// index in this vector = the new ID assigned to the hash stored at that position
	Vector<String> alreadyRenamed = new Vector<String>();
	PrintWriter writer = new PrintWriter(output, "UTF-8");

	while(sc.hasNextLine()){
		String aux = sc.nextLine();
		if(aux.substring(0,6).equals("VERTEX")) {
			// set new id of the node aka. nextID and put its hashcode in the vector then write the line in file.
			String hashID = aux.substring(8,72);
			alreadyRenamed.add(hashID);
			aux = aux.replaceFirst(hashID, nextID.toString());
			writer.println(aux);
			nextID++;
		}
		else if(aux.substring(0,4).equals("EDGE")) {
			// find in the vector the id corresponding to the two vertex involved and replace it in the line. Then write the line to file.
			String node1 = aux.substring(6, 70);				
			String node2 = aux.substring(74, 138);
			Integer id1 = alreadyRenamed.indexOf(node1);
			Integer id2 = alreadyRenamed.indexOf(node2);
			aux = aux.replaceFirst(node1, id1.toString());
			aux = aux.replaceFirst(node2, id2.toString()); 
			writer.println(aux);
		}

	} 
	sc.close();
	writer.close();			
}
 
Example 11
@Override
public void onAuthenticationFailure(HttpServletRequest httpServletRequest, HttpServletResponse
        httpServletResponse, AuthenticationException e) throws IOException, ServletException {
    httpServletRequest.setCharacterEncoding("UTF-8");
    // Read the submitted username and password
    String username = httpServletRequest.getParameter("uname");
    String password = httpServletRequest.getParameter("pwd");

    httpServletResponse.setContentType("application/json;charset=utf-8");
    PrintWriter out = httpServletResponse.getWriter();
    out.write("{\"status\":\"error\",\"message\":\"用户名或密码错误\"}");
    out.flush();
    out.close();
}
 
Example 12  Project: codekata   File: Main.java
public static void main(String[] args) throws java.lang.Exception {
    BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
    PrintWriter stdout = new PrintWriter(System.out);
    int count = Integer.valueOf(stdin.readLine());
    int result = count % 3 == 0 ? (count / 3) * 2 : (count / 3) * 2 + 1;
    stdout.println(result);
    stdout.close();
}
 
Example 13  Project: gemfirexd-oss   File: ExportImportTestDUnit.java
/**
 * Test for multi-threaded import of data (in all the columns of the table) through a data file.
 * Added to verify defect #46758
 */
public void testMultiThreadedImportTable_PR() throws Exception {
  try {
  // Start one client and three servers
  startVMs(1, 3);

  //create the table
  clientSQLExecute(1, "create table app.t1(flight_id int not null, "
      + "segment_number int not null, aircraft varchar(20), "
      + "CONSTRAINT FLIGHTS_PK PRIMARY KEY (FLIGHT_ID, SEGMENT_NUMBER))");

  //create import data file from which data is to be imported into the table
  PrintWriter p = new PrintWriter(new File("import_table_ex.txt"));
  p.println("1354,11,Airbus");
  p.println("7363,12,Boeing");
  p.println("2562,13,Airbus");
  p.println("6355,14,Boeing");
  p.println("8376,15,Airbus");
  p.close();
  
  //call import table procedure with 6 threads (data file has 5 rows)
  clientSQLExecute(1,
      "CALL SYSCS_UTIL.IMPORT_TABLE_EX('APP', 'T1', 'import_table_ex.txt', null, null, null, 0, 0, 6, 0, null, null)");
  
  //verify data has been imported successfully
  Connection conn = TestUtil.getConnection();
  Statement st = conn.createStatement();
  st.execute("select count(*) from app.t1");
  ResultSet rs = st.getResultSet();
  assertTrue(rs.next());
  assertEquals("Number of rows in table should be 5", 5, rs.getInt(1));
  } finally {
    //delete the import data file
    new File("import_table_ex.txt").delete();
  }
}
 
Example 14  Project: nalu   File: ProcessorUtils.java
public void createWarningMessage(String warningMessage) {
  StringWriter sw = new StringWriter();
  PrintWriter pw = new PrintWriter(sw);
  pw.println(warningMessage);
  pw.close();
  messager.printMessage(Diagnostic.Kind.WARNING,
                        sw.toString());
}
 
Example 15  Project: incubator-ratis   File: StringUtils.java
public static String stringifyException(Throwable e) {
  StringWriter stm = new StringWriter();
  PrintWriter wrt = new PrintWriter(stm);
  e.printStackTrace(wrt);
  wrt.close();
  return stm.toString();
}
 
Example 16  Project: vxquery   File: HDFSFunctions.java
/**
 * Writes the schedule to a temporary file, then uploads the file to the HDFS.
 *
 * @throws UnsupportedEncodingException
 *            The encoding of the file is not correct     
 * @throws FileNotFoundException
 *            The file doesn't exist
 */
public void addScheduleToDistributedCache() throws FileNotFoundException, UnsupportedEncodingException {
    PrintWriter writer = new PrintWriter(FILEPATH, "UTF-8");
    for (int split : this.schedule.keySet()) {
        writer.write(split + "," + this.schedule.get(split));
    }
    writer.close();
    // Add file to HDFS
    this.put(FILEPATH, DFS_PATH);
}
 
Example 17  Project: KEEL   File: MOEA_Gosh.java
/**
 * It launches the algorithm
 */
public void execute() {
	if (somethingWrong) { //We do not execute the program
		System.err.println("An error was found");
		System.err.println("Aborting the program");
		//We should not use the statement: System.exit(-1);
	} 
	else {
		this.proc = new MOEA_GoshProcess(this.dataset, this.numObjectives, this.nTrials, this.popSize, this.pointCrossover, this.pc, this.pm, this.af);
		this.proc.run();
		this.associationRulesPareto = this.proc.generateRulesPareto();
	
		try {
			int r, i;
			Gene gen;
			AssociationRule a_r;

			PrintWriter rules_writer = new PrintWriter(this.rulesFilename);
			PrintWriter values_writer = new PrintWriter(this.valuesFilename);
			PrintWriter pareto_writer = new PrintWriter(this.paretoFilename);

			rules_writer.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
			rules_writer.println("<association_rules>");				
			
			values_writer.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
			values_writer.println("<values>");
			
			pareto_writer.print("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
			pareto_writer.println("<values>");


			for (r=0; r < this.associationRulesPareto.size(); r++) {
				a_r = this.associationRulesPareto.get(r);

				ArrayList<Gene> ant = a_r.getAntecedents();
				ArrayList<Gene> cons = a_r.getConsequents();

				rules_writer.println("<rule id=\"" + r + "\">");
				values_writer.println("<rule id=\"" + r + "\" rule_support=\"" + MOEA_GoshProcess.roundDouble(a_r.getSupport(),2) + "\" antecedent_support=\"" + MOEA_GoshProcess.roundDouble(a_r.getAntSupport(),2) + "\" consequent_support=\"" + MOEA_GoshProcess.roundDouble(a_r.getConsSupport(),2) + "\" confidence=\"" + MOEA_GoshProcess.roundDouble(a_r.getConfidence(),2) +"\" lift=\"" + MOEA_GoshProcess.roundDouble(a_r.getLift(),2) + "\" conviction=\"" + MOEA_GoshProcess.roundDouble(a_r.getConv(),2) + "\" certainFactor=\"" + MOEA_GoshProcess.roundDouble(a_r.getCF(),2) + "\" netConf=\"" + MOEA_GoshProcess.roundDouble(a_r.getNetConf(),2)  + "\" yulesQ=\"" + MOEA_GoshProcess.roundDouble(a_r.getYulesQ(),2) +  "\" nAttributes=\"" + (a_r.getnAnts()+1) + "\"/>");
				rules_writer.println("<antecedents>");			

				for (i=0; i < ant.size(); i++) {
					gen = ant.get(i);
					createRule(gen, gen.getAttr(), rules_writer);
				}

				rules_writer.println("</antecedents>");				
				rules_writer.println("<consequents>");			

				for (i=0; i < cons.size(); i++) {
					gen = cons.get(i);
					createRule(gen, gen.getAttr(), rules_writer);
				}

				rules_writer.println("</consequents>");
				rules_writer.println("</rule>");					
			}

			rules_writer.println("</association_rules>");
			values_writer.println("</values>");
			this.proc.saveReport(this.associationRulesPareto, values_writer);
			rules_writer.close();
			values_writer.close();

			pareto_writer.print(this.proc.getParetos());
			pareto_writer.println("</values>");
			pareto_writer.close();

			totalTime = System.currentTimeMillis() - startTime;
			this.writeTime();

			System.out.println("Algorithm Finished");
		}
		catch (FileNotFoundException e)
		{
			e.printStackTrace();
		}
	}
}
 
Example 18  Project: RDFS   File: TestFairScheduler.java
/**
 * This test starts by submitting three large jobs:
 * - job1 in the default pool, at time 0
 * - job2 in pool_a, with an allocation of 1 map / 2 reduces, at time 200
 * - job3 in pool_b, with an allocation of 2 maps / 1 reduce, at time 200
 *
 * After this, we sleep 100ms, until time 300. At this point, job1 has the
 * highest map deficit, job3 the second, and job2 the third. This is because
 * job3 has more maps in its min share than job2, but job1 has been around
 * a long time at the beginning. The reduce deficits are similar, except job2
 * comes before job3 because it had a higher reduce minimum share.
 *
 * Finally, assign tasks to all slots. The maps should be assigned in the
 * order job3, job2, job1 because 3 and 2 both have guaranteed slots and 3
 * has a higher deficit. The reduces should be assigned as job2, job3, job1.
 */
public void testLargeJobsWithPools() throws Exception {
  // Set up pools file
  PrintWriter out = new PrintWriter(new FileWriter(ALLOC_FILE));
  out.println("<?xml version=\"1.0\"?>");
  out.println("<allocations>");
  // Give pool A a minimum of 1 map, 2 reduces
  out.println("<pool name=\"pool_a\">");
  out.println("<minMaps>1</minMaps>");
  out.println("<minReduces>2</minReduces>");
  out.println("</pool>");
  // Give pool B a minimum of 2 maps, 1 reduce
  out.println("<pool name=\"pool_b\">");
  out.println("<minMaps>2</minMaps>");
  out.println("<minReduces>1</minReduces>");
  out.println("</pool>");
  out.println("</allocations>");
  out.close();
  scheduler.getPoolManager().reloadAllocs();

  JobInProgress job1 = submitJob(JobStatus.RUNNING, 10, 10);
  JobInfo info1 = scheduler.infos.get(job1);

  // Check scheduler variables
  assertEquals(0,    info1.runningMaps);
  assertEquals(0,    info1.runningReduces);
  assertEquals(10,   info1.neededMaps);
  assertEquals(10,   info1.neededReduces);
  assertEquals(0,    info1.mapDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(0,    info1.reduceDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(4.0,  info1.mapFairShare, ALLOW_ERROR);
  assertEquals(4.0,  info1.reduceFairShare, ALLOW_ERROR);

  // Advance time 200ms and submit jobs 2 and 3
  advanceTime(200);
  assertEquals(800,  info1.mapDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(800,  info1.reduceDeficit, ALLOW_DEFICIT_ERROR);
  JobInProgress job2 = submitJob(JobStatus.RUNNING, 10, 10, "pool_a");
  JobInfo info2 = scheduler.infos.get(job2);
  JobInProgress job3 = submitJob(JobStatus.RUNNING, 10, 10, "pool_b");
  JobInfo info3 = scheduler.infos.get(job3);

  // Check that minimum and fair shares have been allocated
  assertEquals(0,    info1.minMaps);
  assertEquals(0,    info1.minReduces);
  assertEquals(1.0,  info1.mapFairShare, ALLOW_ERROR);
  assertEquals(1.0,  info1.reduceFairShare, ALLOW_ERROR);
  assertEquals(1,    info2.minMaps);
  assertEquals(2,    info2.minReduces);
  assertEquals(1.0,  info2.mapFairShare, ALLOW_ERROR);
  assertEquals(2.0,  info2.reduceFairShare, ALLOW_ERROR);
  assertEquals(2,    info3.minMaps);
  assertEquals(1,    info3.minReduces);
  assertEquals(2.0,  info3.mapFairShare, ALLOW_ERROR);
  assertEquals(1.0,  info3.reduceFairShare, ALLOW_ERROR);

  // Advance time 100ms and check deficits
  advanceTime(100);
  assertEquals(900,  info1.mapDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(900,  info1.reduceDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(100,  info2.mapDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(200,  info2.reduceDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(200,  info3.mapDeficit, ALLOW_DEFICIT_ERROR);
  assertEquals(100,  info3.reduceDeficit, ALLOW_DEFICIT_ERROR);

  // Assign tasks and check that slots are first given to needy jobs
  checkAssignment("tt1", "attempt_test_0003_m_000000_0 on tt1");
  checkAssignment("tt1", "attempt_test_0003_m_000001_0 on tt1");
  checkAssignment("tt1", "attempt_test_0002_r_000000_0 on tt1");
  checkAssignment("tt1", "attempt_test_0002_r_000001_0 on tt1");
  checkAssignment("tt2", "attempt_test_0002_m_000000_0 on tt2");
  checkAssignment("tt2", "attempt_test_0001_m_000000_0 on tt2");
  checkAssignment("tt2", "attempt_test_0003_r_000000_0 on tt2");
  checkAssignment("tt2", "attempt_test_0001_r_000000_0 on tt2");
}
 
Example 19  Project: spork   File: TestTmpFileCompression.java
@Test
public void testTFileRecordReader() throws Exception {
    PrintWriter w = new PrintWriter(new FileWriter("1.txt"));
    for (int i = 0; i < 30; i++) {
        w.println("1\tthis is a test for compression of temp files");
    }
    w.close();

    Util.copyFromLocalToCluster(cluster, "1.txt", "1.txt");

    PrintWriter w1 = new PrintWriter(new FileWriter("tfile.pig"));
    w1.println("A = load '1.txt' as (a0:int, a1:chararray);");
    w1.println("B = group A by a0;");
    w1.println("store B into 'tfile' using org.apache.pig.impl.io.TFileStorage();");
    w1.close();

    PrintWriter w2 = new PrintWriter(new FileWriter("tfile2.pig"));
    w2.println("A = load 'tfile' using org.apache.pig.impl.io.TFileStorage() as (a:int, b:bag{(b0:int, b1:chararray)});");
    w2.println("B = foreach A generate flatten($1);");
    w2.println("store B into '2.txt';");
    w2.close();

    try {
        String[] args = { "-Dpig.tmpfilecompression.codec=gz",
                "-Dtfile.io.chunk.size=100", "tfile.pig" };
        PigStats stats = PigRunner.run(args, null);

        assertTrue(stats.isSuccessful());

        String[] args2 = { "-Dpig.tmpfilecompression.codec=gz",
                "-Dtfile.io.chunk.size=100", "tfile2.pig" };
        PigStats stats2 = PigRunner.run(args2, null);

        assertTrue(stats2.isSuccessful());

        OutputStats os = stats2.result("B");
        Iterator<Tuple> iter = os.iterator();
        int count = 0;
        String expected = "(1,this is a test for compression of temp files)";
        while (iter.hasNext()) {
            count++;
            assertEquals(expected, iter.next().toString());
        }
        assertEquals(30, count);

    } finally {
        new File("tfile.pig").delete();
        new File("tfile2.pig").delete();
        new File("1.txt").delete();
    }
}
 
Example 20  Project: translationstudio8   File: EncodingResolver.java
private static String getXMLEncoding(String fileName) {
    // return UTF-8 as default
	String result = "UTF-8";	     //$NON-NLS-1$
    try{
	    // check if there is a BOM (byte order mark)
	    // at the start of the document
	    FileInputStream inputStream = new FileInputStream(fileName);
	    byte[] array = new byte[2];
	    inputStream.read(array);
	    inputStream.close();
	    byte[] lt = "<".getBytes(); //$NON-NLS-1$
	    byte[] feff = {-1,-2};
	    byte[] fffe = {-2,-1};
	    if (array[0] != lt[0] ) {
	        // there is a BOM, now check the order
	        if (array[0] == fffe[0] && array[1] == fffe[1]) {
	            return "UTF-16BE"; //$NON-NLS-1$
	        }
	        if (array[0] == feff[0] && array[1] == feff[1]) {
	            return "UTF-16LE"; //$NON-NLS-1$
	        }
	    }
	    // check declared encoding
		FileReader input = new FileReader(fileName);
		BufferedReader buffer = new BufferedReader(input);
		String line = buffer.readLine();
		input.close();
		if (line.startsWith("<?")) { //$NON-NLS-1$
			line = line.substring(2, line.indexOf("?>")); //$NON-NLS-1$
			line = line.replaceAll("\'", "\""); //$NON-NLS-1$ //$NON-NLS-2$
			StringTokenizer tokenizer = new StringTokenizer(line);
			while (tokenizer.hasMoreTokens()) {
				String token = tokenizer.nextToken();
				if (token.startsWith("encoding")) { //$NON-NLS-1$
					result = token.substring(token.indexOf("\"") + 1, token.lastIndexOf("\"")); //$NON-NLS-1$ //$NON-NLS-2$
				}
			}
		}
    } catch(Exception e) {
    	if(Constant.RUNNING_MODE == Constant.MODE_DEBUG){
       		e.printStackTrace();
       	}

    	try {
			File log = File.createTempFile("error", ".log", new File("logs"));
			FileWriter writer = new FileWriter(log);
			PrintWriter print = new PrintWriter(writer);
			e.printStackTrace(print);
			writer.close();
			print.close();
			writer = null;
			print = null;
		} catch (IOException e2) {
			//do nothing
		} //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$

    }
    String[] encodings = TextUtil.getPageCodes();
    for (int i=0 ; i<encodings.length ; i++) {
    		if (encodings[i].equalsIgnoreCase(result)) {
    			return encodings[i];
    		}
    }
	return result;
}