The following lists example code for java.io.BufferedWriter#append(). Click the link to view the source code on GitHub, or leave a comment on the right.
@Test
public void testDirectoryImport() throws Exception {
    // Two CSV files in one directory; the import command should pick up both.
    new File("target/sample").mkdir();
    // try-with-resources so the writers are closed even if an append throws.
    try (BufferedWriter writer = Files.newWriter(
            new File("target/sample/one.csv"), CSVSchemaCommand.SCHEMA_CHARSET)) {
        writer.append("id,username,email\n");
        writer.append("1,test,[email protected]\n");
    }
    try (BufferedWriter writer = Files.newWriter(
            new File("target/sample/two.csv"), CSVSchemaCommand.SCHEMA_CHARSET)) {
        writer.append("id,username,email\n");
        writer.append("2,user,[email protected]\n");
    }
    command.targets = Lists.newArrayList("target/sample", datasetName);
    command.run();
    Assert.assertEquals("Should contain expected records",
        expected, DatasetTestUtilities.materialize(dataset));
    verify(console).trace(contains("repo:file:target/data"));
    verify(console).info("Added {} records to \"{}\"", 2L, datasetName);
    verifyNoMoreInteractions(console);
}
/**
 * Appends a message to the log file, surrounded by blank lines.
 * Synchronized so concurrent callers cannot interleave partial messages.
 *
 * @param message the text to append to the log file
 */
public static synchronized void writeLog(String message) {
    File f = getFile();
    if (f == null) {
        print("writeLog error, due to the file dir is error");
        return;
    }
    // try-with-resources closes the writer (and the underlying FileWriter)
    // even when an append fails; closing also flushes, so no explicit flush
    // or redundant fw.close() is needed.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(f, true))) {
        bw.append("\n");
        bw.append(message);
        bw.append("\n");
    } catch (IOException e) {
        print("writeLog error, " + e.getMessage());
    }
}
/**
 * Writes a timestamped line to the log file.
 *
 * @param message    text to log; it is prefixed with the current time
 * @param appendMode true to append to an existing log, false to truncate
 */
void writeToLog(String message, boolean appendMode) {
    try {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss", Locale.US);
        String time = sdf.format(new Date());
        message = time + " - " + message;
        File logFile = new File(getLogFilePath());
        // Guard against a path with no parent component (getParentFile() may be null).
        File parent = logFile.getParentFile();
        if (parent != null && !parent.exists()) {
            //noinspection ResultOfMethodCallIgnored
            parent.mkdirs();
        }
        // try-with-resources closes the writer even if newLine/append throw.
        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(logFile, appendMode))) {
            bufferedWriter.newLine();
            bufferedWriter.append(message);
        }
    } catch (IOException e) {
        Log.e(TAG, "Exception in writeToLog", e);
    }
}
@Override
public Void run() throws Exception {
    // Appends one record (plus newline) to the per-user HDFS file.
    String effectiveDirPath = "/user/" + (serviceAsNamespace ? "" : (hdfsUser + "/")) + filePath;
    FileSystem fileSystem = fsGetter.get();
    try {
        Path path = new Path(effectiveDirPath);
        FSDataOutputStream out = fileSystem.append(path);
        if (out == null) {
            throw new CygnusPersistenceError("The /user/" + (serviceAsNamespace ? "" : (hdfsUser + "/"))
                + filePath + " file could not be created in HDFS");
        } // if
        // NOTE(review): OutputStreamWriter uses the platform default charset here —
        // confirm whether UTF-8 was intended before changing it.
        try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
            writer.append(data + "\n");
        }
    } finally {
        // Close the file system on every path, including the error throw above.
        fileSystem.close();
    }
    return null;
}
/**
 * Dump MLN clause description table to a file, one "weight\tdescription"
 * line per clause, ordered by weight then description.
 *
 * @param relClauses    table holding clause weights (joined on cid)
 * @param relClauseDesc table holding clause descriptions (joined on clauseId)
 * @param fout          output file path (gzipped transparently when applicable)
 */
public void dumpClauseDescToFile(String relClauses, String relClauseDesc, String fout){
    BufferedWriter bufferedWriter = FileMan.getBufferedWriterMaybeGZ(fout);
    int digits = 4;
    String sql;
    try {
        // NOTE(review): table names are concatenated into the SQL — acceptable only
        // if relClauses/relClauseDesc are internal identifiers, never user input.
        sql = "SELECT c.weight, d.clauseDesc FROM " + relClauseDesc + " d, ";
        sql += relClauses + " c WHERE c.cid = d.clauseId ORDER BY c.weight, d.clauseDesc";
        ResultSet rs = db.query(sql);
        try {
            while (rs.next()) {
                String clauseDesc = rs.getString("clauseDesc");
                double weight = rs.getDouble("weight");
                UIMan.verbose(3, UIMan.decimalRound(digits, weight) + "\t" + clauseDesc);
                bufferedWriter.append(UIMan.decimalRound(digits, weight) + "\t" + clauseDesc + "\n");
            }
        } finally {
            // Release the result set even when a row access throws.
            rs.close();
        }
    } catch (Exception e) {
        ExceptionMan.handle(e);
    } finally {
        // The writer was previously leaked when db.query() threw; always close it.
        try {
            bufferedWriter.close();
        } catch (Exception e) {
            ExceptionMan.handle(e);
        }
    }
}
/**
 * Appends a timestamped full error message for the given throwable to the
 * log file, creating the file (and its parent directories) if needed.
 * Logging failures are reported to stderr but never propagated.
 *
 * @param throwable the error to record
 */
public void logError(Throwable throwable) {
    try {
        File logFile = new File(LOG_FILE);
        if (!logFile.exists()) {
            logFile.getParentFile().mkdirs();
            logFile.createNewFile();
        }
        if (logFile.exists() && logFile.canWrite()) {
            // try-with-resources closes the writer even if an append throws.
            try (BufferedWriter buffer = new BufferedWriter(new FileWriter(logFile, true))) {
                buffer.append(new Date().toString());
                buffer.newLine();
                buffer.append(this.createFullErrorMessage(throwable));
                buffer.newLine();
            }
        }
    } catch (Throwable e) {
        // Last-resort handler: logging must never crash the caller.
        e.printStackTrace();
    }
}
/**
 * Writes the table configuration for one annotated class to the config writer,
 * followed by a '#' separator line. Classes whose definitions cannot be loaded,
 * or that contain no annotated fields, are skipped with a console message.
 */
private static void writeConfigForTable(BufferedWriter writer, Class<?> clazz) throws SQLException, IOException {
    String tableName = DatabaseTableConfig.extractTableName(clazz);
    List<DatabaseFieldConfig> fieldConfigs = new ArrayList<DatabaseFieldConfig>();
    // Collect annotated fields from the class and every superclass above it.
    try {
        Class<?> current = clazz;
        while (current != null) {
            for (Field field : current.getDeclaredFields()) {
                DatabaseFieldConfig config = DatabaseFieldConfig.fromField(databaseType, tableName, field);
                if (config != null) {
                    fieldConfigs.add(config);
                }
            }
            current = current.getSuperclass();
        }
    } catch (Error e) {
        // Deliberately catches Error: reflective field walks can trigger
        // class-loading failures for classes we simply want to skip.
        System.err.println("Skipping " + clazz + " because we got an error finding its definition: "
            + e.getMessage());
        return;
    }
    if (fieldConfigs.isEmpty()) {
        System.out.println("Skipping " + clazz + " because no annotated fields found");
        return;
    }
    @SuppressWarnings({"rawtypes", "unchecked"})
    DatabaseTableConfig<?> tableConfig = new DatabaseTableConfig(clazz, tableName, fieldConfigs);
    DatabaseTableConfigLoader.write(writer, tableConfig);
    writer.append("#################################");
    writer.newLine();
    System.out.println("Wrote config for " + clazz);
}
@BeforeClass
public static void createCSVSchemaAndSample() throws Exception {
    // Build a sample CSV, derive an Avro schema from it, and construct the
    // two records the import tests expect to find.
    sample = "target/users.csv";
    avsc = "target/user.avsc";
    datasetName = "users";
    repoURI = "repo:" + getDFS().getUri().toString() + "/tmp/data";
    // try-with-resources so the writer is closed even if an append throws.
    try (BufferedWriter writer = Files.newWriter(
            new File(sample), CSVSchemaCommand.SCHEMA_CHARSET)) {
        writer.append("id,username,email\n");
        writer.append("1,test,[email protected]\n");
        writer.append("2,user,[email protected]\n");
    }
    TestUtil.run("csv-schema", sample, "-o", avsc, "--class", "User");
    GenericRecordBuilder builder = new GenericRecordBuilder(
        new Schema.Parser().parse(new File(avsc)));
    builder.set("id", 1L);
    builder.set("username", "test");
    builder.set("email", "[email protected]");
    expected.add(builder.build());
    builder.set("id", 2L);
    builder.set("username", "user");
    builder.set("email", "[email protected]");
    expected.add(builder.build());
}
/**
 * Appends the thread's body message to a per-thread log file under the
 * directory for the given log id. I/O failures are printed, not propagated.
 *
 * @param thread the thread whose state is logged
 * @param logId  identifies the log directory to write into
 */
public void logThread(Thread thread, String logId) {
    File logFile = new File(getLogDir(logId), getThreadLabel(thread) + ".log");
    // try-with-resources closes the writer even if the append throws.
    try (BufferedWriter buffer = new BufferedWriter(new FileWriter(logFile, true))) {
        buffer.append(this.getThreadBodyMessage(thread));
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Writes the buffer's contents to the named file, replacing any existing
 * content. Failures are printed to stderr rather than propagated.
 *
 * @param fileName path of the file to (over)write
 * @param buffer   content to write
 */
public static void writeFile(String fileName, StringBuffer buffer) {
    // try-with-resources closes (and flushes) the writer even if append throws.
    try (BufferedWriter f = new BufferedWriter(new FileWriter(new File(fileName)))) {
        f.append(buffer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/** Remap meanings probability info between two different releases of WordNet
 * @param args the command line arguments: [0] mapping file, [1] input, [2] output
 */
public static void main(String[] args) {
    try {
        WnMapping mapping = new WnMapping(new File(args[0]));
        mapping.init();
        // try-with-resources closes both streams on every path.
        try (BufferedReader reader = new BufferedReader(new FileReader(args[1]));
             BufferedWriter writer = new BufferedWriter(new FileWriter(args[2]))) {
            String line;
            // ready() only reports buffered availability and is not a reliable
            // end-of-stream test; readLine() == null is the real EOF signal.
            while ((line = reader.readLine()) != null) {
                String[] split = line.split("\t");
                if (split.length > 2) {
                    writer.append(split[0]);
                    // Columns alternate: meaning at odd indices, value following it.
                    for (int k = 1; k < split.length; k = k + 2) {
                        String map = mapping.map(split[k]);
                        if (map == null) {
                            // Keep the original id when no mapping exists.
                            map = split[k];
                            Logger.getLogger(RemapFreq.class.getName()).log(Level.WARNING, "No mapping for {0}", split[k]);
                        }
                        writer.append("\t").append(map).append("\t");
                        writer.append(split[k + 1]);
                    }
                    writer.newLine();
                }
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(RemapFreq.class.getName()).log(Level.SEVERE, null, ex);
    }
}
/**
 * Writes the given contents to the file, failing the current test on I/O error.
 *
 * @param file     destination file (overwritten)
 * @param contents text to write
 */
private void writeFile(final File file, final String contents) {
    // try-with-resources closes the writer even if the append throws.
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        writer.append(contents);
    } catch (IOException ex) {
        Assert.fail();
    }
}
/**
* initialize input information for data summaries generation
* @param location Directory location of the resulting FedSummaries file (i.e. location/FedSum.n3)
* @throws IOException IO Exceptions
*/
public SemagrowSummariesGenerator(String location) throws IOException
{
bw = new BufferedWriter(new FileWriter(new File(location))); //--name/location where the summaries file will be stored
bw.append("@prefix void: <http://rdfs.org/ns/void#> ."); bw.newLine();
bw.append("@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> . "); bw.newLine();
bw.append("@prefix dc: <http://purl.org/dc/elements/1.1/> ."); bw.newLine();
bw.append("@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> ."); bw.newLine();
bw.newLine();
bw.append("_:DatasetRoot rdf:type void:Dataset .");
bw.newLine();
}
/**
 * Write the data of the Boundary objects to a BED file, specified by @param bedFilePath.
 * Emits one track header line, then one "chrN\tstart\tstop" line per boundary.
 * @param bedFilePath destination BED file path (overwritten)
 * @param trackName   value for the BED track name attribute
 * @throws IOException if writing fails
 */
public void writeBoundariesToBedFile(String bedFilePath, String trackName) throws IOException{
    // try-with-resources closes the writer even if an append throws mid-loop.
    try (BufferedWriter bedBufferWriter = new BufferedWriter(new FileWriter(bedFilePath))) {
        bedBufferWriter.append("track name=\"" + trackName + "\"\n");
        for (GenomicBoundary<V> genomicBoundary : this) {
            bedBufferWriter.append("chr" + genomicBoundary.getChromosome() + "\t" + genomicBoundary.getStart() + "\t" + genomicBoundary.getStop() + "\n");
        }
    }
}
/**
 * Writes a PLINK .map file: one line per biallelic SNP with chromosome, id,
 * genetic distance (always 0) and position. Non-biallelic / non-SNP variants
 * are skipped and counted in excludedVariantsCounter.
 *
 * @param mapFile destination map file
 * @throws IOException if writing fails
 */
private void writeMapFile(File mapFile) throws IOException {
    Utils.createEmptyFile(mapFile, "map");
    // try-with-resources closes the writer even if a variant access throws.
    try (BufferedWriter mapFileWriter = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(mapFile), FILE_ENCODING))) {
        for (GeneticVariant variant : genotypeData) {
            if (variant.getAlleleCount() > 2 || !variant.isSnp()) {
                LOGGER.warn("Skipping variant: " + variant.getPrimaryVariantId() + ", it is not a biallelic SNP");
                ++excludedVariantsCounter;
                continue;
            }
            mapFileWriter.append(FormatPlinkChr.formatChr(variant.getSequenceName()));
            mapFileWriter.append(SEPARATOR);
            // Fall back to "sequence:position" when the variant has no id.
            mapFileWriter.append(variant.getPrimaryVariantId() == null ? variant.getSequenceName() + ":" + variant.getStartPos() : variant.getPrimaryVariantId());
            mapFileWriter.append(SEPARATOR);
            mapFileWriter.append('0');
            mapFileWriter.append(SEPARATOR);
            mapFileWriter.append(String.valueOf(variant.getStartPos()));
            mapFileWriter.append('\n');
            ++writtenVariantsCounter;
        }
    }
}
/**
 * Writes the PARAM zone of the output file: the "PARAM" keyword line, a
 * " 0 0 0" flags line, one line of fixed-width numeric tolerances, and a
 * trailing blank line.
 *
 * @param writer destination writer (left open for subsequent zones)
 * @throws IOException if writing fails
 */
private static void writeParamZone(BufferedWriter writer) throws IOException {
    writer.append("PARAM");
    writer.newLine();
    writer.append(" 0 0 0");
    writer.newLine();
    // PADDING_8 and format() are defined elsewhere; presumably 8 spaces and a
    // fixed-width (15-char) numeric formatter — TODO confirm.
    // NOTE(review): format(0.0001, 15) appears three times with differing
    // neighbours (0.00001, 0.001) — verify these tolerance values are intended
    // and not a copy-paste slip.
    writer.append(PADDING_8)
    .append(format(0.00001, 15))
    .append(format(0.0001, 15))
    .append(format(0.001, 15))
    .append(format(0.0001, 15))
    .append(format(0.0001, 15))
    .append("2");
    writer.newLine();
    writer.newLine();
}
/**
 * Concatenate files.
 *
 * Writes all files into a temp file and then renames it over the output file.
 * The first line of the first file is kept as a header; the first line of
 * every subsequent file is skipped (assumed to be a duplicate header). All
 * output lines are terminated with "\r\n".
 *
 * @param hFile the set with files to concatenate (iteration order of the set
 *              determines concatenation order)
 * @param outputfile the outputfile
 */
public static void concatFile(HashSet<File> hFile, File outputfile) {
    try {
        String fileName = outputfile.getName();
        File fTmp = new File(outputfile.getParentFile() + "/tmp_" + fileName);
        if (fTmp.exists()) {
            fTmp.delete();
        }
        // try-with-resources closes every stream even when reading/writing throws.
        try (BufferedWriter bw = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(fTmp), "UTF-8"))) {
            boolean first = true;
            for (File file : hFile) {
                try (BufferedReader br = new BufferedReader(
                        new InputStreamReader(new FileInputStream(file), "UTF-8"))) {
                    String nextLine = br.readLine();
                    // Keep the header line only for the first file.
                    if (first && nextLine != null) {
                        bw.append(nextLine);
                        bw.append("\r\n");
                        first = false;
                    }
                    while ((nextLine = br.readLine()) != null) {
                        bw.append(nextLine);
                        bw.append("\r\n");
                    }
                }
            }
        }
        if (outputfile.exists()) {
            outputfile.delete();
        }
        if (!fTmp.renameTo(outputfile)) {
            // Rename failed (e.g. cross-device); previously this was silently
            // ignored — at least report it before cleaning up.
            System.err.println("concatFile: could not rename " + fTmp + " to " + outputfile);
        }
        if (fTmp.exists()) {
            fTmp.delete();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Opens the HTML document skeleton; plain-text reports get no header.
 *
 * @param writer   destination writer (left open)
 * @param textOnly true to suppress the HTML header entirely
 * @throws IOException if writing fails
 */
private void appendHeader(BufferedWriter writer, boolean textOnly) throws IOException {
    if (textOnly) {
        return;
    }
    writer.append("<html><body>\n");
}
/**
 * Writes this entry's contents into the file {@code loc/filename}.
 *
 * @param loc directory to write into
 * @throws Exception if the file cannot be written
 */
void dumpTo(java.io.File loc) throws Exception {
    File file = new File(loc, filename);
    // try-with-resources closes the writer even if the append throws.
    try (BufferedWriter bw = new java.io.BufferedWriter(new FileWriter(file))) {
        bw.append(contents);
    }
}
/**
 * Generates the Java source file for one mapper class via the annotation
 * processor's Filer: package line, imports, class declaration, member
 * variables, the two transform methods (direct and inverse), closing brace.
 *
 * @param mapper        package/class naming info for the generated mapper
 * @param classVars     member variable declarations to emit
 * @param imports       import statements to emit
 * @param directFields  field mappings for the source-to-target transform
 * @param inverseFields field mappings for the target-to-source transform
 * @throws RuntimeException wrapping any IOException from the Filer
 */
private void generateMapperJavaFile(MapperInfo mapper, Collection<String> classVars, Collection<String> imports, Collection<String> directFields, Collection<String> inverseFields) {
    try {
        String mapperCanonicalName = String.format("%s.%s", mapper.mapperPackageName, mapper.mapperClassName);
        writeTrace(String.format("Generating source file for Mapper with name %s", mapperCanonicalName));
        JavaFileObject javaFileObject = processingEnv.getFiler().createSourceFile(mapperCanonicalName);
        // try-with-resources closes the writer even if generation throws mid-file.
        try (BufferedWriter buffer = new BufferedWriter(javaFileObject.openWriter())) {
            buffer.append(String.format(Tools.PACKAGE_PATTERN, mapper.mapperPackageName));
            buffer.newLine();
            for (String classImport : imports) {
                buffer.newLine();
                buffer.append(classImport);
            }
            buffer.newLine();
            buffer.newLine();
            buffer.append(String.format(Tools.CLASS_PATTERN, mapper.mapperClassName));
            if (classVars.size() > 0) {
                buffer.newLine();
                for (String classVar : classVars) {
                    buffer.newLine();
                    buffer.append("\t").append(classVar);
                }
            }
            generateTransformMethod(buffer, mapper.className, mapper.linkedClassName, directFields);
            generateTransformMethod(buffer, mapper.linkedClassName, mapper.className, inverseFields);
            buffer.newLine();
            buffer.append("}");
        }
    } catch (IOException error) {
        // Preserve the cause so the processing environment reports the real failure.
        throw new RuntimeException(error);
    }
}