com.fasterxml.jackson.core.JsonFactory#createGenerator() Source Code Examples

The examples below show how to use com.fasterxml.jackson.core.JsonFactory#createGenerator(). Each one is taken from an open-source project and is listed with its project and file name.

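All of the examples share the same basic pattern: construct a JsonFactory, call createGenerator(...) with a target (an OutputStream, Writer, or File), write JSON tokens, and close the generator. The following minimal sketch illustrates that pattern on its own; the class name MinimalCreateGeneratorDemo and the fields it writes are illustrative assumptions, not taken from any project listed here.

import java.io.IOException;
import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class MinimalCreateGeneratorDemo {
    public static void main(String[] args) throws IOException {
        JsonFactory factory = new JsonFactory();
        StringWriter writer = new StringWriter();
        // createGenerator(Writer) returns a generator that writes JSON text to the Writer;
        // try-with-resources closes (and flushes) the generator automatically.
        try (JsonGenerator generator = factory.createGenerator(writer)) {
            generator.writeStartObject();
            generator.writeStringField("message", "hello"); // illustrative field
            generator.writeNumberField("count", 1);         // illustrative field
            generator.writeEndObject();
        }
        System.out.println(writer); // prints {"message":"hello","count":1}
    }
}
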
Example 1  Project: ure   File: LandedModal.java
void saveScaper(ULandscaper scaper, String filename) {
    String path = commander.savePath();
    File file = new File(path + filename);
    try (
            FileOutputStream stream = new FileOutputStream(file);
    ) {
        JsonFactory jfactory = new JsonFactory();
        JsonGenerator jGenerator = jfactory
                .createGenerator(stream, JsonEncoding.UTF8);
        jGenerator.setCodec(objectMapper);
        jGenerator.writeObject(scaper);
        jGenerator.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example 2  Project: pinpoint   File: AgentTimeHistogramTest.java
@Test
public void testViewModel() throws IOException {

    Application app = new Application("test", ServiceType.STAND_ALONE);
    AgentTimeHistogramBuilder builder = new AgentTimeHistogramBuilder(app, Range.newRange(0, 1000*60));
    List<ResponseTime> responseHistogramList = createResponseTime(app, "test1", "test2");
    AgentTimeHistogram histogram = builder.build(responseHistogramList);

    List<AgentResponseTimeViewModel> viewModel = histogram.createViewModel();
    logger.debug("{}", viewModel);

    JsonFactory jsonFactory = mapper.getFactory();
    StringWriter stringWriter = new StringWriter();
    JsonGenerator jsonGenerator = jsonFactory.createGenerator(stringWriter);
    jsonGenerator.writeStartObject();
    for (AgentResponseTimeViewModel agentResponseTimeViewModel : viewModel) {
        jsonGenerator.writeObject(agentResponseTimeViewModel);
    }
    jsonGenerator.writeEndObject();
    jsonGenerator.flush();
    jsonGenerator.close();
    logger.debug(stringWriter.toString());

}
 
Example 3  Project: keycloak   File: RealmsConfigurationBuilder.java
public RealmsConfigurationBuilder(String filename) {
    this.file = filename;

    try {
        JsonFactory f = new JsonFactory();
        g = f.createGenerator(new File(file), JsonEncoding.UTF8);

        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        mapper.enable(SerializationFeature.INDENT_OUTPUT);
        g.setCodec(mapper);

    } catch (Exception e) {
        throw new RuntimeException("Failed to create realms export file", e);
    }
}
 
Example 4  Project: Quicksql   File: DruidQuery.java
/** Generates a JSON string to query metadata about a data source. */
static String metadataQuery(String dataSourceName,
    List<Interval> intervals) {
    final StringWriter sw = new StringWriter();
    final JsonFactory factory = new JsonFactory();
    try {
        final JsonGenerator generator = factory.createGenerator(sw);
        generator.writeStartObject();
        generator.writeStringField("queryType", "segmentMetadata");
        generator.writeStringField("dataSource", dataSourceName);
        generator.writeBooleanField("merge", true);
        generator.writeBooleanField("lenientAggregatorMerge", true);
        generator.writeArrayFieldStart("analysisTypes");
        generator.writeString("aggregators");
        generator.writeEndArray();
        writeFieldIf(generator, "intervals", intervals);
        generator.writeEndObject();
        generator.close();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return sw.toString();
}
 
Example 5  Project: roncoo-education   File: ResultMapperExt.java
private String buildJSONFromFields(Collection<SearchHitField> values) {
	JsonFactory nodeFactory = new JsonFactory();
	try {
		ByteArrayOutputStream stream = new ByteArrayOutputStream();
		JsonGenerator generator = nodeFactory.createGenerator(stream, JsonEncoding.UTF8);
		generator.writeStartObject();
		for (SearchHitField value : values) {
			if (value.getValues().size() > 1) {
				generator.writeArrayFieldStart(value.getName());
				for (Object val : value.getValues()) {
					generator.writeObject(val);
				}
				generator.writeEndArray();
			} else {
				generator.writeObjectField(value.getName(), value.getValue());
			}
		}
		generator.writeEndObject();
		generator.flush();
		return new String(stream.toByteArray(), Charset.forName("UTF-8"));
	} catch (IOException e) {
		return null;
	}
}
 
Example 6  Project: incubator-nemo   File: MetricStore.java
/**
 * Send changed metric data to {@link MetricBroadcaster}, which will broadcast it to
 * all active WebSocket sessions. This method should be called manually if you want to
 * send changed metric data to the frontend client. Also this method is synchronized.
 *
 * @param metricClass class of the metric.
 * @param id          id of the metric.
 * @param <T>         type of the metric to broadcast
 */
public synchronized <T extends Metric> void triggerBroadcast(final Class<T> metricClass, final String id) {
  final MetricBroadcaster metricBroadcaster = MetricBroadcaster.getInstance();
  final ObjectMapper objectMapper = new ObjectMapper();
  final T metric = getMetricWithId(metricClass, id);
  final JsonFactory jsonFactory = new JsonFactory();
  final ByteArrayOutputStream stream = new ByteArrayOutputStream();
  try (JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
    jsonGenerator.setCodec(objectMapper);

    jsonGenerator.writeStartObject();
    jsonGenerator.writeFieldName("metricType");
    jsonGenerator.writeString(metricClass.getSimpleName());

    jsonGenerator.writeFieldName("data");
    jsonGenerator.writeObject(metric);
    jsonGenerator.writeEndObject();

    metricBroadcaster.broadcast(stream.toString());
  } catch (final IOException e) {
    throw new MetricException(e);
  }
}
 
Example 7  Project: ure   File: UCartographer.java
/**
 * Persist an object to disk.  This will most likely be an area or region, but in theory you could
 * write anything that serializes properly.
 * @param object
 * @param filename
 */
public void persist(Object object, String filename) {
    String path = commander.savePath();
    if (commander.config.isPersistentAreas()) {
        File dir = new File(path);
        dir.mkdirs();
        log.info("saving file " + path + filename);
        File file = new File(path + filename);
        try (
                FileOutputStream stream = new FileOutputStream(file);
                GZIPOutputStream gzip = new GZIPOutputStream(stream)
        ) {
            JsonFactory jfactory = new JsonFactory();
            JsonGenerator jGenerator = jfactory
                    .createGenerator(gzip, JsonEncoding.UTF8);
            jGenerator.setCodec(objectMapper);
            jGenerator.writeObject(object);
            jGenerator.close();
        } catch (IOException e) {
            throw new RuntimeException("Couldn't persist object " + object.toString(), e);
        }
    }
}
 
Example 8  Project: ure   File: UVaultSet.java
public void persist(String absoluteFilepath) {
    File file = new File(absoluteFilepath);
    try (
            FileOutputStream stream = new FileOutputStream(file);
            //GZIPOutputStream gzip = new GZIPOutputStream(stream)
    ) {
        JsonFactory jfactory = new JsonFactory();
        JsonGenerator jGenerator = jfactory
                .createGenerator(stream, JsonEncoding.UTF8);
        jGenerator.setCodec(new ObjectMapper());
        jGenerator.writeObject(this);
        jGenerator.close();
    } catch (IOException e) {
        throw new RuntimeException("Couldn't persist object " + toString(), e);
    }
}
 
Example 9  Project: calcite   File: DruidQueryFilterTest.java
@Test void testInFilter() throws IOException {
  final Fixture f = new Fixture();
  final List<? extends RexNode> listRexNodes =
      ImmutableList.of(f.rexBuilder.makeInputRef(f.varcharRowType, 0),
          f.rexBuilder.makeExactLiteral(BigDecimal.valueOf(1)),
          f.rexBuilder.makeExactLiteral(BigDecimal.valueOf(5)),
          f.rexBuilder.makeLiteral("value1"));

  RexNode inRexNode =
      f.rexBuilder.makeCall(SqlStdOperatorTable.IN, listRexNodes);
  DruidJsonFilter returnValue = DruidJsonFilter
      .toDruidFilters(inRexNode, f.varcharRowType, druidQuery);
  assertThat("Filter is null", returnValue, notNullValue());
  JsonFactory jsonFactory = new JsonFactory();
  final StringWriter sw = new StringWriter();
  JsonGenerator jsonGenerator = jsonFactory.createGenerator(sw);
  returnValue.write(jsonGenerator);
  jsonGenerator.close();

  assertThat(sw.toString(),
      is("{\"type\":\"in\",\"dimension\":\"dimensionName\","
          + "\"values\":[\"1\",\"5\",\"value1\"]}"));
}
 
Example 10  Project: constellation   File: RestClient.java
/**
 * Generate a JSON string from a flat Map<String, String> of keys and values
 *
 * @param params A Map of key/value pairs
 * @return A json representation of a simple map
 *
 * @throws IOException
 */
private String generateJsonFromFlatMap(final Map<String, String> params) throws IOException {
    final ByteArrayOutputStream json = new ByteArrayOutputStream();
    final JsonFactory jsonFactory = new MappingJsonFactory();
    try (JsonGenerator jg = jsonFactory.createGenerator(json)) {
        jg.writeStartObject();
        for (final Map.Entry<String, String> param : params.entrySet()) {
            jg.writeStringField(param.getKey(), param.getValue());
        }
        jg.writeEndObject();
        jg.flush();
    }

    return json.toString(StandardCharsets.UTF_8.name());
}
 
Example 11  Project: JavaWeb   File: JsonAndXmlUtil.java
public static String object2Json(Object obj) throws Exception{
	if(obj==null){
		return "";
	}
	ObjectMapper mapper = new ObjectMapper();
	StringWriter sw = new StringWriter();
	JsonFactory jf = new JsonFactory();
	JsonGenerator jg = jf.createGenerator(sw);
	mapper.writeValue(jg, obj);
	jg.close();
	return sw.toString();
}
 
Example 12  Project: transit-java   File: TransitTest.java
public void testPrettyPrint() {
    try {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        JsonFactory jf = new JsonFactory();
        JsonGenerator jg = jf.createGenerator(bytes);
        jg.writeString(":db/ident");
        jg.close();
        String s = new String(bytes.toByteArray());
        System.out.println(s);
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
 
Example 13  Project: BIMserver   File: JsonHandler.java
public void execute(ObjectNode incomingMessage, HttpServletRequest httpRequest, Writer out) {
	JsonFactory jsonFactory = new JsonFactory();
	JsonGenerator writer = null;
	try {
		writer = jsonFactory.createGenerator(out);
		writer.writeStartObject();
		String token = incomingMessage.has("token") ? incomingMessage.get("token").asText() : null;
		String oAuthCode = incomingMessage.has("oauthcode") ? incomingMessage.get("oauthcode").asText() : null;
		long messageId = incomingMessage.has("id") ? incomingMessage.get("id").asLong() : -1;
		if (messageId != -1) {
			writer.writeFieldName("id");
			writer.writeNumber(messageId);
		}
		if (incomingMessage.has("request")) {
			writer.writeFieldName("response");
			processSingleRequest((ObjectNode) incomingMessage.get("request"), token, oAuthCode, httpRequest, writer);
		} else if (incomingMessage.has("requests")) {
			processMultiRequest((ArrayNode) incomingMessage.get("requests"), token, oAuthCode, httpRequest, writer);
		}
	} catch (Throwable throwable) {
		if (throwable instanceof UserException) {

		} else {
			LOGGER.info(incomingMessage.toString());
			LOGGER.info("", throwable);
		}
		// throwable.printStackTrace();
		handleThrowable(writer, throwable);
	} finally {
		try {
			writer.writeEndObject();
			writer.close();
		} catch (Exception e) {
			LOGGER.error("", e);
		}
	}
}
 
Example 14
private ByteArrayOutputStream testWriting(TestObject value, JsonFactory factory, Class<TestObject> clazz) throws IOException {
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  JsonGenerator g = factory.createGenerator(outputStream);
  JsonGeneratorWriter generatorWriter = new JsonGeneratorWriter(g);
  gson.toJson(value, clazz, generatorWriter);
  generatorWriter.flush();
  return outputStream;
}
 
Example 15  Project: dbsync   File: JSonDatabaseWriter.java
public JSonDatabaseWriter(final OutputStream outStream) throws IOException {
    this.mOutStream = outStream;

    JsonFactory f = new JsonFactory();
    mGen = f.createGenerator(outStream, JsonEncoding.UTF8);
    mGen.setPrettyPrinter(new DefaultPrettyPrinter());
}
 
Example 16  Project: calcite   File: DruidQuery.java
@Nullable
private String planAsTimeSeries(List<DimensionSpec> groupByKeyDims, DruidJsonFilter jsonFilter,
    List<VirtualColumn> virtualColumnList, List<JsonAggregation> aggregations,
    List<JsonExpressionPostAgg> postAggregations, JsonLimit limit, DruidJsonFilter havingFilter) {
  if (havingFilter != null) {
    return null;
  }
  if (groupByKeyDims.size() > 1) {
    return null;
  }
  if (limit.limit != null) {
    // it has a limit not supported by time series
    return null;
  }
  if (limit.collations != null && limit.collations.size() > 1) {
    // it has multiple sort columns
    return null;
  }
  final String sortDirection;
  if (limit.collations != null && limit.collations.size() == 1) {
    if (groupByKeyDims.isEmpty()
        || !limit.collations.get(0).dimension.equals(groupByKeyDims.get(0).getOutputName())) {
      // sort column is not time column
      return null;
    }
    sortDirection = limit.collations.get(0).direction;
  } else {
    sortDirection = null;
  }

  final Granularity timeseriesGranularity;
  if (groupByKeyDims.size() == 1) {
    DimensionSpec dimensionSpec = Iterables.getOnlyElement(groupByKeyDims);
    Granularity granularity = ExtractionDimensionSpec.toQueryGranularity(dimensionSpec);
    // case we have project expression on the top of the time extract then can not use timeseries
    boolean hasExpressionOnTopOfTimeExtract = false;
    for (JsonExpressionPostAgg postAgg : postAggregations) {
      if (postAgg instanceof JsonExpressionPostAgg) {
        if (postAgg.expression.contains(groupByKeyDims.get(0).getOutputName())) {
          hasExpressionOnTopOfTimeExtract = true;
        }
      }
    }
    timeseriesGranularity = hasExpressionOnTopOfTimeExtract ? null : granularity;
    if (timeseriesGranularity == null) {
      // can not extract granularity bailout
      return null;
    }
  } else {
    timeseriesGranularity = Granularities.all();
  }

  final boolean skipEmptyBuckets = Granularities.all() != timeseriesGranularity;

  final StringWriter sw = new StringWriter();
  final JsonFactory factory = new JsonFactory();
  try {
    final JsonGenerator generator = factory.createGenerator(sw);
    generator.writeStartObject();
    generator.writeStringField("queryType", "timeseries");
    generator.writeStringField("dataSource", druidTable.dataSource);
    generator.writeBooleanField("descending", sortDirection != null
        && sortDirection.equals("descending"));
    writeField(generator, "granularity", timeseriesGranularity);
    writeFieldIf(generator, "filter", jsonFilter);
    writeField(generator, "aggregations", aggregations);
    writeFieldIf(generator, "virtualColumns",
        virtualColumnList.size() > 0 ? virtualColumnList : null);
    writeFieldIf(generator, "postAggregations",
        postAggregations.size() > 0 ? postAggregations : null);
    writeField(generator, "intervals", intervals);
    generator.writeFieldName("context");
    // The following field is necessary to conform with SQL semantics (CALCITE-1589)
    generator.writeStartObject();
    // Count(*) returns 0 if result set is empty thus need to set skipEmptyBuckets to false
    generator.writeBooleanField("skipEmptyBuckets", skipEmptyBuckets);
    generator.writeEndObject();
    generator.close();
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return sw.toString();
}
 
Example 17  Project: Flink-CEPplus   File: Configuration.java
/**
 *  Writes properties and their attributes (final and resource)
 *  to the given {@link Writer}.
 *
 *  <li>
 *  When propertyName is not empty, and the property exists
 *  in the configuration, the format of the output would be,
 *  <pre>
 *  {
 *    "property": {
 *      "key" : "key1",
 *      "value" : "value1",
 *      "isFinal" : "key1.isFinal",
 *      "resource" : "key1.resource"
 *    }
 *  }
 *  </pre>
 *  </li>
 *
 *  <li>
 *  When propertyName is null or empty, it behaves same as
 *  {@link #dumpConfiguration(Configuration, Writer)}, the
 *  output would be,
 *  <pre>
 *  { "properties" :
 *      [ { key : "key1",
 *          value : "value1",
 *          isFinal : "key1.isFinal",
 *          resource : "key1.resource" },
 *        { key : "key2",
 *          value : "value2",
 *          isFinal : "ke2.isFinal",
 *          resource : "key2.resource" }
 *       ]
 *   }
 *  </pre>
 *  </li>
 *
 *  <li>
 *  When propertyName is not empty, and the property is not
 *  found in the configuration, this method will throw an
 *  {@link IllegalArgumentException}.
 *  </li>
 *  <p>
 * @param config the configuration
 * @param propertyName property name
 * @param out the Writer to write to
 * @throws IOException
 * @throws IllegalArgumentException when property name is not
 *   empty and the property is not found in configuration
 **/
public static void dumpConfiguration(Configuration config,
                                     String propertyName, Writer out) throws IOException {
	if(Strings.isNullOrEmpty(propertyName)) {
		dumpConfiguration(config, out);
	} else if (Strings.isNullOrEmpty(config.get(propertyName))) {
		throw new IllegalArgumentException("Property " +
				propertyName + " not found");
	} else {
		JsonFactory dumpFactory = new JsonFactory();
		JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
		dumpGenerator.writeStartObject();
		dumpGenerator.writeFieldName("property");
		appendJSONProperty(dumpGenerator, config, propertyName,
				new ConfigRedactor(config));
		dumpGenerator.writeEndObject();
		dumpGenerator.flush();
	}
}
 
Example 18  Project: Flink-CEPplus   File: Configuration.java
/**
 *  Writes out all properties and their attributes (final and resource) to
 *  the given {@link Writer}, the format of the output would be,
 *
 *  <pre>
 *  { "properties" :
 *      [ { key : "key1",
 *          value : "value1",
 *          isFinal : "key1.isFinal",
 *          resource : "key1.resource" },
 *        { key : "key2",
 *          value : "value2",
 *          isFinal : "ke2.isFinal",
 *          resource : "key2.resource" }
 *       ]
 *   }
 *  </pre>
 *
 *  It does not output the properties of the configuration object which
 *  is loaded from an input stream.
 *  <p>
 *
 * @param config the configuration
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config,
                                     Writer out) throws IOException {
	JsonFactory dumpFactory = new JsonFactory();
	JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
	dumpGenerator.writeStartObject();
	dumpGenerator.writeFieldName("properties");
	dumpGenerator.writeStartArray();
	dumpGenerator.flush();
	ConfigRedactor redactor = new ConfigRedactor(config);
	synchronized (config) {
		for (Map.Entry<Object,Object> item: config.getProps().entrySet()) {
			appendJSONProperty(dumpGenerator, config, item.getKey().toString(),
					redactor);
		}
	}
	dumpGenerator.writeEndArray();
	dumpGenerator.writeEndObject();
	dumpGenerator.flush();
}
 
Example 19  Project: dhis2-core   File: OrganisationUnitController.java
@RequestMapping( value = "", method = RequestMethod.GET, produces = { "application/json+geo", "application/json+geojson" } )
public void getGeoJson(
    @RequestParam( value = "level", required = false ) List<Integer> rpLevels,
    @RequestParam( value = "parent", required = false ) List<String> rpParents,
    @RequestParam( value = "properties", required = false, defaultValue = "true" ) boolean rpProperties,
    User currentUser, HttpServletResponse response ) throws IOException
{
    rpLevels = rpLevels != null ? rpLevels : new ArrayList<>();
    rpParents = rpParents != null ? rpParents : new ArrayList<>();

    List<OrganisationUnit> parents = manager.getByUid( OrganisationUnit.class, rpParents );

    if ( rpLevels.isEmpty() )
    {
        rpLevels.add( 1 );
    }

    if ( parents.isEmpty() )
    {
        parents.addAll( organisationUnitService.getRootOrganisationUnits() );
    }

    List<OrganisationUnit> organisationUnits = organisationUnitService.getOrganisationUnitsAtLevels( rpLevels, parents );

    response.setContentType( "application/json" );

    JsonFactory jsonFactory = new JsonFactory();
    JsonGenerator generator = jsonFactory.createGenerator( response.getOutputStream() );

    generator.writeStartObject();
    generator.writeStringField( "type", "FeatureCollection" );
    generator.writeArrayFieldStart( "features" );

    for ( OrganisationUnit organisationUnit : organisationUnits )
    {
        writeFeature( generator, organisationUnit, rpProperties, currentUser );
    }

    generator.writeEndArray();
    generator.writeEndObject();

    generator.close();
}
 
Example 20  Project: dbsync   File: JSonDatabaseWriter.java
long write(File file) throws IOException {

        long timestamp = System.currentTimeMillis();

        if (file.exists()) {
            file.delete();
        }

        JsonFactory f = new JsonFactory();
        JsonGenerator g = f.createGenerator(file, JsonEncoding.UTF8);
        g.setPrettyPrinter(new DefaultPrettyPrinter() );

        g.writeStartObject();

        g.writeFieldName("database");
        g.writeStartObject();
        // Database name
        g.writeStringField("name",databaseName);

        // Tables
        g.writeFieldName("tables");
        g.writeStartArray();

        for (Map.Entry<String, Integer> entry : tablesToWrite.entrySet()) {

            g.writeStartObject();

            g.writeStringField("name",entry.getKey());

            g.writeArrayFieldStart("records");

            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
            for (int i = 0; i < entry.getValue();++i) {

                g.writeStartObject();

                g.writeNumberField("cId", new Date().getTime());

                String name = entry.getKey();
                name = name + " " + i;

                g.writeStringField("name", name);

                g.writeStringField("dateCreated", simpleDateFormat.format(new Date()));
                g.writeStringField("dateUpdated", simpleDateFormat.format(new Date()));


                g.writeEndObject();
            }

            g.writeEndArray();

            g.writeEndObject();
        }

        g.writeEndArray();
        g.writeEndObject();

        g.writeEndObject();
        g.close();

        return System.currentTimeMillis() - timestamp;
    }