下面列出了 org.apache.kafka.connect.json.JsonConverter 的实例代码(包括结合 org.apache.logging.log4j.core.config.plugins.PluginFactory 的用法),可以点击链接到 GitHub 查看源代码,也可以在右侧发表评论。
/**
 * Builds a {@code MirusOffsetTool} wired against the Kafka Connect offset backing store.
 *
 * <p>Loads worker properties from {@code args.propertiesFile} (if given), uses them to
 * configure a {@code KafkaOffsetBackingStore}, and constructs the offset setter/fetcher
 * pair plus the serializer/deserializer selected by {@code args.format}.
 *
 * @param args parsed command-line arguments; {@code propertiesFile} may be empty
 * @throws IOException if the properties file cannot be read
 */
private static MirusOffsetTool newOffsetTool(Args args) throws IOException {
  // This needs to be the admin topic properties.
  // By default these are in the worker properties file, as this has the admin producer and
  // consumer settings. Separating these might be wise - also useful for storing state in
  // source cluster if it proves necessary.
  final Map<String, String> properties =
      !args.propertiesFile.isEmpty()
          ? Utils.propsToStringMap(Utils.loadProps(args.propertiesFile))
          : Collections.emptyMap();
  // NOTE(review): DistributedConfig validates required worker configs; an incomplete
  // properties file will fail here rather than later — confirm that is the intent.
  final DistributedConfig config = new DistributedConfig(properties);
  final KafkaOffsetBackingStore offsetBackingStore = new KafkaOffsetBackingStore();
  offsetBackingStore.configure(config);
  // Avoid initializing the entire Kafka Connect plugin system by assuming the
  // internal.[key|value].converter is org.apache.kafka.connect.json.JsonConverter
  final Converter internalConverter = new JsonConverter();
  // Configured as a key converter (isKey == true) from the "internal.key.converter." prefix.
  internalConverter.configure(config.originalsWithPrefix("internal.key.converter."), true);
  final OffsetSetter offsetSetter = new OffsetSetter(internalConverter, offsetBackingStore);
  final OffsetFetcher offsetFetcher = new OffsetFetcher(config, internalConverter);
  final OffsetSerDe offsetSerDe = OffsetSerDeFactory.create(args.format);
  return new MirusOffsetTool(args, offsetFetcher, offsetSetter, offsetSerDe);
}
@Test
public void customConvertersShouldBeInstantiated() {
  // Point every converter slot (key, value, header) at the Connect JSON converter.
  final String jsonConverterClass = "org.apache.kafka.connect.json.JsonConverter";
  properties = new HashMap<>();
  properties.put("source.key.converter", jsonConverterClass);
  properties.put("source.value.converter", jsonConverterClass);
  properties.put("source.header.converter", jsonConverterClass);

  TaskConfig config = new TaskConfig(properties);

  // Each accessor must have instantiated the configured class.
  assertThat(config.getKeyConverter(), instanceOf(JsonConverter.class));
  assertThat(config.getValueConverter(), instanceOf(JsonConverter.class));
  assertThat(config.getHeaderConverter(), instanceOf(JsonConverter.class));
}
public TigerGraphSinkTask() {
  // The connection is established later (presumably in start()) — TODO confirm.
  this.conn = null;
  this.converter = new JsonConverter();
  this.gson = new Gson();
  // Batching and timing state starts from a clean slate.
  this.accumulated = 0;
  this.parseTime = 0;
  this.ret = new StringBuilder();
  this.lastCommitTime = System.currentTimeMillis();
}
public JsonMessageBuilder() {
  log.info("Building messages using com.ibm.eventstreams.connect.mqsink.builders.JsonMessageBuilder");
  // Emit bare JSON payloads: schemas disabled, value-mode conversion.
  HashMap<String, String> config = new HashMap<>();
  config.put("schemas.enable", "false");
  converter = new JsonConverter();
  // isKey == false: this converter handles record values, not keys.
  converter.configure(config, false);
}
public JsonRecordBuilder() {
  log.info("Building records using com.ibm.eventstreams.connect.mqsource.builders.JsonRecordBuilder");
  converter = new JsonConverter();
  // Strip the schema envelope — only the payload should appear in the output.
  HashMap<String, String> settings = new HashMap<>();
  settings.put("schemas.enable", "false");
  // Configure for value conversion (isKey == false).
  converter.configure(settings, false);
}
/**
 * Default constructor.
 *
 * <p>Sets up a pair of schemaless JSON converters: one configured for record
 * keys and one for record values.
 */
public JsonEventParser() {
  Map<String, String> schemalessConfig = new HashMap<>(1);
  schemalessConfig.put("schemas.enable", Boolean.FALSE.toString());

  this.keyConverter = new JsonConverter();
  this.keyConverter.configure(schemalessConfig, true);

  this.valueConverter = new JsonConverter();
  this.valueConverter.configure(schemalessConfig, false);
}
public KafkaSchemaWrappedSchema(org.apache.pulsar.kafka.shade.avro.Schema schema,
                                Converter converter) {
  // The converter implementation tells us whether this is a JSON- or
  // Avro-backed schema; everything else is derived from that distinction.
  boolean isJsonConverter = converter instanceof JsonConverter;

  final String schemaName;
  final SchemaType schemaType;
  final String offsetValue;
  if (isJsonConverter) {
    // NOTE(review): "KafKaJson" capitalization looks accidental — confirm it is
    // intentional before changing, since the name may be persisted downstream.
    schemaName = "KafKaJson";
    schemaType = SchemaType.JSON;
    offsetValue = "0";
  } else {
    schemaName = "KafkaAvro";
    schemaType = SchemaType.AVRO;
    offsetValue = "5";
  }

  Map<String, String> props = new HashMap<>();
  props.put(GenericAvroSchema.OFFSET_PROP, offsetValue);

  this.schemaInfo = SchemaInfo.builder()
      .name(schemaName)
      .type(schemaType)
      .schema(schema.toString().getBytes(UTF_8))
      .properties(props)
      .build();
}
/**
 * Log4j plugin factory: builds a layout that renders Connect {@code Struct}s as JSON.
 *
 * @param properties the layout's {@code <Property>} elements; Log4j passes {@code null}
 *     (not an empty array) when the element is absent
 * @return a layout that serializes each struct via a JSON converter
 */
@PluginFactory
public static StructuredLayout createLayout(
    @PluginElement("Properties") final Property[] properties) {
  final JsonConverter converter = new JsonConverter();
  // Guard against a missing Properties element: streaming a null array would NPE.
  // On duplicate property names, last one wins instead of Collectors.toMap throwing.
  converter.configure(
      properties == null
          ? java.util.Collections.<String, String>emptyMap()
          : Arrays.stream(properties).collect(
              Collectors.toMap(Property::getName, Property::getValue, (first, second) -> second)),
      false); // isKey == false: the layout serializes record values
  return new StructuredLayout(struct -> converter.fromConnectData("", struct.schema(), struct));
}
public ExtJsonConverter(RegistryService client) {
  super(client);
  // Collaborators: pretty-printing output strategy, Jackson mapper, and a
  // plain Connect JSON converter.
  this.formatStrategy = new PrettyFormatStrategy();
  this.mapper = new ObjectMapper();
  this.jsonConverter = new JsonConverter();
}
// Builds a fresh value-mode JsonConverter initialized from the shared configs.
private JsonConverter createNewConverter() {
  final JsonConverter converter = new JsonConverter();
  converter.configure(configs, false);
  return converter;
}
/**
 * Creates a serializer that delegates JSON encoding to the given converter.
 *
 * @param jsonConverter converter used for serialization; presumably already
 *     configured by the caller — TODO confirm
 */
public SchemaJsonSerializer(JsonConverter jsonConverter) {
  this.jsonConverter = jsonConverter;
}
/**
 * Creates a deserializer that delegates JSON decoding to the given converter.
 *
 * @param jsonConverter converter used for deserialization; presumably already
 *     configured by the caller — TODO confirm
 */
public SchemaJsonDeserializer(JsonConverter jsonConverter) {
  this.jsonConverter = jsonConverter;
}
@Before
public void setUp() {
  // An unconfigured JSON converter is sufficient for these tests.
  JsonConverter jsonConverter = new JsonConverter();
  offsetSetter = new OffsetSetter(jsonConverter, kafkaOffsetBackingStore);
}
// Returns a value-mode JsonConverter with the schema envelope disabled.
private static JsonConverter newSchemalessJsonConverter() {
  final JsonConverter schemaless = new JsonConverter();
  schemaless.configure(singletonMap("schemas.enable", false), false);
  return schemaless;
}