The following are example usages of org.nd4j.shade.jackson.databind.ObjectMapper; follow the linked source to GitHub to view each example in its full context.
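Before the listed snippets, here is a minimal, self-contained round-trip sketch of the API they all rely on. The Point class and ObjectMapperRoundTrip are hypothetical names added only for this illustration; only the shaded ObjectMapper itself comes from the examples below.
import org.nd4j.shade.jackson.databind.ObjectMapper;

public class ObjectMapperRoundTrip {
    //Hypothetical value class used only for this illustration
    public static class Point {
        public double x;
        public double y;
        public Point() {} //no-arg constructor so Jackson can instantiate it
        public Point(double x, double y) { this.x = x; this.y = y; }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(new Point(1.0, 2.0)); //e.g. {"x":1.0,"y":2.0}
        Point back = mapper.readValue(json, Point.class);             //parse it back
        System.out.println(json + " -> x=" + back.x + ", y=" + back.y);
    }
}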
private static ObjectMapper configureMapper(ObjectMapper ret) {
ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, false);
ret.enable(SerializationFeature.INDENT_OUTPUT);
SimpleModule atomicModule = new SimpleModule();
atomicModule.addSerializer(AtomicDouble.class, new JsonSerializerAtomicDouble());
atomicModule.addSerializer(AtomicBoolean.class, new JsonSerializerAtomicBoolean());
atomicModule.addDeserializer(AtomicDouble.class, new JsonDeserializerAtomicDouble());
atomicModule.addDeserializer(AtomicBoolean.class, new JsonDeserializerAtomicBoolean());
ret.registerModule(atomicModule);
//Serialize fields only, not using getters
ret.setVisibilityChecker(ret.getSerializationConfig().getDefaultVisibilityChecker()
.withFieldVisibility(JsonAutoDetect.Visibility.ANY)
.withGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withSetterVisibility(JsonAutoDetect.Visibility.NONE)
.withCreatorVisibility(JsonAutoDetect.Visibility.ANY)
);
return ret;
}
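The visibility settings above make the mapper read and write fields directly while ignoring getters and setters. A minimal sketch of that effect, assuming the configureMapper method above is in scope; FieldOnlyBean and the test name are hypothetical names added for illustration.
public class FieldOnlyBean {
    private int epochs = 10; //private field: still serialized, because field visibility is ANY
    private double learningRate = 0.01;
    public String getSummary() { return epochs + " @ " + learningRate; } //ignored: getter visibility is NONE
}

@Test
public void testFieldOnlySerialization() throws Exception {
    ObjectMapper mapper = configureMapper(new ObjectMapper());
    String json = mapper.writeValueAsString(new FieldOnlyBean());
    //Private fields appear in the JSON (pretty-printed, since INDENT_OUTPUT is enabled),
    //but there is no "summary" entry, because getters are not auto-detected
    assertTrue(json.contains("epochs"));
    assertTrue(json.contains("learningRate"));
    assertFalse(json.contains("summary"));
    FieldOnlyBean back = mapper.readValue(json, FieldOnlyBean.class);
    assertNotNull(back);
}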
@Test
public void testDistributionDeserializer() throws Exception {
//Test current format:
Distribution[] distributions =
new Distribution[] {new NormalDistribution(3, 0.5), new UniformDistribution(-2, 1),
new GaussianDistribution(2, 1.0), new BinomialDistribution(10, 0.3)};
ObjectMapper om = NeuralNetConfiguration.mapper();
for (Distribution d : distributions) {
String json = om.writeValueAsString(d);
Distribution fromJson = om.readValue(json, Distribution.class);
assertEquals(d, fromJson);
}
}
@Test
public void testReadingJson() throws Exception {
//Load 3 values from 3 JSON files
//structure: a:value, b:value, c:x:value, c:y:value
//And we want to load only a:value, b:value and c:x:value
//For first JSON file: all values are present
//For second JSON file: b:value is missing
//For third JSON file: c:x:value is missing
ClassPathResource cpr = new ClassPathResource("json/json_test_0.txt");
String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
InputSplit is = new NumberedFileInputSplit(path, 0, 2);
RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()));
rr.initialize(is);
testJacksonRecordReader(rr);
}
protected static ValidationResult isValidJson(String content, File f) {
try {
ObjectMapper om = new ObjectMapper();
JavaType javaType = om.getTypeFactory().constructMapType(Map.class, String.class, Object.class);
om.readValue(content, javaType); //Don't care about result, just that it can be parsed successfully
} catch (Throwable t) {
//Jackson should also tell us specifically where the error occurred
return ValidationResult.builder()
.valid(false)
.formatType("JSON")
.path(getPath(f))
.issues(Collections.singletonList("File does not appear to be valid JSON"))
.exception(t)
.build();
}
return ValidationResult.builder()
.valid(true)
.formatType("JSON")
.path(getPath(f))
.build();
}
private static ObjectMapper configureMapper(ObjectMapper ret) {
ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, false);
ret.enable(SerializationFeature.INDENT_OUTPUT);
SimpleModule atomicModule = new SimpleModule();
atomicModule.addSerializer(AtomicDouble.class, new JsonSerializerAtomicDouble());
atomicModule.addSerializer(AtomicBoolean.class, new JsonSerializerAtomicBoolean());
atomicModule.addDeserializer(AtomicDouble.class, new JsonDeserializerAtomicDouble());
atomicModule.addDeserializer(AtomicBoolean.class, new JsonDeserializerAtomicBoolean());
ret.registerModule(atomicModule);
//Serialize fields only, not using getters
ret.setVisibilityChecker(ret.getSerializationConfig().getDefaultVisibilityChecker()
.withFieldVisibility(JsonAutoDetect.Visibility.ANY)
.withGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withSetterVisibility(JsonAutoDetect.Visibility.NONE)
.withCreatorVisibility(JsonAutoDetect.Visibility.NONE));
return ret;
}
@Test
public void testJsonSerialization() throws Exception {
INDArray w = Nd4j.create(new double[] {1.0, 2.0, 3.0});
ILossFunction[] lossFns = new ILossFunction[] {new LossBinaryXENT(), new LossBinaryXENT(w),
new LossCosineProximity(), new LossHinge(), new LossKLD(), new LossL1(), new LossL1(w),
new LossL2(), new LossL2(w), new LossMAE(), new LossMAE(w), new LossMAPE(), new LossMAPE(w),
new LossMCXENT(), new LossMCXENT(w), new LossMSE(), new LossMSE(w), new LossMSLE(),
new LossMSLE(w), new LossNegativeLogLikelihood(), new LossNegativeLogLikelihood(w),
new LossPoisson(), new LossSquaredHinge(), new LossMultiLabel()};
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
mapper.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
mapper.enable(SerializationFeature.INDENT_OUTPUT);
for (ILossFunction lf : lossFns) {
String asJson = mapper.writeValueAsString(lf);
// System.out.println(asJson);
ILossFunction fromJson = mapper.readValue(asJson, ILossFunction.class);
assertEquals(lf, fromJson);
}
}
public static TraceEvent[] readEvents(File file) throws IOException {
String content = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
content = StringUtils.trimTrailingWhitespace(content);
//The file may end with a trailing comma and no closing ']'; patch it so the content parses as a JSON array
if (content.endsWith(","))
content = content.substring(0, content.length()-1) + "]";
if (StringUtils.isEmpty(content))
return new TraceEvent[0];
TraceEvent[] events = new ObjectMapper().readValue(content, TraceEvent[].class);
return events;
}
public String toJson() {
ObjectMapper mapper = mapper();
try {
/*
we need the JSON on a single line so it can be stored as the first line of the CSV model file
*/
return mapper.writeValueAsString(this);
} catch (org.nd4j.shade.jackson.core.JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@Test
public void testEventsJson() throws JsonProcessingException {
String content = "[{\"name\":\"Runner\",\"cat\":\"START\",\"ts\":577532080904,\"pid\":17104,\"tid\":1,\"ph\":\"B\"},\n" +
"{\"name\":\"Runner\",\"cat\":\"END\",\"ts\":577532194829,\"pid\":17104,\"tid\":1,\"ph\":\"E\"},\n" +
"{\"name\":\"LoggingPipelineRunner\",\"cat\":\"START\",\"ts\":577532194878,\"pid\":17104,\"tid\":1,\"ph\":\"B\"},\n" +
"{\"name\":\"LoggingPipelineRunner\",\"cat\":\"END\",\"ts\":577532195804,\"pid\":17104,\"tid\":1,\"ph\":\"E\"},\n" +
"{\"name\":\"Runner\",\"cat\":\"START\",\"ts\":577532195829,\"pid\":17104,\"tid\":1,\"ph\":\"B\"}]";
TraceEvent[] events = new ObjectMapper().readValue(content, TraceEvent[].class);
assertEquals("Runner", events[0].getName());
assertEquals("LoggingPipelineRunner", events[2].getName());
}
/**
* Deserialize a JSON DataAnalysis String that was previously serialized with {@link #toJson()}
*/
public static DataAnalysis fromJson(String json) {
try{
return new JsonSerializer().getObjectMapper().readValue(json, DataAnalysis.class);
} catch (Exception e){
//Legacy format
ObjectMapper om = new JsonSerializer().getObjectMapper();
return fromMapper(om, json);
}
}
/**
* Deserialize a YAML DataAnalysis String that was previously serialized with {@link #toYaml()}
*/
public static DataAnalysis fromYaml(String yaml) {
try{
return new YamlSerializer().getObjectMapper().readValue(yaml, DataAnalysis.class);
} catch (Exception e){
//Legacy format
ObjectMapper om = new YamlSerializer().getObjectMapper();
return fromMapper(om, yaml);
}
}
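A short round-trip sketch for the two helpers above, assuming an existing DataAnalysis instance named analysis (the variable is hypothetical; toJson() and toYaml() are the counterparts named in the Javadoc links):
String json = analysis.toJson();   //serialize with the counterpart referenced in the Javadoc
DataAnalysis restoredFromJson = DataAnalysis.fromJson(json);
String yaml = analysis.toYaml();
DataAnalysis restoredFromYaml = DataAnalysis.fromYaml(yaml);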
@Override
public String toJson() {
ObjectMapper om = getJsonMapper();
try {
return om.writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException("Error producing JSON representation for ParameterAveragingTrainingMaster", e);
}
}
private String toYaml(Map<String, List<Map<String, Object>>> jsonRepresentation) {
ObjectMapper om = new YamlSerializer().getObjectMapper();
try {
return om.writeValueAsString(jsonRepresentation);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Test
public void testNDArrayTextSerializer() throws Exception {
for(char order : new char[]{'c', 'f'}) {
Nd4j.factory().setOrder(order);
for (DataType globalDT : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
Nd4j.setDefaultDataTypes(globalDT, globalDT);
Nd4j.getRandom().setSeed(12345);
INDArray in = Nd4j.rand(DataType.DOUBLE, 3, 4).muli(20).subi(10);
val om = new ObjectMapper();
for (DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.LONG, DataType.INT, DataType.SHORT,
DataType.BYTE, DataType.UBYTE, DataType.BOOL, DataType.UTF8}) {
INDArray arr;
if(dt == DataType.UTF8){
arr = Nd4j.create("aaaaa", "bbbb", "ccc", "dd", "e", "f", "g", "h", "i", "j", "k", "l").reshape('c', 3, 4);
} else {
arr = in.castTo(dt);
}
TestClass tc = new TestClass(arr);
String s = om.writeValueAsString(tc);
// System.out.println(dt);
// System.out.println(s);
// System.out.println("\n\n\n");
TestClass deserialized = om.readValue(s, TestClass.class);
assertEquals(dt.toString(), tc, deserialized);
}
}
}
}
private static ObjectMapper getModelMapper() {
ObjectMapper ret = new ObjectMapper();
ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
ret.enable(SerializationFeature.INDENT_OUTPUT);
return ret;
}
private <T> T load(String str, TypeReference<T> typeReference) {
ObjectMapper om = getObjectMapper();
try {
return om.readValue(str, typeReference);
} catch (Exception e) {
//TODO better exception
throw new RuntimeException(e);
}
}
/**
* Create a neural net configuration from YAML
*
* @param json the neural net configuration as a YAML string
* @return {@link ComputationGraphConfiguration}
*/
public static ComputationGraphConfiguration fromYaml(String json) {
ObjectMapper mapper = NeuralNetConfiguration.mapperYaml();
try {
return mapper.readValue(json, ComputationGraphConfiguration.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Create a ParameterAveragingTrainingMaster instance by deserializing a YAML string that has been serialized with
* {@link #toYaml()}
*
* @param yamlStr ParameterAveragingTrainingMaster configuration serialized as YAML
*/
public static ParameterAveragingTrainingMaster fromYaml(String yamlStr) {
ObjectMapper om = getYamlMapper();
try {
return om.readValue(yamlStr, ParameterAveragingTrainingMaster.class);
} catch (IOException e) {
throw new RuntimeException("Could not parse YAML", e);
}
}
/**
* This utility method restores an ElementPair from a Base64-encoded JSON string
*
* @param encoded Base64-encoded JSON representation of an ElementPair
* @return the deserialized ElementPair
*/
protected static ElementPair fromEncodedJson(String encoded) {
ObjectMapper mapper = SequenceElement.mapper();
try {
String decoded = new String(Base64.decodeBase64(encoded), "UTF-8");
return mapper.readValue(decoded, ElementPair.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Serialize a list of IAssociativeReducers
*/
public String serializeReducerList(List<IAssociativeReducer> list) {
ObjectMapper om = getObjectMapper();
try {
return om.writeValueAsString(new ListWrappers.ReducerList(list));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Serialize a list of SequenceComparators
*/
public String serializeSequenceComparatorList(List<SequenceComparator> list) {
ObjectMapper om = getObjectMapper();
try {
return om.writeValueAsString(new ListWrappers.SequenceComparatorList(list));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Serialize a list of DataActions
*/
public String serializeDataActionList(List<DataAction> list) {
ObjectMapper om = getObjectMapper();
try {
return om.writeValueAsString(new ListWrappers.DataActionList(list));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public String toJson() {
ObjectMapper mapper = mapper();
try {
/*
we need the JSON on a single line so it can be stored as the first line of the CSV model file.
This is an ugly approach, but it is far more memory-friendly than loading a whole 10GB JSON file just to create another 10GB in-memory array.
*/
return mapper.writeValueAsString(this);
} catch (org.nd4j.shade.jackson.core.JsonProcessingException e) {
throw new RuntimeException(e);
}
}
@Override
public String toJson() {
ObjectMapper om = getJsonMapper();
try {
return om.writeValueAsString(this);
} catch (JsonProcessingException e) {
throw new RuntimeException("Error producing JSON representation for ParameterAveragingTrainingMaster", e);
}
}
/**
* Get a mapper (minus general config) suitable for loading old format JSON - 1.0.0-alpha and before
* @return Object mapper
*/
public static ObjectMapper getMapper100alpha(){
//After 1.0.0-alpha, we switched from wrapper object to @class for subtype information
ObjectMapper om = new ObjectMapper();
om.addMixIn(InputPreProcessor.class, InputPreProcessorMixin.class);
om.addMixIn(GraphVertex.class, GraphVertexMixin.class);
om.addMixIn(Layer.class, LayerMixin.class);
om.addMixIn(ReconstructionDistribution.class, ReconstructionDistributionMixin.class);
om.addMixIn(IActivation.class, IActivationMixin.class);
om.addMixIn(ILossFunction.class, ILossFunctionMixin.class);
return om;
}
/**
* @return YAML representation of configuration
*/
public String toYaml() {
ObjectMapper mapper = NeuralNetConfiguration.mapperYaml();
synchronized (mapper) {
try {
return mapper.writeValueAsString(this);
} catch (org.nd4j.shade.jackson.core.JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
/**
* Read JSON-formatted string attribute.
*
* @param attribute HDF5 attribute to read as JSON formatted string.
* @return JSON formatted string from HDF5 attribute
* @throws UnsupportedKerasConfigurationException Unsupported Keras config
*/
private String readAttributeAsJson(Attribute attribute) throws UnsupportedKerasConfigurationException {
synchronized (Hdf5Archive.LOCK_OBJECT) {
VarLenType vl = attribute.getVarLenType();
int currBufferLength = 2048;
String s;
/* TODO: find a less hacky way to do this.
* Reading variable length strings (from attributes) is a giant
* pain. There does not appear to be any way to determine the
* length of the string in advance, so we use a hack: choose a
* buffer size and read the config. If Jackson fails to parse
* it, then we must not have read the entire config. Increase
* buffer and repeat.
*/
while (true) {
byte[] attrBuffer = new byte[currBufferLength];
BytePointer attrPointer = new BytePointer(currBufferLength);
attribute.read(vl, attrPointer);
attrPointer.get(attrBuffer);
s = new String(attrBuffer);
ObjectMapper mapper = new ObjectMapper();
mapper.enable(DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY);
try {
mapper.readTree(s);
break;
} catch (IOException e) {
//OK - we don't know how long the buffer needs to be, so we'll try again with a larger buffer
}
if(currBufferLength == MAX_BUFFER_SIZE_BYTES){
throw new UnsupportedKerasConfigurationException("Could not read abnormally long HDF5 attribute: size exceeds " + currBufferLength + " bytes");
} else {
currBufferLength = (int)Math.min(MAX_BUFFER_SIZE_BYTES, currBufferLength * 4L);
}
}
vl.deallocate();
return s;
}
}
@Test
public void testAppendingLabelsMetaData() throws Exception {
ClassPathResource cpr = new ClassPathResource("json/json_test_0.txt");
String path = cpr.getFile().getAbsolutePath().replace("0", "%d");
InputSplit is = new NumberedFileInputSplit(path, 0, 2);
//Insert at the end:
RecordReader rr = new JacksonRecordReader(getFieldSelection(), new ObjectMapper(new JsonFactory()), false, -1,
new LabelGen());
rr.initialize(is);
List<List<Writable>> out = new ArrayList<>();
while (rr.hasNext()) {
out.add(rr.next());
}
assertEquals(3, out.size());
rr.reset();
List<List<Writable>> out2 = new ArrayList<>();
List<Record> outRecord = new ArrayList<>();
List<RecordMetaData> meta = new ArrayList<>();
while (rr.hasNext()) {
Record r = rr.nextRecord();
out2.add(r.getRecord());
outRecord.add(r);
meta.add(r.getMetaData());
}
assertEquals(out, out2);
List<Record> fromMeta = rr.loadFromMetaData(meta);
assertEquals(outRecord, fromMeta);
}
/**
* This utility method serializes this ElementPair into JSON and packs it into a Base64-encoded string
*
* @return Base64-encoded JSON representation of this ElementPair
*/
protected String toEncodedJson() {
ObjectMapper mapper = SequenceElement.mapper();
Base64 base64 = new Base64(Integer.MAX_VALUE);
try {
String json = mapper.writeValueAsString(this);
String output = base64.encodeAsString(json.getBytes("UTF-8"));
return output;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Create a SharedTrainingMaster instance by deserializing a JSON string that has been serialized with
* {@link #toJson()}
*
* @param jsonStr SharedTrainingMaster configuration serialized as JSON
*/
public static SharedTrainingMaster fromJson(String jsonStr) {
ObjectMapper om = getJsonMapper();
try {
return om.readValue(jsonStr, SharedTrainingMaster.class);
} catch (IOException e) {
throw new RuntimeException("Could not parse JSON", e);
}
}