org.apache.avro.Schema Code Examples

The following are example usages of org.apache.avro.Schema, collected from the open-source projects listed with each example.
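
Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) showing the two usual ways to obtain a Schema: parsing a JSON definition, and composing one with SchemaBuilder.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class SchemaBasics {
    public static void main(String[] args) {
        // Parse a schema from its JSON definition...
        Schema parsed = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
                        + "[{\"name\":\"name\",\"type\":\"string\"}]}");

        // ...or build the equivalent schema programmatically.
        Schema built = SchemaBuilder.record("User").fields()
                .requiredString("name")
                .endRecord();

        System.out.println(parsed.equals(built)); // true
    }
}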

Example 1 | Project: components | File: MarketoConstants.java
public static Schema getListOperationRejectRESTSchema() {
    return SchemaBuilder.builder().record("REST").fields() //
            .name(FIELD_LIST_ID)//
            .prop(SchemaConstants.TALEND_IS_LOCKED, "true")//
            .type().nullable().intType().noDefault() //
            .name(FIELD_LEAD_ID)//
            .prop(SchemaConstants.TALEND_IS_LOCKED, "true")//
            .type().nullable().intType().noDefault() //
            .name(FIELD_STATUS)//
            .prop(SchemaConstants.TALEND_IS_LOCKED, "true")//
            .type().nullable().stringType().noDefault() //
            .name(FIELD_ERROR_MSG)//
            .prop(SchemaConstants.TALEND_IS_LOCKED, "true")//
            .type().nullable().stringType().noDefault() //
            .endRecord();
}
 
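Example 2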
@Test(expected = Exception.class)
public void demoteDoubleToLong() throws Exception {
  Schema schema = getSchemaFieldAssembler().requiredDouble(EVOLUTION_COLUMN)
      .endRecord();
  Schema evolvedSchema = getSchemaFieldAssembler().requiredLong(EVOLUTION_COLUMN)
      .endRecord();
  FieldDataWrapper beforeEvolution = new FieldDataWrapper(EVOLUTION_COLUMN, 1d);
  FieldDataWrapper afterEvolution = new FieldDataWrapper(EVOLUTION_COLUMN, 2L);
  List<String> expectedData = Lists.newArrayList(
      "1\t1.0\t1",
      "2\t2.0\t2"
  );

  runTest(schema, evolvedSchema, beforeEvolution, afterEvolution);
  runDataChecks(evolvedSchema, expectedData);
}
 
Example 3 | Project: big-c | File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
 
Example 4 | Project: envelope | File: AvroSerializer.java
private void validateSchemaIsSupported(Schema schema) {
  for (Field field : schema.getFields()) {
    Type type = field.schema().getType();

    if (type.equals(Type.UNION)) {
      List<Schema> types = field.schema().getTypes();

      if (types.size() != 2) {
        throw new RuntimeException("Union type in Avro serializer schema must only contain two types");
      }

      if (types.get(0).getType().equals(Type.NULL)) {
        type = types.get(1).getType();
      }
      else {
        type = types.get(0).getType();
      }
    }

    if (!supportedTypes.contains(type)) {
      throw new RuntimeException("Avro serializer for Kafka output does not support Avro schema type: " + type);
    }
  }
}
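
The two-branch unions this validator unwraps are Avro's standard encoding of nullable fields. A short standalone sketch of such a union, using the plain Avro API rather than Envelope code:

// A nullable string is a union of NULL and STRING; getTypes() returns the
// branches in order, which is what the loop above inspects.
Schema nullableString = Schema.createUnion(
        Schema.create(Schema.Type.NULL),
        Schema.create(Schema.Type.STRING));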
 
Example 5 | Project: components | File: NetSuiteOutputTransducerTest.java
@Test
public void testBasic() throws Exception {

    NetSuiteRuntime netSuiteRuntime = new TestNetSuiteRuntimeImpl(webServiceMockTestFixture.getClientFactory());
    NetSuiteDatasetRuntime dataSetRuntime = netSuiteRuntime.getDatasetRuntime(mockTestFixture.getConnectionProperties());

    mockGetRequestResults(null);

    TypeDesc typeDesc = clientService.getMetaDataSource().getTypeInfo("Opportunity");

    Schema schema = dataSetRuntime.getSchema(typeDesc.getTypeName());

    NsObjectOutputTransducer transducer = new NsObjectOutputTransducer(
            webServiceMockTestFixture.getClientService(), typeDesc.getTypeName());

    List<IndexedRecord> indexedRecordList = makeIndexedRecords(clientService, schema,
            new AbstractNetSuiteTestBase.SimpleObjectComposer<>(Opportunity.class), 10);

    for (IndexedRecord indexedRecord : indexedRecordList) {
        Opportunity record = (Opportunity) transducer.write(indexedRecord);
        assertNsObject(typeDesc, record);
    }
}
 
Example 6 | Project: components | File: TypeConverterUtilsTest.java
@Test
public void testConvertIntToString() {
    // No format
    String conv = testConvertValue(Schema.create(Schema.Type.INT), 123,
            TypeConverterProperties.TypeConverterOutputTypes.String, null, String.class);
    assertThat(conv, is("123"));

    // One format
    conv = testConvertValue(Schema.create(Schema.Type.INT), 123, TypeConverterProperties.TypeConverterOutputTypes.String,
            "'#'#", String.class);
    assertThat(conv, is("#123"));

    // Another format
    conv = testConvertValue(Schema.create(Schema.Type.INT), 123456, TypeConverterProperties.TypeConverterOutputTypes.String,
            "#,###.00", String.class);
    assertThat(conv, is("123,456.00"));
}
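
The format strings in this test are java.text.DecimalFormat patterns, which is what produces the expected outputs. A standalone illustration (assuming an English-style locale for the grouping separator):

import java.text.DecimalFormat;

public class FormatDemo {
    public static void main(String[] args) {
        // A quoted literal '#' followed by the digits: prints "#123"
        System.out.println(new DecimalFormat("'#'#").format(123));
        // Grouping separator plus two forced decimal places: prints "123,456.00"
        System.out.println(new DecimalFormat("#,###.00").format(123456));
    }
}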
 
Example 7 | Project: spork | File: AvroSchema2Pig.java
/**
 * Convert an Avro schema to a Pig schema
 */
public static ResourceSchema convert(Schema schema) throws IOException {

    if (AvroStorageUtils.containsGenericUnion(schema))
        throw new IOException("We don't accept schemas containing generic unions.");

    Set<Schema> visitedRecords = new HashSet<Schema>();
    ResourceFieldSchema inSchema = inconvert(schema, FIELD, visitedRecords);

    ResourceSchema tupleSchema;
    if (inSchema.getType() == DataType.TUPLE) {
        tupleSchema = inSchema.getSchema();
    } else { // other types
        ResourceFieldSchema tupleWrapper = AvroStorageUtils.wrapAsTuple(inSchema);

        ResourceSchema topSchema = new ResourceSchema();
        topSchema.setFields(new ResourceFieldSchema[] { tupleWrapper });

        tupleSchema = topSchema;

    }
    return tupleSchema;
}
 
Example 8 | Project: incubator-gobblin | File: AvroUtilsTest.java
@Test
public void testDecorateRecordWithPrimitiveField() {
  Schema inputRecordSchema = SchemaBuilder.record("test").fields()
          .name("integer1")
          .prop("innerProp", "innerVal")
          .type().intBuilder().endInt().noDefault()
          .requiredString("string1")
          .endRecord();

  GenericRecord inputRecord = new GenericData.Record(inputRecordSchema);
  inputRecord.put("integer1", 10);
  inputRecord.put("string1", "hello");

  Schema outputRecordSchema = AvroUtils.decorateRecordSchema(inputRecordSchema, Collections.singletonList(new Schema.Field("newField", SchemaBuilder.builder().intType(), "test field", null)));
  Map<String, Object> newFields = new HashMap<>();
  newFields.put("newField", 5);

  GenericRecord outputRecord = AvroUtils.decorateRecord(inputRecord, newFields, outputRecordSchema);
  Assert.assertEquals(outputRecord.get("newField"), 5);
  Assert.assertEquals(outputRecord.get("integer1"), 10);
  Assert.assertEquals(outputRecord.get("string1"), "hello");

}
 
Example 9 | Project: parquet-mr | File: TestGenericLogicalTypes.java
@Test
public void testReadUUIDWithParquetUUID() throws IOException {
  Schema uuidSchema = record("R",
      field("uuid", LogicalTypes.uuid().addToSchema(Schema.create(STRING))));
  GenericRecord u1 = instance(uuidSchema, "uuid", UUID.randomUUID());
  GenericRecord u2 = instance(uuidSchema, "uuid", UUID.randomUUID());
  File test = write(conf(AvroWriteSupport.WRITE_PARQUET_UUID, true), uuidSchema, u1, u2);

  Assert.assertEquals("Should read UUID objects",
      Arrays.asList(u1, u2), read(GENERIC, uuidSchema, test));

  GenericRecord s1 = instance(uuidSchema, "uuid", u1.get("uuid").toString());
  GenericRecord s2 = instance(uuidSchema, "uuid", u2.get("uuid").toString());

  Assert.assertEquals("Should read UUID as Strings",
      Arrays.asList(s1, s2), read(GenericData.get(), uuidSchema, test));
}
 
Example 10 | Project: beam | File: ParquetIOTest.java
@Test
public void testWriteAndReadUsingReflectDataSchemaWithDataModel() {
  Schema testRecordSchema = ReflectData.get().getSchema(TestRecord.class);

  List<GenericRecord> records = generateGenericRecords(1000);
  mainPipeline
      .apply(Create.of(records).withCoder(AvroCoder.of(testRecordSchema)))
      .apply(
          FileIO.<GenericRecord>write()
              .via(ParquetIO.sink(testRecordSchema))
              .to(temporaryFolder.getRoot().getAbsolutePath()));
  mainPipeline.run().waitUntilFinish();

  PCollection<GenericRecord> readBack =
      readPipeline.apply(
          ParquetIO.read(testRecordSchema)
              .withAvroDataModel(GenericData.get())
              .from(temporaryFolder.getRoot().getAbsolutePath() + "/*"));

  PAssert.that(readBack).containsInAnyOrder(records);
  readPipeline.run().waitUntilFinish();
}
 
Example 11 | Project: apicurio-registry | File: SchemasConfluentIT.java
@Test
void createAndUpdateSchema() throws Exception {
    String artifactId = TestUtils.generateArtifactId();

    Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}");
    createArtifactViaConfluentClient(schema, artifactId);

    Schema updatedSchema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"foo2\",\"type\":\"long\"}]}");
    createArtifactViaConfluentClient(updatedSchema, artifactId);

    assertThrows(SchemaParseException.class, () -> new Schema.Parser().parse("<type>record</type>\n<name>test</name>"));
    assertThat(confluentService.getAllVersions(artifactId), hasItems(1, 2));

    confluentService.deleteSubject(artifactId);
    waitForSubjectDeleted(artifactId);
}
 
Example 12 | Project: incubator-gobblin | File: AvroExternalTable.java
private AvroExternalTable(AvroExternalTable.Builder builder) throws IOException {
  super(builder);

  if (builder.moveDataToTmpHdfsDir) {
    this.dataLocationInHdfs = moveDataFileToSeparateHdfsDir(builder.dataLocationInHdfs, builder.extensionToBeMoved);
    this.deleteDataAfterDone = true;
  } else {
    this.dataLocationInHdfs = builder.dataLocationInHdfs;
    this.deleteDataAfterDone = false;
  }

  if (StringUtils.isNotBlank(builder.schemaLocationInHdfs)) {
    this.schemaLocationInHdfs = builder.schemaLocationInHdfs;
    this.attributes = getAttributesFromAvroSchemaFile();
    this.deleteSchemaAfterDone = false;
  } else {
    Schema schema = getSchemaFromAvroDataFile();
    this.attributes = parseSchema(schema);
    this.schemaLocationInHdfs = writeSchemaToHdfs(schema);
    this.deleteSchemaAfterDone = true;
  }
}
 
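Example 13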
/**
 * For a schema that is a UNION of NULL and a RECORD type, returns the RECORD schema.
 *
 * @param inputSchema a RECORD schema, or a UNION of NULL and a RECORD
 * @return the underlying RECORD schema
 */
private static Schema getActualRecord(Schema inputSchema) {
  if (Type.RECORD.equals(inputSchema.getType())) {
    return inputSchema;
  }

  Preconditions.checkArgument(Type.UNION.equals(inputSchema.getType()),
      "Nested schemas are only supported as a record or as a union of null with a record");
  Preconditions.checkArgument(inputSchema.getTypes().size() <= 2,
      "A union in a nested record should only contain NULL and a record type");

  for (Schema inner : inputSchema.getTypes()) {
    if (Type.NULL.equals(inner.getType())) {
      continue;
    }
    Preconditions.checkArgument(Type.RECORD.equals(inner.getType()),
        "A union in a nested record should only contain NULL and a record type");
    return inner;
  }
  throw new IllegalArgumentException(inputSchema + " is not supported.");
}
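
A brief usage sketch for the helper above, with hypothetical schemas built via the plain Avro API:

Schema record = SchemaBuilder.record("Inner").fields()
        .requiredString("value")
        .endRecord();
Schema nullableRecord = Schema.createUnion(
        Schema.create(Schema.Type.NULL), record);

getActualRecord(record);         // returns record as-is
getActualRecord(nullableRecord); // unwraps the union, returns record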
 
Example 14 | Project: component-runtime | File: AvroSchema.java
private Type doMapType(final Schema schema) {
    switch (schema.getType()) {
    case LONG:
        if (Boolean.parseBoolean(readProp(schema, Type.DATETIME.name()))
                || LogicalTypes.timestampMillis().equals(LogicalTypes.fromSchemaIgnoreInvalid(schema))) {
            return Type.DATETIME;
        }
        return Type.LONG;
    default:
        return Type.valueOf(schema.getType().name());
    }
}
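
The timestamp branch above relies on Avro logical types. A standalone sketch of the calls involved (assuming Avro 1.8+, where LogicalTypes.fromSchemaIgnoreInvalid is available):

import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;

Schema tsMillis = LogicalTypes.timestampMillis()
        .addToSchema(Schema.create(Schema.Type.LONG));
// fromSchemaIgnoreInvalid returns the attached logical type, or null if none
boolean isTimestamp = LogicalTypes.timestampMillis()
        .equals(LogicalTypes.fromSchemaIgnoreInvalid(tsMillis)); // true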
 
Example 15 | Project: Cubert | File: AvroSchemaManager.java
/**
 * Look up schema using type name or field name
 */
public Schema getSchema(String name) {
    Schema schema = typeName2Schema.get(name);
    return (schema == null) ? name2Schema.get(name) : schema;
}
 
Example 16 | Project: parquet-mr | File: AvroWriteSupport.java
/**
 * Calls an appropriate write method based on the value.
 * Value MUST not be null.
 *
 * @param type the Parquet type
 * @param avroSchema the Avro schema
 * @param value a non-null value to write
 */
private void writeValue(Type type, Schema avroSchema, Object value) {
  Schema nonNullAvroSchema = AvroSchemaConverter.getNonNull(avroSchema);
  LogicalType logicalType = nonNullAvroSchema.getLogicalType();
  if (logicalType != null) {
    Conversion<?> conversion = model.getConversionByClass(
        value.getClass(), logicalType);
    writeValueWithoutConversion(type, nonNullAvroSchema,
        convert(nonNullAvroSchema, logicalType, conversion, value));
  } else {
    writeValueWithoutConversion(type, nonNullAvroSchema, value);
  }
}
 
Example 17 | Project: simplesource | File: AvroGenericUtils.java
private static Schema generateSchema(final Schema aggregateSchema) {
    return SchemaBuilder
            .record(aggregateSchema.getName() + "OptionalAggregateWithSequence").namespace(aggregateSchema.getNamespace())
            .fields()
            .name(AGGREGATION).type(toNullableSchema(aggregateSchema)).withDefault(null)
            .name(SEQUENCE).type().longType().noDefault()
            .endRecord();
}
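
toNullableSchema is not shown in this excerpt; a typical implementation of such a helper (an assumption on my part, not necessarily simplesource's code) wraps the schema in a union with null, which is also what makes withDefault(null) valid above:

private static Schema toNullableSchema(final Schema schema) {
    // null first, so that a null default value is legal for the field
    return Schema.createUnion(Schema.create(Schema.Type.NULL), schema);
}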
 
Example 18 | Project: flink | File: AvroSchemaConverter.java
private static SchemaBuilder.BaseTypeBuilder<Schema> getNullableBuilder(LogicalType logicalType) {
	SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder();
	if (logicalType.isNullable()) {
		return builder.nullable();
	}
	return builder;
}
 
Example 19 | Project: xml-avro | File: SchemaBuilder.java
private Schema createRecordSchema(String name, XSComplexTypeDefinition type) {
    Schema record = Schema.createRecord(name, null, null, false);
    schemas.put(name, record);

    record.setFields(createFields(type));
    return record;
}
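
Note the two-phase construction: the record is created and registered in the schemas map before setFields is called, which is what lets recursive complex types refer to the record while it is still being built. The same pattern in a standalone sketch (hypothetical Node type, assuming Avro 1.9+ for the Field constructor used):

import java.util.Arrays;
import org.apache.avro.JsonProperties;
import org.apache.avro.Schema;

Schema node = Schema.createRecord("Node", null, "example", false);
node.setFields(Arrays.asList(
        new Schema.Field("value", Schema.create(Schema.Type.STRING), null, null),
        // the self-reference is legal because 'node' already exists
        new Schema.Field("next",
                Schema.createUnion(Schema.create(Schema.Type.NULL), node),
                null, JsonProperties.NULL_VALUE)));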
 
Example 20 | Project: kite | File: SchemaCommand.java
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
    value={"NP_GUARANTEED_DEREF", "NP_NULL_ON_SOME_PATH"},
    justification="Null case checked by precondition")
public int run() throws IOException {
  Preconditions.checkArgument(
      datasets != null && !datasets.isEmpty(),
      "Missing dataset name");
  if (merge || datasets.size() == 1) {
    Schema mergedSchema = null;
    for (String uriOrPath : datasets) {
      mergedSchema = merge(mergedSchema, schema(uriOrPath));
    }

    Preconditions.checkNotNull(mergedSchema, "No valid schema found");

    output(mergedSchema.toString(!minimize), console, outputPath);

  } else {
    Preconditions.checkArgument(outputPath == null,
        "Cannot output multiple schemas to one file");
    for (String name : datasets) {
      console.info("Dataset \"{}\" schema: {}",
          name, schema(name).toString(!minimize));
    }
  }
  return 0;
}
 
Example 21 | Project: kite | File: DatasetKeyInputFormat.java
@SuppressWarnings({"deprecation", "unchecked"})
private static <E> View<E> load(Configuration conf) {
  Class<E> type;
  try {
    type = (Class<E>)conf.getClass(KITE_TYPE, GenericData.Record.class);
  } catch (RuntimeException e) {
    if (e.getCause() instanceof ClassNotFoundException) {
      throw new TypeNotFoundException(String.format(
          "The Java class %s for the entity type could not be found",
          conf.get(KITE_TYPE)),
          e.getCause());
    } else {
      throw e;
    }
  }

  DefaultConfiguration.set(conf);

  String schemaStr = conf.get(KITE_READER_SCHEMA);
  Schema projection = null;
  if (schemaStr != null) {
    projection = new Schema.Parser().parse(schemaStr);
  }

  String inputUri = conf.get(KITE_INPUT_URI);
  if (projection != null) {
    return Datasets.load(inputUri).asSchema(projection).asType(type);
  } else {
    return Datasets.load(inputUri, type);
  }
}
 
Example 22 | Project: nifi | File: TestPutHiveStreaming.java
private void assertOutputAvroRecords(List<Map<String, Object>> expectedRecords, MockFlowFile resultFlowFile) throws IOException {
    assertEquals(String.valueOf(expectedRecords.size()), resultFlowFile.getAttribute(PutHiveStreaming.HIVE_STREAMING_RECORD_COUNT_ATTR));

    final DataFileStream<GenericRecord> reader = new DataFileStream<>(
            new ByteArrayInputStream(resultFlowFile.toByteArray()),
            new GenericDatumReader<GenericRecord>());

    Schema schema = reader.getSchema();

    // Verify that the schema is preserved
    assertTrue(schema.equals(new Schema.Parser().parse(new File("src/test/resources/user.avsc"))));

    GenericRecord record = null;
    for (Map<String, Object> expectedRecord : expectedRecords) {
        assertTrue(reader.hasNext());
        record = reader.next(record);
        final String name = record.get("name").toString();
        final Integer favorite_number = (Integer) record.get("favorite_number");
        assertNotNull(name);
        assertNotNull(favorite_number);
        assertNull(record.get("favorite_color"));
        assertNull(record.get("scale"));

        assertEquals(expectedRecord.get("name"), name);
        assertEquals(expectedRecord.get("favorite_number"), favorite_number);
    }
    assertFalse(reader.hasNext());
}
 
Example 23 | Project: kite | File: TestDataModelUtil.java
@Test
public void testResolveTypeObjectToGeneric() {
  Class<Object> type = Object.class;
  Schema schema = SchemaBuilder.record("User").fields()
      .requiredString("name")
      .requiredString("color")
      .endRecord();
  Class expResult = GenericData.Record.class;
  Class result = DataModelUtil.resolveType(type, schema);
  assertEquals(expResult, result);
}
 
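Example 24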
@Override
protected Schema fetchSchemaByKey(Integer key) throws SchemaRegistryException {
  try {
    return this.schemaRegistryClient.getByID(key);
  } catch (IOException | RestClientException e) {
    throw new SchemaRegistryException(e);
  }
}
 
Example 25 | Project: components | File: MarketoSOAPClientTest.java
@Test
public void testGetLeadActivity() throws Exception {
    doReturn(getLeadActivityResult()).when(port).getLeadActivity(any(ParamsGetLeadActivity.class),
            any(AuthenticationHeader.class));
    iprops.inputOperation.setValue(InputOperation.getLeadActivity);
    iprops.afterInputOperation();
    Field attr = new Field("attrName", AvroUtils._string(), "", null);
    iprops.schemaInput.schema
            .setValue(MarketoUtils.newSchema(iprops.schemaInput.schema.getValue(), "test", Collections.singletonList(attr)));
    iprops.beforeMappingInput();
    iprops.leadKeyTypeSOAP.setValue(LeadKeyTypeSOAP.IDNUM);
    mktoRR = client.getLeadActivity(iprops, null);
    assertNotNull(mktoRR);
    assertTrue(mktoRR.isSuccess());
    List<IndexedRecord> records = mktoRR.getRecords();
    assertNotNull(records);
    IndexedRecord record = records.get(0);
    assertNotNull(record);
    Schema refSchema = iprops.schemaInput.schema.getValue();
    assertEquals(refSchema, record.getSchema());
    assertEquals("ABC-123-DEF", record.get(refSchema.getField("marketoGUID").pos()));
    assertEquals(123456L, record.get(refSchema.getField("Id").pos()));
    assertEquals("mktgAssetName", record.get(refSchema.getField("MktgAssetName").pos()));
    assertTrue(record.get(refSchema.getField("ActivityDateTime").pos()) instanceof Long);
    assertEquals("activityType", record.get(refSchema.getField("ActivityType").pos()));
    assertEquals("mktgAssetName", record.get(refSchema.getField("MktgAssetName").pos()));
    assertEquals("mktPersonId", record.get(refSchema.getField("MktPersonId").pos()));
    assertEquals("campaign", record.get(refSchema.getField("Campaign").pos()));
    assertEquals("foreignSysId", record.get(refSchema.getField("ForeignSysId").pos()));
    assertEquals("personName", record.get(refSchema.getField("PersonName").pos()));
    assertEquals("orgName", record.get(refSchema.getField("OrgName").pos()));
    assertEquals("foreignSysOrgId", record.get(refSchema.getField("ForeignSysOrgId").pos()));
    assertEquals("attrValue", record.get(refSchema.getField("attrName").pos()));
    //
    doThrow(new RuntimeException("error")).when(port).getLeadActivity(any(ParamsGetLeadActivity.class),
            any(AuthenticationHeader.class));
    mktoRR = client.getLeadActivity(iprops, null);
    assertNotNull(mktoRR);
    assertFalse(mktoRR.isSuccess());
}
 
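Example 26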
@Test
public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException {
  WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
  mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

  Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE).fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .optionalString(TEST_FIELD_NAME2).endRecord();

  GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build();

  SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
  when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1);

  Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient);
  Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

  ByteBuffer testGenericRecordByteBuffer =
      ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord));

  KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
  when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2);

  KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState,
      Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry);

  when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2);

  ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer);

  GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset);
  Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}");
}
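
Kafka machinery aside, the behavior under test is plain Avro schema resolution: bytes are decoded with the writer schema (schemaV1) and projected onto the reader schema (schemaV2), with the absent optional field taking its default. A minimal standalone sketch of that resolution, reusing the schemas and record from above and no Confluent classes:

ByteArrayOutputStream out = new ByteArrayOutputStream();
BinaryEncoder binEncoder = EncoderFactory.get().binaryEncoder(out, null);
new GenericDatumWriter<GenericRecord>(schemaV1).write(testGenericRecord, binEncoder);
binEncoder.flush();

BinaryDecoder binDecoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
GenericRecord evolved = new GenericDatumReader<GenericRecord>(schemaV1, schemaV2)
        .read(null, binDecoder);
// evolved: {"testField": "testValue", "testField2": null}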
 
Example 27 | Project: nifi | File: AvroRecordSetWriter.java
@OnEnabled
public void onEnabled(final ConfigurationContext context) {
    final int cacheSize = context.getProperty(CACHE_SIZE).asInteger();
    compiledAvroSchemaCache = Caffeine.newBuilder()
            .maximumSize(cacheSize)
            .build(schemaText -> new Schema.Parser().parse(schemaText));

    final int capacity = context.getProperty(ENCODER_POOL_SIZE).evaluateAttributeExpressions().asInteger();
    encoderPool = new LinkedBlockingQueue<>(capacity);
}
 
Example 28 | Project: xml-avro | File: ConverterTest.java
@Test
public void uniqueFieldNames() {
    String xsd =
            "<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>" +
            "  <xs:complexType name='type'>" +
            "    <xs:sequence>" +
            "      <xs:element name='field' type='xs:string'/>" +
            "    </xs:sequence>" +
            "    <xs:attribute name='field' type='xs:string'/>" +
            "  </xs:complexType>" +
            "  <xs:element name='root' type='type'/>" +
            "</xs:schema>";

    Schema schema = Converter.createSchema(xsd);

    assertEquals(2, schema.getFields().size());
    Schema.Field field = schema.getField("field");
    assertNotNull(field);
    assertEquals("" + new Source("field", true), field.getProp(Source.SOURCE));

    Schema.Field field0 = schema.getField("field0");
    assertEquals("" + new Source("field", false), field0.getProp(Source.SOURCE));

    String xml = "<root field='value'><field>value0</field></root>";
    GenericData.Record record = Converter.createDatum(schema, xml);

    assertEquals("value", record.get("field"));
    assertEquals("value0", record.get("field0"));
}
 
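Example 29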
@Test
public void testNonSchemaConstructableNewInstance() {
  Schema schema = Mockito.mock(Schema.class);
  Object instance = AvroCompatibilityHelper.newInstance(Pojo.class, schema);
  Assert.assertNotNull(instance);
  Assert.assertTrue(instance instanceof Pojo);
}
 
Example 30 | Project: components | File: PythonRowDoFn.java
private void map(IndexedRecord input, ProcessContext context) throws IOException {
    PyObject output = pyFn.__call__(new PyUnicode(input.toString()));

    if (jsonGenericRecordConverter == null) {
        JsonSchemaInferrer jsonSchemaInferrer = new JsonSchemaInferrer(new ObjectMapper());
        Schema jsonSchema = jsonSchemaInferrer.inferSchema(output.toString());
        jsonGenericRecordConverter = new JsonGenericRecordConverter(jsonSchema);
    }

    GenericRecord outputRecord = jsonGenericRecordConverter.convertToAvro(output.toString());
    context.output(outputRecord);
}