org.eclipse.rdf4j.model.Model Source Code Examples

The following examples show how to use org.eclipse.rdf4j.model.Model. Each snippet is taken from an open-source project on GitHub; the project and file are listed above each example.
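Before the individual project examples, here is a minimal, self-contained sketch (written for this listing, not taken from any project below) of the basic Model workflow that recurs throughout the snippets: create a LinkedHashModel, add statements through a ValueFactory, and query it with filter(). The class name and IRIs are illustrative only.

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.FOAF;
import org.eclipse.rdf4j.model.vocabulary.RDF;

// Illustrative demo class; the IRIs below are made up for demonstration purposes.
public class ModelDemo {
    public static void main(String[] args) {
        ValueFactory vf = SimpleValueFactory.getInstance();
        IRI alice = vf.createIRI("http://example.org/alice");

        // A Model is a java.util.Set<Statement> with RDF-specific accessors
        Model model = new LinkedHashModel();
        model.add(alice, RDF.TYPE, FOAF.PERSON);
        model.add(alice, FOAF.NAME, vf.createLiteral("Alice"));

        // filter() returns a view of the model restricted to the given pattern
        Model names = model.filter(alice, FOAF.NAME, null);
        System.out.println(names.objects()); // prints the literal "Alice"
    }
}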

Example 1  Project: semagrow  File: SemagrowRepositoryResolver.java
private Model parseConfig(File file) throws SailConfigException, IOException
{
    // Rio.getParserFormatForFileName returns an Optional, so resolve it explicitly
    // instead of calling get() and then checking for null
    RDFFormat format = Rio.getParserFormatForFileName(file.getAbsolutePath())
            .orElseThrow(() -> new SailConfigException(
                    "Unsupported file format: " + file.getAbsolutePath()));
    RDFParser parser = Rio.createParser(format);
    Model model = new LinkedHashModel();
    parser.setRDFHandler(new StatementCollector(model));

    // try-with-resources closes the stream even if parsing fails
    try (InputStream stream = new FileInputStream(file)) {
        parser.parse(stream, file.getAbsolutePath());
    } catch (RDFParseException | RDFHandlerException e) {
        throw new SailConfigException("Error parsing file: " + file.getAbsolutePath(), e);
    }

    return model;
}
 
Example 2  Project: rdf4j  File: RDFWriterTest.java
private void testSES2030BNodeCollisionsInternal(boolean preserveBNodeIDs) throws Exception {
		ByteArrayOutputStream output = new ByteArrayOutputStream();
		RDFWriter rdfWriter = rdfWriterFactory.getWriter(output);
		setupWriterConfig(rdfWriter.getWriterConfig());
		rdfWriter.startRDF();
		int count = 18;
		for (int i = 0; i < count; i++) {
			BNode bNode2 = vf.createBNode("a" + Integer.toHexString(i).toUpperCase());
			// System.out.println(bNode2.getID());
			rdfWriter.handleStatement(vf.createStatement(uri1, uri2, bNode2));
		}
		rdfWriter.endRDF();
		RDFParser rdfParser = rdfParserFactory.getParser();
		setupParserConfig(rdfParser.getParserConfig());
		if (preserveBNodeIDs) {
			rdfParser.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, true);
		}
		Model parsedModel = new LinkedHashModel();
		rdfParser.setRDFHandler(new StatementCollector(parsedModel));
		rdfParser.parse(new ByteArrayInputStream(output.toByteArray()), "");
//		if (count != parsedModel.size()) {
//			Rio.write(parsedModel, System.out, RDFFormat.NQUADS);
//		}
		assertEquals(count, parsedModel.size());
	}
 
Example 3  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testWriteCommentBNodeContextBNodeWithNamespaceBeforeNamespace() throws Exception {
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	RDFWriter rdfWriter = rdfWriterFactory.getWriter(outputWriter);
	setupWriterConfig(rdfWriter.getWriterConfig());
	rdfWriter.startRDF();
	rdfWriter.handleNamespace("ex", exNs);
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri1, bnode));
	rdfWriter.handleComment("This comment should not screw up parsing");
	rdfWriter.handleNamespace("ex1", exNs);
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri2, bnode));
	rdfWriter.endRDF();
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	RDFParser rdfParser = rdfParserFactory.getParser();
	setupParserConfig(rdfParser.getParserConfig());
	Model parsedOutput = new LinkedHashModel();
	rdfParser.setRDFHandler(new StatementCollector(parsedOutput));
	rdfParser.parse(inputReader, "");
	assertEquals(2, parsedOutput.size());
	assertTrue(parsedOutput.contains(uri1, uri1, uri1));
	assertTrue(parsedOutput.contains(uri1, uri1, uri2));
	assertEquals(1, parsedOutput.contexts().size());
}
 
Example 4
protected void initialize(Model graph, Resource repNode) {
	
	// name: the node's value
	setProperty("name", repNode.stringValue());

	// repositoryServer / location
	Model repositoryServer = graph.filter(repNode, SimpleValueFactory.getInstance().createIRI("http://fluidops.org/config#repositoryServer"), null);
	String repoLocation = repositoryServer.iterator().next().getObject().stringValue();
	setProperty("location", repoLocation);
	setProperty("repositoryServer", repoLocation);
	
	// repositoryName
	Model repositoryName = graph.filter(repNode, SimpleValueFactory.getInstance().createIRI("http://fluidops.org/config#repositoryName"), null);
	String repoName = repositoryName.iterator().next().getObject().stringValue();
	setProperty("repositoryName", repoName);
	
	// id: the name of the location
	String id = repNode.stringValue().replace("http://", "");
	id = "remote_" + id.replace("/", "_");
	setProperty("id", id);
}
 
Example 5  Project: Halyard  File: JSONParserParseTest.java
@Test
public void testParse() throws Exception {
    Model transformedModel = new LinkedHashModel();
    RDFParser parser = new JSONParser();
    parser.setValueFactory(SimpleValueFactory.getInstance());
    parser.set(JSONParser.GENERATE_ONTOLOGY, true);
    parser.setRDFHandler(new ContextStatementCollector(transformedModel, SimpleValueFactory.getInstance()));
    parser.parse(JSONParserParseTest.class.getResourceAsStream(parameter + ".json"), "http://testParse/"+parameter + "/");

    WriterConfig wc = new WriterConfig();
    wc.set(BasicWriterSettings.PRETTY_PRINT, true);
    System.out.println("-------------- " + parameter + " ------------------");
    Rio.write(transformedModel, System.out, RDFFormat.TURTLE, wc);

    Model expectedModel = Rio.parse(JSONParserParseTest.class.getResourceAsStream(parameter + ".ttl"), "http://testParse/" + parameter + "/", RDFFormat.TURTLE);

    JSONParserParseTest.assertEquals(expectedModel, transformedModel);
}
 
Example 6  Project: rdf4j  File: ElasticsearchStoreConfigTest.java
@Test
public void exportAddsAllConfigData() {

	// @formatter:off
	mb
			.add(ElasticsearchStoreSchema.hostname, "host1")
			.add(ElasticsearchStoreSchema.clusterName, "cluster1")
			.add(ElasticsearchStoreSchema.index, "index1")
			.add(ElasticsearchStoreSchema.port, 9300);
	// @formatter:on

	subject.parse(mb.build(), implNode);

	Model m = new TreeModel();
	Resource node = subject.export(m);

	assertThat(m.contains(node, ElasticsearchStoreSchema.hostname, null)).isTrue();
	assertThat(m.contains(node, ElasticsearchStoreSchema.clusterName, null)).isTrue();
	assertThat(m.contains(node, ElasticsearchStoreSchema.index, null)).isTrue();
	assertThat(m.contains(node, ElasticsearchStoreSchema.port, null)).isTrue();

}
 
Example 7
@Test
@Deprecated
public void testModifySystemRepository() {
	RepositoryConfig config = subject.getRepositoryConfig(TEST_REPO);
	subject.addRepositoryConfig(new RepositoryConfig(SystemRepository.ID, new SystemRepositoryConfig()));
	subject.shutDown();
	subject = new LocalRepositoryManager(datadir);
	subject.initialize();
	try (RepositoryConnection con = subject.getSystemRepository().getConnection()) {
		Model model = new TreeModel();
		config.setTitle("Changed");
		config.export(model, con.getValueFactory().createBNode());
		Resource ctx = RepositoryConfigUtil.getContext(con, config.getID());
		con.begin();
		con.clear(ctx);
		con.add(model, ctx == null ? con.getValueFactory().createBNode() : ctx);
		con.commit();
	}
	assertEquals("Changed", subject.getRepositoryConfig(TEST_REPO).getTitle());
}
 
Example 8  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testWriteCommentURIContextWithNamespaceBeforeNamespace() throws Exception {
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	RDFWriter rdfWriter = rdfWriterFactory.getWriter(outputWriter);
	setupWriterConfig(rdfWriter.getWriterConfig());
	rdfWriter.startRDF();
	rdfWriter.handleNamespace("ex", exNs);
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri1, uri1));
	rdfWriter.handleComment("This comment should not screw up parsing");
	rdfWriter.handleNamespace("ex1", exNs);
	rdfWriter.endRDF();
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	RDFParser rdfParser = rdfParserFactory.getParser();
	setupParserConfig(rdfParser.getParserConfig());
	Model parsedOutput = new LinkedHashModel();
	rdfParser.setRDFHandler(new StatementCollector(parsedOutput));
	rdfParser.parse(inputReader, "");
	assertEquals(1, parsedOutput.size());
	if (rdfWriterFactory.getRDFFormat().supportsContexts()) {
		assertTrue(parsedOutput.contains(uri1, uri1, uri1, uri1));
	} else {
		assertTrue(parsedOutput.contains(uri1, uri1, uri1));
	}
}
 
Example 9  Project: rdf4j  File: ModelsTest.java
@Test
public void testConvertRDFStarToReification() {
	Model rdfStarModel = RDFStarTestHelper.createRDFStarModel();
	Model referenceModel = RDFStarTestHelper.createRDFReificationModel();

	Model reificationModel1 = Models.convertRDFStarToReification(VF, rdfStarModel);
	assertTrue("RDF* conversion to reification with explicit VF, model-to-model",
			Models.isomorphic(reificationModel1, referenceModel));

	Model reificationModel2 = Models.convertRDFStarToReification(rdfStarModel);
	assertTrue("RDF* conversion to reification with implicit VF, model-to-model",
			Models.isomorphic(reificationModel2, referenceModel));

	Model reificationModel3 = new TreeModel();
	Models.convertRDFStarToReification(VF, rdfStarModel, (Consumer<Statement>) reificationModel3::add);
	assertTrue("RDF* conversion to reification with explicit VF, model-to-consumer",
			Models.isomorphic(reificationModel3, referenceModel));

	Model reificationModel4 = new TreeModel();
	Models.convertRDFStarToReification(rdfStarModel, reificationModel4::add);
	assertTrue("RDF* conversion to reification with explicit VF, model-to-consumer",
			Models.isomorphic(reificationModel4, referenceModel));
}
 
Example 10  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testWriteTwoStatementsSubjectBNodeSinglePredicateSingleContextIRIWithNamespace() throws Exception {
	Model input = new LinkedHashModel();
	input.setNamespace("ex", exNs);
	input.add(vf.createStatement(bnodeSingleUseSubject, uri1, uri1, uri1));
	input.add(vf.createStatement(bnodeSingleUseSubject, uri1, uri2, uri1));
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	write(input, outputWriter);
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	Model parsedOutput = parse(inputReader, "");
	assertEquals(2, parsedOutput.size());
	if (rdfWriterFactory.getRDFFormat().supportsContexts()) {
		assertEquals(1, parsedOutput.filter(null, uri1, uri1, uri1).size());
		assertEquals(1, parsedOutput.filter(null, uri1, uri2, uri1).size());
	} else {
		assertEquals(1, parsedOutput.filter(null, uri1, uri1).size());
		assertEquals(1, parsedOutput.filter(null, uri1, uri2).size());
	}
	assertEquals(1, parsedOutput.subjects().size());
	assertTrue(parsedOutput.subjects().iterator().next() instanceof BNode);
}
 
Example 11  Project: rdf4j  File: ValidationResult.java
public Model asModel(Model model) {

		model.add(getId(), RDF.TYPE, SHACL.VALIDATION_RESULT);

		model.add(getId(), SHACL.FOCUS_NODE, getFocusNode());
		model.add(getId(), SHACL.SOURCE_CONSTRAINT_COMPONENT, getSourceConstraintComponent().getIri());
		model.add(getId(), SHACL.SOURCE_SHAPE, getSourceShapeResource());

		if (getPath() != null) {
			model.add(getId(), SHACL.RESULT_PATH, ((SimplePath) getPath()).getPath());
		}

		if (detail != null) {
			model.add(getId(), SHACL.DETAIL, detail.getId());
			detail.asModel(model);
		}

		return model;
	}
 
Example 12  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testSuccessBNodeParsesAreDistinct() throws Exception {
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	RDFWriter rdfWriter = rdfWriterFactory.getWriter(outputWriter);
	setupWriterConfig(rdfWriter.getWriterConfig());
	rdfWriter.startRDF();
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, bnode));
	rdfWriter.endRDF();
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	RDFParser rdfParser = rdfParserFactory.getParser();
	setupParserConfig(rdfParser.getParserConfig());
	Model parsedOutput = new LinkedHashModel();
	rdfParser.setRDFHandler(new StatementCollector(parsedOutput));
	rdfParser.parse(inputReader, "");
	assertEquals(1, parsedOutput.size());
	ByteArrayInputStream inputReader2 = new ByteArrayInputStream(outputWriter.toByteArray());
	rdfParser.parse(inputReader2, "");
	assertEquals(2, parsedOutput.size());
}
 
Example 13  Project: rdf4j  File: RepositoryConnectionTest.java
@Test
public void testGraphSerialization() throws Exception {
	testCon.add(bob, name, nameBob);
	testCon.add(alice, name, nameAlice);

	try (RepositoryResult<Statement> statements = testCon.getStatements(null, null, null, true);) {
		Model graph = Iterations.addAll(statements, new LinkedHashModel());

		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		ObjectOutputStream out = new ObjectOutputStream(baos);
		out.writeObject(graph);
		out.close();

		ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
		ObjectInputStream in = new ObjectInputStream(bais);
		Model deserializedGraph = (Model) in.readObject();
		in.close();

		assertThat(deserializedGraph.isEmpty()).isFalse();
		assertThat(deserializedGraph).hasSameSizeAs(graph);
		for (Statement st : deserializedGraph) {
			assertThat(graph).contains(st);
			assertThat(testCon.hasStatement(st, true)).isTrue();
		}
	}
}
 
Example 14  Project: molgenis  File: EntityModelWriter.java
private void addStatementsForAttributeTags(
    Entity objectEntity, Model model, Resource subject, EntityType entityType) {
  for (Attribute objectAttribute : entityType.getAtomicAttributes()) {
    Object value = objectEntity.get(objectAttribute.getName());
    if (value == null) {
      continue;
    }
    for (LabeledResource tag :
        tagService
            .getTagsForAttribute(entityType, objectAttribute)
            .get(Relation.isAssociatedWith)) {
      IRI predicate = valueFactory.createIRI(tag.getIri());
      addRelationForAttribute(model, subject, predicate, objectEntity, objectAttribute);
    }
  }
}
 
Example 15  Project: rdf4j  File: ModelsTest.java
@Test
public void testSetPropertyWithContext2() {
	Literal lit1 = VF.createLiteral(1.0);
	IRI graph1 = VF.createIRI("urn:g1");
	IRI graph2 = VF.createIRI("urn:g2");
	model1.add(foo, bar, lit1, graph1);
	model1.add(foo, bar, bar);
	model1.add(foo, bar, foo, graph2);

	Literal lit2 = VF.createLiteral(2.0);

	Model m = Models.setProperty(model1, foo, bar, lit2);

	assertNotNull(m);
	assertEquals(model1, m);
	assertFalse(model1.contains(foo, bar, lit1));
	assertFalse(model1.contains(foo, bar, lit1, graph1));
	assertFalse(model1.contains(foo, bar, foo));
	assertFalse(model1.contains(foo, bar, bar));
	assertFalse(model1.contains(foo, bar, foo, graph2));
	assertTrue(model1.contains(foo, bar, lit2));
}
 
Example 16  Project: rdf4j  File: ConfigController.java
private ModelAndView handleQuery(HttpServletRequest request, HttpServletResponse response)
		throws ClientHTTPException {

	RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response,
			RDFWriterRegistry.getInstance());
	String repId = RepositoryInterceptor.getRepositoryID(request);
	RepositoryConfig repositoryConfig = repositoryManager.getRepositoryConfig(repId);

	Model configData = modelFactory.createEmptyModel();
	String baseURI = request.getRequestURL().toString();
	Resource ctx = SimpleValueFactory.getInstance().createIRI(baseURI + "#" + repositoryConfig.getID());

	repositoryConfig.export(configData, ctx);
	Map<String, Object> model = new HashMap<>();
	model.put(ConfigView.FORMAT_KEY, rdfWriterFactory.getRDFFormat());
	model.put(ConfigView.CONFIG_DATA_KEY, configData);
	model.put(ConfigView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod()));
	return new ModelAndView(ConfigView.getInstance(), model);
}
 
Example 17  Project: rdf4j  File: MultithreadedTest.java
private void add(String turtle, IRI graph) {
	turtle = String.join("\n", "",
			"@prefix ex: <http://example.com/ns#> .",
			"@prefix sh: <http://www.w3.org/ns/shacl#> .",
			"@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .",
			"@prefix foaf: <http://xmlns.com/foaf/0.1/>.") + turtle;

	StringReader shaclRules = new StringReader(turtle);

	try {
		Model parse = Rio.parse(shaclRules, "", RDFFormat.TURTLE);
		parse.stream()
				.map(statement -> {
					if (graph != null) {
						return vf.createStatement(statement.getSubject(), statement.getPredicate(),
								statement.getObject(), graph);
					}

					return statement;
				})
				.forEach(statement -> addedStatements.add(statement));
	} catch (IOException e) {
		throw new RuntimeException(e);
	}

}
 
Example 18  Project: rdf4j  File: ModelsTest.java
@Test
public void testSetProperty() {
	Literal lit1 = VF.createLiteral(1.0);
	model1.add(foo, bar, lit1);
	model1.add(foo, bar, foo);

	Literal lit2 = VF.createLiteral(2.0);

	Model m = Models.setProperty(model1, foo, bar, lit2);

	assertNotNull(m);
	assertEquals(model1, m);
	assertFalse(model1.contains(foo, bar, lit1));
	assertFalse(model1.contains(foo, bar, foo));
	assertTrue(model1.contains(foo, bar, lit2));

}
 
Example 19  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testWriteCommentBNodeContext() throws Exception {
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	RDFWriter rdfWriter = rdfWriterFactory.getWriter(outputWriter);
	setupWriterConfig(rdfWriter.getWriterConfig());
	rdfWriter.startRDF();
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri1, bnode));
	rdfWriter.handleComment("This comment should not screw up parsing");
	rdfWriter.endRDF();
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	RDFParser rdfParser = rdfParserFactory.getParser();
	setupParserConfig(rdfParser.getParserConfig());
	Model parsedOutput = new LinkedHashModel();
	rdfParser.setRDFHandler(new StatementCollector(parsedOutput));
	rdfParser.parse(inputReader, "");
	assertEquals(1, parsedOutput.size());
	assertTrue(parsedOutput.contains(uri1, uri1, uri1));
}
 
Example 20  Project: semagrow  File: SemagrowRepositoryConfig.java
@Override
public void parse(Model graph, Resource node) throws RepositoryConfigException {

    try {
        Optional<Resource> sailImplNode = Models.objectResource(graph.filter(node, SAILIMPL,null));

        if (sailImplNode.isPresent()) {

                sailConfig  = new SemagrowSailConfig();
                sailConfig.parse(graph, sailImplNode.get());
        }
    }
    catch (SailConfigException e) {
        throw new RepositoryConfigException(e.getMessage(), e);
    }
}
 
Example 21  Project: rdf4j  File: RDFWriterTest.java
@Test
public void testWriteCommentBNodeContextBNodeWithNamespace() throws Exception {
	ByteArrayOutputStream outputWriter = new ByteArrayOutputStream();
	RDFWriter rdfWriter = rdfWriterFactory.getWriter(outputWriter);
	setupWriterConfig(rdfWriter.getWriterConfig());
	rdfWriter.startRDF();
	rdfWriter.handleNamespace("ex", exNs);
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri1, bnode));
	rdfWriter.handleComment("This comment should not screw up parsing");
	rdfWriter.handleStatement(vf.createStatement(uri1, uri1, uri2, bnode));
	rdfWriter.endRDF();
	ByteArrayInputStream inputReader = new ByteArrayInputStream(outputWriter.toByteArray());
	RDFParser rdfParser = rdfParserFactory.getParser();
	setupParserConfig(rdfParser.getParserConfig());
	Model parsedOutput = new LinkedHashModel();
	rdfParser.setRDFHandler(new StatementCollector(parsedOutput));
	rdfParser.parse(inputReader, "");
	assertEquals(2, parsedOutput.size());
	assertTrue(parsedOutput.contains(uri1, uri1, uri1));
	assertTrue(parsedOutput.contains(uri1, uri1, uri2));
	assertEquals(1, parsedOutput.contexts().size());
}
 
Example 22  Project: rdf4j  File: AbstractNTriplesParserUnitTest.java
@Test(expected = RDFParseException.class)
public void testBlankNodeIdentifiersWithOtherCharactersAsFirstCharacter() throws Exception {
	// The characters -, U+00B7, U+0300 to U+036F and U+203F to U+2040 are permitted anywhere except the first
	// character.
	List<Character> charactersList = new ArrayList<>();
	charactersList.add('-');
	charactersList.add('\u00B7');
	charactersList.add('\u0300');
	charactersList.add('\u036F');
	charactersList.add('\u0301');
	charactersList.add('\u203F');

	for (Character character : charactersList) {
		RDFParser ntriplesParser = new NTriplesParser();
		Model model = new LinkedHashModel();
		ntriplesParser.setRDFHandler(new StatementCollector(model));

		try {
			ntriplesParser.parse(
					new StringReader("<urn:test:subject> <urn:test:predicate> _:" + character + "1 . "),
					NTRIPLES_TEST_URL);
		} catch (RDFParseException e) {
			assertEquals(0, model.size());
			assertEquals(0, model.subjects().size());
			assertEquals(0, model.predicates().size());
			assertEquals(0, model.objects().size());
			throw e;
		}
		fail("Should have failed to parse invalid N-Triples bnode with '" + character
				+ "' at the begining of the bnode label");
	}
}
 
Example 23  Project: powsybl-core  File: TripleStoreRDF4J.java
private void write(Model model, OutputStream out) {
    try (PrintStream pout = new PrintStream(out)) {
        RDFWriter writer = new PowsyblWriter(pout);
        writer.getWriterConfig().set(BasicWriterSettings.PRETTY_PRINT, true);
        if (writeBySubject) {
            writeBySubject(model, writer);
        } else {
            Rio.write(model, writer);
        }
    }
}
 
Example 24  Project: mobi  File: FullSimpleOntologyTest.java
@Test
public void testConstructEntityUsages() throws Exception {
    Resource class1a = vf.createIRI("http://mobi.com/ontology#Class1a");
    Resource class1b = vf.createIRI("http://mobi.com/ontology#Class1b");
    IRI subClassOf = vf.createIRI("http://www.w3.org/2000/01/rdf-schema#subClassOf");
    Resource individual1a = vf.createIRI("http://mobi.com/ontology#Individual1a");
    IRI type = vf.createIRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
    com.mobi.rdf.api.Model expected = mf.createModel(Stream.of(vf.createStatement(class1b, subClassOf,
            class1a), vf.createStatement(individual1a, type, class1a)).collect(Collectors.toSet()));

    com.mobi.rdf.api.Model result = queryOntology.constructEntityUsages(class1a, mf);
    assertEquals(result, expected);
}
 
Example 25  Project: rdf4j  File: AbstractDelegatingSailImplConfig.java
@Override
public Resource export(Model m) {
	Resource implNode = super.export(m);

	if (delegate != null) {
		Resource delegateNode = delegate.export(m);
		m.add(implNode, DELEGATE, delegateNode);
	}

	return implNode;
}
 
Example 26  Project: rdf4j  File: ProxyRepositoryConfig.java
@Override
public void parse(Model model, Resource implNode) throws RepositoryConfigException {
	super.parse(model, implNode);

	try {
		Models.objectLiteral(model.getStatements(implNode, ProxyRepositorySchema.PROXIED_ID, null))
				.ifPresent(lit -> setProxiedRepositoryID(lit.getLabel()));
	} catch (ModelException e) {
		throw new RepositoryConfigException(e.getMessage(), e);
	}
}
 
Example 27  Project: rdf4j  File: IsomorphicBenchmark.java
private Model getModel(String name) {
	try {
		try (InputStream resourceAsStream = IsomorphicBenchmark.class.getClassLoader()
				.getResourceAsStream("benchmark/" + name)) {
			return Rio.parse(resourceAsStream, "http://example.com/", RDFFormat.TURTLE);
		}
	} catch (IOException e) {
		throw new RuntimeException(e);
	}
}
 
Example 28  Project: rdf4j  File: BinaryHandlingTest.java
@Override
protected InputStream getRDFLangStringWithNoLanguageStream(Model model) throws Exception {
	String fileName = "src/test/resources/testcases/binary/binary-RDF-langString-no-language-test.rdf";
	// Read the whole file into memory; Files.readAllBytes avoids the partial read
	// and unclosed stream of a bare FileInputStream.read(byte[]) call
	byte[] byteArray = Files.readAllBytes(Paths.get(fileName));
	return new ByteArrayInputStream(byteArray);
}
 
Example 29  Project: rdf4j  File: SystemRepository.java
private RepositoryConfig getSystemConfig() {
	URL ttl = this.getClass().getClassLoader().getResource(CONFIG_SYSTEM_TTL);
	if (ttl == null) {
		return null;
	}
	try (InputStream in = ttl.openStream()) {
		Model model = Rio.parse(in, ttl.toString(), RDFFormat.TURTLE);
		return RepositoryConfigUtil.getRepositoryConfig(model, ID);
	} catch (IOException e) {
		throw new RepositoryConfigException(e);
	}
}
 
Example 30  Project: rdf4j  File: AbstractParserHandlingTest.java
/**
 * Tests that a known language with the correct settings both generates no message and does not fail when
 * addNonFatalError is called with the given setting.
 */
@Test
public final void testKnownLanguageNoMessageNoFailCase4() throws Exception {
	Model expectedModel = getTestModel(KNOWN_LANGUAGE_VALUE, KNOWN_LANGUAGE_TAG);
	InputStream input = getKnownLanguageStream(expectedModel);

	testParser.getParserConfig().set(BasicParserSettings.FAIL_ON_UNKNOWN_LANGUAGES, false);
	testParser.getParserConfig().addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_LANGUAGES);

	testParser.parse(input, BASE_URI);

	assertErrorListener(0, 0, 0);
	assertModel(expectedModel);
}