Package: org.openrdf.model.impl

Examples of org.openrdf.model.impl.LinkedHashModel


    RDFFormat rdfFormat = Rio.getParserFormatForMIMEType(mimeType);

    try {
      RDFParser parser = Rio.createParser(rdfFormat);

      Model model = new LinkedHashModel();
      parser.setRDFHandler(new StatementCollector(model));

      parser.parse(entity.getStream(), "");

      return model;
View Full Code Here


      URIFactory uf = repository.getURIFactory();
      LiteralFactory lf = repository.getLiteralFactory();

      URI subj = uf.createURI(getRequest().getResourceRef().toString(false, false));

      Model model = new LinkedHashModel();
      for (PropertyDescriptor p : properties) {
        Object o = p.getReadMethod().invoke(data);
        if (o instanceof Object[]) {
          for (Object e : (Object[])o) {
            add(model, subj, uf, p.getName(), lf, e);
View Full Code Here

    return workflow;
  }
 
  private void writePROV() throws IOException {
    ValueFactory factory = ValueFactoryImpl.getInstance();   
    Model stmts = new LinkedHashModel();
    FileInputStream fis = new FileInputStream(workflow.file());
    String workflowMD5sum = DigestUtils.md5Hex(fis);
    long currentTimeMilis = System.currentTimeMillis();
   
    // Define all the URI's that we are going to (re)use
    URI eURI = factory.createURI(PROV_NAMESPACE, "Entity");
    URI acURI = factory.createURI(PROV_NAMESPACE, "Activity");
    URI usedURI = factory.createURI(PROV_NAMESPACE, "used");
    URI wgbURI  = factory.createURI(PROV_NAMESPACE, "wasGeneratedBy");
    URI  genAtURI  = factory.createURI(PROV_NAMESPACE, "generatedAtTime");
    URI  startAtURI  = factory.createURI(PROV_NAMESPACE, "startedAtTime");
    URI  endAtURI  = factory.createURI(PROV_NAMESPACE, "endedAtTime");
   
    URI valueURI = factory.createURI(NAMESPACE, "value");
   
    URI agURI = factory.createURI(PROV_NAMESPACE, "Agent");
    URI watURI  = factory.createURI(PROV_NAMESPACE, "wasAttributedTo");
    URI wawURI  = factory.createURI(PROV_NAMESPACE, "wasAssociatedWith");
   
    URI planURI  = factory.createURI(PROV_NAMESPACE, "Plan");
    URI assoURI  = factory.createURI(PROV_NAMESPACE, "Association")
    URI qualAssoURI  = factory.createURI(PROV_NAMESPACE, "qualifiedAssociation")
   
    URI hadPlanURI  = factory.createURI(PROV_NAMESPACE, "hadPlan");
    URI hadAgentURI  = factory.createURI(PROV_NAMESPACE, "agent");
     
    URI platformURI = factory.createURI(NAMESPACE + "ducktape/", InetAddress.getLocalHost().getHostName() + "/" + Global.getSerialversionuid());
    URI workflowURI = factory.createURI(NAMESPACE + "workflow/", workflow.file().getAbsolutePath() + "/" + workflowMD5sum);
     
    // The software is the agent and the workflow is the plan
    stmts.add(factory.createStatement(platformURI, RDF.TYPE, agURI));
    stmts.add(factory.createStatement(workflowURI, RDF.TYPE, planURI));
   
    stmts.add(factory.createStatement(platformURI, RDFS.LABEL,
        Literals.createLiteral(factory, "ducktape on: " + InetAddress.getLocalHost().getHostName() + ", versionID: " + Global.getSerialversionuid())));
    stmts.add(factory.createStatement(workflowURI, RDFS.LABEL,
        Literals.createLiteral(factory, workflow.name() + ", date: " + new Date(workflow.file().lastModified()))));
   
 
   
    String moduleInstanceSumTimestamp = "module/instance/"+InetAddress.getLocalHost().getHostName()+"/"+workflowMD5sum+"/"+currentTimeMilis+"/";
    for (Module module : workflow.modules()) {
     
      for (ModuleInstance mi : module.instances()) {
        // Create provenance for the module (as an activity)
        URI miURI = factory.createURI(NAMESPACE + moduleInstanceSumTimestamp, module.name() + mi.moduleID());
        stmts.add(factory.createStatement(miURI, RDF.TYPE, acURI)); // Activity
        stmts.add(factory.createStatement(miURI, startAtURI, Literals.createLiteral(factory, new Date(mi.startTime())))); // Start time
        stmts.add(factory.createStatement(miURI, endAtURI, Literals.createLiteral(factory, new Date(mi.endTime())))); // end time     
        stmts.add(factory.createStatement(miURI, wawURI, platformURI)); // wasAssociatedWith
       
        // qualified Association
        BNode bn = factory.createBNode();
        stmts.add(factory.createStatement(bn, RDF.TYPE, assoURI));
        stmts.add(factory.createStatement(bn, hadPlanURI, workflowURI));
        stmts.add(factory.createStatement(bn, hadAgentURI, platformURI));
        stmts.add(factory.createStatement(miURI, qualAssoURI, bn));
       
        // Create provenance for the outputs (as entities)
        for (InstanceOutput io : mi.outputs()) {
          URI ioURI = factory.createURI(NAMESPACE + moduleInstanceSumTimestamp, module.name() + mi.moduleID() + "/output/" + io.name());
          stmts.add(factory.createStatement(ioURI, RDF.TYPE, eURI)); // entity
          stmts.add(factory.createStatement(ioURI, wgbURI, miURI)); // wasGeneratedBy
          stmts.add(factory.createStatement(ioURI, genAtURI, Literals.createLiteral(factory, new Date(io.creationTime())))); // generated at time
          stmts.add(factory.createStatement(ioURI, watURI, platformURI)); // wasAttributedTo
         
          // If we can create a literal of the value, save it and create a rdfs-label
          if (Literals.canCreateLiteral(io.value())) {
            stmts.add(factory.createStatement(ioURI, valueURI, Literals.createLiteral(factory, io.value())));
            stmts.add(factory.createStatement(ioURI, RDFS.LABEL, Literals.createLiteral(factory, io)));
          }
        }
       
        // Create provenance for the inputs (as entities)
        for (InstanceInput ii : mi.inputs()) {
          URI iiURI = null;
         
          if (ii.instanceOutput() != null) {
            iiURI = factory.createURI(NAMESPACE + moduleInstanceSumTimestamp, ii.instanceOutput().module().name()
                + ii.instanceOutput().instance().moduleID() + "/output/" + ii.name());
          } else {
            iiURI = factory.createURI(NAMESPACE + moduleInstanceSumTimestamp, module.name() + mi.moduleID()
                + "/input/" + ii.name());
           
            // If we can create a literal
            if (Literals.canCreateLiteral(ii.value())) {
              stmts.add(factory.createStatement(iiURI, valueURI, Literals.createLiteral(factory, ii.value())));
              stmts.add(factory.createStatement(iiURI, RDFS.LABEL, Literals.createLiteral(factory, ii)));
            }     
          }
             
          stmts.add(factory.createStatement(iiURI, RDF.TYPE, eURI)); // entity
          stmts.add(factory.createStatement(miURI, usedURI, iiURI)); // used         
        }
      }
    }
   
    File file = new File(root, PROV_FILE);
View Full Code Here

    }
   
  
    public static void serializeRDFJSON(Map<String,Metadata> data, OutputStream out) throws IOException {
        ValueFactory vf = ValueFactoryImpl.getInstance();
        Model results = new LinkedHashModel();
       
        for(Map.Entry<String,Metadata> subject : data.entrySet()) {
            Resource subjectResource = stringToResource(subject.getKey(), vf);
            for(Map.Entry<String,Set<RDFNode>> predicate : subject.getValue().entrySet()) {
                org.openrdf.model.URI predicateURI = vf.createURI(predicate.getKey());
                for(RDFNode objectNode : predicate.getValue()) {
                    org.openrdf.model.Value objectValue;
                    if( objectNode instanceof Literal) {
                        if(((Literal) objectNode).getLanguage() != null )
                            objectValue = vf.createLiteral(((Literal)objectNode).getContent(),
                                                ((Literal)objectNode).getLanguage());
                        else if(((Literal) objectNode).getType() != null)
                            objectValue = vf.createLiteral(((Literal)objectNode).getContent(),
                                                vf.createURI(((Literal)objectNode).getType().getUri()));
                        else
                            objectValue = vf.createLiteral(((Literal)objectNode).getContent());
                    } else {
                        if( objectNode instanceof URI ) {
                            objectValue = vf.createURI(((URI)objectNode).getUri());
                        } else {
                            objectValue = vf.createBNode(((BNode)objectNode).getAnonId());
                        }
                    }
                    results.add(subjectResource, predicateURI, objectValue);
                }
            }
               
        }
       
View Full Code Here

        // "{\"@id\":{\"@id\":\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/machine/DVC-1_8\"},\"http://igreen-projekt.de/ontologies/isoxml#deviceElement\":\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceelement/DET-1_8\",\"http://igreen-projekt.de/ontologies/isoxml#deviceID\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"DVC-1\"},\"http://igreen-projekt.de/ontologies/isoxml#deviceLocalizationLabel\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"FF000000406564\"},\"http://igreen-projekt.de/ontologies/isoxml#deviceProcessData\":[\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/13_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/6_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/14_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/11_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/8_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/4_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/5_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/10_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/2_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/21_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/15_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/16_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/19_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/17_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/3_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/12_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/7_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/18
_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/9_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/22_8\",\"http://pc-4107.kl.dfki.de:38080/onlinebox/resource/deviceprocessdata/20_8\"],\"http://igreen-projekt.de/ontologies/isoxml#deviceSerialNumber\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"12345\"},\"http://igreen-projekt.de/ontologies/isoxml#deviceSoftwareVersion\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"01.009\"},\"http://igreen-projekt.de/ontologies/isoxml#deviceStructureLabel\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"31303030303030\"},\"http://igreen-projekt.de/ontologies/isoxml#workingSetMasterNAME\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"A000860020800001\"},\"http://www.w3.org/1999/02/22-rdf-syntax-ns#type\":{\"@iri\":\"http://www.agroxml.de/rdfs#Machine\"},\"http://www.w3.org/2000/01/rdf-schema#label\":{\"@datatype\":\"http://www.w3.org/2001/XMLSchema#string\",\"@literal\":\"Krone Device\"}}";
        final String inputstring = "{ \"@id\":\"http://nonexistent.com/abox#Document1823812\", \"@type\":\"http://nonexistent.com/tbox#Document\" }";
        final String expectedString = "(http://nonexistent.com/abox#Document1823812, http://www.w3.org/1999/02/22-rdf-syntax-ns#type, http://nonexistent.com/tbox#Document) [null]";
        final Object input = JsonUtils.fromString(inputstring);

        final Graph graph = new LinkedHashModel();
        final ParseErrorCollector parseErrorListener = new ParseErrorCollector();
        final ParserConfig parserConfig = new ParserConfig();
        final SesameTripleCallback callback = new SesameTripleCallback(
                new StatementCollector(graph), ValueFactoryImpl.getInstance(), parserConfig,
                parseErrorListener);

        JsonLdProcessor.toRDF(input, callback);

        final Iterator<Statement> statements = graph.iterator();

        // contains only one statement (type)
        while (statements.hasNext()) {
            final Statement stmt = statements.next();
View Full Code Here

    if (wait > MAX_WAIT_TIME) {
      throw new PrimalException("wait time is not allowed to exceed " + MAX_WAIT_TIME + " seconds", 0);
    }

    final Model model = new LinkedHashModel();

    final String requestURL = createRequestURLString(topic, contentSource, minScore, maxContentCount,
        wait);

    final GetMethod method = new GetMethod(requestURL);
    try {
      configureRequest(method);
      logRequestDetails(method, requestURL);

      final int httpCode = httpClient.executeMethod(method);
      logger.debug("response code: " + httpCode);
      if (httpCode == HTTP_OK) {
        final JsonReader reader = new JsonReader(new InputStreamReader(
            method.getResponseBodyAsStream()));

        final JsonParser jsonParser = new JsonParser();
        final JsonObject responseObject = jsonParser.parse(reader).getAsJsonObject();

        logger.trace("json response string: " + responseObject.toString());

        final JsonObject responseInfo = responseObject
            .getAsJsonObject(ResponseKey.PRIMAL_RESPONSE_INFO);

        if (responseInfo != null) {
          logger.debug("processing primal response info in response");

          final Resource responseInfoId = vf.createURI(requestURL);

          ResponseValues.processIntValue(responseInfoId, PRIMAL.CONTENT_COUNT, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_CONTENT_COUNT);

          ResponseValues.processIntValue(responseInfoId, PRIMAL.TOTAL_CONCEPTS_COUNT, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_TOTAL_CONCEPTS_COUNT);

          ResponseValues.processFloatValue(responseInfoId, PRIMAL.MIN_SEMANTIC_COVERAGE, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_MIN_SEMANTIC_COVERAGE);

          ResponseValues.processStringValue(responseInfoId, PRIMAL.STATUS, model,
              PRIMAL.RESPONSE_INFO, responseInfo,
              Version.latest == getPrimalVersion() ? ResponseKey.PRIMAL_STATUS
                  : ResponseKey.PRIMAL_STATUS);

          ResponseValues.processIntValue(responseInfoId, PRIMAL.CONTENT_FILTERED_OUT, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_CONTENT_FILTERED_OUT);

          ResponseValues.processIntValue(responseInfoId, PRIMAL.CONCEPT_COUNT, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_CONCEPT_COUNT);

          ResponseValues.processFloatValue(responseInfoId, PRIMAL.HIGH_CONTENT_SCORE, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_HIGH_CONTENT_SCORE);

          ResponseValues.processFloatValue(responseInfoId, PRIMAL.TERM_COVERAGE, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_TERM_COVERAGE);

          if (responseInfo.has(ResponseKey.PRIMAL_RECOGNIZED_TERMS)) {
            final JsonArray recTerms = responseInfo.get(ResponseKey.PRIMAL_RECOGNIZED_TERMS)
                .getAsJsonArray();

            for (int i = 0; i < recTerms.size(); i++) {
              final String term = recTerms.get(i).getAsString();
              model.add(responseInfoId, PRIMAL.RECOGNIZED_TERM, vf.createLiteral(term),
                  PRIMAL.RESPONSE_INFO);
            }
          }

          ResponseValues.processStringValue(responseInfoId, PRIMAL.VIABILITY_MESSAGE, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_VIABILITY_MESSAGE);

          ResponseValues.processFloatValue(responseInfoId, PRIMAL.SEMANTIC_RATIO, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_SEMANTIC_RATIO);

          ResponseValues.processBooleanValue(responseInfoId, PRIMAL.HAS_EXPANSION, model,
              PRIMAL.RESPONSE_INFO, responseInfo, ResponseKey.PRIMAL_HAS_EXPANSION);

        }

        logger.debug("processing SKOS concepts in response");

        final JsonObject conceptScheme = responseObject
            .getAsJsonObject(ResponseKey.SKOS_CONCEPT_SCHEME);

        final JsonElement topConcepts = conceptScheme.get(ResponseKey.SKOS_HAS_TOP_CONCEPT);

        final JsonObject collection = conceptScheme.getAsJsonObject(ResponseKey.SKOS_COLLECTION);

        final Set<Entry<String, JsonElement>> members = collection.entrySet();

        final Resource conceptSchemeId = vf.createBNode();
        model.add(conceptSchemeId, RDF.TYPE, SKOS.CONCEPT_SCHEME, PRIMAL.CONCEPTS);

        final Resource collectionId = vf.createBNode();
        model.add(collectionId, RDF.TYPE, SKOS.COLLECTION, PRIMAL.CONCEPTS);

        final List<URI> rootConcepts = new ArrayList<URI>();

        if (topConcepts.isJsonArray()) {
          URI rcURI;
          final JsonArray rcArray = topConcepts.getAsJsonArray();
          for (int i = 0; i < rcArray.size(); i++) {
            rcURI = vf.createURI(rcArray.get(i).getAsString());
            rootConcepts.add(rcURI);
          }
        } else {
          final URI rcURI = vf.createURI(topConcepts.getAsString());
          rootConcepts.add(rcURI);
        }

        for (final URI rootConceptId : rootConcepts) {
          model.add(collectionId, SKOS.MEMBER, rootConceptId, PRIMAL.CONCEPTS);
          model.add(conceptSchemeId, SKOS.HAS_TOP_CONCEPT, rootConceptId, PRIMAL.CONCEPTS);

          for (final Entry<String, JsonElement> member : members) {
            final String conceptId = member.getKey();

            final JsonObject conceptAsJson = member.getValue().getAsJsonObject();

            final JsonElement prefLabelElement = conceptAsJson.get(ResponseKey.SKOS_PREF_LABEL);
            String prefLabel = null;
            if (prefLabelElement != null && !prefLabelElement.isJsonNull()) {
              prefLabel = prefLabelElement.getAsString();
            } else {
              logger.warn("no prefLabel found for {}. Skipping concept creation", conceptId);
              continue;
            }

            final URI concept = vf.createURI(conceptId);
            model.add(concept, RDF.TYPE, SKOS.CONCEPT, PRIMAL.CONCEPTS);
            model.add(concept, SKOS.PREF_LABEL, vf.createLiteral(prefLabel), PRIMAL.CONCEPTS);

            model.add(collectionId, SKOS.MEMBER, concept, PRIMAL.CONCEPTS);

            ResponseValues.processFloatValue(concept, PRIMAL.CONCEPT_SCORE, model,
                PRIMAL.CONCEPTS, conceptAsJson, ResponseKey.PRIMAL_CONCEPT_SCORE);

            ResponseValues.processStringValue(concept, SKOS.ALT_LABEL, model, PRIMAL.CONCEPTS,
                conceptAsJson, ResponseKey.SKOS_ALT_LABEL);

            ResponseValues.processStringValue(concept, PRIMAL.SOURCE, model, PRIMAL.CONCEPTS,
                conceptAsJson, ResponseKey.PRIMAL_SOURCE);

            final JsonElement narrowerElem = conceptAsJson.get(ResponseKey.SKOS_NARROWER);

            if (narrowerElem != null) {
              final JsonArray narrower = narrowerElem.getAsJsonArray();

              for (int i = 0; i < narrower.size(); i++) {
                final String narrowerId = narrower.get(i).getAsString();
                model.add(concept, SKOS.NARROWER, vf.createURI(narrowerId), PRIMAL.CONCEPTS);
              }
            }
          }
        }

        logger.debug("processing DC part of response...");
        final JsonElement dcCollection = responseObject.get(ResponseKey.DC_COLLECTION);

        if (dcCollection.isJsonArray()) {
          final JsonArray array = dcCollection.getAsJsonArray();

          logger.debug("response contains {} content items", array.size());

          for (int i = 0; i < array.size(); i++) {
            final JsonObject contentItem = array.get(i).getAsJsonObject();

            final String dcIdentifier = contentItem.get(ResponseKey.DC_IDENTIFIER).getAsString();

            final URI contentItemId = vf.createURI(dcIdentifier);
            model.add(contentItemId, RDF.TYPE, PRIMAL.CONTENT_ITEM, PRIMAL.CONTENT);

            model.add(contentItemId, DC.IDENTIFIER, vf.createLiteral(dcIdentifier),
                PRIMAL.CONTENT);

            ResponseValues.processStringValue(contentItemId, DC.TITLE, model, PRIMAL.CONTENT,
                contentItem, ResponseKey.DC_TITLE);

            ResponseValues.processStringValue(contentItemId, DC.DESCRIPTION, model,
                PRIMAL.CONTENT, contentItem, ResponseKey.DC_DESCRIPTION);

            ResponseValues.processStringValue(contentItemId, DC.PUBLISHER, model, PRIMAL.CONTENT,
                contentItem, ResponseKey.DC_PUBLISHER);

            ResponseValues.processStringValue(contentItemId, DC.SOURCE, model, PRIMAL.CONTENT,
                contentItem, ResponseKey.DC_SOURCE);

            ResponseValues.processStringValue(contentItemId, DC.RELATION, model, PRIMAL.CONTENT,
                contentItem, ResponseKey.DC_RELATION);

            ResponseValues.processDateValue(contentItemId, DC.DATE, model, PRIMAL.CONTENT,
                contentItem, ResponseKey.DC_DATE);

            ResponseValues.processFloatValue(contentItemId, PRIMAL.CONTENT_SCORE, model,
                PRIMAL.CONTENT, contentItem, ResponseKey.PRIMAL_CONTENT_SCORE);

            final JsonArray subjects = contentItem.get(ResponseKey.DC_SUBJECT).getAsJsonArray();
            for (int j = 0; j < subjects.size(); j++) {
              final String subject = subjects.get(j).getAsString();
              model.add(contentItemId, DC.SUBJECT, vf.createURI(subject), PRIMAL.CONTENT);
            }
          }

        } else {
          // TODO can this happen?
View Full Code Here

        ParserConfig config = new ParserConfig();
        config.set(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES, true);
        config.set(BasicParserSettings.FAIL_ON_UNKNOWN_LANGUAGES, true);
        rdfParser.setParserConfig(config);
        rdfParser.setValueFactory(vf);
        Model model = new LinkedHashModel();
        rdfParser.setRDFHandler(new StatementCollector(model));

        rdfParser.parse(in, "foo:bar");

        assertEquals("Unexpected number of statements, found " + model.size(), 1, model.size());

        assertTrue("missing namespaced statement", model.contains(st1));

        if (rdfParser.getRDFFormat().supportsNamespaces()) {
            assertTrue("Expected at least one namespace, found " + model.getNamespaces().size(),
                    model.getNamespaces().size() >= 1);
            assertEquals(exNs, model.getNamespace("ex").getName());
        }
    }
View Full Code Here

    private ParserConfig parserConfig;

    private final ParseErrorListener parseErrorListener;

    /**
     * Creates a callback that collects parsed statements into a fresh
     * {@link LinkedHashModel} wrapped in a default {@link StatementCollector}.
     */
    public SesameTripleCallback() {
        this(new StatementCollector(new LinkedHashModel()));
    }
View Full Code Here

 
  private Model model;
 
  // Gives each test a fresh, empty model so test cases stay independent.
  @Before
  public void setUp() throws Exception {
    model = new LinkedHashModel();
  }
View Full Code Here

        return result;
    }

    private static long benchmarkSemarglSesame(File path) throws SAXException, ParseException {
        System.out.println("Semargl-Sesame benchmark");
        AbstractModel model = new LinkedHashModel();
        StreamProcessor streamProcessor = new StreamProcessor(JsonLdParser.connect(SesameSink.connect(new StatementCollector(model))));

        List<File> files = listFiles(path);
        long time = System.nanoTime();
        for (File file : files) {
            streamProcessor.process(file, HTTP_EXAMPLE_COM);
        }
        System.out.println("Model size = " + model.size());
        return System.nanoTime() - time;
    }
View Full Code Here

TOP

Related Classes of org.openrdf.model.impl.LinkedHashModel

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.