Package org.apache.jena.riot.tokens

Examples of org.apache.jena.riot.tokens.Tokenizer
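
The snippets below are excerpts from Apache Jena's RIOT parsers, serializers and command-line tools. As a warm-up, here is a minimal, self-contained sketch of driving a Tokenizer by hand; it uses only the factory and iteration calls that also appear in the excerpts (the class name and the sample Turtle string are illustrative, not taken from Jena).

    import java.io.StringReader ;

    import org.apache.jena.riot.tokens.Token ;
    import org.apache.jena.riot.tokens.Tokenizer ;
    import org.apache.jena.riot.tokens.TokenizerFactory ;

    public class TokenizerDemo
    {
        public static void main(String[] args)
        {
            // Tokenize a short piece of Turtle held in a string; for streams use
            // TokenizerFactory.makeTokenizerUTF8 / makeTokenizerASCII as in the excerpts below.
            Tokenizer tokenizer = TokenizerFactory.makeTokenizer(new StringReader("<http://example/s> <http://example/p> 123 .")) ;
            while ( tokenizer.hasNext() )
            {
                Token t = tokenizer.next() ;
                System.out.println(t) ;     // print each token as it is produced
            }
            tokenizer.close() ;
        }
    }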


     * @deprecated use an RDFDataMgr operation with argument Lang.TURTLE
     */
    @Deprecated
    public static LangTurtle createParserTurtle(InputStream input, String baseIRI, StreamRDF dest)
    {
        Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(input) ;
        return createParserTurtle(tokenizer, baseIRI, dest) ;
    }


     * @deprecated use an RDFDataMgr operation with argument Lang.TRIG
     */
    @Deprecated
    public static LangTriG createParserTriG(InputStream input, String baseIRI, StreamRDF dest)
    {
        Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(input) ;
        return createParserTriG(tokenizer, baseIRI, dest) ;
    }

     * @deprecated use an RDFDataMgr operation with argument Lang.NTRIPLES
     */
    @Deprecated
    public static LangNTriples createParserNTriples(InputStream input, CharSpace charSpace, StreamRDF dest)
    {
        Tokenizer tokenizer = charSpace == CharSpace.ASCII ? TokenizerFactory.makeTokenizerASCII(input) : TokenizerFactory.makeTokenizerUTF8(input) ;
        return createParserNTriples(tokenizer, dest) ;
    }

     * @deprecated use an RDFDataMgr operation with argument Lang.NQUADS
     */
    @Deprecated
    public static LangNQuads createParserNQuads(InputStream input, CharSpace charSpace, StreamRDF dest)
    {
        Tokenizer tokenizer = charSpace == CharSpace.ASCII ? TokenizerFactory.makeTokenizerASCII(input) : TokenizerFactory.makeTokenizerUTF8(input) ;
        return createParserNQuads(tokenizer, dest) ;
    }
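
All four factory methods above are deprecated in favour of RDFDataMgr, as their javadoc notes say. The sketch below shows the suggested replacement for the Turtle case; the exact RDFDataMgr.parse overload (sink, input, base IRI, language) is an assumption about this Jena version's API, and ParseViaRDFDataMgr is an illustrative name.

    import java.io.InputStream ;

    import org.apache.jena.riot.Lang ;
    import org.apache.jena.riot.RDFDataMgr ;
    import org.apache.jena.riot.system.StreamRDF ;

    public class ParseViaRDFDataMgr
    {
        public static void parseTurtle(InputStream input, String baseIRI, StreamRDF dest)
        {
            // RDFDataMgr chooses the tokenizer and language parser internally and
            // pushes the resulting triples into the supplied StreamRDF.
            // NOTE: this overload is assumed; check the RDFDataMgr javadoc of your version.
            RDFDataMgr.parse(dest, input, baseIRI, Lang.TURTLE) ;
        }
    }

Passing Lang.TRIG, Lang.NTRIPLES or Lang.NQUADS instead covers the other three factories.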

        totalTuples += n ;
    }
   
    protected Tokenizer makeTokenizer(InputStream in)
    {
        Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in) ;
        return tokenizer ;
    }

            }
           
            @Override
            public Iterator<Triple> createDeserializer(InputStream in)
            {
                Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in) ;
                ParserProfileBase profile = new ParserProfileBase(new Prologue(null, IRIResolver.createNoResolve()), null, LabelToNode.createUseLabelEncoded()) ;
                LangNTriples parser = new LangNTriples(tokenizer, profile, null) ;
                return parser ;
            }
           

            }
           
            @Override
            public Iterator<Quad> createDeserializer(InputStream in)
            {
                Tokenizer tokenizer = TokenizerFactory.makeTokenizerASCII(in) ;
                ParserProfileBase profile = new ParserProfileBase(new Prologue(null, IRIResolver.createNoResolve()), null, LabelToNode.createUseLabelEncoded()) ;
                LangNQuads parser = new LangNQuads(tokenizer, profile, null) ;
                return parser ;
            }
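
Unlike the StreamRDF-based factories earlier on this page, these two deserializers are pull-driven: the LangNTriples and LangNQuads parsers are handed back as plain java.util.Iterator objects and read from the tokenizer as the caller advances them. A minimal sketch of draining such an iterator (DeserializerDemo and its drain helper are illustrative, not part of Jena):

    import java.util.Iterator ;

    public class DeserializerDemo
    {
        // Drain a tuple iterator, e.g. the Iterator<Triple> or Iterator<Quad>
        // returned by the createDeserializer(...) factories above, counting the items.
        public static <T> long drain(Iterator<T> tuples)
        {
            long count = 0 ;
            while ( tuples.hasNext() )
            {
                tuples.next() ;
                count++ ;
            }
            return count ;
        }
    }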
           

            System.exit(1) ;
        }
        for ( String filename : args )
        {
            InputStream in = IO.openFile(filename) ;
            Tokenizer tokenize = TokenizerFactory.makeTokenizerUTF8(in) ;
            Timer timer = new Timer() ;
            long count = 0 ;
            timer.startTimer() ;
            for ( ; tokenize.hasNext() ; )
            {
                Token t = tokenize.next() ;
                if ( print )
                    System.out.println(t) ;
                count++ ;
            }
            tokenize.close();
            long millis = timer.endTimer() ;
            if ( timing )
            {
                if ( millis == 0 )
                    System.out.printf("Tokens=%,d : Time=0.00s\n", count) ;

    private static Graph parse(String ...strings)
    {
        String string = StrUtils.strjoin("\n", strings) ;
        Reader reader = new StringReader(string) ;
        String baseIRI = "http://base/" ;
        Tokenizer tokenizer = TokenizerFactory.makeTokenizer(reader) ;
       
        Graph graph = GraphFactory.createDefaultGraph() ;
        StreamRDF sink = StreamRDFLib.graph(graph) ;
        LangTurtle parser = RiotReader.createParserTurtle(tokenizer, "http://base/", sink) ;
        parser.getProfile().setHandler(new ErrorHandlerEx()) ;
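
The excerpt above is clipped before the parser is actually run. Below is a hedged, self-contained sketch of the full string-to-Graph round trip it implements; the final parse() call is added by analogy with the N-Triples counting example that follows, and the import package layout (pre-Jena-3, com.hp.hpl.jena for the graph classes) is an assumption based on the era of these excerpts.

    import java.io.Reader ;
    import java.io.StringReader ;

    import org.apache.jena.riot.RiotReader ;
    import org.apache.jena.riot.lang.LangTurtle ;
    import org.apache.jena.riot.system.StreamRDF ;
    import org.apache.jena.riot.system.StreamRDFLib ;
    import org.apache.jena.riot.tokens.Tokenizer ;
    import org.apache.jena.riot.tokens.TokenizerFactory ;

    import com.hp.hpl.jena.graph.Graph ;
    import com.hp.hpl.jena.sparql.graph.GraphFactory ;

    public class ParseTurtleString
    {
        // Parse a Turtle document held in a string into a fresh in-memory Graph.
        public static Graph parse(String turtle, String baseIRI)
        {
            Reader reader = new StringReader(turtle) ;
            Tokenizer tokenizer = TokenizerFactory.makeTokenizer(reader) ;

            Graph graph = GraphFactory.createDefaultGraph() ;
            StreamRDF sink = StreamRDFLib.graph(graph) ;

            LangTurtle parser = RiotReader.createParserTurtle(tokenizer, baseIRI, sink) ;
            parser.parse() ;        // run the parser; triples stream into the graph
            return graph ;
        }
    }

For example, parse("<http://example/s> <http://example/p> <http://example/o> .", "http://base/") would return a graph containing that single triple.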

   
    @Override
    protected long parseCount(String... strings)
    {
        String string = StrUtils.strjoin("\n", strings) ;
        Tokenizer tokenizer = tokenizer(string) ;
        RDFParserOutputCounting sink = StreamRDFLib.count() ;
        LangNTriples x = RiotReader.createParserNTriples(tokenizer, sink) ;
        x.getProfile().setHandler(new ErrorHandlerEx()) ;
        x.parse() ;
        return sink.count() ;
