Package org.apache.commons.lang.time

Examples of org.apache.commons.lang.time.StopWatch
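StopWatch is a simple timer for ad-hoc performance measurement: start() begins timing, stop() freezes the elapsed time, getTime() reports it in milliseconds, reset() returns the watch to its unstarted state (required before it can be started again), and toString() renders the elapsed time as a formatted duration, which is why the snippets below concatenate the watch directly into log messages. As a baseline, here is a minimal, self-contained sketch of the lifecycle (the class name and timings are illustrative, not taken from the examples):

    import org.apache.commons.lang.time.StopWatch;

    public class StopWatchBasics
    {
        public static void main( String[] args ) throws InterruptedException
        {
            StopWatch sw = new StopWatch();

            sw.start();                 // begin timing
            Thread.sleep( 100 );        // stand-in for the work being measured
            sw.stop();                  // freeze the elapsed time

            System.out.println( "elapsed: " + sw.getTime() + " ms (" + sw + ")" );

            sw.reset();                 // back to the unstarted state
            sw.start();                 // legal again only because of the reset() above
            Thread.sleep( 50 );
            sw.stop();
            System.out.println( "second run: " + sw );
        }
    }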


    public void testCache()
        throws Exception
    {
        m_engine.saveText( "TestPage", TEST_TEXT );
       
        StopWatch sw = new StopWatch();
       
        System.out.println("DOM cache speed test:");
        sw.start();
       
        for( int i = 0; i < 100; i++ )
        {
            WikiPage page = m_engine.getPage( "TestPage" );
            String pagedata = m_engine.getPureText( page );
           
            WikiContext context = new WikiContext( m_engine, page );
           
            MarkupParser p = m_manager.getParser( context, pagedata );
           
            WikiDocument d = p.parse();
           
            String html = m_manager.getHTML( context, d );
            assertNotNull( "noncached got null response",html);
        }
       
        sw.stop();
        System.out.println("  Nocache took "+sw);

        long nocachetime = sw.getTime();
       
        sw.reset();
        sw.start();
       
        for( int i = 0; i < 100; i++ )
        {
            WikiPage page = m_engine.getPage( "TestPage" );
            String pagedata = m_engine.getPureText( page );
           
            WikiContext context = new WikiContext( m_engine, page );
           
            String html = m_manager.getHTML( context, pagedata );
           
            assertNotNull("cached got null response",html);
        }
       
        sw.stop();
        System.out.println("  Cache took "+sw);
       
        long speedup = nocachetime / sw.getTime();
        System.out.println("  Approx speedup: "+speedup+"x");
    }
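One caveat in the arithmetic above: getTime() returns whole milliseconds and the division is integer division, so a cached loop that finishes in under a millisecond would make nocachetime / sw.getTime() throw an ArithmeticException. A hedged guard, written against the variable names of the test above:

        long cachetime = Math.max( 1, sw.getTime() );   // avoid dividing by zero on sub-millisecond runs
        long speedup   = nocachetime / cachetime;
        System.out.println( "  Approx speedup: " + speedup + "x" );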



  @Override
  public void run() {
    StopWatch watch = new StopWatch();
    watch.start();
    logger.debug("clean up expired or over maxOnlineTime session start ...");
    Calendar calendar = Calendar.getInstance();
    @SuppressWarnings("unchecked")
    OqlBuilder<Sessioninfo> builder = OqlBuilder.from(registry.getSessioninfoBuilder()
        .getSessioninfoClass(), "info");
    builder.where("info.serverName=:server and info.lastAccessAt<:givenTime", registry.getController()
        .getServerName(), DateUtils.rollMinutes(calendar.getTime(), -expiredTime));
    List<Sessioninfo> activities = entityDao.search(builder);
    int removed = 0;
    for (Sessioninfo activity : activities) {
      registry.remove(activity.getId());
      removed++;
    }
    if (removed > 0 || watch.getTime() > 50) {
      logger.info("removed {} expired sessions in {} ms", removed, watch.getTime());
    }
    registry.getController().stat();
  }
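Note that run() never calls watch.stop(); that works because getTime() may be invoked on a running StopWatch and reports the time elapsed so far. The guard then keeps routine no-op sweeps out of the log: a line is written only when sessions were actually removed or the sweep took longer than 50 ms. A self-contained sketch of that idiom (the class name and threshold are illustrative):

    import org.apache.commons.lang.time.StopWatch;

    public class SlowRunLogger
    {
        private static final long SLOW_THRESHOLD_MS = 50;  // illustrative threshold

        public static void main( String[] args ) throws InterruptedException
        {
            StopWatch watch = new StopWatch();
            watch.start();

            Thread.sleep( 75 );                 // stand-in for the real work

            long elapsed = watch.getTime();     // legal while the watch is still running
            if( elapsed > SLOW_THRESHOLD_MS )   // only report noteworthy runs
            {
                System.out.println( "work took " + elapsed + " ms" );
            }
        }
    }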

   
    public void doPropFind( HttpServletRequest req, HttpServletResponse res )
        throws IOException, ServletException
    {
        StopWatch sw = new StopWatch();
        sw.start();
       
        //  Undo the container's ISO-8859-1 URL decoding and reinterpret
        //  the raw path bytes as UTF-8 (the "sanitize url" trick).
        String p = new String( req.getPathInfo().getBytes("ISO-8859-1"), "UTF-8" );
       
        DavPath path = new DavPath( p );
        if( path.isRoot() )
        {
            DavMethod dm = new PropFindMethod( m_rootProvider );
            dm.execute( req, res, path );
        }
        else
        {
            String context = path.get(0);
           
            PropFindMethod m = new PropFindMethod( pickProvider(context) );
            m.execute( req, res, path.subPath(1) );
        }
       
        sw.stop();
       
        log.debug("Propfind done for path "+path+", took "+sw);
    }
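Here the watch brackets an entire WebDAV PROPFIND request, and because StopWatch.toString() yields a formatted duration, the instance is concatenated straight into the debug message once the request has been dispatched to the appropriate provider.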

    /**
     *  Initializes the reference manager: scans the given pages for
     *  cross references, reusing a serialized cache from the working
     *  directory when one is available.
     *
     *  @param pages A Collection of all the WikiPages to be scanned
     *  @throws ProviderException If the page provider fails
     */
    public void initialize( Collection pages )
        throws ProviderException
    {
        log.debug( "Initializing new ReferenceManager with "+pages.size()+" initial pages." );
        StopWatch sw = new StopWatch();
        sw.start();
        log.info( "Starting cross reference scan of WikiPages" );

        //
        //  First, try to unserialize old data from disk.  If that fails,
        //  we'll go and rebuild the entire reference lists (which will
        //  take time).
        //
        try
        {
            //
            //  Unserialize things.  The loop below cannot be combined with
            //  the other loop below, simply because engine.getPage() has
            //  side effects, such as initializing the user databases,
            //  which in turn want all of the pages to be read already...
            //
            //  Yes, this is a kludge.  We know.  Will be fixed.
            //
            long saved = unserializeFromDisk();

            for( Iterator it = pages.iterator(); it.hasNext(); )
            {
                WikiPage page = (WikiPage) it.next();

                unserializeAttrsFromDisk( page );
            }

            //
            //  Now we must check if any of the pages have been changed
            //  while we were in the electronic la-la-land, and update
            //  the references for them.
            //

            Iterator it = pages.iterator();

            while( it.hasNext() )
            {
                WikiPage page = (WikiPage) it.next();

                if( page instanceof Attachment )
                {
                    // Skip attachments
                }
                else
                {

                    // Refresh with the latest copy
                    page = m_engine.getPage( page.getName() );

                    if( page.getLastModified() == null )
                    {
                        log.fatal( "Provider returns null lastModified.  Please submit a bug report." );
                    }
                    else if( page.getLastModified().getTime() > saved )
                    {
                        updatePageReferences( page );
                    }
                }
            }

        }
        catch( Exception e )
        {
            log.info("Unable to unserialize old refmgr information, rebuilding database: "+e.getMessage());
            buildKeyLists( pages );

            // Scan the existing pages from disk and update references in the manager.
            Iterator it = pages.iterator();
            while( it.hasNext() )
            {
                WikiPage page  = (WikiPage)it.next();

                if( page instanceof Attachment )
                {
                    // We cannot build a reference list from the contents
                    // of attachments, so we skip them.
                }
                else
                {
                    updatePageReferences( page );

                    serializeAttrsToDisk( page );
                }

            }

            serializeToDisk();
        }

        sw.stop();
        log.info( "Cross reference scan done in "+sw );

        WikiEventUtils.addWikiEventListener(m_engine.getPageManager(),
                                            WikiPageEvent.PAGE_DELETED, this);
    }
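The shape of initialize() is worth spelling out: the try block restores the reference maps and per-page attributes from a serialized cache and re-scans only the pages modified after the cache's timestamp (the saved value); if anything about the cache is unusable, the catch block falls back to a full rebuild and writes a fresh cache. The StopWatch spans the whole method, so the "Cross reference scan done" line reflects whichever path was taken.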

    private synchronized long unserializeFromDisk()
        throws IOException, ClassNotFoundException
    {
        ObjectInputStream in = null;
        long saved = 0L;

        try
        {
            StopWatch sw = new StopWatch();
            sw.start();

            File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );

            in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) );

            long ver     = in.readLong();

            if( ver != serialVersionUID )
            {
                throw new IOException("File format has changed; I need to recalculate references.");
            }

            saved        = in.readLong();
            m_refersTo   = (Map) in.readObject();
            m_referredBy = (Map) in.readObject();

            in.close();

            m_unmutableReferredBy = Collections.unmodifiableMap( m_referredBy );
            m_unmutableRefersTo   = Collections.unmodifiableMap( m_refersTo );

            sw.stop();
            log.debug("Read serialized data successfully in "+sw);
        }
        finally
        {
            if( in != null ) in.close();
        }

        return saved;
    }
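The two longs at the head of the stream act as a header: the first must match serialVersionUID or the file is rejected as being in a stale format, and the second is the timestamp recorded when the cache was written, returned as saved so that initialize() can detect pages modified since then.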

    private synchronized void serializeToDisk()
    {
        ObjectOutputStream out = null;

        try
        {
            StopWatch sw = new StopWatch();
            sw.start();

            File f = new File( m_engine.getWorkDir(), SERIALIZATION_FILE );

            out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) );

            out.writeLong( serialVersionUID );
            out.writeLong( System.currentTimeMillis() ); // Timestamp
            out.writeObject( m_refersTo );
            out.writeObject( m_referredBy );

            out.close();

            sw.stop();

            log.debug("serialization done - took "+sw);
        }
        catch( IOException e )
        {
            log.error( "Unable to serialize!" );
        }
    }
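This is the writer side of the same cache: version id, timestamp, then the two reference maps, matching the reads in unserializeFromDisk() field for field.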

    private synchronized long unserializeAttrsFromDisk( WikiPage p )
        throws IOException, ClassNotFoundException
    {
        ObjectInputStream in = null;
        long saved = 0L;

        try
        {
            StopWatch sw = new StopWatch();
            sw.start();

            //
            //  Find attribute cache, and check if it exists
            //
            File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );

            f = new File( f, getHashFileName(p.getName()) );

            if( !f.exists() )
            {
                return 0L;
            }

            log.debug("Deserializing attributes for "+p.getName());

            in = new ObjectInputStream( new BufferedInputStream(new FileInputStream(f)) );

            long ver     = in.readLong();

            if( ver != serialVersionUID )
            {
                log.debug("File format has changed; cannot deserialize.");
                return 0L;
            }

            saved        = in.readLong();

            String name  = in.readUTF();

            if( !name.equals(p.getName()) )
            {
                log.debug("File name does not match ("+name+"), skipping...");
                return 0L; // Not here
            }

            long entries = in.readLong();

            for( int i = 0; i < entries; i++ )
            {
                String key   = in.readUTF();
                Object value = in.readObject();

                p.setAttribute( key, value );

                log.debug("   attr: "+key+"="+value);
            }

            in.close();

            sw.stop();
            log.debug("Read serialized data for "+name+" successfully in "+sw);
            p.setHasMetadata();
        }
        catch( NoSuchAlgorithmException e )
        {
            log.fatal( "No MD5 algorithm!?!" );
        }
        finally
        {
            if( in != null ) in.close();
        }

        return saved;
    }
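Per-page attribute caches live as separate files under SERIALIZATION_DIR, named by a hash of the page name; the "No MD5 algorithm" handling in this class suggests getHashFileName() is backed by an MD5 MessageDigest. Every failure mode here (missing file, format change, name mismatch) returns 0L, which the caller treats as "no usable cache" for that page.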

    /**
     *  Serializes hashmaps to disk.  The format is private; don't touch it.
     */
    private synchronized void serializeAttrsToDisk( WikiPage p )
    {
        ObjectOutputStream out = null;
        StopWatch sw = new StopWatch();
        sw.start();

        try
        {
            File f = new File( m_engine.getWorkDir(), SERIALIZATION_DIR );

            if( !f.exists() ) f.mkdirs();

            //
            //  Create a digest for the name
            //
            f = new File( f, getHashFileName(p.getName()) );

            // FIXME: There is a concurrency issue here...
            Set entries = p.getAttributes().entrySet();

            if( entries.size() == 0 )
            {
                //  Nothing to serialize, so we remove the serialization
                //  file to avoid deserializing stale data on the next boot.
                f.delete();
                return;
            }

            out = new ObjectOutputStream( new BufferedOutputStream(new FileOutputStream(f)) );

            out.writeLong( serialVersionUID );
            out.writeLong( System.currentTimeMillis() ); // Timestamp

            out.writeUTF( p.getName() );
            out.writeLong( entries.size() );

            for( Iterator i = entries.iterator(); i.hasNext(); )
            {
                Map.Entry e = (Map.Entry) i.next();

                if( e.getValue() instanceof Serializable )
                {
                    out.writeUTF( (String)e.getKey() );
                    out.writeObject( e.getValue() );
                }
            }

            out.close();

        }
        catch( IOException e )
        {
            log.error("Unable to serialize!");

            try
            {
                if( out != null ) out.close();
            }
            catch( IOException ex ) {}
        }
        catch( NoSuchAlgorithmException e )
        {
            log.fatal("No MD5 algorithm!?!");
        }
        finally
        {
            sw.stop();

            log.debug("serialization for "+p.getName()+" done - took "+sw);
        }
    }
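One wrinkle in this writer: out.writeLong( entries.size() ) records the total number of attributes, but the loop only writes entries whose values are Serializable. If a page carried a non-serializable attribute, the reader above, which loops for exactly that many key/value pairs, would run off the end of the stream. A hedged fix is to collect the serializable entries before writing the count (a sketch against the code above, not JSPWiki's actual fix):

            //  Collect only the serializable entries so the count written
            //  to the header matches the key/value pairs that follow.
            List serializable = new ArrayList();
            for( Iterator i = entries.iterator(); i.hasNext(); )
            {
                Map.Entry e = (Map.Entry) i.next();
                if( e.getValue() instanceof Serializable ) serializable.add( e );
            }

            out.writeLong( serializable.size() );

            for( Iterator i = serializable.iterator(); i.hasNext(); )
            {
                Map.Entry e = (Map.Entry) i.next();
                out.writeUTF( (String) e.getKey() );
                out.writeObject( e.getValue() );
            }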

        /**
         *  @param wikiName the name to base the suggestions on
         *  @param maxLength maximum number of suggestions
         *  @return the suggestions
         */
        public List getSuggestions( String wikiName, int maxLength )
        {
            StopWatch sw = new StopWatch();
            sw.start();
            List<String> list = new ArrayList<String>(maxLength);

            if( wikiName.length() > 0 )
            {
               
                // split pagename and attachment filename
                String filename = "";
                int pos = wikiName.indexOf("/");
                if( pos >= 0 )
                {
                    filename = wikiName.substring( pos ).toLowerCase();
                    wikiName = wikiName.substring( 0, pos );
                }
               
                String cleanWikiName = MarkupParser.cleanLink(wikiName).toLowerCase() + filename;

                String oldStyleName = MarkupParser.wikifyLink(wikiName).toLowerCase() + filename;

                Set allPages = m_engine.getReferenceManager().findCreated();

                int counter = 0;
                for( Iterator i = allPages.iterator(); i.hasNext() && counter < maxLength; )
                {
                    String p = (String) i.next();
                    String pp = p.toLowerCase();
                    if( pp.startsWith( cleanWikiName) || pp.startsWith( oldStyleName ) )
                    {
                        list.add( p );
                        counter++;
                    }
                }
            }

            sw.stop();
            if( log.isDebugEnabled() ) log.debug("Suggestion request for "+wikiName+" done in "+sw);
            return list;
        }
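getSuggestions() times a simple prefix scan: the query is split into a page name and an optional attachment suffix, normalized through both cleanLink() and the older wikifyLink() convention, and matched case-insensitively against every created page until maxLength hits are collected. The isDebugEnabled() guard is a useful companion habit: it avoids building the concatenated log string, StopWatch included, when debug logging is off.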

        /**
         *  @param searchString the string to search for
         *  @param maxLength How many hits to return
         *  @return the pages found
         */
        public List findPages( String searchString, int maxLength )
        {
            StopWatch sw = new StopWatch();
            sw.start();

            List<HashMap> list = new ArrayList<HashMap>(maxLength);

            if( searchString.length() > 0 )
            {
                try
                {
                    Collection c;

                    if( m_searchProvider instanceof LuceneSearchProvider )
                        c = ((LuceneSearchProvider)m_searchProvider).findPages( searchString, 0 );
                    else
                        c = m_searchProvider.findPages( searchString );

                    int count = 0;
                    for( Iterator i = c.iterator(); i.hasNext() && count < maxLength; count++ )
                    {
                        SearchResult sr = (SearchResult)i.next();
                        HashMap<String,Object> hm = new HashMap<String,Object>();
                        hm.put( "page", sr.getPage().getName() );
                        hm.put( "score", sr.getScore() );
                        list.add( hm );
                    }
                }
                catch(Exception e)
                {
                    log.info("AJAX search failed; ",e);
                }
            }

            sw.stop();
            if( log.isDebugEnabled() ) log.debug("AJAX search complete in "+sw);
            return list;
        }
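findPages() follows the same template for AJAX search: the instanceof check routes Lucene-backed installations to a provider-specific overload that takes an additional int argument (0 here), the results are truncated to maxLength and flattened into page/score maps for the client, and the elapsed time is again logged only at debug level.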
