Package net.yacy.search

Examples of net.yacy.search.Switchboard
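All of the snippets below follow the same YaCy servlet convention: the HTTP frontend calls a static respond(RequestHeader, serverObjects, serverSwitch) method and substitutes the returned key/value pairs into the servlet's HTML template, and the serverSwitch argument is always the global Switchboard instance, which each servlet downcasts first. A minimal sketch of that pattern (class name and template keys are illustrative; import paths follow the source tree these snippets come from):

import net.yacy.cora.protocol.RequestHeader;
import net.yacy.search.Switchboard;

import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

public class example {

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;       // env is always the Switchboard singleton
        final serverObjects prop = new serverObjects(); // accumulates template replacements
        // post is null when the page is requested without any parameters
        final String query = (post == null) ? "" : post.get("query", "");
        prop.putHTML("query", query);                   // HTML-escaped before insertion
        prop.put("port", sb.getConfig("port", "8090")); // read a Switchboard config value
        return prop;                                    // each key fills a placeholder in example.html
    }
}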


// this is a dummy class. Without it, the templates in interactivesearch.html do not load

public class yacyinteractive {

    public static serverObjects respond(final RequestHeader header, serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;
        final serverObjects prop = new serverObjects();
        prop.put("topmenu", sb.getConfigBool("publicTopmenu", true) ? 1 : 0);
        final String promoteSearchPageGreeting =
                (env.getConfigBool(SwitchboardConstants.GREETING_NETWORK_NAME, false)) ?
                    env.getConfig("network.unit.description", "") :
                    env.getConfig(SwitchboardConstants.GREETING, "");
        prop.put("promoteSearchPageGreeting", promoteSearchPageGreeting);
        prop.put("promoteSearchPageGreeting.homepage", sb.getConfig(SwitchboardConstants.GREETING_HOMEPAGE, ""));
        prop.put("promoteSearchPageGreeting.smallImage", sb.getConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, ""));

        final String query = (post == null) ? "" : post.get("query", "");
        final String startRecord = (post == null) ? "0" : post.get("startRecord", "0");
        final String maximumRecords = (post == null) ? "1000" : post.get("maximumRecords", "1000");
        prop.putHTML("query", query);
// … (snippet truncated)
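A detail worth noting in this snippet: prop.put stores a value verbatim, while prop.putHTML escapes it first, so a user-supplied query echoed back into the page cannot inject markup. A two-line illustration (the values are hypothetical):

        prop.put("topmenu", 1);               // server-generated value, stored raw
        prop.putHTML("query", "<b>term</b>"); // stored as &lt;b&gt;term&lt;/b&gt;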


    // tail of a date-formatting helper; signature reconstructed from context
    // (the method name daydate is an assumption, following the usual YaCy servlet pattern)
    private static String daydate(final Date date) {
        return dayFormatter.format(date);
    }
   
    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        // return variable that accumulates replacements
        final Switchboard sb = (Switchboard) env;
        final serverObjects prop = new serverObjects();
        int showLimit = 100;
        if (post != null) {
            showLimit = post.getInt("limit", 100);
           
            if (post.containsKey("clearcrawlqueue")) {
                final int c = sb.crawlQueues.noticeURL.stackSize(NoticedURL.StackType.LIMIT);
                sb.crawlQueues.noticeURL.clear(NoticedURL.StackType.LIMIT);
                try { sb.cleanProfiles(); } catch (final InterruptedException e) { /* Ignore this */}
                /*
                int c = 0;
                while (switchboard.urlPool.noticeURL.stackSize(plasmaCrawlNURL.StackType.LIMIT) > 0) {
                    urlHash = switchboard.urlPool.noticeURL.pop(plasmaCrawlNURL.StackType.LIMIT).hash();
                    if (urlHash != null) { switchboard.urlPool.noticeURL.remove(urlHash); c++; }
// … (snippet truncated)
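Note how the clearcrawlqueue branch captures the stack size before calling clear(), so the number of removed entries can still be reported afterwards; the commented-out block below it preserves the older implementation, which popped and removed queue entries one at a time.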

    private final static int CONCURRENT_RUNNER = 100;

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {

        final serverObjects prop = new serverObjects();
        final Switchboard sb = (Switchboard)env;

        // clean up all search events
        SearchEventCache.cleanupEvents(true);

        prop.put("noserverdetected", 0);
        prop.put("hosts", "");
        prop.put("intranet.checked", sb.isIntranetMode() ? 1 : 0);

        int timeout = sb.isIntranetMode() ? 200 : 3000;
        timeout = post == null ? timeout : post.getInt("timeout", timeout);
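        // LAN hosts answer quickly, hence the much shorter default in intranet mode;
        // a "timeout" POST parameter overrides either default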

        // make a scanhosts entry
        String hosts = post == null ? "" : post.get("scanhosts", "");
        final Set<InetAddress> ips = Domains.myIntranetIPs();
        prop.put("intranethosts", ips.toString());
        prop.put("intranetHint", sb.isIntranetMode() ? 0 : 1);
        if (hosts.length() == 0) {
            InetAddress ip;
            if (sb.isIntranetMode()) {
                if (ips.size() > 0) ip = ips.iterator().next();
                else ip = Domains.dnsResolve("192.168.0.1");
            } else {
                ip = Domains.myPublicLocalIP();
                if (Domains.isThisHostIP(ip)) ip = sb.peers.mySeed().getInetAddress();
            }
            if (ip != null) hosts = ip.getHostAddress();
        }
        prop.put("scanhosts", hosts);

        // parse post requests
        if (post != null) {
            int repeat_time = 0;
            String repeat_unit = "seldays";
            long validTime = 0;

            // check scheduler
            if (post.get("rescan", "").equals("scheduler")) {
                repeat_time = post.getInt("repeat_time", -1);
                repeat_unit = post.get("repeat_unit", "selminutes"); // selminutes, selhours, seldays
                // long arithmetic so that large repeat intervals do not overflow int range
                if (repeat_unit.equals("selminutes")) validTime = repeat_time * 60L * 1000L;
                if (repeat_unit.equals("selhours")) validTime = repeat_time * 60L * 60L * 1000L;
                if (repeat_unit.equals("seldays")) validTime = repeat_time * 24L * 60L * 60L * 1000L;
            }

            final boolean bigrange = post.getBoolean("bigrange", false);

            // case: an IP range was given; scan the range for services and display result
            if (post.containsKey("scan") && "hosts".equals(post.get("source", ""))) {
                final Set<InetAddress> ia = new HashSet<InetAddress>();
                for (String host : hosts.split(",")) {
                    if (host.startsWith("http://")) host = host.substring(7);
                    if (host.startsWith("https://")) host = host.substring(8);
                    if (host.startsWith("ftp://")) host = host.substring(6);
                    if (host.startsWith("smb://")) host = host.substring(6);
                    final int p = host.indexOf('/',0);
                    if (p >= 0) host = host.substring(0, p);
                    final InetAddress a = Domains.dnsResolve(host);
                    if (a != null) ia.add(a); // dnsResolve returns null for unresolvable hosts
                }
                final Scanner scanner = new Scanner(ia, CONCURRENT_RUNNER, timeout);
                if (post.get("scanftp", "").equals("on")) scanner.addFTP(bigrange);
                if (post.get("scanhttp", "").equals("on")) scanner.addHTTP(bigrange);
                if (post.get("scanhttps", "").equals("on")) scanner.addHTTPS(bigrange);
                if (post.get("scansmb", "").equals("on")) scanner.addSMB(bigrange);
                scanner.start();
                scanner.terminate();
                if ("on".equals(post.get("accumulatescancache", "")) && !"scheduler".equals(post.get("rescan", ""))) {
                    Scanner.scancacheExtend(scanner, validTime);
                } else {
                    Scanner.scancacheReplace(scanner, validTime);
                }
            }

            if (post.containsKey("scan") && "intranet".equals(post.get("source", ""))) {
                final Scanner scanner = new Scanner(Domains.myIntranetIPs(), CONCURRENT_RUNNER, timeout);
                if ("on".equals(post.get("scanftp", ""))) scanner.addFTP(bigrange);
                if ("on".equals(post.get("scanhttp", ""))) scanner.addHTTP(bigrange);
                if ("on".equals(post.get("scanhttps", ""))) scanner.addHTTPS(bigrange);
                if ("on".equals(post.get("scansmb", ""))) scanner.addSMB(bigrange);
                scanner.start();
                scanner.terminate();
                if ("on".equals(post.get("accumulatescancache", "")) && !"scheduler".equals(post.get("rescan", ""))) {
                    Scanner.scancacheExtend(scanner, validTime);
                } else {
                    Scanner.scancacheReplace(scanner, validTime);
                }
            }

            // check crawl request
            if (post.containsKey("crawl")) {
                // make a pk/url mapping
                final Iterator<Map.Entry<Scanner.Service, Scanner.Access>> se = Scanner.scancacheEntries();
                final Map<byte[], DigestURI> pkmap = new TreeMap<byte[], DigestURI>(Base64Order.enhancedCoder);
                while (se.hasNext()) {
                    final Scanner.Service u = se.next().getKey();
                    DigestURI uu;
                    try {
                        uu = new DigestURI(u.url());
                        pkmap.put(uu.hash(), uu);
                    } catch (final MalformedURLException e) {
                        Log.logException(e);
                    }
                }
                // search for crawl start requests in this mapping
                for (final Map.Entry<String, String> entry: post.entrySet()) {
                    if (entry.getValue().startsWith("mark_")) {
                        final byte[] pk = UTF8.getBytes(entry.getValue().substring(5)); // explicit charset instead of the platform default
                        final DigestURI url = pkmap.get(pk);
                        if (url != null) {
                            String path = "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
                            path += "&crawlingURL=" + url.toNormalform(true, false);
                            WorkTables.execAPICall("localhost", (int) sb.getConfigLong("port", 8090), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), path, pk);
                        }
                    }
                }
            }

            // check scheduler
            if ("scheduler".equals(post.get("rescan", ""))) {

                // store this call as api call
                if (repeat_time > 0) {
                    // store as scheduled api call
                    sb.tables.recordAPICall(post, "CrawlStartScanner_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "network scanner for hosts: " + hosts, repeat_time, repeat_unit.substring(3));
                }

                // execute the scan results
                if (Scanner.scancacheSize() > 0) {
                    // make a comment cache
                    final Map<byte[], String> apiCommentCache = WorkTables.commentCache(sb);

                    String urlString;
                    DigestURI u;
                    try {
                        final Iterator<Map.Entry<Scanner.Service, Scanner.Access>> se = Scanner.scancacheEntries();
                        Map.Entry<Scanner.Service, Scanner.Access> host;
                        while (se.hasNext()) {
                            host = se.next();
                            try {
                                u = new DigestURI(host.getKey().url());
                                urlString = u.toNormalform(true, false);
                                if (host.getValue() == Access.granted && Scanner.inIndex(apiCommentCache, urlString) == null) {
                                    String path = "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
                                    path += "&crawlingURL=" + urlString;
                                    WorkTables.execAPICall("localhost", (int) sb.getConfigLong("port", 8090), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), path, u.hash());
                                }
                            } catch (final MalformedURLException e) {
                                Log.logException(e);
                            }
                        }
// … (snippet truncated)
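The two scan branches above (source "hosts" and source "intranet") differ only in the address set handed to the Scanner; the protocol toggles, the start/terminate sequence and the scan-cache update are duplicated verbatim. A possible extraction, sketched under the assumption that the Scanner API behaves exactly as used above (runScan is a hypothetical helper, not part of the YaCy source):

    private static void runScan(final Set<InetAddress> addresses, final int timeout,
            final serverObjects post, final long validTime) {
        final Scanner scanner = new Scanner(addresses, CONCURRENT_RUNNER, timeout);
        final boolean bigrange = post.getBoolean("bigrange", false);
        if ("on".equals(post.get("scanftp", ""))) scanner.addFTP(bigrange);
        if ("on".equals(post.get("scanhttp", ""))) scanner.addHTTP(bigrange);
        if ("on".equals(post.get("scanhttps", ""))) scanner.addHTTPS(bigrange);
        if ("on".equals(post.get("scansmb", ""))) scanner.addSMB(bigrange);
        scanner.start();
        scanner.terminate(); // wait for the concurrent scan runners to finish
        // "accumulate" extends the existing scan cache unless the call came from the scheduler
        if ("on".equals(post.get("accumulatescancache", "")) && !"scheduler".equals(post.get("rescan", ""))) {
            Scanner.scancacheExtend(scanner, validTime);
        } else {
            Scanner.scancacheReplace(scanner, validTime);
        }
    }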

import de.anomic.server.serverSwitch;

public class opensearchdescription {

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;
        // generate message content for open search description
        String promoteSearchPageGreeting = env.getConfig(SwitchboardConstants.GREETING, "");
        if (env.getConfigBool(SwitchboardConstants.GREETING_NETWORK_NAME, false)) promoteSearchPageGreeting = env.getConfig("network.unit.description", "");

        String thisaddress = header.get("Host", "127.0.0.1");
// … (snippet truncated)

    private static MetadataRepository.BlacklistCleaner urldbCleanerThread = null;
    private static Segment.ReferenceCleaner indexCleanerThread = null;

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final serverObjects prop = new serverObjects();
        final Switchboard sb = (Switchboard) env;
        prop.put("title", "DbCleanup_p");
       
        // get segment
        Segment indexSegment = null;
        if (post != null && post.containsKey("segment")) {
// … (snippet truncated)

import de.anomic.server.servletProperties;

public class ConfigRobotsTxt_p {
   
    public static servletProperties respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final Switchboard sb = (Switchboard) env;
        final servletProperties prop = new servletProperties();
       
        final RobotsTxtConfig rbc = sb.robotstxtConfig; // sb already holds the cast Switchboard
        prop.put("clientname", sb.peers.mySeed().getPublicAddress());
       
// … (snippet truncated)

public class IndexFederated_p {

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        // return variable that accumulates replacements
        final serverObjects prop = new serverObjects();
        final Switchboard sb = (Switchboard) env;

        if (post != null && post.containsKey("set")) {
            // yacy
            env.setConfig("federated.service.yacy.indexing.enabled", post.getBoolean("yacy.indexing.enabled", false));

            // solr
            final boolean solrWasOn = env.getConfigBool("federated.service.solr.indexing.enabled", true);
            final boolean solrIsOnAfterwards = post.getBoolean("solr.indexing.enabled", false);
            env.setConfig("federated.service.solr.indexing.enabled", solrIsOnAfterwards);
            String solrurls = post.get("solr.indexing.url", env.getConfig("federated.service.solr.indexing.url", "http://127.0.0.1:8983/solr"));
            final BufferedReader r = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(UTF8.getBytes(solrurls))));
            final StringBuilder s = new StringBuilder();
            String s0;
            try {
                while ((s0 = r.readLine()) != null) {
                    s0 = s0.trim();
                    if (s0.length() > 0) s.append(s0).append(',');
                }
            } catch (final IOException e1) {
                // cannot happen: the reader is backed by an in-memory byte array
            }
            if (s.length() > 0) s.setLength(s.length() - 1);
            solrurls = s.toString().trim();
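            // the reader loop above only normalizes the textarea input: one URL per
            // line in, a comma-separated list out; splitting on "\\r?\\n" would
            // achieve the same without the stream detour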
            env.setConfig("federated.service.solr.indexing.url", solrurls);
            env.setConfig("federated.service.solr.indexing.sharding", post.get("solr.indexing.sharding", env.getConfig("federated.service.solr.indexing.sharding", "modulo-host-md5")));
            final String schemename = post.get("solr.indexing.schemefile", env.getConfig("federated.service.solr.indexing.schemefile", "solr.keys.default.list"));
            env.setConfig("federated.service.solr.indexing.schemefile", schemename);

            if (solrWasOn) {
                // switch off
                sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).getSolr().close();
                sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).connectSolr(null);
            }

            final SolrScheme scheme = new SolrScheme(new File(env.getDataPath(), "DATA/SETTINGS/" + schemename));

            if (solrIsOnAfterwards) {
                // switch on
                final boolean usesolr = sb.getConfigBool("federated.service.solr.indexing.enabled", false) && solrurls.length() > 0;
                try {
                    sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).connectSolr((usesolr) ? new SolrShardingConnector(solrurls, scheme, SolrShardingSelection.Method.MODULO_HOST_MD5, 10000) : null);
                } catch (final IOException e) {
                    Log.logException(e);
                    sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).connectSolr(null);
                }
            }

            // read index scheme table flags
            final Iterator<ConfigurationSet.Entry> i = scheme.allIterator();
            ConfigurationSet.Entry entry;
            while (i.hasNext()) {
                entry = i.next();
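                // unchecked HTML checkboxes are simply absent from the POST data, so a
                // missing key means "disabled"; checked boxes arrive with the value "checked"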
                final String v = post.get("scheme_" + entry.key());
                final boolean c = v != null && v.equals("checked");
                try {
                    if (entry.enabled()) {
                        if (!c) scheme.disable(entry.key());
                    } else {
                        if (c) scheme.enable(entry.key());
                    }
                } catch (final IOException e) {
                    // leave the flag unchanged if the scheme file cannot be updated
                }
            }
        }

        // show solr host table
        if (sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).getSolr() == null) {
            prop.put("table", 0);
        } else {
            prop.put("table", 1);
            final SolrConnector solr = sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).getSolr();
            final long[] size = (solr instanceof SolrShardingConnector) ? ((SolrShardingConnector) solr).getSizeList() : new long[]{((SolrSingleConnector) solr).getSize()};
            final String[] urls = (solr instanceof SolrShardingConnector) ? ((SolrShardingConnector) solr).getAdminInterfaceList() : new String[]{((SolrSingleConnector) solr).getAdminInterface()};
            boolean dark = false;
            for (int i = 0; i < size.length; i++) {
                prop.put("table_list_" + i + "_dark", dark ? 1 : 0); dark = !dark;
                prop.put("table_list_" + i + "_url", urls[i]);
                prop.put("table_list_" + i + "_size", size[i]);
            }
            prop.put("table_list", size.length);
        }

        // write scheme
        SolrScheme scheme = (sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).getSolr() == null) ? null : sb.indexSegments.segment(Segments.Process.LOCALCRAWLING).getSolr().getScheme();
        final String schemename = sb.getConfig("federated.service.solr.indexing.schemefile", "solr.keys.default.list");
        if (scheme == null) {
            scheme = new SolrScheme(new File(env.getDataPath(), "DATA/SETTINGS/" + schemename));
        }
        final Iterator<ConfigurationSet.Entry> i = scheme.allIterator();
        int c = 0;
// … (snippet truncated)

public class ConfigLiveSearch {

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final serverObjects prop = new serverObjects();
        final Switchboard sb = (Switchboard) env;
       
        prop.putHTML("ip", sb.peers.mySeed().getIP());
        prop.putHTML("port", sb.getConfig("port", "8090"));
        return prop;
    }
// … (snippet truncated)

public class IndexImportMediawiki_p {

    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
        final serverObjects prop = new serverObjects();
        final Switchboard sb = (Switchboard) env;

        if (MediawikiImporter.job != null && MediawikiImporter.job.isAlive()) {
            // one import is running, no option to insert anything
            prop.put("import", 1);
            prop.put("import_thread", "running");
// … (snippet truncated)
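The static MediawikiImporter.job field acts as a single-instance guard here: while an import thread is alive, the servlet only reports that thread's progress instead of offering a form to start another dump import.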

public class cytag {
   
    public static Image respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
       
        final Switchboard sb = (Switchboard)env;
        final MultiProtocolURI referer = header.referer();
       
        // harvest request information
        StringBuilder connect = new StringBuilder();
        connect.append('{');
        appendJSON(connect, "time", GenericFormatter.SHORT_MILSEC_FORMATTER.format());
        appendJSON(connect, "trail", (referer == null) ? "" : referer.toNormalform(false, false));
        appendJSON(connect, "nick"(post == null) ? "" : post.get("nick", ""));
        appendJSON(connect, "tag",   (post == null) ? "" : post.get("tag", ""));
        appendJSON(connect, "icon"(post == null) ? "" : post.get("icon", ""));
        appendJSON(connect, "ip",    header.get(HeaderFramework.CONNECTION_PROP_CLIENTIP, ""));
        appendJSON(connect, "agent", header.get("User-Agent", ""));
        connect.append('}');
       
        // keep the visit trail bounded: evict the oldest record once 100 are stored
        if (sb.trail.size() >= 100) sb.trail.remove();
        sb.trail.add(connect.toString());
        //Log.logInfo("CYTAG", "caught trail - " + connect.toString());
       
        final String defaultimage;
        if (post != null && post.get("icon", "").equals("invisible")) {
            defaultimage = "invisible.png";
        } else {
            defaultimage = "redpillmini.png";
        }
        final File iconfile = new File(sb.getAppPath(), "/htroot/env/grafics/" + defaultimage);
       
        byte[] imgb = null;
        try {
            imgb = FileUtils.read(iconfile);
        } catch (final IOException e) {
// … (snippet truncated)
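The appendJSON helper used above is defined elsewhere in cytag.java and is not part of this snippet. A plausible shape, reconstructed purely for illustration (the actual YaCy implementation may differ):

    private static void appendJSON(final StringBuilder sb, final String key, final String value) {
        // the buffer already contains the opening '{'; separate all entries after the first
        if (sb.length() > 1) sb.append(',');
        sb.append('"').append(key).append("\":\"").append(value.replace('"', '\'')).append('"');
    }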
