/* (c) 2014 Open Source Geospatial Foundation - all rights reserved
* (c) 2001 - 2013 OpenPlans
* This code is licensed under the GPL 2.0 license, available at the root
* application directory.
*/
package org.geoserver.importer;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.thoughtworks.xstream.XStream;
import com.vividsolutions.jts.geom.Geometry;
import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.geoserver.catalog.Catalog;
import org.geoserver.catalog.CatalogBuilder;
import org.geoserver.catalog.CoverageInfo;
import org.geoserver.catalog.CoverageStoreInfo;
import org.geoserver.catalog.DataStoreInfo;
import org.geoserver.catalog.FeatureTypeInfo;
import org.geoserver.catalog.LayerInfo;
import org.geoserver.catalog.NamespaceInfo;
import org.geoserver.catalog.ProjectionPolicy;
import org.geoserver.catalog.ResourceInfo;
import org.geoserver.catalog.StoreInfo;
import org.geoserver.catalog.StyleInfo;
import org.geoserver.catalog.WorkspaceInfo;
import org.geoserver.config.util.XStreamPersister;
import org.geoserver.config.util.XStreamPersister.CRSConverter;
import org.geoserver.config.util.XStreamPersisterFactory;
import org.geoserver.platform.ContextLoadedEvent;
import org.geoserver.platform.GeoServerExtensions;
import org.geotools.data.DataStore;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureStore;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Transaction;
import org.geotools.data.directory.DirectoryDataStore;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geometry.GeneralEnvelope;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.jdbc.JDBCDataStore;
import org.geotools.referencing.CRS;
import org.geotools.util.logging.Logging;
import org.geoserver.importer.ImportTask.State;
import org.geoserver.importer.job.Job;
import org.geoserver.importer.job.JobQueue;
import org.geoserver.importer.job.ProgressMonitor;
import org.geoserver.importer.job.Task;
import org.geoserver.importer.mosaic.Mosaic;
import org.geoserver.importer.transform.RasterTransformChain;
import org.geoserver.importer.transform.ReprojectTransform;
import org.geoserver.importer.transform.TransformChain;
import org.geoserver.importer.transform.VectorTransformChain;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

import org.opengis.feature.type.FeatureType;
import org.opengis.filter.Filter;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;

/**
* Primary controller/facade of the import subsystem.
*
* @author Justin Deoliveira, OpenGeo
*
*/
public class Importer implements DisposableBean, ApplicationListener {

    static Logger LOGGER = Logging.getLogger(Importer.class);

    /** catalog */
    Catalog catalog;

    /** import context storage */
    ImportStore contextStore;

    /** style generator */
    StyleGenerator styleGen;

    /** job queue */
    JobQueue jobs = new JobQueue();
   
    ConcurrentHashMap<Long,ImportTask> currentlyProcessing = new ConcurrentHashMap<Long, ImportTask>();

    public Importer(Catalog catalog) {
        this.catalog = catalog;
        this.styleGen = new StyleGenerator(catalog);
    }
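
    /*
     * Illustrative usage sketch (not part of the original source; assumes the
     * context store has been initialized via ContextLoadedEvent and that
     * "/data/roads.shp" is a hypothetical shapefile path):
     *
     *   Importer importer = new Importer(catalog);
     *   ImportContext context = importer.createContext(
     *       new SpatialFile(new File("/data/roads.shp")));
     *   importer.run(context);
     */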

    ImportStore createContextStore() {
        // check the spring context for an import store
        ImportStore store = null;

        String name = GeoServerExtensions.getProperty("org.geoserver.importer.store");
        if (name == null) {
            //backward compatibility check
            name = GeoServerExtensions.getProperty("org.opengeo.importer.store");
        }

        if (name != null) {
            for (ImportStore bean : GeoServerExtensions.extensions(ImportStore.class)) {
                if (name.equals(bean.getName())) {
                    store = bean;
                    break;
                }
            }

            if (store == null) {
                LOGGER.warning("Invalid value for import store, no such store " + name);
            }
        }

        if (store == null) {
            store = new MemoryImportStore();
        }

        LOGGER.info("Enabling import store: " + store.getName());
        return store;
    }

    public ImportStore getStore() {
        return contextStore;
    }

    public ImportTask getCurrentlyProcessingTask(long contextId) {
        return currentlyProcessing.get(Long.valueOf(contextId));
    }

    @Override
    public void onApplicationEvent(ApplicationEvent event) {
        // load the context store here to avoid circular dependency on creation of the importer
        if (event instanceof ContextLoadedEvent) {
            contextStore = createContextStore();
            contextStore.init();
        }
    }

    public Catalog getCatalog() {
        return catalog;
    }

    public ImportContext getContext(long id) {
        ImportContext context = contextStore.get(id);
        return context != null ? reattach(context) : null;
    }
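
    /**
     * Reattaches a context loaded from the {@link ImportStore} to the live
     * catalog, resolving store, style, and resource references by id so they
     * point at fully initialized catalog objects.
     */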

    public ImportContext reattach(ImportContext context) {
        //reload store and workspace objects from catalog so they are "attached" with
        // the proper references to the catalog initialized
        context.reattach(catalog);
        for (ImportTask task : context.getTasks()) {
            StoreInfo store = task.getStore();
            if (store != null && store.getId() != null) {
                task.setStore(catalog.getStore(store.getId(), StoreInfo.class));
                //((StoreInfoImpl) task.getStore()).setCatalog(catalog); // @todo remove if the above sets catalog
            }
            if (task.getLayer() != null) {
                LayerInfo l = task.getLayer();
                if (l.getDefaultStyle() != null && l.getDefaultStyle().getId() != null) {
                    l.setDefaultStyle(catalog.getStyle(l.getDefaultStyle().getId()));
                }
                if (l.getResource() != null) {
                    ResourceInfo r = l.getResource();
                    r.setCatalog(catalog);

                    if (r.getStore() == null && resourceMatchesStore(r, store)) {
                        r.setStore(store);
                    }

                }
            }

        }
        return context;
    }


    public Iterator<ImportContext> getContexts() {
        return contextStore.allNonCompleteImports();
    }

    public Iterator<ImportContext> getContextsByUser(String user) {
        return contextStore.importsByUser(user);
    }
   
    public Iterator<ImportContext> getAllContexts() {
        return contextStore.iterator();
    }
   
    public Iterator<ImportContext> getAllContextsByUpdated() {
        try {
            return contextStore.iterator("updated");
        }
        catch(UnsupportedOperationException e) {
            //fallback
            TreeSet<ImportContext> sorted = new TreeSet<ImportContext>(new Comparator<ImportContext>() {
                @Override
                public int compare(ImportContext o1, ImportContext o2) {
                    Date d1 = o1.getUpdated();
                    Date d2 = o2.getUpdated();
                    return -1 * d1.compareTo(d2);
                }
            });
            Iterators.addAll(sorted, contextStore.iterator());
            return sorted.iterator();
        }
    }

    public ImportContext createContext(ImportData data, WorkspaceInfo targetWorkspace) throws IOException {
        return createContext(data, targetWorkspace, null);
    }

    public ImportContext createContext(ImportData data, StoreInfo targetStore) throws IOException {
        return createContext(data, null, targetStore);
    }

    public ImportContext createContext(ImportData data) throws IOException {
        return createContext(data, null, null);
    }
   
    /**
     * Create a context with the provided optional id.
     * The provided id must be higher than the current mark.
     * @param id optional id to use
     * @return Created ImportContext
     * @throws IOException
     * @throws IllegalArgumentException if the provided id is invalid
     */
    public ImportContext createContext(Long id) throws IOException, IllegalArgumentException {
        ImportContext context = new ImportContext();
        if (id != null) {
            Long retval = contextStore.advanceId(id);
            assert retval >= id;
            context.setId(retval);
            contextStore.save(context);
        } else {
            contextStore.add(context);
        }
        return context;
    }
   
    public ImportContext createContext(ImportData data, WorkspaceInfo targetWorkspace,
        StoreInfo targetStore) throws IOException {
        return createContext(data, targetWorkspace, targetStore, null);
    }

    public ImportContext createContext(ImportData data, WorkspaceInfo targetWorkspace,
            StoreInfo targetStore, ProgressMonitor monitor) throws IOException {

        ImportContext context = new ImportContext();
        context.setProgress(monitor);
        context.setData(data);

        if (targetWorkspace == null && targetStore != null) {
            targetWorkspace = targetStore.getWorkspace();
        }
        if (targetWorkspace == null) {
            targetWorkspace = catalog.getDefaultWorkspace();
        }
        context.setTargetWorkspace(targetWorkspace);
        context.setTargetStore(targetStore);

        init(context);
        if (!context.progress().isCanceled()) {
            contextStore.add(context);
        }
        //JD: don't think we really need to maintain these, and they aren't persisted
        //else {
        //    context.setState(ImportContext.State.CANCELLED);
        //}
        return context;
    }

    public Long createContextAsync(final ImportData data, final WorkspaceInfo targetWorkspace,
        final StoreInfo targetStore) throws IOException {
        return jobs.submit(new Job<ImportContext>() {
            @Override
            protected ImportContext call(ProgressMonitor monitor) throws Exception {
                return createContext(data, targetWorkspace, targetStore, monitor);
            }

            @Override
            public String toString() {
                return "Processing data " + data.toString();
            }
        });
    }

    public void init(ImportContext context) throws IOException {
        init(context, true);
    }
   
    public void init(ImportContext context, boolean prepData) throws IOException {
        context.reattach(catalog);

        ImportData data = context.getData();
        if (data != null) {
            addTasks(context, data, prepData);
        }
    }

    public List<ImportTask> update(ImportContext context, ImportData data) throws IOException {
        List<ImportTask> tasks = addTasks(context, data, true);
       
        //prep(context);
        changed(context);

        return tasks;
    }
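
    /**
     * Creates tasks for the given data and adds them to the context,
     * dispatching on the data type: mosaics, directories, and single files
     * each get their own handling, as do databases; table data is currently
     * unsupported and results in an IllegalStateException.
     */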

    List<ImportTask> addTasks(ImportContext context, ImportData data, boolean prepData) throws IOException {
        if (data == null) {
            return Collections.emptyList();
        }

        if (prepData) {
            data.prepare(context.progress());
        }

        if (data instanceof FileData) {
            if (data instanceof Mosaic) {
                return initForMosaic(context, (Mosaic)data);
            }
            else if (data instanceof Directory) {
                return initForDirectory(context, (Directory)data);
            }
            else {
                return initForFile(context, (FileData)data);
            }
        }
        else if (data instanceof Table) {
        }
        else if (data instanceof Database) {
            return initForDatabase(context, (Database)data);
        }

        throw new IllegalStateException("Unsupported data type: " + data.getClass().getName());
        /*for (ImportTask t : tasks) {
            context.addTask(t);
        }
        prep(context, prepData);
        return tasks;*/
    }

    /**
     * Initializes the import for a mosaic.
     * <p>
     * Mosaics only support direct import, so the context's target store must
     * be null.
     * </p>
     */
    List<ImportTask> initForMosaic(ImportContext context, Mosaic mosaic) throws IOException {

        if (context.getTargetStore() != null) {
            throw new IllegalArgumentException("ingest not supported for mosaics");
        }

        return createTasks(mosaic, context);
        //tasks.add(createTask(mosaic, context, context.getTargetStore()));
    }
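
    /**
     * Creates tasks for a directory of files: each (sub)directory is grouped
     * by format, formats that can read a whole directory get a single task
     * for their files, and any remaining files get one task each.
     */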

    List<ImportTask> initForDirectory(ImportContext context, Directory data) throws IOException {
        List<ImportTask> tasks = new ArrayList<ImportTask>();

        //flatten out the directory into itself and all subdirectories and process in order
        for (Directory dir : data.flatten()) {
            //ignore empty directories
            if (dir.getFiles().isEmpty()) continue;

            //group the contents of the directory by format
            Map<DataFormat,List<FileData>> map = new HashMap<DataFormat,List<FileData>>();
            for (FileData f : dir.getFiles()) {
                DataFormat format = f.getFormat();
                List<FileData> files = map.get(format);
                if (files == null) {
                    files = new ArrayList<FileData>();
                    map.put(format, files);
                }
                files.add(f);
            }
   
            //handle the case of importing a single file whose format is unknown; in this
            // case, rather than ignore it, we want to process it and set its state to "NO_FORMAT"
            boolean skipNoFormat = !(map.size() == 1 && map.containsKey(null));
           
            // if no target store specified group the directory into pieces that can be
            // processed as a single task
            StoreInfo targetStore = context.getTargetStore();
            if (targetStore == null) {
   
                //create a task for each "format" if that format can handle a directory
                for (DataFormat format: new ArrayList<DataFormat>(map.keySet())) {
                    if (format != null && format.canRead(dir)) {
                        List<FileData> files = map.get(format);
                        if (files.size() == 1) {
                            //use the file directly
                            //createTasks(files.get(0), format, context, null));
                            tasks.addAll(createTasks(files.get(0), format, context));
                        }
                        else {
                            tasks.addAll(createTasks(dir.filter(files), format, context));
                            //tasks.addAll(createTasks(dir.filter(files), format, context, null));
                        }
                       
                        map.remove(format);
                    }
                }
   
                //handle the leftovers, each file gets its own task
                for (List<FileData> files : map.values()) {
                    for (FileData file : files) {
                        //tasks.add(createTask(file, context, null));
                        tasks.addAll(createTasks(file, file.getFormat(), context, skipNoFormat));
                    }
                }

            }
            else {
                for (FileData file : dir.getFiles()) {
                    tasks.addAll(createTasks(file, file.getFormat(), context, skipNoFormat));
                }
            }
        }

        return tasks;
    }

    List<ImportTask> initForFile(ImportContext context, FileData file) throws IOException {
        return createTasks(file, context);
    }

    List<ImportTask> initForDatabase(ImportContext context, Database db) throws IOException {
        //JD: we check for direct vs non-direct in order to determine if there should be
        // one task with many items, or one task per table... can't think of the use case for
        // many tasks

        //tasks.add(createTask(db, context, targetStore));
        return createTasks(db, context);
    }
   
    List<ImportTask> createTasks(ImportData data, ImportContext context) throws IOException {
        return createTasks(data, data.getFormat(), context);
    }
   

    List<ImportTask> createTasks(ImportData data, DataFormat format, ImportContext context)
        throws IOException {
        return createTasks(data, format, context, true);
    }
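
    /**
     * Creates tasks for the given data, determining whether the import is
     * "direct" (the format creates a brand new store) or targets an existing
     * store (either the context's target store or the catalog's default one).
     */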

    List<ImportTask> createTasks(ImportData data, DataFormat format, ImportContext context,
        boolean skipNoFormat) throws IOException {

        List<ImportTask> tasks = new ArrayList<ImportTask>();

        boolean direct = false;

        StoreInfo targetStore = context.getTargetStore();
        if (targetStore == null) {
            //direct import, use the format to create a store
            direct = true;

            if (format != null) {
                targetStore = format.createStore(data, context.getTargetWorkspace(), catalog);   
            }
           
            if (targetStore == null) {
                //format unable to create store, switch to indirect import and use
                // default store from catalog
                targetStore = lookupDefaultStore();

                direct = targetStore == null;
            }
        }

        if (format != null) {
            // create the set of tasks by having the format list the available items
            // from the input data
            for (ImportTask t : format.list(data, catalog, context.progress())) {
                //initialize transform chain based on vector vs raster
                t.setTransform(format instanceof VectorFormat
                        ? new VectorTransformChain() : new RasterTransformChain());
                t.setDirect(direct);
                t.setStore(targetStore);

                prep(t);
                tasks.add(t);
            }
        }
        else if (!skipNoFormat) {
            ImportTask t = new ImportTask(data);
            t.setDirect(direct);
            t.setStore(targetStore);
            prep(t);
            tasks.add(t);
        }

        for (ImportTask t : tasks) {
            context.addTask(t);
        }
        return tasks;
    }
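
    /**
     * Prepares a task for execution, validating in order: a known format, a
     * target store, format/store compatibility, a layer configuration, a
     * default style (generated if missing), an SRS, and native bounds. The
     * task state is set to READY on success, or to the state describing the
     * first failed check.
     */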

    boolean prep(ImportTask task) {
        if (task.getState() == ImportTask.State.COMPLETE) {
            return true;
        }

        //check the format
        DataFormat format = task.getData().getFormat();
        if (format == null) {
            task.setState(State.NO_FORMAT);
            return false;
        }

       
        //check the target
        if (task.getStore() == null) {
            task.setError(new Exception("No target store for task"));
            task.setState(State.ERROR);
            return false;
        }

        //check for a mismatch between store and format
        if (!formatMatchesStore(format, task.getStore())) {
            String msg = task.getStore() instanceof DataStoreInfo ?
                    "Unable to import raster data into vector store" :
                    "Unable to import vector data into raster store";

            task.setError(new Exception(msg));
            task.setState(State.BAD_FORMAT);
            return false;
        }

        if (task.getLayer() == null || task.getLayer().getResource() == null) {
            task.setError(new Exception("Task has no layer configuration"));
            task.setState(State.ERROR);
            return false;
        }

        LayerInfo l = task.getLayer();
        ResourceInfo r = l.getResource();
       
        //initialize resource references
        r.setStore(task.getStore());
        r.setNamespace(
            catalog.getNamespaceByPrefix(task.getStore().getWorkspace().getName()));

        //style
        //assign a default style to the layer if not already done
        if (l.getDefaultStyle() == null) {
            try {
                StyleInfo style = null;
                if (r instanceof FeatureTypeInfo) {
                    //since this resource is still detached from the catalog we can't call
                    // through to get its underlying resource, so we depend on the "native"
                    // type provided from the format
                    FeatureType featureType =
                        (FeatureType) task.getMetadata().get(FeatureType.class);
                    if (featureType != null) {
                        style = styleGen.createStyle((FeatureTypeInfo) r, featureType);
                    } else {
                        throw new RuntimeException("Unable to compute style");
                    }

                }
                else if (r instanceof CoverageInfo) {
                    style = styleGen.createStyle((CoverageInfo) r);
                }
                else {
                    throw new RuntimeException("Unknown resource type :"
                            + r.getClass());
                }
                l.setDefaultStyle(style);
            }
            catch(Exception e) {
                task.setError(e);
                task.setState(ImportTask.State.ERROR);
                return false;
            }
        }
       
        //srs
        if (r.getSRS() == null) {
            task.setState(ImportTask.State.NO_CRS);
            return false;
        }
        else if (task.getState() == ImportTask.State.NO_CRS) {
            //changed after setting srs manually, compute the lat long bounding box
            try {
                computeLatLonBoundingBox(task, false);
            }
            catch(Exception e) {
                LOGGER.log(Level.WARNING, "Error computing lat long bounding box", e);
                task.setState(ImportTask.State.ERROR);
                task.setError(e);
                return false;
            }

            //also since this resource has no native crs set the projection policy to force declared
            task.getLayer().getResource().setProjectionPolicy(ProjectionPolicy.FORCE_DECLARED);
        }
        else {
            task.getLayer().getResource().setProjectionPolicy(ProjectionPolicy.NONE);
        }

        //bounds
        if (r.getNativeBoundingBox() == null) {
            task.setState(ImportTask.State.NO_BOUNDS);
            return false;
        }
       
        task.setState(ImportTask.State.READY);
        return true;
    }

    boolean formatMatchesStore(DataFormat format, StoreInfo store) {
        if (format instanceof VectorFormat) {
            return store instanceof DataStoreInfo;
        }
        if (format instanceof GridFormat) {
            return store instanceof CoverageStoreInfo;
        }
        return false;
    }

    boolean resourceMatchesStore(ResourceInfo resource, StoreInfo store) {
        if (resource instanceof FeatureTypeInfo) {
            return store instanceof DataStoreInfo;
        }
        if (resource instanceof CoverageInfo) {
            return store instanceof CoverageStoreInfo;
        }
        return false;
    }

    public void run(ImportContext context) throws IOException {
        run(context, ImportFilter.ALL);
    }

    public void run(ImportContext context, ImportFilter filter) throws IOException {
        run(context, filter, null);
    }
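
    /**
     * Runs all tasks in the context that pass the filter and are ready for
     * import, then persists the updated context and, if requested, archives
     * the input directory once the import completes.
     */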
   
    public void run(ImportContext context, ImportFilter filter, ProgressMonitor monitor) throws IOException {
        context.setProgress(monitor);
        context.setState(ImportContext.State.RUNNING);
       
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("Running import " + context.getId());
        }
       
        for (ImportTask task : context.getTasks()) {
            if (!filter.include(task)) {
                continue;
            }
            if (!task.readyForImport()) {
                continue;
            }

            if (context.progress().isCanceled()) {
                break;
            }
            run(task);
        }

        context.updated();
        contextStore.save(context);

        if (context.isArchive() && context.getState() == ImportContext.State.COMPLETE) {
            boolean canArchive = !Iterables.any(context.getTasks(), new Predicate<ImportTask>() {
                @Override
                public boolean apply(ImportTask input) {
                    return input.isDirect();
                }
            });

            if (canArchive) {
                Directory directory = null;
                if (context.getData() instanceof Directory) {
                    directory = (Directory) context.getData();
                } else if ( context.getData() instanceof SpatialFile ) {
                    directory = new Directory( ((SpatialFile) context.getData()).getFile().getParentFile() );
                }
                if (directory != null) {
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine("Archiving directory " + directory.getFile().getAbsolutePath());
                    }      
                    try {
                        directory.archive(getArchiveFile(context));
                    } catch (Exception ioe) {
                        // this is not a critical operation, so don't make the whole thing fail
                        LOGGER.log(Level.WARNING, "Error archiving", ioe);
                    }
                }
            }

        }
    }

    void run(ImportTask task) throws IOException {
        if (task.getState() == ImportTask.State.COMPLETE) {
            return;
        }
        task.setState(ImportTask.State.RUNNING);

        if (task.isDirect()) {
            //direct import, simply add configured store and layers to catalog
            doDirectImport(task);
        }
        else {
            //indirect import, read data from the source and into the target datastore
            doIndirectImport(task);
        }

    }
   
    public File getArchiveFile(ImportContext context) throws IOException {
        //String archiveName = "import-" + task.getContext().getId() + "-" + task.getId() + "-" + task.getData().getName() + ".zip";
        String archiveName = "import-" + context.getId() + ".zip";
        File dir = getCatalog().getResourceLoader().findOrCreateDirectory("uploads","archives");
        return new File(dir, archiveName);
    }
   
    public void changed(ImportContext context) {
        context.updated();
        contextStore.save(context);
    }

    public void changed(ImportTask task)  {
        prep(task);
        changed(task.getContext());
    }

    public Long runAsync(final ImportContext context, final ImportFilter filter) {
        return jobs.submit(new Job<ImportContext>() {
            @Override
            protected ImportContext call(ProgressMonitor monitor) throws Exception {
                run(context, filter, monitor);
                return context;
            }

            @Override
            public String toString() {
                return "Processing import " + context.getId();
            }
        });
    }
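
    /*
     * Illustrative async usage (not part of the original source):
     *
     *   Long jobId = importer.runAsync(context, ImportFilter.ALL);
     *   Task<ImportContext> job = importer.getTask(jobId);
     */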

    public Task<ImportContext> getTask(Long job) {
        return (Task<ImportContext>) jobs.getTask(job);
    }

    public List<Task<ImportContext>> getTasks() {
        return (List) jobs.getTasks();
    }

    /*
     * an import that involves consuming a data source directly
     */
    void doDirectImport(ImportTask task) throws IOException {
        //TODO: this needs to be transactional in case of errors along the way

        //add the store, may have been added in a previous iteration of this task
        if (task.getStore().getId() == null) {
            StoreInfo store = task.getStore();

            //ensure a unique name
            store.setName(findUniqueStoreName(task.getStore()));
           
            //ensure a namespace connection parameter set matching workspace/namespace
            if (!store.getConnectionParameters().containsKey("namespace")) {
                WorkspaceInfo ws = task.getContext().getTargetWorkspace();
                if (ws == null && task.getContext().getTargetStore() != null) {
                    ws = task.getContext().getTargetStore().getWorkspace();
                }
                if (ws != null) {
                    NamespaceInfo ns = catalog.getNamespaceByPrefix(ws.getName());
                    if (ns != null) {
                        store.getConnectionParameters().put("namespace", ns.getURI());
                    }
                }
            }
            catalog.add(task.getStore());
        }

        task.setState(ImportTask.State.RUNNING);

        try {
            //set up transform chain
            TransformChain tx = (TransformChain) task.getTransform();
           
            //apply pre transform
            if (!doPreTransform(task, task.getData(), tx)) {
                return;
            }

            addToCatalog(task);

            //apply post transform
            if (!doPostTransform(task, task.getData(), tx)) {
                return;
            }

            task.setState(ImportTask.State.COMPLETE);
        }
        catch(Exception e) {
            task.setState(ImportTask.State.ERROR);
            task.setError(e);
        }

    }

    /*
     * an import that involves reading from the datastore and writing into a specified target store
     */
    void doIndirectImport(ImportTask task) throws IOException {
        if (!task.getStore().isEnabled()) {
            task.getStore().setEnabled(true);
        }

        if (task.progress().isCanceled()){
            return;
        }

        task.setState(ImportTask.State.RUNNING);

        //setup transform chain
        TransformChain tx = task.getTransform();

        //pre transform
        if (!doPreTransform(task, task.getData(), tx)) {
            return;
        }

        boolean canceled = false;
        DataFormat format = task.getData().getFormat();
        if (format instanceof VectorFormat) {
            try {
                currentlyProcessing.put(task.getContext().getId(), task);
                loadIntoDataStore(task, (DataStoreInfo)task.getStore(), (VectorFormat) format,
                    (VectorTransformChain) tx);
                canceled = task.progress().isCanceled();

                FeatureTypeInfo featureType = (FeatureTypeInfo) task.getLayer().getResource();
                featureType.getAttributes().clear();

                if (!canceled) {
                    if (task.getUpdateMode() == UpdateMode.CREATE) {
                        addToCatalog(task);
                    }
   
                    // verify that the newly created featuretype's resource
                    // has bounding boxes computed - this might be required
                    // for csv or other uploads that have a geometry that is
                    // the result of a transform. there may be another way...
                    FeatureTypeInfo resource = getCatalog().getResourceByName(
                            featureType.getQualifiedName(), FeatureTypeInfo.class);
                    if (resource.getNativeBoundingBox().isEmpty()
                            || resource.getMetadata().get("recalculate-bounds") != null) {
                        // force computation
                        CatalogBuilder cb = new CatalogBuilder(getCatalog());
                        ReferencedEnvelope nativeBounds = cb.getNativeBounds(resource);
                        resource.setNativeBoundingBox(nativeBounds);
                        resource.setLatLonBoundingBox(cb.getLatLonBounds(nativeBounds,
                                resource.getCRS()));
                        getCatalog().save(resource);
                    }
                }
            }
            catch(Exception e) {
                LOGGER.log(Level.SEVERE, "Error occurred during import", e);
                task.setError(e);
                task.setState(ImportTask.State.ERROR);
                return;
            } finally {
                currentlyProcessing.remove(task.getContext().getId());
            }
        }
        else {
            throw new UnsupportedOperationException("Indirect raster import not yet supported");
        }

        if (!canceled && !doPostTransform(task, task.getData(), tx)) {
            return;
        }

        task.setState(canceled ? ImportTask.State.CANCELED : ImportTask.State.COMPLETE);

    }

    boolean doPreTransform(ImportTask task, ImportData data, TransformChain tx) {
        try {
            tx.pre(task, data);
        }
        catch (Exception e) {
            LOGGER.log(Level.SEVERE, "Error occurred during pre transform", e);
            task.setError(e);
            task.setState(ImportTask.State.ERROR);
            return false;
        }
        return true;
    }

    boolean doPostTransform(ImportTask task, ImportData data, TransformChain tx) {
        try {
            tx.post(task, data);
        }
        catch (Exception e) {
            LOGGER.log(Level.SEVERE, "Error occurred during post transform", e);
            task.setError(e);
            task.setState(ImportTask.State.ERROR);
            return false;
        }
        return true;
    }
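
    /**
     * Streams features from the source data into the target data store:
     * reads with the format, converts feature types for the target store
     * (shapefile, Oracle, or PostGIS specifics), uniquifies the type name in
     * CREATE mode, applies the transform chain, and writes inside a single
     * transaction that is rolled back (and the new schema dropped) on error
     * or cancellation.
     */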

    void loadIntoDataStore(ImportTask task, DataStoreInfo store, VectorFormat format,
        VectorTransformChain tx) throws Exception {

        ImportData data = task.getData();
        FeatureReader reader = null;
        FeatureWriter writer = null;
        // remember the first error so it can be rethrown at the end
        Exception error = null;
        try {
            reader = format.read(data, task);

            SimpleFeatureType featureType = (SimpleFeatureType) reader.getFeatureType();
            final String featureTypeName = featureType.getName().getLocalPart();
   
            DataStore dataStore = (DataStore) store.getDataStore(null);
            FeatureDataConverter featureDataConverter = FeatureDataConverter.DEFAULT;
            if (isShapefileDataStore(dataStore)) {
                featureDataConverter = FeatureDataConverter.TO_SHAPEFILE;
            }
            else if (isOracleDataStore(dataStore)) {
                featureDataConverter = FeatureDataConverter.TO_ORACLE;
            }
            else if (isPostGISDataStore(dataStore)) {
                featureDataConverter = FeatureDataConverter.TO_POSTGIS;
            }
           
            featureType = featureDataConverter.convertType(featureType, format, data, task);
            UpdateMode updateMode = task.getUpdateMode();
            final String uniquifiedFeatureTypeName;
            if (updateMode == UpdateMode.CREATE) {
                //find a unique type name in the target store
                uniquifiedFeatureTypeName = findUniqueNativeFeatureTypeName(featureType, store);
                task.setOriginalLayerName(featureTypeName);
   
                if (!uniquifiedFeatureTypeName.equals(featureTypeName)) {
                    //update the metadata
                    task.getLayer().getResource().setName(uniquifiedFeatureTypeName);
                    task.getLayer().getResource().setNativeName(uniquifiedFeatureTypeName);
                   
                    //retype
                    SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
                    typeBuilder.setName(uniquifiedFeatureTypeName);
                    typeBuilder.addAll(featureType.getAttributeDescriptors());
                    featureType = typeBuilder.buildFeatureType();
                }
   
                // @todo HACK remove this at some point when timezone issues are fixed
                // this will force postgis to create timezone w/ timestamp fields
                if (dataStore instanceof JDBCDataStore) {
                    JDBCDataStore ds = (JDBCDataStore) dataStore;
                    // sniff for postgis (h2 is used in tests and will cause failure if this occurs)
                    if (ds.getSqlTypeNameToClassMappings().containsKey("timestamptz")) {
                        ds.getSqlTypeToSqlTypeNameOverrides().put(java.sql.Types.TIMESTAMP, "timestamptz");
                    }
                }
   
                //apply the feature type transform
                featureType = tx.inline(task, dataStore, featureType);
   
                dataStore.createSchema(featureType);
            } else {
                // @todo what to do if featureType transform is present?
               
                // @todo implement me - need to specify attribute used for id
                if (updateMode == UpdateMode.UPDATE) {
                    throw new UnsupportedOperationException("updateMode UPDATE is not supported yet");
                }
                uniquifiedFeatureTypeName = featureTypeName;
            }
               
            Transaction transaction = new DefaultTransaction();
           
            if (updateMode == UpdateMode.REPLACE) {
               
                FeatureStore fs = (FeatureStore) dataStore.getFeatureSource(featureTypeName);
                fs.setTransaction(transaction);
                fs.removeFeatures(Filter.INCLUDE);
            }
           
            //start writing features
            // @todo ability to collect transformation errors for use in a dry-run (auto-rollback)
           
            ProgressMonitor monitor = task.progress();
           
            // @todo need better way to communicate to client
            int skipped = 0;
            int cnt = 0;
            // metrics
            long startTime = System.currentTimeMillis();
            task.clearMessages();
           
            task.setTotalToProcess(format.getFeatureCount(task.getData(), task));
           
            LOGGER.info("begining import");
            try {
                writer = dataStore.getFeatureWriterAppend(uniquifiedFeatureTypeName, transaction);
               
                while(reader.hasNext()) {
                    if (monitor.isCanceled()){
                        break;
                    }
                    SimpleFeature feature = (SimpleFeature) reader.next();
                    SimpleFeature next = (SimpleFeature) writer.next();
   
                    //(JD) TODO: some formats will rearrange the geometry type (like shapefile),
                    // which makes the geometry the first attribute regardless, so blindly copying
                    // over attributes won't work unless the source type also has the geometry as
                    // the first attribute in the schema
                    featureDataConverter.convert(feature, next);
                   
                    // @hack #45678 - mask empty geometry or postgis will complain
                    Geometry geom = (Geometry) next.getDefaultGeometry();
                    if (geom != null && geom.isEmpty()) {
                        next.setDefaultGeometry(null);
                    }
                   
                    //apply the feature transform
                    next = tx.inline(task, dataStore, feature, next);
                   
                    if (next == null) {
                        skipped++;
                    } else {
                        writer.write();
                    }
                    task.setNumberProcessed(++cnt);
                }
                transaction.commit();
                if (skipped > 0) {
                    task.addMessage(Level.WARNING, skipped + " features were skipped.");
                }
                LOGGER.info("load to target took " + (System.currentTimeMillis() - startTime));
            }
            catch (Exception e) {
                error = e;
            }
            // no finally block, there is too much to do
           
            if (error != null || monitor.isCanceled()) {
                // all sub exceptions in this catch block should be logged, not thrown
                // as the triggering exception will be thrown
   
                //failure, rollback transaction
                try {
                    transaction.rollback();
                } catch (Exception e1) {
                    LOGGER.log(Level.WARNING, "Error rolling back transaction",e1);
                }
   
                //attempt to drop the type that was created as well
                try {
                    dropSchema(dataStore,featureTypeName);
                } catch(Exception e1) {
                    LOGGER.log(Level.WARNING, "Error dropping schema in rollback",e1);
                }
            }
   
            // try to cleanup, but if an error occurs here and one hasn't already been set, set the error
            try {
                transaction.close();
            } catch (Exception e) {
                if (error == null) {
                    error = e;
                }
                LOGGER.log(Level.WARNING, "Error closing transaction",e);
            }
   
            // @revisit - when this gets disposed, any following uses seem to
            // have a problem where later users of the dataStore get an NPE
            // since the dataStore gets cached by the ResourcePool but is in a
            // closed state???
           
            // do this last in case we have to drop the schema
    //        try {
    //            dataStore.dispose();
    //        } catch (Exception e) {
    //            LOGGER.log(Level.WARNING, "Error closing dataStore",e);
    //        }
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (Exception e) {
                    if (error == null) {
                        error = e;
                    }
                    LOGGER.log(Level.WARNING, "Error closing writer",e);
                }
            }
            try {   
                if(reader != null) {
                    format.dispose(reader, task);
                    // @hack catch _all_ Exceptions here - occasionally closing a shapefile
                    // seems to result in an IllegalArgumentException related to not
                    // holding the lock...
                }
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Error closing reader",e);
            }
        }
       
        // finally, throw any error
        if (error != null) {
            throw error;
        }
    }

    StoreInfo lookupDefaultStore() {
        WorkspaceInfo ws = catalog.getDefaultWorkspace();
        if (ws == null) {
            return null;
        }

        return catalog.getDefaultDataStore(ws);
    }

    void addToCatalog(ImportTask task) throws IOException {
        LayerInfo layer = task.getLayer();
        ResourceInfo resource = layer.getResource();
        resource.setStore(task.getStore());

        //add the resource
        String name = findUniqueResourceName(resource);
        resource.setName(name);

        //JD: not setting a native name, it should actually already be set by this point and we
        // don't want to blindly set it to the same name as the resource name, which might have
        // changed to deal with name clashes
        //resource.setNativeName(name);
        resource.setEnabled(true);
        catalog.add(resource);

        //add the layer (and style)
        if (layer.getDefaultStyle().getId() == null) {
            catalog.add(layer.getDefaultStyle());
        }

        layer.setEnabled(true);
        catalog.add(layer);
    }
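
    /**
     * Returns a store name unique within the workspace by appending a
     * numeric suffix and incrementing it until no clash remains.
     */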

    String findUniqueStoreName(StoreInfo store) {
        WorkspaceInfo workspace = store.getWorkspace();

        //TODO: put an upper limit on how many times to try
        String name = store.getName();
        if (catalog.getStoreByName(workspace, store.getName(), StoreInfo.class) != null) {
            int i = 0;
            name += i;
            while (catalog.getStoreByName(workspace, name, StoreInfo.class) != null) {
                name = name.replaceAll(i + "$", String.valueOf(i+1));
                i++;
            }
        }

        return name;
    }
   
    String findUniqueResourceName(ResourceInfo resource)
        throws IOException {

        //TODO: put an upper limit on how many times to try
        StoreInfo store = resource.getStore();
        NamespaceInfo ns = catalog.getNamespaceByPrefix(store.getWorkspace().getName());
       
        String name = resource.getName();
        if (catalog.getResourceByName(ns, name, ResourceInfo.class) != null) {
            int i = 0;
            name += i;
            while (catalog.getResourceByName(ns, name, ResourceInfo.class) != null) {
                name = name.replaceAll(i + "$", String.valueOf(i+1));
                i++;
            }
        }

        return name;
    }

    String findUniqueNativeFeatureTypeName(FeatureType featureType, DataStoreInfo store) throws IOException {
        return findUniqueNativeFeatureTypeName(featureType.getName().getLocalPart(), store);
    }

    private String findUniqueNativeFeatureTypeName(String name, DataStoreInfo store) throws IOException {
        DataStore dataStore = (DataStore) store.getDataStore(null);

        //hack for oracle, all names must be upper case
        //TODO: abstract this into FeatureConverter
        if (isOracleDataStore(dataStore)) {
            name = name.toUpperCase();
        }

        //TODO: put an upper limit on how many times to try
        List<String> names = Arrays.asList(dataStore.getTypeNames());
        if (names.contains(name)) {
            int i = 0;
            name += i;
            while(names.contains(name)) {
                name = name.replaceAll(i + "$", String.valueOf(i+1));
                i++;
            }
        }

        return name;
    }

    boolean isShapefileDataStore(DataStore dataStore) {
        return dataStore instanceof ShapefileDataStore || dataStore instanceof DirectoryDataStore;
    }

    boolean isOracleDataStore(DataStore dataStore) {
        return dataStore instanceof JDBCDataStore && "org.geotools.data.oracle.OracleDialect"
            .equals(((JDBCDataStore)dataStore).getSQLDialect().getClass().getName());
    }

    boolean isPostGISDataStore(DataStore dataStore) {
        return dataStore instanceof JDBCDataStore && ((JDBCDataStore)dataStore).getSQLDialect()
            .getClass().getName().startsWith("org.geotools.data.postgis");
    }

    /*
     * computes the lat/lon bounding box from the native bounding box and srs, optionally overriding
     * the value already set.
     */
    boolean computeLatLonBoundingBox(ImportTask task, boolean force) throws Exception {
        ResourceInfo r = task.getLayer().getResource();
        if (force || r.getLatLonBoundingBox() == null && r.getNativeBoundingBox() != null) {
            CoordinateReferenceSystem nativeCRS = CRS.decode(r.getSRS());
            ReferencedEnvelope nativeBbox =
                new ReferencedEnvelope(r.getNativeBoundingBox(), nativeCRS);
            r.setLatLonBoundingBox(nativeBbox.transform(CRS.decode("EPSG:4326"), true));
            return true;
        }
        return false;
    }

    //file location methods
    public File getImportRoot() {
        try {
            return catalog.getResourceLoader().findOrCreateDirectory("imports");
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public File getUploadRoot() {
        try {
            return catalog.getResourceLoader().findOrCreateDirectory("uploads");
        }
        catch(IOException e) {
            throw new RuntimeException(e);
        }
    }

    public void destroy() throws Exception {
        jobs.shutdown();
        contextStore.destroy();
    }

    public void delete(ImportContext importContext) throws IOException {
        delete(importContext, false);
    }
   
    public void delete(ImportContext importContext, boolean purge) throws IOException {
        if (purge) {
            importContext.delete();   
        }
       
        contextStore.remove(importContext);
    }
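
    /**
     * Drops the underlying table backing the given feature type. Only JDBC
     * data stores are supported; other stores just log a warning.
     */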

    private void dropSchema(DataStore ds, String featureTypeName) throws Exception {
        // @todo this needs implementation in geotools
        SimpleFeatureType schema = ds.getSchema(featureTypeName);
        if (schema != null) {
            if (ds instanceof JDBCDataStore) {
                JDBCDataStore dataStore = (JDBCDataStore) ds;
                Connection conn = dataStore.getConnection(Transaction.AUTO_COMMIT);
                Statement st = null;
                try {
                    st = conn.createStatement();
                    st.execute("drop table " + featureTypeName);
                    LOGGER.fine("dropSchema " + featureTypeName + " successful");
                } finally {
                    dataStore.closeSafe(st);
                    dataStore.closeSafe(conn);
                }
            } else {
                LOGGER.warning("Unable to dropSchema " + featureTypeName + " from datastore " + ds.getClass());
            }
        } else {
            LOGGER.warning("Unable to dropSchema " + featureTypeName + " as it does not appear to exist in dataStore");
        }
    }

    public XStreamPersister createXStreamPersisterXML() {
        return initXStreamPersister(new XStreamPersisterFactory().createXMLPersister());
    }

    public XStreamPersister createXStreamPersisterJSON() {
        return initXStreamPersister(new XStreamPersisterFactory().createJSONPersister());
    }
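
    /**
     * Configures an {@link XStreamPersister} with the aliases, omitted
     * fields, and CRS converters needed to (de)serialize import contexts.
     */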

    public XStreamPersister initXStreamPersister(XStreamPersister xp) {
        xp.setCatalog(catalog);
        //xp.setReferenceByName(true);
       
        XStream xs = xp.getXStream();

        //ImportContext
        xs.alias("import", ImportContext.class);

        //ImportTask
        xs.alias("task", ImportTask.class);
        xs.omitField(ImportTask.class, "context");

        //ImportItem
        //xs.alias("item", ImportItem.class);
        //xs.omitField(ImportItem.class, "task");

        //DataFormat
        xs.alias("dataStoreFormat", DataStoreFormat.class);

        //ImportData
        xs.alias("spatialFile", SpatialFile.class);
        xs.alias("database", org.geoserver.importer.Database.class);
        xs.alias("table", Table.class);
        xs.omitField(Table.class, "db");

        xs.alias("vectorTransformChain", VectorTransformChain.class);
        xs.registerLocalConverter(ReprojectTransform.class, "source", new CRSConverter());
        xs.registerLocalConverter(ReprojectTransform.class, "target", new CRSConverter());

        xs.registerLocalConverter( ReferencedEnvelope.class, "crs", new CRSConverter() );
        xs.registerLocalConverter( GeneralEnvelope.class, "crs", new CRSConverter() );
       
        return xp;
    }

}