Package com.socrata.datasync.job

Examples of com.socrata.datasync.job.JobStatus


        try {
            InputStream inputControlFile = new FileInputStream(controlFile);
            return publishViaFTPDropboxV2(userPrefs, datasetId, csvOrTsvFile, inputControlFile);
        } catch (Exception e) {
            e.printStackTrace();
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("Error uploading control file: " + e.getMessage());
            return status;
        }
    }
View Full Code Here


        try {
            InputStream inputControlFile = new ByteArrayInputStream(controlFileContent.getBytes("UTF-8"));
            return publishViaFTPDropboxV2(userPrefs, datasetId, csvOrTsvFile, inputControlFile);
        } catch (Exception e) {
            e.printStackTrace();
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("Error uploading control file content: " + e.getMessage());
            return status;
        }
    }
View Full Code Here

                job = new MetadataJob(jobFileToRun);
              }
              else {
                job = new IntegrationJob(jobFileToRun);
              }
                JobStatus status = job.run();
                if(status.isError()) {
                    System.err.print("Job completed with errors: ");
                    System.err.println(status.getMessage());
                    System.exit(1);
                } else {
                    // job ran successfully!
                    System.out.println("Job completed successfully");
                    System.out.println(status.getMessage());
                }
            } catch (IOException e) {
                System.err.println("Error running " + jobFileToRun + ": " + e.toString());
                System.exit(1);
            }
View Full Code Here

            System.exit(1);
        }
  }

    public SimpleIntegrationRunner(Job job) {
        JobStatus status;
        try {
            status = job.run();
            if(status.isError()) {
                System.err.print("Job completed with errors: ");
                System.err.println(status.getMessage());
                System.exit(1);
            } else {
                System.out.println("Job completed successfully");
                if(job.getClass() == PortJob.class) {
                    System.out.println(status.getMessage() + ". " +
                        "Your newly created dataset is at:\n" +
                        ((PortJob)job).getSinkSiteDomain() + "/d/" + ((PortJob)job).getSinkSetID());
                    }
                System.out.println(status.getMessage());
            }
        } catch (IOException e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }
View Full Code Here

        // Uncomment to do replace via FTP (SmartUpdate)
        //jobToRun.setPublishViaFTP(true);
        //jobToRun.setPathToFTPControlFile("control.json");

        JobStatus status = jobToRun.run();
        if(status.isError()) {
            System.err.println("Job failed: " + status.getMessage());
        } else {
            System.out.println("Job ran successfully!");
        }
    }
View Full Code Here

        if(fileToPublish.equals(""))
            return JobStatus.MISSING_FILE_TO_PUBLISH;

        File publishFile = new File(fileToPublish);
        if(!publishFile.exists() || publishFile.isDirectory()) {
            JobStatus errorStatus = JobStatus.FILE_TO_PUBLISH_DOESNT_EXIST;
            errorStatus.setMessage(fileToPublish + ": File to publish does not exist");
            return errorStatus;
        }

        String fileExtension = Utils.getFileExtension(fileToPublish);
        if(!allowedFileToPublishExtensions.contains(fileExtension))
            return JobStatus.FILE_TO_PUBLISH_INVALID_FORMAT;

        Dataset schema;
        try {
            schema = DatasetUtils.getDatasetInfo(connectionInfo.getUrl(), job.getDatasetID());

            if(job.getPublishViaDi2Http() || job.getPublishViaFTP()) {

                ControlFile control = job.getControlFile();
                FileTypeControl fileControl = null;
                switch (fileExtension) {
                    case "csv": fileControl = control.csv; break;
                    case "tsv": fileControl = control.tsv; break;
                }

                JobStatus actionOkay = checkAction(control.action, job, schema);
                if (actionOkay.isError())
                    return actionOkay;

                if (fileControl == null && !control.action.equalsIgnoreCase(PublishMethod.delete.name())) {
                    JobStatus noFileTypeContent = JobStatus.PUBLISH_ERROR;
                    noFileTypeContent.setMessage("The control file for '" + publishFile.getName() +
                            "' requires that the '" + fileExtension + "' option be filled in");
                    return noFileTypeContent;
                }

                String[] headers = getHeaders(fileControl, publishFile);
                if (headers == null) {
                    JobStatus noHeaders = JobStatus.PUBLISH_ERROR;
                    noHeaders.setMessage("Headers must be specified in one of " + publishFile.getName() + " or the control file using 'columns'");
                    return noHeaders;
                }

                Set<String> synthetics = fileControl.syntheticLocations.keySet();
                JobStatus csvDatasetAgreement = checkColumnAgreement(schema, headers, synthetics, publishFile.getName());
                if (csvDatasetAgreement.isError())
                    return csvDatasetAgreement;

                JobStatus controlHeaderAgreement = checkControlAgreement(fileControl, schema, headers, publishFile.getName());
                if (controlHeaderAgreement.isError())
                    return controlHeaderAgreement;

                JobStatus controlSensibility = validateControlFile(fileControl, connectionInfo.getUrl());
                if (controlSensibility.isError())
                    return controlSensibility;
            }
        } catch (Exception e) {
            // Not going to fail jobs on the validation check
        }
View Full Code Here

    private static JobStatus validateControlFile(FileTypeControl fileControl, String urlBase) {

        if (fileControl == null) return JobStatus.VALID;

        JobStatus goodTimestampFormats = checkTimeFormattingValidity(fileControl);
        if (goodTimestampFormats.isError())
            return goodTimestampFormats;

        JobStatus goodEncoding = checkEncodingValidity(fileControl, urlBase);
        if (goodEncoding.isError())
            return goodEncoding;

        return JobStatus.VALID;
    }
View Full Code Here

            methods.append("\t" + m.name() + "\n");
            if (m.name().equalsIgnoreCase(action))
                okAction = true;
        }
        if (!okAction) {
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("Unknown Publish Method: " +
                    "The control file must specify the publishing method via the 'action' option as one of: \n" +
                    methods.toString());
            return status;
        }
        if (!PublishMethod.replace.name().equalsIgnoreCase(action) && job.getPublishViaFTP()) {
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("FTP does not currently support upsert, append or delete");
            return status;
        }
        PublishMethod publishMethod = job.getPublishMethod();
        if (publishMethod != null && !action.equalsIgnoreCase(publishMethod.name())) {
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("Conflicting Publish Methods: " +
                    "The publish method selected was '" + publishMethod.name() +
                    "', but the 'action' option in the control file specifies the publish method as '" + action + ".");
            return status;
        }
        String rowIdentifier = DatasetUtils.getRowIdentifierName(schema);
        if (rowIdentifier == null && PublishMethod.delete.name().equalsIgnoreCase(action)) {
            JobStatus status = JobStatus.PUBLISH_ERROR;
            status.setMessage("Dataset Requirement Unfulfilled: " +
                    "To delete from a dataset, a row identifier must be set. Dataset '" + schema.getId() +
                    "' does not have a row identifier set");
            return status;
        }
        return JobStatus.VALID;
View Full Code Here

        for (String format : timeFormats) {
            if (format.equalsIgnoreCase(supportedTimeFormat))
                continue;
            try { formatter = DateTimeFormat.forPattern(format); }
            catch (IllegalArgumentException e) {
                JobStatus status = JobStatus.PUBLISH_ERROR;
                status.setMessage("Unsupported Date Time Format: The time format '" + format +
                        "' specified in the control file is not a valid pattern." +
                        "\nPlease consult " + jodaLink + " for more information");
                return status;
            }
        }
View Full Code Here

                    encodingFound = true;
                    break;
                }
            }
            if (!encodingFound) {
                JobStatus status = JobStatus.PUBLISH_ERROR;
                status.setMessage("Unsupported Encoding: The encoding '" + encoding + "' in the control file is not supported." +
                        "\nPlease consult " + charsetUri + " for a listing of supported encodings");
                return status;
            }
        } catch (Exception e) {
            // no reason to fail jobs because of encoding check
View Full Code Here

TOP

Related Classes of com.socrata.datasync.job.JobStatus

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.