Package: com.google.api.services.bigquery.model

Examples of com.google.api.services.bigquery.model.Job


      return null;
    }

    Projects project = projectResponse.getProjects().get(0);
    Get jobsRequest = bigquery.jobs().get(project.getId(), jobId);
    Job j = jobsRequest.execute();
    return j;
  }
View Full Code Here


    // now create the ingestion
    for (String kind : exporterConfig.getEntityKindsToExport()) {
      String gsUrl = convertHandleToUrl(gsHandleOfBackup, kind);
      log.warning("gsUrl: " + gsUrl);
     
      Job job = new Job();
      JobConfiguration config = new JobConfiguration();
      JobConfigurationLoad loadConfig = new JobConfigurationLoad();

      loadConfig.setSourceUris(Arrays.asList(gsUrl));
      loadConfig.set("sourceFormat", "DATASTORE_BACKUP");
      loadConfig.set("allowQuotedNewlines", true);

      TableReference table = new TableReference();
      table.setProjectId(exporterConfig.getBigqueryProjectId());
      table.setDatasetId(exporterConfig.getBigqueryDatasetId());
      table.setTableId(kind + datatableSuffix);
      loadConfig.setDestinationTable(table);

      config.setLoad(loadConfig);
      job.setConfiguration(config);
      Insert insert = bigquery.jobs().insert(exporterConfig.getBigqueryProjectId(), job);

      JobReference jr = insert.execute().getJobReference();
      log.warning("Uri: " + gsUrl + ", JobId: " + jr.getJobId());
    }
View Full Code Here

  /**
   * Triggers a bigquery load {@link Job} request and returns the job Id for the same.
   */
  private BigQueryLoadJobReference triggerBigQueryLoadJob() {
    Job job = createJob();
    // Set up Bigquery Insert
    try {
      Insert insert =
          BigQueryLoadGoogleCloudStorageFilesJob.getBigquery().jobs().insert(projectId, job);
      Job executedJob = insert.execute();
      log.info("Triggered the bigQuery load job for files " + fileSet + " . Job Id = "
          + executedJob.getId());
      return new BigQueryLoadJobReference(projectId, executedJob.getJobReference());
    } catch (IOException e) {
      throw new RuntimeException("Error in triggering BigQuery load job for files " + fileSet, e);
    }
  }
View Full Code Here

  /**
   * Create a {@link Job} instance for the specified files and bigquery {@link TableSchema} with
   * default settings.
   */
  private Job createJob() {
    Job job = new Job();
    JobConfiguration jobConfig = new JobConfiguration();
    JobConfigurationLoad loadConfig = new JobConfigurationLoad();
    jobConfig.setLoad(loadConfig);
    job.setConfiguration(jobConfig);

    loadConfig.setAllowQuotedNewlines(false);
    loadConfig.setSourceFormat("NEWLINE_DELIMITED_JSON");

    List<String> sources = new ArrayList<String>();
View Full Code Here

      private static final long serialVersionUID = 6194863971729831899L;

      @Override
      public void run() {
        String jobRef = jobToPoll.getJobReference().getJobId();
        Job pollJob = null;
        try {
          pollJob = BigQueryLoadGoogleCloudStorageFilesJob.getBigquery().jobs()
              .get(jobToPoll.getJobReference().getProjectId(), jobRef).execute();
        } catch (IOException e) {
          log.warning("Unable to poll the status of the job " + jobRef + " . Retrying after "
              + BigQueryConstants.MIN_TIME_BEFORE_NEXT_POLL + " seconds");
          setForRetry();
        }
        log.info("Job status of job " + jobRef + " : " + pollJob.getStatus().getState());
        if (pollJob.getStatus().getState().equals("PENDING")
            || pollJob.getStatus().getState().equals("RUNNING")) {
          setForRetry();
          return;
        }
        submitPromisedValue(pollJob.getStatus().getState());
      }

      private void setForRetry() {
        HttpServletRequest request = DeferredTaskContext.getCurrentRequest();
        int attempts = request.getIntHeader("X-AppEngine-TaskExecutionCount");
View Full Code Here

  }

  /**
   * Inspects the final status of the polled BigQuery load job and routes the pipeline
   * accordingly.
   *
   * <p>If the job finished with a fatal error ({@code getErrorResult() != null}), the whole
   * file-set load is retried by enqueuing a new {@code BigQueryLoadFileSetJob} with an
   * incremented retry count. Non-fatal errors ({@code getErrors()}) are logged only, since
   * BigQuery still completed the job. On the success path the source files are deleted and a
   * "DONE" job reference is returned after deletion finishes.
   *
   * @param pollResult reference of the load job whose terminal status should be inspected
   * @param numRetries number of retry attempts made so far
   */
  @Override
  public Value<BigQueryLoadJobReference> run(BigQueryLoadJobReference pollResult,
      Integer numRetries) throws Exception {
    // Fetch the job's current status from the BigQuery API.
    Job pollJob = BigQueryLoadGoogleCloudStorageFilesJob.getBigquery().jobs()
        .get(pollResult.getJobReference().getProjectId(), pollResult.getJobReference().getJobId())
        .execute();
    ErrorProto fatalError = pollJob.getStatus().getErrorResult();
    List<ErrorProto> errors = pollJob.getStatus().getErrors();
    if (fatalError != null) {
      // Fatal error: log the details and retry the entire file-set load.
      // NOTE(review): no upper bound on numRetries is visible in this block —
      // confirm the pipeline framework caps retries elsewhere.
      log.severe("Job failed while writing to Bigquery. Retrying...#attempt " + numRetries
          + " Error details : " + fatalError.getReason() + ": " + fatalError.getMessage() + " at "
          + fatalError.getLocation());
      return futureCall(new BigQueryLoadFileSetJob(dataset, tableName, projectId, bundle, schema),
          immediate(++numRetries));
    }
    if (errors != null) {
      // Non-fatal errors: the job still went to completion; surface them in the logs.
      log.log(Level.SEVERE, "Bigquery load job for files " + bundle
          + " completed with following errors. Bigquery does not consider these errors fatal. Hence the job went to completion.");
      for (ErrorProto error : errors) {
        log.log(Level.SEVERE, "Error: [REASON] " + error.getReason() + " [MESSAGE] "
            + error.getMessage() + " [LOCATION] " + error.getLocation());
      }
    }
    // Success: delete the already-loaded source files, then return a DONE
    // reference only after the deletion job has completed.
    FutureValue<Void> deleteJob = futureCall(new DeleteFilesJob(), immediate(bundle));
    return futureCall(new ReturnResult<BigQueryLoadJobReference>(),
        immediate(new BigQueryLoadJobReference("DONE", pollJob.getJobReference())),
        waitFor(deleteJob));
  }
View Full Code Here

TOP

Related Classes of com.google.api.services.bigquery.model.Job

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.