Package: com.ikanow.infinit.e.data_model.api

Examples of com.ikanow.infinit.e.data_model.api.ResponsePojo


   
    // Step 3: remove each job via removeJob with forced=true; collect any jobs
    // whose removal reports failure so the caller can report or retry them.
    List<CustomMapReduceJobPojo> failedToRemove = new ArrayList<CustomMapReduceJobPojo>();
    for ( CustomMapReduceJobPojo cmr : jobs )
    {     
      // null user => internal/admin invocation; the two boolean flags appear to be
      // removeJar and forced — NOTE(review): confirm against the removeJob signature
      ResponsePojo rp = removeJob(null, cmr.jobtitle, true, true);
      if ( !rp.getResponse().isSuccess() )
      {
        failedToRemove.add(cmr);
      }
    }
   
View Full Code Here


        // Exactly one job references this jar, so it is safe to remove the backing share
        return new ShareHandler().removeShare(ownerid, jarid);
      }
      else
      {
        // Jar is referenced by more than one job - refuse to delete it out from under them
        return new ResponsePojo(new ResponseObject("removejar", false, "More than 1 job use this jar, could not remove."));
      }
    }
    // Fall-through: the jar URL never matched an infinite share, nothing to remove
    return new ResponsePojo(new ResponseObject("removejar", false, "Jar URL is not an infinite share, could not remove."));
  }
View Full Code Here

   * @throws ResourceException
   */
  @Get
  public Representation get( ) throws ResourceException
  {
     ResponsePojo rp = new ResponsePojo();
     Date startTime = new Date();    
    
     if ( needCookie )
     {
       cookieLookup = RESTTools.cookieLookup(cookie);
       if ( cookieLookup == null )
       {
         rp = new ResponsePojo();
         rp.setResponse(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
       }
       else
       {
         if ( action.equals("getresults") )
         {
           rp = this.customhandler.getJobResults(cookieLookup, jobid, limit, json, findStr, sortStr);
         }
         else if ( action.equals("schedule"))
         {
           rp = this.customhandler.scheduleJob(cookieLookup, title, desc, communityIds, jarURL, nextRunTime, freqSched, mapperClass, reducerClass, combinerClass, query, inputColl, outputKey, outputValue,appendResults,ageOutInDays,jsonPojo.incrementalMode,jobsToDependOn,jsonPojo.arguments, jsonPojo.exportToHdfs, bQuickRun, jsonPojo.selfMerge);
         }
         else if ( action.equals("update") )
         {
           rp = this.customhandler.updateJob(cookieLookup, (jobid==null)?(title):(jobid), title, desc, communityIds, jarURL, nextRunTime, freqSched, mapperClass, reducerClass, combinerClass, query, inputColl, outputKey, outputValue,appendResults,ageOutInDays,jsonPojo.incrementalMode,jobsToDependOn,jsonPojo.arguments, jsonPojo.exportToHdfs, bQuickRun, jsonPojo.selfMerge);
         }
         else if ( action.equals("getjobs"))
         {
           rp = this.customhandler.getJobOrJobs(cookieLookup, jobid);
         }
         else if ( action.equals("removejob") )
         {
           rp = CustomHandler.removeJob(cookieLookup, jobid, shouldRemoveJar, false);
         }
         else if (action.equals("failed")) {
           rp.setResponse(new ResponseObject("Schedule/Update MapReduce Job", false, "Failed to parse POSTed content"));
         }
       }      
     }    
     else
     {
      // Note: Currently there are no methods that can be called without a cookie
     }
    
     Date endTime = new Date();
     rp.getResponse().setTime(endTime.getTime() - startTime.getTime());
     if (!rp.getResponse().isSuccess()) {
       if (rp.getResponse().getMessage().contains("ermission")) { // likely to be a permissions error
         RESTTools.logRequest(this);
       }
     }//TOTEST (TODO-2194)
     return new StringRepresentation(rp.toApi(), MediaType.APPLICATION_JSON);
  }   
View Full Code Here

   * @param jobid
   * @return
   */
  public ResponsePojo getJobResults(String userid, String jobid, int limit, String fields, String findStr, String sortStr )
  {
    ResponsePojo rp = new ResponsePojo();   
   
    List<Object> searchTerms = new ArrayList<Object>();
    try
    {
      ObjectId jid = new ObjectId(jobid);
      searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo._id_,jid));
    }
    catch (Exception ex)
    {
      //oid failed, will only add title
    }
    searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo.jobtitle_,jobid));
       
    try
    {
      //find admin entry);
      DBObject dbo = DbManager.getCustom().getLookup().findOne(new BasicDBObject(DbManager.or_,searchTerms.toArray()));     
      if ( dbo != null )
      {       
        CustomMapReduceJobPojo cmr = CustomMapReduceJobPojo.fromDb(dbo, CustomMapReduceJobPojo.class);
        //make sure user is allowed to see results
        if ( RESTTools.adminLookup(userid) || isInAllCommunities(cmr.communityIds, userid) )
        {                   
          //get results collection if done and return
          if ( ( cmr.lastCompletionTime != null ) || (cmr.mapper.equals("none") && cmr.exportToHdfs))
          {
            BasicDBObject queryDbo = null;
            if (null != findStr) {
              queryDbo = (BasicDBObject) com.mongodb.util.JSON.parse(findStr);
            }
            else {
              queryDbo = new BasicDBObject()
            }//TOTEST
           
            BasicDBObject fieldsDbo = new BasicDBObject();
            if (null != fields) {
              fieldsDbo = (BasicDBObject) com.mongodb.util.JSON.parse("{" + fields + "}");
            }

            //return the results:
           
            // Need to handle sorting...
            BasicDBObject sort = null;
            if (null != sortStr) { //override
              sort = (BasicDBObject) com.mongodb.util.JSON.parse(sortStr);
            }
            else { //defaults
              String sortField = "_id";
              int sortDir = 1;
              BasicDBObject postProcObject = (BasicDBObject) com.mongodb.util.JSON.parse(InfiniteHadoopUtils.getQueryOrProcessing(cmr.query, InfiniteHadoopUtils.QuerySpec.POSTPROC));
              if ( postProcObject != null )
              {
                sortField = postProcObject.getString("sortField", "_id");
                sortDir = postProcObject.getInt("sortDirection", 1);
              }//TESTED (post proc and no post proc)
              sort = new BasicDBObject(sortField, sortDir);
            }//TOTEST
           
            // Case 1: DB
            rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",true,"Map reduce job completed at: " + cmr.lastCompletionTime));
            if ((null == cmr.exportToHdfs) || !cmr.exportToHdfs) {
              DBCursor resultCursor = null;
              if (limit > 0) {
                resultCursor = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection).find(queryDbo, fieldsDbo).sort(sort).limit(limit);
              }
              else {
                resultCursor = DbManager.getCollection(cmr.getOutputDatabase(), cmr.outputCollection).find(queryDbo, fieldsDbo).sort(sort);
              }
              CustomMapReduceResultPojo cmrr = new CustomMapReduceResultPojo();
              cmrr.lastCompletionTime = cmr.lastCompletionTime;
              cmrr.results = resultCursor.toArray();
              rp.setData(cmrr);
            }//TESTED
            else { // Case 2: HDFS
             
              if ((null != cmr.outputKey) && (null != cmr.outputValue) &&
                cmr.outputKey.equalsIgnoreCase("org.apache.hadoop.io.text") && cmr.outputValue.equalsIgnoreCase("org.apache.hadoop.io.text"))
              {
                // special case, text file
                try {
                  rp.setData(HadoopUtils.getBsonFromTextFiles(cmr, limit, fields), (BasePojoApiMap<BasicDBList>) null);
                }
                catch (Exception e) {
                  rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"Files don't appear to be in text file format, did you run the job before changing the output to Text/Text?"));
                }
              }//TESTED
              else { // sequence file
                try {
                  rp.setData(HadoopUtils.getBsonFromSequenceFile(cmr, limit, fields), (BasePojoApiMap<BasicDBList>) null);
                }
                catch (Exception e) {
                  rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"Files don't appear to be in sequence file format, did you run the job with Text/Text?"));
                }
              }//TESTED
            }//TESTED
          }
          else
          {
            rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"Map reduce job has not completed yet"));
          }
        }
        else
        {
          rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"User is not a member of communities with read permissions"));
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"Job does not exist"));
      }
    }
    catch (Exception e)
    {
      // If an exception occurs log the error
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Custom Map Reduce Job Results",false,"error retrieving job info"));
    }
    return rp;
  }
View Full Code Here

   * @param userid
   * @return
   */
  /**
   * Schedules a new custom map/reduce job after verifying the caller's
   * community permissions and input-collection access. Validates/normalizes
   * every field, resolves job dependencies, enforces title uniqueness, and
   * either runs the job immediately (when scheduled in the past) or persists
   * it for the scheduler.
   *
   * @param userid         submitting user's id (also stored as submitterID)
   * @param title          unique job title
   * @param desc           human-readable job description
   * @param communityIds   comma-separated community ObjectId strings
   * @param jarURL         URL of the job jar, or null/"null" for saved-query mode
   * @param nextRunTime    epoch millis of the first scheduled run (string form)
   * @param schedFreq      SCHEDULE_FREQUENCY enum name (NONE/DAILY/WEEKLY/MONTHLY)
   * @param jobsToDependOn comma-separated titles/ids of jobs this one waits on
   * @param json           free-form arguments blob passed to the job
   * @return ResponsePojo whose data is the new job id on success, or a failure message
   */
  public ResponsePojo scheduleJob(String userid, String title, String desc, String communityIds, String jarURL, String nextRunTime, String schedFreq, String mapperClass, String reducerClass, String combinerClass, String query, String inputColl, String outputKey, String outputValue, String appendResults, String ageOutInDays, Boolean incrementalMode, String jobsToDependOn, String json, Boolean exportToHdfs, boolean bQuickRun, Boolean selfMerge)
  {
    ResponsePojo rp = new ResponsePojo();
    List<ObjectId> commids = new ArrayList<ObjectId>();
    for ( String s : communityIds.split(","))
      commids.add(new ObjectId(s));
    boolean bAdmin = RESTTools.adminLookup(userid);
    //first make sure user is allowed to submit on behalf of the commids given
    if ( bAdmin || isInAllCommunities(commids, userid) )
    {
      CustomMapReduceJobPojo cmr = new CustomMapReduceJobPojo();
      //make sure user can use the input collection
      String inputCollection = getStandardInputCollection(inputColl);     
      if ( inputCollection != null )
      {
        cmr.isCustomTable = false;
      }
      else
      {
        // not a standard collection - try the user's custom tables instead
        inputCollection = getCustomInputCollection(inputColl, commids);
        cmr.isCustomTable = true;
      }
      if ( inputCollection != null)
      {       
        try
        {         
          cmr.communityIds = commids;
          cmr._id = new ObjectId();
          cmr.jobtitle = title;
          cmr.jobdesc = desc;
          cmr.inputCollection = inputCollection;
          if ((null == jarURL) || jarURL.equals("null")) {
            // saved-query mode: no user jar, so the output types are fixed
            cmr.jarURL = null;
            // Force the types:
            outputKey = "org.apache.hadoop.io.Text";
            outputValue = "com.mongodb.hadoop.io.BSONWritable";
          }
          else {
            cmr.jarURL = jarURL;
          }
          // two output collections: live ("_1") and temp ("_2") for atomic swap
          cmr.outputCollection = cmr._id.toString() + "_1";
          cmr.outputCollectionTemp = cmr._id.toString() + "_2";
          cmr.exportToHdfs = exportToHdfs;
         
          // Get the output database, based on the size of the collection
          long nJobs = DbManager.getCustom().getLookup().count();
          long nDbNum = nJobs / 3000; // (3000 jobs per collection, max is 6000)
          if (nDbNum > 0) { // else defaults to custommr
            String dbName = cmr.getOutputDatabase() + Long.toString(nDbNum);
            cmr.setOutputDatabase(dbName);
          }
         
          cmr.submitterID = new ObjectId(userid);
          long nextRun = Long.parseLong(nextRunTime);
          cmr.firstSchedule = new Date(nextRun);         
          cmr.nextRunTime = nextRun;
          //if this job is set up to run before now, just set the next run time to now
          //so we can schedule jobs appropriately
          long nNow = new Date().getTime();
          if ( cmr.nextRunTime < nNow )
            cmr.nextRunTime = nNow - 1;
          //TESTED
         
          // throws IllegalArgumentException for unknown enum names (caught below)
          cmr.scheduleFreq = SCHEDULE_FREQUENCY.valueOf(schedFreq);
          if ( (null != mapperClass) && !mapperClass.equals("null"))
            cmr.mapper = mapperClass;
          else
            cmr.mapper = "";
          if ( (null != reducerClass) && !reducerClass.equals("null"))
            cmr.reducer = reducerClass;
          else
            cmr.reducer = "";
          if ( (null != combinerClass) &&  !combinerClass.equals("null"))
            cmr.combiner = combinerClass;
          else
            cmr.combiner = "";
          if ( (null != outputKey) && !outputKey.equals("null"))
            cmr.outputKey = outputKey;
          else
            cmr.outputKey = "com.mongodb.hadoop.io.BSONWritable";
          if ( (null != outputValue) && !outputValue.equals("null"))
            cmr.outputValue = outputValue;
          else
            cmr.outputValue = "com.mongodb.hadoop.io.BSONWritable";
          if ( (null != query) && !query.equals("null") && !query.isEmpty())
            cmr.query = query;
          else
            cmr.query = "{}";
         
          // append/age-out are best-effort parses: any failure falls back to defaults
          boolean append = false;
          double ageOut = 0.0;
          try
          {
            append = Boolean.parseBoolean(appendResults);
            ageOut = Double.parseDouble(ageOutInDays);
          }
          catch (Exception ex)
          {
            append = false;
            ageOut = 0.0;
          }
          cmr.appendResults = append;
          cmr.appendAgeOutInDays = ageOut;
          cmr.incrementalMode = incrementalMode;
          cmr.selfMerge = selfMerge;
         
          if ( json != null && !json.equals("null") )
            cmr.arguments = json;
          else
            cmr.arguments = null;
         
          if ((null == cmr.jarURL) && (null != cmr.arguments) && !cmr.arguments.isEmpty()) {
            // In saved query, if arguments is valid BSON then copy over query
            try {
              Object tmpQuery = com.mongodb.util.JSON.parse(cmr.arguments);
              if (tmpQuery instanceof BasicDBObject) {
                cmr.query = cmr.arguments;
              }
            }
            catch (Exception e) {} // fine just carry on
          }
          else if ((null == cmr.jarURL)) { // ie args == null, copy from query
            cmr.arguments = cmr.query;
          }
         
          //try to work out dependencies, error out if they fail
          if ( (null != jobsToDependOn) && !jobsToDependOn.equals("null"))
          {
            try
            {
              cmr.jobDependencies = getJobDependencies(jobsToDependOn);
              cmr.waitingOn = cmr.jobDependencies;
            }
            catch (Exception ex)
            {
              rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"Error parsing the job dependencies, did a title or id get set incorrectly or did a job not exist?"));
              return rp;
            }
          }
         
          //make sure title hasn't been used before
          DBObject dbo = DbManager.getCustom().getLookup().findOne(new BasicDBObject("jobtitle",title));
          if ( dbo == null )
          {
            Date nextRunDate = new Date(nextRun);
            Date now = new Date();
            String nextRunString = nextRunDate.toString();
            boolean bRunNowIfPossible = false;
            if ( nextRunDate.getTime() < now.getTime() ) {
              nextRunString = "next available timeslot";
              bRunNowIfPossible = true;
            }
            rp.setResponse(new ResponseObject("Schedule MapReduce Job",true,"Job scheduled successfully, will run on: " + nextRunString));
            rp.setData(cmr._id.toString(), null);
                       
            // NOTE(review): the immediate-run path presumably persists the job itself - confirm in runJobAndWaitForCompletion
            if (bRunNowIfPossible) {
              runJobAndWaitForCompletion(cmr, bQuickRun);
            }//TESTED
            else {
              DbManager.getCustom().getLookup().save(cmr.toDb());             
            }
          }
          else
          {         
            rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"A job already matches that title, please choose another title"));
          }
        }
        catch (IllegalArgumentException e)
        {
          logger.error("Exception Message: " + e.getMessage(), e);
          rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"No enum matching scheduled frequency, try NONE, DAILY, WEEKLY, MONTHLY"));
        }
        catch (Exception e)
        {
          // If an exception occurs log the error
          logger.error("Exception Message: " + e.getMessage(), e);
          rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"error scheduling job"));
        }         
      }
      else
      {
        rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"You do not have permission to use the given input collection."));
      }
    }
    else
    {
      rp.setResponse(new ResponseObject("Schedule MapReduce Job",false,"You do not have permissions for all the communities given."));
    }
    return rp;
  }
View Full Code Here

    return rp;
  }
 
  public ResponsePojo updateJob(String userid, String jobidortitle, String title, String desc, String communityIds, String jarURL, String nextRunTime, String schedFreq, String mapperClass, String reducerClass, String combinerClass, String query, String inputColl, String outputKey, String outputValue, String appendResults, String ageOutInDays, Boolean incrementalMode, String jobsToDependOn, String json, Boolean exportToHdfs, boolean bQuickRun, Boolean selfMerge)
  {
    ResponsePojo rp = new ResponsePojo();
    //first make sure job exists, and user is allowed to edit
    List<Object> searchTerms = new ArrayList<Object>();
    try
    {
      ObjectId jid = new ObjectId(jobidortitle);
      searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo._id_,jid));
    }
    catch (Exception ex)
    {
      //oid failed, will only add title
    }
    searchTerms.add(new BasicDBObject(CustomMapReduceJobPojo.jobtitle_,jobidortitle));
    DBObject dbo = DbManager.getCustom().getLookup().findOne(new BasicDBObject(DbManager.or_,searchTerms.toArray()));
   
    if ( dbo != null )
    {
      CustomMapReduceJobPojo cmr = CustomMapReduceJobPojo.fromDb(dbo, CustomMapReduceJobPojo.class);
      //verify user can update this job
      if ( RESTTools.adminLookup(userid) || cmr.submitterID.toString().equals(userid) )
      {
        //check if job is already running
        if ( ( cmr.jobidS != null ) && !cmr.jobidS.equals( "CHECKING_COMPLETION" ) &&  !cmr.jobidS.equals( "" ) ) // (< robustness, sometimes server gets stuck here...)
        {
          // If it is running and we're trying to turn it off .. .then kill the job:
          com.ikanow.infinit.e.processing.custom.utils.PropertiesManager customProps = new com.ikanow.infinit.e.processing.custom.utils.PropertiesManager();
          boolean bLocalMode = customProps.getHadoopLocalMode();
         
          boolean tryToKillJob = false;
          if (!bLocalMode) { // else not possible
           
            // This line means: either we're NONE already (and it hasn't changed), or we've been changed to NONE
            if ((((null == schedFreq) || (schedFreq.equalsIgnoreCase("null")))
                      && (CustomMapReduceJobPojo.SCHEDULE_FREQUENCY.NONE == cmr.scheduleFreq))
                ||
              (null != schedFreq) && (schedFreq.equalsIgnoreCase("none")))
            {
              long candidateNextRuntime = 0L;
              try {
                candidateNextRuntime = Long.parseLong(nextRunTime);
              }
              catch (Exception e) {}
              if (candidateNextRuntime >= DONT_RUN_TIME) {
                tryToKillJob = true;
              }
            }
          }//TESTED - (don't run/daily/once-only) - covers all the cases, except the "theoretical" null cases
         
          if (tryToKillJob) {
            // (ie is running and updating it to mean don't run anymore .. that 4e12 number is 2099 in ms, anything bigger than that is assumed to mean "don't run)
            CustomProcessingController pxController = new CustomProcessingController();
            if (pxController.killRunningJob(cmr)) {           
              rp.setResponse(new ResponseObject("Update MapReduce Job",true,"Killed job, may take a few moments for the status to update."));
            }
            else {
              rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Failed to kill the job - it may not have started yet, try again in a few moments."));
            }             
            return rp;
          }//TODO (INF-2395): TOTEST
          else {
            rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Job is currently running (or not yet marked as completed).  Please wait until the job completes to update it."));
            return rp;
          }
        }
        if (cmr.jobidS != null) { // (must be checking completion, ie in bug state, so reset...)
          cmr.jobidS = null;
          cmr.jobidN = 0;
        }
        //check each variable to see if its needs/can be updated
        if ( (null != communityIds) && !communityIds.equals("null") )
        {
          List<ObjectId> commids = new ArrayList<ObjectId>();
          for ( String s : communityIds.split(","))
            commids.add(new ObjectId(s));
          boolean bAdmin = RESTTools.adminLookup(userid);
          //make sure user is allowed to submit on behalf of the commids given
          if ( bAdmin || isInAllCommunities(commids, userid) )
          {
            ElasticSearchManager customIndex = CustomOutputIndexingEngine.getExistingIndex(cmr);
            if (null != customIndex) {
              CustomOutputIndexingEngine.swapAliases(customIndex, commids, true)
            }//TESTED (by hand - removal and deletion)           
           
            cmr.communityIds = commids;
          }
          else
          {
            rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You do have permissions for all the communities given."));
            return rp;
          }
        }
        if ( (null != inputColl) && !inputColl.equals("null"))
        {
          //make sure user can use the input collection
          String inputCollection = getStandardInputCollection(inputColl);     
          if ( inputCollection != null )
          {
            cmr.isCustomTable = false;
          }
          else
          {
            inputCollection = getCustomInputCollection(inputColl, cmr.communityIds);
            cmr.isCustomTable = true;
          }
          if ( inputCollection != null)
          {
            cmr.inputCollection = inputCollection;
          }
          else
          {
            rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You do not have permission to use the given input collection."));
            return rp;
          }
        }
        try
        {
          if ( (null != title) && !title.equals("null"))
          {
            // If this is indexed then can't change the title
            if (null != CustomOutputIndexingEngine.getExistingIndex(cmr)) {
              rp.setResponse(new ResponseObject("Update MapReduce Job",false,"You cannot change the title of a non-empty indexed job - you can turn indexing off and then change the title"));
              return rp;             
            }//TESTED (by hand)
           
            cmr.jobtitle = title;
            //make sure the new title hasn't been used before
            DBObject dbo1 = DbManager.getCustom().getLookup().findOne(new BasicDBObject("jobtitle",title));
            if ( dbo1 != null )
            {
              rp.setResponse(new ResponseObject("Update MapReduce Job",false,"A job already matches that title, please choose another title"));
              return rp;
            }
          }
          if ( (null != desc) && !desc.equals("null"))
          {
            cmr.jobdesc = desc;
          }
          if ( (null != jarURL) && !jarURL.equals("null"))
          {
            cmr.jarURL = jarURL;
          }
          if ( (null != nextRunTime) && !nextRunTime.equals("null"))
          {
            cmr.nextRunTime = Long.parseLong(nextRunTime);
            long nNow = new Date().getTime();
            cmr.firstSchedule = new Date(cmr.nextRunTime);
            if (cmr.nextRunTime < nNow) { // ie leave firstSchedule alone since that affects when we next run, but just set this to now...
              cmr.nextRunTime = nNow - 1;
            }//TESTED
            cmr.timesRan = 0;
            cmr.timesFailed = 0;
          }
          if ( (null != schedFreq) && !schedFreq.equals("null"))
          {
            cmr.scheduleFreq = SCHEDULE_FREQUENCY.valueOf(schedFreq);
          }
          if ( (null != mapperClass) && !mapperClass.equals("null"))
          {
            cmr.mapper = mapperClass;
          }
          if ( (null != reducerClass) && !reducerClass.equals("null"))
          {
            cmr.reducer = reducerClass;
          }
          if ( (null != combinerClass) && !combinerClass.equals("null"))
          {
            cmr.combiner = combinerClass;
          }
          if ( (null != query) && !query.equals("null"))
          {
            boolean wasIndexed = CustomOutputIndexingEngine.isIndexed(cmr);
           
            if ( !query.isEmpty() )
              cmr.query = query;
            else
              cmr.query = "{}";
           
            // If we're in indexing mode, check if the index has been turned off, in which case delete the index
            if (wasIndexed && !CustomOutputIndexingEngine.isIndexed(cmr)) {
              CustomOutputIndexingEngine.deleteOutput(cmr)
            }//TESTED (by hand)
          }
          if (null == cmr.jarURL) { // (if in savedQuery mode, force types to be Text/BSONWritable)
            // Force the types:
            outputKey = "org.apache.hadoop.io.Text";
            outputValue = "com.mongodb.hadoop.io.BSONWritable";           
          }
          if ( (null != outputKey) && !outputKey.equals("null"))
          {
            cmr.outputKey = outputKey;
          }
          if ( (null != outputValue) && !outputValue.equals("null"))
          {
            cmr.outputValue = outputValue;
          }
          if ( (null != appendResults) && !appendResults.equals("null"))
          {
            try
            {
              cmr.appendResults = Boolean.parseBoolean(appendResults);
            }
            catch (Exception ex)
            {
              cmr.appendResults = false;
            }
          }
          if ( (null != ageOutInDays) && !ageOutInDays.equals("null"))
          {
            try
            {
              cmr.appendAgeOutInDays = Double.parseDouble(ageOutInDays);
            }
            catch (Exception ex)
            {
              cmr.appendAgeOutInDays = 0.0;
            }
          }
          if (null != incrementalMode)
          {
            cmr.incrementalMode = incrementalMode;
          }
          if (null != selfMerge)
          {
            cmr.selfMerge = selfMerge;
          }
         
          if (null != exportToHdfs) {
            cmr.exportToHdfs = exportToHdfs;
          }
         
          //try to work out dependencies, error out if they fail
          if ( (null != jobsToDependOn) && !jobsToDependOn.equals("null"))
          {
            try
            {
              cmr.jobDependencies = getJobDependencies(jobsToDependOn);
              cmr.waitingOn = cmr.jobDependencies;
            }
            catch (Exception ex)
            {
              rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Error parsing the job dependencies, did a title or id get set incorrectly or did a job not exist?"));
              return rp;
            }
          }
          if ( json != null && !json.equals("null"))
          {
            cmr.arguments = json;
          }
          else
          {
            cmr.arguments = null;
          }
          if ((null == cmr.jarURL) && (null != cmr.arguments) && !cmr.arguments.isEmpty()) {
            // In saved query, if arguments is valid BSON then copy over query
            try {
              Object tmpQuery = com.mongodb.util.JSON.parse(cmr.arguments);
              if (tmpQuery instanceof BasicDBObject) {
                cmr.query = cmr.arguments;
              }
            }
            catch (Exception e) {} // fine just carry on
          }
          else if ((null == cmr.jarURL)) { // ie args == null, copy from query
            cmr.arguments = cmr.query;
          }
         
        }
        catch (IllegalArgumentException e)
        {
          // If an exception occurs log the error
          logger.error("Exception Message: " + e.getMessage(), e);
          rp.setResponse(new ResponseObject("Update MapReduce Job",false,"Illegal arg (enum needs to be DAILY/WEEKLY/MONTHLY/NONE?): " + e.getMessage()));
          return rp;
        }
        catch (Exception e)
        {
          // If an exception occurs log the error
          logger.error("Exception Message: " + e.getMessage(), e);
          rp.setResponse(new ResponseObject("Update MapReduce Job",false,"error scheduling job: " + e.getMessage()));
          return rp;
        }

        // Setup post-processing
       
        String nextRunString = new Date(cmr.nextRunTime).toString();
        boolean bRunNowIfPossible = false;
        if ( cmr.nextRunTime < new Date().getTime() ) {
          nextRunString = "next available timeslot";
          bRunNowIfPossible = true;
        }
       
        rp.setResponse(new ResponseObject("Update MapReduce Job",true,"Job updated successfully, will run on: " + nextRunString));
        rp.setData(cmr._id.toString(), null);

        if (bRunNowIfPossible) {
          runJobAndWaitForCompletion(cmr, bQuickRun);
        }//TESTED
        else {
          DbManager.getCustom().getLookup().save(cmr.toDb());         
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Update MapReduce Job", false, "You do not have permission to submit this job"));
      }
    }
    else
    {
      rp.setResponse(new ResponseObject("Update MapReduce Job", false, "No jobs with this ID exist"));
    }
    return rp;
  }
View Full Code Here

   * @param communityName
   * @return ResponsePojo
   */
  public ResponsePojo addCommunity(String personId, String communityId, String communityName)
  {
    ResponsePojo rp = new ResponsePojo();
    try
    {
      // Find person record to update
      PersonPojo personQuery = new PersonPojo();
      personQuery.set_id(new ObjectId(personId));
      DBObject dbo = DbManager.getSocial().getPerson().findOne(personQuery.toDb());
     
      if (dbo != null)
      {
        // Get GsonBuilder object with MongoDb de/serializers registered
        PersonPojo person = PersonPojo.fromDb(dbo, PersonPojo.class);

        // Create a new PersonCommunityPojo object
        PersonCommunityPojo community = new PersonCommunityPojo();
        community.set_id(new ObjectId(communityId));
        community.setName(communityName);
       
        // Check to see if person is already a member of the community to be added
        List<PersonCommunityPojo> communities = person.getCommunities();
        Boolean alreadyMember = false;
        for (PersonCommunityPojo c : communities)
        {
          String idToTest = c.get_id().toStringMongod();         
          if (idToTest.equals(communityId))
          {
            alreadyMember = true;
            break;
          }
        }
       
        // Add the community to the list if it does not already exist
        if (!alreadyMember)
        {
          //TODO (INF-1214): (not thread safe)         
          communities.add(community)
          person.setModified(new Date());         
          DbManager.getSocial().getPerson().update(personQuery.toDb(), person.toDb());
          rp.setData(person, new PersonPojoApiMap());
          rp.setResponse(new ResponseObject("Add community status",true,"Community added successfully."))
        }         
        else
        {
          rp.setResponse(new ResponseObject("Add community status",true,"Community already exists."))
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Add community status", false, "Person not found."));
      }
    }
    catch (Exception e)
    {     
      // If an exception occurs log the error
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Add community status", false,
          "Error adding community to person " + e.getMessage()));
   
    return rp;
  }
View Full Code Here

   * @param communityId
   * @return
   */
  public ResponsePojo removeCommunity(String personId, String communityId)
  {
    ResponsePojo rp = new ResponsePojo()
    try
    {
      // Find person record to update
      PersonPojo personQuery = new PersonPojo();
      personQuery.set_id(new ObjectId(personId));
      DBObject dbo = DbManager.getSocial().getPerson().findOne(personQuery.toDb());
     
      if (dbo != null)
      {
        PersonPojo person = PersonPojo.fromDb(dbo, PersonPojo.class);
       
        // Check to see if person is already a member of the community to be added
        List<PersonCommunityPojo> communities = person.getCommunities();
        Boolean alreadyMember = false;
        int communityIndex = 0;
        for (PersonCommunityPojo c : communities)
        {
          String idToTest = c.get_id().toStringMongod();         
          if (idToTest.equals(communityId))
          {
            alreadyMember = true;
            break;
          }
          communityIndex++;
        }       
       
        // Remove the community from the list
        if (alreadyMember)
        {
          //TODO (INF-1214): (not thread safe)         
          communities.remove(communityIndex)
          person.setModified(new Date());         
          DbManager.getSocial().getPerson().update(personQuery.toDb(), person.toDb());
          rp.setData(person, new PersonPojoApiMap());
          rp.setResponse(new ResponseObject("Remove community status",true,"Community removed successfully."))
        }         
        else
        {
          rp.setResponse(new ResponseObject("Remove community status",true,"Person is not a member of the specified community."))
        }
      }
      else
      {
        rp.setResponse(new ResponseObject("Remove community status", false, "Person not found."));
      }
    }
    catch (Exception e)
    {     
      // If an exception occurs log the error
      logger.error("Exception Message: " + e.getMessage(), e);
      rp.setResponse(new ResponseObject("Remove community status", false,
          "Error removing community record "));
   
    return rp;
  }
View Full Code Here

   *
   */
  @Get
  public Representation get()
  {
     ResponsePojo rp = new ResponsePojo();
     Date startTime = new Date()
     cookieLookup = RESTTools.cookieLookup(cookie);

     // If JSON is != null, check that it is valid JSON
     boolean isValidJson = true;
     if (json != null)
     {      
       try
       {
         JSON.parse(json);        
       }
       catch (Exception e)
       {
         rp.setResponse(new ResponseObject("Parsing JSON",false,"The value passed via the json parameter could not be" +
         " parsed as valid JSON."));
         isValidJson = false;
       }
      
       try
       {
         checkForBadCharacters(json);
       }
       catch (Exception e)
       {
         rp.setResponse(new ResponseObject("Parsing JSON",false,"The value passed via the json parameter has invalid characters: " + e.getMessage()));
         isValidJson = false;
       }
     }
    
     if (isValidJson)
     {
       if (cookieLookup == null )
       {
         // User is not logged in
         rp.setResponse(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
       }
       else
       {
         // UserId which will serve as the OwnerId for transactions below that require it
         personId = cookieLookup;

         if (action.equals("saveJson"))
         {
           rp = this.shareController.saveJson(personId, id, type, title, description, json);
         }
         else if (action.equals("addBinaryGET"))
         {
           rp = new ResponsePojo(new ResponseObject("addBinary",false,"Can only add binary in POST (do not use GET)"));
         }
         else if (action.equals("addBinaryPOST"))
         {
           rp = this.shareController.addBinary(personId,"binary",title,description,this.getRequest().getEntity().getMediaType().toString(),binaryData);
         }
         else if ( action.equals("updateBinaryGET"))
         {
           rp = new ResponsePojo(new ResponseObject("updateBinary",false,"Can only update binary in POST (do not use GET)"));
         }
         else if ( action.equals("updateBinaryPOST"))
         {
           rp = this.shareController.updateBinary(personId, id, "binary",title,description,this.getRequest().getEntity().getMediaType().toString(),binaryData);
         }
         else if (action.equals("addRef"))
         {
           rp = this.shareController.addRef(personId, type, documentLoc, documentId, title, description);
         }
         else if (action.equals("updateRef"))
         {
           rp = this.shareController.updateRef(personId, id, type, documentLoc, documentId, title, description);
         }
         else if (action.equals("removeShare"))
         {
           rp = this.shareController.removeShare(personId, shareId);
         }
         else if (action.equals("endorseShare"))
         {
           rp = this.shareController.endorseShare(personId, communityId, shareId, isEndorsed);
         }
         else if (action.equals("addCommunity"))
         {
           rp = this.shareController.addCommunity(personId, shareId, communityId, comment, readWrite);
         }
         else if (action.equals("removeCommunity"))
         {
           rp = this.shareController.removeCommunity(personId, shareId, communityId);
         }
         else if (action.equals("getShare"))
         {     
           rp = this.shareController.getShare(personId, shareId, returnContent)
           SharePojo share = (SharePojo) rp.getData();
           if (null != share) {
             boolean bBinary = share.getType().equals("binary");
             if ( bBinary && returnContent )          
             {     
               try
               {              
                 ByteArrayOutputRepresentation rep = new ByteArrayOutputRepresentation(MediaType.valueOf(share.getMediaType()));
                 rep.setOutputBytes(share.getBinaryData());
                 return rep;              
               }
               catch (Exception ex )
               {
                 rp = new ResponsePojo(new ResponseObject("get Share",false,"error converting bytes to output: " + ex.getMessage()));
               }            
             }
             else if (!bBinary && jsonOnly) {
               try {
                 BasicDBObject dbo = (BasicDBObject) com.mongodb.util.JSON.parse(share.getShare());
                 rp.setData(dbo, null);
               }
               catch (Exception e) { // Try a list instead
                 BasicDBList dbo = (BasicDBList) com.mongodb.util.JSON.parse(share.getShare());
                 rp.setData(dbo, (BasePojoApiMap<BasicDBList>)null);                
               }
             }
           }
           //(else error)
         }
         else if (action.equals("searchShares"))
         {
           rp = this.shareController.searchShares(personId, searchby, id, type, skip, limit, ignoreAdmin, returnContent, searchParent);
         }  
       }
     }
    
     Date endTime = new Date();
     rp.getResponse().setTime(endTime.getTime() - startTime.getTime());
     if (!rp.getResponse().isSuccess()) {
       if (rp.getResponse().getMessage().contains("ermission")) { // likely to be a permissions error
         RESTTools.logRequest(this);
       }
     }//TOTEST (TODO-2194)
     return new StringRepresentation(rp.toApi(), MediaType.APPLICATION_JSON);
  }
View Full Code Here

   * @throws ResourceException
   */
  @Get
  public Representation get()
  {
     ResponsePojo rp = new ResponsePojo();
     Date startTime = new Date()
    
     if ( needCookie )
     {
       cookieLookup = RESTTools.cookieLookup(cookie);
       if ( cookieLookup == null )
       {
         rp = new ResponsePojo();
         rp.setResponse(new ResponseObject("Cookie Lookup",false,"Cookie session expired or never existed, please login first"));
       }
       else
       {
         if ( action.equals("add"))
         {
           rp = this.featureHandler.suggestAlias(entity,updateItem, cookieLookup);
         }
         else if ( action.equals("approve"))
         {
           rp = this.featureHandler.approveAlias(updateItem);
         }
         else if ( action.equals("decline"))
         {
           rp = this.featureHandler.declineAlias(updateItem);
         }
         else if ( action.equals("all"))
         {
           rp = this.featureHandler.allAlias(cookieLookup);
         }
         else if ( action.equals("feature"))
         {
           rp = this.featureHandler.getEntityFeature(updateItem);
         }
       }
     }
     else
     {
       //no methods that dont need cookies
     }
    
    
     Date endTime = new Date();
     rp.getResponse().setTime(endTime.getTime() - startTime.getTime());
     return new StringRepresentation(rp.toApi(), MediaType.APPLICATION_JSON);
  }
View Full Code Here

TOP

Related Classes of com.ikanow.infinit.e.data_model.api.ResponsePojo

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.