Package org.apache.falcon.entity.v0.feed

Examples of org.apache.falcon.entity.v0.feed.Cluster
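
The fragments on this page come from Apache Falcon (and its earlier Ivory codebase) tests, helpers and Oozie mappers that read or rewrite the feed-level Cluster element. As a quick orientation, here is a minimal sketch of how such a Cluster is usually reached from an unmarshalled Feed; the feed XML resource path is a placeholder assumption, while the accessors used (getClusters().getClusters(), getName(), getType(), getLocations()) all appear in the fragments below.

        // Minimal sketch (placeholder resource path): walk a feed's per-cluster definitions.
        Feed feed = (Feed) EntityType.FEED.getUnmarshaller()
                .unmarshal(getClass().getResource("/config/feed/feed.xml"));

        for (Cluster cluster : feed.getClusters().getClusters()) {
            // Each org.apache.falcon.entity.v0.feed.Cluster carries a name, a type
            // (SOURCE or TARGET) and optional per-cluster location overrides.
            String name = cluster.getName();
            ClusterType type = cluster.getType();
            int overrides = cluster.getLocations() == null
                    ? 0 : cluster.getLocations().getLocations().size();
            System.out.println(name + " (" + type + "): " + overrides + " location overrides");
        }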


        }
    }

    @Test(expectedExceptions = ValidationException.class)
    public void testValidateInputPartitionForTable() throws Exception {
        Process process = parser.parse(
                ProcessEntityParserTest.class.getResourceAsStream("/config/process/process-table.xml"));
        if (process.getInputs() != null) {
            for (Input input : process.getInputs().getInputs()) {
                input.setPartition("region=usa");
            }
        }

        parser.validate(process);
View Full Code Here


            return null;
        }

        Cluster cluster = CONFIG_STORE.get(EntityType.CLUSTER, processCluster.getName());
        Path bundlePath = new Path(ClusterHelper.getLocation(cluster, "staging"), EntityUtil.getStagingPath(process));
        Process processClone = (Process) process.copy();
        EntityUtil.setStartDate(processClone, clusterName, startDate);

        OozieProcessMapper mapper = new OozieProcessMapper(processClone);
        if (!mapper.map(cluster, bundlePath)) {
            return null;
View Full Code Here

            feed.setName(name);
            store.publish(type, feed);
            break;

        case PROCESS:
            Process process = (Process) unmarshaller.unmarshal(this.getClass().getResource(resource));
            process.setName(name);
            store.publish(type, process);
            break;

        default:
        }
View Full Code Here

        STORE.publish(EntityType.FEED, feed1);
        STORE.publish(EntityType.FEED, feed2);
        STORE.publish(EntityType.FEED, feed3);

        Process process1 = (Process) EntityType.PROCESS.getUnmarshaller()
                .unmarshal(this.getClass().getResource(PROCESS1_XML));
        STORE.publish(EntityType.PROCESS, process1);
        Process process2 = (Process) EntityType.PROCESS.getUnmarshaller()
                .unmarshal(this.getClass().getResource(PROCESS2_XML));
        STORE.publish(EntityType.PROCESS, process2);
    }
View Full Code Here

     * @param bundlePath - bundle path
     * @return COORDINATORAPP
     * @throws FalconException on error
     */
    public COORDINATORAPP createDefaultCoordinator(Cluster cluster, Path bundlePath) throws FalconException {
        Process process = getEntity();
        if (process == null) {
            return null;
        }

        COORDINATORAPP coord = new COORDINATORAPP();
        String coordName = EntityUtil.getWorkflowName(Tag.DEFAULT, process).toString();
        Path coordPath = getCoordPath(bundlePath, coordName);

        // coord attributes
        initializeCoordAttributes(cluster, process, coord, coordName);

        CONTROLS controls = initializeControls(process); // controls
        coord.setControls(controls);

        // Configuration
        Map<String, String> props = createCoordDefaultConfiguration(cluster, coordPath, coordName);

        initializeInputPaths(cluster, process, coord, props); // inputs
        initializeOutputPaths(cluster, process, coord, props); // outputs

        Workflow processWorkflow = process.getWorkflow();
        props.put("userWorkflowEngine", processWorkflow.getEngine().value());

        // create parent wf
        createWorkflow(cluster, process, processWorkflow, coordName, coordPath);

View Full Code Here

        return expr;
    }

    @Override
    protected Map<String, String> getEntityProperties() {
        Process process = getEntity();
        Map<String, String> props = new HashMap<String, String>();
        if (process.getProperties() != null) {
            for (Property prop : process.getProperties().getProperties()) {
                props.put(prop.getName(), prop.getValue());
            }
        }
        return props;
    }
View Full Code Here

        ClusterHelper.getInterface(cluster, Interfacetype.WRITE).setEndpoint(hdfsUrl);
        ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).setEndpoint("thrift://localhost:49083");
        fs = new Path(hdfsUrl).getFileSystem(new Configuration());
        fs.create(new Path(ClusterHelper.getLocation(cluster, "working"), "libext/PROCESS/ext.jar")).close();

        Process process = store.get(EntityType.PROCESS, "clicksummary");
        Path wfpath = new Path(process.getWorkflow().getPath());
        assert new Path(hdfsUrl).getFileSystem(new Configuration()).mkdirs(wfpath);
    }
View Full Code Here

        Map<String, String> props = createCoordDefaultConfiguration(cluster, coordPath, coordName);

        initializeInputPaths(cluster, process, coord, props); // inputs
        initializeOutputPaths(cluster, process, coord, props); // outputs

        Workflow processWorkflow = process.getWorkflow();
        props.put("userWorkflowEngine", processWorkflow.getEngine().value());

        // create parent wf
        createWorkflow(cluster, process, processWorkflow, coordName, coordPath);

        WORKFLOW wf = new WORKFLOW();
View Full Code Here

        return null;
    }
   
    public static Location getLocation(Feed feed, LocationType type, String clusterName) {
        Cluster cluster = getCluster(feed, clusterName);
        if (cluster != null && cluster.getLocations() != null
                && cluster.getLocations().getLocations().size() != 0) {
            return getLocation(cluster.getLocations(), type);
        } else {
            return getLocation(feed.getLocations(), type);
        }
View Full Code Here

        case CLUSTER:
            return entity;

        case FEED:
            Feed feed = (Feed) entity.clone();
            Cluster feedCluster = FeedHelper.getCluster(feed, clusterName);
            Iterator<Cluster> itr = feed.getClusters().getClusters().iterator();
            while (itr.hasNext()) {
                Cluster cluster = itr.next();
                // In addition to retaining the required cluster, retain the source clusters
                // if this is the target cluster
                if (!(cluster.getName().equals(clusterName)
                        || (feedCluster.getType() == ClusterType.TARGET && cluster.getType() == ClusterType.SOURCE))) {
                    itr.remove();
                }
            }
            return (T) feed;

        case PROCESS:
            Process process = (Process) entity.clone();
            Iterator<org.apache.ivory.entity.v0.process.Cluster> procItr =
                    process.getClusters().getClusters().iterator();
            while (procItr.hasNext()) {
                org.apache.ivory.entity.v0.process.Cluster cluster = procItr.next();
                if (!cluster.getName().equals(clusterName)) {
                    procItr.remove();
                }
            }
            return (T) process;
        }
        throw new UnsupportedOperationException("Not supported for entity type " + entity.getEntityType());
View Full Code Here

