Package org.apache.helix

Examples of org.apache.helix.HelixProperty
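
HelixProperty is the base wrapper around a ZNRecord, the generic record Helix stores in ZooKeeper; typed subclasses such as ExternalView, IdealState, and LiveInstance add convenience accessors on top of it. A minimal sketch of the underlying data model (the field names here are illustrative, not Helix constants):

    import java.util.Arrays;
    import java.util.Collections;
    import org.apache.helix.HelixProperty;
    import org.apache.helix.ZNRecord;

    public class HelixPropertySketch {
      public static void main(String[] args) {
        // Every HelixProperty is backed by a ZNRecord identified by an id
        HelixProperty property = new HelixProperty("myResource");
        ZNRecord record = property.getRecord();
        record.setSimpleField("SIMPLE_KEY", "value");                       // scalar field
        record.setListField("LIST_KEY", Arrays.asList("a", "b"));          // ordered values
        record.setMapField("MAP_KEY", Collections.singletonMap("k", "v")); // nested map
        System.out.println(record.getSimpleField("SIMPLE_KEY"));           // prints "value"
      }
    }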


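A getChildValues implementation from a stub data accessor: it converts stored ZNRecords into typed HelixProperty instances via the static convertToTypedInstance and convertToTypedList helpers, dispatching on the key's PropertyType.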
      public <T extends HelixProperty> List<T> getChildValues(PropertyKey key) {
        PropertyType type = key.getType();
        List<T> result = new ArrayList<T>();
        Class<? extends HelixProperty> clazz = key.getTypeClass();
        if (type == PropertyType.EXTERNALVIEW || type == PropertyType.IDEALSTATES) {
          HelixProperty typedInstance = HelixProperty.convertToTypedInstance(clazz, _externalView);
          result.add((T) typedInstance);
          return result;
        } else if (type == PropertyType.LIVEINSTANCES) {
          return (List<T>) HelixProperty.convertToTypedList(clazz, _liveInstances);
        }
        // no children for other property types in this stub
        return result;
      }


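Filtering a map of resource configs down to workflows: a resource counts as a workflow only if its record carries both the TARGET_STATE and DAG simple fields.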
    // Filter out non-workflow resources
    Iterator<Map.Entry<String, HelixProperty>> it = resourceConfigMap.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry<String, HelixProperty> e = it.next();
      HelixProperty resource = e.getValue();
      Map<String, String> simpleFields = resource.getRecord().getSimpleFields();
      if (!simpleFields.containsKey(WorkflowConfig.TARGET_STATE)
          || !simpleFields.containsKey(WorkflowConfig.DAG)) {
        it.remove();
      }
    }

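A variant of the same stub accessor, handling only the EXTERNALVIEW and LIVEINSTANCES property types: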
      public <T extends HelixProperty> List<T> getChildValues(PropertyKey key) {
        List<T> result = new ArrayList<T>();
        PropertyType type = key.getType();
        Class<? extends HelixProperty> clazz = key.getTypeClass();
        if (type == PropertyType.EXTERNALVIEW) {
          HelixProperty typedInstance = HelixProperty.convertToTypedInstance(clazz, _externalView);
          result.add((T) typedInstance);
          return result;
        } else if (type == PropertyType.LIVEINSTANCES) {
          return (List<T>) HelixProperty.convertToTypedList(clazz, _liveInstances);
        }
        return result;
      }


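A helper that registers an ideal state rule on the cluster config property: the rebalance mode and state model definition are packed into a single simple-field value keyed by the spec id.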
  private void createISSpec(HelixDataAccessor accessor, String specId, String stateModelDefRef,
      RebalanceMode rebalanceMode) {
    PropertyKey propertyKey = accessor.keyBuilder().clusterConfig();
    HelixProperty property = accessor.getProperty(propertyKey);
    if (property == null) {
      property = new HelixProperty("sampleClusterConfig");
    }
    String key = "IdealStateRule!" + specId;
    String value =
        IdealStateProperty.REBALANCE_MODE.toString() + "=" + rebalanceMode.toString() + ","
            + IdealStateProperty.STATE_MODEL_DEF_REF.toString() + "=" + stateModelDefRef;
    property.getRecord().setSimpleField(key, value);
    accessor.setProperty(propertyKey, property);
  }

  /** Creates a new named job queue (workflow) */
  public void createQueue(JobQueue queue) throws Exception {
    String queueName = queue.getName();
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    HelixProperty property = new HelixProperty(queueName);
    property.getRecord().getSimpleFields().putAll(queue.getResourceConfigMap());
    boolean created =
        accessor.createProperty(accessor.keyBuilder().resourceConfig(queueName), property);
    if (!created) {
      throw new IllegalArgumentException("Queue " + queueName + " already exists!");
    }
  }
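
Assuming the method above lives on a TaskDriver-style class (an assumption in this sketch, as are the queue name and the driver variable), creating a queue could look like:

    // Hypothetical usage: build a named queue and create it;
    // "driver" stands for an instance of the enclosing class
    JobQueue queue = new JobQueue.Builder("backupQueue").build();
    driver.createQueue(queue); // throws IllegalArgumentException if the queue exists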

  /** Adds a new job to the end of an existing named queue */
  public void enqueueJob(final String queueName, final String jobName, JobConfig.Builder jobBuilder)
      throws Exception {
    // Get the job queue config and capacity
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    HelixProperty workflowConfig =
        accessor.getProperty(accessor.keyBuilder().resourceConfig(queueName));
    if (workflowConfig == null) {
      throw new IllegalArgumentException("Queue " + queueName + " does not yet exist!");
    }
    boolean isTerminable =
        workflowConfig.getRecord().getBooleanField(WorkflowConfig.TERMINABLE, true);
    if (isTerminable) {
      throw new IllegalArgumentException(queueName + " is not a queue!");
    }
    final int capacity =
        workflowConfig.getRecord().getIntField(JobQueue.CAPACITY, Integer.MAX_VALUE);

    // Create the job to ensure that it validates
    JobConfig jobConfig = jobBuilder.setWorkflow(queueName).build();

    // Add the job to the end of the queue in the DAG
    // ... (DAG update elided in this excerpt)
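
A hedged usage sketch for enqueueJob; the command, target resource, and names below are illustrative, not taken from the excerpt:

    // Hypothetical usage: append a job to an existing, non-terminable queue
    JobConfig.Builder jobBuilder = new JobConfig.Builder()
        .setCommand("Backup")          // illustrative task command
        .setTargetResource("MyDB");    // illustrative target resource
    driver.enqueueJob("backupQueue", "nightlyBackup", jobBuilder);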

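An excerpt from job resource creation: after adding the resource, the job's configuration and its per-task config maps are written to the resource config node.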
    _admin.addResource(_clusterName, jobResource, numPartitions, TaskConstants.STATE_MODEL_NAME);

    // Set the job configuration
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    HelixProperty resourceConfig = new HelixProperty(jobResource);
    resourceConfig.getRecord().getSimpleFields().putAll(jobConfig.getResourceConfigMap());
    Map<String, TaskConfig> taskConfigMap = jobConfig.getTaskConfigMap();
    if (taskConfigMap != null) {
      for (TaskConfig taskConfig : taskConfigMap.values()) {
        resourceConfig.getRecord().setMapField(taskConfig.getId(), taskConfig.getConfigMap());
      }
    }
    accessor.setProperty(keyBuilder.resourceConfig(jobResource), resourceConfig);

    // Push out new ideal state based on number of target partitions
    // ... (ideal state construction elided in this excerpt)

  /**
   * Gets the job configuration for a given job resource.
   * @param jobResource The name of the job resource.
   * @return A {@link JobConfig} object if Helix contains valid configurations for the job, null
   *         otherwise.
   */
  public static JobConfig getJobCfg(HelixManager manager, String jobResource) {
    HelixProperty jobResourceConfig = getResourceConfig(manager, jobResource);
    return getJobCfg(jobResourceConfig);
  }
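
getResourceConfig is not shown in this excerpt; a plausible sketch, assuming it simply reads the resource config node through the data accessor (the same pattern enqueueJob uses above):

    private static HelixProperty getResourceConfig(HelixManager manager, String jobResource) {
      HelixDataAccessor accessor = manager.getHelixDataAccessor();
      return accessor.getProperty(accessor.keyBuilder().resourceConfig(jobResource));
    }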

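An excerpt that clones an existing workflow under a new name; it begins just after a duplicate-name check, then copies over the DAG, expiry, schedule, and each job's simple and task-level configs.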
      LOG.error("Workflow with name " + newWorkflowName + " already exists!");
      return null;
    }

    // Create a new workflow with a new name
    HelixProperty workflowConfig = resourceConfigMap.get(origWorkflowName);
    Map<String, String> wfSimpleFields = workflowConfig.getRecord().getSimpleFields();
    JobDag jobDag = JobDag.fromJson(wfSimpleFields.get(WorkflowConfig.DAG));
    Map<String, Set<String>> parentsToChildren = jobDag.getParentsToChildren();
    Workflow.Builder builder = new Workflow.Builder(newWorkflowName);

    // Set the workflow expiry
    builder.setExpiry(Long.parseLong(wfSimpleFields.get(WorkflowConfig.EXPIRY)));

    // Set the schedule, if applicable
    ScheduleConfig scheduleConfig;
    if (newStartTime != null) {
      scheduleConfig = ScheduleConfig.oneTimeDelayedStart(newStartTime);
    } else {
      scheduleConfig = parseScheduleFromConfigMap(wfSimpleFields);
    }
    if (scheduleConfig != null) {
      builder.setScheduleConfig(scheduleConfig);
    }

    // Add each job back as long as the original exists
    Set<String> namespacedJobs = jobDag.getAllNodes();
    for (String namespacedJob : namespacedJobs) {
      if (resourceConfigMap.containsKey(namespacedJob)) {
        // Copy over job-level and task-level configs
        String job = getDenamespacedJobName(origWorkflowName, namespacedJob);
        HelixProperty jobConfig = resourceConfigMap.get(namespacedJob);
        Map<String, String> jobSimpleFields = jobConfig.getRecord().getSimpleFields();
        jobSimpleFields.put(JobConfig.WORKFLOW_ID, newWorkflowName); // overwrite workflow name
        for (Map.Entry<String, String> e : jobSimpleFields.entrySet()) {
          builder.addConfig(job, e.getKey(), e.getValue());
        }
        Map<String, Map<String, String>> rawTaskConfigMap = jobConfig.getRecord().getMapFields();
        List<TaskConfig> taskConfigs = Lists.newLinkedList();
        for (Map<String, String> rawTaskConfig : rawTaskConfigMap.values()) {
          TaskConfig taskConfig = TaskConfig.from(rawTaskConfig);
          taskConfigs.add(taskConfig);
        }
        // ... (remaining per-job wiring elided in this excerpt)

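An excerpt from watch setup: each external view child is wrapped in a HelixProperty to read its bucket size, and bucketized parent nodes get both child-change and data-change subscriptions.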
        case EXTERNAL_VIEW: {
          // check if bucketized
          BaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_zkClient);
          List<ZNRecord> records = baseAccessor.getChildren(path, null, 0);
          for (ZNRecord record : records) {
            HelixProperty property = new HelixProperty(record);
            String childPath = path + "/" + record.getId();

            int bucketSize = property.getBucketSize();
            if (bucketSize > 0) {
              // subscribe both data-change and child-change on bucketized parent node
              // data-change gives a delete-callback which is used to remove watch
              subscribeChildChange(childPath, context);
              subscribeDataChange(childPath, context);
            }
            // ... (remainder elided in this excerpt)
