Package org.apache.helix

Examples of org.apache.helix.HelixProperty
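
Before the excerpts, a minimal sketch of the core API they all rely on. This is an illustrative fragment, not taken from the indexed sources; the record id "MyResource" and the field name "FOO" are made up. A HelixProperty wraps a ZNRecord and exposes its id, simple fields, and bucket size.

    // Hypothetical sketch: wrap a ZNRecord in a HelixProperty and read it back.
    ZNRecord record = new ZNRecord("MyResource");            // node id (illustrative)
    record.setSimpleField("FOO", "bar");                     // illustrative simple field
    HelixProperty property = new HelixProperty(record);
    String id = property.getId();                            // "MyResource"
    String foo = property.getRecord().getSimpleField("FOO");
    int bucketSize = property.getBucketSize();               // 0 unless the property is bucketized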


    // Batch create: collect each key's ZooKeeper path and the property's
    // underlying ZNRecord, then delegate to the base data accessor. Note that
    // 'options' is recomputed each iteration, so the last key's type decides
    // the options used for the whole batch.
    for (int i = 0; i < keys.size(); i++) {
      PropertyKey key = keys.get(i);
      PropertyType type = key.getType();
      String path = key.getPath();
      paths.add(path);
      HelixProperty value = children.get(i);
      records.add(value.getRecord());
      options = constructOptions(type);
    }
    return _baseDataAccessor.createChildren(paths, records, options);
  }


      // From a loop over the keys/properties being written: record each key's
      // path and options, and stage the ZNRecord(s) to store, bucketizing
      // large external views into child nodes.
      PropertyType type = key.getType();
      String path = key.getPath();
      paths.add(path);
      options = constructOptions(type);

      HelixProperty value = children.get(i);

      switch (type) {
      case EXTERNALVIEW:
        // Small external views are written as a single ZNRecord; bucketized
        // ones are replaced with a parent "meta" record (simple fields only)
        // plus one child record per bucket.
        if (value.getBucketSize() == 0) {
          records.add(value.getRecord());
        } else {
          // Remove the old node before rewriting the parent and its buckets.
          _baseDataAccessor.remove(path, options);

          ZNRecord metaRecord = new ZNRecord(value.getId());
          metaRecord.setSimpleFields(value.getRecord().getSimpleFields());
          records.add(metaRecord);

          ZNRecordBucketizer bucketizer = new ZNRecordBucketizer(value.getBucketSize());

          Map<String, ZNRecord> map = bucketizer.bucketize(value.getRecord());
          List<String> childBucketizedPaths = new ArrayList<String>();
          List<ZNRecord> childBucketizedRecords = new ArrayList<ZNRecord>();
          for (String bucketName : map.keySet()) {
            childBucketizedPaths.add(path + "/" + bucketName);
            childBucketizedRecords.add(map.get(bucketName));
          }
          bucketizedPaths.set(i, childBucketizedPaths);
          bucketizedRecords.set(i, childBucketizedRecords);
        }
        break;
      case STATEMODELDEFS:
        // Only persist state model definitions that pass validation.
        if (value.isValid()) {
          records.add(value.getRecord());
        }
        break;
      default:
        records.add(value.getRecord());
        break;
      }
    }

    // set non-bucketized nodes or parent nodes of bucketized nodes
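
For context on the EXTERNALVIEW branch above, a hedged sketch of how ZNRecordBucketizer splits one large record into per-bucket records. The record id, partition-name format, and bucket size here are illustrative, and the fragment assumes the usual java.util imports; only the bucketizer calls themselves mirror the excerpt.

    // Illustrative only: bucketize a record whose map-field keys follow the
    // "<resource>_<partition#>" pattern that ZNRecordBucketizer expects.
    ZNRecord bigRecord = new ZNRecord("TestDB");
    for (int p = 0; p < 200; p++) {
      bigRecord.setMapField("TestDB_" + p, Collections.singletonMap("host_1", "ONLINE"));
    }
    ZNRecordBucketizer bucketizer = new ZNRecordBucketizer(50);      // 50 partitions per bucket
    Map<String, ZNRecord> buckets = bucketizer.bucketize(bigRecord);
    // Each entry maps a bucket name to the ZNRecord written under
    // path + "/" + bucketName, which is what the branch above stages.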

      // Excerpt from an alert handler: read the alert-fired history as a
      // HelixProperty, create the record if it does not exist yet, and trim
      // the oldest entries once ALERT_HISTORY_SIZE is reached.
      String date = dateFormat.format(new Date());

      HelixDataAccessor accessor = manager.getHelixDataAccessor();
      Builder keyBuilder = accessor.keyBuilder();

      HelixProperty property = accessor.getProperty(keyBuilder.alertHistory());
      ZNRecord alertFiredHistory;
      if (property == null) {
        alertFiredHistory = new ZNRecord(PropertyType.ALERT_HISTORY.toString());
      } else {
        alertFiredHistory = property.getRecord();
      }
      while (alertFiredHistory.getMapFields().size() >= ALERT_HISTORY_SIZE) {
        // ZNRecord uses TreeMap which is sorted ascending internally
        String firstKey = (String) (alertFiredHistory.getMapFields().keySet().toArray()[0]);
        alertFiredHistory.getMapFields().remove(firstKey);
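
The excerpt ends before the history is persisted again. Purely as an illustration (not the continuation of the original code), a record trimmed this way can be written back by wrapping it in a HelixProperty, using the same accessor calls that appear in the other excerpts:

      // Illustrative write-back: store the trimmed history under the key it was read from.
      accessor.setProperty(keyBuilder.alertHistory(), new HelixProperty(alertFiredHistory));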

  /** Creates a new named job queue (workflow) */
  public void createQueue(JobQueue queue) throws Exception {
    String queueName = queue.getName();
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    // The queue's settings are stored as a resource config: a HelixProperty
    // whose simple fields are the queue's resource config map.
    HelixProperty property = new HelixProperty(queueName);
    property.getRecord().getSimpleFields().putAll(queue.getResourceConfigMap());
    boolean created =
        accessor.createProperty(accessor.keyBuilder().resourceConfig(queueName), property);
    if (!created) {
      throw new IllegalArgumentException("Queue " + queueName + " already exists!");
    }

  /** Adds a new job to the end of an existing named queue */
  public void enqueueJob(final String queueName, final String jobName, JobConfig.Builder jobBuilder)
      throws Exception {
    // Get the job queue config and capacity
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    HelixProperty workflowConfig =
        accessor.getProperty(accessor.keyBuilder().resourceConfig(queueName));
    if (workflowConfig == null) {
      throw new IllegalArgumentException("Queue " + queueName + " does not yet exist!");
    }
    boolean isTerminable =
        workflowConfig.getRecord().getBooleanField(WorkflowConfig.TERMINABLE, true);
    if (isTerminable) {
      throw new IllegalArgumentException(queueName + " is not a queue!");
    }
    final int capacity =
        workflowConfig.getRecord().getIntField(JobQueue.CAPACITY, Integer.MAX_VALUE);

    // Create the job to ensure that it validates
    JobConfig jobConfig = jobBuilder.setWorkflow(queueName).build();

    // Add the job to the end of the queue in the DAG
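
A hedged usage sketch for the two helpers above. The excerpts do not show how a JobQueue or JobConfig.Builder is constructed, so the builder calls and the command name below are assumptions; only createQueue and enqueueJob come from the code shown here.

    // Assumed construction of the queue and job config (builder APIs not shown above).
    JobQueue myQueue = new JobQueue.Builder("myQueue").build();
    JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand("Reindex");

    createQueue(myQueue);                        // stores the queue's resource config
    enqueueJob("myQueue", "job_1", jobBuilder);  // validates the job and appends it to the queue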

    // Excerpt from job creation: register the job as a Helix resource, then
    // store its configuration (with per-task configs as map fields) as a
    // resource config HelixProperty.
    _admin.addResource(_clusterName, jobResource, numPartitions, TaskConstants.STATE_MODEL_NAME);

    // Set the job configuration
    HelixDataAccessor accessor = _manager.getHelixDataAccessor();
    PropertyKey.Builder keyBuilder = accessor.keyBuilder();
    HelixProperty resourceConfig = new HelixProperty(jobResource);
    resourceConfig.getRecord().getSimpleFields().putAll(jobConfig.getResourceConfigMap());
    Map<String, TaskConfig> taskConfigMap = jobConfig.getTaskConfigMap();
    if (taskConfigMap != null) {
      for (TaskConfig taskConfig : taskConfigMap.values()) {
        resourceConfig.getRecord().setMapField(taskConfig.getId(), taskConfig.getConfigMap());
      }
    }
    accessor.setProperty(keyBuilder.resourceConfig(jobResource), resourceConfig);

    // Push out new ideal state based on number of target partitions

      case CURRENTSTATES:
      case IDEALSTATES:
      case EXTERNALVIEW:
        // check if bucketized
        if (record != null) {
          HelixProperty property = new HelixProperty(record);

          int bucketSize = property.getBucketSize();
          if (bucketSize > 0) {
            // Bucketized: read every child bucket node and reassemble them into
            // a single ZNRecord before merging with the parent node's value.
            List<ZNRecord> childRecords = _baseDataAccessor.getChildren(path, null, options);
            ZNRecord assembledRecord = new ZNRecordAssembler().assemble(childRecords);

            // merge with parent node value

        switch (type) {
        case CURRENTSTATES:
        case IDEALSTATES:
        case EXTERNALVIEW:
          // check if bucketized; if so, assemble the value from the child
          // bucket nodes under the record's path
          if (record != null) {
            HelixProperty property = new HelixProperty(record);

            int bucketSize = property.getBucketSize();
            if (bucketSize > 0) {
              // TODO: fix this if record.id != pathName
              String childPath = parentPath + "/" + record.getId();
              List<ZNRecord> childRecords = _baseDataAccessor.getChildren(childPath, null, options);
              ZNRecord assembledRecord = new ZNRecordAssembler().assemble(childRecords);
