Example 1 with Bytes

use of org.apache.hadoop.hbase.util.Bytes in project hbase by apache.

the class HTableDescriptor method addCoprocessorToMap.

/**
   * Add coprocessor to values Map
   * @param specStr The Coprocessor specification all in one String, formatted so it matches
   * {@link HConstants#CP_HTD_ATTR_VALUE_PATTERN}
   * @return Returns <code>this</code>
   */
private HTableDescriptor addCoprocessorToMap(final String specStr) {
    if (specStr == null)
        return this;
    // generate a coprocessor key
    int maxCoprocessorNumber = 0;
    Matcher keyMatcher;
    for (Map.Entry<Bytes, Bytes> e : this.values.entrySet()) {
        keyMatcher = HConstants.CP_HTD_ATTR_KEY_PATTERN.matcher(Bytes.toString(e.getKey().get()));
        if (!keyMatcher.matches()) {
            continue;
        }
        maxCoprocessorNumber = Math.max(Integer.parseInt(keyMatcher.group(1)), maxCoprocessorNumber);
    }
    maxCoprocessorNumber++;
    String key = "coprocessor$" + Integer.toString(maxCoprocessorNumber);
    this.values.put(new Bytes(Bytes.toBytes(key)), new Bytes(Bytes.toBytes(specStr)));
    return this;
}
Also used : Bytes(org.apache.hadoop.hbase.util.Bytes) Matcher(java.util.regex.Matcher) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)
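
Since addCoprocessorToMap is private, outside code reaches it through the public addCoprocessor methods, which build the spec string and delegate. A minimal usage sketch follows; the table name and coprocessor class name are hypothetical, not taken from the HBase source.

import java.io.IOException;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;

public class CoprocessorSpecDemo {

    public static void main(String[] args) throws IOException {
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
        // addCoprocessor builds a spec string matching CP_HTD_ATTR_VALUE_PATTERN
        // and delegates to the private addCoprocessorToMap shown above, which
        // stores it under a generated key: coprocessor$1, coprocessor$2, ...
        // "org.example.DemoObserver" is a hypothetical coprocessor class name.
        htd.addCoprocessor("org.example.DemoObserver");
        System.out.println(htd);
    }
}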

Example 2 with Bytes

use of org.apache.hadoop.hbase.util.Bytes in project hbase by apache.

the class CompoundConfiguration method addBytesMap.

/**
   * Add a Bytes map to the config list. This map is generally
   * created by HTableDescriptor or HColumnDescriptor, but can be used
   * abstractly. The added configuration overrides the previous ones if
   * there are name collisions.
   *
   * @param map
   *          Bytes map
   * @return this, for builder pattern
   */
public CompoundConfiguration addBytesMap(final Map<Bytes, Bytes> map) {
    freezeMutableConf();
    // put new map at the front of the list (top priority)
    this.configs.add(0, new ImmutableConfigMap() {

        private final Map<Bytes, Bytes> m = map;

        @Override
        public Iterator<Map.Entry<String, String>> iterator() {
            Map<String, String> ret = new HashMap<>();
            for (Map.Entry<Bytes, Bytes> entry : map.entrySet()) {
                String key = Bytes.toString(entry.getKey().get());
                String val = entry.getValue() == null ? null : Bytes.toString(entry.getValue().get());
                ret.put(key, val);
            }
            return ret.entrySet().iterator();
        }

        @Override
        public String get(String key) {
            Bytes ibw = new Bytes(Bytes.toBytes(key));
            if (!m.containsKey(ibw))
                return null;
            Bytes value = m.get(ibw);
            if (value == null || value.get() == null)
                return null;
            return Bytes.toString(value.get());
        }

        @Override
        public String getRaw(String key) {
            return get(key);
        }

        @Override
        public Class<?> getClassByName(String name) throws ClassNotFoundException {
            return null;
        }

        @Override
        public int size() {
            return m.size();
        }

        @Override
        public String toString() {
            return m.toString();
        }
    });
    return this;
}
Also used : Bytes(org.apache.hadoop.hbase.util.Bytes) Iterator(java.util.Iterator) UnmodifiableIterator(org.apache.commons.collections.iterators.UnmodifiableIterator) Map(java.util.Map) HashMap(java.util.HashMap)
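
A minimal sketch of how addBytesMap might be exercised. CompoundConfiguration is marked for HBase-internal use, and the key and value below are hypothetical; the point is that the map added last sits at the front of the list and wins name collisions.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.util.Bytes;

public class CompoundConfigurationDemo {

    public static void main(String[] args) {
        // A Bytes-keyed map, shaped like the one HTableDescriptor.getValues() returns.
        Map<Bytes, Bytes> values = new HashMap<>();
        values.put(new Bytes(Bytes.toBytes("hbase.demo.setting")),
                   new Bytes(Bytes.toBytes("42")));
        // addBytesMap puts the map at the front of the config list, so its
        // entries override any earlier configuration with the same name.
        CompoundConfiguration conf = new CompoundConfiguration().addBytesMap(values);
        // prints 42
        System.out.println(conf.get("hbase.demo.setting"));
    }
}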

Example 3 with Bytes

use of org.apache.hadoop.hbase.util.Bytes in project hbase by apache.

the class Constraints method getConstraints.

/**
   * Get the constraints stored in the table descriptor
   * 
   * @param desc
   *          To read from
   * @param classloader
   *          To use when loading classes. If a special classloader is used on a
   *          region, for instance, then that should be the classloader used to
   *          load the constraints. This could also apply to unit-testing
   *          situations, where you want to ensure that the class is or is not
   *          reloaded.
   * @return List of configured {@link Constraint Constraints}
   * @throws IOException
   *           if any part of reading/arguments fails
   */
static List<? extends Constraint> getConstraints(HTableDescriptor desc, ClassLoader classloader) throws IOException {
    List<Constraint> constraints = new ArrayList<>();
    // loop through all the key, values looking for constraints
    for (Map.Entry<Bytes, Bytes> e : desc.getValues().entrySet()) {
        // read out the constraint
        String key = Bytes.toString(e.getKey().get()).trim();
        String[] className = CONSTRAINT_HTD_ATTR_KEY_PATTERN.split(key);
        if (className.length == 2) {
            key = className[1];
            if (LOG.isDebugEnabled()) {
                LOG.debug("Loading constraint:" + key);
            }
            // read in the rest of the constraint
            Configuration conf;
            try {
                conf = readConfiguration(e.getValue().get());
            } catch (IOException e1) {
                // log that we don't have a valid configuration stored, and move on.
                LOG.warn("Corrupted configuration found for key:" + key + ",  skipping it.");
                continue;
            }
            // if it is not enabled, skip it
            if (!conf.getBoolean(ENABLED_KEY, false)) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Constraint: " + key + " is DISABLED - skipping it");
                // go to the next constraint
                continue;
            }
            try {
                // add the constraint, now that we expect it to be valid.
                Class<? extends Constraint> clazz = classloader.loadClass(key).asSubclass(Constraint.class);
                Constraint constraint = clazz.newInstance();
                constraint.setConf(conf);
                constraints.add(constraint);
            } catch (ClassNotFoundException e1) {
                throw new IOException(e1);
            } catch (InstantiationException e1) {
                throw new IOException(e1);
            } catch (IllegalAccessException e1) {
                throw new IOException(e1);
            }
        }
    }
    // sort them, based on the priorities
    Collections.sort(constraints, constraintComparator);
    return constraints;
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Bytes(org.apache.hadoop.hbase.util.Bytes) Map(java.util.Map)
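
For context, a minimal sketch of a constraint class that getConstraints could load reflectively. DemoConstraint is hypothetical; a real implementation would validate the Put. Since getConstraints is package-private, application code normally registers constraints through Constraints.add and lets HBase read them back.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.constraint.BaseConstraint;
import org.apache.hadoop.hbase.constraint.ConstraintException;

// Loaded reflectively via classloader.loadClass(key).asSubclass(Constraint.class)
// and instantiated with newInstance(), so it needs a public no-arg constructor.
public class DemoConstraint extends BaseConstraint {

    @Override
    public void check(Put p) throws ConstraintException {
        // Accept every Put; a real constraint would validate p here and
        // throw ConstraintException to reject the write.
    }
}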

Example 4 with Bytes

use of org.apache.hadoop.hbase.util.Bytes in project hbase by apache.

the class Constraints method remove.

/**
   * Remove all {@link Constraint Constraints} that have been added to the table
   * and turn off the constraint processing.
   * <p>
   * All {@link Configuration Configurations} and their associated
   * {@link Constraint} are removed.
   * 
   * @param desc
   *          {@link HTableDescriptor} to remove {@link Constraint Constraints}
   *          from.
   */
public static void remove(HTableDescriptor desc) {
    // disable constraints
    disable(desc);
    // remove all the constraint settings
    List<Bytes> keys = new ArrayList<>();
    // loop through all the key, values looking for constraints
    for (Map.Entry<Bytes, Bytes> e : desc.getValues().entrySet()) {
        String key = Bytes.toString((e.getKey().get()));
        String[] className = CONSTRAINT_HTD_ATTR_KEY_PATTERN.split(key);
        if (className.length == 2) {
            keys.add(e.getKey());
        }
    }
    // now remove all the keys we found
    for (Bytes key : keys) {
        desc.remove(key);
    }
}
Also used : Bytes(org.apache.hadoop.hbase.util.Bytes) ArrayList(java.util.ArrayList) Map(java.util.Map)
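
A sketch of the add/remove round trip, assuming the hypothetical DemoConstraint from the previous example is on the classpath.

import java.io.IOException;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.constraint.Constraints;

public class RemoveConstraintsDemo {

    public static void main(String[] args) throws IOException {
        HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("demo_table"));
        // add(...) enables constraint processing and writes a constraint
        // key/value pair into the descriptor's values map.
        Constraints.add(desc, DemoConstraint.class);
        // remove(...) disables processing and strips every key that matches
        // CONSTRAINT_HTD_ATTR_KEY_PATTERN, as shown above.
        Constraints.remove(desc);
    }
}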

Example 5 with Bytes

use of org.apache.hadoop.hbase.util.Bytes in project hbase by apache.

the class ProtobufUtil method convertToTableSchema.

/**
   * Converts an HTableDescriptor to a pb TableSchema
   * @param htd the HTableDescriptor to convert
   * @return the pb TableSchema instance built from the given {@link HTableDescriptor}
   */
public static TableSchema convertToTableSchema(HTableDescriptor htd) {
    TableSchema.Builder builder = TableSchema.newBuilder();
    builder.setTableName(toProtoTableName(htd.getTableName()));
    for (Map.Entry<Bytes, Bytes> e : htd.getValues().entrySet()) {
        BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
        aBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey().get()));
        aBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue().get()));
        builder.addAttributes(aBuilder.build());
    }
    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
        builder.addColumnFamilies(convertToColumnFamilySchema(hcd));
    }
    for (Map.Entry<String, String> e : htd.getConfiguration().entrySet()) {
        NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
        aBuilder.setName(e.getKey());
        aBuilder.setValue(e.getValue());
        builder.addConfiguration(aBuilder.build());
    }
    return builder.build();
}
Also used : Bytes(org.apache.hadoop.hbase.util.Bytes) TableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) NameStringPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair) BytesBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) Map(java.util.Map) HashMap(java.util.HashMap)
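
A minimal round-trip sketch for convertToTableSchema; the table name, column family, and attribute below are hypothetical.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaDemo {

    public static void main(String[] args) {
        HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
        htd.addFamily(new HColumnDescriptor("cf"));
        htd.setValue("demo_attribute", "demo_value");
        // Attributes (the Bytes/Bytes values map), column families, and the
        // String/String configuration map are all copied into the pb message.
        TableSchema schema = ProtobufUtil.convertToTableSchema(htd);
        System.out.println(schema);
    }
}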

Aggregations

Map (java.util.Map): 11 usages
Bytes (org.apache.hadoop.hbase.util.Bytes): 11 usages
HashMap (java.util.HashMap): 8 usages
TreeMap (java.util.TreeMap): 5 usages
Matcher (java.util.regex.Matcher): 5 usages
ArrayList (java.util.ArrayList): 3 usages
IOException (java.io.IOException): 2 usages
Configuration (org.apache.hadoop.conf.Configuration): 2 usages
Iterator (java.util.Iterator): 1 usage
TreeSet (java.util.TreeSet): 1 usage
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 1 usage
ConcurrentMap (java.util.concurrent.ConcurrentMap): 1 usage
UnmodifiableIterator (org.apache.commons.collections.iterators.UnmodifiableIterator): 1 usage
AbstractReferenceMap (org.apache.commons.collections.map.AbstractReferenceMap): 1 usage
ReferenceMap (org.apache.commons.collections.map.ReferenceMap): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 1 usage
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 1 usage
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString): 1 usage
BytesBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair): 1 usage