HBASE-19488 Move to using Apache commons CollectionUtils
Signed-off-by: Apekshit Sharma <[email protected]>
belugabehr authored and apeksharma committed Apr 4, 2018
1 parent dcc840e commit d866e7c
Showing 9 changed files with 30 additions and 97 deletions.
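At a glance, call sites that previously imported HBase's internal org.apache.hadoop.hbase.util.CollectionUtils now import org.apache.commons.collections.CollectionUtils for checks like isEmpty, while helpers with no equivalent commons call used here (such as nullToEmpty) are inlined as explicit null checks. A minimal sketch of both sides of that switch; the class and method names below are illustrative and not part of the commit.

import java.util.Collections;
import java.util.List;

// After this commit: the Apache Commons helper replaces the HBase-internal one.
import org.apache.commons.collections.CollectionUtils;
// Before this commit: import org.apache.hadoop.hbase.util.CollectionUtils;

public class CollectionUtilsSwitchSketch {

  static void requireNonEmpty(List<String> names) {
    // isEmpty keeps the same call shape under the commons import.
    if (CollectionUtils.isEmpty(names)) {
      throw new IllegalArgumentException("empty input");
    }
  }

  static List<String> listOrEmpty(List<String> maybeNull) {
    // nullToEmpty is not mapped to a commons call in this commit; it is inlined
    // as a plain null check, as in the replication storage hunks below.
    return maybeNull != null ? maybeNull : Collections.emptyList();
  }
}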
Changed file 1 of 9
@@ -22,8 +22,9 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.yetus.audience.InterfaceAudience;

/**
@@ -47,7 +48,7 @@ public class RowMutations implements Row {
*/
public static RowMutations of(List<? extends Mutation> mutations) throws IOException {
if (CollectionUtils.isEmpty(mutations)) {
throw new IllegalArgumentException("Can't instantiate a RowMutations by empty list");
throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");
}
return new RowMutations(mutations.get(0).getRow(), mutations.size())
.add(mutations);
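A usage sketch of the RowMutations.of factory touched above, assuming the standard Put/Delete client API; the row, family, and qualifier names are illustrative, not from the commit. A non-empty list builds a RowMutations keyed on the first mutation's row, while an empty list trips the CollectionUtils.isEmpty guard and throws IllegalArgumentException.

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.util.Bytes;

public class RowMutationsSketch {

  static RowMutations buildBatch() throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    Put put = new Put(row)
        .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
    Delete delete = new Delete(row)
        .addColumn(Bytes.toBytes("f"), Bytes.toBytes("stale"));
    // Non-empty list: accepted, sized to mutations.size(), keyed on the first row.
    return RowMutations.of(Arrays.asList(put, delete));
  }

  static void rejectEmpty() throws IOException {
    // Empty list: rejected by the CollectionUtils.isEmpty check shown above.
    RowMutations.of(Collections.emptyList());
  }
}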
Changed file 2 of 9
@@ -38,6 +38,7 @@
import java.util.Iterator;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
Changed file 3 of 9
@@ -19,10 +19,6 @@
package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

@@ -34,82 +30,6 @@
@InterfaceAudience.Private
public class CollectionUtils {

private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(new ArrayList<>(0));


@SuppressWarnings("unchecked")
public static <T> Collection<T> nullSafe(Collection<T> in) {
if (in == null) {
return (Collection<T>)EMPTY_LIST;
}
return in;
}

/************************ size ************************************/

public static <T> int nullSafeSize(Collection<T> collection) {
if (collection == null) {
return 0;
}
return collection.size();
}

public static <A, B> boolean nullSafeSameSize(Collection<A> a, Collection<B> b) {
return nullSafeSize(a) == nullSafeSize(b);
}

/*************************** empty ****************************************/

public static <T> boolean isEmpty(Collection<T> collection) {
return collection == null || collection.isEmpty();
}

public static <T> boolean notEmpty(Collection<T> collection) {
return !isEmpty(collection);
}

/************************ first/last **************************/

public static <T> T getFirst(Collection<T> collection) {
if (CollectionUtils.isEmpty(collection)) {
return null;
}
for (T t : collection) {
return t;
}
return null;
}

/**
* @param list any list
* @return -1 if list is empty, otherwise the max index
*/
public static int getLastIndex(List<?> list){
if(isEmpty(list)){
return -1;
}
return list.size() - 1;
}

/**
* @param list
* @param index the index in question
* @return true if it is the last index or if list is empty and -1 is passed for the index param
*/
public static boolean isLastIndex(List<?> list, int index){
return index == getLastIndex(list);
}

public static <T> T getLast(List<T> list) {
if (isEmpty(list)) {
return null;
}
return list.get(list.size() - 1);
}

public static <T> List<T> nullToEmpty(List<T> list) {
return list != null ? list : Collections.emptyList();
}
/**
* In HBASE-16648 we found that ConcurrentHashMap.get is much faster than computeIfAbsent if the
* value already exists. Notice that the implementation does not guarantee that the supplier will
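The javadoc retained at the bottom of CollectionUtils describes the HBASE-16648 pattern: call ConcurrentHashMap.get first, since it is much faster than computeIfAbsent when the value already exists, and only fall back to computeIfAbsent on a miss. A minimal sketch of that pattern follows, assuming a shape suggested by the ConcurrentMap and Supplier imports kept above; the actual method signature in CollectionUtils may differ.

import java.util.concurrent.ConcurrentMap;
import java.util.function.Supplier;

public final class ComputeIfAbsentSketch {

  // Fast path: get is much faster than computeIfAbsent when the value already
  // exists (HBASE-16648). Slow path: computeIfAbsent installs the supplied
  // value atomically on a miss.
  public static <K, V> V computeIfAbsent(ConcurrentMap<K, V> map, K key, Supplier<V> supplier) {
    V value = map.get(key);
    if (value != null) {
      return value;
    }
    return map.computeIfAbsent(key, k -> supplier.get());
  }
}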
Changed file 4 of 9
@@ -18,11 +18,11 @@
package org.apache.hadoop.hbase.replication;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -131,7 +131,8 @@ public void updatePeerConfig(String peerId, ReplicationPeerConfig peerConfig)
@Override
public List<String> listPeerIds() throws ReplicationException {
try {
return CollectionUtils.nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, peersZNode));
List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, peersZNode);
return children != null ? children : Collections.emptyList();
} catch (KeeperException e) {
throw new ReplicationException("Cannot get the list of peers", e);
}
Changed file 5 of 9
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.replication;

import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hbase.util.CollectionUtils.nullToEmpty;

import java.util.ArrayList;
import java.util.Collections;
@@ -30,14 +29,14 @@
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
@@ -451,8 +450,11 @@ public void removeReplicatorIfQueueIsEmpty(ServerName serverName) throws Replica
}

private List<ServerName> getListOfReplicators0() throws KeeperException {
return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, queuesZNode)).stream()
.map(ServerName::parseServerName).collect(toList());
List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, queuesZNode);
if (children == null) {
children = Collections.emptyList();
}
return children.stream().map(ServerName::parseServerName).collect(toList());
}

@Override
@@ -466,7 +468,9 @@ public List<ServerName> getListOfReplicators() throws ReplicationException {

private List<String> getWALsInQueue0(ServerName serverName, String queueId)
throws KeeperException {
return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, getQueueNode(serverName, queueId)));
List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, getQueueNode(serverName,
queueId));
return children != null ? children : Collections.emptyList();
}

@Override
@@ -482,7 +486,8 @@ public List<String> getWALsInQueue(ServerName serverName, String queueId)
}

private List<String> getAllQueues0(ServerName serverName) throws KeeperException {
return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, getRsNode(serverName)));
List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, getRsNode(serverName));
return children != null ? children : Collections.emptyList();
}

@Override
@@ -602,7 +607,8 @@ public void removeHFileRefs(String peerId, List<String> files) throws Replicatio
}

private List<String> getAllPeersFromHFileRefsQueue0() throws KeeperException {
return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, hfileRefsZNode));
List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, hfileRefsZNode);
return children != null ? children : Collections.emptyList();
}

@Override
@@ -616,7 +622,9 @@ public List<String> getAllPeersFromHFileRefsQueue() throws ReplicationException
}

private List<String> getReplicableHFiles0(String peerId) throws KeeperException {
return nullToEmpty(ZKUtil.listChildrenNoWatch(this.zookeeper, getHFileRefsPeerNode(peerId)));
List<String> children = ZKUtil.listChildrenNoWatch(this.zookeeper,
getHFileRefsPeerNode(peerId));
return children != null ? children : Collections.emptyList();
}

@Override
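The same replacement recurs throughout the replication storage hunks above: each former CollectionUtils.nullToEmpty(ZKUtil.listChildrenNoWatch(...)) call becomes an explicit null check against Collections.emptyList(). A hypothetical local helper like the one below, which is not part of the commit, captures the recurring idiom.

import java.util.Collections;
import java.util.List;

// Hypothetical helper, not in the commit: the inlined pattern
// "children != null ? children : Collections.emptyList()" factored out.
final class NullToEmptySketch {
  static List<String> nullToEmpty(List<String> children) {
    return children != null ? children : Collections.emptyList();
  }
}

With such a helper, each private getXxx0 method above would reduce to a single nullToEmpty(ZKUtil.listChildrenNoWatch(...)) line, which is essentially what the removed HBase utility provided.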
Changed file 6 of 9
@@ -70,6 +70,7 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -156,7 +157,6 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
Changed file 7 of 9
@@ -42,6 +42,7 @@
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -125,7 +126,6 @@
import org.apache.hadoop.hbase.security.access.AccessChecker;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
Changed file 8 of 9
@@ -27,6 +27,7 @@
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@@ -45,13 +46,14 @@
import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher;
import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

/**
* Scanner scans both the memstore and the Store. Coalesce KeyValue stream into List&lt;KeyValue&gt;
Changed file 9 of 9
@@ -25,14 +25,14 @@
import java.util.Set;
import java.util.TreeSet;

import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
