Skip to content

Commit

Permalink
Merge pull request apache#3065 from krichter722/checkstyle-autocreds
Browse files Browse the repository at this point in the history
STORM-3449: autocreds: fix all checkstyle warnings
  • Loading branch information
srdo authored Jul 4, 2019
2 parents bba702e + f5aecc1 commit 7ca1ffb
Show file tree
Hide file tree
Showing 17 changed files with 312 additions and 287 deletions.
2 changes: 1 addition & 1 deletion external/storm-autocreds/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@
<artifactId>maven-checkstyle-plugin</artifactId>
<!--Note - the version would be inherited-->
<configuration>
<maxAllowedViolations>249</maxAllowedViolations>
<maxAllowedViolations>0</maxAllowedViolations>
</configuration>
</plugin>
<plugin>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.storm.common;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.security.auth.Subject;
import javax.xml.bind.DatatypeConverter;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.security.Credentials;
Expand All @@ -27,15 +37,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.security.auth.Subject;
import javax.xml.bind.DatatypeConverter;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * The base class for auto credential plugins that abstracts out some of the common functionality.
*/
Expand All @@ -62,7 +63,7 @@ public void populateCredentials(Map<String, String> credentials) {
@Override
public void populateSubject(Subject subject, Map<String, String> credentials) {
addCredentialToSubject(subject, credentials);
addTokensToUGI(subject);
addTokensToUgi(subject);
}

/**
Expand All @@ -71,22 +72,22 @@ public void populateSubject(Subject subject, Map<String, String> credentials) {
@Override
public void updateSubject(Subject subject, Map<String, String> credentials) {
addCredentialToSubject(subject, credentials);
addTokensToUGI(subject);
addTokensToUgi(subject);
}

public Set<Pair<String, Credentials>> getCredentials(Map<String, String> credentials) {
return HadoopCredentialUtil.getCredential(this, credentials, configKeys);
}

/**
* Prepare the plugin
* Prepare the plugin.
*
* @param topoConf the topology conf
*/
protected abstract void doPrepare(Map<String, Object> topoConf);

/**
* The lookup key for the config key string
* The lookup key for the config key string.
*
* @return the config key string
*/
Expand All @@ -104,7 +105,7 @@ private void addCredentialToSubject(Subject subject, Map<String, String> credent
}
}

private void addTokensToUGI(Subject subject) {
private void addTokensToUgi(Subject subject) {
if (subject != null) {
Set<Credentials> privateCredentials = subject.getPrivateCredentials(Credentials.class);
if (privateCredentials != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.storm.common;

import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.DatatypeConverter;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.conf.Configuration;
Expand All @@ -29,14 +38,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.xml.bind.DatatypeConverter;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * The base class for auto credential plugins that abstracts out some of the common functionality.
*/
Expand All @@ -51,7 +52,9 @@ public void prepare(Map<String, Object> conf) {
}

@Override
public void populateCredentials(Map<String, String> credentials, Map<String, Object> topologyConf, final String topologyOwnerPrincipal) {
public void populateCredentials(Map<String, String> credentials,
Map<String, Object> topologyConf,
final String topologyOwnerPrincipal) {
try {
List<String> configKeys = getConfigKeys(topologyConf);
if (!configKeys.isEmpty()) {
Expand Down Expand Up @@ -103,14 +106,14 @@ protected void fillHadoopConfiguration(Map topologyConf, String configKey, Confi
}

/**
* Prepare the plugin
* Prepare the plugin.
*
* @param conf the storm cluster conf set via storm.yaml
*/
protected abstract void doPrepare(Map<String, Object> conf);

/**
* The lookup key for the config key string
* The lookup key for the config key string.
*
* @return the config key string
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.storm.common;

/**
* Provider interface for credential key.
*/
public interface CredentialKeyProvider {
/**
* The lookup key for the config key string
*
* @return the config key string
*/
String getCredentialKey(String configKey);
/**
* The lookup key for the config key string.
*
* @return the config key string
*/
String getCredentialKey(String configKey);
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,66 +15,69 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.common;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.security.Credentials;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package org.apache.storm.common;

import javax.xml.bind.DatatypeConverter;
import java.io.ByteArrayInputStream;
import java.io.ObjectInputStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.DatatypeConverter;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.security.Credentials;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Utility class for getting credential for Hadoop.
*/
final class HadoopCredentialUtil {
private static final Logger LOG = LoggerFactory.getLogger(HadoopCredentialUtil.class);
private static final Logger LOG = LoggerFactory.getLogger(HadoopCredentialUtil.class);

private HadoopCredentialUtil() {
}
private HadoopCredentialUtil() {
}

static Set<Pair<String, Credentials>> getCredential(CredentialKeyProvider provider,
Map<String, String> credentials, Collection<String> configKeys) {
Set<Pair<String, Credentials>> res = new HashSet<>();
if (!configKeys.isEmpty()) {
for (String configKey : configKeys) {
Credentials cred = doGetCredentials(provider, credentials, configKey);
if (cred != null) {
res.add(new Pair(configKey, cred));
static Set<Pair<String, Credentials>> getCredential(CredentialKeyProvider provider,
Map<String, String> credentials,
Collection<String> configKeys) {
Set<Pair<String, Credentials>> res = new HashSet<>();
if (!configKeys.isEmpty()) {
for (String configKey : configKeys) {
Credentials cred = doGetCredentials(provider, credentials, configKey);
if (cred != null) {
res.add(new Pair(configKey, cred));
}
}
} else {
Credentials cred = doGetCredentials(provider, credentials, StringUtils.EMPTY);
if (cred != null) {
res.add(new Pair(StringUtils.EMPTY, cred));
}
}
}
} else {
Credentials cred = doGetCredentials(provider, credentials, StringUtils.EMPTY);
if (cred != null) {
res.add(new Pair(StringUtils.EMPTY, cred));
}
return res;
}
return res;
}

private static Credentials doGetCredentials(CredentialKeyProvider provider,
Map<String, String> credentials, String configKey) {
Credentials credential = null;
String credentialKey = provider.getCredentialKey(configKey);
if (credentials != null && credentials.containsKey(credentialKey)) {
try {
byte[] credBytes = DatatypeConverter.parseBase64Binary(credentialKey);
ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(credBytes));
private static Credentials doGetCredentials(CredentialKeyProvider provider,
Map<String, String> credentials,
String configKey) {
Credentials credential = null;
String credentialKey = provider.getCredentialKey(configKey);
if (credentials != null && credentials.containsKey(credentialKey)) {
try {
byte[] credBytes = DatatypeConverter.parseBase64Binary(credentialKey);
ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(credBytes));

credential = new Credentials();
credential.readFields(in);
} catch (Exception e) {
LOG.error("Could not obtain credentials from credentials map.", e);
}
credential = new Credentials();
credential.readFields(in);
} catch (Exception e) {
LOG.error("Could not obtain credentials from credentials map.", e);
}
}
return credential;
}
return credential;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@

package org.apache.storm.hbase.security;

import org.apache.storm.common.AbstractHadoopAutoCreds;
import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_CREDENTIALS;

import java.util.Map;

import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_CREDENTIALS;
import org.apache.storm.common.AbstractHadoopAutoCreds;

/**
* Auto credentials plugin for HBase implementation. This class provides a way to automatically
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,49 +15,50 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.storm.hbase.security;

import org.apache.storm.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_KEYTAB_FILE_KEY;
import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_PRINCIPAL_KEY;

import javax.security.auth.Subject;
import java.util.HashMap;
import java.util.Map;
import javax.security.auth.Subject;

import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_KEYTAB_FILE_KEY;
import static org.apache.storm.hbase.security.HBaseSecurityUtil.HBASE_PRINCIPAL_KEY;
import org.apache.storm.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Command tool of Hive credential renewer
 * Command tool of HBase credential renewer.
*/
public final class AutoHBaseCommand {
private static final Logger LOG = LoggerFactory.getLogger(AutoHBaseCommand.class);

private AutoHBaseCommand() {
}

@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
Map<String, Object> conf = new HashMap<>();
conf.put(HBASE_PRINCIPAL_KEY, args[1]); // hbase principal [email protected]
conf.put(HBASE_KEYTAB_FILE_KEY,
args[2]); // storm hbase keytab /etc/security/keytabs/storm-hbase.keytab

AutoHBase autoHBase = new AutoHBase();
autoHBase.prepare(conf);
AutoHBaseNimbus autoHBaseNimbus = new AutoHBaseNimbus();
autoHBaseNimbus.prepare(conf);

Map<String, String> creds = new HashMap<>();
autoHBaseNimbus.populateCredentials(creds, conf, args[0]); //with realm e.g. [email protected]
LOG.info("Got HBase credentials" + autoHBase.getCredentials(creds));

Subject s = new Subject();
autoHBase.populateSubject(s, creds);
LOG.info("Got a Subject " + s);

autoHBaseNimbus.renew(creds, conf, args[0]);
LOG.info("renewed credentials" + autoHBase.getCredentials(creds));
}
private static final Logger LOG = LoggerFactory.getLogger(AutoHBaseCommand.class);

private AutoHBaseCommand() {
}

@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
Map<String, Object> conf = new HashMap<>();
conf.put(HBASE_PRINCIPAL_KEY, args[1]); // hbase principal [email protected]
conf.put(HBASE_KEYTAB_FILE_KEY,
args[2]); // storm hbase keytab /etc/security/keytabs/storm-hbase.keytab

AutoHBase autoHBase = new AutoHBase();
autoHBase.prepare(conf);
AutoHBaseNimbus autoHBaseNimbus = new AutoHBaseNimbus();
autoHBaseNimbus.prepare(conf);

Map<String, String> creds = new HashMap<>();
autoHBaseNimbus.populateCredentials(creds, conf, args[0]); //with realm e.g. [email protected]
LOG.info("Got HBase credentials" + autoHBase.getCredentials(creds));

Subject s = new Subject();
autoHBase.populateSubject(s, creds);
LOG.info("Got a Subject " + s);

autoHBaseNimbus.renew(creds, conf, args[0]);
LOG.info("renewed credentials" + autoHBase.getCredentials(creds));
}
}
Loading

0 comments on commit 7ca1ffb

Please sign in to comment.