Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Reverted unrelated changes
Browse files Browse the repository at this point in the history
bhattmanish98 committed Jan 30, 2025
1 parent fd8957f commit aeb8519
Showing 7 changed files with 62 additions and 95 deletions.
Original file line number Diff line number Diff line change
@@ -116,22 +116,7 @@
import static org.apache.hadoop.fs.CommonConfigurationKeys.IOSTATISTICS_LOGGING_LEVEL;
import static org.apache.hadoop.fs.CommonConfigurationKeys.IOSTATISTICS_LOGGING_LEVEL_DEFAULT;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_APPEND;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_CREATE;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_CREATE_NON_RECURSIVE;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_DELETE;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_EXIST;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_GET_DELEGATION_TOKEN;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_GET_FILE_STATUS;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_LIST_STATUS;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_MKDIRS;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_OPEN;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.CALL_RENAME;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.DIRECTORIES_CREATED;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.DIRECTORIES_DELETED;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.ERROR_IGNORED;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.FILES_CREATED;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.FILES_DELETED;
import static org.apache.hadoop.fs.azurebfs.AbfsStatistic.*;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.CPK_IN_NON_HNS_ACCOUNT_ERROR_MESSAGE;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.DATA_BLOCKS_BUFFER;
import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_IS_HNS_ENABLED;
Original file line number Diff line number Diff line change
@@ -21,8 +21,8 @@
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
@@ -51,36 +51,22 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.fs.azurebfs.constants.AbfsServiceType;
import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
import org.apache.hadoop.fs.azurebfs.extensions.EncryptionContextProvider;
import org.apache.hadoop.fs.azurebfs.security.ContextProviderEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.security.ContextEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.security.NoContextEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientHandler;
import org.apache.hadoop.fs.azurebfs.utils.EncryptionType;
import org.apache.hadoop.fs.impl.BackReference;
import org.apache.hadoop.fs.PathIOException;

import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.EtagSource;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIOException;
import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
import org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes;
import org.apache.hadoop.fs.azurebfs.constants.AbfsServiceType;
import org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations;
import org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes;
import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AbfsRestOperationException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
@@ -90,44 +76,52 @@
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidFileSystemPropertyException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidUriAuthorityException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidUriException;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.TrileanConversionException;
import org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode;
import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultEntrySchema;
import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
import org.apache.hadoop.fs.azurebfs.contracts.exceptions.TrileanConversionException;
import org.apache.hadoop.fs.azurebfs.enums.Trilean;
import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider;
import org.apache.hadoop.fs.azurebfs.extensions.EncryptionContextProvider;
import org.apache.hadoop.fs.azurebfs.extensions.ExtensionHelper;
import org.apache.hadoop.fs.azurebfs.extensions.SASTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider;
import org.apache.hadoop.fs.azurebfs.oauth2.AzureADAuthenticator;
import org.apache.hadoop.fs.azurebfs.oauth2.IdentityTransformer;
import org.apache.hadoop.fs.azurebfs.oauth2.IdentityTransformerInterface;
import org.apache.hadoop.fs.azurebfs.security.ContextEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.security.ContextProviderEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.security.NoContextEncryptionAdapter;
import org.apache.hadoop.fs.azurebfs.services.AbfsAclHelper;
import org.apache.hadoop.fs.azurebfs.services.AbfsClient;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientContext;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientContextBuilder;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientHandler;
import org.apache.hadoop.fs.azurebfs.services.AbfsClientRenameResult;
import org.apache.hadoop.fs.azurebfs.services.AbfsCounters;
import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
import org.apache.hadoop.fs.azurebfs.services.AbfsInputStream;
import org.apache.hadoop.fs.azurebfs.services.AbfsInputStreamContext;
import org.apache.hadoop.fs.azurebfs.services.AbfsInputStreamStatisticsImpl;
import org.apache.hadoop.fs.azurebfs.services.AbfsLease;
import org.apache.hadoop.fs.azurebfs.services.AbfsOutputStream;
import org.apache.hadoop.fs.azurebfs.services.AbfsOutputStreamContext;
import org.apache.hadoop.fs.azurebfs.services.AbfsOutputStreamStatisticsImpl;
import org.apache.hadoop.fs.azurebfs.services.AbfsPerfInfo;
import org.apache.hadoop.fs.azurebfs.services.AbfsPerfTracker;
import org.apache.hadoop.fs.azurebfs.services.AbfsPermission;
import org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation;
import org.apache.hadoop.fs.azurebfs.services.AuthType;
import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy;
import org.apache.hadoop.fs.azurebfs.services.StaticRetryPolicy;
import org.apache.hadoop.fs.azurebfs.services.AbfsLease;
import org.apache.hadoop.fs.azurebfs.services.SharedKeyCredentials;
import org.apache.hadoop.fs.azurebfs.services.AbfsPerfTracker;
import org.apache.hadoop.fs.azurebfs.services.AbfsPerfInfo;
import org.apache.hadoop.fs.azurebfs.services.ListingSupport;
import org.apache.hadoop.fs.azurebfs.services.SharedKeyCredentials;
import org.apache.hadoop.fs.azurebfs.services.StaticRetryPolicy;
import org.apache.hadoop.fs.azurebfs.utils.Base64;
import org.apache.hadoop.fs.azurebfs.utils.CRC64;
import org.apache.hadoop.fs.azurebfs.utils.DateTimeUtils;
import org.apache.hadoop.fs.azurebfs.utils.EncryptionType;
import org.apache.hadoop.fs.azurebfs.utils.TracingContext;
import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
import org.apache.hadoop.fs.impl.BackReference;
import org.apache.hadoop.fs.impl.OpenFileParameters;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
@@ -136,7 +130,11 @@
import org.apache.hadoop.fs.store.DataBlocks;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.Futures;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListenableFuture;
import org.apache.hadoop.util.BlockingThreadPoolExecutorService;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.SemaphoredDelegatingExecutor;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.http.client.utils.URIBuilder;
@@ -699,7 +697,7 @@ public OutputStream createFile(final Path path,
if (getClient().getEncryptionType()
== EncryptionType.ENCRYPTION_CONTEXT) {
contextEncryptionAdapter = new ContextProviderEncryptionAdapter(
getClient().getEncryptionContextProvider(), getRelativePath(path));
createClient.getEncryptionContextProvider(), getRelativePath(path));
} else {
contextEncryptionAdapter = NoContextEncryptionAdapter.getInstance();
}
@@ -714,7 +712,7 @@ public OutputStream createFile(final Path path,
);

} else {
op = getClient().createPath(relativePath, true,
op = createClient.createPath(relativePath, true,
overwrite,
new Permissions(isNamespaceEnabled, permission, umask),
isAppendBlob,
@@ -763,7 +761,7 @@ private AbfsRestOperation conditionalCreateOverwriteFile(final String relativePa
// Trigger a create with overwrite=false first so that eTag fetch can be
// avoided for cases when no pre-existing file is present (major portion
// of create file traffic falls into the case of no pre-existing file).
op = getClient().createPath(relativePath, true, false, permissions,
op = createClient.createPath(relativePath, true, false, permissions,
isAppendBlob, null, contextEncryptionAdapter, tracingContext);

} catch (AbfsRestOperationException e) {
@@ -787,7 +785,7 @@ private AbfsRestOperation conditionalCreateOverwriteFile(final String relativePa

try {
// overwrite only if eTag matches with the file properties fetched before
op = getClient().createPath(relativePath, true, true, permissions,
op = createClient.createPath(relativePath, true, true, permissions,
isAppendBlob, eTag, contextEncryptionAdapter, tracingContext);
} catch (AbfsRestOperationException ex) {
if (ex.getStatusCode() == HttpURLConnection.HTTP_PRECON_FAILED) {
Original file line number Diff line number Diff line change
@@ -325,10 +325,5 @@ public static ApiVersion getCurrentVersion() {
public static final String COPY_STATUS_ABORTED = "aborted";
public static final String COPY_STATUS_FAILED = "failed";

public static final String COPY_STATUS_SUCCESS = "success";
public static final String COPY_STATUS_PENDING = "pending";
public static final String COPY_STATUS_ABORTED = "aborted";
public static final String COPY_STATUS_FAILED = "failed";

private AbfsHttpConstants() {}
}
Original file line number Diff line number Diff line change
@@ -109,6 +109,11 @@ public final class HttpHeaderConfigurations {
* {@value}
*/
public static final String X_MS_BLOB_TYPE = "x-ms-blob-type";

/**
* Http Request Header for copy id.
* {@value}
*/
public static final String X_MS_COPY_ID = "x-ms-copy-id";

/**
Original file line number Diff line number Diff line change
@@ -62,8 +62,10 @@ public final class AbfsErrors {
* Exception message on filesystem init if token-provider-auth-type configs are provided
*/
public static final String UNAUTHORIZED_SAS = "Incorrect SAS token provider configured for non-hierarchical namespace account.";
public static final String ERR_RENAME_BLOB = "FNS-Blob rename was not successful for source and destination path: ";
public static final String ERR_DELETE_BLOB = "FNS-Blob delete was not successful for path: ";
public static final String ERR_RENAME_BLOB =
"FNS-Blob rename was not successful for source and destination path: ";
public static final String ERR_DELETE_BLOB =
"FNS-Blob delete was not successful for path: ";
public static final String ATOMIC_DIR_RENAME_RECOVERY_ON_GET_PATH_EXCEPTION =
"Path had to be recovered from atomic rename operation.";
private AbfsErrors() {}
Original file line number Diff line number Diff line change
@@ -58,24 +58,15 @@ public final class UriUtils {
private static final Logger LOG = LoggerFactory.getLogger(
UriUtils.class);

private static final String ABFS_URI_REGEX
= "[^.]+\\.dfs\\.(preprod\\.){0,1}core\\.windows\\.net";

private static final Pattern ABFS_URI_PATTERN = Pattern.compile(
ABFS_URI_REGEX);

private static final String ABFS_URI_REGEX = "[^.]+\\.dfs\\.(preprod\\.){0,1}core\\.windows\\.net";
private static final Pattern ABFS_URI_PATTERN = Pattern.compile(ABFS_URI_REGEX);
private static final Set<String> FULL_MASK_PARAM_KEYS = new HashSet<>(
Collections.singleton(QUERY_PARAM_SIGNATURE));

private static final Set<String> PARTIAL_MASK_PARAM_KEYS = new HashSet<>(
Arrays.asList(QUERY_PARAM_SKOID, QUERY_PARAM_SAOID, QUERY_PARAM_SUOID));

private static final Character CHAR_MASK = 'X';

private static final String FULL_MASK = "XXXXX";

private static final int DEFAULT_QUERY_STRINGBUILDER_CAPACITY = 550;

private static final int PARTIAL_MASK_VISIBLE_LEN = 18;

/**
@@ -121,9 +112,7 @@ public static String extractAccountNameFromHostName(final String hostName) {
*/
public static String generateUniqueTestPath() {
String testUniqueForkId = System.getProperty("test.unique.fork.id");
return testUniqueForkId == null
? "/test"
: "/" + testUniqueForkId + "/test";
return testUniqueForkId == null ? "/test" : "/" + testUniqueForkId + "/test";
}

public static String maskUrlQueryParameters(List<NameValuePair> keyValueList,
@@ -150,8 +139,7 @@ public static String maskUrlQueryParameters(List<NameValuePair> keyValueList,
for (NameValuePair keyValuePair : keyValueList) {
String key = keyValuePair.getName();
if (key.isEmpty()) {
throw new IllegalArgumentException(
"Query param key should not be empty");
throw new IllegalArgumentException("Query param key should not be empty");
}
String value = keyValuePair.getValue();
maskedUrl.append(key);
@@ -204,8 +192,7 @@ public static String getMaskedUrl(URL url) {
*/
public static URL changeUrlFromBlobToDfs(URL url) throws InvalidUriException {
try {
url = new URL(replacedUrl(url.toString(), ABFS_BLOB_DOMAIN_NAME,
ABFS_DFS_DOMAIN_NAME));
url = new URL(replacedUrl(url.toString(), ABFS_BLOB_DOMAIN_NAME, ABFS_DFS_DOMAIN_NAME));
} catch (MalformedURLException ex) {
throw new InvalidUriException(url.toString());
}
@@ -221,8 +208,7 @@ public static URL changeUrlFromBlobToDfs(URL url) throws InvalidUriException {
*/
public static URL changeUrlFromDfsToBlob(URL url) throws InvalidUriException {
try {
url = new URL(replacedUrl(url.toString(), ABFS_DFS_DOMAIN_NAME,
ABFS_BLOB_DOMAIN_NAME));
url = new URL(replacedUrl(url.toString(), ABFS_DFS_DOMAIN_NAME, ABFS_BLOB_DOMAIN_NAME));
} catch (MalformedURLException ex) {
throw new InvalidUriException(url.toString());
}
@@ -238,9 +224,7 @@ public static URL changeUrlFromDfsToBlob(URL url) throws InvalidUriException {
* @param newString the string to be replaced with.
* @return updated URL
*/
private static String replacedUrl(String baseUrl,
String oldString,
String newString) {
private static String replacedUrl(String baseUrl, String oldString, String newString) {
int startIndex = baseUrl.toString().indexOf("//") + 2;
int endIndex = baseUrl.toString().indexOf("/", startIndex);
if (oldString == null || newString == null || startIndex < 0
Loading

0 comments on commit aeb8519

Please sign in to comment.