diff --git hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index b8dd6d0..dd586b3 100644
--- hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -929,7 +929,25 @@ public static OutputStream create(Path f, boolean append) throws IOException {
     private static native long elevatedCreateImpl(String path,
         long desiredAccess, long shareMode, long creationDisposition,
         long flags) throws IOException;
+
+    public static boolean deleteFile(Path path) throws IOException {
+      if (!nativeLoaded) {
+        throw new IOException("NativeIO libraries are required for deleteFile");
+      }
+
+      return elevatedDeletePathImpl(path.toString(), false);
+    }
+
+    public static boolean deleteDirectory(Path path) throws IOException {
+      if (!nativeLoaded) {
+        throw new IOException("NativeIO libraries are required for deleteDirectory");
+      }
+
+      return elevatedDeletePathImpl(path.toString(), true);
+    }
+
+    private static native boolean elevatedDeletePathImpl(String path, boolean isDir) throws IOException;
   }
 
 /**
diff --git hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
index 3af82fc..bcce800 100644
--- hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
+++ hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
@@ -1467,6 +1467,42 @@ done:
 #endif
 }
 
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Elevated
+ * Method:    elevatedDeletePathImpl
+ * Signature: (Ljava/lang/String;Z)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Elevated_elevatedDeletePathImpl(JNIEnv* env,
+  jclass clazz, jstring jpath, jboolean jIsDir) {
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function elevatedDeletePathImpl is not supported on Unix");
+  return JNI_FALSE;
+#endif
+
+#ifdef WINDOWS
+  LPCWSTR path = NULL;
+  DWORD dwError;
+  BOOL deleted = FALSE;
+
+  path = (LPCWSTR) (*env)->GetStringChars(env, jpath, NULL);
+  if (!path) goto done; // exception was thrown
+
+  dwError = RpcCall_WinutilsDeletePath(path, (BOOL) jIsDir, &deleted);
+
+  if (dwError != ERROR_SUCCESS) {
+    throw_ioe(env, dwError);
+  }
+
+done:
+  if (path) (*env)->ReleaseStringChars(env, jpath, path);
+  return (jboolean) deleted;
+#endif
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */
diff --git hadoop-common-project/hadoop-common/src/main/winutils/client.c hadoop-common-project/hadoop-common/src/main/winutils/client.c
index 1657220..047bfb5 100644
--- hadoop-common-project/hadoop-common/src/main/winutils/client.c
+++ hadoop-common-project/hadoop-common/src/main/winutils/client.c
@@ -389,6 +389,58 @@ done:
   return dwError;
 }
 
+DWORD RpcCall_WinutilsDeletePath(
+  __in LPCWSTR path,
+  __in BOOL isDir,
+  __out BOOL* pDeleted) {
+
+  DWORD dwError = EXIT_FAILURE;
+  ULONG ulCode;
+  DELETEPATH_REQUEST request;
+  DELETEPATH_RESPONSE *response = NULL;
+  RPC_BINDING_HANDLE hHadoopWinutilsSvcBinding;
+  BOOL rpcBindingInit = FALSE;
+
+  *pDeleted = FALSE;
+
+  dwError = PrepareRpcBindingHandle(&hHadoopWinutilsSvcBinding);
+  if (dwError) {
+    ReportClientError(L"PrepareRpcBindingHandle", dwError);
+    goto done;
+  }
+  rpcBindingInit = TRUE;
+
+  ZeroMemory(&request, sizeof(request));
+  request.path = path;
+  request.type = isDir ? PATH_IS_DIR : PATH_IS_FILE;
+
+  RpcTryExcept {
+    dwError = WinutilsDeletePath(hHadoopWinutilsSvcBinding, &request, &response);
+  }
+  RpcExcept(1) {
+    ulCode = RpcExceptionCode();
+    ReportClientError(L"RpcExcept", ulCode);
+    dwError = (DWORD) ulCode;
+  }
+  RpcEndExcept;
+
+  if (ERROR_SUCCESS == dwError) {
+    *pDeleted = response->deleted;
+  }
+
+done:
+  if (rpcBindingInit) RpcBindingFree(&hHadoopWinutilsSvcBinding);
+
+  if (NULL != response) MIDL_user_free(response);
+
+  LogDebugMessage(L"RpcCall_WinutilsDeletePath: %s %d: %d %d\n",
+    path, isDir, *pDeleted, dwError);
+
+  return dwError;
+}
+
 DWORD RpcCall_TaskCreateAsUser(
   LPCWSTR cwd, LPCWSTR jobName,
   LPCWSTR user, LPCWSTR pidFile, LPCWSTR cmdLine,
diff --git hadoop-common-project/hadoop-common/src/main/winutils/hadoopwinutilsvc.idl hadoop-common-project/hadoop-common/src/main/winutils/hadoopwinutilsvc.idl
index 2e3ea23..ee3685c 100644
--- hadoop-common-project/hadoop-common/src/main/winutils/hadoopwinutilsvc.idl
+++ hadoop-common-project/hadoop-common/src/main/winutils/hadoopwinutilsvc.idl
@@ -83,6 +83,17 @@ interface HadoopWinutilSvc
     LONG_PTR hFile;
   } CREATEFILE_RESPONSE;
 
+  typedef enum {PATH_IS_DIR = 1, PATH_IS_FILE = 2} DELETEPATH_TYPE;
+
+  typedef struct {
+    DELETEPATH_TYPE type;
+    [string] const wchar_t* path;
+  } DELETEPATH_REQUEST;
+
+  typedef struct {
+    boolean deleted;
+  } DELETEPATH_RESPONSE;
+
   typedef struct {
     [string] const wchar_t* taskName;
   } KILLTASK_REQUEST;
@@ -107,6 +118,10 @@ interface HadoopWinutilSvc
     [in] CREATEFILE_REQUEST *request,
     [out] CREATEFILE_RESPONSE **response);
 
+  error_status_t WinutilsDeletePath(
+    [in] DELETEPATH_REQUEST *request,
+    [out] DELETEPATH_RESPONSE **response);
+
   error_status_t WinutilsCreateProcessAsUser(
     [in] int nmPid,
     [in] CREATE_PROCESS_REQUEST *request,
diff --git hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
index dbae74e..0f09a63 100644
--- hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
+++ hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
@@ -272,6 +272,11 @@ DWORD RpcCall_WinutilsMoveFile(
   __in LPCWSTR destinationPath,
   __in BOOL replaceExisting);
 
+DWORD RpcCall_WinutilsDeletePath(
+  __in LPCWSTR path,
+  __in BOOL isDir,
+  __out BOOL* pDeleted);
+
 #ifdef __cplusplus
 }
 #endif
diff --git hadoop-common-project/hadoop-common/src/main/winutils/service.c hadoop-common-project/hadoop-common/src/main/winutils/service.c
index 3b9b566..51fa37d 100644
--- hadoop-common-project/hadoop-common/src/main/winutils/service.c
+++ hadoop-common-project/hadoop-common/src/main/winutils/service.c
@@ -1251,6 +1251,54 @@ done:
 }
 
+error_status_t WinutilsDeletePath(
+  /* [in] */ handle_t IDL_handle,
+  /* [in] */ DELETEPATH_REQUEST *request,
+  /* [out] */ DELETEPATH_RESPONSE **response) {
+
+  DWORD dwError = ERROR_SUCCESS;
+  BOOL deleted = FALSE;
+
+  dwError = ValidateLocalPath(request->path);
+  CHECK_SVC_STATUS_DONE(dwError, L"ValidateLocalPath request->path");
+
+  switch (request->type) {
+  case PATH_IS_DIR:
+    deleted = RemoveDirectory(request->path);
+    if (!deleted) {
+      LogDebugMessage(L"Error %d deleting directory %s\n", GetLastError(), request->path);
+    }
+    break;
+  case PATH_IS_FILE:
+    deleted = DeleteFile(request->path);
+    if (!deleted) {
+      LogDebugMessage(L"Error %d deleting file %s\n", GetLastError(), request->path);
+    }
+    break;
+  default:
+    dwError = ERROR_BAD_ARGUMENTS;
+    CHECK_SVC_STATUS_DONE(dwError, L"request->type");
+  }
+
+  *response = (DELETEPATH_RESPONSE*) MIDL_user_allocate(sizeof(DELETEPATH_RESPONSE));
+  if (NULL == *response) {
+    dwError = ERROR_OUTOFMEMORY;
+    CHECK_SVC_STATUS_DONE(dwError, L"MIDL_user_allocate");
+  }
+
+  (*response)->deleted = deleted;
+
+done:
+  LogDebugMessage(L"WinutilsDeletePath: %s %d: %d %d\n",
+    request->path,
+    request->type,
+    deleted,
+    dwError);
+
+  return dwError;
+}
+
 error_status_t WinutilsMkDir(
   /* [in] */ handle_t IDL_handle,
   /* [in] */ MKDIR_REQUEST *request) {
@@ -1334,6 +1382,10 @@ error_status_t WinutilsMoveFile(
       dwError = GetLastError();
       CHECK_SVC_STATUS_DONE(dwError, L"CopyFileEx");
     }
+    break;
+  default:
+    dwError = ERROR_BAD_ARGUMENTS;
+    CHECK_SVC_STATUS_DONE(dwError, L"request->operation");
   }
 
 done:
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
index 6c4d56c..2bdcbe2 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
@@ -148,7 +148,44 @@ public boolean delete(Path p, boolean recursive) throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug(String.format("EFS:delete: %s %b", p, recursive));
     }
-    return super.delete(p, recursive);
+
+    // The superclass delete() goes through FileUtil.fullyDelete(), which we
+    // cannot rely on here: the files must be removed through the elevated
+    // operations.
+    File f = pathToFile(p);
+    if (!f.exists()) {
+      // Nothing to delete: the path does not exist.
+      return false;
+    } else if (f.isFile()) {
+      return NativeIO.Elevated.deleteFile(p);
+    } else if (f.isDirectory()) {
+      // This is a best-effort attempt. Child files can be created or deleted
+      // after we capture the listing; there is no need to protect against
+      // that case.
+      File[] files = FileUtil.listFiles(f);
+      int childCount = files.length;
+
+      if (recursive) {
+        for (File child : files) {
+          if (delete(new Path(child.getPath()), recursive)) {
+            --childCount;
+          }
+        }
+      }
+      if (childCount == 0) {
+        return NativeIO.Elevated.deleteDirectory(p);
+      } else {
+        throw new IOException("Directory " + f + " is not empty");
+      }
+    } else {
+      // This can happen under race conditions, if an external agent changes
+      // the path type between the checks above.
+      throw new IOException("Path " + f
+          + " exists, but is neither a file nor a directory");
+    }
   }
 }
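
For reviewers, here is a minimal sketch of how the new elevated delete API is expected to be exercised from client code. It is illustrative only: the concrete paths and the main() scaffolding are assumptions, not part of this patch; the NativeIO.Elevated.deleteFile/deleteDirectory calls and their failure behavior come from the changes above.

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class ElevatedDeleteSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical paths, for illustration only.
        Path file = new Path("C:/tmp/container_01/task.log");
        Path dir = new Path("C:/tmp/container_01");

        // Throws IOException when the native libraries are not loaded;
        // returns false when the winutils service could not delete the path.
        boolean fileDeleted = NativeIO.Elevated.deleteFile(file);

        // The service maps directories to RemoveDirectory(), which fails on
        // non-empty directories, so the directory must already be empty here.
        boolean dirDeleted = NativeIO.Elevated.deleteDirectory(dir);

        System.out.println("file: " + fileDeleted + ", dir: " + dirDeleted);
      }
    }

The recursive case is handled one level up: WindowsSecureContainerExecutor.delete() walks the children itself and only calls deleteDirectory() once a directory is empty.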