diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
index b0b8e7613fc..2673a0c8729 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
@@ -1007,9 +1007,6 @@ static int open_file_as_nm(const char* filename) {
  */
 static int copy_file(int input, const char* in_filename,
                      const char* out_filename, mode_t perm) {
-  const int buffer_size = 128*1024;
-  char buffer[buffer_size];
-
   int out_fd = open(out_filename, O_WRONLY|O_CREAT|O_EXCL|O_NOFOLLOW, perm);
   if (out_fd == -1) {
     fprintf(LOGFILE, "Can't open %s for output - %s\n", out_filename,
@@ -1018,6 +1015,16 @@ static int copy_file(int input, const char* in_filename,
     return -1;
   }
 
+  const int buffer_size = 128*1024;
+  char* buffer = malloc(buffer_size);
+  if (buffer == NULL) {
+    fprintf(LOGFILE, "Failed to allocate buffer while copying file: %s -> %s",
+            in_filename, out_filename);
+    fflush(LOGFILE);
+    close(out_fd);
+    return -1;
+  }
+
   ssize_t len = read(input, buffer, buffer_size);
   while (len > 0) {
     ssize_t pos = 0;
@@ -1027,12 +1034,15 @@ static int copy_file(int input, const char* in_filename,
       fprintf(LOGFILE, "Error writing to %s - %s\n", out_filename,
               strerror(errno));
       close(out_fd);
+      free(buffer);
       return -1;
     }
     pos += write_result;
   }
   len = read(input, buffer, buffer_size);
 }
+  free(buffer);
+
 if (len < 0) {
   fprintf(LOGFILE, "Failed to read file %s - %s\n", in_filename,
           strerror(errno));
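
The patch above replaces copy_file()'s 128 KiB stack buffer with a malloc'd buffer that is released on every exit path. Below is a minimal standalone sketch of that same pattern, not the Hadoop source; the function name copy_fd_to_path and the use of stderr instead of LOGFILE are placeholders for illustration.

/*
 * Sketch of the heap-buffered copy pattern introduced by the patch:
 * allocate the copy buffer with malloc() instead of a large stack array,
 * handle short writes, and free the buffer on every exit path.
 */
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

static int copy_fd_to_path(int in_fd, const char *out_path, mode_t perm) {
  int out_fd = open(out_path, O_WRONLY | O_CREAT | O_EXCL | O_NOFOLLOW, perm);
  if (out_fd == -1) {
    fprintf(stderr, "Can't open %s for output - %s\n", out_path,
            strerror(errno));
    return -1;
  }

  const size_t buffer_size = 128 * 1024;
  char *buffer = malloc(buffer_size);          /* heap, not stack */
  if (buffer == NULL) {
    fprintf(stderr, "Failed to allocate copy buffer\n");
    close(out_fd);
    return -1;
  }

  int rc = 0;
  ssize_t len;
  while ((len = read(in_fd, buffer, buffer_size)) > 0) {
    ssize_t pos = 0;
    while (pos < len) {                        /* retry on short writes */
      ssize_t written = write(out_fd, buffer + pos, len - pos);
      if (written <= 0) {
        fprintf(stderr, "Error writing to %s - %s\n", out_path,
                strerror(errno));
        rc = -1;
        goto done;
      }
      pos += written;
    }
  }
  if (len < 0) {
    fprintf(stderr, "Failed to read input - %s\n", strerror(errno));
    rc = -1;
  }

done:
  free(buffer);                                /* freed on every exit path */
  close(out_fd);
  return rc;
}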