From 9683eb18b833ae201e48f8abd0fcf63efa0f5aa6 Mon Sep 17 00:00:00 2001
From: "zhengchen.zhao"
Date: Mon, 27 Oct 2025 10:57:25 +0800
Subject: [PATCH] feat: Improve slive stress tests with path scanning and
 better file handling

Signed-off-by: zhengchen.zhao
---
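Reviewer notes (text between the "---" marker and the diffstat is ignored by
git am):

The new behavior hangs off a single boolean knob. The sketch below shows how
that knob is expected to be set and resolved; it is an illustration, not part
of the change. It assumes SLIVE_PREFIX resolves to "slive" (making the
configuration key "slive.use.new.algorithm") and mirrors the
primary -> config -> default lookup that ConfigExtractor.shouldUseNewAlgorithm
implements in this patch.

    import org.apache.hadoop.conf.Configuration;

    public class UseNewAlgorithmSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Turn the new path-selection algorithm on for this run.
        // Assumes SLIVE_PREFIX == "slive" -> key "slive.use.new.algorithm".
        conf.setBoolean("slive.use.new.algorithm", true);

        // Resolution order, as in the patch: an explicit value wins, then
        // the config entry, then the ConfigOption default (false).
        String primary = null; // no explicit override supplied
        String val = primary;
        if (val == null) {
          val = conf.get("slive.use.new.algorithm");
        }
        if (val == null) {
          val = Boolean.FALSE.toString(); // ConfigOption default
        }
        System.out.println("useNewAlgorithm = " + Boolean.parseBoolean(val));
      }
    }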
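Also illustrative only: the shape of CREATE targets under the new algorithm.
generateUniquePath() appends a 10-character UUID fragment to the usual
bounded name, so repeated creates stop colliding on the small per-directory
namespace. The "sl_file_"/"sl_dir_" prefix literals below are an assumption
taken from the existing PathFinder constants, not something this patch
touches.

    import java.util.Random;
    import java.util.UUID;

    public class UniqueCreatePathSketch {
      public static void main(String[] args) {
        Random rnd = new Random();
        int fileLimit = 10; // stands in for config.getTotalFiles()
        int dirLimit = 32;  // stands in for config.getDirSize()
        int curId = 1 + rnd.nextInt(fileLimit);
        // Same transformation as generateUniquePath() in this patch.
        String uniqueId =
            UUID.randomUUID().toString().replace("-", "").substring(0, 10);
        String name = "sl_file_" + (curId % dirLimit) + "_" + uniqueId;
        // e.g. <data dir>/sl_dir_2/sl_file_7_0f3a9c81b2
        System.out.println(name);
      }
    }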
 .../org/apache/hadoop/fs/slive/AppendOp.java  |   4 +-
 .../hadoop/fs/slive/ConfigExtractor.java      |  24 +++
 .../apache/hadoop/fs/slive/ConfigOption.java  |   4 +
 .../org/apache/hadoop/fs/slive/CreateOp.java  |   9 +-
 .../org/apache/hadoop/fs/slive/DeleteOp.java  |   5 +-
 .../org/apache/hadoop/fs/slive/ListOp.java    |   4 +-
 .../org/apache/hadoop/fs/slive/MkdirOp.java   |   2 +-
 .../apache/hadoop/fs/slive/PathFinder.java    | 177 ++++++++++++++++++
 .../org/apache/hadoop/fs/slive/ReadOp.java    |   8 +-
 .../org/apache/hadoop/fs/slive/RenameOp.java  |  18 +-
 .../apache/hadoop/fs/slive/ReportWriter.java  |   1 +
 .../apache/hadoop/fs/slive/TruncateOp.java    |   4 +-
 .../hadoop/fs/slive/WeightSelector.java       |   9 +-
 .../hadoop/mapred/TestTextInputFormat.java    |   4 +-
 14 files changed, 250 insertions(+), 23 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
index b118e6e36d284..43a1dbf45be0e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
@@ -53,7 +53,7 @@ class AppendOp extends Operation {
    * @return Path
    */
   protected Path getAppendFile() {
-    Path fn = getFinder().getFile();
+    Path fn = getFinder().getFile("APPEND");
     return fn;
   }
 
@@ -100,7 +100,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with appending", e);
+      LOG.warn("AppendOp failed: File not found", e);
     } catch (IOException | UnsupportedOperationException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
index 2668770523f6c..910a81104ea5a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
@@ -131,6 +131,29 @@ boolean shouldExitOnFirstError(String primary) {
     return Boolean.parseBoolean(val);
   }
 
+  /**
+   * @return true|false for whether to use the new path-selection algorithm
+   */
+  boolean shouldUseNewAlgorithm() {
+    return shouldUseNewAlgorithm(null);
+  }
+
+  /**
+   * @param primary
+   *          the initial string to be used as the value of this option
+   * @return whether to use the new algorithm, from primary, config, default (in that order)
+   */
+  boolean shouldUseNewAlgorithm(String primary) {
+    String val = primary;
+    if (val == null) {
+      val = config.get(ConfigOption.USE_NEW_ALGORITHM.getCfgOption());
+    }
+    if (val == null) {
+      val = ConfigOption.USE_NEW_ALGORITHM.getDefault().toString();
+    }
+    return Boolean.parseBoolean(val);
+  }
+
   /**
    * @return whether the mapper or reducer should wait for truncate recovery
    */
@@ -735,6 +758,7 @@ static void dumpOptions(ConfigExtractor cfg) {
     LOG.info("Operation amount = " + cfg.getOpCount());
     LOG.info("Total file limit = " + cfg.getTotalFiles());
     LOG.info("Total dir file limit = " + cfg.getDirSize());
+    LOG.info("Use new algorithm = " + cfg.shouldUseNewAlgorithm());
     {
       String read = "Read size = ";
       if (cfg.shouldReadFullFile()) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigOption.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigOption.java
index bd663364cde05..291a2c9f1f1af 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigOption.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigOption.java
@@ -52,6 +52,10 @@ class ConfigOption<T> extends Option {
       "exitOnError", false, "Exit on first error", SLIVE_PREFIX
           + ".exit.on.error", false);
 
+  static final ConfigOption<Boolean> USE_NEW_ALGORITHM = new ConfigOption<Boolean>(
+      "useNewAlgorithm", false,
+      "Use the new path-selection algorithm for read/delete/ls operations",
+      SLIVE_PREFIX + ".use.new.algorithm", false);
+
   static final ConfigOption<Integer> FILES = new ConfigOption<Integer>(
       "files", true, "Max total number of files", SLIVE_PREFIX
           + ".total.files", 10);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
index d7d2989287f9e..90872ead2f7f2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.slive;
 
 import java.io.IOException;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import java.util.List;
 import java.util.Random;
 
@@ -111,7 +112,7 @@ private int getBufferSize() {
    * @return Path
    */
   protected Path getCreateFile() {
-    Path fn = getFinder().getFile();
+    Path fn = getFinder().getFile("CREATE");
     return fn;
   }
 
@@ -164,10 +165,14 @@ List<OperationOutput> run(FileSystem fs) {
           ReportWriter.BYTES_WRITTEN, bytesWritten));
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.SUCCESSES, 1L));
+    } catch (FileAlreadyExistsException e) {
+      out.add(new OperationOutput(OutputType.LONG, getType(),
+          ReportWriter.FILE_ALREADY_EXISTS, 1L));
+      LOG.warn("CreateOp failed: File already exists", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
-      LOG.warn("Error with creating", e);
+      LOG.warn("CreateOp failed: IO error creating file", e);
     } finally {
       if (os != null) {
         try {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
index 0ed425a852b37..1f7499bc73583 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
@@ -49,7 +49,7 @@ class DeleteOp extends Operation {
    * Gets the file to delete
    */
   protected Path getDeleteFile() {
-    Path fn = getFinder().getFile();
+    Path fn = getFinder().getFile("DELETE");
     return fn;
   }
 
@@ -58,6 +58,7 @@ List<OperationOutput> run(FileSystem fs) {
     List<OperationOutput> out = super.run(fs);
     try {
       Path fn = getDeleteFile();
+      LOG.info("Deleting file: " + fn);
       long timeTaken = 0;
       boolean deleteStatus = false;
       {
@@ -80,7 +81,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with deleting", e);
+      LOG.warn("DeleteOp failed: File not found", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
index 46377d5dff773..b5c25597586d3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
@@ -53,7 +53,7 @@ class ListOp extends Operation {
    * @return Path
    */
   protected Path getDirectory() {
-    Path dir = getFinder().getDirectory();
+    Path dir = getFinder().getDirectory("LS");
     return dir;
   }
 
@@ -81,7 +81,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with listing", e);
+      LOG.warn("ListOp failed: Directory not found", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
index 0f24d14938721..c58c51d385f42 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
@@ -83,7 +83,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with mkdir", e);
+      LOG.warn("MkdirOp failed: Path not found", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/PathFinder.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/PathFinder.java
index 2dfe2ef83622d..448481267a2c2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/PathFinder.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/PathFinder.java
@@ -18,9 +18,17 @@
 package org.apache.hadoop.fs.slive;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Random;
+import java.util.UUID;
 
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class which generates a file or directory path using a simple random
@@ -28,6 +36,8 @@
  */
 class PathFinder {
 
+  private static final Logger LOG = LoggerFactory.getLogger(PathFinder.class);
+
   private enum Type {
     FILE, DIRECTORY
   }
@@ -38,11 +48,96 @@ private enum Type {
   private Path basePath;
   private ConfigExtractor config;
   private Random rnd;
+
+  // Used to store scanned existing paths
+  private List<Path> existingFiles;
+  private List<Path> existingDirs;
 
   PathFinder(ConfigExtractor cfg, Random rnd) {
     this.basePath = cfg.getDataPath();
     this.config = cfg;
     this.rnd = rnd;
+    this.existingFiles = new ArrayList<>();
+    this.existingDirs = new ArrayList<>();
+  }
+
+  /**
+   * Scans all paths under base_dir and records existing files and directories
+   */
+  private void scanBaseDirectory() {
+    try {
+      FileSystem fs = basePath.getFileSystem(config.getConfig());
+      LOG.info("Starting to scan base_dir: " + basePath);
+      // Clear existing lists
+      clearExistingPaths();
+      // Recursively scan directories
+      scanDirectoryRecursively(fs, basePath);
+      // Print a summary only (avoid huge log output)
+      LOG.info("Scan complete: found " + existingFiles.size() + " files, "
+          + existingDirs.size() + " directories");
+    } catch (IOException e) {
+      LOG.error("Error scanning base_dir: " + e.getMessage(), e);
+      clearExistingPaths();
+    }
+  }
+
+  private void clearExistingPaths() {
+    existingFiles.clear();
+    existingDirs.clear();
+  }
+
+  /**
+   * Recursively scans directories
+   */
+  private void scanDirectoryRecursively(FileSystem fs, Path dir)
+      throws IOException {
+    if (!fs.exists(dir)) {
+      return;
+    }
+    FileStatus[] statuses = fs.listStatus(dir);
+    if (statuses == null || statuses.length == 0) {
+      return;
+    }
+    for (FileStatus status : statuses) {
+      Path path = status.getPath();
+      if (status.isFile()) {
+        existingFiles.add(path);
+      } else if (status.isDirectory()) {
+        existingDirs.add(path);
+        // Recursively scan subdirectories
+        scanDirectoryRecursively(fs, path);
+      }
+    }
+  }
+
+  /**
+   * Randomly selects one of the existing files
+   */
+  private Path getExistingFile() {
+    if (existingFiles.isEmpty()) {
+      throw new RuntimeException(
+          "No files found in base_dir, cannot perform read/delete operations");
+    }
+    int index = rnd.nextInt(existingFiles.size());
+    Path selectedFile = existingFiles.get(index);
+    LOG.info("Selected from existing files: " + selectedFile);
+    return selectedFile;
+  }
+
+  /**
+   * Randomly selects one of the existing directories
+   */
+  private Path getExistingDirectory() {
+    if (existingDirs.isEmpty()) {
+      throw new RuntimeException(
+          "No directories found in base_dir, cannot perform ls operations");
+    }
+    int index = rnd.nextInt(existingDirs.size());
+    Path selectedDir = existingDirs.get(index);
LOG.info("Selected from existing directories: " + selectedDir); + return selectedDir; } /** @@ -62,6 +157,10 @@ private enum Type { * @return Path */ private Path getPath(int curId, int limitPerDir, Type type) { + return getPath(curId, limitPerDir, type, null); + } + + private Path getPath(int curId, int limitPerDir, Type type, String suffix) { if (curId <= 0) { return basePath; } @@ -74,6 +173,9 @@ private Path getPath(int curId, int limitPerDir, Type type) { name = DIR_PREFIX + new Integer(curId % limitPerDir).toString(); break; } + if (suffix != null) { + name += "_" + suffix; + } Path base = getPath((curId / limitPerDir), limitPerDir, Type.DIRECTORY); return new Path(base, name); } @@ -85,6 +187,57 @@ private Path getPath(int curId, int limitPerDir, Type type) { * @return path */ Path getFile() { + return getFile(null); + } + + /** + * Gets a file path based on operation type and configuration + * + * @param operationType the type of operation (can be null for backward compatibility) + * @return path + */ + Path getFile(String operationType) { + boolean useNewAlgorithm = config.shouldUseNewAlgorithm(); + + // Handle operations that need existing files + if (isExistingFileOperation(operationType)) { + if (useNewAlgorithm) { + LOG.info("Use new algorithm mode: scanning base_dir for " + operationType + " operation"); + scanBaseDirectory(); + return getExistingFile(); + } + // Fall through to original algorithm for normal mode + } + + // Handle CREATE operation + if ("CREATE".equals(operationType)) { + if (useNewAlgorithm) { + LOG.info("Generating unique path for CREATE operation"); + return generateUniquePath(); + } + // Fall through to original algorithm for normal mode + } + + // Use original algorithm for all other cases + LOG.info("Using original algorithm for " + (operationType != null ? 
+        operationType : "default") + " operation");
+    return generateOriginalPath();
+  }
+
+  private boolean isExistingFileOperation(String operationType) {
+    return "READ".equals(operationType) || "DELETE".equals(operationType)
+        || "TRUNCATE".equals(operationType) || "APPEND".equals(operationType)
+        || "RENAME_SRC".equals(operationType);
+  }
+
+  private Path generateUniquePath() {
+    int fileLimit = config.getTotalFiles();
+    int dirLimit = config.getDirSize();
+    int startPoint = 1 + rnd.nextInt(fileLimit);
+    String uniqueId = UUID.randomUUID().toString().replace("-", "")
+        .substring(0, 10);
+    return getPath(startPoint, dirLimit, Type.FILE, uniqueId);
+  }
+
+  private Path generateOriginalPath() {
     int fileLimit = config.getTotalFiles();
     int dirLimit = config.getDirSize();
     int startPoint = 1 + rnd.nextInt(fileLimit);
@@ -98,6 +251,30 @@ Path getFile() {
    * @return path
    */
   Path getDirectory() {
+    return getDirectory(null);
+  }
+
+  /**
+   * Gets a directory path based on the operation type.
+   * For CREATE/MKDIR operations: use the original algorithm (write under
+   * base_dir). For LS operations: scan base_dir and select from existing
+   * directories.
+   *
+   * @param operationType the type of operation (can be null for backward
+   *          compatibility)
+   * @return path
+   */
+  Path getDirectory(String operationType) {
+    boolean useNewAlgorithm = config.shouldUseNewAlgorithm();
+    // For the LS operation, scan base_dir and select an existing directory
+    // each time
+    if ("LS".equals(operationType)) {
+      if (useNewAlgorithm) {
+        LOG.info("Scanning base_dir to select an existing directory for LS");
+        scanBaseDirectory();
+        return getExistingDirectory();
+      }
+      // Fall through to the original algorithm in normal mode
+    }
+
+    // Use the original algorithm by default
     int fileLimit = config.getTotalFiles();
     int dirLimit = config.getDirSize();
     int startPoint = rnd.nextInt(fileLimit);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
index d6f29a69646a0..a975f1e20cf9d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
@@ -55,7 +55,7 @@ class ReadOp extends Operation {
    * @return Path
    */
   protected Path getReadFile() {
-    Path fn = getFinder().getFile();
+    Path fn = getFinder().getFile("READ");
     return fn;
   }
 
@@ -118,15 +118,15 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with reading", e);
+      LOG.warn("ReadOp failed: File not found", e);
     } catch (BadFileException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.BAD_FILES, 1L));
-      LOG.warn("Error reading bad file", e);
+      LOG.warn("ReadOp failed: File data corrupted", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
-      LOG.warn("Error reading", e);
+      LOG.warn("ReadOp failed: IO error reading file", e);
     } finally {
       if (is != null) {
         try {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
index a608a8793659d..d93371d9e7fe1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
@@ -72,8 +72,10 @@ Path getTarget() {
    * @return SrcTarget
    */
   protected SrcTarget getRenames() {
-    Path src = getFinder().getFile();
-    Path target = getFinder().getFile();
+    // Source: scan existing files (uses the RENAME_SRC operation type)
+    Path src = getFinder().getFile("RENAME_SRC");
+    // Target: generate a new unique path (uses the CREATE operation type)
+    Path target = getFinder().getFile("CREATE");
     return new SrcTarget(src, target);
   }
 
@@ -85,6 +87,16 @@ List<OperationOutput> run(FileSystem fs) {
       SrcTarget targets = getRenames();
       Path src = targets.getSrc();
       Path target = targets.getTarget();
+      LOG.info("Renaming " + src + " to " + target);
+      // Ensure the target directory exists
+      Path targetDir = target.getParent();
+      if (targetDir != null && !fs.exists(targetDir)) {
+        LOG.info("Creating target directory: " + targetDir);
+        boolean mkdirOk = fs.mkdirs(targetDir);
+        if (!mkdirOk) {
+          LOG.warn("Failed to create target directory: " + targetDir);
+        }
+      }
       // capture results
       boolean renamedOk = false;
       long timeTaken = 0;
@@ -108,7 +120,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with renaming", e);
+      LOG.warn("RenameOp failed: File not found", e);
     } catch (IOException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
index 873a2ffd59796..53bd60ce468bb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
@@ -47,6 +47,7 @@ class ReportWriter {
   static final String BYTES_READ = "bytes_read";
   static final String NOT_FOUND = "files_not_found";
   static final String BAD_FILES = "bad_files";
+  static final String FILE_ALREADY_EXISTS = "file_already_exists";
 
   private static final Logger LOG = LoggerFactory.getLogger(ReportWriter.class);
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
index 295b797cb1db1..5878b17e8bc2a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
@@ -52,7 +52,7 @@ class TruncateOp extends Operation {
    * @return Path
    */
   protected Path getTruncateFile() {
-    Path fn = getFinder().getFile();
+    Path fn = getFinder().getFile("TRUNCATE");
     return fn;
   }
 
@@ -93,7 +93,7 @@ List<OperationOutput> run(FileSystem fs) {
     } catch (FileNotFoundException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.NOT_FOUND, 1L));
-      LOG.warn("Error with truncating", e);
+      LOG.warn("TruncateOp failed: File not found", e);
     } catch (IOException | UnsupportedOperationException e) {
       out.add(new OperationOutput(OutputType.LONG, getType(),
           ReportWriter.FAILURES, 1L));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
index 3d80357090c4a..24d6c079d85ca 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
@@ -28,6 +28,9 @@
 import org.apache.hadoop.fs.slive.Constants.Distribution;
 import org.apache.hadoop.fs.slive.Constants.OperationType;
+import org.apache.hadoop.fs.slive.Weights.BeginWeight;
+import org.apache.hadoop.fs.slive.Weights.EndWeight;
+import org.apache.hadoop.fs.slive.Weights.MidWeight;
 import org.apache.hadoop.fs.slive.Weights.UniformWeight;
 import org.apache.hadoop.fs.slive.ObserveableOp.Observer;
 import org.slf4j.Logger;
 
@@ -75,9 +78,9 @@ protected RouletteSelector getSelector() {
   private void configureWeights(ConfigExtractor e) {
     weights = new HashMap<Distribution, Weightable>();
     weights.put(Distribution.UNIFORM, new UniformWeight());
-    // weights.put(Distribution.BEG, new BeginWeight());
-    // weights.put(Distribution.END, new EndWeight());
-    // weights.put(Distribution.MID, new MidWeight());
+    weights.put(Distribution.BEG, new BeginWeight());
+    weights.put(Distribution.END, new EndWeight());
+    weights.put(Distribution.MID, new MidWeight());
   }
 
   /**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
index dd9d6b5037d21..b69a92bcfbca4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java
@@ -587,7 +587,7 @@ private static String unquote(String in) {
    * @param args
    * @throws Exception
    */
-  /*public static void main(String[] args) throws Exception {
+  public static void main(String[] args) throws Exception {
     for(String arg: args) {
       System.out.println("Working on " + arg);
       LineReader reader = makeStream(unquote(arg));
@@ -599,5 +599,5 @@ private static String unquote(String in) {
       }
       reader.close();
     }
-  }*/
+  }
 }