diff options
author | freva <valerijf@yahoo-inc.com> | 2017-03-07 12:52:33 +0100 |
---|---|---|
committer | freva <valerijf@yahoo-inc.com> | 2017-03-07 12:54:29 +0100 |
commit | e2b9bf632b82cd6fdb80a6c1be09f4fa31672e9f (patch) | |
tree | cf294d9528c31e7dcfc3775b4461dc42375730ce /node-maintainer | |
parent | d45668444699864641e47390bec4e58b950a3914 (diff) |
Renamed node-admin-maintenance to node-maintainer
Diffstat (limited to 'node-maintainer')
14 files changed, 1561 insertions, 0 deletions
diff --git a/node-maintainer/OWNERS b/node-maintainer/OWNERS new file mode 100644 index 00000000000..e030acdbc5b --- /dev/null +++ b/node-maintainer/OWNERS @@ -0,0 +1 @@ +freva diff --git a/node-maintainer/README.md b/node-maintainer/README.md new file mode 100644 index 00000000000..eafffd0b857 --- /dev/null +++ b/node-maintainer/README.md @@ -0,0 +1,4 @@ +# Node Admin Maintenance + +Executes maintenance jobs, such as deleting old logs, processing and reporting coredumps, on behalf of node-admin. +Node admin maintenance runs as a separate JVM from node-admin to make it possible to run it as root if needed. diff --git a/node-maintainer/pom.xml b/node-maintainer/pom.xml new file mode 100644 index 00000000000..4c1899deb0d --- /dev/null +++ b/node-maintainer/pom.xml @@ -0,0 +1,108 @@ +<?xml version="1.0"?> +<!-- Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 + http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>com.yahoo.vespa</groupId> + <artifactId>parent</artifactId> + <version>6-SNAPSHOT</version> + </parent> + + <artifactId>node-maintainer</artifactId> + <version>6-SNAPSHOT</version> + <packaging>jar</packaging> + <name>${project.artifactId}</name> + + <dependencies> + <dependency> + <groupId>com.yahoo.vespa</groupId> + <artifactId>vespajlib</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.yahoo.vespa</groupId> + <artifactId>container-core</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.yahoo.vespa</groupId> + <artifactId>jdisc_core</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.yahoo.vespa</groupId> + 
<artifactId>jdisc_http_service</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>com.yahoo.vespa</groupId> + <artifactId>config</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + </dependency> + + <dependency> + <groupId>org.hamcrest</groupId> + <artifactId>hamcrest-junit</artifactId> + <version>2.0.0.0</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-core</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <configuration> + + <source>1.8</source> + <target>1.8</target> + <compilerArgs> + <arg>-Xlint:all</arg> + <arg>-Werror</arg> + </compilerArgs> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <finalName>node-maintainer</finalName> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + </configuration> + <executions> + <execution> + <id>make-assembly</id> + <phase>package</phase> + <goals> + <goal>single</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> diff --git a/node-maintainer/src/main/application/services.xml b/node-maintainer/src/main/application/services.xml new file mode 100644 index 00000000000..09effc10ebd --- /dev/null +++ b/node-maintainer/src/main/application/services.xml @@ -0,0 +1,12 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!-- Copyright 2016 
Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. --> +<services version="1.0"> + <jdisc version="1.0" jetty="true"> + <http> + <server id='server1' port='19094' /> + </http> + <handler id="com.yahoo.vespa.hosted.node.maintainer.restapi.v1.MaintainerApiHandler" bundle="node-maintainer"> + <binding>http://*/maintainer/*</binding> + </handler> + </jdisc> +</services> diff --git a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollector.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollector.java new file mode 100644 index 00000000000..cd7d8834fba --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollector.java @@ -0,0 +1,146 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. +package com.yahoo.vespa.hosted.node.maintainer; + +import com.yahoo.collections.Pair; +import com.yahoo.system.ProcessExecuter; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Takes in a compressed (lz4) or uncompressed core dump and collects relevant metadata. 
+ * + * @author freva + */ +public class CoreCollector { + private static final String GDB_PATH = "/home/y/bin64/gdb"; + private static final Pattern CORE_GENERATOR_PATH_PATTERN = Pattern.compile("^Core was generated by `(?<path>.*?)'.$"); + private static final Pattern EXECFN_PATH_PATTERN = Pattern.compile("^.* execfn: '(?<path>.*?)'"); + private static final Pattern FROM_PATH_PATTERN = Pattern.compile("^.* from '(?<path>.*?)'"); + private static final Pattern TOTAL_MEMORY_PATTERN = Pattern.compile("^MemTotal:\\s*(?<totalMem>\\d+) kB$", Pattern.MULTILINE); + + private static final Logger logger = Logger.getLogger(CoreCollector.class.getName()); + private final ProcessExecuter processExecuter; + + CoreCollector(ProcessExecuter processExecuter) { + this.processExecuter = processExecuter; + } + + Path readBinPathFallback(Path coredumpPath) throws IOException, InterruptedException { + String command = GDB_PATH + " -n -batch -core " + coredumpPath + " | grep \'^Core was generated by\'"; + Pair<Integer, String> result = processExecuter.exec(new String[]{"sh", "-c", "\"" + command + "\""}); + + Matcher matcher = CORE_GENERATOR_PATH_PATTERN.matcher(result.getSecond()); + if (! 
matcher.find()) { + throw new RuntimeException("Failed to extract binary path from " + result); + } + return Paths.get(matcher.group("path").split(" ")[0]); + } + + Path readBinPath(Path coredumpPath) throws IOException, InterruptedException { + try { + Pair<Integer, String> result = processExecuter.exec(new String[]{"file", coredumpPath.toString()}); + + Matcher execfnMatcher = EXECFN_PATH_PATTERN.matcher(result.getSecond()); + if (execfnMatcher.find()) { + return Paths.get(execfnMatcher.group("path").split(" ")[0]); + } + + Matcher fromMatcher = FROM_PATH_PATTERN.matcher(result.getSecond()); + if (fromMatcher.find()) { + return Paths.get(fromMatcher.group("path").split(" ")[0]); + } + } catch (Throwable e) { + logger.log(Level.WARNING, "Failed getting bin path, trying fallback instead", e); + } + + return readBinPathFallback(coredumpPath); + } + + List<String> readBacktrace(Path coredumpPath, Path binPath, boolean allThreads) throws IOException, InterruptedException { + String threads = allThreads ? 
"thread apply all bt" : "bt"; + Pair<Integer, String> result = processExecuter.exec( + new String[]{GDB_PATH, "-n", "-ex", threads, "-batch", binPath.toString(), coredumpPath.toString()}); + if (result.getFirst() != 0) { + throw new RuntimeException("Failed to read backtrace " + result); + } + return Arrays.asList(result.getSecond().split("\n")); + } + + public Map<String, Object> collect(Path coredumpPath) { + Map<String, Object> data = new LinkedHashMap<>(); + try { + coredumpPath = compressCoredump(coredumpPath); + Path binPath = readBinPath(coredumpPath); + + data.put("bin_path", binPath.toString()); + data.put("backtrace", readBacktrace(coredumpPath, binPath, false)); + data.put("backtrace_all_threads", readBacktrace(coredumpPath, binPath, true)); + + deleteDecompressedCoredump(coredumpPath); + } catch (Throwable e) { + logger.log(Level.WARNING, "Failed to collect core dump data", e); + } + return data; + } + + + /** + * This method will either compress or decompress the core dump if the input path is to a decompressed or + * compressed core dump, respectively. + * + * @return Path to the decompressed core dump + */ + private Path compressCoredump(Path coredumpPath) throws IOException, InterruptedException { + if (! 
coredumpPath.toString().endsWith(".lz4")) { + processExecuter.exec( + new String[]{"/home/y/bin64/lz4", coredumpPath.toString(), coredumpPath.toString() + ".lz4"}); + return coredumpPath; + + } else { + if (!diskSpaceAvailable(coredumpPath)) { + throw new RuntimeException("Not decompressing " + coredumpPath + " due to not enough disk space available"); + } + + Path decompressedPath = Paths.get(coredumpPath.toString().replaceFirst("\\.lz4$", "")); + Pair<Integer, String> result = processExecuter.exec( + new String[]{"/home/y/bin64/lz4", "-d", coredumpPath.toString(), decompressedPath.toString()}); + if (result.getFirst() != 0) { + throw new RuntimeException("Failed to decompress file " + coredumpPath + ": " + result); + } + return decompressedPath; + } + } + + /** + * Delete the core dump unless: + * - The file is compressed + * - There is no compressed file (i.e. it was not decompressed in the first place) + */ + void deleteDecompressedCoredump(Path coredumpPath) throws IOException { + if (! 
coredumpPath.toString().endsWith(".lz4") && Paths.get(coredumpPath.toString() + ".lz4").toFile().exists()) { + Files.delete(coredumpPath); + } + } + + private boolean diskSpaceAvailable(Path path) throws IOException { + String memInfo = new String(Files.readAllBytes(Paths.get("/proc/meminfo"))); + return path.toFile().getFreeSpace() > parseTotalMemorySize(memInfo); + } + + int parseTotalMemorySize(String memInfo) { + Matcher matcher = TOTAL_MEMORY_PATTERN.matcher(memInfo); + if (!matcher.find()) throw new RuntimeException("Could not parse meminfo: " + memInfo); + return Integer.valueOf(matcher.group("totalMem")); + } +} diff --git a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandler.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandler.java new file mode 100644 index 00000000000..35f2885a857 --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandler.java @@ -0,0 +1,132 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
+package com.yahoo.vespa.hosted.node.maintainer; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.http.HttpHeaders; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; + +/** + * Finds coredumps, collects metadata and reports them + * + * @author freva + */ +public class CoredumpHandler { + public static final String FEED_ENDPOINT = "http://panic.vespa.us-west-1.prod.vespa.yahooapis.com:4080/document/v1/panic/core_dump/docid"; + public static final String PROCESSING_DIRECTORY_NAME = "processing"; + public static final String METADATA_FILE_NAME = "metadata.json"; + + private final Logger logger = Logger.getLogger(CoredumpHandler.class.getName()); + private final ObjectMapper objectMapper = new ObjectMapper(); + + private final HttpClient httpClient; + private final CoreCollector coreCollector; + + public CoredumpHandler(HttpClient httpClient, CoreCollector coreCollector) { + this.httpClient = httpClient; + this.coreCollector = coreCollector; + } + + void processAndReportCoredumps(Path coredumpsPath, Path doneCoredumpPath, Map<String, Object> nodeAttributes) throws IOException { + Path processingCoredumps = processCoredumps(coredumpsPath, nodeAttributes); + reportCoredumps(processingCoredumps, doneCoredumpPath); + } + + void removeJavaCoredumps(Path javaCoredumpsPath) { + if (! 
javaCoredumpsPath.toFile().isDirectory()) return; + DeleteOldAppData.deleteFiles(javaCoredumpsPath.toString(), 0, "^java_pid.*\\.hprof$", false); + } + + Path processCoredumps(Path coredumpsPath, Map<String, Object> nodeAttributes) throws IOException { + Path processingCoredumpsPath = coredumpsPath.resolve(PROCESSING_DIRECTORY_NAME); + processingCoredumpsPath.toFile().mkdirs(); + + Files.list(coredumpsPath) + .filter(path -> path.toFile().isFile() && ! path.getFileName().toString().startsWith(".")) + .forEach(coredumpPath -> { + try { + coredumpPath.toFile().setReadable(true, false); + coredumpPath = startProcessing(coredumpPath, processingCoredumpsPath); + + Path metadataPath = coredumpPath.getParent().resolve(METADATA_FILE_NAME); + Map<String, Object> metadata = collectMetadata(coredumpPath, nodeAttributes); + writeMetadata(metadataPath, metadata); + } catch (Throwable e) { + logger.log(Level.WARNING, "Failed to process coredump " + coredumpPath, e); + } + }); + + return processingCoredumpsPath; + } + + void reportCoredumps(Path processingCoredumpsPath, Path doneCoredumpsPath) throws IOException { + doneCoredumpsPath.toFile().mkdirs(); + + Files.list(processingCoredumpsPath) + .filter(path -> path.toFile().isDirectory()) + .forEach(coredumpDirectory -> { + try { + report(coredumpDirectory); + finishProcessing(coredumpDirectory, doneCoredumpsPath); + } catch (Throwable e) { + logger.log(Level.WARNING, "Failed to report coredump " + coredumpDirectory, e); + } + }); + } + + Path startProcessing(Path coredumpPath, Path processingCoredumpsPath) throws IOException { + Path folder = processingCoredumpsPath.resolve(UUID.randomUUID().toString()); + folder.toFile().mkdirs(); + return Files.move(coredumpPath, folder.resolve(coredumpPath.getFileName())); + } + + private Map<String, Object> collectMetadata(Path coredumpPath, Map<String, Object> nodeAttributes) { + Map<String, Object> metadata = coreCollector.collect(coredumpPath); + metadata.putAll(nodeAttributes); + + 
Map<String, Object> fields = new HashMap<>(); + fields.put("fields", metadata); + return fields; + } + + private void writeMetadata(Path metadataPath, Map<String, Object> metadata) throws IOException { + Files.write(metadataPath, objectMapper.writeValueAsString(metadata).getBytes()); + } + + void report(Path coredumpDirectory) throws IOException { + // Use core dump UUID as document ID + String documentId = coredumpDirectory.getFileName().toString(); + String metadata = new String(Files.readAllBytes(coredumpDirectory.resolve(METADATA_FILE_NAME))); + + HttpPost post = new HttpPost(FEED_ENDPOINT + "/" + documentId); + post.setHeader(HttpHeaders.CONTENT_TYPE, "application/json"); + post.setEntity(new StringEntity(metadata)); + + HttpResponse response = httpClient.execute(post); + if (response.getStatusLine().getStatusCode() / 100 != 2) { + String result = new BufferedReader(new InputStreamReader(response.getEntity().getContent())) + .lines().collect(Collectors.joining("\n")); + throw new RuntimeException("POST to " + post.getURI() + " failed with HTTP: " + + response.getStatusLine().getStatusCode() + " [" + result + "]"); + } + logger.info("Successfully reported coredump " + documentId); + } + + void finishProcessing(Path coredumpDirectory, Path doneCoredumpsPath) throws IOException { + Files.move(coredumpDirectory, doneCoredumpsPath.resolve(coredumpDirectory.getFileName())); + } +} diff --git a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppData.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppData.java new file mode 100644 index 00000000000..9bb5df1eeb8 --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppData.java @@ -0,0 +1,162 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.

/**
 * Utility methods for cleaning up old application data: deleting files and directories
 * by age, name pattern, size, or count. All failures to delete are logged, never thrown.
 *
 * @author freva
 */
public class DeleteOldAppData {
    private static final Logger logger = Logger.getLogger(DeleteOldAppData.class.getName());

    private DeleteOldAppData() { }  // utility class, not instantiable

    /**
     * (Recursively) deletes files if they match all the criteria, also deletes empty directories.
     *
     * @param basePath      Base path from where to start the search
     * @param maxAgeSeconds Delete files older (last modified date) than maxAgeSeconds
     * @param fileNameRegex Delete files where filename matches fileNameRegex (null matches everything)
     * @param recursive     Delete files in sub-directories (with the same criteria)
     */
    static void deleteFiles(String basePath, long maxAgeSeconds, String fileNameRegex, boolean recursive) {
        Pattern fileNamePattern = fileNameRegex != null ? Pattern.compile(fileNameRegex) : null;

        for (File file : getContentsOfDirectory(basePath)) {
            if (file.isDirectory()) {
                if (recursive) {
                    deleteFiles(file.getAbsolutePath(), maxAgeSeconds, fileNameRegex, true);
                    // BUGFIX: File.list() may return null (e.g. I/O error or directory
                    // removed concurrently) — use the null-safe helper for the emptiness check
                    if (getContentsOfDirectory(file.getAbsolutePath()).length == 0 && !file.delete()) {
                        logger.warning("Could not delete directory: " + file.getAbsolutePath());
                    }
                }
            } else if (isPatternMatchingFilename(fileNamePattern, file) &&
                    isTimeSinceLastModifiedMoreThan(file, Duration.ofSeconds(maxAgeSeconds))) {
                if (!file.delete()) {
                    logger.warning("Could not delete file: " + file.getAbsolutePath());
                }
            }
        }
    }

    /**
     * Deletes all files in target directory except the n most recent (by modified date)
     *
     * @param basePath          Base path to delete from
     * @param nMostRecentToKeep Number of most recent files to keep
     * @throws IllegalArgumentException if nMostRecentToKeep is not positive
     */
    static void deleteFilesExceptNMostRecent(String basePath, int nMostRecentToKeep) {
        if (nMostRecentToKeep < 1) {
            throw new IllegalArgumentException("Number of files to keep must be a positive number");
        }

        List<File> filesInDeleteDir = Arrays.stream(getContentsOfDirectory(basePath))
                .filter(File::isFile)
                .sorted(Comparator.comparingLong(File::lastModified))
                .collect(Collectors.toList());
        if (filesInDeleteDir.size() <= nMostRecentToKeep) return;

        // Files are sorted oldest-first, so everything past the first
        // (size - nMostRecentToKeep)... i.e. from index nMostRecentToKeep on counts from
        // the oldest end; delete all but the last nMostRecentToKeep entries.
        for (int i = 0; i < filesInDeleteDir.size() - nMostRecentToKeep; i++) {
            if (!filesInDeleteDir.get(i).delete()) {
                logger.warning("Could not delete file: " + filesInDeleteDir.get(i).getAbsolutePath());
            }
        }
    }

    /** Recursively deletes every regular file strictly larger than sizeInBytes. */
    static void deleteFilesLargerThan(File baseDirectory, long sizeInBytes) {
        for (File file : getContentsOfDirectory(baseDirectory.getAbsolutePath())) {
            if (file.isDirectory()) {
                deleteFilesLargerThan(file, sizeInBytes);
            } else {
                if (file.length() > sizeInBytes && !file.delete()) {
                    logger.warning("Could not delete file: " + file.getAbsolutePath());
                }
            }
        }
    }

    /**
     * Deletes directories and their contents if they match all the criteria
     *
     * @param basePath      Base path to delete the directories from
     * @param maxAgeSeconds Delete directories older (last modified date) than maxAgeSeconds
     * @param dirNameRegex  Delete directories where directory name matches dirNameRegex (null matches everything)
     */
    static void deleteDirectories(String basePath, long maxAgeSeconds, String dirNameRegex) {
        Pattern dirNamePattern = dirNameRegex != null ? Pattern.compile(dirNameRegex) : null;

        for (File file : getContentsOfDirectory(basePath)) {
            if (file.isDirectory() &&
                    isPatternMatchingFilename(dirNamePattern, file) &&
                    isTimeSinceLastModifiedMoreThan(getMostRecentlyModifiedFileIn(file), Duration.ofSeconds(maxAgeSeconds))) {
                deleteFiles(file.getPath(), 0, null, true);
                // BUGFIX: null-safe emptiness check (File.list() may return null)
                if (getContentsOfDirectory(file.getAbsolutePath()).length == 0 && !file.delete()) {
                    logger.warning("Could not delete directory: " + file.getAbsolutePath());
                }
            }
        }
    }

    /**
     * Similar to rm -rf file:
     * - It's not an error if file doesn't exist
     * - If file is a directory, it and all content is removed
     * - For symlinks: Only the symlink is removed, not what the symlink points to
     */
    static void recursiveDelete(String path) {
        File file = new File(path);
        if (file.isDirectory()) {
            // BUGFIX: listFiles() may return null on I/O error — use the null-safe helper
            for (File childFile : getContentsOfDirectory(file.getAbsolutePath())) {
                recursiveDelete(childFile.getAbsolutePath());
            }
        }

        try {
            Files.deleteIfExists(file.toPath());
        } catch (IOException ignored) { }  // best-effort, mirrors rm -f semantics
    }

    /** @return the directory's entries, or an empty array if it does not exist or cannot be listed */
    static File[] getContentsOfDirectory(String directoryPath) {
        File[] directoryContents = new File(directoryPath).listFiles();
        return directoryContents == null ? new File[0] : directoryContents;
    }

    /** @return the most recently modified file anywhere under baseFile (baseFile itself if emptier/newer) */
    private static File getMostRecentlyModifiedFileIn(File baseFile) {
        File mostRecent = baseFile;

        for (File file : getContentsOfDirectory(baseFile.getAbsolutePath())) {
            if (file.isDirectory()) {
                file = getMostRecentlyModifiedFileIn(file);
            }

            if (file.lastModified() > mostRecent.lastModified()) {
                mostRecent = file;
            }
        }
        return mostRecent;
    }

    private static boolean isTimeSinceLastModifiedMoreThan(File file, Duration duration) {
        return System.currentTimeMillis() - file.lastModified() > duration.toMillis();
    }

    /** A null pattern matches everything; otherwise Matcher.find semantics (substring match). */
    private static boolean isPatternMatchingFilename(Pattern pattern, File file) {
        return pattern == null || pattern.matcher(file.getName()).find();
    }
}
+package com.yahoo.vespa.hosted.node.maintainer; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.yahoo.system.ProcessExecuter; +import org.apache.http.impl.client.HttpClientBuilder; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +/** + * @author freva + */ +public class Maintainer { + + @SuppressWarnings("unchecked") + public static void main(String[] args) throws Exception { + if (args.length != 1) { + throw new RuntimeException("Expected only 1 argument - a JSON list of maintainer jobs to execute"); + } + + ObjectMapper mapper = new ObjectMapper(); + List<MaintenanceJob> maintenanceJobs = mapper.readValue(args[0], new TypeReference<List<MaintenanceJob>>(){}); + executeJobs(maintenanceJobs); + } + + public static void executeJobs(List<MaintenanceJob> maintenanceJobs) { + for (MaintenanceJob job : maintenanceJobs) { + try { + executeJob(job); + } catch (Exception e) { + throw new RuntimeException("Failed to execute job " + job.jobName + " with arguments " + + Arrays.toString(job.arguments.entrySet().toArray()), e); + } + } + } + + @SuppressWarnings("unchecked") + private static void executeJob(MaintenanceJob job) throws IOException { + switch (job.getJobName()) { + case "delete-files": + DeleteOldAppData.deleteFiles( + (String) job.getRequiredArgument("basePath"), + (Integer) job.getRequiredArgument("maxAgeSeconds"), + (String) job.getArgumentOrDefault("fileNameRegex", null), + (boolean) job.getArgumentOrDefault("recursive", false)); + break; + + case "delete-directories": + DeleteOldAppData.deleteDirectories( + (String) job.getRequiredArgument("basePath"), + (Integer) job.getRequiredArgument("maxAgeSeconds"), + (String) job.getArgumentOrDefault("dirNameRegex", null)); + break; + + case 
"recursive-delete": + DeleteOldAppData.recursiveDelete( + (String) job.getRequiredArgument("path")); + break; + + case "move-files": + Path from = Paths.get((String) job.getRequiredArgument("from")); + Path to = Paths.get((String) job.getRequiredArgument("to")); + if (Files.exists(from)) { + Files.move(from, to); + } + break; + + case "handle-core-dumps": + CoreCollector coreCollector = new CoreCollector(new ProcessExecuter()); + CoredumpHandler coredumpHandler = new CoredumpHandler(HttpClientBuilder.create().build(), coreCollector); + + Path containerCoredumpsPath = Paths.get((String) job.getRequiredArgument("containerCoredumpsPath")); + Path doneCoredumpsPath = Paths.get((String) job.getRequiredArgument("doneCoredumpsPath")); + Map<String, Object> attributesMap = (Map<String, Object>) job.getRequiredArgument("attributes"); + + coredumpHandler.removeJavaCoredumps(containerCoredumpsPath); + coredumpHandler.processAndReportCoredumps(containerCoredumpsPath, doneCoredumpsPath, attributesMap); + break; + + default: + throw new RuntimeException("Unknown job: " + job.getJobName()); + } + } + + /** + * Should be equal to MaintainerExecutorJob in StorageMaintainer + */ + public static class MaintenanceJob { + private final String jobName; + private final Map<String, Object> arguments; + + private MaintenanceJob(@JsonProperty(value="jobName") String jobName, + @JsonProperty(value="arguments") Map<String, Object> arguments) { + this.jobName = jobName; + this.arguments = arguments; + } + + String getJobName() { + return jobName; + } + + Object getRequiredArgument(String argumentName) { + Object value = arguments.get(argumentName); + if (value == null) { + throw new IllegalArgumentException("Missing required argument " + argumentName); + } + return value; + } + + Object getArgumentOrDefault(String argumentName, Object defaultValue) { + return arguments.getOrDefault(argumentName, defaultValue); + } + } +} diff --git 
a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/ErrorResponse.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/ErrorResponse.java new file mode 100644 index 00000000000..702ef1e43d9 --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/ErrorResponse.java @@ -0,0 +1,59 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. +package com.yahoo.vespa.hosted.node.maintainer.restapi.v1; + +import com.yahoo.container.jdisc.HttpResponse; +import com.yahoo.slime.Cursor; +import com.yahoo.slime.JsonFormat; +import com.yahoo.slime.Slime; + +import java.io.IOException; +import java.io.OutputStream; + +import static com.yahoo.jdisc.Response.Status.BAD_REQUEST; +import static com.yahoo.jdisc.Response.Status.INTERNAL_SERVER_ERROR; +import static com.yahoo.jdisc.Response.Status.METHOD_NOT_ALLOWED; +import static com.yahoo.jdisc.Response.Status.NOT_FOUND; + +public class ErrorResponse extends HttpResponse { + + private final Slime slime = new Slime(); + + public enum errorCodes { + NOT_FOUND, + BAD_REQUEST, + METHOD_NOT_ALLOWED, + INTERNAL_SERVER_ERROR + } + + public ErrorResponse(int code, String errorType, String message) { + super(code); + Cursor root = slime.setObject(); + root.setString("error-code", errorType); + root.setString("message", message); + } + + public static ErrorResponse notFoundError(String message) { + return new ErrorResponse(NOT_FOUND, errorCodes.NOT_FOUND.name(), message); + } + + public static ErrorResponse internalServerError(String message) { + return new ErrorResponse(INTERNAL_SERVER_ERROR, errorCodes.INTERNAL_SERVER_ERROR.name(), message); + } + + public static ErrorResponse badRequest(String message) { + return new ErrorResponse(BAD_REQUEST, errorCodes.BAD_REQUEST.name(), message); + } + + public static ErrorResponse methodNotAllowed(String message) { + return new 
ErrorResponse(METHOD_NOT_ALLOWED, errorCodes.METHOD_NOT_ALLOWED.name(), message); + } + + @Override + public void render(OutputStream stream) throws IOException { + new JsonFormat(true).encode(stream, slime); + } + + @Override + public String getContentType() { return "application/json"; } + +} diff --git a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MaintainerApiHandler.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MaintainerApiHandler.java new file mode 100644 index 00000000000..b8f97b0bb3f --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MaintainerApiHandler.java @@ -0,0 +1,65 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. +package com.yahoo.vespa.hosted.node.maintainer.restapi.v1; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.yahoo.container.jdisc.HttpRequest; +import com.yahoo.container.jdisc.HttpResponse; +import com.yahoo.container.jdisc.LoggingRequestHandler; +import com.yahoo.container.logging.AccessLog; +import com.yahoo.vespa.hosted.node.maintainer.Maintainer; +import com.yahoo.yolean.Exceptions; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.Executor; +import java.util.logging.Level; + + +/** + * @author freva + */ +public class MaintainerApiHandler extends LoggingRequestHandler { + private final static ObjectMapper objectMapper = new ObjectMapper(); + + public MaintainerApiHandler(Executor executor, AccessLog accessLog) { + super(executor, accessLog); + } + + @Override + public HttpResponse handle(HttpRequest request) { + try { + switch (request.getMethod()) { + case POST: return handlePOST(request); + default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported"); + } + } +// catch (NotFoundException e) { 
+// return ErrorResponse.notFoundError(Exceptions.toMessageString(e)); +// } + catch (IllegalArgumentException e) { + return ErrorResponse.badRequest(Exceptions.toMessageString(e)); + } + catch (RuntimeException e) { + log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e); + return ErrorResponse.internalServerError(Exceptions.toMessageString(e)); + } + } + + private HttpResponse handlePOST(HttpRequest request) { + switch (request.getUri().getPath()) { + case "/maintainer/v1": + try { + List<Maintainer.MaintenanceJob> maintenanceJobs = objectMapper.readValue( + request.getData(), new TypeReference<List<Maintainer.MaintenanceJob>>(){}); + Maintainer.executeJobs(maintenanceJobs); + } catch (IOException e) { + throw new RuntimeException("Failed parsing JSON request", e); + } + return new MessageResponse("Successfully executed command"); + default: + return ErrorResponse.notFoundError("Fail"); +// throw new NotFoundException("Nothing at path '" + request.getUri().getPath() + "'"); + } + } +} diff --git a/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MessageResponse.java b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MessageResponse.java new file mode 100644 index 00000000000..8fb82cd18c9 --- /dev/null +++ b/node-maintainer/src/main/java/com/yahoo/vespa/hosted/node/maintainer/restapi/v1/MessageResponse.java @@ -0,0 +1,33 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
+package com.yahoo.vespa.hosted.node.maintainer.restapi.v1; + +import com.yahoo.container.jdisc.HttpResponse; +import com.yahoo.slime.JsonFormat; +import com.yahoo.slime.Slime; + +import java.io.IOException; +import java.io.OutputStream; + +/** + * A 200 ok response with a message in JSON + * + * @author bratseth + */ +public class MessageResponse extends HttpResponse { + + private final Slime slime = new Slime(); + + public MessageResponse(String message) { + super(200); + slime.setObject().setString("message", message); + } + + @Override + public void render(OutputStream stream) throws IOException { + new JsonFormat(true).encode(stream, slime); + } + + @Override + public String getContentType() { return "application/json"; } + +} diff --git a/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollectorTest.java b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollectorTest.java new file mode 100644 index 00000000000..b05f48edad2 --- /dev/null +++ b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoreCollectorTest.java @@ -0,0 +1,228 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
+package com.yahoo.vespa.hosted.node.maintainer; + +import com.yahoo.collections.Pair; +import com.yahoo.system.ProcessExecuter; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * @author freva + */ +public class CoreCollectorTest { + private final ProcessExecuter processExecuter = mock(ProcessExecuter.class); + private final CoreCollector coreCollector = new CoreCollector(processExecuter); + + private final Path TEST_CORE_PATH = Paths.get("/tmp/core.1234"); + private final Path TEST_BIN_PATH = Paths.get("/usr/bin/program"); + private final List<String> GDB_BACKTRACE = Arrays.asList("[New Thread 2703]", + "Core was generated by `/usr/bin/program\'.", "Program terminated with signal 11, Segmentation fault.", + "#0 0x00000000004004d8 in main (argv=0x1) at main.c:4", "4\t printf(argv[3]);", + "#0 0x00000000004004d8 in main (argv=0x1) at main.c:4"); + + @Rule + public TemporaryFolder folder= new TemporaryFolder(); + + private void mockExec(String[] cmd, String output) throws IOException, InterruptedException { + mockExec(cmd, output, ""); + } + + private void mockExec(String[] cmd, String output, String error) throws IOException, InterruptedException { + when(processExecuter.exec(cmd)).thenReturn(new Pair<Integer, String>(error.isEmpty() ? 
0 : 1, output + error)); + } + + @Test + public void extractsBinaryPathTest() throws IOException, InterruptedException { + final String[] cmd = {"file", TEST_CORE_PATH.toString()}; + + mockExec(cmd, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, from " + + "'/usr/bin/program'"); + assertEquals(TEST_BIN_PATH, coreCollector.readBinPath(TEST_CORE_PATH)); + + mockExec(cmd, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, from " + + "'/usr/bin/program --foo --bar baz'"); + assertEquals(TEST_BIN_PATH, coreCollector.readBinPath(TEST_CORE_PATH)); + + mockExec(cmd, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, from " + + "'/usr/bin//program'"); + assertEquals(TEST_BIN_PATH, coreCollector.readBinPath(TEST_CORE_PATH)); + + mockExec(cmd, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, " + + "from 'program', real uid: 0, effective uid: 0, real gid: 0, effective gid: 0, " + + "execfn: '/usr/bin/program', platform: 'x86_64"); + assertEquals(TEST_BIN_PATH, coreCollector.readBinPath(TEST_CORE_PATH)); + + + Path fallbackResponse = Paths.get("/response/from/fallback"); + mockExec(new String[]{"sh", "-c", "\"/home/y/bin64/gdb -n -batch -core /tmp/core.1234 | grep '^Core was generated by'\""}, + "Core was generated by `/response/from/fallback'."); + mockExec(cmd, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style"); + assertEquals(fallbackResponse, coreCollector.readBinPath(TEST_CORE_PATH)); + + mockExec(cmd, "", "Error code 1234"); + assertEquals(fallbackResponse, coreCollector.readBinPath(TEST_CORE_PATH)); + } + + @Test + public void extractsBinaryPathUsingGdbTest() throws IOException, InterruptedException { + final String[] cmd = new String[]{"sh", "-c", + "\"/home/y/bin64/gdb -n -batch -core /tmp/core.1234 | grep '^Core was generated by'\""}; + + mockExec(cmd, "Core was generated by `/usr/bin/program-from-gdb 
--identity foo/search/cluster.content_'."); + assertEquals(Paths.get("/usr/bin/program-from-gdb"), coreCollector.readBinPathFallback(TEST_CORE_PATH)); + + mockExec(cmd, "", "Error 123"); + try { + coreCollector.readBinPathFallback(TEST_CORE_PATH); + fail("Expected not to be able to get bin path"); + } catch (RuntimeException e) { + assertEquals(e.getMessage(), "Failed to extract binary path from (1,Error 123)"); + } + } + + @Test + public void extractsBacktraceUsingGdb() throws IOException, InterruptedException { + mockExec(new String[]{"/home/y/bin64/gdb", "-n", "-ex", "bt", "-batch", "/usr/bin/program", "/tmp/core.1234"}, + String.join("\n", GDB_BACKTRACE)); + assertEquals(GDB_BACKTRACE, coreCollector.readBacktrace(TEST_CORE_PATH, TEST_BIN_PATH, false)); + + mockExec(new String[]{"/home/y/bin64/gdb", "-n", "-ex", "bt", "-batch", "/usr/bin/program", "/tmp/core.1234"}, + "", "Failure"); + try { + coreCollector.readBacktrace(TEST_CORE_PATH, TEST_BIN_PATH, false); + fail("Expected not to be able to read backtrace"); + } catch (RuntimeException e) { + assertEquals("Failed to read backtrace (1,Failure)", e.getMessage()); + } + } + + @Test + public void extractsBacktraceFromAllThreadsUsingGdb() throws IOException, InterruptedException { + mockExec(new String[]{"/home/y/bin64/gdb", "-n", "-ex", "thread apply all bt", "-batch", + "/usr/bin/program", "/tmp/core.1234"}, + String.join("\n", GDB_BACKTRACE)); + assertEquals(GDB_BACKTRACE, coreCollector.readBacktrace(TEST_CORE_PATH, TEST_BIN_PATH, true)); + } + + @Test + public void collectsDataTest() throws IOException, InterruptedException { + mockExec(new String[]{"file", TEST_CORE_PATH.toString()}, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, from " + + "'/usr/bin/program'"); + mockExec(new String[]{"/home/y/bin64/gdb", "-n", "-ex", "bt", "-batch", "/usr/bin/program", "/tmp/core.1234"}, + String.join("\n", GDB_BACKTRACE)); + mockExec(new String[]{"/home/y/bin64/gdb", "-n", "-ex", "thread 
apply all bt", "-batch", + "/usr/bin/program", "/tmp/core.1234"}, + String.join("\n", GDB_BACKTRACE)); + + Map<String, Object> expectedData = new HashMap<>(); + expectedData.put("bin_path", TEST_BIN_PATH.toString()); + expectedData.put("backtrace", new ArrayList<>(GDB_BACKTRACE)); + expectedData.put("backtrace_all_threads", new ArrayList<>(GDB_BACKTRACE)); + assertEquals(expectedData, coreCollector.collect(TEST_CORE_PATH)); + } + + @Test + public void collectsPartialIfUnableToDetermineDumpingProgramTest() { + Map<String, Object> expectedData = new HashMap<>(); + assertEquals(expectedData, coreCollector.collect(TEST_CORE_PATH)); + } + + @Test + public void collectsPartialIfBacktraceFailsTest() throws IOException, InterruptedException { + mockExec(new String[]{"file", TEST_CORE_PATH.toString()}, + "/tmp/core.1234: ELF 64-bit LSB core file x86-64, version 1 (SYSV), SVR4-style, from " + + "'/usr/bin/program'"); + mockExec(new String[]{"/home/y/bin64/gdb -n -ex bt -batch /usr/bin/program /tmp/core.1234"}, + "", "Failure"); + + Map<String, Object> expectedData = new HashMap<>(); + expectedData.put("bin_path", TEST_BIN_PATH.toString()); + assertEquals(expectedData, coreCollector.collect(TEST_CORE_PATH)); + } + + @Test + public void parseTotalMemoryTestTest() throws IOException { + String memInfo = "MemTotal: 100000000 kB\nMemUsed: 1000000 kB\n"; + assertEquals(100000000, coreCollector.parseTotalMemorySize(memInfo)); + + String badMemInfo = "This string has no memTotal value"; + try { + coreCollector.parseTotalMemorySize(badMemInfo); + fail("Expected to fail on parsing"); + } catch (RuntimeException e) { + assertEquals("Could not parse meminfo: " + badMemInfo, e.getMessage()); + } + } + + @Test + public void testDeleteUncompressedFiles() throws IOException { + final String documentId = "UIDD-ABCD-EFGH"; + final String coreDumpFilename = "core.dump"; + + Path coredumpPath = folder.newFolder("crash").toPath() + .resolve(CoredumpHandler.PROCESSING_DIRECTORY_NAME) + 
.resolve(documentId); + coredumpPath.toFile().mkdirs(); + coredumpPath.resolve(coreDumpFilename).toFile().createNewFile(); + + Set<Path> expectedContentsOfCoredump = new HashSet<>(Arrays.asList( + coredumpPath.resolve(CoredumpHandler.METADATA_FILE_NAME), + coredumpPath.resolve(coreDumpFilename + ".lz4"))); + expectedContentsOfCoredump.forEach(path -> { + try { + path.toFile().createNewFile(); + } catch (IOException ignored) { ignored.printStackTrace();} + }); + coreCollector.deleteDecompressedCoredump(coredumpPath.resolve(coreDumpFilename)); + + assertEquals(expectedContentsOfCoredump, Files.list(coredumpPath).collect(Collectors.toSet())); + } + + @Test + public void testDeleteUncompressedFilesWithoutLz4() throws IOException { + final String documentId = "UIDD-ABCD-EFGH"; + final String coreDumpFilename = "core.dump"; + + Path coredumpPath = folder.newFolder("crash").toPath() + .resolve(CoredumpHandler.PROCESSING_DIRECTORY_NAME) + .resolve(documentId); + coredumpPath.toFile().mkdirs(); + + Set<Path> expectedContentsOfCoredump = new HashSet<>(Arrays.asList( + coredumpPath.resolve(CoredumpHandler.METADATA_FILE_NAME), + coredumpPath.resolve(coreDumpFilename))); + expectedContentsOfCoredump.forEach(path -> { + try { + path.toFile().createNewFile(); + } catch (IOException ignored) { ignored.printStackTrace();} + }); + coreCollector.deleteDecompressedCoredump(coredumpPath.resolve(coreDumpFilename)); + + assertEquals(expectedContentsOfCoredump, Files.list(coredumpPath).collect(Collectors.toSet())); + } +} diff --git a/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandlerTest.java b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandlerTest.java new file mode 100644 index 00000000000..36c582dcd5f --- /dev/null +++ b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/CoredumpHandlerTest.java @@ -0,0 +1,202 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. 
See LICENSE in the project root. +package com.yahoo.vespa.hosted.node.maintainer; + +import org.apache.http.HttpHeaders; +import org.apache.http.HttpResponse; +import org.apache.http.HttpVersion; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.DefaultHttpResponseFactory; +import org.apache.http.message.BasicStatusLine; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockito.ArgumentCaptor; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * @author freva + */ +public class CoredumpHandlerTest { + private final HttpClient httpClient = mock(HttpClient.class); + private final CoreCollector coreCollector = mock(CoreCollector.class); + private static final Map<String, Object> attributes = new LinkedHashMap<>(); + private static final Map<String, Object> metadata = new LinkedHashMap<>(); + private static final String expectedMetadataFileContents = "{\"fields\":{" + + "\"bin_path\":\"/bin/bash\"," + + "\"backtrace\":[\"call 1\",\"function 2\",\"something something\"]," + + "\"hostname\":\"host123.yahoo.com\"," + + "\"vespa_version\":\"6.48.4\"," + + "\"kernel_version\":\"2.6.32-573.22.1.el6.YAHOO.20160401.10.x86_64\"," + + "\"docker_image\":\"vespa/ci:6.48.4\"}}"; + + static { + attributes.put("hostname", "host123.yahoo.com"); + 
attributes.put("vespa_version", "6.48.4"); + attributes.put("kernel_version", "2.6.32-573.22.1.el6.YAHOO.20160401.10.x86_64"); + attributes.put("docker_image", "vespa/ci:6.48.4"); + + metadata.put("bin_path", "/bin/bash"); + metadata.put("backtrace", Arrays.asList("call 1", "function 2", "something something")); + } + + private final CoredumpHandler coredumpHandler = new CoredumpHandler(httpClient, coreCollector); + + + @Rule + public TemporaryFolder folder= new TemporaryFolder(); + + @Test + public void ignoresIncompleteCoredumps() throws IOException { + Path coredumpPath = createCoredump(".core.dump"); + Path crashPath = coredumpPath.getParent(); + Path processingPath = coredumpHandler.processCoredumps(crashPath, attributes); + + // The 'processing' directory should be empty + assertFolderContents(processingPath); + + // The 'crash' directory should have 'processing' and the incomplete core dump in it + assertFolderContents(crashPath, processingPath.getFileName().toString(), coredumpPath.getFileName().toString()); + } + + @Test + public void startProcessingTest() throws IOException { + Path coredumpPath = createCoredump("core.dump"); + Path crashPath = coredumpPath.getParent(); + Path processingPath = crashPath.resolve("processing_dir"); + coredumpHandler.startProcessing(coredumpPath, crashPath.resolve("processing_dir")); + + // Contents of 'crash' should be only the 'processing' directory + assertFolderContents(crashPath, processingPath.getFileName().toString()); + + // The 'processing' directory should have 1 directory inside for the core.dump we just created + List<Path> processedCoredumps = Files.list(processingPath).collect(Collectors.toList()); + assertEquals(processedCoredumps.size(), 1); + + // Inside the coredump directory, there should be 1 file: core.dump + assertFolderContents(processedCoredumps.get(0), coredumpPath.getFileName().toString()); + } + + @Test + public void coredumpMetadataCollectAndWriteTest() throws IOException, InterruptedException { + 
when(coreCollector.collect(any())).thenReturn(metadata); + Path coredumpPath = createCoredump("core.dump"); + Path crashPath = coredumpPath.getParent(); + Path processingPath = coredumpHandler.processCoredumps(crashPath, attributes); + + // Inside 'processing' directory, there should be a new directory containing 'metadata.json' file + List<Path> processedCoredumps = Files.list(processingPath).collect(Collectors.toList()); + String metadataFileContents = new String(Files.readAllBytes( + processedCoredumps.get(0).resolve(CoredumpHandler.METADATA_FILE_NAME))); + assertEquals(expectedMetadataFileContents, metadataFileContents); + } + + @Test + public void reportSuccessCoredumpTest() throws IOException, URISyntaxException, InterruptedException { + final String documentId = "UIDD-ABCD-EFGH"; + Path coredumpPath = createProcessedCoredump(documentId); + + setNextHttpResponse(200, Optional.empty()); + coredumpHandler.report(coredumpPath.getParent()); + validateNextHttpPost(documentId, expectedMetadataFileContents); + } + + @Test + public void reportFailCoredumpTest() throws IOException, URISyntaxException { + final String documentId = "UIDD-ABCD-EFGH"; + + Path metadataPath = createProcessedCoredump(documentId); + Path crashPath = metadataPath.getParent().getParent().getParent(); + Path donePath = folder.newFolder("done").toPath(); + + setNextHttpResponse(500, Optional.of("Internal server error")); + coredumpHandler.reportCoredumps(crashPath.resolve(CoredumpHandler.PROCESSING_DIRECTORY_NAME), donePath); + validateNextHttpPost(documentId, expectedMetadataFileContents); + + // The coredump should not have been moved out of 'processing' and into 'done' as the report failed + assertFolderContents(donePath); + assertFolderContents(metadataPath.getParent(), CoredumpHandler.METADATA_FILE_NAME); + } + + @Test + public void finishProcessingTest() throws IOException { + final String documentId = "UIDD-ABCD-EFGH"; + + Path coredumpPath = createProcessedCoredump(documentId); + Path 
crashPath = coredumpPath.getParent().getParent().getParent(); + Path donePath = folder.newFolder("done").toPath(); + + coredumpHandler.finishProcessing(coredumpPath.getParent(), donePath); + + // The coredump should've been moved out of 'processing' and into 'done' + assertFolderContents(crashPath.resolve(CoredumpHandler.PROCESSING_DIRECTORY_NAME)); + assertFolderContents(donePath.resolve(documentId), CoredumpHandler.METADATA_FILE_NAME); + } + + + private static void assertFolderContents(Path pathToFolder, String... filenames) throws IOException { + Set<Path> expectedContentsOfFolder = Arrays.stream(filenames) + .map(pathToFolder::resolve) + .collect(Collectors.toSet()); + Set<Path> actualContentsOfFolder = Files.list(pathToFolder).collect(Collectors.toSet()); + assertEquals(expectedContentsOfFolder, actualContentsOfFolder); + } + + private Path createCoredump(String coredumpName) throws IOException { + Path crashPath = folder.newFolder("crash").toPath(); + Path coredumpPath = crashPath.resolve(coredumpName); + coredumpPath.toFile().createNewFile(); + return coredumpPath; + } + + private Path createProcessedCoredump(String documentId) throws IOException { + Path crashPath = folder.newFolder("crash").toPath(); + Path coredumpPath = crashPath + .resolve(CoredumpHandler.PROCESSING_DIRECTORY_NAME) + .resolve(documentId) + .resolve(CoredumpHandler.METADATA_FILE_NAME); + coredumpPath.getParent().toFile().mkdirs(); + return Files.write(coredumpPath, expectedMetadataFileContents.getBytes()); + } + + private void setNextHttpResponse(int code, Optional<String> message) throws IOException { + DefaultHttpResponseFactory responseFactory = new DefaultHttpResponseFactory(); + HttpResponse httpResponse = responseFactory.newHttpResponse( + new BasicStatusLine(HttpVersion.HTTP_1_1, code, null), null); + if (message.isPresent()) httpResponse.setEntity(new StringEntity(message.get())); + + when(httpClient.execute(any())).thenReturn(httpResponse); + } + + private void 
validateNextHttpPost(String documentId, String expectedBody) throws IOException, URISyntaxException { + ArgumentCaptor<HttpPost> capturedPost = ArgumentCaptor.forClass(HttpPost.class); + verify(httpClient).execute(capturedPost.capture()); + + URI expectedURI = new URI(CoredumpHandler.FEED_ENDPOINT + "/" + documentId); + assertEquals(expectedURI, capturedPost.getValue().getURI()); + assertEquals("application/json", capturedPost.getValue().getHeaders(HttpHeaders.CONTENT_TYPE)[0].getValue()); + assertEquals(expectedBody, + new BufferedReader(new InputStreamReader(capturedPost.getValue().getEntity().getContent())).readLine()); + } +} diff --git a/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppDataTest.java b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppDataTest.java new file mode 100644 index 00000000000..6406731c2b3 --- /dev/null +++ b/node-maintainer/src/test/java/com/yahoo/vespa/hosted/node/maintainer/DeleteOldAppDataTest.java @@ -0,0 +1,287 @@ +// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. 
+package com.yahoo.vespa.hosted.node.maintainer; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; +import java.util.Arrays; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertTrue; + +/** + * @author freva + */ +public class DeleteOldAppDataTest { + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Before + public void initFiles() throws IOException { + for (int i=0; i<10; i++) { + File temp = folder.newFile("test_" + i + ".json"); + temp.setLastModified(System.currentTimeMillis() - i*Duration.ofSeconds(130).toMillis()); + } + + for (int i=0; i<7; i++) { + File temp = folder.newFile("test_" + i + "_file.test"); + temp.setLastModified(System.currentTimeMillis() - i*Duration.ofSeconds(250).toMillis()); + } + + for (int i=0; i<5; i++) { + File temp = folder.newFile(i + "-abc" + ".json"); + temp.setLastModified(System.currentTimeMillis() - i*Duration.ofSeconds(80).toMillis()); + } + + File temp = folder.newFile("week_old_file.json"); + temp.setLastModified(System.currentTimeMillis() - Duration.ofDays(8).toMillis()); + } + + @Test + public void testDeleteAll() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, null, false); + + assertThat(folder.getRoot().listFiles().length, is(0)); + } + + @Test + public void testDeletePrefix() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, "^test_", false); + + assertThat(folder.getRoot().listFiles().length, is(6)); // 5 abc files + 1 week_old_file + } + + @Test + public void testDeleteSuffix() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, ".json$", false); + + assertThat(folder.getRoot().listFiles().length, 
is(7)); + } + + @Test + public void testDeletePrefixAndSuffix() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, "^test_.*\\.json$", false); + + assertThat(folder.getRoot().listFiles().length, is(13)); // 5 abc files + 7 test_*_file.test files + week_old_file + } + + @Test + public void testDeleteOld() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 600, null, false); + + assertThat(folder.getRoot().listFiles().length, is(13)); // All 23 - 6 (from test_*_.json) - 3 (from test_*_file.test) - 1 week old file + } + + @Test + public void testDeleteWithAllParameters() { + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 200, "^test_.*\\.json$", false); + + assertThat(folder.getRoot().listFiles().length, is(15)); // All 23 - 8 (from test_*_.json) + } + + @Test + public void testDeleteWithSubDirectoriesNoRecursive() throws IOException { + initSubDirectories(); + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, "^test_.*\\.json$", false); + + // 6 test_*.json from test_folder1/ + // + 9 test_*.json and 4 abc_*.json from test_folder2/ + // + 13 test_*.json from test_folder2/subSubFolder2/ + // + 7 test_*_file.test and 5 *-abc.json and 1 week_old_file from root + // + test_folder1/ and test_folder2/ and test_folder2/subSubFolder2/ themselves + assertThat(getNumberOfFilesAndDirectoriesIn(folder.getRoot()), is(48)); + } + + @Test + public void testDeleteWithSubDirectoriesRecursive() throws IOException { + initSubDirectories(); + DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, "^test_.*\\.json$", true); + + // 4 abc_*.json from test_folder2/ + // + 7 test_*_file.test and 5 *-abc.json and 1 week_old_file from root + // + test_folder2/ itself + assertThat(getNumberOfFilesAndDirectoriesIn(folder.getRoot()), is(18)); + } + + @Test + public void testDeleteFilesWhereFilenameRegexAlsoMatchesDirectories() throws IOException { + initSubDirectories(); + + 
DeleteOldAppData.deleteFiles(folder.getRoot().getAbsolutePath(), 0, "^test_", false); + + assertThat(folder.getRoot().listFiles().length, is(8)); // 5 abc files + 1 week_old_file + 2 directories + } + + @Test + public void testGetContentsOfNonExistingDirectory() throws IOException { + assertArrayEquals(new File[0], DeleteOldAppData.getContentsOfDirectory("/some/made/up/dir/")); + } + + @Test(expected=IllegalArgumentException.class) + public void testDeleteFilesExceptNMostRecentWithNegativeN() { + DeleteOldAppData.deleteFilesExceptNMostRecent(folder.getRoot().getAbsolutePath(), -5); + } + + @Test + public void testDeleteFilesExceptFiveMostRecent() { + DeleteOldAppData.deleteFilesExceptNMostRecent(folder.getRoot().getAbsolutePath(), 5); + + assertThat(folder.getRoot().listFiles().length, is(5)); + + String[] oldestFiles = {"test_5_file.test", "test_6_file.test", "test_8.json", "test_9.json", "week_old_file.json"}; + String[] remainingFiles = folder.getRoot().list(); + Arrays.sort(remainingFiles); + + assertArrayEquals(oldestFiles, remainingFiles); + } + + @Test + public void testDeleteFilesExceptNMostRecentWithLargeN() { + String[] filesPreDelete = folder.getRoot().list(); + + DeleteOldAppData.deleteFilesExceptNMostRecent(folder.getRoot().getAbsolutePath(), 50); + + assertArrayEquals(filesPreDelete, folder.getRoot().list()); + } + + @Test + public void testDeleteFilesLargerThan10B() throws IOException { + initSubDirectories(); + + File temp1 = new File(folder.getRoot(), "small_file"); + writeNBytesToFile(temp1, 50); + + File temp2 = new File(folder.getRoot(), "some_file"); + writeNBytesToFile(temp2, 20); + + File temp3 = new File(folder.getRoot(), "test_folder1/some_other_file"); + writeNBytesToFile(temp3, 75); + + DeleteOldAppData.deleteFilesLargerThan(folder.getRoot(), 10); + + assertThat(getNumberOfFilesAndDirectoriesIn(folder.getRoot()), is(58)); + assertThat(temp1.exists() || temp2.exists() || temp3.exists(), is(false)); + } + + @Test + public void 
testDeleteDirectories() throws IOException { + initSubDirectories(); + + DeleteOldAppData.deleteDirectories(folder.getRoot().getAbsolutePath(), 0, ".*folder2"); + + //23 files in root + // + 6 in test_folder1 + test_folder1 itself + assertThat(getNumberOfFilesAndDirectoriesIn(folder.getRoot()), is(30)); + } + + @Test + public void testDeleteDirectoriesBasedOnAge() throws IOException { + initSubDirectories(); + + DeleteOldAppData.deleteDirectories(folder.getRoot().getAbsolutePath(), 50, ".*folder.*"); + + //23 files in root + // + 13 in test_folder2 + // + 13 in subSubFolder2 + // + test_folder2 + subSubFolder2 itself + assertThat(getNumberOfFilesAndDirectoriesIn(folder.getRoot()), is(51)); + } + + @Test + public void testRecursivelyDeleteDirectory() throws IOException { + initSubDirectories(); + DeleteOldAppData.recursiveDelete(folder.getRoot().toString()); + assertTrue(!folder.getRoot().exists()); + } + + @Test + public void testRecursivelyDeleteRegularFile() throws IOException { + File file = folder.newFile(); + assertTrue(file.exists()); + assertTrue(file.isFile()); + DeleteOldAppData.recursiveDelete(file.toString()); + assertTrue(!file.exists()); + } + + @Test + public void testRecursivelyDeleteNonExistingFile() throws IOException { + File file = folder.getRoot().toPath().resolve("non-existing-file.json").toFile(); + assertTrue(!file.exists()); + DeleteOldAppData.recursiveDelete(file.toString()); + assertTrue(!file.exists()); + } + + @Test + public void testInitSubDirectories() throws IOException { + initSubDirectories(); + assertTrue(folder.getRoot().exists()); + assertTrue(folder.getRoot().isDirectory()); + + Path test_folder1 = folder.getRoot().toPath().resolve("test_folder1"); + assertTrue(test_folder1.toFile().exists()); + assertTrue(test_folder1.toFile().isDirectory()); + + Path test_folder2 = folder.getRoot().toPath().resolve("test_folder2"); + assertTrue(test_folder2.toFile().exists()); + assertTrue(test_folder2.toFile().isDirectory()); + + Path 
subSubFolder2 = test_folder2.resolve("subSubFolder2"); + assertTrue(subSubFolder2.toFile().exists()); + assertTrue(subSubFolder2.toFile().isDirectory()); + } + + private void initSubDirectories() throws IOException { + File subFolder1 = folder.newFolder("test_folder1"); + File subFolder2 = folder.newFolder("test_folder2"); + File subSubFolder2 = folder.newFolder("test_folder2/subSubFolder2"); + + + for (int j=0; j<6; j++) { + File temp = File.createTempFile("test_", ".json", subFolder1); + temp.setLastModified(System.currentTimeMillis() - (j+1)*Duration.ofSeconds(60).toMillis()); + } + + for (int j=0; j<9; j++) { + File.createTempFile("test_", ".json", subFolder2); + } + + for (int j=0; j<4; j++) { + File.createTempFile("abc_", ".txt", subFolder2); + } + + for (int j=0; j<13; j++) { + File temp = File.createTempFile("test_", ".json", subSubFolder2); + temp.setLastModified(System.currentTimeMillis() - (j+1)*Duration.ofSeconds(40).toMillis()); + } + + //Must be after all the files have been created + subFolder1.setLastModified(System.currentTimeMillis() - Duration.ofHours(2).toMillis()); + subFolder2.setLastModified(System.currentTimeMillis() - Duration.ofHours(1).toMillis()); + subSubFolder2.setLastModified(System.currentTimeMillis() - Duration.ofHours(3).toMillis()); + } + + private static int getNumberOfFilesAndDirectoriesIn(File folder) { + int total = 0; + for (File file : folder.listFiles()) { + if (file.isDirectory()) { + total += getNumberOfFilesAndDirectoriesIn(file); + } + total++; + } + + return total; + } + + private static void writeNBytesToFile(File file, int nBytes) throws IOException { + Files.write(file.toPath(), new byte[nBytes]); + } +} |