modules/common/ant-bundle/src/main/java/org/rhq/bundle/ant/type/DeploymentUnitType.java
| 67
modules/core/domain/src/main/java/org/rhq/core/domain/bundle/BundleDeploymentStatus.java
| 8
modules/core/util/src/main/java/org/rhq/core/util/updater/ChangesFileHashcodeMap.java
| 9
modules/core/util/src/main/java/org/rhq/core/util/updater/Deployer.java
| 26
modules/core/util/src/main/java/org/rhq/core/util/updater/DeploymentData.java
| 14
modules/core/util/src/main/java/org/rhq/core/util/updater/FileHashcodeMap.java
| 66
modules/core/util/src/test/java/org/rhq/core/util/updater/DeployerTest.java
| 14
modules/core/util/src/test/java/org/rhq/core/util/updater/FileHashcodeMapTest.java
| 109
modules/core/util/src/test/java/org/rhq/core/util/updater/ManageRootDirTest.java
| 1101 ++++++++++
modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawFileTest.java
| 37
modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawRelativeFileTest.java
| 37
modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerTest.java
| 94
modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentListView.java
| 2
modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentView.java
| 2
modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleResourceDeploymentHistoryListView.java
| 2
modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/destination/BundleDestinationListView.java
| 2
modules/enterprise/server/jar/src/main/java/org/rhq/enterprise/server/bundle/BundleManagerBean.java
| 9
17 files changed, 1490 insertions(+), 109 deletions(-)
New commits:
commit 067f66c0c2e4f1e9f544158f9d356f0ca70f2479
Author: John Mazzitelli <mazz(a)redhat.com>
Date: Wed Dec 22 03:25:03 2010 -0500
BZ 659142 - Add support for deploying bundles into a folder in which
the bundle doesn't have exclusive usage. This is to support being able to deploy
a bundle in a JBossAS deploy/ directory. All unit tests pass with this
commit, with a new set of unit tests being added to test this
feature. However, more testing needs to be done on the ant recipe side
to make sure the ant recipe is passing this flag correctly to the
Deployer utility.
diff --git
a/modules/common/ant-bundle/src/main/java/org/rhq/bundle/ant/type/DeploymentUnitType.java
b/modules/common/ant-bundle/src/main/java/org/rhq/bundle/ant/type/DeploymentUnitType.java
index dda489c..3727adb 100644
---
a/modules/common/ant-bundle/src/main/java/org/rhq/bundle/ant/type/DeploymentUnitType.java
+++
b/modules/common/ant-bundle/src/main/java/org/rhq/bundle/ant/type/DeploymentUnitType.java
@@ -17,9 +17,19 @@
*/
package org.rhq.bundle.ant.type;
+import java.io.File;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Pattern;
+
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Target;
+
import org.rhq.bundle.ant.DeployPropertyNames;
import org.rhq.core.domain.configuration.Configuration;
import org.rhq.core.domain.configuration.PropertySimple;
@@ -30,15 +40,6 @@ import org.rhq.core.util.updater.Deployer;
import org.rhq.core.util.updater.DeploymentData;
import org.rhq.core.util.updater.DeploymentProperties;
-import java.io.File;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Pattern;
-
/**
* An Ant task for deploying a bundle or previewing the deployment.
*
@@ -46,13 +47,13 @@ import java.util.regex.Pattern;
*/
public class DeploymentUnitType extends AbstractBundleType {
private String name;
+ private String manageRootDir = Boolean.TRUE.toString();
private Map<File, File> files = new LinkedHashMap<File, File>();
private Set<File> rawFilesToReplace = new LinkedHashSet<File>();
private Set<File> archives = new LinkedHashSet<File>();
private Map<File, Pattern> archiveReplacePatterns = new HashMap<File,
Pattern>();
private SystemServiceType systemService;
private Pattern ignorePattern;
- private boolean preview;
private String preinstallTarget;
private String postinstallTarget;
@@ -78,7 +79,8 @@ public class DeploymentUnitType extends AbstractBundleType {
TemplateEngine templateEngine = createTemplateEngine();
if (this.files.isEmpty() && this.archives.isEmpty()) {
- throw new BuildException("You must specify at least one file to deploy
via nested rhq:file, rhq:archive, and/or rhq:system-service elements.");
+ throw new BuildException(
+ "You must specify at least one file to deploy via nested rhq:file,
rhq:archive, and/or rhq:system-service elements.");
}
if (!this.files.isEmpty()) {
log("Deploying files " + this.files + "...",
Project.MSG_VERBOSE);
@@ -87,8 +89,17 @@ public class DeploymentUnitType extends AbstractBundleType {
log("Deploying archives " + this.archives + "...",
Project.MSG_VERBOSE);
}
+ boolean willManageRootDir = Boolean.parseBoolean(this.manageRootDir);
+ if (willManageRootDir) {
+ log("Managing the root directory of this deployment unit - unrelated
files found will be removed",
+ Project.MSG_VERBOSE);
+ } else {
+ log("Not managing the root directory of this deployment unit - unrelated
files will remain intact",
+ Project.MSG_VERBOSE);
+ }
+
DeploymentData deploymentData = new DeploymentData(deploymentProps,
this.archives, this.files, deployDir,
- this.archiveReplacePatterns, this.rawFilesToReplace, templateEngine,
this.ignorePattern);
+ this.archiveReplacePatterns, this.rawFilesToReplace, templateEngine,
this.ignorePattern, willManageRootDir);
Deployer deployer = new Deployer(deploymentData);
try {
DeployDifferences diffs = getProject().getDeployDifferences();
@@ -98,7 +109,7 @@ public class DeploymentUnitType extends AbstractBundleType {
} else {
deployer.deploy(diffs, clean, dryRun);
}
- getProject().log("Results:\n" + diffs + "\n");
+ getProject().log("Results:\n" + diffs + "\n");
} catch (Exception e) {
throw new BuildException("Failed to deploy bundle '" +
getProject().getBundleName() + "' version "
+ getProject().getBundleVersion() + ": " + e, e);
@@ -111,7 +122,8 @@ public class DeploymentUnitType extends AbstractBundleType {
if (this.postinstallTarget != null) {
Target target = (Target)
getProject().getTargets().get(this.postinstallTarget);
if (target == null) {
- throw new BuildException("Specified postinstall target (" +
this.postinstallTarget + ") does not exist.");
+ throw new BuildException("Specified postinstall target (" +
this.postinstallTarget
+ + ") does not exist.");
}
target.performTasks();
}
@@ -140,7 +152,7 @@ public class DeploymentUnitType extends AbstractBundleType {
this.systemService.uninstall();
}
}
-
+
public String getName() {
return name;
}
@@ -149,20 +161,24 @@ public class DeploymentUnitType extends AbstractBundleType {
this.name = name;
}
- public Map<File, File> getFiles() {
- return files;
+ public String getManageRootDir() {
+ return manageRootDir;
}
- public Set<File> getArchives() {
- return archives;
+ public void setManageRootDir(String booleanString) {
+ if (!Boolean.TRUE.toString().equalsIgnoreCase(booleanString)
+ && !Boolean.FALSE.toString().equalsIgnoreCase(booleanString)) {
+ throw new BuildException("manageRootDir attribute must be 'true'
or 'false': " + booleanString);
+ }
+ this.manageRootDir = booleanString;
}
- public boolean isPreview() {
- return this.preview;
+ public Map<File, File> getFiles() {
+ return files;
}
- public void setPreview(boolean preview) {
- this.preview = preview;
+ public Set<File> getArchives() {
+ return archives;
}
public String getPreinstallTarget() {
@@ -183,7 +199,8 @@ public class DeploymentUnitType extends AbstractBundleType {
public void addConfigured(SystemServiceType systemService) {
if (this.systemService != null) {
- throw new IllegalStateException("A rhq:deploymentUnit element can only
have one rhq:system-service child element.");
+ throw new IllegalStateException(
+ "A rhq:deployment-unit element can only have one rhq:system-service
child element.");
}
this.systemService = systemService;
this.systemService.validate();
@@ -230,7 +247,7 @@ public class DeploymentUnitType extends AbstractBundleType {
}
// And add the special rhq.deploy.dir prop.
templateEngine.getTokens().put(DeployPropertyNames.DEPLOY_DIR,
- getProject().getProperty(DeployPropertyNames.DEPLOY_DIR));
+ getProject().getProperty(DeployPropertyNames.DEPLOY_DIR));
return templateEngine;
}
}
\ No newline at end of file
diff --git
a/modules/core/util/src/main/java/org/rhq/core/util/updater/ChangesFileHashcodeMap.java
b/modules/core/util/src/main/java/org/rhq/core/util/updater/ChangesFileHashcodeMap.java
index 9fc824e..3f0c364 100644
---
a/modules/core/util/src/main/java/org/rhq/core/util/updater/ChangesFileHashcodeMap.java
+++
b/modules/core/util/src/main/java/org/rhq/core/util/updater/ChangesFileHashcodeMap.java
@@ -41,6 +41,7 @@ public class ChangesFileHashcodeMap extends FileHashcodeMap {
private final Map<String, String> additions = new HashMap<String,
String>();
private final Map<String, String> changes = new HashMap<String,
String>();
private final Set<String> ignored = new HashSet<String>();
+ private final Set<String> skipped = new HashSet<String>();
/**
* Creates an file/hashcode map populated with a map of original file data.
@@ -87,4 +88,12 @@ public class ChangesFileHashcodeMap extends FileHashcodeMap {
public Set<String> getIgnored() {
return ignored;
}
+
+ /**
+ * @return the files and directories located directly under the root deploy dir that
were skipped.
+ * These are files/directories that are considered unrelated to the
deployment and should be left alone.
+ */
+ public Set<String> getSkipped() {
+ return skipped;
+ }
}
diff --git a/modules/core/util/src/main/java/org/rhq/core/util/updater/Deployer.java
b/modules/core/util/src/main/java/org/rhq/core/util/updater/Deployer.java
index 2a4e99d..5116631 100644
--- a/modules/core/util/src/main/java/org/rhq/core/util/updater/Deployer.java
+++ b/modules/core/util/src/main/java/org/rhq/core/util/updater/Deployer.java
@@ -340,7 +340,7 @@ public class Deployer {
FileHashcodeMap original =
this.deploymentsMetadata.getCurrentDeploymentFileHashcodes();
ChangesFileHashcodeMap current =
original.rescan(this.deploymentData.getDestinationDir(), this.deploymentData
- .getIgnoreRegex());
+ .getIgnoreRegex(), this.deploymentData.isManageRootDir());
FileHashcodeMap newFiles = getNewDeploymentFileHashcodeMap();
if (current.getUnknownContent() != null) {
@@ -439,6 +439,9 @@ public class Deployer {
currentFilesToDelete.removeAll(newFiles.keySet());
currentFilesToDelete.removeAll(current.getDeletions().keySet()); // these are
already deleted, no sense trying to delete them again
+ // remember what files were skipped so we don't delete them during our purge
below (only care about this if we are going to 'clean')
+ Set<String> skippedFiles = (clean) ? current.getSkipped() : null;
+
// don't use this anymore - its underlying key set has been altered and this
no longer is the full current files
current = null;
@@ -487,7 +490,7 @@ public class Deployer {
if (clean) {
debug("Cleaning the existing deployment's files found in the
destination directory. dryRun=", dryRun);
if (!dryRun) {
- purgeFileOrDirectory(this.deploymentData.getDestinationDir(), false);
+ purgeFileOrDirectory(this.deploymentData.getDestinationDir(),
skippedFiles, 0, false);
}
}
diff.setCleaned(clean);
@@ -816,8 +819,9 @@ public class Deployer {
restoreBackupFilesRecursive(child, base, destDir, map, diff,
dryRun);
} else {
String childRelativePath =
child.getAbsolutePath().substring(base.length());
+ //if (this.deploymentData.isManageRootDir() || new
File(childRelativePath).getParent() != null) {
File restoredFile = new File(destDir, childRelativePath);
- debug("Restoring backup file [" + child + "] to
[" + restoredFile + "]. dryRun=" + dryRun);
+ debug("Restoring backup file [", child, "] to [",
restoredFile, "]. dryRun=", dryRun);
if (!dryRun) {
restoredFile.getParentFile().mkdirs();
String hashcode = copyFileAndCalcHashcode(child, restoredFile);
@@ -826,6 +830,10 @@ public class Deployer {
map.put(childRelativePath,
MessageDigestGenerator.getDigestString(child));
}
diff.addRestoredFile(childRelativePath, child.getAbsolutePath());
+ //} else {
+ // debug("Skipping the restoration of the backed up file
[", childRelativePath,
+ // "] since this deployment was told to not manage the
root directory");
+ //}
}
}
}
@@ -843,7 +851,7 @@ public class Deployer {
} else {
String childRelativePath =
child.getAbsolutePath().substring(base.length());
File restoredFile = new File(rootDir, childRelativePath);
- debug("Restoring backup file [" + child + "] to
external location [" + restoredFile + "]. dryRun="
+ debug("Restoring backup file [", child, "] to external
location [", restoredFile, "]. dryRun="
+ dryRun);
if (!dryRun) {
restoredFile.getParentFile().mkdirs();
@@ -882,14 +890,20 @@ public class Deployer {
}
}
- private void purgeFileOrDirectory(File fileOrDir, boolean deleteIt) {
+ private void purgeFileOrDirectory(File fileOrDir, Set<String> skippedFiles, int
level, boolean deleteIt) {
// make sure we only purge deployment files, never the metadata directory or its
files
+ // we also want to leave all skipped files alone - don't delete those since
they are unrelated to our deployment
if (fileOrDir != null &&
!fileOrDir.getName().equals(DeploymentsMetadata.METADATA_DIR)) {
if (fileOrDir.isDirectory()) {
File[] doomedFiles = fileOrDir.listFiles();
if (doomedFiles != null) {
for (File doomedFile : doomedFiles) {
- purgeFileOrDirectory(doomedFile, true); // call this method
recursively
+ // Do not purge any skipped files - we want to leave them intact.
+ // All our skipped files are always at the top root dir (level
0),
+ // so we can ignore the skipped set at level 1 or deeper
since there are no skipped files down there
+ if (level != 0 || !skippedFiles.contains(doomedFile.getName()))
{
+ purgeFileOrDirectory(doomedFile, skippedFiles, level + 1,
true); // call this method recursively
+ }
}
}
}
diff --git a/modules/core/util/src/main/java/org/rhq/core/util/updater/DeploymentData.java
b/modules/core/util/src/main/java/org/rhq/core/util/updater/DeploymentData.java
index 32c1a26..b1fac47 100644
--- a/modules/core/util/src/main/java/org/rhq/core/util/updater/DeploymentData.java
+++ b/modules/core/util/src/main/java/org/rhq/core/util/updater/DeploymentData.java
@@ -47,6 +47,7 @@ public class DeploymentData {
private final Set<File> rawFilesToRealize;
private final TemplateEngine templateEngine;
private final Pattern ignoreRegex;
+ private final boolean manageRootDir;
/**
* Constructors that prepares this object with the data that is necessary in order to
deploy archive/file content
@@ -69,10 +70,17 @@ public class DeploymentData {
* @param templateEngine if one or more filesToRealize are specified, this template
engine is used to determine
* the values that should replace all replacement variables
found in those files
* @param ignoreRegex the files/directories to ignore when updating an existing
deployment
+ * @param manageRootDir if false, the top directory where the files will be deployed
(i.e. the destinationDir)
+ * will be left alone. That is, if files already exist there,
they will not be removed or
+ * otherwise merged with this deployment's root files. If
true, this top root directory
+ * will be managed just as any subdirectory within the
deployment will be managed.
+ * The purpose of this is to be able to write files to an
existing directory that has other
+ * unrelated files in it that need to remain intact. e.g. the
deploy/ directory of JBossAS.
+ * Note: regardless of this setting, all subdirectories under
the root dir will be managed.
*/
public DeploymentData(DeploymentProperties deploymentProps, Set<File> zipFiles,
Map<File, File> rawFiles,
File destinationDir, Map<File, Pattern> zipEntriesToRealizeRegex,
Set<File> rawFilesToRealize,
- TemplateEngine templateEngine, Pattern ignoreRegex) {
+ TemplateEngine templateEngine, Pattern ignoreRegex, boolean manageRootDir) {
if (deploymentProps == null) {
throw new IllegalArgumentException("deploymentProps == null");
@@ -96,6 +104,7 @@ public class DeploymentData {
this.rawFiles = rawFiles;
this.destinationDir = destinationDir;
this.ignoreRegex = ignoreRegex;
+ this.manageRootDir = manageRootDir;
// if there is nothing to realize or we have no template engine to obtain
replacement values, then we null things out
if (templateEngine == null || (zipEntriesToRealizeRegex == null &&
rawFilesToRealize == null)) {
@@ -143,4 +152,7 @@ public class DeploymentData {
return ignoreRegex;
}
+ public boolean isManageRootDir() {
+ return manageRootDir;
+ }
}
diff --git
a/modules/core/util/src/main/java/org/rhq/core/util/updater/FileHashcodeMap.java
b/modules/core/util/src/main/java/org/rhq/core/util/updater/FileHashcodeMap.java
index 5945abe..2a68f5d 100644
--- a/modules/core/util/src/main/java/org/rhq/core/util/updater/FileHashcodeMap.java
+++ b/modules/core/util/src/main/java/org/rhq/core/util/updater/FileHashcodeMap.java
@@ -204,18 +204,22 @@ public class FileHashcodeMap extends TreeMap<String, String>
{
*
* The root directory is also scanned for new files that are not in this original
* map - if new files are found (and they do not match the ignoreRegex), they are
added to the
- * returned map.
+ * returned map. Note that if <code>reportNewRootFilesAsNew</code> is
false, and if new files
+ * are found in the top root directory and they are not related to the deployment
fileset,
+ * they will not be added to the returned map.
*
* @param rootDir directory where the relative paths are expected to be
* @param ignoreRegex if relative paths of files under rootDir match this, they will
be ignored.
* This will eliminate files/directories from being considered
"new" because
* they aren't in original.
* @param reportNewRootFilesAsNew if false, do not report as new any unrelated files found
directly in the root dir
* @return a map with current files/hashcodes, including files that were not found in
original.
* the returned object also has additional info such as those files that were
added,
* deleted, changed from this original. It also indicates what was ignored
during the rescan.
* @throws Exception
*/
- public ChangesFileHashcodeMap rescan(File rootDir, Pattern ignoreRegex) throws
Exception {
+ public ChangesFileHashcodeMap rescan(File rootDir, Pattern ignoreRegex, boolean
reportNewRootFilesAsNew)
+ throws Exception {
ChangesFileHashcodeMap current = new ChangesFileHashcodeMap(this);
// go through our original files and recalculate their hashcodes
@@ -253,9 +257,12 @@ public class FileHashcodeMap extends TreeMap<String, String> {
// now recursively traverse the root directory and look for new files that
aren't in our original map
// files that have been added need to be put into our returned map and also
marked as added
FileHashcodeMap newFiles = new FileHashcodeMap();
- lookForNewFilesRecursive(newFiles, rootDir.getAbsolutePath(), 0, rootDir,
ignoreRegex, current.getIgnored());
+ Set<String> skippedFiles = new HashSet<String>();
+ lookForNewFilesRecursive(newFiles, skippedFiles, rootDir.getAbsolutePath(), 0,
rootDir, ignoreRegex, current
+ .getIgnored(), reportNewRootFilesAsNew);
current.putAll(newFiles);
current.getAdditions().putAll(newFiles);
+ current.getSkipped().addAll(skippedFiles);
return current;
}
@@ -264,16 +271,24 @@ public class FileHashcodeMap extends TreeMap<String, String>
{
* This looks for new files under the given fileOrDir and adds them to
<code>newFiles</code>.
*
* @param newFiles the map where the new, current file/hashcode data will be
stored
- * @param rootPath the top root directory that is being scanned
- * @param level the level deep in the file hierarchy currently being processed
(0==at top root dir)
- * @param fileOrDir existing directory/file to rescan
+ * @param skippedFiles a set where names of unrelated files/directories are stored.
The names
+ * found here after this method returns are those files/dirs that
were found
+ * in the top level root dir, but were skipped over and not
processed. This will not
+ * contain paths with subdirectories - they will only be a
filename with no paths
+ * because they are located directly under the root dir.
+ * @param rootPath the top root directory that is being scanned
+ * @param level the level deep in the file hierarchy currently being processed (0==at
top root dir)
+ * @param fileOrDir existing directory/file to rescan
* @param ignoreRegex a regular expression that indicates which files/directories
should be ignored.
* If a relative file/directory path matches this regex, it will
be skipped.
- * @param ignored a set that will contain those files/directories that were ignored
while scanning the root dir
+ * @param ignored a set that will contain those files/directories that were ignored
while scanning the root dir
+ * @param reportNewRootFilesAsNew if false, ignore unrelated files at the root dir
location (level=0)
+ *
* @throws Exception
*/
- private void lookForNewFilesRecursive(FileHashcodeMap newFiles, String rootPath, int
level, File fileOrDir,
- Pattern ignoreRegex, Set<String> ignored) throws Exception {
+ private void lookForNewFilesRecursive(FileHashcodeMap newFiles, Set<String>
skippedFiles, String rootPath,
+ int level, File fileOrDir, Pattern ignoreRegex, Set<String> ignored,
boolean reportNewRootFilesAsNew)
+ throws Exception {
if (fileOrDir == null || !fileOrDir.exists()) {
throw new Exception("Non-existent file/directory provided: " +
fileOrDir);
@@ -301,10 +316,41 @@ public class FileHashcodeMap extends TreeMap<String, String>
{
return;
}
+ // If we are currently at the top level and we are not to report unrelated
files as new
+ // then we need to find out which files ARE related first. Related files are
those files
+ // already in our Map at the root level (i.e. they are just a filename, no
parent paths)
+ // plus those in our Map with a top parent dir name that match a child
directory of
+ // fileOrDir (which is the root dir if we are at level=0).
+ // Note that we use File API to manipulate filenames/parents to ensure we do
this right
+ // on Windows (taking care of relative paths with drive letters, e.g.
C:subdir/file.txt).
+ HashSet<String> relatedTopLevelFiles = null;
+ if (level == 0 && !reportNewRootFilesAsNew) {
+ relatedTopLevelFiles = new HashSet<String>();
+ for (String relatedFilePath : keySet()) { // loop through our Map key
entries, these are the "related" files
+ File relatedFile = new File(relatedFilePath);
+ if (!relatedFile.isAbsolute()) {
+ String topLevelName = relatedFile.getName(); // prime the pump;
if we have no parent, this is our file already at the top root dir
+ File parent = relatedFile.getParentFile();
+ // walk up the file hierarchy until we hit the top parent - this
is the related dir found in our top root
+ while (parent != null) {
+ topLevelName = parent.getName();
+ parent = parent.getParentFile();
+ }
+ relatedTopLevelFiles.add(topLevelName); // this is a file or dir
at the top root dir
+ }
+ }
+ }
+
File[] children = fileOrDir.listFiles();
if (children != null) {
for (File child : children) {
- lookForNewFilesRecursive(newFiles, rootPath, level + 1, child,
ignoreRegex, ignored);
+ // skip this child if we are at the top root dir and it is not
related to our fileset
+ if (relatedTopLevelFiles == null ||
relatedTopLevelFiles.contains(child.getName())) {
+ lookForNewFilesRecursive(newFiles, skippedFiles, rootPath, level
+ 1, child, ignoreRegex,
+ ignored, reportNewRootFilesAsNew);
+ } else {
+ skippedFiles.add(child.getName());
+ }
}
}
} else {
diff --git a/modules/core/util/src/test/java/org/rhq/core/util/updater/DeployerTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/DeployerTest.java
index 50709cc..5f0fca6 100644
--- a/modules/core/util/src/test/java/org/rhq/core/util/updater/DeployerTest.java
+++ b/modules/core/util/src/test/java/org/rhq/core/util/updater/DeployerTest.java
@@ -256,7 +256,7 @@ public class DeployerTest {
Map<File, Pattern> filesToRealizeRegex1 = new HashMap<File,
Pattern>(1);
filesToRealizeRegex1.put(testZipFile1, realizeRegex);
DeploymentData dd = new DeploymentData(deploymentProps, zipFiles, rawFiles,
destDir, filesToRealizeRegex1,
- null, templateEngine, ignoreRegex);
+ null, templateEngine, ignoreRegex, true);
Deployer deployer = new Deployer(dd);
diff = new DeployDifferences();
@@ -292,7 +292,7 @@ public class DeployerTest {
Map<File, Pattern> filesToRealizeRegex2 = new HashMap<File,
Pattern>(1);
filesToRealizeRegex2.put(testZipFile2, realizeRegex);
dd = new DeploymentData(deploymentProps, zipFiles, rawFiles, destDir,
filesToRealizeRegex2, null,
- templateEngine, ignoreRegex);
+ templateEngine, ignoreRegex, true);
deployer = new Deployer(dd);
diff = new DeployDifferences();
deployer.deploy(diff);
@@ -420,7 +420,7 @@ public class DeployerTest {
rawFiles.put(testRawFileA, updaterAabsolute); // source raw file to absolute
path
rawFiles.put(testRawFileBChange1, updaterBabsolute); // source raw file to
absolute path
dd = new DeploymentData(deploymentProps, zipFiles, rawFiles, destDir,
filesToRealizeRegex2, null,
- templateEngine, ignoreRegex);
+ templateEngine, ignoreRegex, true);
deployer = new Deployer(dd);
diff = new DeployDifferences();
deployer.deploy(diff);
@@ -460,7 +460,7 @@ public class DeployerTest {
rawFiles.put(testRawFileAChange, updaterAabsolute); // source raw file to
absolute path
rawFiles.put(testRawFileBChange2, updaterBabsolute); // source raw file to
absolute path
dd = new DeploymentData(deploymentProps, zipFiles, rawFiles, destDir,
filesToRealizeRegex2, null,
- templateEngine, ignoreRegex);
+ templateEngine, ignoreRegex, true);
deployer = new Deployer(dd);
diff = new DeployDifferences();
deployer.deploy(diff);
@@ -526,7 +526,7 @@ public class DeployerTest {
Set<File> realizeRawFiles1 = new HashSet<File>(1);
realizeRawFiles1.add(testRawFileB);
DeploymentData dd = new DeploymentData(deploymentProps, zipFiles, rawFiles,
destDir, null,
- realizeRawFiles1, templateEngine, ignoreRegex);
+ realizeRawFiles1, templateEngine, ignoreRegex, true);
Deployer deployer = new Deployer(dd);
DeployDifferences diff = new DeployDifferences();
FileHashcodeMap map = deployer.deploy(diff);
@@ -580,7 +580,7 @@ public class DeployerTest {
realizeRawFiles1.add(testZipFile2);
realizeRawFiles1.add(testRawFileB);
DeploymentData dd = new DeploymentData(deploymentProps, zipFiles, rawFiles,
destDir, null,
- realizeRawFiles1, templateEngine, ignoreRegex);
+ realizeRawFiles1, templateEngine, ignoreRegex, true);
Deployer deployer = new Deployer(dd);
DeployDifferences listener = new DeployDifferences();
deployer.deploy(listener);
@@ -678,7 +678,7 @@ public class DeployerTest {
realizeRegex1.put(testZipFile1, filesToRealizeRegex);
DeploymentData dd = new DeploymentData(deploymentProps, zipFiles, rawFiles,
destDir, realizeRegex1, null,
- templateEngine, ignoreRegex);
+ templateEngine, ignoreRegex, true);
Deployer deployer = new Deployer(dd);
DeployDifferences listener = new DeployDifferences();
deployer.deploy(listener);
diff --git
a/modules/core/util/src/test/java/org/rhq/core/util/updater/FileHashcodeMapTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/FileHashcodeMapTest.java
index 2fce1e4..034f1e8 100644
--- a/modules/core/util/src/test/java/org/rhq/core/util/updater/FileHashcodeMapTest.java
+++ b/modules/core/util/src/test/java/org/rhq/core/util/updater/FileHashcodeMapTest.java
@@ -77,17 +77,18 @@ public class FileHashcodeMapTest {
assert ignored.contains("ignoreme1.txt") : ignored;
// first test - see that no changes can be detected
- ChangesFileHashcodeMap currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ ChangesFileHashcodeMap currentMap = originalMap.rescan(tmpDir, ignoreRegex,
true);
assertSameMap(originalMap, currentMap);
assert currentMap.getAdditions().isEmpty();
assert currentMap.getDeletions().isEmpty();
assert currentMap.getChanges().isEmpty();
assert currentMap.getIgnored().size() == 1 : currentMap;
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap;
+ assert currentMap.getSkipped().size() == 0;
// second test - change an original file
StreamUtil.copy(new
ByteArrayInputStream("test1-change".getBytes()), new
FileOutputStream(testFile1));
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 2 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -99,10 +100,11 @@ public class FileHashcodeMapTest {
assert
currentMap.getChanges().get("test1.txt").equals(currentMap.get("test1.txt"));
assert currentMap.getIgnored().size() == 1 : currentMap;
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap;
+ assert currentMap.getSkipped().size() == 0;
// third test - delete an original file
assert testFile1.delete() : "could not delete file in order to test
delete-detection";
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 2 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -114,12 +116,13 @@ public class FileHashcodeMapTest {
assert currentMap.getChanges().isEmpty();
assert currentMap.getIgnored().size() == 1 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
// fourth test - add a new file
StreamUtil.copy(new ByteArrayInputStream("test1".getBytes()), new
FileOutputStream(testFile1));
File testFile3 = new File(tmpDir, "test3.txt");
StreamUtil.copy(new ByteArrayInputStream("test3".getBytes()), new
FileOutputStream(testFile3));
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 3 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -133,6 +136,7 @@ public class FileHashcodeMapTest {
assert currentMap.getChanges().isEmpty();
assert currentMap.getIgnored().size() == 1 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
// fifth test - concurrently change a file, delete a file add new file and
add new file in new directory
// changed file: testFile1
@@ -149,7 +153,7 @@ public class FileHashcodeMapTest {
assert ignoreFile2.getParentFile().mkdirs() : "could not create new
subdirectory for test";
StreamUtil.copy(new ByteArrayInputStream("ignore2".getBytes()), new
FileOutputStream(ignoreFile2));
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 4 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -171,6 +175,7 @@ public class FileHashcodeMapTest {
assert currentMap.getIgnored().size() == 2 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
// sixth test - starting from 5th test above, add an absolute path file
absPathFile = File.createTempFile("fileHashcodeMapTestFile",
".test");
@@ -178,7 +183,7 @@ public class FileHashcodeMapTest {
StreamUtil.copy(new ByteArrayInputStream("abs".getBytes()), new
FileOutputStream(absPathFile));
originalMap.put(absPathFile.getAbsolutePath(),
MessageDigestGenerator.getDigestString(absPathFile));
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 5 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -203,10 +208,11 @@ public class FileHashcodeMapTest {
assert currentMap.getIgnored().size() == 2 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
// seventh test - detect that the absolute path file has changed
StreamUtil.copy(new ByteArrayInputStream("abs-changed".getBytes()),
new FileOutputStream(absPathFile));
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 5 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -233,10 +239,11 @@ public class FileHashcodeMapTest {
assert currentMap.getIgnored().size() == 2 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
// eighth test - detect absolute path file has been deleted
assert absPathFile.delete() : "could not delete the absolute path file
for testing";
- currentMap = originalMap.rescan(tmpDir, ignoreRegex);
+ currentMap = originalMap.rescan(tmpDir, ignoreRegex, true);
assert currentMap.size() == 5 : currentMap;
assert currentMap.containsKey("test1.txt") : currentMap;
assert currentMap.containsKey("test2.txt") : currentMap;
@@ -262,6 +269,92 @@ public class FileHashcodeMapTest {
assert currentMap.getIgnored().size() == 2 : currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme1.txt") :
currentMap.getIgnored();
assert currentMap.getIgnored().contains("ignoreme") :
currentMap.getIgnored();
+ assert currentMap.getSkipped().size() == 0;
+
+ } finally {
+ FileUtil.purge(tmpDir, true);
+ if (absPathFile != null) {
+ absPathFile.delete();
+ }
+ }
+ }
+
+ public void testRescanSkipNewRootDirFiles() throws Exception {
+ File absPathFile = null;
+ File tmpDir = FileUtil.createTempDirectory("fileHashcodeMapTest",
".dir", null);
+ try {
+ File testFile1 = new File(tmpDir, "test1.txt");
+ File testFile2 = new File(tmpDir, "testsubdir/test2.txt");
+ File ignoreFile1 = new File(tmpDir, "ignoreme1.txt");
+ Pattern ignoreRegex = Pattern.compile("ignoreme.*");
+
+ assert testFile2.getParentFile().mkdirs() : "could not create
testsubdir";
+
+ StreamUtil.copy(new ByteArrayInputStream("test1".getBytes()), new
FileOutputStream(testFile1));
+ StreamUtil.copy(new ByteArrayInputStream("test2".getBytes()), new
FileOutputStream(testFile2));
+ StreamUtil.copy(new ByteArrayInputStream("ignore1".getBytes()), new
FileOutputStream(ignoreFile1));
+
+ Set<String> ignored = new HashSet<String>();
+ FileHashcodeMap originalMap = FileHashcodeMap.generateFileHashcodeMap(tmpDir,
ignoreRegex, ignored);
+ assert originalMap.size() == 2 : originalMap;
+ assert originalMap.containsKey("test1.txt") : originalMap;
+ assert originalMap.containsKey("testsubdir/test2.txt") :
originalMap;
+ assert ignored.size() == 1 : ignored;
+ assert ignored.contains("ignoreme1.txt") : ignored;
+
+ // concurrently change a file, delete a file add new file and add new file in
new directory
+ // changed file: testFile1
+ // deleted file: testFile2
+ // added file: testFile3 (should not be reported as new, its unrelated)
+ // added file in original directory: testsubdir/testFile4 (should be
reported, its in related dir "testsubdir")
+ // I'll add an "ignoreme" directory just to see that it still
gets ignored (technically it will get skipped)
+ StreamUtil.copy(new
ByteArrayInputStream("test1-change".getBytes()), new
FileOutputStream(testFile1));
+ assert testFile2.delete() : "could not delete file in order to test
delete-detection";
+ File testFile3 = new File(tmpDir, "test3.txt");
+ StreamUtil.copy(new ByteArrayInputStream("test3".getBytes()), new
FileOutputStream(testFile3));
+ File testFile4 = new File(tmpDir, "testsubdir/test4.txt");
+ StreamUtil.copy(new ByteArrayInputStream("test4".getBytes()), new
FileOutputStream(testFile4));
+ File ignoreFile2 = new File(tmpDir, "ignoreme/ignore2.txt");
+ assert ignoreFile2.getParentFile().mkdirs() : "could not create new
subdirectory for test";
+ StreamUtil.copy(new ByteArrayInputStream("ignore2".getBytes()), new
FileOutputStream(ignoreFile2));
+
+ // now add some more unrelated files/dirs - these should not be reported as
new
+ File unrelatedFile1 = new File(tmpDir, "unrelated1.txt");
+ File unrelatedFile2 = new File(tmpDir,
"unrelatedsubdir/unrelated2.txt");
+ assert unrelatedFile2.getParentFile().mkdirs() : "could not create
unrelated subdir";
+ StreamUtil.copy(new ByteArrayInputStream("unrelated1".getBytes()),
new FileOutputStream(unrelatedFile1));
+ StreamUtil.copy(new ByteArrayInputStream("unrelated2".getBytes()),
new FileOutputStream(unrelatedFile2));
+ assert unrelatedFile1.exists(); // sanity check
+ assert unrelatedFile2.exists(); // sanity check
+
+ ChangesFileHashcodeMap currentMap = originalMap.rescan(tmpDir, ignoreRegex,
false);
+ assert currentMap.size() == 3 : currentMap;
+ assert currentMap.containsKey("test1.txt") : currentMap;
+ assert currentMap.containsKey("testsubdir/test2.txt") :
currentMap;
+ assert !currentMap.containsKey("test3.txt") : currentMap;
+ assert currentMap.containsKey("testsubdir" + fileSeparator +
"test4.txt") : currentMap;
+ assert
!currentMap.get("test1.txt").equals(originalMap.get("test1.txt")) :
currentMap + ":" + originalMap;
+ assert
currentMap.get("testsubdir/test2.txt").equals(FileHashcodeMap.DELETED_FILE_HASHCODE)
: currentMap;
+ assert currentMap.get("testsubdir" + fileSeparator +
"test4.txt").equals(
+ MessageDigestGenerator.getDigestString(testFile4)) : currentMap;
+ assert currentMap.getAdditions().size() == 1;
+ assert currentMap.getAdditions().get("testsubdir" + fileSeparator +
"test4.txt").equals(
+ currentMap.get("testsubdir" + fileSeparator +
"test4.txt"));
+ assert currentMap.getDeletions().size() == 1;
+ assert
currentMap.getDeletions().get("testsubdir/test2.txt").equals(currentMap.get("testsubdir/test2.txt"));
+ assert currentMap.getChanges().size() == 1;
+ assert
currentMap.getChanges().get("test1.txt").equals(currentMap.get("test1.txt"));
+ // because we are not managing the root dir, those ignore dirs are totally
skipped since they are in the root dir
+ // therefore, they aren't ignored due to the regex, they are skipped due
to reportNewRootFilesAsNew=false
+ assert currentMap.getIgnored().size() == 0 : currentMap.getIgnored();
+ // now we can look at what was really skipped which were:
+ // test3.txt, unrelatedsubdir, ignoreme, unrelated1.txt, ignoreme1.txt
+ assert currentMap.getSkipped().size() == 5 : currentMap.getSkipped();
+ assert currentMap.getSkipped().contains("test3.txt") :
currentMap.getSkipped();
+ assert currentMap.getSkipped().contains("ignoreme1.txt") :
currentMap.getSkipped();
+ assert currentMap.getSkipped().contains("ignoreme") :
currentMap.getSkipped();
+ assert currentMap.getSkipped().contains("unrelatedsubdir") :
currentMap.getSkipped();
+ assert currentMap.getSkipped().contains("unrelated1.txt") :
currentMap.getSkipped();
} finally {
FileUtil.purge(tmpDir, true);
diff --git
a/modules/core/util/src/test/java/org/rhq/core/util/updater/ManageRootDirTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/ManageRootDirTest.java
new file mode 100644
index 0000000..88d2731
--- /dev/null
+++ b/modules/core/util/src/test/java/org/rhq/core/util/updater/ManageRootDirTest.java
@@ -0,0 +1,1101 @@
+/*
+ * RHQ Management Platform
+ * Copyright (C) 2005-2010 Red Hat, Inc.
+ * All rights reserved.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License, version 2, as
+ * published by the Free Software Foundation, and/or the GNU Lesser
+ * General Public License, version 2.1, also as published by the Free
+ * Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with this program;
+ * if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package org.rhq.core.util.updater;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import org.rhq.core.util.MessageDigestGenerator;
+import org.rhq.core.util.file.FileUtil;
+import org.rhq.core.util.stream.StreamUtil;
+
+/**
+ * Individually tests these situations but with the "manageRootDir" flag off so
we should not
+ * remove files not involved in the deployment that are found in the root destination
dir:
+ *
+ * (X, Y, Z, ? represent hashcodes; none means file doesn't exist):
+ *
+ * ORIGINAL CURRENT NEW What To Do...
+ * a. X X X New file is installed over current*
+ * b. X X Y New file is installed over current
+ * c. X Y X Current file is left as-is
+ * d. X Y Y New file is installed over current*
+ * e. X Y Z New file is installed over current, current is backed up
+ * f. none ? ? New file is installed over current, current is backed up
+ * g. X none ? New file is installed
+ * h. ? ? none Current file deleted, backed up if different than
original
+ *
+ * (*) means the new and current files will actually be the same content
+ *
+ * @author John Mazzitelli
+ */
+@Test
+public class ManageRootDirTest {
+
+ private final String originalContent = "original content";
+ private final String originalFileName = "original_file_name.txt";
+ private final String unrelatedContent = "unrelated file content";
+ private final String unrelatedFileName = "unrelated_file_name.txt";
+ private final String unrelatedDirName = "unrelated_dir";
+ private File tmpDir;
+ private File deployDir;
+ private File originalZipFile;
+ private Set<File> originalZipFiles;
+ private DeploymentProperties originalDeployProps;
+ private FileHashcodeMap originalFileHashcodeMap;
+ private String originalHashcode;
+ private File currentFile;
+ private DeploymentProperties newDeployProps;
+ private DeployDifferences diff;
+ private DeploymentsMetadata metadata;
+ private File unrelatedFile;
+ private String unrelatedFileHashcode;
+ private File unrelatedDir;
+ private File unrelatedDirFile;
+ private String unrelatedDirFileHashcode;
+
+ @BeforeMethod
+ public void beforeMethod() throws Exception {
+ this.tmpDir = FileUtil.createTempDirectory("simpleDeployer_TMP",
".test", null);
+ this.deployDir = FileUtil.createTempDirectory("simpleDeployer",
".test", null);
+ this.unrelatedFile = writeFile(unrelatedContent, deployDir, unrelatedFileName);
+ this.unrelatedFileHashcode =
MessageDigestGenerator.getDigestString(unrelatedContent);
+ this.unrelatedDir = new File(this.deployDir, this.unrelatedDirName);
+ assert this.unrelatedDir.mkdirs() : "Failed to create a test dir";
+ this.unrelatedDirFile = writeFile(unrelatedContent, this.unrelatedDir,
unrelatedFileName);
+ this.unrelatedDirFileHashcode =
MessageDigestGenerator.getDigestString(unrelatedContent);
+ this.originalHashcode = MessageDigestGenerator.getDigestString(originalContent);
+ this.originalZipFile = createZip(originalContent, tmpDir,
"original.zip", originalFileName);
+ this.originalZipFiles = new HashSet<File>(1);
+ this.originalZipFiles.add(originalZipFile);
+ this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
+ DeploymentData dd = new DeploymentData(originalDeployProps, originalZipFiles,
null, deployDir, null, null,
+ null, null, false);
+ Deployer deployer = new Deployer(dd);
+ this.originalFileHashcodeMap = deployer.deploy(null);
+ this.currentFile = new File(deployDir, originalFileName);
+
+ this.newDeployProps = new DeploymentProperties(2, "simple",
"2.0", "new test deployment");
+ this.diff = new DeployDifferences();
+ this.metadata = new DeploymentsMetadata(this.deployDir);
+
+ // sanity check due to my paranoia
+ assert this.currentFile.exists();
+ assert
this.originalHashcode.equals(MessageDigestGenerator.getDigestString(currentFile));
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ assert this.unrelatedFile.exists();
+ assert this.unrelatedDirFile.exists();
+ }
+
+ @AfterMethod(alwaysRun = true)
+ public void afterMethod() throws Exception {
+ FileUtil.purge(this.tmpDir, true);
+ FileUtil.purge(this.deployDir, true);
+ }
+
+ public void testX_X_X() throws Exception {
+ baseX_X_X(false);
+ }
+
+ public void testX_X_Y() throws Exception {
+ baseX_X_Y(false);
+ }
+
+ public void testX_Y_X() throws Exception {
+ baseX_Y_X(false);
+ }
+
+ public void testX_Y_Y() throws Exception {
+ baseX_Y_Y(false);
+ }
+
+ public void testX_Y_Z() throws Exception {
+ baseX_Y_Z(false);
+ }
+
+ public void testX_Y_Z_Restore() throws Exception {
+ baseX_Y_Z_Restore(false);
+ }
+
+ public void testX_Y_Z_Clean() throws Exception {
+ baseX_Y_Z_Clean(false);
+ }
+
+ public void testNoOriginalNoCurrentWithNew() throws Exception {
+ baseNoOriginalNoCurrentWithNew(false);
+ }
+
+ public void testNoOriginalWithCurrentWithNew() throws Exception {
+ baseNoOriginalWithCurrentWithNew(false);
+ }
+
+ public void testNoCurrent() throws Exception {
+ baseNoCurrent(false);
+ }
+
+ public void testNoNew() throws Exception {
+ baseNoNew(false);
+ }
+
+ public void testNoNewWithCurrentDifferentThanOriginal() throws Exception {
+ baseNoNewWithCurrentDifferentThanOriginal(false);
+ }
+
+ public void testX_X_X_DryRun() throws Exception {
+ baseX_X_X(true);
+ }
+
+ public void testX_X_Y_DryRun() throws Exception {
+ baseX_X_Y(true);
+ }
+
+ public void testX_Y_X_DryRun() throws Exception {
+ baseX_Y_X(true);
+ }
+
+ public void testX_Y_Y_DryRun() throws Exception {
+ baseX_Y_Y(true);
+ }
+
+ public void testX_Y_Z_DryRun() throws Exception {
+ baseX_Y_Z(true);
+ }
+
+ public void testX_Y_Z_Restore_DryRun() throws Exception {
+ baseX_Y_Z_Restore(true);
+ }
+
+ public void testX_Y_Z_Clean_DryRun() throws Exception {
+ baseX_Y_Z_Clean(true);
+ }
+
+ public void testNoOriginalNoCurrentWithNew_DryRun() throws Exception {
+ baseNoOriginalNoCurrentWithNew(true);
+ }
+
+ public void testNoOriginalWithCurrentWithNew_DryRun() throws Exception {
+ baseNoOriginalWithCurrentWithNew(true);
+ }
+
+ public void testNoCurrent_DryRun() throws Exception {
+ baseNoCurrent(true);
+ }
+
+ public void testNoNew_DryRun() throws Exception {
+ baseNoNew(true);
+ }
+
+ public void testNoNewWithCurrentDifferentThanOriginal_DryRun() throws Exception {
+ baseNoNewWithCurrentDifferentThanOriginal(true);
+ }
+
+ public void testWithSubdirectories() throws Exception {
+ // this test is different than all the rest, start with clean tmp/dest dirs with
no beforeMethod buildup
+ FileUtil.purge(this.tmpDir, false);
+ FileUtil.purge(this.deployDir, false);
+
+ // fill the deployDir with some unrelated content
+ File unrelated1 = writeFile("unrelated1", this.deployDir,
"unrelated.txt");
+ File unrelated2 = writeFile("unrelated2", this.deployDir,
"unrelateddir/unrelated.txt");
+ assert unrelated1.exists();
+ assert unrelated2.exists();
+
+ // deploy initial content
+ String origFileName1 = "original-file1.txt";
+ String origFileName2 = "subdir/original-file2.txt";
+ this.originalZipFile = createZip(new String[] { "content1",
"content2" }, this.tmpDir, "original.zip",
+ new String[] { origFileName1, origFileName2 });
+ this.originalZipFiles = new HashSet<File>(1);
+ this.originalZipFiles.add(originalZipFile);
+ this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
+ DeploymentData dd = new DeploymentData(originalDeployProps, originalZipFiles,
null, deployDir, null, null,
+ null, null, false);
+ Deployer deployer = new Deployer(dd);
+ this.originalFileHashcodeMap = deployer.deploy(null);
+ assert new File(this.deployDir, origFileName1).exists();
+ assert new File(this.deployDir, origFileName2).exists();
+ assert unrelated1.exists() : "the deployment removed unrelated file1";
+ assert unrelated2.getParentFile().isDirectory() : "the deployment removed an
unrelated dir";
+ assert unrelated2.exists() : "the deployment removed unrelated file2";
+
+ // deploy new content
+ this.newDeployProps = new DeploymentProperties(2, "simple",
"2.0", "new test deployment");
+ this.diff = new DeployDifferences();
+ this.metadata = new DeploymentsMetadata(this.deployDir);
+ String newFileName1 = "new-file1.txt";
+ String newFileName2 = "newsubdir/new-file2.txt";
+ File newZipFile = createZip(new String[] { "newcontent1",
"newcontent2" }, this.tmpDir, "new.zip",
+ new String[] { newFileName1, newFileName2 });
+ HashSet<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+ dd = new DeploymentData(newDeployProps, newZipFiles, null, deployDir, null, null,
null, null, false);
+ deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap = deployer.deploy(this.diff);
+ assert new File(this.deployDir, newFileName1).exists();
+ assert new File(this.deployDir, newFileName2).exists();
+ assert !new File(this.deployDir, origFileName1).exists();
+ assert !new File(this.deployDir, origFileName2).exists();
+ assert unrelated1.exists() : "the deployment removed unrelated file1";
+ assert unrelated2.getParentFile().isDirectory() : "the deployment removed an
unrelated dir";
+ assert unrelated2.exists() : "the deployment removed unrelated file2";
+ assert this.diff.getBackedUpFiles().size() == 0 : this.diff;
+ }
+
+ private void baseX_X_X(boolean dryRun) throws Exception {
+ DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
+ null, false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // nothing changed!
+
+ assert newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(this.originalContent);
+ assert contentHash[1].equals(this.originalHashcode);
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getRestoredFiles().isEmpty() : this.diff;
+ assert !this.diff.wasCleaned() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseX_X_Y(boolean dryRun) throws Exception {
+ String newContent = "testX_X_Y";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ File newZipFile = createZip(newContent, tmpDir, "new-content.zip",
originalFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new file changed the original file. The current file was never touched, so
this is a simple upgrade
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(originalFileName).equals(newHashcode);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ if (dryRun) {
+ assert contentHash[0].equals(originalContent);
+ assert contentHash[1].equals(originalHashcode);
+ } else {
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 1 : this.diff;
+ assert this.diff.getChangedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseX_Y_X(boolean dryRun) throws Exception {
+ String newContent = "testX_Y_X";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ writeFile(newContent, this.currentFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
+ null, false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // very important to understand this - even though the current file is changed,
the hashcode
+ // stored in the map and the metadata directory is the ORIGINAL hashcode. This is
to make it
+ // known that the new deployment itself is the same as the original deployment.
It is just
+ // that we allow the user's manual changes to continue to live on in the
filesystem. However,
+ // if a newer deployment comes along in the future and changes the new file, this
current file
+ // must be updated/backed up as appropriate and the only way to know when that
happens is if
+ // the metadata retains the original/new hashcode and not the current one.
+
+ assert newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+
+ assertUnrelatedFiles();
+ // note nothing changed - our current file remains as is
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseX_Y_Y(boolean dryRun) throws Exception {
+ String newContent = "testX_Y_Y";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ writeFile(newContent, this.currentFile);
+ File newZipFile = createZip(newContent, tmpDir, "new-content.zip",
originalFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new file changed the original, but our current file has already been
manually updated
+ // to match the new file. Therefore, the current file doesn't have to change
its content.
+ // Technically, the file could be overwritten, but the content will still be the
same.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(originalFileName).equals(newHashcode);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseX_Y_Z(boolean dryRun) throws Exception {
+ String newContentY = "testX_Y_Z_YYY";
+ writeFile(newContentY, this.currentFile);
+ String newHashcodeY = MessageDigestGenerator.getDigestString(newContentY);
+
+ String newContentZ = "testX_Y_Z_ZZZ";
+ String newHashcodeZ = MessageDigestGenerator.getDigestString(newContentZ);
+ File newZipFile = createZip(newContentZ, tmpDir, "new-content.zip",
originalFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new file changed the original, and our current file has been manually
updated
+ // but that current file's change does not match the new file. Therefore, the
current file
+ // is out of date. The safest thing to do is backup the current and copy the new
file
+ // to become the current file.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(originalFileName).equals(newHashcodeZ);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ if (dryRun) {
+ assert contentHash[0].equals(newContentY);
+ assert contentHash[1].equals(newHashcodeY);
+ } else {
+ assert contentHash[0].equals(newContentZ);
+ assert contentHash[1].equals(newHashcodeZ);
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 1 : this.diff;
+ assert this.diff.getChangedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getBackedUpFiles().size() == 1 : this.diff;
+ assert this.diff.getBackedUpFiles().containsKey(originalFileName) : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+
+ // verify the backup copy
+ File backupFile = new File(this.diff.getBackedUpFiles().get(originalFileName));
+ if (dryRun) {
+ assert !backupFile.exists() : "dry run should not create backup";
+ } else {
+ assert readFile(backupFile).equals(newContentY) : "did not backup the
correct file?";
+ }
+ }
+
+ private void baseNoOriginalNoCurrentWithNew(boolean dryRun) throws Exception {
+ String newContent = "new content";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ String newFileName = "new_filename.new";
+ File newZipFile = createZip(new String[] { originalContent, newContent }, tmpDir,
"new.zip", new String[] {
+ originalFileName, newFileName });
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // the new deployment introduces a new file. This is simple - its just added to
the filesystem
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 2;
+ assert newFileHashcodeMap.get(originalFileName).equals(originalHashcode);
+ assert newFileHashcodeMap.get(newFileName).equals(newHashcode);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(originalContent);
+ assert contentHash[1].equals(originalHashcode);
+ try {
+ contentHash = getFilenameContentHashcode(newFileName);
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+ } catch (FileNotFoundException e) {
+ // this is expected if we are in a dry run
+ if (!dryRun) {
+ throw e;
+ }
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().size() == 1 : this.diff;
+ assert this.diff.getAddedFiles().contains(newFileName) : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseNoOriginalWithCurrentWithNew(boolean dryRun) throws Exception {
+ String newContent = "new content";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ String newFileName = "new_filename.new";
+ File newZipFile = createZip(new String[] { originalContent, newContent }, tmpDir,
"new.zip", new String[] {
+ originalFileName, newFileName });
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ File inTheWayFile = new File(this.deployDir, newFileName);
+ String inTheWayContent = "this is in the way";
+ String inTheWayHashcode =
MessageDigestGenerator.getDigestString(inTheWayContent);
+ writeFile(inTheWayContent, inTheWayFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new deployment introduces a new file. However, there is already a current
file at the new file location.
+ // That current file is unknown and in the way.
+ // This normally is considered a "change" not an "addition"
since the file system already had the file, it just got changed.
+ // Also, normally, it would be backed up.
+ // However, because we are not managing the root dir, that file "in the
way" is simply skipped and not backed up.
+ // But because our new zip has the same file in it, it is considered
"new" thus it will be "added" to the
+ // set of managed files.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 2;
+ assert newFileHashcodeMap.get(originalFileName).equals(originalHashcode);
+ assert newFileHashcodeMap.get(newFileName).equals(newHashcode);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(originalContent);
+ assert contentHash[1].equals(originalHashcode);
+ contentHash = getFilenameContentHashcode(newFileName);
+ if (dryRun) {
+ assert contentHash[0].equals(inTheWayContent);
+ assert contentHash[1].equals(inTheWayHashcode);
+ } else {
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().size() == 1 : this.diff;
+ assert this.diff.getAddedFiles().contains(newFileName) : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 0 : this.diff;
+ assert this.diff.getBackedUpFiles().size() == 0 : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseNoCurrent(boolean dryRun) throws Exception {
+ assert this.currentFile.delete() : "Failed to delete the current file,
cannot prepare the test";
+
+ DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
+ null, false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // its the same deployment as before, except someone deleted our current file.
+ // This adds the new file back (which is the same as the original).
+
+ assert newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ try {
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(this.originalContent);
+ assert contentHash[1].equals(this.originalHashcode);
+ } catch (FileNotFoundException e) {
+ // this is expected if we are in a dry run
+ if (!dryRun) {
+ throw e;
+ }
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().size() == 1 : this.diff;
+ assert this.diff.getAddedFiles().contains(this.originalFileName) : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseNoNew(boolean dryRun) throws Exception {
+ String newContent = "new content";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ String newFileName = "new_filename.new";
+ File newZipFile = createZip(newContent, tmpDir, "new.zip",
newFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new deployment removes a file that was in the original (it also introduces
a new file).
+ // There is already a current file at the original file location that is the same
as the original, as you would expect.
+ // That current file is to be deleted (since its not in the new deployment) and
is not backed up since it is
+ // the same as the original.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(newFileName).equals(newHashcode);
+ if (dryRun) {
+ assert this.currentFile.exists() : "this should have been left
as-is";
+ } else {
+ assert !this.currentFile.exists() : "this should have been
deleted";
+ }
+ try {
+ String[] contentHash = getFilenameContentHashcode(newFileName);
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+ } catch (FileNotFoundException e) {
+ // this is expected if we are in a dry run
+ if (!dryRun) {
+ throw e;
+ }
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().size() == 1 : this.diff;
+ assert this.diff.getAddedFiles().contains(newFileName) : this.diff;
+ assert this.diff.getDeletedFiles().size() == 1 : this.diff;
+ assert this.diff.getDeletedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+ }
+
+ private void baseNoNewWithCurrentDifferentThanOriginal(boolean dryRun) throws
Exception {
+ String currentContent = "modified content";
+ writeFile(currentContent, this.currentFile);
+
+ String newContent = "new content";
+ String newHashcode = MessageDigestGenerator.getDigestString(newContent);
+ String newFileName = "new_filename.new";
+ File newZipFile = createZip(newContent, tmpDir, "new.zip",
newFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ if (dryRun) {
+ newFileHashcodeMap = deployer.dryRun(this.diff);
+ } else {
+ newFileHashcodeMap = deployer.deploy(this.diff);
+ }
+
+ // The new deployment removes a file that was in the original (it also introduces
a new file).
+ // However, there is already a current file at the original file location as you
would expect but
+ // its different than the original.
+ // That current file is to be deleted (since its not in the new deployment) and
it must be backed up
+ // since it looks modified from the original.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(newFileName).equals(newHashcode);
+ if (dryRun) {
+ assert this.currentFile.exists() : "this should have been left
as-is";
+ } else {
+ assert !this.currentFile.exists() : "this should have been
deleted";
+ }
+ try {
+ String[] contentHash = getFilenameContentHashcode(newFileName);
+ assert contentHash[0].equals(newContent);
+ assert contentHash[1].equals(newHashcode);
+ } catch (FileNotFoundException e) {
+ // this is expected if we are in a dry run
+ if (!dryRun) {
+ throw e;
+ }
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().size() == 1 : this.diff;
+ assert this.diff.getAddedFiles().contains(newFileName) : this.diff;
+ assert this.diff.getDeletedFiles().size() == 1 : this.diff;
+ assert this.diff.getDeletedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getChangedFiles().isEmpty() : this.diff;
+ assert this.diff.getBackedUpFiles().size() == 1 : this.diff;
+ assert this.diff.getBackedUpFiles().containsKey(originalFileName) : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+
+ // verify the backup copy
+ File backupFile = new File(this.diff.getBackedUpFiles().get(originalFileName));
+ if (dryRun) {
+ assert !backupFile.exists() : "dry run should not create backup";
+ } else {
+ assert readFile(backupFile).equals(currentContent) : "did not backup the
correct file?";
+ }
+ }
+
+ private void baseX_Y_Z_Restore(boolean dryRun) throws Exception {
+ String newContentY = "testX_Y_Z_YYY";
+ writeFile(newContentY, this.currentFile);
+ String newHashcodeY = MessageDigestGenerator.getDigestString(newContentY);
+
+ String newContentZ = "testX_Y_Z_ZZZ";
+ String newHashcodeZ = MessageDigestGenerator.getDigestString(newContentZ);
+ File newZipFile = createZip(newContentZ, tmpDir, "new-content.zip",
originalFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ newFileHashcodeMap = deployer.deploy(this.diff); // no dry run - we need to do
this to force backup file creation
+
+ // The new file changed the original, and our current file has been manually
updated
+ // but that current file's change does not match the new file. Therefore, the
current file
+ // is out of date. The safest thing to do is backup the current and copy the new
file
+ // to become the current file.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(originalFileName).equals(newHashcodeZ);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ assert contentHash[0].equals(newContentZ);
+ assert contentHash[1].equals(newHashcodeZ);
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 1 : this.diff;
+ assert this.diff.getChangedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getBackedUpFiles().size() == 1 : this.diff;
+ assert this.diff.getBackedUpFiles().containsKey(originalFileName) : this.diff;
+ assert this.diff.getRestoredFiles().isEmpty() : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ assert this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+
+ // verify the backup copy
+ File backupFile = new File(this.diff.getBackedUpFiles().get(originalFileName));
+ assert readFile(backupFile).equals(newContentY) : "did not backup the
correct file?";
+
+ // all we did so far was upgrade to v2 and created a backup file, now we need to
redeploy v1 and see the backup restored
+ DeploymentProperties v1Duplicate = new DeploymentProperties();
+ v1Duplicate.putAll(this.originalDeployProps);
+ v1Duplicate.setDeploymentId(3); // this is the same as v1, but it needs a unique
deployment ID
+ dd = new DeploymentData(v1Duplicate, originalZipFiles, null, deployDir, null,
null, null, null, false);
+ deployer = new Deployer(dd);
+ this.diff = new DeployDifferences();
+ FileHashcodeMap restoreFileHashcodeMap;
+ restoreFileHashcodeMap = deployer.redeployAndRestoreBackupFiles(this.diff, false,
dryRun);
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 1 : this.diff;
+ assert this.diff.getChangedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getBackedUpFiles().isEmpty() : this.diff;
+ assert this.diff.getRestoredFiles().size() == 1 : this.diff;
+ assert this.diff.getRestoredFiles().containsKey(originalFileName) : this.diff;
+ assert this.diff.getIgnoredFiles().isEmpty() : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ assert restoreFileHashcodeMap.get(originalFileName).equals(newHashcodeY) :
"hashcode doesn't reflect restored backup";
+
+ if (dryRun) {
+ // still our v2
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ } else {
+ // we reverted back to v1 with the manual changes
+ assert this.metadata.getCurrentDeploymentProperties().equals(v1Duplicate);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(restoreFileHashcodeMap);
+ assert
MessageDigestGenerator.getDigestString(this.currentFile).equals(newHashcodeY) : "file
wasn't restored";
+ }
+ }
+
+ private void baseX_Y_Z_Clean(boolean dryRun) throws Exception {
+ String newContentY = "testX_Y_Z_YYY";
+ writeFile(newContentY, this.currentFile);
+ String newHashcodeY = MessageDigestGenerator.getDigestString(newContentY);
+
+ String newContentZ = "testX_Y_Z_ZZZ";
+ String newHashcodeZ = MessageDigestGenerator.getDigestString(newContentZ);
+ File newZipFile = createZip(newContentZ, tmpDir, "new-content.zip",
originalFileName);
+ Set<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+
+ File ignoredSubdir = new File(this.deployDir, "ignoreSubdir");
+ File ignoredFile = new File(ignoredSubdir, "ignore-me.txt");
+ ignoredSubdir.mkdirs();
+ writeFile("ignored content", ignoredFile);
+ Pattern iRegex = Pattern.compile(".*ignoreSubdir.*"); // this matches
the subdirectory name, thus everything under it is ignored
+ assert ignoredFile.exists() : "for some reason we couldn't create our
test file; cannot know if clean worked";
+
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, iRegex,
+ false);
+ Deployer deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap;
+ newFileHashcodeMap = deployer.deploy(this.diff, true, dryRun); // note: clean is
true
+
+ // The new file changed the original, and our current file has been manually
updated
+ // but that current file's change does not match the new file. Therefore, the
current file
+ // is out of date. The safest thing to do is backup the current and copy the new
file
+ // to become the current file.
+
+ assert !newFileHashcodeMap.equals(this.originalFileHashcodeMap);
+ assert newFileHashcodeMap.size() == 1;
+ assert newFileHashcodeMap.get(originalFileName).equals(newHashcodeZ);
+ String[] contentHash = getOriginalFilenameContentHashcode();
+ if (dryRun) {
+ assert contentHash[0].equals(newContentY);
+ assert contentHash[1].equals(newHashcodeY);
+ } else {
+ assert contentHash[0].equals(newContentZ);
+ assert contentHash[1].equals(newHashcodeZ);
+ }
+
+ assertUnrelatedFiles();
+ assert this.diff.getAddedFiles().isEmpty() : this.diff;
+ assert this.diff.getDeletedFiles().isEmpty() : this.diff;
+ assert this.diff.getChangedFiles().size() == 1 : this.diff;
+ assert this.diff.getChangedFiles().contains(originalFileName) : this.diff;
+ assert this.diff.getBackedUpFiles().size() == 1 : this.diff;
+ assert this.diff.getBackedUpFiles().containsKey(originalFileName) : this.diff;
+ // because we are not managing the root dir, that ignore dir found at the root
level is skipped
+ // so it never gets to the point where it matches the ignore regex therefore it
doesn't count as ignored.
+ assert this.diff.getIgnoredFiles().size() == 0 : this.diff;
+ assert this.diff.getRealizedFiles().isEmpty() : this.diff;
+ assert this.diff.wasCleaned() : this.diff;
+ assert this.diff.getErrors().isEmpty() : this.diff;
+
+ if (dryRun) {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(originalDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(originalFileHashcodeMap);
+ } else {
+ assert
this.metadata.getCurrentDeploymentProperties().equals(newDeployProps);
+ assert
this.metadata.getCurrentDeploymentFileHashcodes().equals(newFileHashcodeMap);
+ }
+
+ // verify the backup copy
+ File backupFile = new File(this.diff.getBackedUpFiles().get(originalFileName));
+ if (dryRun) {
+ assert !backupFile.exists() : "dry run should not create backup";
+ } else {
+ assert readFile(backupFile).equals(newContentY) : "did not backup the
correct file?";
+ }
+
+ // even though we cleaned, the ignored subdir and its file should have been
skipped due to
+ // the fact were told the deployer not to manage the top root dir. Therefore,
they should still exist.
+ if (dryRun) {
+ assert ignoredSubdir.isDirectory() : "dry run should not have really
cleaned";
+ assert ignoredFile.exists() : "dry run should not have really
cleaned";
+ } else {
+ assert ignoredSubdir.isDirectory() : "should not have really
cleaned";
+ assert ignoredFile.exists() : "should not have really cleaned";
+ }
+ }
+
+ private String[] getOriginalFilenameContentHashcode() throws Exception {
+ return getFilenameContentHashcode(this.originalFileName);
+ }
+
+ private String[] getFilenameContentHashcode(String filename) throws Exception {
+ String content = readFile(new File(this.deployDir, filename));
+ String hashcode = MessageDigestGenerator.getDigestString(content);
+ String[] contentHash = new String[] { content, hashcode };
+ return contentHash;
+ }
+
+ private String readFile(File file) throws Exception {
+ return new String(StreamUtil.slurp(new FileInputStream(file)));
+ }
+
+ private File writeFile(String content, File fileToOverwrite) throws Exception {
+ FileOutputStream out = null;
+
+ try {
+ fileToOverwrite.getParentFile().mkdirs();
+ out = new FileOutputStream(fileToOverwrite);
+ out.write(content.getBytes());
+ return fileToOverwrite;
+ } finally {
+ if (out != null) {
+ out.close();
+ }
+ }
+ }
+
+ private File writeFile(String content, File destDir, String fileName) throws
Exception {
+ File destFile = new File(destDir, fileName);
+ return writeFile(content, destFile);
+ }
+
+ private File createZip(String content, File destDir, String zipName, String
entryName) throws Exception {
+ FileOutputStream stream = null;
+ ZipOutputStream out = null;
+
+ try {
+ destDir.mkdirs();
+ File zipFile = new File(destDir, zipName);
+ stream = new FileOutputStream(zipFile);
+ out = new ZipOutputStream(stream);
+
+ ZipEntry zipAdd = new ZipEntry(entryName);
+ zipAdd.setTime(System.currentTimeMillis());
+ out.putNextEntry(zipAdd);
+ out.write(content.getBytes());
+ return zipFile;
+ } finally {
+ if (out != null) {
+ out.close();
+ }
+ if (stream != null) {
+ stream.close();
+ }
+ }
+ }
+
+ private File createZip(String[] content, File destDir, String zipName, String[]
entryName) throws Exception {
+ FileOutputStream stream = null;
+ ZipOutputStream out = null;
+
+ try {
+ destDir.mkdirs();
+ File zipFile = new File(destDir, zipName);
+ stream = new FileOutputStream(zipFile);
+ out = new ZipOutputStream(stream);
+
+ assert content.length == entryName.length;
+ for (int i = 0; i < content.length; i++) {
+ ZipEntry zipAdd = new ZipEntry(entryName[i]);
+ zipAdd.setTime(System.currentTimeMillis());
+ out.putNextEntry(zipAdd);
+ out.write(content[i].getBytes());
+ }
+ return zipFile;
+ } finally {
+ if (out != null) {
+ out.close();
+ }
+ if (stream != null) {
+ stream.close();
+ }
+ }
+ }
+
+ private void assertUnrelatedFiles() throws Exception {
+ // make sure we didn't delete or alter the file or dir that are unrelated to
the deployed content
+ // the dir and its file...
+ assert this.unrelatedDir.isDirectory() : "unrelated directory was deleted:
" + this.unrelatedDir;
+ assert this.unrelatedDirFile.exists() : "unrelated dir file was deleted:
" + this.unrelatedDirFile;
+ String hash = MessageDigestGenerator.getDigestString(this.unrelatedDirFile);
+ assert this.unrelatedDirFileHashcode.equals(hash) : "unrelated dir file was
changed: " + this.unrelatedDirFile;
+
+ // the file located directly in the deployDir...
+ assert this.unrelatedFile.exists() : "unrelated file was deleted: " +
this.unrelatedFile;
+ hash = MessageDigestGenerator.getDigestString(this.unrelatedFile);
+ assert this.unrelatedFileHashcode.equals(hash) : "unrelated file was
changed: " + this.unrelatedFile;
+ }
+}
diff --git
a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawFileTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawFileTest.java
index e1eafcb..16275ef 100644
---
a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawFileTest.java
+++
b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawFileTest.java
@@ -93,7 +93,7 @@ public class SimpleDeployerRawFileTest {
this.sourceRawFiles.put(sourceRawFile, new File(extDir, originalFileName)); //
note we name it different than the source file
this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
DeploymentData dd = new DeploymentData(originalDeployProps, null, sourceRawFiles,
deployDir, null, null, null,
- null);
+ null, true);
Deployer deployer = new Deployer(dd);
this.originalFileHashcodeMap = deployer.deploy(null);
this.currentFile = sourceRawFiles.get(sourceRawFile);
@@ -206,7 +206,8 @@ public class SimpleDeployerRawFileTest {
}
private void baseX_X_X(boolean dryRun) throws Exception {
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -248,7 +249,8 @@ public class SimpleDeployerRawFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -294,7 +296,8 @@ public class SimpleDeployerRawFileTest {
String newHashcode = MessageDigestGenerator.getDigestString(newContent);
writeFile(newContent, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -342,7 +345,8 @@ public class SimpleDeployerRawFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -390,7 +394,8 @@ public class SimpleDeployerRawFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -453,7 +458,8 @@ public class SimpleDeployerRawFileTest {
File newDestRawFile = new File(extDir, newFileName);
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -515,7 +521,8 @@ public class SimpleDeployerRawFileTest {
String inTheWayHashcode =
MessageDigestGenerator.getDigestString(inTheWayContent);
writeFile(inTheWayContent, inTheWayFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -575,7 +582,8 @@ public class SimpleDeployerRawFileTest {
private void baseNoCurrent(boolean dryRun) throws Exception {
assert this.currentFile.delete() : "Failed to delete the current file,
cannot prepare the test";
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -626,7 +634,8 @@ public class SimpleDeployerRawFileTest {
File newDestRawFile = new File(extDir, newFileName);
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -690,7 +699,8 @@ public class SimpleDeployerRawFileTest {
File newDestRawFile = new File(extDir, newFileName);
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -763,7 +773,8 @@ public class SimpleDeployerRawFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
newFileHashcodeMap = deployer.deploy(this.diff); // no dry run - we need to do
this to force backup file creation
@@ -802,7 +813,7 @@ public class SimpleDeployerRawFileTest {
DeploymentProperties v1Duplicate = new DeploymentProperties();
v1Duplicate.putAll(this.originalDeployProps);
v1Duplicate.setDeploymentId(3); // this is the same as v1, but it needs a unique
deployment ID
- dd = new DeploymentData(v1Duplicate, null, sourceRawFiles, deployDir, null, null,
null, null);
+ dd = new DeploymentData(v1Duplicate, null, sourceRawFiles, deployDir, null, null,
null, null, true);
deployer = new Deployer(dd);
this.diff = new DeployDifferences();
FileHashcodeMap restoreFileHashcodeMap;
diff --git
a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawRelativeFileTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawRelativeFileTest.java
index cf366cb..5a0f842 100644
---
a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawRelativeFileTest.java
+++
b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerRawRelativeFileTest.java
@@ -91,7 +91,7 @@ public class SimpleDeployerRawRelativeFileTest {
this.sourceRawFiles.put(sourceRawFile, new File(originalFileName)); // RELATIVE!
note we name it different than the source file
this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
DeploymentData dd = new DeploymentData(originalDeployProps, null, sourceRawFiles,
deployDir, null, null, null,
- null);
+ null, true);
Deployer deployer = new Deployer(dd);
this.originalFileHashcodeMap = deployer.deploy(null);
this.currentFile = sourceRawFiles.get(sourceRawFile);
@@ -205,7 +205,8 @@ public class SimpleDeployerRawRelativeFileTest {
}
private void baseX_X_X(boolean dryRun) throws Exception {
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -247,7 +248,8 @@ public class SimpleDeployerRawRelativeFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -293,7 +295,8 @@ public class SimpleDeployerRawRelativeFileTest {
String newHashcode = MessageDigestGenerator.getDigestString(newContent);
writeFile(newContent, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -341,7 +344,8 @@ public class SimpleDeployerRawRelativeFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -389,7 +393,8 @@ public class SimpleDeployerRawRelativeFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -452,7 +457,8 @@ public class SimpleDeployerRawRelativeFileTest {
File newDestRawFile = new File(newFileName); // RELATIVE!
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -514,7 +520,8 @@ public class SimpleDeployerRawRelativeFileTest {
String inTheWayHashcode =
MessageDigestGenerator.getDigestString(inTheWayContent);
writeFile(inTheWayContent, inTheWayFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -573,7 +580,8 @@ public class SimpleDeployerRawRelativeFileTest {
private void baseNoCurrent(boolean dryRun) throws Exception {
assert this.currentFileWithAbsolutePath.delete() : "Failed to delete the
current file, cannot prepare the test";
- DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, sourceRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -624,7 +632,8 @@ public class SimpleDeployerRawRelativeFileTest {
File newDestRawFile = new File(newFileName); // RELATIVE!
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -688,7 +697,8 @@ public class SimpleDeployerRawRelativeFileTest {
File newDestRawFile = new File(newFileName); // RELATIVE!
newRawFiles.put(newRawFile, newDestRawFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -761,7 +771,8 @@ public class SimpleDeployerRawRelativeFileTest {
Map<File, File> newRawFiles = new HashMap<File, File>(1);
newRawFiles.put(newRawFile, this.currentFile);
- DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, null, newRawFiles,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
newFileHashcodeMap = deployer.deploy(this.diff); // no dry run - we need to do
this to force backup file creation
@@ -800,7 +811,7 @@ public class SimpleDeployerRawRelativeFileTest {
DeploymentProperties v1Duplicate = new DeploymentProperties();
v1Duplicate.putAll(this.originalDeployProps);
v1Duplicate.setDeploymentId(3); // this is the same as v1, but it needs a unique
deployment ID
- dd = new DeploymentData(v1Duplicate, null, sourceRawFiles, deployDir, null, null,
null, null);
+ dd = new DeploymentData(v1Duplicate, null, sourceRawFiles, deployDir, null, null,
null, null, true);
deployer = new Deployer(dd);
this.diff = new DeployDifferences();
FileHashcodeMap restoreFileHashcodeMap;
diff --git
a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerTest.java
b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerTest.java
index 24884ed..712a984 100644
--- a/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerTest.java
+++ b/modules/core/util/src/test/java/org/rhq/core/util/updater/SimpleDeployerTest.java
@@ -87,7 +87,7 @@ public class SimpleDeployerTest {
this.originalZipFiles.add(originalZipFile);
this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
DeploymentData dd = new DeploymentData(originalDeployProps, originalZipFiles,
null, deployDir, null, null,
- null, null);
+ null, null, true);
Deployer deployer = new Deployer(dd);
this.originalFileHashcodeMap = deployer.deploy(null);
this.currentFile = new File(deployDir, originalFileName);
@@ -205,9 +205,64 @@ public class SimpleDeployerTest {
baseNoNewWithCurrentDifferentThanOriginal(true);
}
+ public void testWithSubdirectories() throws Exception {
+ // this test is different from all the rest; start with clean tmp/dest dirs with
no beforeMethod buildup
+ FileUtil.purge(this.tmpDir, false);
+ FileUtil.purge(this.deployDir, false);
+
+ // fill the deployDir with some unrelated content
+ String unrelatedFileName1 = "unrelated.txt";
+ String unrelatedFileName2 = "unrelateddir/unrelated.txt";
+ File unrelated1 = writeFile("unrelated1", this.deployDir,
unrelatedFileName1);
+ File unrelated2 = writeFile("unrelated2", this.deployDir,
unrelatedFileName2);
+ assert unrelated1.exists();
+ assert unrelated2.exists();
+
+ // deploy initial content
+ String origFileName1 = "original-file1.txt";
+ String origFileName2 = "subdir/original-file2.txt";
+ this.originalZipFile = createZip(new String[] { "content1",
"content2" }, this.tmpDir, "original.zip",
+ new String[] { origFileName1, origFileName2 });
+ this.originalZipFiles = new HashSet<File>(1);
+ this.originalZipFiles.add(originalZipFile);
+ this.originalDeployProps = new DeploymentProperties(1, "simple",
"1.0", "original test deployment");
+ DeploymentData dd = new DeploymentData(originalDeployProps, originalZipFiles,
null, deployDir, null, null,
+ null, null, true);
+ Deployer deployer = new Deployer(dd);
+ this.originalFileHashcodeMap = deployer.deploy(null);
+ assert new File(this.deployDir, origFileName1).exists();
+ assert new File(this.deployDir, origFileName2).exists();
+ assert unrelated1.exists() : "the deployment removed unrelated file1";
+ assert unrelated2.getParentFile().isDirectory() : "the deployment removed an
unrelated dir";
+ assert unrelated2.exists() : "the deployment removed unrelated file2";
+
+ // deploy new content
+ this.newDeployProps = new DeploymentProperties(2, "simple",
"2.0", "new test deployment");
+ this.diff = new DeployDifferences();
+ this.metadata = new DeploymentsMetadata(this.deployDir);
+ String newFileName1 = "new-file1.txt";
+ String newFileName2 = "newsubdir/new-file2.txt";
+ File newZipFile = createZip(new String[] { "newcontent1",
"newcontent2" }, this.tmpDir, "new.zip",
+ new String[] { newFileName1, newFileName2 });
+ HashSet<File> newZipFiles = new HashSet<File>(1);
+ newZipFiles.add(newZipFile);
+ dd = new DeploymentData(newDeployProps, newZipFiles, null, deployDir, null, null,
null, null, true);
+ deployer = new Deployer(dd);
+ FileHashcodeMap newFileHashcodeMap = deployer.deploy(this.diff);
+ assert new File(this.deployDir, newFileName1).exists();
+ assert new File(this.deployDir, newFileName2).exists();
+ assert !new File(this.deployDir, origFileName1).exists();
+ assert !new File(this.deployDir, origFileName2).exists();
+ assert !unrelated1.exists() : "the deployment did not remove unrelated
file1";
+ assert !unrelated2.exists() : "the deployment did not remove unrelated
file2";
+ assert this.diff.getBackedUpFiles().size() == 2 : this.diff;
+ assert new File(this.diff.getBackedUpFiles().get(unrelatedFileName1)).exists() :
this.diff;
+ assert new File(this.diff.getBackedUpFiles().get(unrelatedFileName2)).exists() :
this.diff;
+ }
+
private void baseX_X_X(boolean dryRun) throws Exception {
DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
- null);
+ null, true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -249,7 +304,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -296,7 +352,7 @@ public class SimpleDeployerTest {
writeFile(newContent, this.currentFile);
DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
- null);
+ null, true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -344,7 +400,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -392,7 +449,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -454,7 +512,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -515,7 +574,8 @@ public class SimpleDeployerTest {
String inTheWayHashcode =
MessageDigestGenerator.getDigestString(inTheWayContent);
writeFile(inTheWayContent, inTheWayFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -575,7 +635,7 @@ public class SimpleDeployerTest {
assert this.currentFile.delete() : "Failed to delete the current file,
cannot prepare the test";
DeploymentData dd = new DeploymentData(newDeployProps, originalZipFiles, null,
deployDir, null, null, null,
- null);
+ null, true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -625,7 +685,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -688,7 +749,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
if (dryRun) {
@@ -761,7 +823,8 @@ public class SimpleDeployerTest {
Set<File> newZipFiles = new HashSet<File>(1);
newZipFiles.add(newZipFile);
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, null,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
newFileHashcodeMap = deployer.deploy(this.diff); // no dry run - we need to do
this to force backup file creation
@@ -800,7 +863,7 @@ public class SimpleDeployerTest {
DeploymentProperties v1Duplicate = new DeploymentProperties();
v1Duplicate.putAll(this.originalDeployProps);
v1Duplicate.setDeploymentId(3); // this is the same as v1, but it needs a unique
deployment ID
- dd = new DeploymentData(v1Duplicate, originalZipFiles, null, deployDir, null,
null, null, null);
+ dd = new DeploymentData(v1Duplicate, originalZipFiles, null, deployDir, null,
null, null, null, true);
deployer = new Deployer(dd);
this.diff = new DeployDifferences();
FileHashcodeMap restoreFileHashcodeMap;
@@ -849,7 +912,8 @@ public class SimpleDeployerTest {
Pattern iRegex = Pattern.compile(".*ignoreSubdir.*"); // this matches
the subdirectory name, thus everything under it is ignored
assert ignoredFile.exists() : "for some reason we couldn't create our
test file; cannot know if clean worked";
- DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, iRegex);
+ DeploymentData dd = new DeploymentData(newDeployProps, newZipFiles, null,
deployDir, null, null, null, iRegex,
+ true);
Deployer deployer = new Deployer(dd);
FileHashcodeMap newFileHashcodeMap;
newFileHashcodeMap = deployer.deploy(this.diff, true, dryRun); // note: clean is
true
@@ -928,8 +992,8 @@ public class SimpleDeployerTest {
FileOutputStream out = null;
try {
- out = new FileOutputStream(fileToOverwrite);
fileToOverwrite.getParentFile().mkdirs();
+ out = new FileOutputStream(fileToOverwrite);
out.write(content.getBytes());
return fileToOverwrite;
} finally {
commit f2f5fba1821350531a5eb90b7d71c6295ce37ab7
Author: John Mazzitelli <mazz(a)redhat.com>
Date: Tue Dec 21 12:24:11 2010 -0500
be able to delete deployments if they are finished
diff --git
a/modules/core/domain/src/main/java/org/rhq/core/domain/bundle/BundleDeploymentStatus.java
b/modules/core/domain/src/main/java/org/rhq/core/domain/bundle/BundleDeploymentStatus.java
index 5c9787b..8eca429 100644
---
a/modules/core/domain/src/main/java/org/rhq/core/domain/bundle/BundleDeploymentStatus.java
+++
b/modules/core/domain/src/main/java/org/rhq/core/domain/bundle/BundleDeploymentStatus.java
@@ -31,12 +31,12 @@ package org.rhq.core.domain.bundle;
* @author Jay Shaughnessy
*/
public enum BundleDeploymentStatus {
- PENDING("Pending"), //
- IN_PROGRESS("In Progress"), //
+ PENDING("Pending"), // for future, when we support true scheduling of
bundle deployments
+ IN_PROGRESS("In Progress"), //
MIXED("Mixed"), //
SUCCESS("Success"), //
- FAILURE("Failure"), //
- WARN("Warning");
+ FAILURE("Failure") //
+ ; // need this for GWT
private String displayName;
diff --git
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentListView.java
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentListView.java
index 9cb3f58..cea040d 100644
---
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentListView.java
+++
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentListView.java
@@ -100,10 +100,10 @@ public class BundleDeploymentListView extends
Table<RPCDataSource<BundleDeployme
});
HashMap<String, String> statusIcons = new HashMap<String, String>();
+ statusIcons.put(BundleDeploymentStatus.PENDING.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.IN_PROGRESS.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.FAILURE.name(),
"subsystems/bundle/Error_11.png");
statusIcons.put(BundleDeploymentStatus.MIXED.name(),
"subsystems/bundle/Warning_11.png");
- statusIcons.put(BundleDeploymentStatus.WARN.name(),
"subsystems/bundle/Warning_11.png");
statusIcons.put(BundleDeploymentStatus.SUCCESS.name(),
"subsystems/bundle/Ok_11.png");
statusField.setValueIcons(statusIcons);
statusField.setValueIconWidth(11);
diff --git
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentView.java
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentView.java
index 94b216a..870988e 100644
---
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentView.java
+++
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleDeploymentView.java
@@ -105,10 +105,10 @@ public class BundleDeploymentView extends LocatableVLayout
implements Bookmarkab
//setMargin(10); // do not set margin, we already have our margin set outside of
us
statusIcons = new HashMap<String, String>();
+ statusIcons.put(BundleDeploymentStatus.PENDING.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.IN_PROGRESS.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.FAILURE.name(),
"subsystems/bundle/Error_11.png");
statusIcons.put(BundleDeploymentStatus.MIXED.name(),
"subsystems/bundle/Warning_11.png");
- statusIcons.put(BundleDeploymentStatus.WARN.name(),
"subsystems/bundle/Warning_11.png");
statusIcons.put(BundleDeploymentStatus.SUCCESS.name(),
"subsystems/bundle/Ok_11.png");
}
diff --git
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleResourceDeploymentHistoryListView.java
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleResourceDeploymentHistoryListView.java
index 972e618..89eec9d 100644
---
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleResourceDeploymentHistoryListView.java
+++
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/deployment/BundleResourceDeploymentHistoryListView.java
@@ -69,10 +69,10 @@ public class BundleResourceDeploymentHistoryListView extends
LocatableVLayout {
this.resourceDeployment = resourceDeployment;
statusIcons = new HashMap<String, String>();
+ statusIcons.put(BundleDeploymentStatus.PENDING.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.IN_PROGRESS.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.FAILURE.name(),
"subsystems/bundle/Error_11.png");
statusIcons.put(BundleDeploymentStatus.MIXED.name(),
"subsystems/bundle/Warning_11.png");
- statusIcons.put(BundleDeploymentStatus.WARN.name(),
"subsystems/bundle/Warning_11.png");
statusIcons.put(BundleDeploymentStatus.SUCCESS.name(),
"subsystems/bundle/Ok_11.png");
}
diff --git
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/destination/BundleDestinationListView.java
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/destination/BundleDestinationListView.java
index 5bfc174..00d25df 100644
---
a/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/destination/BundleDestinationListView.java
+++
b/modules/enterprise/gui/coregui/src/main/java/org/rhq/enterprise/gui/coregui/client/bundle/destination/BundleDestinationListView.java
@@ -101,10 +101,10 @@ public class BundleDestinationListView extends
Table<RPCDataSource<BundleDestina
});
HashMap<String, String> statusIcons = new HashMap<String, String>();
+ statusIcons.put(BundleDeploymentStatus.PENDING.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.IN_PROGRESS.name(),
"subsystems/bundle/install-loader.gif");
statusIcons.put(BundleDeploymentStatus.FAILURE.name(),
"subsystems/bundle/Error_11.png");
statusIcons.put(BundleDeploymentStatus.MIXED.name(),
"subsystems/bundle/Warning_11.png");
- statusIcons.put(BundleDeploymentStatus.WARN.name(),
"subsystems/bundle/Warning_11.png");
statusIcons.put(BundleDeploymentStatus.SUCCESS.name(),
"subsystems/bundle/Ok_11.png");
latestDeploymentStatusField.setValueIcons(statusIcons);
latestDeploymentStatusField.setValueIconHeight(11);
diff --git
a/modules/enterprise/server/jar/src/main/java/org/rhq/enterprise/server/bundle/BundleManagerBean.java
b/modules/enterprise/server/jar/src/main/java/org/rhq/enterprise/server/bundle/BundleManagerBean.java
index a57b618..25ff496 100644
---
a/modules/enterprise/server/jar/src/main/java/org/rhq/enterprise/server/bundle/BundleManagerBean.java
+++
b/modules/enterprise/server/jar/src/main/java/org/rhq/enterprise/server/bundle/BundleManagerBean.java
@@ -1263,11 +1263,14 @@ public class BundleManagerBean implements BundleManagerLocal,
BundleManagerRemot
if (null == doomed) {
return;
}
- if (!BundleDeploymentStatus.PENDING.equals(doomed.getStatus())) {
+ // only allow deployments to be deleted if they are finished
+ if (BundleDeploymentStatus.SUCCESS == doomed.getStatus()
+ || BundleDeploymentStatus.FAILURE == doomed.getStatus()
+ || BundleDeploymentStatus.MIXED == doomed.getStatus()) {
+ entityManager.remove(doomed);
+ } else {
throw new IllegalArgumentException("Can not delete deployment with
status [" + doomed.getStatus() + "]");
}
-
- entityManager.remove(doomed);
}
@RequiredPermission(Permission.MANAGE_BUNDLE)