FAQ
Author: hashutosh
Date: Tue Nov 27 19:45:58 2012
New Revision: 1414353

URL: http://svn.apache.org/viewvc?rev=1414353&view=rev
Log:
HIVE-3648 : HiveMetaStoreFsImpl is not compatible with hadoop viewfs (Arup Malakar via Ashutosh Chauhan)

Modified:
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java Tue Nov 27 19:45:58 2012
@@ -25,8 +25,9 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;

public class HiveMetaStoreFsImpl implements MetaStoreFS {

@@ -37,16 +38,10 @@ public class HiveMetaStoreFsImpl impleme
public boolean deleteDir(FileSystem fs, Path f, boolean recursive,
Configuration conf) throws MetaException {
LOG.info("deleting " + f);
-
- // older versions of Hadoop don't have a Trash constructor based on the
- // Path or FileSystem. So need to achieve this by creating a dummy conf.
- // this needs to be filtered out based on version
- Configuration dupConf = new Configuration(conf);
- FileSystem.setDefaultUri(dupConf, fs.getUri());
+ HadoopShims hadoopShim = ShimLoader.getHadoopShims();

try {
- Trash trashTmp = new Trash(dupConf);
- if (trashTmp.moveToTrash(f)) {
+ if (hadoopShim.moveToAppropriateTrash(fs, f, conf)) {
LOG.info("Moved to trash: " + f);
return true;
}

Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Nov 27 19:45:58 2012
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.io.Text;
@@ -626,6 +627,19 @@ public class Hadoop20Shims implements Ha
}

@Override
+ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+ throws IOException {
+ // older versions of Hadoop don't have a Trash constructor based on the
+ // Path or FileSystem. So need to achieve this by creating a dummy conf.
+ // this needs to be filtered out based on version
+
+ Configuration dupConf = new Configuration(conf);
+ FileSystem.setDefaultUri(dupConf, fs.getUri());
+ Trash trash = new Trash(dupConf);
+ return trash.moveToTrash(path);
+ }
+
+ @Override
public long getDefaultBlockSize(FileSystem fs, Path path) {
return fs.getDefaultBlockSize();
}

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Tue Nov 27 19:45:58 2012
@@ -17,12 +17,14 @@
*/
package org.apache.hadoop.hive.shims;

+import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.hive.shims.HadoopShimsSecure;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobConf;
@@ -97,6 +99,18 @@ public class Hadoop20SShims extends Hado
}

@Override
+ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+ throws IOException {
+ // older versions of Hadoop don't have a Trash constructor based on the
+ // Path or FileSystem. So need to achieve this by creating a dummy conf.
+ // this needs to be filtered out based on version
+
+ Configuration dupConf = new Configuration(conf);
+ FileSystem.setDefaultUri(dupConf, fs.getUri());
+ Trash trash = new Trash(dupConf);
+ return trash.moveToTrash(path);
+ }
+ @Override
public long getDefaultBlockSize(FileSystem fs, Path path) {
return fs.getDefaultBlockSize();
}

Modified: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Nov 27 19:45:58 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hive.shims;

+import java.io.IOException;
import java.lang.Integer;
import java.net.MalformedURLException;
import java.net.URL;
@@ -24,6 +25,7 @@ import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
import org.apache.hadoop.hive.shims.HadoopShimsSecure;
import org.apache.hadoop.mapred.ClusterStatus;
@@ -125,5 +127,9 @@ public class Hadoop23Shims extends Hadoo
return fs.getDefaultReplication(path);
}

-
+ @Override
+ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+ throws IOException {
+ return Trash.moveToAppropriateTrash(fs, path, conf);
+ }
}

Modified: hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Tue Nov 27 19:45:58 2012
@@ -582,4 +582,8 @@ public abstract class HadoopShimsSecure

@Override
abstract public long getDefaultBlockSize(FileSystem fs, Path path);
+
+ @Override
+ abstract public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+ throws IOException;
}

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1414353&r1=1414352&r2=1414353&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Nov 27 19:45:58 2012
@@ -289,6 +289,19 @@ public interface HadoopShims {
*/
public String getJobLauncherHttpAddress(Configuration conf);

+
+ /**
+ * Move the directory/file to trash. In case of the symlinks or mount points, the file is
+ * moved to the trashbin in the actual volume of the path p being deleted
+ * @param fs
+ * @param path
+ * @param conf
+ * @return false if the item is already in the trash or trash is disabled
+ * @throws IOException
+ */
+ public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+ throws IOException;
+
/**
* Get the default block size for the path. FileSystem alone is not sufficient to
* determine the same, as in case of CSMT the underlying file system determines that.

Search Discussions

Related Discussions

Discussion Navigation
view: thread | post
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Nov 27, '12 at 7:46p
active: Nov 27, '12 at 7:46p
posts: 1
users: 1
website: hive.apache.org

1 user in discussion

Hashutosh: 1 post

People

Translate

site design / logo © 2021 Grokbase