Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions paimon-common/src/main/java/org/apache/paimon/fs/FileIO.java
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,13 @@ default FileStatus[] listDirectories(Path path) throws IOException {
*/
boolean delete(Path path, boolean recursive) throws IOException;

/**
 * Moves the given path into the file system's trash instead of deleting it permanently.
 *
 * <p>The default implementation does not support trash and always throws {@link
 * UnsupportedOperationException}; implementations backed by a file system with trash
 * semantics (e.g. HDFS) should override this method.
 *
 * @param path the path to move to trash
 * @return true if the path was moved to trash, false otherwise (e.g. trash is disabled)
 * @throws IOException if an I/O error occurs while moving the path
 */
default boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

/**
* Make the given file and all non-existent parents into directories. Has the semantics of Unix
* 'mkdir -p'. Existence of the directory hierarchy is not an error.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return wrap(() -> fileIO(path).delete(path, recursive));
}

/** Moving a path to trash is not supported by this {@code FileIO} wrapper. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public boolean mkdirs(Path path) throws IOException {
return wrap(() -> fileIO(path).mkdirs(path));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Trash;

import java.io.IOException;
import java.io.OutputStreamWriter;
Expand Down Expand Up @@ -153,6 +154,19 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return getFileSystem(hadoopPath).delete(hadoopPath, recursive);
}

/**
 * Moves the given path into the Hadoop trash of the file system that actually stores it.
 *
 * <p>NOTE(review): Hadoop's {@code Trash#moveToTrash} returns {@code false} without
 * throwing when trash is disabled ({@code fs.trash.interval} = 0) — callers that need a
 * guarantee the path is gone must check the return value; confirm callers handle this.
 *
 * @param path the path to move to trash
 * @return true if the path was moved to trash, false otherwise
 * @throws IOException if resolving the path or moving it to trash fails
 */
@Override
public boolean moveToTrash(Path path) throws IOException {
// Convert the Paimon path to a Hadoop path and get the file system serving it.
org.apache.hadoop.fs.Path hadoopPath = path(path);
FileSystem fs = getFileSystem(hadoopPath);
Configuration conf = hadoopConf.get();

// Resolve the path fully (symlinks, and presumably viewfs/federated mounts — TODO
// confirm) so the trash directory is created on the file system that physically
// holds the data rather than on a mounting layer.
org.apache.hadoop.fs.Path fullyResolvedPath = fs.resolvePath(hadoopPath);
FileSystem fullyResolvedFs = FileSystem.get(fullyResolvedPath.toUri(), conf);

Trash trash = new Trash(fullyResolvedFs, conf);
return trash.moveToTrash(fullyResolvedPath);
}

@Override
public boolean mkdirs(Path path) throws IOException {
org.apache.hadoop.fs.Path hadoopPath = path(path);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return delete(file);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

private boolean delete(final File f) {
if (f.isDirectory()) {
final File[] files = f.listFiles();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return fileIO().delete(path, recursive);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public boolean mkdirs(Path path) throws IOException {
return fileIO().mkdirs(path);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -235,6 +235,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return delete(file);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

private boolean delete(final File f) {
if (f.isDirectory()) {
final File[] files = f.listFiles();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,11 @@ public boolean delete(Path f, boolean recursive) throws IOException {
return originalFs.delete(f, recursive);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public boolean mkdirs(Path f) throws IOException {
return originalFs.mkdirs(f);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.options.Options;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -66,6 +67,11 @@ public void configure(CatalogContext context) {
this.hadoopOptions = mirrorCertainHadoopConfig(loadHadoopConfigFromContext(context));
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

// add additional config entries from the IO config to the Hadoop config
private Options loadHadoopConfigFromContext(CatalogContext context) {
Options hadoopConfig = new Options();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.plugin.PluginLoader;

import java.io.IOException;

/** Azure Blob Storage {@link FileIOLoader}. */
public class AzureLoader implements FileIOLoader {

Expand Down Expand Up @@ -62,6 +64,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(AZURE_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.options.Options;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -89,6 +90,11 @@ public void configure(CatalogContext context) {
}
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileSystem createFileSystem(org.apache.hadoop.fs.Path path) {
final String scheme = path.toUri().getScheme();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.plugin.PluginLoader;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

Expand Down Expand Up @@ -70,6 +71,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(COSN_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,11 @@ public void configure(CatalogContext context) {
}
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(org.apache.paimon.fs.Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileSystem createFileSystem(Path path) throws IOException {
final String scheme = path.toUri().getScheme();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.plugin.PluginLoader;

import java.io.IOException;

/** A {@link PluginLoader} to load gcs. */
public class GSLoader implements FileIOLoader {

Expand Down Expand Up @@ -63,6 +65,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(GCS_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,11 @@ public TwoPhaseOutputStream newTwoPhaseOutputStream(Path path, boolean overwrite
return new JindoTwoPhaseOutputStream(new JindoMultiPartUpload(fs, hadoopPath), hadoopPath);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected Pair<JindoHadoopSystem, String> createFileSystem(org.apache.hadoop.fs.Path path) {
final String scheme = path.toUri().getScheme();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.options.Options;

import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -105,6 +106,11 @@ public void configure(CatalogContext context) {
}
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileSystem createFileSystem(org.apache.hadoop.fs.Path path) {
final String scheme = path.toUri().getScheme();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.plugin.PluginLoader;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

Expand Down Expand Up @@ -76,6 +77,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(OBS_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,11 @@ public void configure(CatalogContext context) {
}
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public TwoPhaseOutputStream newTwoPhaseOutputStream(Path path, boolean overwrite)
throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(OSS_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,11 @@ public TwoPhaseOutputStream newTwoPhaseOutputStream(Path path, boolean overwrite
return new S3TwoPhaseOutputStream(new S3MultiPartUpload(fs, fs.getConf()), hadoopPath);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

// add additional config entries from the IO config to the Hadoop config
private Options loadHadoopConfigFromContext(CatalogContext context) {
Options hadoopConfig = new Options();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.paimon.fs.PluginFileIO;
import org.apache.paimon.plugin.PluginLoader;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

Expand Down Expand Up @@ -76,6 +77,11 @@ public boolean isObjectStore() {
return true;
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
protected FileIO createFileIO(Path path) {
FileIO fileIO = getLoader().newInstance(S3_CLASS);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
return getFileSystem(flinkPath).delete(flinkPath, recursive);
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public boolean mkdirs(Path path) throws IOException {
org.apache.flink.core.fs.Path flinkPath = path(path);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,11 @@ public boolean delete(Path path, boolean recursive) throws IOException {
throw new UnsupportedOperationException();
}

/** Moving a path to trash is not supported by this {@code FileIO} implementation. */
@Override
public boolean moveToTrash(Path path) throws IOException {
    throw new UnsupportedOperationException(
            "moveToTrash is not supported by " + getClass().getSimpleName());
}

@Override
public boolean mkdirs(Path path) throws IOException {
throw new UnsupportedOperationException();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -961,7 +961,7 @@ protected void dropTableImpl(Identifier identifier, List<Path> externalPaths) {
identifier.getTableName(),
!externalTable,
false,
true));
false));

// When drop a Hive external table, only the hive metadata is deleted and the data files
// are not deleted.
Expand All @@ -975,7 +975,15 @@ protected void dropTableImpl(Identifier identifier, List<Path> externalPaths) {
Path path = getTableLocation(identifier);
try {
if (fileIO.exists(path)) {
fileIO.deleteDirectoryQuietly(path);
try {
LOG.info("Move to trash, path: {}", path);
fileIO.moveToTrash(path);
} catch (UnsupportedOperationException e) {
LOG.warn(
"Catch UnsupportedOperationException and delete directory quietly, path: {}",
path);
fileIO.deleteDirectoryQuietly(path);
}
}
for (Path externalPath : externalPaths) {
if (fileIO.exists(externalPath)) {
Expand Down
Loading