Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/integration/setup/setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ do echo "waiting for s3 to become ready"
done

RETRY_TIMES=0
until docker ps -f name="proxy" --format "{{.Status}}" | grep "Up About"
until docker ps -f name="proxy" --format "{{.Status}}" | grep "Up "
do echo "waiting for proxy to become ready"
RETRY_TIMES=$((RETRY_TIMES+1));
if [ "$RETRY_TIMES" -eq 30 ]; then
Expand Down
15 changes: 11 additions & 4 deletions .github/integration/tests/20_upload.sh
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ user=test_dummy.org


# Create folder with subfolder structure and add some encrypted files
mkdir data_files_enc/dir1 data_files_enc/dir1/dir2
mkdir -p data_files_enc/dir1 data_files_enc/dir1/dir2
cp data_files_enc/data_file.c4gh data_files_enc/data_file3.c4gh
cp data_files_enc/data_file.c4gh data_files_enc/dir1/data_file.c4gh
cp data_files_enc/data_file.c4gh data_files_enc/dir1/dir2/data_file.c4gh
Expand All @@ -19,10 +19,19 @@ cp data_files_enc/data_file.c4gh data_files_enc/dir1/dir2/data_file2.c4gh
./sda-cli -config testing/s3cmd.conf upload data_file.c4gh
check_uploaded_file "test/$user/data_file.c4gh" data_file.c4gh

# Upload the file twice and check that this returns an error
msg=$(./sda-cli -config testing/s3cmd.conf upload data_file.c4gh 2>&1 | tail -1 || true)
if ! grep -q "Error:" <<< "$msg"
then
echo "wrong error message: $msg"
exit 1
fi

# Try to upload a file twice with the --force-overwrite flag
# Upload a file twice with the --force-overwrite flag
./sda-cli -config testing/s3cmd.conf upload --force-overwrite data_file.c4gh

# Upload an already uploaded file and a new one using the --continue flag (useful for resuming uploads)
./sda-cli -config testing/s3cmd.conf upload data_file.c4gh data_files_enc/data_file1.c4gh --continue

# Test upload all files from a folder, one by one
for k in data_file.c4gh data_file1.c4gh
Expand All @@ -32,8 +41,6 @@ do
check_uploaded_file "test/$user/$k" "$k"
done



# Upload a folder recursively and a single file
./sda-cli -config testing/s3cmd.conf upload -r data_files_enc/dir1 data_files_enc/data_file3.c4gh

Expand Down
27 changes: 27 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -252,6 +252,33 @@ exit without further processing.
- If a file has already been uploaded, the process will exit without further
processing. To overwrite existing files, use the `-force-overwrite` flag.

### Resume a multiple-file upload

Suppose you run the following command to upload two encrypted files,

```bash
./sda-cli -config <configuration_file> upload <encrypted_file_to_upload_1> <encrypted_file_to_upload_2>
```

After `<encrypted_file_to_upload_1>` has been uploaded, the process is interrupted while uploading `<encrypted_file_to_upload_2>`. To resume the upload, simply add the `-continue` flag to the command, as shown below:

```bash
./sda-cli -config <configuration_file> upload <encrypted_file_to_re-upload> <encrypted_file_to_upload> -continue
```
Comment thread
aaperis marked this conversation as resolved.

The `-continue` flag is especially useful when recursively uploading an entire folder with encrypted files. For example, the following command:

Comment thread
aaperis marked this conversation as resolved.
```bash
./sda-cli -config <configuration_file> upload -r <folder_to_upload_with_encrypted_data> -continue
```

will skip uploading any files that are already uploaded to the target location and proceed with uploading the remaining files of `<folder_to_upload_with_encrypted_data>`, effectively resuming the upload of the latter in case it was previously interrupted.

Notes:

- `-continue` can be combined with any of the available flags except `-force-overwrite`. In this case, the latter takes precedence.
- `-continue` will not resume partially uploaded files. Any such file will be re-uploaded.

## List

Before using the `list` functionality, ensure you have [downloaded the configuration file](#download-the-configuration-file).
Expand Down
3 changes: 1 addition & 2 deletions testing/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: "3.7"
services:
s3_backend:
command: server /data
Expand Down Expand Up @@ -130,7 +129,7 @@ services:
command:
- "/bin/sh"
- "-c"
- if [ ! -f "/out/c4gh.sec.pem" ]; then wget -qO- "https://github.com/neicnordic/crypt4gh/releases/latest/download/crypt4gh_linux_x86_64.tar.gz" | tar zxf -;
- if [ ! -f "/shared/c4gh.sec.pem" ]; then wget -qO- "https://github.com/neicnordic/crypt4gh/releases/latest/download/crypt4gh_linux_x86_64.tar.gz" | tar zxf -;
./crypt4gh generate -n /shared/c4gh -p privatekeypass; fi;
volumes:
- shared:/shared
Expand Down
48 changes: 25 additions & 23 deletions upload/upload.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ Options:
-accessToken <access-token> Access token for the SDA inbox service. This is optional
if already set in the config file or as the 'ACCESSTOKEN'
environment variable.
-continue Skip already uploaded files and continue with uploading the rest.
Useful for resuming an upload from a previous breakpoint.
-encrypt-with-key <public-key-file>
Encrypt files using the specified public key before upload.
The key file may contain multiple concatenated public keys.
Expand Down Expand Up @@ -70,6 +72,8 @@ var targetDir = Args.String("targetDir", "",

var forceOverwrite = Args.Bool("force-overwrite", false, "Force overwrite existing files.")

var continueUpload = Args.Bool("continue", false, "Skip existing files and continue with the rest.")

var pubKeyPath = Args.String("encrypt-with-key", "",
"Public key file to use for encryption of files before upload.\n"+
"The key file may optionally contain several concatenated public keys.\n"+
Expand Down Expand Up @@ -145,30 +149,28 @@ func uploadFiles(files, outFiles []string, targetDir string, config *helpers.Con
return err
}

// Check if files exists in S3
var listPrefix string
if targetDir != "" {
listPrefix = targetDir + "/" + outFiles[k]
if *forceOverwrite {
fmt.Println("force-overwrite flag provided, continuing by overwritting target...")
} else {
listPrefix = outFiles[k]
}
fileExists, err := helpers.ListFiles(*config, listPrefix)
if err != nil {
return fmt.Errorf("listing uploaded files: %s", err.Error())
}
if len(fileExists.Contents) > 0 {
if aws.StringValue(
fileExists.Contents[0].Key,
) == filepath.Clean(
config.AccessKey+"/"+targetDir+"/"+outFiles[k],
) {
fmt.Printf("File %s is already uploaded!\n", filepath.Base(filename))
if !*forceOverwrite {
fmt.Println("Quitting...")

return errors.New("file already uploaded")
}
fmt.Println("force-overwrite flag provided, continuing...")
// Check if files exists in S3
listPrefix := outFiles[k]
if targetDir != "" {
listPrefix = targetDir + "/" + outFiles[k]
}

listResult, err := helpers.ListFiles(*config, listPrefix)
if err != nil {
return fmt.Errorf("listing uploaded files: %s", err.Error())
}

fileExists := len(listResult.Contents) > 0 && aws.StringValue(listResult.Contents[0].Key) == filepath.Clean(config.AccessKey+"/"+listPrefix)
switch {
case fileExists && *continueUpload:
fmt.Printf("File %s has already been uploaded, continuing with the next file...\n", filepath.Base(filename))

continue
case fileExists && !*continueUpload:
return fmt.Errorf("file %s is already uploaded", filepath.Base(filename))
}
}

Expand Down
Loading