Versions Compared

Key

  • This line was added.
  • This line was removed.
  • Formatting was changed.
Panel

Table of Contents

...

Code Block
languagexml
titleconnections.conf (v3)
linenumberstrue
# Script Name: connectionv3.conf
# 
# Description: 
# Connection Script file for Inter-Cloud Data Transfer Task
# 
#
# 09.03.2022   Version 1   Initial Version requires UA 7.2
# 09.03.2022   Version 2   SFTP support
# 27.04.2022   Version 3   Azure target mistake corrected
#
[amazon_s3_target]
type = s3
provider = AWS
env_auth = false
access_key_id = ${_credentialUser('${ops_var_target_credential}')}
secret_access_key = ${_credentialPwd('${ops_var_target_credential}')}
region = us-east-2
acl = bucket-owner-full-control


[amazon_s3_source]
type = s3
provider = AWS
env_auth = false
access_key_id = ${_credentialUser('${ops_var_source_credential}')}
secret_access_key = ${_credentialPwd('${ops_var_source_credential}')}
region = us-east-2
acl = bucket-owner-full-control
role_arn = arn:aws:iam::552436975963:role/SB-AWS-FULLX


[microsoft_azure_blob_storage_sas_source]
type = azureblob
sas_url = ${_credentialPwd('${ops_var_source_credential}')}


[microsoft_azure_blob_storage_sas_target]
type = azureblob
sas_url = ${_credentialPwd('${ops_var_target_credential}')}


[microsoft_azure_blob_storage_source]
type = azureblob
account = ${_credentialUser('${ops_var_source_credential}')}
key = ${_credentialPwd('${ops_var_source_credential}')}


[microsoft_azure_blob_storage_target]
type = azureblob
account = ${_credentialUser('${ops_var_target_credential}')}
key = ${_credentialPwd('${ops_var_target_credential}')}


[datalakegen2_storage_source]
type = azureblob
account = ${_credentialUser('${ops_var_source_credential}')}
key = ${_credentialPwd('${ops_var_source_credential}')}


[datalakegen2_storage_target]
type = azureblob
account = ${_credentialUser('${ops_var_target_credential}')}
key = ${_credentialPwd('${ops_var_target_credential}')}


[datalakegen2_storage_sp_source]
type = azureblob
account = ${_credentialUser('${ops_var_source_credential}')}
service_principal_file = ${_scriptPath('azure-principal.json')}
# service_principal_file = C:\virtual_machines\Setup\SoftwareKeys\Azure\azure-principal.json


[datalakegen2_storage_sp_target]
type = azureblob
account = ${_credentialUser('${ops_var_target_credential}')}
service_principal_file = ${_scriptPath('azure-principal.json')}
# service_principal_file = C:\virtual_machines\Setup\SoftwareKeys\Azure\azure-principal.json


[google_cloud_storage_source]
type = google cloud storage
service_account_file = ${_credentialPwd('${ops_var_source_credential}')}
object_acl = bucketOwnerFullControl
project_number = clagcs
location = europe-west3


[google_cloud_storage_target]
type = google cloud storage
service_account_file = ${_credentialPwd('${ops_var_target_credential}')}
object_acl = bucketOwnerFullControl
project_number = clagcs
location = europe-west3


[onedrive_source]
type = onedrive
token = ${_credentialToken('${ops_var_source_credential}')}
drive_id = ${_credentialUser('${ops_var_source_credential}')}
drive_type = business
update_credential = token



[onedrive_target]
type = onedrive
token = ${_credentialToken('${ops_var_target_credential}')}
drive_id = ${_credentialUser('${ops_var_target_credential}')}
drive_type = business
update_credential = token


[hdfs_source]
type = hdfs
namenode = 172.18.0.2:8020
username = maria_dev


[hdfs_target]
type = hdfs
namenode = 172.18.0.2:8020
username = maria_dev


[linux_source]
type = local


[linux_target]
type = local


[windows_source]
type = local


[windows_target]
type = local


[sftp_source]
type = sftp
host = 23.10.10.58
user = ubuntu
pass = ${_credentialToken('${ops_var_source_credential}')}


[sftp_target]
type = sftp
host = 23.10.10.58
user = ubuntu
pass = ${_credentialToken('${ops_var_target_credential}')}
   
   

Considerations

Rclone supports connections to almost any storage system on the market:

...

However, the current Universal Task has only been tested for the following storage types:

  • LINUX

  • AWS S3

  • Azure Blob Storage

  • Google GCS

  • Microsoft One Drive incl. Share Point

  • HDFS

  • HTTPS URL

  • SFTP


Note
titleNote

If you want to connect to a different system, (for example, Dropbox), you should contact Stonebranch for support.

...

ActionDescription

list directory

List directories; for example,

  • List object stores like S3 buckets, Azure container.
  • List OS directories from Linux, Windows, HDFS.

copy

Copy objects from source to target.

copy-to

Copies an object from source to target and allows to rename the target object.

move

Move objects from source to target.

move-to

Moves an object from source to target and allows to rename the target object.

list objects

List objects in an OS directory or cloud object store.

remove-object

Remove objects in an OS directory or cloud object store.

remove-object-store

Remove an OS directory or cloud object store.

create-object-store

Create an OS directory or cloud object store.

copy-url

Download a URL's content and copy it to the destination without saving it in temporary storage.

monitor-object

Monitor a file or object in an OS directory or cloud object store and optionally launches a task when an object is found.

In the following for each task action, the fields will be described and an example is provided.

...

Flag

Description

max-depth 1

Limits the recursion depth.

max-depth 1 means only the current directory is in scope. This is the default value.

Attention: If the flag is not set, or you change max-depth to a value greater than 1, a recursive action is performed. Recommendation: Add the flag max-depth 1 to all Copy, Move, remove-object, and remove-object-store actions in the task field Other Parameters to avoid a recursive action.

ignore-existing

Skips all files that exist on destination.

Examples:

  1. You move a file to a destination that already exists. In this case, rclone will not perform the copy but instead deletes the source file. If you set the flag --ignore-existing, the delete operation will not be performed.

  2. The --ignore-existing parameter avoids a file being deleted if the source is equal to the destination in a copy operation.

Recommendation: Add the flag --ignore-existing to all copy and move tasks, which avoids a file being deleted if the source is equal to the destination in a copy operation.

error-on-no-transfer

The error-on-no-transfer flag lets the task fail in case no transfer was done.

update

To skip files that are newer on the destination during a move or copy action, you can add the flag --update.

...

FieldDescription

Agent

Linux or Windows Universal Agent to execute the Rclone command line.

Agent Cluster

Optional Agent Cluster for load balancing.

Action

[ list directory, copy, list objects, move, remove-object, remove-object-store, create-object-store, copy-url, monitor-object ]

Move objects from source to target.

Source

Enter a source storage Type name as defined in the Connection File; for example,

amazon_s3, microsoft_azure_blob_storage, hdfs, onedrive, linux ..

For a list of all possible storage types, refer to Overview of Cloud Storage Systems.

Target

Enter a target storage Type name as defined in the Connection File; for example,

amazon_s3, microsoft_azure_blob_storage, hdfs, onedrive, linux ..

For a list of all possible storage types, refer to Overview of Cloud Storage Systems.

Connection File

In the connection file you configure all required Parameters and Credentials to connect to the Source and Target Cloud Storage System.

For example, if you want to transfer a file from AWS S3 to Azure Blob Storage, you must configure the connection Parameters for AWS S3 and Azure Blob Storage.

For details on how to configure the Connection File, refer to section Configure the Connection File.

Filter Type

[ include, exclude, none ]

Define the type of filter to apply.

Filter

Provide the patterns for file matching; for example, in a copy action:

Filter Type: include

Filter report[1-3].txt means all reports with names matching report1.txt, report2.txt, and report3.txt will be copied.

For more examples on the filter matching pattern, refer to Rclone Filtering.

Other Parameters

This field can be used to apply additional flag parameters to the selected action.

For a list of all possible flags, refer to Global Flags.

Recommendation: Add the flag max-depth 1 to all Copy, Move, remove-object and remove-object-store in the task field Other Parameters to avoid a recursive action.

Attention: If the flag max-depth 1 is not set, a recursive action is performed.

Dry-run

[ checked , unchecked ]

Do a trial run with no permanent changes.

UAC Rest Credentials

Universal Controller Rest API Credentials.

UAC Base URL

Universal Controller URL.

For example, https://192.168.88.40/uc

Loglevel

Universal Task logging settings [DEBUG | INFO | WARNING | ERROR | CRITICAL].

...

FieldDescription

Agent

Linux or Windows Universal Agent to execute the Rclone command line.

Agent Cluster

Optional Agent Cluster for load balancing.

Action

[ list directory, copy, list objects, move, remove-object, remove-object-store, create-object-store, copy-url, monitor-object ]

Remove objects in an OS directory or cloud object store.

Storage Type

Enter a storage Type name as defined in the Connection File; for example,

amazon_s3, microsoft_azure_blob_storage, hdfs, onedrive, linux ..

For a list of all possible storage types, refer to Overview of Cloud Storage Systems.

File Path

Path to the directory in which you want to remove the objects.

For example:

File Path: stonebranchpmtest

Filter: report[1-3].txt

This removes all S3 objects matching the filter report[1-3].txt ( report1.txt, report2.txt and report3.txt ) from the S3 bucket stonebranchpmtest.

Connection File

In the connection file you configure all required Parameters and Credentials to connect to the Source and Target Cloud Storage System.

For example, if you want to transfer a file from AWS S3 to Azure Blob Storage, you must configure the connection Parameters for AWS S3 and Azure Blob Storage.

For details on how to configure the Connection File, refer to section Configure the Connection File.

Other Parameters

This field can be used to apply additional flag parameters to the selected action.

For a list of all possible flags, refer to Global Flags.

Recommendation: Add the flag max-depth 1 to all Copy, Move, remove-object and remove-object-store in the task field Other Parameters to avoid a recursive action.

Attention: If the flag max-depth 1 is not set, a recursive action is performed.

Dry-run

[ checked , unchecked ]

Do a trial run with no permanent changes.

UAC Rest Credentials

Universal Controller Rest API Credentials.

UAC Base URL

Universal Controller URL.

For example, https://192.168.88.40/uc

Loglevel

Universal Task logging settings [DEBUG | INFO | WARNING | ERROR | CRITICAL].

...

FieldDescription

Agent

Linux or Windows Universal Agent to execute the Rclone command line.

Agent Cluster

Optional Agent Cluster for load balancing.

Action

[ list directory, copy, list objects, move, remove-object, remove-object-store, create-object-store, copy-url, monitor-object ]

Remove an OS directory or cloud object store.

Storage Type

Enter a storage Type name as defined in the Connection File; for example,

amazon_s3, microsoft_azure_blob_storage, hdfs, onedrive, linux ..

For a list of all possible storage types, refer to Overview of Cloud Storage Systems.

Directory

Name of the directory you want to remove.

The directory can be an object store or a file system OS directory.

The directory needs to be empty before it can be removed.

For example, Directory: stonebranchpmtest would remove the bucket stonebranchpmtest.

Connection File

In the connection file, you configure all required Parameters and Credentials to connect to the Source and Target Cloud Storage System.

For example, if you want to transfer a file from AWS S3 to Azure Blob Storage, you must configure the connection Parameters for AWS S3 and Azure Blob Storage.

For details on how to configure the Connection File, refer to section Configure the Connection File.

Other Parameters

This field can be used to apply additional flag parameters to the selected action.

For a list of all possible flags, refer to Global Flags.

Recommendation: Add the flag max-depth 1 to all Copy, Move, remove-object and remove-object-store in the task field Other Parameters to avoid a recursive action.

Attention: If the flag max-depth 1 is not set, a recursive action is performed.

Dry-run

[ checked , unchecked ]

Do a trial run with no permanent changes.

UAC Rest Credentials

Universal Controller Rest API Credentials.

UAC Base URL

Universal Controller URL.

For example, https://192.168.88.40/uc

Loglevel

Universal Task logging settings [DEBUG | INFO | WARNING | ERROR | CRITICAL].

...