Exporting files to a volume

šŸ“˜

When exporting a file from the Platform to an attached volume, the export is possible only to a volume in the same location (cloud provider and region) as the project from which the file is exported.

The objective here is to export files from a Seven Bridges project to a volume (cloud bucket). Note that export to a volume is available only via the API, including the API client libraries, and via the Seven Bridges CLI.

As before, use the Seven Bridges bulk API calls to reduce the overall number of API calls. The examples below use the copy_only export feature, which requires advance_access to be activated when the API client is initialized.
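A minimal initialization sketch for the sevenbridges-python client is shown below; the endpoint URL and the AUTH_TOKEN placeholder are assumptions to be replaced with your own values.

import sevenbridges as sbg

# advance_access enables advance-access features such as copy_only exports
api = sbg.Api(
    url="https://api.sbgenomics.com/v2",  # assumed endpoint
    token="AUTH_TOKEN",                   # placeholder: your authentication token
    advance_access=True
)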

Not optimized for rate limit

In this example, files are fetched and exported in a loop, one by one, which costs two API calls per file and quickly runs into the rate limit for large file sets.

for name in source_file_names:
    # one query API call per file
    f = api.files.query(project=src_project, names=[name])[0]

    # one export API call per file
    export = api.exports.submit_export(
        file=f,
        volume=volume,
        location="christian_demo_files/" + f.name,
        overwrite=True,
        copy_only=False
    )
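submit_export returns as soon as the export job is created; the loop above does not wait for completion. If you need to block on each file, a polling sketch along these lines should work, assuming the client's standard reload() method to refresh the job state:

import time

while export.state not in ("COMPLETED", "FAILED", "ABORTED"):
    time.sleep(10)
    export.reload()  # re-fetch the export job from the API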

Optimized for rate limit

Fetch and export files in bulk.

import time

def bulk_export_files(
        files, volume, location, overwrite=True, copy_only=False):
    """
    Exports a list of files to a volume in bulk, in batches of up to 100 exports per request.
    """

    chunk_size = 100  # Max legal bulk size for export is 100 items.
    final_responses = []

    def is_finished(state):
        return state in ["COMPLETED", "FAILED", "ABORTED"]

    def error_handling_after_completion(responses):
        errors = [s.resource.error.message for s in responses
                  if s.resource.state == "FAILED"]
        if errors:
            data = [
                s.resource.error
                if s.resource.state == "FAILED"
                else s.resource.result
                for s in responses
            ]
            raise Exception(
                'There were errors with bulk exporting.\n' +
                '\n'.join([str(d) for d in data])
            )

    def error_handling_after_submission(responses):
        errors = [s.error.message for s in responses if not s.valid]
        if errors:
            data = [s for s in responses if not s.valid]

            raise Exception(
                'There were errors with bulk submission.\n' +
                '\n'.join([
                    f'<Error: status={s.error.status}, code={s.error.code}>; '
                    f'{s.error.message}'
                    for s in data
                ])
            )

    # export files in batches of 100 files each
    for i in range(0, len(files), chunk_size):

        # set up a list of dictionaries with export requests
        exports = [
            {
                'file': f,
                'volume': volume,
                'location': location + '/' + f.name,
                'overwrite': overwrite
            }
            for f in files[i:i + chunk_size]
        ]

        # initiate bulk export of this batch
        responses = api.exports.bulk_submit(
            exports,
            copy_only=copy_only
        )

        # check for errors in bulk submission
        error_handling_after_submission(responses)

        # wait for bulk job to finish
        while not all(is_finished(s.resource.state)
                      for s in responses):
            time.sleep(10)
            responses = api.exports.bulk_get(
                [s.resource for s in responses]
            )

        # check if each job finished successfully
        error_handling_after_completion(responses)

        final_responses.extend(responses)

    return final_responses
 
# fetch the files to export; limit sets the page size and
# .all() iterates over every result page
files_to_export = list(
    api.files.query(
        project=src_project,
        names=source_file_names,
        limit=100
    ).all())
 
responses = bulk_export_files(
    files=files_to_export,
    volume=volume,
    location='christian_demo_files',
    copy_only=False
)
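Each returned item wraps a finished export job. As a quick check of the outcome, the state and result can be printed per job, using the same resource attributes as in the error handlers above:

for response in responses:
    print(response.resource.state, response.resource.result)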