/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Amazon.Runtime.Internal.Util;
using Amazon.S3.Model;
using Amazon.S3.Transfer.Internal;
using Amazon.Util;
using System.Runtime.ExceptionServices;
namespace Amazon.S3.Transfer
{
    public partial class TransferUtility : ITransferUtility
    {
        #region UploadDirectory

        /// <summary>
        /// Uploads files from a specified directory.
        /// The object key is derived from the file names inside the directory.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="directory">
        /// The source directory, that is, the directory containing the files to upload.
        /// </param>
        /// <param name="bucketName">
        /// The target Amazon S3 bucket, that is, the name of the bucket to upload the files to.
        /// </param>
        public void UploadDirectory(string directory, string bucketName)
        {
            try
            {
                var task = UploadDirectoryAsync(directory, bucketName);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Uploads files from a specified directory.
        /// The object key is derived from the file names inside the directory.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="directory">
        /// The source directory, that is, the directory containing the files to upload.
        /// </param>
        /// <param name="bucketName">
        /// The target Amazon S3 bucket, that is, the name of the bucket to upload the files to.
        /// </param>
        /// <param name="searchPattern">
        /// A pattern used to identify the files from the source directory to upload.
        /// </param>
        /// <param name="searchOption">
        /// A search option that specifies whether to recursively search for files to upload
        /// in subdirectories.
        /// </param>
        public void UploadDirectory(string directory, string bucketName, string searchPattern, SearchOption searchOption)
        {
            try
            {
                var task = UploadDirectoryAsync(directory, bucketName, searchPattern, searchOption);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Uploads files from a specified directory.
        /// The object key is derived from the file names inside the directory.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="request">
        /// The request that contains all the parameters required to upload a directory.
        /// </param>
        public void UploadDirectory(TransferUtilityUploadDirectoryRequest request)
        {
            try
            {
                var task = UploadDirectoryAsync(request);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        #endregion

        #region Upload

        /// <summary>
        /// Uploads the specified file.
        /// The object key is derived from the file's name.
        /// Multiple threads are used to read the file and perform multiple uploads in parallel.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="filePath">
        /// The file path of the file to upload.
        /// </param>
        /// <param name="bucketName">
        /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
        /// </param>
        public void Upload(string filePath, string bucketName)
        {
            try
            {
                var task = UploadAsync(filePath, bucketName);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Uploads the specified file.
        /// Multiple threads are used to read the file and perform multiple uploads in parallel.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="filePath">
        /// The file path of the file to upload.
        /// </param>
        /// <param name="bucketName">
        /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
        /// </param>
        /// <param name="key">
        /// The key under which the Amazon S3 object is stored.
        /// </param>
        public void Upload(string filePath, string bucketName, string key)
        {
            try
            {
                var task = UploadAsync(filePath, bucketName, key);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Uploads the contents of the specified stream.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="stream">
        /// The stream to read to obtain the content to upload.
        /// </param>
        /// <param name="bucketName">
        /// The target Amazon S3 bucket, that is, the name of the bucket to upload the stream to.
        /// </param>
        /// <param name="key">
        /// The key under which the Amazon S3 object is stored.
        /// </param>
        public void Upload(Stream stream, string bucketName, string key)
        {
            try
            {
                var task = UploadAsync(stream, bucketName, key);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Uploads the file or stream specified by the request.
        /// To track the progress of the upload,
        /// add an event listener to the request's UploadProgressEvent.
        /// For large uploads, the file will be divided and uploaded in parts using
        /// Amazon S3's multipart API. The parts will be reassembled as one object
        /// in Amazon S3.
        /// </summary>
        /// <remarks>
        /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
        /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
        /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
        /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
        /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
        /// </remarks>
        /// <param name="request">
        /// Contains all the parameters required to upload to Amazon S3.
        /// </param>
        public void Upload(TransferUtilityUploadRequest request)
        {
            try
            {
                var task = UploadAsync(request);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        #endregion

        #region OpenStream

        /// <summary>
        /// Returns a stream from which the caller can read the content from the specified
        /// Amazon S3 bucket and key.
        /// The caller of this method is responsible for closing the stream.
        /// </summary>
        /// <param name="bucketName">
        /// The name of the bucket.
        /// </param>
        /// <param name="key">
        /// The object key.
        /// </param>
        /// <returns>
        /// A stream of the contents from the specified Amazon S3 bucket and key.
        /// </returns>
        public Stream OpenStream(string bucketName, string key)
        {
            try
            {
                var task = OpenStreamAsync(bucketName, key);
                return task.Result;
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Result.
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
                return null; // Unreachable: Throw() never returns, but the compiler requires a value.
            }
        }

        /// <summary>
        /// Returns a stream to read the contents from Amazon S3 as
        /// specified by the TransferUtilityOpenStreamRequest.
        /// The caller of this method is responsible for closing the stream.
        /// </summary>
        /// <param name="request">
        /// Contains all the parameters required to open a stream to an S3 object.
        /// </param>
        /// <returns>
        /// A stream of the contents from Amazon S3.
        /// </returns>
        public Stream OpenStream(TransferUtilityOpenStreamRequest request)
        {
            try
            {
                var task = OpenStreamAsync(request);
                return task.Result;
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Result.
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
                return null; // Unreachable: Throw() never returns, but the compiler requires a value.
            }
        }

        #endregion

        #region Download

        /// <summary>
        /// Downloads the content from Amazon S3 and writes it to the specified file.
        /// </summary>
        /// <param name="filePath">
        /// The file path where the content from Amazon S3 will be written to.
        /// </param>
        /// <param name="bucketName">
        /// The name of the bucket containing the Amazon S3 object to download.
        /// </param>
        /// <param name="key">
        /// The key under which the Amazon S3 object is stored.
        /// </param>
        public void Download(string filePath, string bucketName, string key)
        {
            try
            {
                var task = DownloadAsync(filePath, bucketName, key);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Downloads the content from Amazon S3 and writes it to the specified file.
        /// If the key is not specified in the request parameter,
        /// the file name will be used as the key name.
        /// </summary>
        /// <param name="request">
        /// Contains all the parameters required to download an Amazon S3 object.
        /// </param>
        public void Download(TransferUtilityDownloadRequest request)
        {
            try
            {
                var task = DownloadAsync(request);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        #endregion

        #region DownloadDirectory

        /// <summary>
        /// Downloads the objects in Amazon S3 that have a key that starts with the value
        /// specified by <paramref name="s3Directory"/>.
        /// </summary>
        /// <param name="bucketName">
        /// The name of the bucket containing the Amazon S3 objects to download.
        /// </param>
        /// <param name="s3Directory">
        /// The directory in Amazon S3 to download.
        /// </param>
        /// <param name="localDirectory">
        /// The local directory to download the objects to.
        /// </param>
        public void DownloadDirectory(string bucketName, string s3Directory, string localDirectory)
        {
            try
            {
                var task = DownloadDirectoryAsync(bucketName, s3Directory, localDirectory);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        /// <summary>
        /// Downloads the objects in Amazon S3 that have a key that starts with the value
        /// specified by the S3Directory
        /// property of the passed in TransferUtilityDownloadDirectoryRequest object.
        /// </summary>
        /// <param name="request">
        /// Contains all the parameters required to download objects from Amazon S3
        /// into a local directory.
        /// </param>
        public void DownloadDirectory(TransferUtilityDownloadDirectoryRequest request)
        {
            try
            {
                var task = DownloadDirectoryAsync(request);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        #endregion

        #region AbortMultipartUploads

        /// <summary>
        /// Aborts the multipart uploads that were initiated before the specified date.
        /// </summary>
        /// <param name="bucketName">
        /// The name of the bucket containing multipart uploads.
        /// </param>
        /// <param name="initiatedDate">
        /// The date before which the multipart uploads were initiated.
        /// </param>
        public void AbortMultipartUploads(string bucketName, DateTime initiatedDate)
        {
            try
            {
                var task = AbortMultipartUploadsAsync(bucketName, initiatedDate);
                task.Wait();
            }
            catch (AggregateException ae)
            {
                // Rethrow the underlying exception with its original stack trace
                // instead of the AggregateException wrapper produced by Wait().
                ExceptionDispatchInfo.Capture(ae.InnerException).Throw();
            }
        }

        #endregion
    }
}