Skip to content

Commit

Permalink
Fix ubuntu (#5)
Browse files Browse the repository at this point in the history
Refactoring
  • Loading branch information
teoadal committed Jun 23, 2023
1 parent 327f75f commit 27f1461
Show file tree
Hide file tree
Showing 20 changed files with 510 additions and 530 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@ ENTRYPOINT ["dotnet", "./src/publish/Storage.Benchmark.dll"]
#unzip dotMemoryclt.zip -d ./dotMemoryclt && \
#chmod +x -R ./dotMemoryclt/*
#
#ENTRYPOINT ./dotMemoryclt/tools/dotmemory start-net-core --temp-dir=./src/dotMemoryclt/tmp --timeout=16m --save-to-dir=./src/dotMemoryclt/workspaces --log-file=./src/dotMemoryclt/tmp/log.txt --trigger-timer=2m ./src/publish/Storage.Benchmark.dll
#ENTRYPOINT ./dotMemoryclt/tools/dotmemory start-net-core --temp-dir=./src/dotMemoryclt/tmp --timeout=16m --save-to-dir=./src/dotMemoryclt/workspaces --log-file=./src/dotMemoryclt/tmp/log.txt --trigger-timer=1m ./src/publish/Storage.Benchmark.dll
52 changes: 13 additions & 39 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ Job = .NET 7.0 Runtime=.NET 7.0
Для работы с хранилищем необходимо создать клиент.

```csharp
var storageClient = new StorageClient(new StorageSettings
var storageClient = new S3Client(new S3Settings
{
AccessKey = "ROOTUSER",
Bucket = "mybucket",
Expand Down Expand Up @@ -66,7 +66,7 @@ Console.WriteLine(bucketCreateResult
Как и в прошлый раз, мы знаем название bucket'a, так как мы передаём его в настройках клиента.

```csharp
bool bucketCheckResult = await storageClient.BucketExists(cancellationToken);
bool bucketCheckResult = await storageClient.IsBucketExists(cancellationToken);
if (bucketCheckResult) Console.WriteLine("Bucket существует");
```

Expand All @@ -88,53 +88,27 @@ multipart), а можно не разбивать. Самый простой с
будет больше 5 МБ, то применяется multipart):

```csharp
bool fileUploadResult = await storageClient.UploadFile(fileName, fileStream, fileContentType, cancellationToken);
bool fileUploadResult = await storageClient.UploadFile(fileName, fileContentType, fileStream, cancellationToken);
if (fileUploadResult) Console.WriteLine("Файл загружен");
```

#### Создание без Multipart

Можно принудительно загружать файл без multipart. Есть сигнатура и для ``byte[]``.

```csharp
bool fileUploadResult = await storageClient.PutFile(fileName, byteArray, fileContentType, cancellationToken);
if (fileUploadResult) Console.WriteLine("Файл загружен");
```

#### Создание с использованием Multipart
#### Управление Multipart-загрузкой

Можно принудительно загружать файл с использованием multipart. В этом случае нужно будет явно указать размер одного
кусочка (не менее 5 МБ).
Для самостоятельного управления multipart-загрузкой, можно воспользоваться методом `UploadFile` без указания данных. Получится примерно такой код:

```csharp
bool fileUploadResult = await storageClient.PutFileMultipart(fileName, fileStream, fileContentType, partSize, cancellationToken);
if (fileUploadResult) Console.WriteLine("Файл загружен");
```

#### Управление Multipart-загрузкой

Для самостоятельного управления multipart-загрузкой, можно использовать методы клиента, начинающиеся со
слова `Multipart`.
using S3Upload upload = await storageClient.UploadFile(fileName, fileType, cancellationToken);

```csharp
Stream fileStream = ...
// получаем идентификатор загрузки
string uploadId = await storageClient.Multipart(fileName, fileType, cancellationToken);
while(fileStream.Position < fileStream.Length) {
// создаём свою логику разделения данных на куски (parts)...
string eTag = await MultipartUpload(fileName, uploadId, partNumber, partData, partSize, cancellation);

// запоминаем 'eTag' и номер куска...
if (string.IsNullOrEmpty(eTag)) { // отменяем всю загрузку, если кусок загрузить не удалось
await MultipartAbort(fileName, uploadId, cancellation);
return false;
}
await upload.Upload(stream, cancellationToken); // загружаем часть документа
if (!await upload.Upload(byteArray, cancellationToken)) { // загружаем другую часть документа
await upload.Abort(cancellationToken); // отменяем загрузку
}
else {
await upload.Complete(cancellationToken); // завершаем загрузку
}

// сообщаем хранилищу, что загрузка завершена
await MultipartComplete(fileName, uploadId, tags, cancellation);
```

В коде клиента именно эту логику использует метод PutFileMultipart. Конкретную реализацию можно подсмотреть в нём.
Expand All @@ -155,7 +129,7 @@ else {
### Проверка существования файла

```csharp
bool fileExistsResult = await storageClient.FileExists(fileName, cancellationToken);
bool fileExistsResult = await storageClient.IsFileExists(fileName, cancellationToken);
if (fileExistsResult) Console.WriteLine("Файл существует");
```

Expand Down
23 changes: 13 additions & 10 deletions src/Storage.Benchmark/InternalBenchmarks/DownloadBenchmark.cs
Original file line number Diff line number Diff line change
@@ -1,25 +1,28 @@
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Jobs;
using Storage.Benchmark.Utils;

namespace Storage.Benchmark.InternalBenchmarks;

[SimpleJob(RuntimeMoniker.Net70)]
[MeanColumn, MemoryDiagnoser]
[InProcess]
public class DownloadBenchmark
{
[Benchmark]
public async Task<int> JustDownload()
{
using var file = await _storageClient.GetFile(_fileId, _cancellation);
return BenchmarkHelper.ReadStreamMock(await file.GetStream(_cancellation));
using var file = await _s3Client.GetFile(_fileId, _cancellation);

return await BenchmarkHelper.ReadStreamMock(
await file.GetStream(_cancellation),
BenchmarkHelper.StreamBuffer,
_cancellation);
}

#region Configuration

private CancellationToken _cancellation;
private string _fileId = null!;
private StorageClient _storageClient = null!;
private S3Client _s3Client = null!;

[GlobalSetup]
public void Config()
Expand All @@ -28,17 +31,17 @@ public void Config()
var settings = BenchmarkHelper.ReadSettings(config);

_cancellation = new CancellationToken();
_fileId = $"привет-как-дела{Guid.NewGuid()}";
_storageClient = BenchmarkHelper.CreateStoragesClient(settings);
_fileId = $"привет-как-делаdcd156a8-b6bd-4130-a2c7-8a38dbfebbc7";
_s3Client = BenchmarkHelper.CreateStoragesClient(settings);

BenchmarkHelper.EnsureBucketExists(_storageClient, _cancellation);
BenchmarkHelper.EnsureFileExists(config, _storageClient, _fileId, _cancellation);
// BenchmarkHelper.EnsureBucketExists(_storageClient, _cancellation);
// BenchmarkHelper.EnsureFileExists(config, _storageClient, _fileId, _cancellation);
}

[GlobalCleanup]
public void Clear()
{
_storageClient.Dispose();
_s3Client.Dispose();
}

#endregion
Expand Down
20 changes: 1 addition & 19 deletions src/Storage.Benchmark/Program.cs
Original file line number Diff line number Diff line change
@@ -1,29 +1,11 @@
using BenchmarkDotNet.Running;
using Storage.Benchmark.InternalBenchmarks;

namespace Storage.Benchmark;

public static class Program
{
public static void Main(string[] args)
{
BenchmarkRunner.Run<DownloadBenchmark>();

// const string fileId = "привет-как-делаdcd156a8-b6bd-4130-a2c7-8a38dbfebbc7";
//
// var config = BenchmarkHelper.ReadConfiguration();
// var settings = BenchmarkHelper.ReadSettings(config);
// var cancellation = new CancellationToken();
// var storageClient = BenchmarkHelper.CreateStoragesClient(settings);
//
// var result = 0;
// for (var i = 0; i < 50; i++)
// {
// using var file = await storageClient.GetFile(fileId, cancellation);
// BenchmarkHelper.ReadStreamMock(await file.GetStream(cancellation));
//
// Console.WriteLine(result++);
// }
BenchmarkRunner.Run<S3Benchmark>();
}

}
60 changes: 30 additions & 30 deletions src/Storage.Benchmark/S3Benchmark.cs
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@ public class S3Benchmark
public async Task<int> Aws()
{
var result = 0;

await Amazon.S3.Util.AmazonS3Util.DoesS3BucketExistV2Async(_amazonClient, _bucket);

try
{
await _amazonClient.GetObjectMetadataAsync(_bucket, _fileId, _cancellation);
Expand All @@ -27,41 +27,41 @@ public async Task<int> Aws()
{
result++; // it's OK - file not found
}

_inputData.Seek(0, SeekOrigin.Begin);

await _amazonTransfer.UploadAsync(_inputData, _bucket, _fileId, _cancellation);
result++;

await _amazonClient.GetObjectMetadataAsync(_bucket, _fileId, _cancellation);
result++;

_outputData.Seek(0, SeekOrigin.Begin);
var fileDownload = await _amazonClient.GetObjectAsync(_bucket, _fileId, _cancellation);
await fileDownload.ResponseStream.CopyToAsync(_outputData, _cancellation);
result++;

await _amazonClient.DeleteObjectAsync(new DeleteObjectRequest
{
BucketName = _bucket,
Key = _fileId
}, _cancellation);

return ++result;
}

[Benchmark]
public async Task<int> Minio()
{
var result = 0;

if (!await _minioClient.BucketExistsAsync(new BucketExistsArgs().WithBucket(_bucket), _cancellation))
{
ThrowException();
}

result++;

try
{
await _minioClient.StatObjectAsync(new StatObjectArgs()
Expand All @@ -72,36 +72,36 @@ await _minioClient.StatObjectAsync(new StatObjectArgs()
{
result++; // it's OK - file not found
}

_inputData.Seek(0, SeekOrigin.Begin);
await _minioClient.PutObjectAsync(new PutObjectArgs()
.WithBucket(_bucket)
.WithObject(_fileId)
.WithObjectSize(_inputData.Length)
.WithStreamData(_inputData)
.WithContentType("application/pdf"), _cancellation);

result++;

await _minioClient.StatObjectAsync(new StatObjectArgs()
.WithBucket(_bucket)
.WithObject(_fileId), _cancellation);

result++;

_outputData.Seek(0, SeekOrigin.Begin);
await _minioClient.GetObjectAsync(new GetObjectArgs()
.WithBucket(_bucket)
.WithObject(_fileId)
.WithCallbackStream((file, ct) => file.CopyToAsync(_outputData, ct)),
_cancellation);

result++;

await _minioClient.RemoveObjectAsync(new RemoveObjectArgs()
.WithBucket(_bucket)
.WithObject(_fileId), _cancellation);

return ++result;
}

Expand All @@ -110,26 +110,26 @@ public async Task<int> Storage()
{
var result = 0;

var bucketExistsResult = await _storageClient.BucketExists(_cancellation);
var bucketExistsResult = await _s3Client.IsBucketExists(_cancellation);
if (!bucketExistsResult) ThrowException();
result++;

var fileExistsResult = await _storageClient.FileExists(_fileId, _cancellation);
var fileExistsResult = await _s3Client.IsFileExists(_fileId, _cancellation);
if (fileExistsResult) ThrowException();
result++;

_inputData.Seek(0, SeekOrigin.Begin);
var fileUploadResult = await _storageClient.UploadFile(_fileId, _inputData, "application/pdf", _cancellation);
var fileUploadResult = await _s3Client.UploadFile(_fileId, "application/pdf", _inputData, _cancellation);
if (!fileUploadResult) ThrowException();

result++;

fileExistsResult = await _storageClient.FileExists(_fileId, _cancellation);
fileExistsResult = await _s3Client.IsFileExists(_fileId, _cancellation);
if (!fileExistsResult) ThrowException();
result++;

_outputData.Seek(0, SeekOrigin.Begin);
var storageFile = await _storageClient.GetFile(_fileId, _cancellation);
var storageFile = await _s3Client.GetFile(_fileId, _cancellation);
if (!storageFile) ThrowException(storageFile.ToString());

var fileStream = await storageFile.GetStream(_cancellation);
Expand All @@ -139,7 +139,7 @@ public async Task<int> Storage()

result++;

await _storageClient.DeleteFile(_fileId, _cancellation);
await _s3Client.DeleteFile(_fileId, _cancellation);
return ++result;
}

Expand All @@ -154,7 +154,7 @@ public async Task<int> Storage()
private IAmazonS3 _amazonClient = null!;
private TransferUtility _amazonTransfer = null!;
private MinioClient _minioClient = null!;
private StorageClient _storageClient = null!;
private S3Client _s3Client = null!;

[GlobalSetup]
public void Config()
Expand All @@ -171,15 +171,15 @@ public void Config()
_amazonClient = BenchmarkHelper.CreateAWSClient(settings);
_amazonTransfer = new TransferUtility(_amazonClient);
_minioClient = BenchmarkHelper.CreateMinioClient(settings);
_storageClient = BenchmarkHelper.CreateStoragesClient(settings);
_s3Client = BenchmarkHelper.CreateStoragesClient(settings);

BenchmarkHelper.EnsureBucketExists(_storageClient, _cancellation);
BenchmarkHelper.EnsureBucketExists(_s3Client, _cancellation);
}

[GlobalCleanup]
public void Clear()
{
_storageClient.Dispose();
_s3Client.Dispose();
_inputData.Dispose();
_outputData.Dispose();
}
Expand Down
1 change: 0 additions & 1 deletion src/Storage.Benchmark/Storage.Benchmark.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>

<ItemGroup>
Expand Down

0 comments on commit 27f1461

Please sign in to comment.