Skip to content

Commit

Permalink
- First version with structured logging using Serilog
Browse files Browse the repository at this point in the history
- ToUpperInvariant, ToLowerInvariant
  • Loading branch information
KoalaBear84 committed Oct 2, 2022
1 parent 321ee89 commit b68950d
Show file tree
Hide file tree
Showing 35 changed files with 401 additions and 3,707 deletions.
51 changes: 26 additions & 25 deletions src/OpenDirectoryDownloader.GoogleDrive/GoogleDriveIndexer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@
using Google.Apis.Drive.v3;
using Google.Apis.Services;
using Google.Apis.Util.Store;
using NLog;
using OpenDirectoryDownloader.Shared;
using OpenDirectoryDownloader.Shared.Models;
using Serilog;
using System;
using System.IO;
using System.Linq;
Expand All @@ -14,39 +14,40 @@

namespace OpenDirectoryDownloader.GoogleDrive;

public static class GoogleDriveIndexer
public class GoogleDriveIndexer
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();

// If modifying these scopes, delete your previously saved credentials
// at ~/.credentials/drive-dotnet-quickstart.json
private static readonly string[] Scopes = { DriveService.Scope.DriveMetadataReadonly };
private static readonly DriveService DriveService;
private static DriveService DriveService;
private static readonly string ApplicationName = "OpenDirectoryDownloader";
private const string FolderMimeType = "application/vnd.google-apps.folder";
private const string ShortcutMimeType = "application/vnd.google-apps.shortcut";
private static readonly RateLimiter RateLimiter = new(900, TimeSpan.FromSeconds(100), 0.9d);

static GoogleDriveIndexer()
public ILogger Logger { get; }

public GoogleDriveIndexer(ILogger logger)
{
Logger = logger;

try
{
UserCredential credential;

using (FileStream fileStream = new("OpenDirectoryDownloader.GoogleDrive.json", FileMode.Open, FileAccess.Read))
{
// The file token.json stores the user's access and refresh tokens, and is created
// automatically when the authorization flow completes for the first time.
string credPath = "token.json";
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
GoogleClientSecrets.FromStream(fileStream).Secrets,
Scopes,
"user",
CancellationToken.None,
new FileDataStore(credPath, true)).Result;

Console.WriteLine($"Credential file saved to: {credPath}");
}
using FileStream fileStream = new("OpenDirectoryDownloader.GoogleDrive.json", FileMode.Open, FileAccess.Read);

// The file token.json stores the user's access and refresh tokens, and is created
// automatically when the authorization flow completes for the first time.
string credPath = "token.json";
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
GoogleClientSecrets.FromStream(fileStream).Secrets,
Scopes,
"user",
CancellationToken.None,
new FileDataStore(credPath, true)).Result;

Console.WriteLine($"Credential file saved to: {credPath}");

// Create Drive API service.
DriveService = new DriveService(new BaseClientService.Initializer()
Expand All @@ -63,7 +64,7 @@ static GoogleDriveIndexer()
}
}

public static async Task<WebDirectory> IndexAsync(WebDirectory webDirectory, string resourceKey)
public async Task<WebDirectory> IndexAsync(WebDirectory webDirectory, string resourceKey)
{
webDirectory.StartTime = DateTimeOffset.UtcNow;
string nextPageToken = string.Empty;
Expand Down Expand Up @@ -149,7 +150,7 @@ public static async Task<WebDirectory> IndexAsync(WebDirectory webDirectory, str

if (retries > 0)
{
Logger.Warn($"Retrieval succesful after try {retries + 1} for {webDirectory.Url}");
Logger.Warning("Retrieval succesful after try {retries} for {url}", retries + 1, webDirectory.Url);
}

if (string.IsNullOrWhiteSpace(nextPageToken))
Expand All @@ -171,19 +172,19 @@ public static async Task<WebDirectory> IndexAsync(WebDirectory webDirectory, str
else
{
retries++;
Logger.Warn($"Google Drive error for {webDirectory.Url} on try {retries + 1}: {ex.Message}");
Logger.Warning("Google Drive error for {url} on try {retries}: {error}", webDirectory.Url, retries + 1, ex.Message);
}

if (retries == maxRetries)
{
Logger.Error($"Skip {webDirectory.Url} because of {maxRetries} consecutive errors on : {ex.Message}");
Logger.Error("Skip {url} because of {maxRetries} consecutive errors on : {error}", webDirectory.Url, maxRetries, ex.Message);
webDirectory.Error = true;
return webDirectory;
}
}
}

Logger.Debug($"Finished Google Drive Request for Folder {folderId}");
Logger.Debug("Finished Google Drive Request for Folder {folderId}", folderId);

return webDirectory;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

<ItemGroup>
<PackageReference Include="Google.Apis.Drive.v3" Version="1.57.0.2789" />
<PackageReference Include="NLog" Version="5.0.4" />
<PackageReference Include="Serilog.Sinks.Console" Version="4.1.1-dev-00896" />
</ItemGroup>

<ItemGroup>
Expand Down
2 changes: 1 addition & 1 deletion src/OpenDirectoryDownloader/AngleSharpExtensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ public static IElement Parent(this IElement element, string elementName)
do
{
parentElement = parentElement.ParentElement;
} while (parentElement != null && parentElement.TagName.ToUpper() != elementName.ToUpper());
} while (parentElement != null && parentElement.TagName.ToUpperInvariant() != elementName.ToUpperInvariant());

return parentElement;
}
Expand Down
43 changes: 20 additions & 23 deletions src/OpenDirectoryDownloader/BrowserContext.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using NLog;
using PuppeteerExtraSharp;
using PuppeteerExtraSharp;
using PuppeteerExtraSharp.Plugins.ExtraStealth;
using PuppeteerSharp;
using System;
Expand All @@ -13,8 +12,6 @@ namespace OpenDirectoryDownloader;

public class BrowserContext: IDisposable
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();

private const string SetCookieHeader = "set-cookie";
private const string CloudflareClearanceKey = "cf_clearance";

Expand Down Expand Up @@ -63,31 +60,31 @@ public async Task<bool> DoCloudFlareAsync(string url)

Stopwatch stopwatch = Stopwatch.StartNew();

Logger.Debug($"Navigating to {url}..");
Program.Logger.Debug("Navigating to {url}..", url);

await Page.GoToAsync(url);
await Task.Delay(TimeSpan.FromSeconds(60), CancellationTokenSource.Token);

Logger.Debug($"Navigation done in {stopwatch.ElapsedMilliseconds}ms");
Program.Logger.Debug("Navigation done in {elapsedMilliseconds}ms", stopwatch.ElapsedMilliseconds);

Logger.Debug("Finished with browser!");
Program.Logger.Debug("Finished with browser!");
}
catch (OperationCanceledException ex)
{
if (!OK)
{
Logger.Error(ex, "Looks like Cloudflare protection wasn't solved in time.");
Program.Logger.Error(ex, "Looks like Cloudflare protection wasn't solved in time.");
}
}
catch (Exception ex)
{
Logger.Error(ex, "Error with browser");
Program.Logger.Error(ex, "Error with browser");
}
finally
{
Logger.Debug("Closing browser");
Program.Logger.Debug("Closing browser");
await Browser.CloseAsync();
Logger.Debug("Closed browser");
Program.Logger.Debug("Closed browser");
}

return OK;
Expand All @@ -101,12 +98,12 @@ public async Task InitializeAsync()

if (!browserFetcher.LocalRevisions().Contains(BrowserFetcher.DefaultChromiumRevision))
{
Logger.Warn($"Downloading browser... First time it can take a while, depending on your internet connection.");
Program.Logger.Warning("Downloading browser... First time it can take a while, depending on your internet connection.");
RevisionInfo revisionInfo = await browserFetcher.DownloadAsync(BrowserFetcher.DefaultChromiumRevision);
Logger.Warn($"Downloaded browser. Downloaded: {revisionInfo.Downloaded}, Platform: {revisionInfo.Platform}, Revision: {revisionInfo.Revision}, Path: {revisionInfo.FolderPath}");
Program.Logger.Warning("Downloaded browser. Downloaded: {downloaded}, Platform: {platform}, Revision: {revision}, Path: {path}", revisionInfo.Downloaded, revisionInfo.Platform, revisionInfo.Revision, revisionInfo.FolderPath);
}

Logger.Debug($"Creating browser...");
Program.Logger.Debug("Creating browser...");

PuppeteerExtra puppeteerExtra = new();

Expand All @@ -121,17 +118,17 @@ public async Task InitializeAsync()
IgnoreHTTPSErrors = true
});

Logger.Info($"Started browser with PID {Browser.Process.Id}");
Program.Logger.Information("Started browser with PID {processId}", Browser.Process.Id);

Browser.Closed += Browser_Closed;
Browser.Disconnected += Browser_Disconnected;
Browser.TargetChanged += Browser_TargetChanged;
Browser.TargetCreated += Browser_TargetCreated;
Browser.TargetDestroyed += Browser_TargetDestroyed;

Logger.Debug($"Created browser.");
Program.Logger.Debug("Created browser.");

Logger.Debug($"Creating page...");
Program.Logger.Debug("Creating page...");

Page = (await Browser.PagesAsync())[0];

Expand All @@ -155,11 +152,11 @@ public async Task InitializeAsync()
Page.WorkerCreated += Page_WorkerCreated;
Page.WorkerDestroyed += Page_WorkerDestroyed;

Logger.Debug($"Created page.");
Program.Logger.Debug("Created page.");
}
catch (Exception ex)
{
Logger.Error(ex, "Error with initializing browser");
Program.Logger.Error(ex, "Error with initializing browser");
throw;
}
}
Expand All @@ -179,7 +176,7 @@ public async Task<string> GetHtml(string url)

Stopwatch stopwatch = Stopwatch.StartNew();

Logger.Debug($"Navigating to {url}..");
Program.Logger.Debug("Navigating to {url}..", url);

NavigationOptions navigationOptions = new()
{
Expand All @@ -188,7 +185,7 @@ public async Task<string> GetHtml(string url)
};

await Page.GoToAsync(url, navigationOptions);
Logger.Debug($"Navigation done in {stopwatch.ElapsedMilliseconds}ms");
Program.Logger.Debug("Navigation done in {elapsedMilliseconds}ms", stopwatch.ElapsedMilliseconds);

string html = await Page.GetContentAsync();

Expand All @@ -198,12 +195,12 @@ public async Task<string> GetHtml(string url)
{
if (!OK)
{
Logger.Error(ex, "Timeout in navigating to URL");
Program.Logger.Error(ex, "Timeout in navigating to URL");
}
}
catch (Exception ex)
{
Logger.Error(ex, "Error with browser");
Program.Logger.Error(ex, "Error with browser");
throw;
}

Expand Down
31 changes: 14 additions & 17 deletions src/OpenDirectoryDownloader/Calibre/CalibreParser.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using NLog;
using OpenDirectoryDownloader.Helpers;
using OpenDirectoryDownloader.Helpers;
using OpenDirectoryDownloader.Shared.Models;
using System;
using System.Collections.Generic;
Expand All @@ -13,8 +12,6 @@ namespace OpenDirectoryDownloader.Calibre;

public static class CalibreParser
{
private static readonly Logger Logger = LogManager.GetCurrentClassLogger();

public static Version ParseVersion(string versionString)
{
if (versionString.Contains('/'))
Expand All @@ -39,7 +36,7 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri,
try
{
Console.WriteLine("Retrieving libraries...");
Logger.Info("Retrieving libraries...");
Program.Logger.Information("Retrieving libraries...");

if (version.Major < 3)
{
Expand All @@ -55,12 +52,12 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri,
CalibreUpdate.CalibreUpdate calibreUpdate = CalibreUpdate.CalibreUpdate.FromJson(updateResultJson);

Console.WriteLine($"Retrieved {calibreUpdate.LibraryMap.Count} libraries");
Logger.Info($"Retrieved {calibreUpdate.LibraryMap.Count} libraries");
Program.Logger.Information($"Retrieved {calibreUpdate.LibraryMap.Count} libraries");

foreach (KeyValuePair<string, string> library in calibreUpdate.LibraryMap)
{
Console.WriteLine($"Retrieving metadata of books for library {library.Value}...");
Logger.Info($"Retrieving metadata of books for library {library.Value}...");
Program.Logger.Information($"Retrieving metadata of books for library {library.Value}...");

WebDirectory libraryWebDirectory = new(webDirectory)
{
Expand Down Expand Up @@ -89,10 +86,10 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri,
CalibreResult.CalibreResult libraryResult = CalibreResult.CalibreResult.FromJson(libraryResultJson);

Console.WriteLine($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}");
Logger.Info($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}");
Program.Logger.Information($"Retrieved metadata of {libraryResult.Metadata.Count} books for library {library.Value}");

Console.WriteLine($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}...");
Logger.Info($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}...");
Program.Logger.Information($"Parsing info of {libraryResult.Metadata.Count} books for library {library.Value}...");

int booksToIndex = libraryResult.Metadata.Count;
int booksIndexed = 0;
Expand All @@ -113,25 +110,25 @@ public static async Task ParseCalibre(HttpClient httpClient, Uri calibreRootUri,
if (newBooksIndexed % 100 == 0 && stopwatch.Elapsed > TimeSpan.FromSeconds(5))
{
Logger.Warn($"Parsing books at {100 * ((decimal)newBooksIndexed / booksToIndex):F1}% ({newBooksIndexed}/{booksToIndex})");
Program.Logger.Warning($"Parsing books at {100 * ((decimal)newBooksIndexed / booksToIndex):F1}% ({newBooksIndexed}/{booksToIndex})");
stopwatch.Restart();
}
});

Console.WriteLine($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}");
Logger.Info($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}");
Program.Logger.Information($"Parsed info of {libraryResult.Metadata.Count} books for library {library.Value}");
}
}
catch (Exception ex)
{
Logger.Error(ex, "Error parsing Calibre");
Program.Logger.Error(ex, "Error parsing Calibre");
webDirectory.Error = true;
}
}

private static void GetBookInfo(HttpClient httpClient, Uri calibreRootUri, KeyValuePair<string, string> library, WebDirectory libraryWebDirectory, KeyValuePair<string, CalibreResult.Metadatum> book)
{
Logger.Debug($"Retrieving info for book [{book.Key}]: {book.Value.Title}...");
Program.Logger.Debug($"Retrieving info for book [{book.Key}]: {book.Value.Title}...");

WebDirectory bookWebDirectory = new(libraryWebDirectory)
{
Expand Down Expand Up @@ -180,19 +177,19 @@ private static void GetBookInfo(HttpClient httpClient, Uri calibreRootUri, KeyVa
{
bookWebDirectory.Files.Add(new WebFile
{
Url = new Uri(calibreRootUri, $"./get/{format.ToUpper()}/{book.Key}/{library.Key}").ToString(),
FileName = $"{PathHelper.GetValidPath(book.Value.Title)} - {PathHelper.GetValidPath(book.Value.AuthorSort)}.{format.ToLower()}",
Url = new Uri(calibreRootUri, $"./get/{format.ToUpperInvariant()}/{book.Key}/{library.Key}").ToString(),
FileName = $"{PathHelper.GetValidPath(book.Value.Title)} - {PathHelper.GetValidPath(book.Value.AuthorSort)}.{format.ToLowerInvariant()}",
FileSize = book.Value.FormatSizes.ContainsKey(format) ? book.Value.FormatSizes[format] : 0
});
}

libraryWebDirectory.Subdirectories.Add(bookWebDirectory);

Logger.Debug($"Retrieved info for book [{book.Key}]: {book.Value.Title}");
Program.Logger.Debug($"Retrieved info for book [{book.Key}]: {book.Value.Title}");
}
catch (Exception ex)
{
Logger.Debug(ex, $"Error processing book {book.Key}");
Program.Logger.Debug(ex, $"Error processing book {book.Key}");
bookWebDirectory.Error = true;
}
}
Expand Down
Loading

0 comments on commit b68950d

Please sign in to comment.