Move source code into "Source" folder

This commit is contained in:
Robert McRackan 2022-05-09 10:31:45 -04:00
parent 1ee73fa1a7
commit 389fbb2371
287 changed files with 26 additions and 8 deletions

View file

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AAXClean" Version="0.4.6" />
<PackageReference Include="AAXClean.Codecs" Version="0.2.7" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\FileManager\FileManager.csproj" />
</ItemGroup>
</Project>

View file

@@ -0,0 +1,120 @@
using System;
using AAXClean;
using Dinah.Core.Net.Http;
namespace AaxDecrypter
{
public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase
{
public event EventHandler<AppleTags> RetrievedMetadata;
protected AaxFile AaxFile;
protected AaxcDownloadConvertBase(string outFileName, string cacheDirectory, DownloadOptions dlLic)
: base(outFileName, cacheDirectory, dlLic) { }
/// <summary>Setting cover art by this method will insert the art into the audiobook metadata</summary>
public override void SetCoverArt(byte[] coverArt)
{
base.SetCoverArt(coverArt);
if (coverArt is not null && AaxFile?.AppleTags is not null)
AaxFile.AppleTags.Cover = coverArt;
}
protected bool Step_GetMetadata()
{
AaxFile = new AaxFile(InputFileStream);
if (DownloadOptions.StripUnabridged)
{
AaxFile.AppleTags.Title = AaxFile.AppleTags.TitleSansUnabridged;
AaxFile.AppleTags.Album = AaxFile.AppleTags.Album?.Replace(" (Unabridged)", "");
}
//Finish configuring the LAME encoder.
if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
{
double bitrateMultiple = 1;
if (AaxFile.AudioChannels == 2)
{
if (DownloadOptions.Downsample)
bitrateMultiple = 0.5;
else
DownloadOptions.LameConfig.Mode = NAudio.Lame.MPEGMode.Stereo;
}
if (DownloadOptions.MatchSourceBitrate)
{
int kbps = (int)(AaxFile.AverageBitrate * bitrateMultiple / 1024);
if (DownloadOptions.LameConfig.VBR is null)
DownloadOptions.LameConfig.BitRate = kbps;
else if (DownloadOptions.LameConfig.VBR == NAudio.Lame.VBRMode.ABR)
DownloadOptions.LameConfig.ABRRateKbps = kbps;
}
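// Worked example (illustrative; assumes AverageBitrate is reported in bits per second):
// a 2-channel source averaging 131,072 bit/s with Downsample enabled gives bitrateMultiple = 0.5,
// so MatchSourceBitrate targets 131072 * 0.5 / 1024 = 64 kbps.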
}
OnRetrievedTitle(AaxFile.AppleTags.TitleSansUnabridged);
OnRetrievedAuthors(AaxFile.AppleTags.FirstAuthor ?? "[unknown]");
OnRetrievedNarrators(AaxFile.AppleTags.Narrator ?? "[unknown]");
OnRetrievedCoverArt(AaxFile.AppleTags.Cover);
RetrievedMetadata?.Invoke(this, AaxFile.AppleTags);
return !IsCanceled;
}
protected DownloadProgress Step_DownloadAudiobook_Start()
{
var zeroProgress = new DownloadProgress
{
BytesReceived = 0,
ProgressPercentage = 0,
TotalBytesToReceive = InputFileStream.Length
};
OnDecryptProgressUpdate(zeroProgress);
AaxFile.SetDecryptionKey(DownloadOptions.AudibleKey, DownloadOptions.AudibleIV);
return zeroProgress;
}
protected void Step_DownloadAudiobook_End(DownloadProgress zeroProgress)
{
AaxFile.Close();
CloseInputFileStream();
OnDecryptProgressUpdate(zeroProgress);
}
protected void AaxFile_ConversionProgressUpdate(object sender, ConversionProgressEventArgs e)
{
var duration = AaxFile.Duration;
var remainingSecsToProcess = (duration - e.ProcessPosition).TotalSeconds;
var estTimeRemaining = remainingSecsToProcess / e.ProcessSpeed;
if (double.IsNormal(estTimeRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
var progressPercent = (e.ProcessPosition / e.TotalDuration);
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = 100 * progressPercent,
BytesReceived = (long)(InputFileStream.Length * progressPercent),
TotalBytesToReceive = InputFileStream.Length
});
}
public override void Cancel()
{
IsCanceled = true;
AaxFile?.Cancel();
AaxFile?.Dispose();
CloseInputFileStream();
}
}
}

View file

@@ -0,0 +1,141 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using AAXClean;
using AAXClean.Codecs;
using Dinah.Core.StepRunner;
using FileManager;
namespace AaxDecrypter
{
public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase
{
protected override StepSequence Steps { get; }
private Func<MultiConvertFileProperties, string> multipartFileNameCallback { get; }
private static TimeSpan minChapterLength { get; } = TimeSpan.FromSeconds(3);
private List<string> multiPartFilePaths { get; } = new List<string>();
public AaxcDownloadMultiConverter(string outFileName, string cacheDirectory, DownloadOptions dlLic,
Func<MultiConvertFileProperties, string> multipartFileNameCallback = null)
: base(outFileName, cacheDirectory, dlLic)
{
Steps = new StepSequence
{
Name = "Download and Convert Aaxc To " + DownloadOptions.OutputFormat,
["Step 1: Get Aaxc Metadata"] = Step_GetMetadata,
["Step 2: Download Decrypted Audiobook"] = Step_DownloadAudiobookAsMultipleFilesPerChapter,
["Step 3: Cleanup"] = Step_Cleanup,
};
this.multipartFileNameCallback = multipartFileNameCallback ?? MultiConvertFileProperties.DefaultMultipartFilename;
}
/*
https://github.com/rmcrackan/Libation/pull/127#issuecomment-939088489
If the chapter truly is empty, that is, 0 audio frames in length, then yes it is ignored.
If the chapter is shorter than 3 seconds long but still has some audio frames, those frames are combined with the following chapter and not split into a new file.
I also implemented file naming by chapter title. When 2 or more consecutive chapters are combined, the first of the combined chapter's title is used in the file name. For example, given an audiobook with the following chapters:
00:00:00 - 00:00:02 | Part 1
00:00:02 - 00:35:00 | Chapter 1
00:35:02 - 01:02:00 | Chapter 2
01:02:00 - 01:02:02 | Part 2
01:02:02 - 01:41:00 | Chapter 3
01:41:00 - 02:05:00 | Chapter 4
The book will be split into the following files:
00:00:00 - 00:35:00 | Book - 01 - Part 1.m4b
00:35:00 - 01:02:00 | Book - 02 - Chapter 2.m4b
01:02:00 - 01:41:00 | Book - 03 - Part 2.m4b
01:41:00 - 02:05:00 | Book - 04 - Chapter 4.m4b
That naming may not be desirable for everyone, but it's an easy change to instead use the last of the combined chapter's title in the file name.
*/
private bool Step_DownloadAudiobookAsMultipleFilesPerChapter()
{
var zeroProgress = Step_DownloadAudiobook_Start();
var chapters = DownloadOptions.ChapterInfo.Chapters.ToList();
// Ensure split files are at least minChapterLength in duration.
var splitChapters = new ChapterInfo(DownloadOptions.ChapterInfo.StartOffset);
var runningTotal = TimeSpan.Zero;
string title = "";
for (int i = 0; i < chapters.Count; i++)
{
if (runningTotal == TimeSpan.Zero)
title = chapters[i].Title;
runningTotal += chapters[i].Duration;
if (runningTotal >= minChapterLength)
{
splitChapters.AddChapter(title, runningTotal);
runningTotal = TimeSpan.Zero;
}
}
// reset, just in case
multiPartFilePaths.Clear();
ConversionResult result;
AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
if (DownloadOptions.OutputFormat == OutputFormat.M4b)
result = ConvertToMultiMp4a(splitChapters);
else
result = ConvertToMultiMp3(splitChapters);
AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
Step_DownloadAudiobook_End(zeroProgress);
return result == ConversionResult.NoErrorsDetected;
}
private ConversionResult ConvertToMultiMp4a(ChapterInfo splitChapters)
{
var chapterCount = 0;
return AaxFile.ConvertToMultiMp4a(splitChapters, newSplitCallback =>
createOutputFileStream(++chapterCount, splitChapters, newSplitCallback),
DownloadOptions.TrimOutputToChapterLength);
}
private ConversionResult ConvertToMultiMp3(ChapterInfo splitChapters)
{
var chapterCount = 0;
return AaxFile.ConvertToMultiMp3(splitChapters, newSplitCallback =>
{
createOutputFileStream(++chapterCount, splitChapters, newSplitCallback);
((NAudio.Lame.LameConfig)newSplitCallback.UserState).ID3.Track = chapterCount.ToString();
}, DownloadOptions.LameConfig, DownloadOptions.TrimOutputToChapterLength);
}
private void createOutputFileStream(int currentChapter, ChapterInfo splitChapters, NewSplitCallback newSplitCallback)
{
var fileName = multipartFileNameCallback(new()
{
OutputFileName = OutputFileName,
PartsPosition = currentChapter,
PartsTotal = splitChapters.Count,
Title = newSplitCallback?.Chapter?.Title
});
fileName = FileUtility.GetValidFilename(fileName);
multiPartFilePaths.Add(fileName);
FileUtility.SaferDelete(fileName);
newSplitCallback.OutputFile = File.Open(fileName, FileMode.OpenOrCreate);
OnFileCreated(fileName);
}
}
}
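A caller-side sketch of the multi-file converter above, showing how a custom multipartFileNameCallback can replace MultiConvertFileProperties.DefaultMultipartFilename. The outFile, cacheDir, and options variables are placeholders, not part of this commit.
// Hypothetical usage sketch; outFile, cacheDir, and options are placeholders.
var multi = new AaxDecrypter.AaxcDownloadMultiConverter(
	outFile, cacheDir, options,
	props => System.IO.Path.ChangeExtension(props.OutputFileName, null)
		+ $" - {props.PartsPosition:D2} of {props.PartsTotal} - {props.Title}"
		+ System.IO.Path.GetExtension(props.OutputFileName));
multi.FileCreated += (_, path) => System.Console.WriteLine($"wrote {path}");
bool ok = multi.Run();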

View file

@@ -0,0 +1,55 @@
using System;
using System.IO;
using AAXClean;
using AAXClean.Codecs;
using Dinah.Core.StepRunner;
using FileManager;
namespace AaxDecrypter
{
public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase
{
protected override StepSequence Steps { get; }
public AaxcDownloadSingleConverter(string outFileName, string cacheDirectory, DownloadOptions dlLic)
: base(outFileName, cacheDirectory, dlLic)
{
Steps = new StepSequence
{
Name = "Download and Convert Aaxc To " + DownloadOptions.OutputFormat,
["Step 1: Get Aaxc Metadata"] = Step_GetMetadata,
["Step 2: Download Decrypted Audiobook"] = Step_DownloadAudiobookAsSingleFile,
["Step 3: Create Cue"] = Step_CreateCue,
["Step 4: Cleanup"] = Step_Cleanup,
};
}
private bool Step_DownloadAudiobookAsSingleFile()
{
var zeroProgress = Step_DownloadAudiobook_Start();
FileUtility.SaferDelete(OutputFileName);
var outputFile = File.Open(OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
OnFileCreated(OutputFileName);
AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
var decryptionResult
= DownloadOptions.OutputFormat == OutputFormat.M4b
? AaxFile.ConvertToMp4a(outputFile, DownloadOptions.ChapterInfo, DownloadOptions.TrimOutputToChapterLength)
: AaxFile.ConvertToMp3(outputFile, DownloadOptions.LameConfig, DownloadOptions.ChapterInfo, DownloadOptions.TrimOutputToChapterLength);
AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
DownloadOptions.ChapterInfo = AaxFile.Chapters;
Step_DownloadAudiobook_End(zeroProgress);
var success = decryptionResult == ConversionResult.NoErrorsDetected && !IsCanceled;
if (success)
base.OnFileCreated(OutputFileName);
return success;
}
}
}
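A sketch of consuming the progress events inherited from AudiobookDownloadBase while running the single-file converter; outFile, cacheDir, and options are placeholders.
// Hypothetical usage sketch; outFile, cacheDir, and options are placeholders.
var single = new AaxDecrypter.AaxcDownloadSingleConverter(outFile, cacheDir, options);
single.RetrievedTitle += (_, title) => System.Console.WriteLine($"Title: {title}");
single.DecryptProgressUpdate += (_, progress) => System.Console.WriteLine($"{progress.ProgressPercentage}%");
single.DecryptTimeRemaining += (_, remaining) => System.Console.WriteLine($"ETA: {remaining}");
if (!single.Run())
	System.Console.WriteLine("Conversion failed");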

View file

@@ -0,0 +1,184 @@
using System;
using System.Collections.Generic;
using System.IO;
using Dinah.Core;
using Dinah.Core.Net.Http;
using Dinah.Core.StepRunner;
using FileManager;
namespace AaxDecrypter
{
public enum OutputFormat { M4b, Mp3 }
public abstract class AudiobookDownloadBase
{
public event EventHandler<string> RetrievedTitle;
public event EventHandler<string> RetrievedAuthors;
public event EventHandler<string> RetrievedNarrators;
public event EventHandler<byte[]> RetrievedCoverArt;
public event EventHandler<DownloadProgress> DecryptProgressUpdate;
public event EventHandler<TimeSpan> DecryptTimeRemaining;
public event EventHandler<string> FileCreated;
protected bool IsCanceled { get; set; }
protected string OutputFileName { get; private set; }
protected DownloadOptions DownloadOptions { get; }
protected NetworkFileStream InputFileStream => (nfsPersister ??= OpenNetworkFileStream()).NetworkFileStream;
// Don't give the property a 'set'. Changing the output file name should be an obvious, deliberate choice; not accidental
protected void SetOutputFileName(string newOutputFileName) => OutputFileName = newOutputFileName;
protected abstract StepSequence Steps { get; }
private NetworkFileStreamPersister nfsPersister;
private string jsonDownloadState { get; }
public string TempFilePath { get; }
protected AudiobookDownloadBase(string outFileName, string cacheDirectory, DownloadOptions dlLic)
{
OutputFileName = ArgumentValidator.EnsureNotNullOrWhiteSpace(outFileName, nameof(outFileName));
var outDir = Path.GetDirectoryName(OutputFileName);
if (!Directory.Exists(outDir))
throw new DirectoryNotFoundException($"Directory does not exist: {nameof(outDir)}");
if (!Directory.Exists(cacheDirectory))
throw new DirectoryNotFoundException($"Directory does not exist: {nameof(cacheDirectory)}");
jsonDownloadState = Path.Combine(cacheDirectory, Path.GetFileName(Path.ChangeExtension(OutputFileName, ".json")));
TempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
DownloadOptions = ArgumentValidator.EnsureNotNull(dlLic, nameof(dlLic));
// delete file after validation is complete
FileUtility.SaferDelete(OutputFileName);
}
public abstract void Cancel();
public virtual void SetCoverArt(byte[] coverArt)
{
if (coverArt is not null)
OnRetrievedCoverArt(coverArt);
}
public bool Run()
{
var (IsSuccess, Elapsed) = Steps.Run();
if (!IsSuccess)
Serilog.Log.Logger.Error("Conversion failed");
return IsSuccess;
}
protected void OnRetrievedTitle(string title)
=> RetrievedTitle?.Invoke(this, title);
protected void OnRetrievedAuthors(string authors)
=> RetrievedAuthors?.Invoke(this, authors);
protected void OnRetrievedNarrators(string narrators)
=> RetrievedNarrators?.Invoke(this, narrators);
protected void OnRetrievedCoverArt(byte[] coverArt)
=> RetrievedCoverArt?.Invoke(this, coverArt);
protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
=> DecryptProgressUpdate?.Invoke(this, downloadProgress);
protected void OnDecryptTimeRemaining(TimeSpan timeRemaining)
=> DecryptTimeRemaining?.Invoke(this, timeRemaining);
protected void OnFileCreated(string path)
=> FileCreated?.Invoke(this, path);
protected void CloseInputFileStream()
{
nfsPersister?.NetworkFileStream?.Close();
nfsPersister?.Dispose();
}
protected bool Step_CreateCue()
{
if (!DownloadOptions.CreateCueSheet) return true;
// not a critical step. its failure should not prevent future steps from running
try
{
var path = Path.ChangeExtension(OutputFileName, ".cue");
path = FileUtility.GetValidFilename(path);
File.WriteAllText(path, Cue.CreateContents(Path.GetFileName(OutputFileName), DownloadOptions.ChapterInfo));
OnFileCreated(path);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCue)}. FAILED");
}
return !IsCanceled;
}
protected bool Step_Cleanup()
{
bool success = !IsCanceled;
if (success)
{
FileUtility.SaferDelete(jsonDownloadState);
if (DownloadOptions.AudibleKey is not null &&
DownloadOptions.AudibleIV is not null &&
DownloadOptions.RetainEncryptedFile)
{
string aaxPath = Path.ChangeExtension(TempFilePath, ".aax");
FileUtility.SaferMove(TempFilePath, aaxPath);
//Write aax decryption key
string keyPath = Path.ChangeExtension(aaxPath, ".key");
FileUtility.SaferDelete(keyPath);
File.WriteAllText(keyPath, $"Key={DownloadOptions.AudibleKey}\r\nIV={DownloadOptions.AudibleIV}");
OnFileCreated(aaxPath);
OnFileCreated(keyPath);
}
else
FileUtility.SaferDelete(TempFilePath);
}
else
{
FileUtility.SaferDelete(OutputFileName);
}
return success;
}
private NetworkFileStreamPersister OpenNetworkFileStream()
{
if (!File.Exists(jsonDownloadState))
return NewNetworkFilePersister();
try
{
var nfsp = new NetworkFileStreamPersister(jsonDownloadState);
// If more than ~1 hour has elapsed since getting the download url, it will expire.
// The new url will be to the same file.
nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl));
return nfsp;
}
catch
{
FileUtility.SaferDelete(jsonDownloadState);
FileUtility.SaferDelete(TempFilePath);
return NewNetworkFilePersister();
}
}
private NetworkFileStreamPersister NewNetworkFilePersister()
{
var headers = new System.Net.WebHeaderCollection
{
{ "User-Agent", DownloadOptions.UserAgent }
};
var networkFileStream = new NetworkFileStream(TempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, headers);
return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
}
}
}

View file

@@ -0,0 +1,62 @@
using System;
using System.IO;
using System.Text;
using AAXClean;
using Dinah.Core;
namespace AaxDecrypter
{
public static class Cue
{
public static string CreateContents(string filePath, ChapterInfo chapters)
{
var stringBuilder = new StringBuilder();
stringBuilder.AppendLine(GetFileLine(filePath, "MP3"));
var startOffset = chapters.StartOffset;
var trackCount = 0;
foreach (var c in chapters.Chapters)
{
var startTime = c.StartOffset - startOffset;
trackCount++;
stringBuilder.AppendLine($"TRACK {trackCount} AUDIO");
stringBuilder.AppendLine($" TITLE \"{c.Title}\"");
stringBuilder.AppendLine($" INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds / 1000d * 75)}");
}
return stringBuilder.ToString();
}
public static void UpdateFileName(FileInfo cueFileInfo, string audioFilePath)
=> UpdateFileName(cueFileInfo.FullName, audioFilePath);
public static void UpdateFileName(string cueFilePath, FileInfo audioFileInfo)
=> UpdateFileName(cueFilePath, audioFileInfo.FullName);
public static void UpdateFileName(FileInfo cueFileInfo, FileInfo audioFileInfo)
=> UpdateFileName(cueFileInfo.FullName, audioFileInfo.FullName);
public static void UpdateFileName(string cueFilePath, string audioFilePath)
{
var cueContents = File.ReadAllLines(cueFilePath);
for (var i = 0; i < cueContents.Length; i++)
{
var line = cueContents[i];
if (!line.Trim().StartsWith("FILE") || !line.Contains(" "))
continue;
var fileTypeBegins = line.LastIndexOf(" ") + 1;
cueContents[i] = GetFileLine(audioFilePath, line[fileTypeBegins..]);
break;
}
File.WriteAllLines(cueFilePath, cueContents);
}
private static string GetFileLine(string filePath, string audioType) => $"FILE {Path.GetFileName(filePath).SurroundWithQuotes()} {audioType}";
}
}
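CreateContents is consumed by Step_CreateCue in AudiobookDownloadBase; the UpdateFileName overloads keep the cue sheet's FILE line in sync if the audio file is renamed afterward. A brief hedged sketch (file names are placeholders):
// Hypothetical sketch; file names are placeholders. chapters is an AAXClean.ChapterInfo.
var cueText = AaxDecrypter.Cue.CreateContents("My Book.m4b", chapters);
System.IO.File.WriteAllText("My Book.cue", cueText);
// later, after the audio file has been renamed:
AaxDecrypter.Cue.UpdateFileName("My Book.cue", "My Book (2022).m4b");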

View file

@@ -0,0 +1,30 @@
using AAXClean;
using Dinah.Core;
namespace AaxDecrypter
{
public class DownloadOptions
{
public string DownloadUrl { get; }
public string UserAgent { get; }
public string AudibleKey { get; init; }
public string AudibleIV { get; init; }
public OutputFormat OutputFormat { get; init; }
public bool TrimOutputToChapterLength { get; init; }
public bool RetainEncryptedFile { get; init; }
public bool StripUnabridged { get; init; }
public bool CreateCueSheet { get; init; }
public ChapterInfo ChapterInfo { get; set; }
public NAudio.Lame.LameConfig LameConfig { get; set; }
public bool Downsample { get; set; }
public bool MatchSourceBitrate { get; set; }
public DownloadOptions(string downloadUrl, string userAgent)
{
DownloadUrl = ArgumentValidator.EnsureNotNullOrEmpty(downloadUrl, nameof(downloadUrl));
UserAgent = ArgumentValidator.EnsureNotNullOrEmpty(userAgent, nameof(userAgent));
// no null/empty check for key/iv. unencrypted files do not have them
}
}
}
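A construction sketch for DownloadOptions. All values are placeholders; per the comment in the constructor, key and IV may be left null for unencrypted files, and the init-only properties mean format choices are fixed at construction.
// Hypothetical sketch; all values are placeholders.
var options = new AaxDecrypter.DownloadOptions(downloadUrl, userAgent)
{
	AudibleKey = key,   // may be null for unencrypted files
	AudibleIV = iv,
	OutputFormat = AaxDecrypter.OutputFormat.M4b,
	TrimOutputToChapterLength = true,
	CreateCueSheet = true,
};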

View file

@@ -0,0 +1,25 @@
using System;
using System.IO;
using FileManager;
namespace AaxDecrypter
{
public class MultiConvertFileProperties
{
public string OutputFileName { get; set; }
public int PartsPosition { get; set; }
public int PartsTotal { get; set; }
public string Title { get; set; }
public static string DefaultMultipartFilename(MultiConvertFileProperties multiConvertFileProperties)
{
var template = Path.ChangeExtension(multiConvertFileProperties.OutputFileName, null) + " - <ch# 0> - <title>" + Path.GetExtension(multiConvertFileProperties.OutputFileName);
var fileNamingTemplate = new FileNamingTemplate(template) { IllegalCharacterReplacements = " " };
fileNamingTemplate.AddParameterReplacement("ch# 0", FileUtility.GetSequenceFormatted(multiConvertFileProperties.PartsPosition, multiConvertFileProperties.PartsTotal));
fileNamingTemplate.AddParameterReplacement("title", multiConvertFileProperties.Title ?? "");
return fileNamingTemplate.GetFilePath();
}
}
}
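An illustrative expectation for DefaultMultipartFilename based on the "<ch# 0> - <title>" template above; the input path and chapter values are placeholders.
// Hypothetical sketch; input values are placeholders.
var props = new AaxDecrypter.MultiConvertFileProperties
{
	OutputFileName = @"C:\Books\My Book.m4b",
	PartsPosition = 2,
	PartsTotal = 12,
	Title = "Chapter 2"
};
var name = AaxDecrypter.MultiConvertFileProperties.DefaultMultipartFilename(props);
// expected to resemble @"C:\Books\My Book - 02 - Chapter 2.m4b"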

View file

@@ -0,0 +1,443 @@
using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
namespace AaxDecrypter
{
/// <summary>
/// A <see cref="CookieContainer"/> for a single Uri.
/// </summary>
public class SingleUriCookieContainer : CookieContainer
{
private Uri baseAddress;
public Uri Uri
{
get => baseAddress;
set
{
baseAddress = new UriBuilder(value.Scheme, value.Host).Uri;
}
}
public CookieCollection GetCookies()
{
return GetCookies(Uri);
}
}
/// <summary>
/// A resumable, simultaneous file downloader and reader.
/// </summary>
public class NetworkFileStream : Stream, IUpdatable
{
public event EventHandler Updated;
#region Public Properties
/// <summary>
/// Location to save the downloaded data.
/// </summary>
[JsonProperty(Required = Required.Always)]
public string SaveFilePath { get; }
/// <summary>
/// Http(s) address of the file to download.
/// </summary>
[JsonProperty(Required = Required.Always)]
public Uri Uri { get; private set; }
/// <summary>
/// All cookies set by caller or by the remote server.
/// </summary>
[JsonProperty(Required = Required.Always)]
public SingleUriCookieContainer CookieContainer { get; }
/// <summary>
/// Http headers to be sent to the server with the request.
/// </summary>
[JsonProperty(Required = Required.Always)]
public WebHeaderCollection RequestHeaders { get; private set; }
/// <summary>
/// The position in <see cref="SaveFilePath"/> that has been written and flushed to disk.
/// </summary>
[JsonProperty(Required = Required.Always)]
public long WritePosition { get; private set; }
/// <summary>
/// The total length of the <see cref="Uri"/> file to download.
/// </summary>
[JsonProperty(Required = Required.Always)]
public long ContentLength { get; private set; }
#endregion
#region Private Properties
private HttpWebRequest HttpRequest { get; set; }
private FileStream _writeFile { get; }
private FileStream _readFile { get; }
private Stream _networkStream { get; set; }
private bool hasBegunDownloading { get; set; }
public bool IsCancelled { get; private set; }
private EventWaitHandle downloadEnded { get; set; }
private EventWaitHandle downloadedPiece { get; set; }
#endregion
#region Constants
//Download buffer size
private const int DOWNLOAD_BUFF_SZ = 4 * 1024;
//NetworkFileStream will flush all data in _writeFile to disk after every
//DATA_FLUSH_SZ bytes are written to the file stream.
private const int DATA_FLUSH_SZ = 1024 * 1024;
#endregion
#region Constructor
/// <summary>
/// A resumable, simultaneous file downloader and reader.
/// </summary>
/// <param name="saveFilePath">Path to a location on disk to save the downloaded data from <paramref name="uri"/></param>
/// <param name="uri">Http(s) address of the file to download.</param>
/// <param name="writePosition">The position in <paramref name="uri"/> to begin downloading.</param>
/// <param name="requestHeaders">Http headers to be sent to the server with the <see cref="HttpWebRequest"/>.</param>
/// <param name="cookies">A <see cref="SingleUriCookieContainer"/> with cookies to send with the <see cref="HttpWebRequest"/>. It will also be populated with any cookies set by the server. </param>
public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, WebHeaderCollection requestHeaders = null, SingleUriCookieContainer cookies = null)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
ArgumentValidator.EnsureNotNullOrWhiteSpace(uri?.AbsoluteUri, nameof(uri));
ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
if (!Directory.Exists(Path.GetDirectoryName(saveFilePath)))
throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist.");
SaveFilePath = saveFilePath;
Uri = uri;
WritePosition = writePosition;
RequestHeaders = requestHeaders ?? new WebHeaderCollection();
CookieContainer = cookies ?? new SingleUriCookieContainer { Uri = uri };
_writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)
{
Position = WritePosition
};
_readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
SetUriForSameFile(uri);
}
#endregion
#region Downloader
/// <summary>
/// Update the <see cref="JsonFilePersister"/>.
/// </summary>
private void Update()
{
RequestHeaders = HttpRequest.Headers;
Updated?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Set a different <see cref="System.Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/>
/// </summary>
/// <param name="uriToSameFile">New <see cref="System.Uri"/> host must match existing host.</param>
public void SetUriForSameFile(Uri uriToSameFile)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile));
if (uriToSameFile.Host != Uri.Host)
throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}");
if (hasBegunDownloading)
throw new InvalidOperationException("Cannot change Uri after download has started.");
Uri = uriToSameFile;
HttpRequest = WebRequest.CreateHttp(Uri);
HttpRequest.CookieContainer = CookieContainer;
HttpRequest.Headers = RequestHeaders;
//If NetworkFileStream is resuming, Headers will already contain a Range entry.
HttpRequest.Headers.Remove("Range");
HttpRequest.AddRange(WritePosition);
}
/// <summary>
/// Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread.
/// </summary>
private void BeginDownloading()
{
downloadEnded = new EventWaitHandle(false, EventResetMode.ManualReset);
if (ContentLength != 0 && WritePosition == ContentLength)
{
hasBegunDownloading = true;
downloadEnded.Set();
return;
}
if (ContentLength != 0 && WritePosition > ContentLength)
throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10}).");
var response = HttpRequest.GetResponse() as HttpWebResponse;
if (response.StatusCode != HttpStatusCode.PartialContent)
throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}.");
//Content length is the length of the range request, and it is only equal
//to the complete file length if requesting Range: bytes=0-
if (WritePosition == 0)
ContentLength = response.ContentLength;
_networkStream = response.GetResponseStream();
downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
//Download the file in the background.
new Thread(() => DownloadFile())
{ IsBackground = true }
.Start();
hasBegunDownloading = true;
return;
}
/// <summary>
/// Download <see cref="Uri"/> to <see cref="SaveFilePath"/>.
/// </summary>
private void DownloadFile()
{
var downloadPosition = WritePosition;
var nextFlush = downloadPosition + DATA_FLUSH_SZ;
var buff = new byte[DOWNLOAD_BUFF_SZ];
do
{
var bytesRead = _networkStream.Read(buff, 0, DOWNLOAD_BUFF_SZ);
_writeFile.Write(buff, 0, bytesRead);
downloadPosition += bytesRead;
if (downloadPosition > nextFlush)
{
_writeFile.Flush();
WritePosition = downloadPosition;
Update();
nextFlush = downloadPosition + DATA_FLUSH_SZ;
downloadedPiece.Set();
}
} while (downloadPosition < ContentLength && !IsCancelled);
_writeFile.Close();
_networkStream.Close();
WritePosition = downloadPosition;
Update();
downloadedPiece.Set();
downloadEnded.Set();
if (!IsCancelled && WritePosition < ContentLength)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
if (WritePosition > ContentLength)
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
}
#endregion
#region Json Converters
public static JsonSerializerSettings GetJsonSerializerSettings()
{
var settings = new JsonSerializerSettings();
settings.Converters.Add(new CookieContainerConverter());
settings.Converters.Add(new WebHeaderCollectionConverter());
return settings;
}
internal class CookieContainerConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
=> objectType == typeof(SingleUriCookieContainer);
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var jObj = JObject.Load(reader);
var result = new SingleUriCookieContainer()
{
Uri = new Uri(jObj["Uri"].Value<string>()),
Capacity = jObj["Capacity"].Value<int>(),
MaxCookieSize = jObj["MaxCookieSize"].Value<int>(),
PerDomainCapacity = jObj["PerDomainCapacity"].Value<int>()
};
var cookieList = jObj["Cookies"].ToList();
foreach (var cookie in cookieList)
{
result.Add(
new Cookie
{
Comment = cookie["Comment"].Value<string>(),
HttpOnly = cookie["HttpOnly"].Value<bool>(),
Discard = cookie["Discard"].Value<bool>(),
Domain = cookie["Domain"].Value<string>(),
Expired = cookie["Expired"].Value<bool>(),
Expires = cookie["Expires"].Value<DateTime>(),
Name = cookie["Name"].Value<string>(),
Path = cookie["Path"].Value<string>(),
Port = cookie["Port"].Value<string>(),
Secure = cookie["Secure"].Value<bool>(),
Value = cookie["Value"].Value<string>(),
Version = cookie["Version"].Value<int>(),
});
}
return result;
}
public override bool CanWrite => true;
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var cookies = value as SingleUriCookieContainer;
var obj = (JObject)JToken.FromObject(value);
var container = cookies.GetCookies();
var propertyNames = container.Select(c => JToken.FromObject(c));
obj.AddFirst(new JProperty("Cookies", new JArray(propertyNames)));
obj.WriteTo(writer);
}
}
internal class WebHeaderCollectionConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
=> objectType == typeof(WebHeaderCollection);
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var jObj = JObject.Load(reader);
var result = new WebHeaderCollection();
foreach (var kvp in jObj)
result.Add(kvp.Key, kvp.Value.Value<string>());
return result;
}
public override bool CanWrite => true;
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var jObj = new JObject();
var type = value.GetType();
var headers = value as WebHeaderCollection;
var jHeaders = headers.AllKeys.Select(k => new JProperty(k, headers[k]));
jObj.Add(jHeaders);
jObj.WriteTo(writer);
}
}
#endregion
#region Download Stream Reader
[JsonIgnore]
public override bool CanRead => true;
[JsonIgnore]
public override bool CanSeek => true;
[JsonIgnore]
public override bool CanWrite => false;
[JsonIgnore]
public override long Length
{
get
{
if (!hasBegunDownloading)
BeginDownloading();
return ContentLength;
}
}
[JsonIgnore]
public override long Position { get => _readFile.Position; set => Seek(value, SeekOrigin.Begin); }
[JsonIgnore]
public override bool CanTimeout => false;
[JsonIgnore]
public override int ReadTimeout { get => base.ReadTimeout; set => base.ReadTimeout = value; }
[JsonIgnore]
public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; }
public override void Flush() => throw new NotImplementedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (!hasBegunDownloading)
BeginDownloading();
var toRead = Math.Min(count, Length - Position);
WaitToPosition(Position + toRead);
return _readFile.Read(buffer, offset, count);
}
public override long Seek(long offset, SeekOrigin origin)
{
var newPosition = origin switch
{
SeekOrigin.Current => Position + offset,
SeekOrigin.End => ContentLength + offset,
_ => offset,
};
WaitToPosition(newPosition);
return _readFile.Position = newPosition;
}
/// <summary>
/// Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns.
/// </summary>
/// <param name="requiredPosition">The minimum required flished data length in <see cref="SaveFilePath"/>.</param>
private void WaitToPosition(long requiredPosition)
{
while (requiredPosition > WritePosition && !IsCancelled && hasBegunDownloading && !downloadedPiece.WaitOne(1000)) ;
}
public override void Close()
{
IsCancelled = true;
while (downloadEnded is not null && !downloadEnded.WaitOne(1000)) ;
_readFile.Close();
_writeFile.Close();
_networkStream?.Close();
Update();
}
#endregion
~NetworkFileStream()
{
downloadEnded?.Close();
downloadedPiece?.Close();
}
}
}
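A minimal read-while-downloading sketch for NetworkFileStream; the path and URL are placeholders. The first Read (or Length access) kicks off the background download, and reads block in WaitToPosition until enough data has been flushed to disk.
// Hypothetical sketch; path and URL are placeholders.
using var nfs = new AaxDecrypter.NetworkFileStream(
	@"C:\cache\book.aaxc",
	new System.Uri("https://example.com/book.aaxc"));
var buffer = new byte[4096];
int read;
while ((read = nfs.Read(buffer, 0, buffer.Length)) > 0)
{
	// consume the downloaded bytes as they become available
}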

View file

@@ -0,0 +1,23 @@
using Dinah.Core.IO;
using Newtonsoft.Json;
namespace AaxDecrypter
{
internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream>
{
/// <summary>Alias for Target </summary>
public NetworkFileStream NetworkFileStream => Target;
/// <summary>uses path. create file if doesn't yet exist</summary>
public NetworkFileStreamPersister(NetworkFileStream networkFileStream, string path, string jsonPath = null)
: base(networkFileStream, path, jsonPath) { }
/// <summary>load from existing file</summary>
public NetworkFileStreamPersister(string path, string jsonPath = null)
: base(path, jsonPath) { }
protected override JsonSerializerSettings GetSerializerSettings() => NetworkFileStream.GetJsonSerializerSettings();
}
}

View file

@ -0,0 +1,76 @@
using System;
using System.Threading;
using Dinah.Core.Net.Http;
using Dinah.Core.StepRunner;
using FileManager;
namespace AaxDecrypter
{
public class UnencryptedAudiobookDownloader : AudiobookDownloadBase
{
protected override StepSequence Steps { get; }
public UnencryptedAudiobookDownloader(string outFileName, string cacheDirectory, DownloadOptions dlLic)
: base(outFileName, cacheDirectory, dlLic)
{
Steps = new StepSequence
{
Name = "Download Mp3 Audiobook",
["Step 1: Get Mp3 Metadata"] = Step_GetMetadata,
["Step 2: Download Audiobook"] = Step_DownloadAudiobookAsSingleFile,
["Step 3: Create Cue"] = Step_CreateCue,
["Step 4: Cleanup"] = Step_Cleanup,
};
}
public override void Cancel()
{
IsCanceled = true;
CloseInputFileStream();
}
protected bool Step_GetMetadata()
{
OnRetrievedCoverArt(null);
return !IsCanceled;
}
private bool Step_DownloadAudiobookAsSingleFile()
{
DateTime startTime = DateTime.Now;
// MUST put InputFileStream.Length first, because it starts background downloader.
while (InputFileStream.Length > InputFileStream.WritePosition && !InputFileStream.IsCancelled)
{
var rate = InputFileStream.WritePosition / (DateTime.Now - startTime).TotalSeconds;
var estTimeRemaining = (InputFileStream.Length - InputFileStream.WritePosition) / rate;
if (double.IsNormal(estTimeRemaining))
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
var progressPercent = (double)InputFileStream.WritePosition / InputFileStream.Length;
OnDecryptProgressUpdate(
new DownloadProgress
{
ProgressPercentage = 100 * progressPercent,
BytesReceived = (long)(InputFileStream.Length * progressPercent),
TotalBytesToReceive = InputFileStream.Length
});
Thread.Sleep(200);
}
CloseInputFileStream();
var realOutputFileName = FileUtility.SaferMoveToValidPath(InputFileStream.SaveFilePath, OutputFileName);
SetOutputFileName(realOutputFileName);
OnFileCreated(realOutputFileName);
return !IsCanceled;
}
}
}

View file

@ -0,0 +1,4 @@
{
"//": "https://github.com/BalassaMarton/MSBump",
BumpRevision: true
}

View file

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
<Version>7.2.0.1</Version>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MSBump" Version="2.3.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Octokit" Version="0.51.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\AudibleUtilities\AudibleUtilities.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,411 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using AudibleUtilities;
using Dinah.Core;
using Dinah.Core.IO;
using Dinah.Core.Logging;
using LibationFileManager;
using Newtonsoft.Json.Linq;
using Serilog;
namespace AppScaffolding
{
public static class LibationScaffolding
{
// AppScaffolding
private static Assembly _executingAssembly;
private static Assembly ExecutingAssembly
=> _executingAssembly ??= Assembly.GetExecutingAssembly();
// LibationWinForms or LibationCli
private static Assembly _entryAssembly;
private static Assembly EntryAssembly
=> _entryAssembly ??= Assembly.GetEntryAssembly();
// previously: System.Reflection.Assembly.GetExecutingAssembly().GetName().Version;
private static Version _buildVersion;
public static Version BuildVersion
=> _buildVersion
??= new[] { ExecutingAssembly.GetName(), EntryAssembly.GetName() }
.Max(a => a.Version);
/// <summary>Run migrations before loading Configuration for the first time. Then load and return Configuration</summary>
public static Configuration RunPreConfigMigrations()
{
// must occur before access to Configuration instance
Migrations.migrate_to_v5_2_0__pre_config();
//***********************************************//
// //
// do not use Configuration before this line //
// //
//***********************************************//
return Configuration.Instance;
}
/// <summary>most migrations go in here</summary>
public static void RunPostConfigMigrations(Configuration config)
{
AudibleApiStorage.EnsureAccountsSettingsFileExists();
PopulateMissingConfigValues(config);
//
// migrations go below here
//
Migrations.migrate_to_v6_6_9(config);
}
public static void PopulateMissingConfigValues(Configuration config)
{
config.InProgress ??= Configuration.WinTemp;
if (!config.Exists(nameof(config.AllowLibationFixup)))
config.AllowLibationFixup = true;
if (!config.Exists(nameof(config.CreateCueSheet)))
config.CreateCueSheet = true;
if (!config.Exists(nameof(config.RetainAaxFile)))
config.RetainAaxFile = false;
if (!config.Exists(nameof(config.SplitFilesByChapter)))
config.SplitFilesByChapter = false;
if (!config.Exists(nameof(config.StripUnabridged)))
config.StripUnabridged = false;
if (!config.Exists(nameof(config.StripAudibleBrandAudio)))
config.StripAudibleBrandAudio = false;
if (!config.Exists(nameof(config.DecryptToLossy)))
config.DecryptToLossy = false;
if (!config.Exists(nameof(config.LameTargetBitrate)))
config.LameTargetBitrate = false;
if (!config.Exists(nameof(config.LameDownsampleMono)))
config.LameDownsampleMono = true;
if (!config.Exists(nameof(config.LameBitrate)))
config.LameBitrate = 64;
if (!config.Exists(nameof(config.LameConstantBitrate)))
config.LameConstantBitrate = false;
if (!config.Exists(nameof(config.LameMatchSourceBR)))
config.LameMatchSourceBR = true;
if (!config.Exists(nameof(config.LameVBRQuality)))
config.LameVBRQuality = 2;
if (!config.Exists(nameof(config.BadBook)))
config.BadBook = Configuration.BadBookAction.Ask;
if (!config.Exists(nameof(config.ShowImportedStats)))
config.ShowImportedStats = true;
if (!config.Exists(nameof(config.ImportEpisodes)))
config.ImportEpisodes = true;
if (!config.Exists(nameof(config.DownloadEpisodes)))
config.DownloadEpisodes = true;
if (!config.Exists(nameof(config.FolderTemplate)))
config.FolderTemplate = Templates.Folder.DefaultTemplate;
if (!config.Exists(nameof(config.FileTemplate)))
config.FileTemplate = Templates.File.DefaultTemplate;
if (!config.Exists(nameof(config.ChapterFileTemplate)))
config.ChapterFileTemplate = Templates.ChapterFile.DefaultTemplate;
}
/// <summary>Initialize logging. Run after migration</summary>
public static void RunPostMigrationScaffolding(Configuration config)
{
ensureSerilogConfig(config);
configureLogging(config);
logStartupState(config);
}
private static void ensureSerilogConfig(Configuration config)
{
if (config.GetObject("Serilog") is not null)
return;
var serilogObj = new JObject
{
{ "MinimumLevel", "Information" },
{ "WriteTo", new JArray
{
// new JObject { {"Name", "Console" } }, // this has caused more problems than it's solved
new JObject
{
{ "Name", "File" },
{ "Args",
new JObject
{
// for this sink to work, a path must be provided. we override this below
{ "path", Path.Combine(config.LibationFiles, "_Log.log") },
{ "rollingInterval", "Month" },
// Serilog template formatting examples
// - default: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {Message:lj}{NewLine}{Exception}"
// output example: 2019-11-26 08:48:40.224 -05:00 [DBG] Begin Libation
// - with class and method info: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception}";
// output example: 2019-11-26 08:48:40.224 -05:00 [DBG] (at LibationWinForms.Program.init()) Begin Libation
// {Properties:j} needed for expanded exception logging
{ "outputTemplate", "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}" }
}
}
}
}
},
// better exception logging with: Serilog.Exceptions library -- WithExceptionDetails
{ "Using", new JArray{ "Dinah.Core", "Serilog.Exceptions" } }, // dll's name, NOT namespace
{ "Enrich", new JArray{ "WithCaller", "WithExceptionDetails" } },
};
config.SetObject("Serilog", serilogObj);
}
// to restore original: Console.SetOut(origOut);
private static TextWriter origOut { get; } = Console.Out;
private static void configureLogging(Configuration config)
{
config.ConfigureLogging();
// capture most Console.WriteLine() and write to serilog. See below tests for details.
// Some dependencies print helpful info via Console.WriteLine. We'd like to log it.
//
// If Serilog also writes to Console, this might be asking for trouble. ie: infinite loops.
// To use that way, SerilogTextWriter needs to be more robust and tested. Esp the Write() methods.
// However, empirical testing so far has shown no issues.
Console.SetOut(new MultiTextWriter(origOut, new SerilogTextWriter()));
#region Console => Serilog tests
/*
// all below apply to "Console." and "Console.Out."
// captured
Console.WriteLine("str");
Console.WriteLine(new { a = "anon" });
Console.WriteLine("{0}", "format");
Console.WriteLine("{0}{1}", "zero|", "one");
Console.WriteLine("{0}{1}{2}", "zero|", "one|", "two");
Console.WriteLine("{0}", new object[] { "arr" });
// not captured
Console.WriteLine();
Console.WriteLine(true);
Console.WriteLine('0');
Console.WriteLine(1);
Console.WriteLine(2m);
Console.WriteLine(3f);
Console.WriteLine(4d);
Console.WriteLine(5L);
Console.WriteLine((uint)6);
Console.WriteLine((ulong)7);
Console.Write("str");
Console.Write(true);
Console.Write('0');
Console.Write(1);
Console.Write(2m);
Console.Write(3f);
Console.Write(4d);
Console.Write(5L);
Console.Write((uint)6);
Console.Write((ulong)7);
Console.Write(new { a = "anon" });
Console.Write("{0}", "format");
Console.Write("{0}{1}", "zero|", "one");
Console.Write("{0}{1}{2}", "zero|", "one|", "two");
Console.Write("{0}", new object[] { "arr" });
*/
#endregion
// .Here() captures debug info via System.Runtime.CompilerServices attributes. Warning: expensive
//var withLineNumbers_outputTemplate = "[{Timestamp:HH:mm:ss} {Level}] {SourceContext}{NewLine}{Message}{NewLine}in method {MemberName} at {FilePath}:{LineNumber}{NewLine}{Exception}{NewLine}";
//Log.Logger.Here().Debug("Begin Libation. Debug with line numbers");
}
private static void logStartupState(Configuration config)
{
// begin logging session with a form feed
Log.Logger.Information("\r\n\f");
Log.Logger.Information("Begin. {@DebugInfo}", new
{
AppName = EntryAssembly.GetName().Name,
Version = BuildVersion.ToString(),
#if DEBUG
Mode = "Debug",
#else
Mode = "Release",
#endif
LogLevel_Verbose_Enabled = Log.Logger.IsVerboseEnabled(),
LogLevel_Debug_Enabled = Log.Logger.IsDebugEnabled(),
LogLevel_Information_Enabled = Log.Logger.IsInformationEnabled(),
LogLevel_Warning_Enabled = Log.Logger.IsWarningEnabled(),
LogLevel_Error_Enabled = Log.Logger.IsErrorEnabled(),
LogLevel_Fatal_Enabled = Log.Logger.IsFatalEnabled(),
config.LibationFiles,
AudibleFileStorage.BooksDirectory,
config.InProgress,
AudibleFileStorage.DownloadsInProgressDirectory,
DownloadsInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DownloadsInProgressDirectory).Count(),
AudibleFileStorage.DecryptInProgressDirectory,
DecryptInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DecryptInProgressDirectory).Count(),
});
}
public static (bool hasUpgrade, string zipUrl, string htmlUrl, string zipName) GetLatestRelease()
{
(bool, string, string, string) isFalse = (false, null, null, null);
// null if the request timed out or failed
var latest = getLatestRelease(TimeSpan.FromSeconds(10));
if (latest is null)
return isFalse;
var latestVersionString = latest.TagName.Trim('v');
if (!Version.TryParse(latestVersionString, out var latestRelease))
return isFalse;
// we're up to date
if (latestRelease <= BuildVersion)
return isFalse;
// we have an update
var zip = latest.Assets.FirstOrDefault(a => a.BrowserDownloadUrl.EndsWith(".zip"));
var zipUrl = zip?.BrowserDownloadUrl;
Log.Logger.Information("Update available: {@DebugInfo}", new
{
latestRelease = latestRelease.ToString(),
latest.HtmlUrl,
zipUrl
});
return (true, zipUrl, latest.HtmlUrl, zip.Name);
}
private static Octokit.Release getLatestRelease(TimeSpan timeout)
{
try
{
var task = System.Threading.Tasks.Task.Run(() => getLatestRelease());
if (task.Wait(timeout))
return task.Result;
Log.Logger.Information("Timed out");
}
catch (AggregateException aggEx)
{
Log.Logger.Error(aggEx, "Checking for new version too often");
}
return null;
}
private static Octokit.Release getLatestRelease()
{
var gitHubClient = new Octokit.GitHubClient(new Octokit.ProductHeaderValue("Libation"));
// https://octokitnet.readthedocs.io/en/latest/releases/
var releases = gitHubClient.Repository.Release.GetAll("rmcrackan", "Libation").GetAwaiter().GetResult();
var latest = releases.First(r => !r.Draft && !r.Prerelease);
return latest;
}
}
internal static class Migrations
{
#region migrate to v5.2.0
// get rid of meta-directories, combine DownloadsInProgressEnum and DecryptInProgressEnum => InProgress
public static void migrate_to_v5_2_0__pre_config()
{
{
var settingsKey = "DownloadsInProgressEnum";
if (UNSAFE_MigrationHelper.Settings_TryGet(settingsKey, out var value))
{
UNSAFE_MigrationHelper.Settings_Delete(settingsKey);
UNSAFE_MigrationHelper.Settings_Insert("InProgress", translatePath(value));
}
}
{
UNSAFE_MigrationHelper.Settings_Delete("DecryptInProgressEnum");
}
{ // appsettings.json
var appSettingsKey = UNSAFE_MigrationHelper.LIBATION_FILES_KEY;
if (UNSAFE_MigrationHelper.APPSETTINGS_TryGet(appSettingsKey, out var value))
UNSAFE_MigrationHelper.APPSETTINGS_Update(appSettingsKey, translatePath(value));
}
}
private static string translatePath(string path)
=> path switch
{
"AppDir" => @".\LibationFiles",
"MyDocs" => Path.GetFullPath(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), "LibationFiles")),
"UserProfile" => Path.GetFullPath(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), "Libation")),
"WinTemp" => Path.GetFullPath(Path.Combine(Path.GetTempPath(), "Libation")),
_ => path
};
#endregion
public static void migrate_to_v6_6_9(Configuration config)
{
var writeToPath = $"Serilog.WriteTo";
// remove WriteTo[].Name == Console
{
if (UNSAFE_MigrationHelper.Settings_TryGetArrayLength(writeToPath, out var length1))
{
for (var i = length1 - 1; i >= 0; i--)
{
var exists = UNSAFE_MigrationHelper.Settings_TryGetFromJsonPath($"{writeToPath}[{i}].Name", out var value);
if (exists && value == "Console")
UNSAFE_MigrationHelper.Settings_RemoveFromArray(writeToPath, i);
}
}
}
// add Serilog.Exceptions -- WithExceptionDetails
{
// outputTemplate should contain "{Properties:j}"
{
// re-calculate. previous loop may have changed the length
if (UNSAFE_MigrationHelper.Settings_TryGetArrayLength(writeToPath, out var length2))
{
var propertyName = "outputTemplate";
for (var i = 0; i < length2; i++)
{
var jsonPath = $"{writeToPath}[{i}].Args";
var exists = UNSAFE_MigrationHelper.Settings_TryGetFromJsonPath($"{jsonPath}.{propertyName}", out var value);
var newValue = "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}";
if (exists && value != newValue)
UNSAFE_MigrationHelper.Settings_SetWithJsonPath(jsonPath, propertyName, newValue);
}
}
}
// Serilog.Using must include "Serilog.Exceptions"
UNSAFE_MigrationHelper.Settings_AddUniqueToArray("Serilog.Using", "Serilog.Exceptions");
// Serilog.Enrich must include "WithExceptionDetails"
UNSAFE_MigrationHelper.Settings_AddUniqueToArray("Serilog.Enrich", "WithExceptionDetails");
}
}
}
}
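Based on the method summaries above, the intended startup order is: pre-config migrations, load Configuration, post-config migrations, then logging scaffolding. A sketch of how an entry point (LibationWinForms or LibationCli, per the comment near the top of this file) would likely call these; the exact call sites are not part of this diff.
// Call-order sketch implied by the summaries; not a verbatim copy of any entry point in this commit.
var config = AppScaffolding.LibationScaffolding.RunPreConfigMigrations();
AppScaffolding.LibationScaffolding.RunPostConfigMigrations(config);
AppScaffolding.LibationScaffolding.RunPostMigrationScaffolding(config);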

View file

@@ -0,0 +1,271 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Dinah.Core;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace AppScaffolding
{
/// <summary>
/// directly manipulates settings files without going through domain logic.
/// for migrations only. use with caution.
/// </summary>
internal static class UNSAFE_MigrationHelper
{
#region appsettings.json
private static string APPSETTINGS_JSON { get; } = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), "appsettings.json");
public static bool APPSETTINGS_Json_Exists => File.Exists(APPSETTINGS_JSON);
public static bool APPSETTINGS_TryGet(string key, out string value)
{
bool success = false;
JToken val = null;
process_APPSETTINGS_Json(jObj => success = jObj.TryGetValue(key, out val), false);
value = success ? val.Value<string>() : null;
return success;
}
/// <summary>only insert if not exists</summary>
public static void APPSETTINGS_Insert(string key, string value)
=> process_APPSETTINGS_Json(jObj => jObj.TryAdd(key, value));
/// <summary>only update if exists</summary>
public static void APPSETTINGS_Update(string key, string value)
=> process_APPSETTINGS_Json(jObj => {
if (jObj.ContainsKey(key))
jObj[key] = value;
});
/// <summary>only delete if exists</summary>
public static void APPSETTINGS_Delete(string key)
=> process_APPSETTINGS_Json(jObj => {
if (jObj.ContainsKey(key))
jObj.Remove(key);
});
/// <param name="save">True: save if contents changed. False: no not attempt save</param>
private static void process_APPSETTINGS_Json(Action<JObject> action, bool save = true)
{
// nothing to do if the file doesn't exist
if (!APPSETTINGS_Json_Exists)
return;
var startingContents = File.ReadAllText(APPSETTINGS_JSON);
JObject jObj;
try
{
jObj = JObject.Parse(startingContents);
}
catch
{
return;
}
action(jObj);
if (!save)
return;
// only save if different
var endingContents_indented = jObj.ToString(Formatting.Indented);
var endingContents_compact = jObj.ToString(Formatting.None);
if (startingContents.EqualsInsensitive(endingContents_indented) || startingContents.EqualsInsensitive(endingContents_compact))
return;
File.WriteAllText(APPSETTINGS_JSON, endingContents_indented);
System.Threading.Thread.Sleep(100);
}
#endregion
#region Settings.json
public const string LIBATION_FILES_KEY = "LibationFiles";
private const string SETTINGS_JSON = "Settings.json";
public static string SettingsJsonPath
{
get
{
var success = APPSETTINGS_TryGet(LIBATION_FILES_KEY, out var value);
return !success || value is null ? null : Path.Combine(value, SETTINGS_JSON);
}
}
public static bool SettingsJson_Exists => SettingsJsonPath is not null && File.Exists(SettingsJsonPath);
public static bool Settings_TryGet(string key, out string value)
{
bool success = false;
JToken val = null;
process_SettingsJson(jObj => success = jObj.TryGetValue(key, out val), false);
value = success ? val.Value<string>() : null;
return success;
}
public static bool Settings_JsonPathIsType(string jsonPath, JTokenType jTokenType)
{
JToken val = null;
process_SettingsJson(jObj => val = jObj.SelectToken(jsonPath), false);
return val?.Type == jTokenType;
}
public static bool Settings_TryGetFromJsonPath(string jsonPath, out string value)
{
JToken val = null;
process_SettingsJson(jObj => val = jObj.SelectToken(jsonPath), false);
if (val?.Type == JTokenType.String)
{
value = val.Value<string>();
return true;
}
else
{
value = null;
return false;
}
}
public static void Settings_SetWithJsonPath(string jsonPath, string propertyName, string newValue)
{
if (!Settings_TryGetFromJsonPath($"{jsonPath}.{propertyName}", out _))
return;
process_SettingsJson(jObj =>
{
var token = jObj.SelectToken(jsonPath);
if (token is null
|| token is not JObject o
|| o[propertyName] is null)
return;
var oldValue = token.Value<string>(propertyName);
if (oldValue != newValue)
token[propertyName] = newValue;
});
}
public static bool Settings_TryGetArrayLength(string jsonPath, out int length)
{
length = 0;
if (!Settings_JsonPathIsType(jsonPath, JTokenType.Array))
return false;
JArray array = null;
process_SettingsJson(jObj => array = (JArray)jObj.SelectToken(jsonPath));
length = array.Count;
return true;
}
public static void Settings_AddToArray(string jsonPath, string newValue)
{
if (!Settings_JsonPathIsType(jsonPath, JTokenType.Array))
return;
process_SettingsJson(jObj =>
{
var array = (JArray)jObj.SelectToken(jsonPath);
array.Add(newValue);
});
}
/// <summary>Do not add if already exists</summary>
public static void Settings_AddUniqueToArray(string arrayPath, string newValue)
{
if (!Settings_TryGetArrayLength(arrayPath, out var qty))
return;
for (var i = 0; i < qty; i++)
{
var exists = Settings_TryGetFromJsonPath($"{arrayPath}[{i}]", out var value);
if (exists && value == newValue)
return;
}
Settings_AddToArray(arrayPath, newValue);
}
/// <summary>only remove if the array exists and the position is in range</summary>
public static void Settings_RemoveFromArray(string jsonPath, int position)
{
if (!Settings_JsonPathIsType(jsonPath, JTokenType.Array))
return;
process_SettingsJson(jObj =>
{
var array = (JArray)jObj.SelectToken(jsonPath);
if (position < array.Count)
array.RemoveAt(position);
});
}
/// <summary>only insert if not exists</summary>
public static void Settings_Insert(string key, string value)
=> process_SettingsJson(jObj => jObj.TryAdd(key, value));
/// <summary>only update if exists</summary>
public static void Settings_Update(string key, string value)
=> process_SettingsJson(jObj => {
if (jObj.ContainsKey(key))
jObj[key] = value;
});
/// <summary>only delete if exists</summary>
public static void Settings_Delete(string key)
=> process_SettingsJson(jObj => {
if (jObj.ContainsKey(key))
jObj.Remove(key);
});
/// <param name="save">True: save if contents changed. False: no not attempt save</param>
private static void process_SettingsJson(Action<JObject> action, bool save = true)
{
// nothing to do if the file doesn't exist
if (!SettingsJson_Exists)
return;
var startingContents = File.ReadAllText(SettingsJsonPath);
JObject jObj;
try
{
jObj = JObject.Parse(startingContents);
}
catch
{
return;
}
action(jObj);
if (!save)
return;
// only save if different
var endingContents_indented = jObj.ToString(Formatting.Indented);
var endingContents_compact = jObj.ToString(Formatting.None);
if (startingContents.EqualsInsensitive(endingContents_indented) || startingContents.EqualsInsensitive(endingContents_compact))
return;
File.WriteAllText(SettingsJsonPath, endingContents_indented);
System.Threading.Thread.Sleep(100);
}
#endregion
}
}

View file

@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="CsvHelper" Version="27.2.1" />
<PackageReference Include="NPOI" Version="2.5.6" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\DtoImporterService\DtoImporterService.csproj" />
<ProjectReference Include="..\LibationSearchEngine\LibationSearchEngine.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,21 @@
using System;
using System.Collections.Generic;
using DataLayer;
using LibationFileManager;
namespace ApplicationServices
{
public static class DbContexts
{
/// <summary>Use for fully functional context, incl. SaveChanges(). For query-only, use the other method</summary>
public static LibationContext GetContext()
=> LibationContext.Create(SqliteStorage.ConnectionString);
/// <summary>Use for full library querying. No lazy loading</summary>
public static List<LibraryBook> GetLibrary_Flat_NoTracking()
{
using var context = GetContext();
return context.GetLibrary_Flat_NoTracking();
}
}
}
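A brief usage sketch matching the two summaries above: the flat, no-tracking snapshot for read-only queries, and a disposable full context when SaveChanges() is needed.
// Hypothetical sketch based on the summaries above.
var library = ApplicationServices.DbContexts.GetLibrary_Flat_NoTracking();   // read-only snapshot
using var context = ApplicationServices.DbContexts.GetContext();             // full context, incl. SaveChanges()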

View file

@ -0,0 +1,331 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AudibleApi;
using AudibleUtilities;
using DataLayer;
using Dinah.Core;
using DtoImporterService;
using LibationFileManager;
using Serilog;
using static DtoImporterService.PerfLogger;
namespace ApplicationServices
{
public static class LibraryCommands
{
public static event EventHandler<int> ScanBegin;
public static event EventHandler ScanEnd;
public static bool Scanning { get; private set; }
private static object _lock { get; } = new();
static LibraryCommands()
{
ScanBegin += (_, __) => Scanning = true;
ScanEnd += (_, __) => Scanning = false;
}
public static async Task<List<LibraryBook>> FindInactiveBooks(Func<Account, Task<ApiExtended>> apiExtendedfunc, List<LibraryBook> existingLibrary, params Account[] accounts)
{
logRestart();
//These are the minimum response groups required for the
//library scanner to pass all validation and filtering.
var libraryResponseGroups =
LibraryOptions.ResponseGroupOptions.ProductAttrs |
LibraryOptions.ResponseGroupOptions.ProductDesc |
LibraryOptions.ResponseGroupOptions.Relationships;
if (accounts is null || accounts.Length == 0)
return new List<LibraryBook>();
try
{
logTime($"pre {nameof(scanAccountsAsync)} all");
var libraryItems = await scanAccountsAsync(apiExtendedfunc, accounts, libraryResponseGroups);
logTime($"post {nameof(scanAccountsAsync)} all");
var totalCount = libraryItems.Count;
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
var missingBookList = existingLibrary.Where(b => !libraryItems.Any(i => i.DtoItem.Asin == b.Book.AudibleProductId)).ToList();
return missingBookList;
}
catch (AudibleApi.Authentication.LoginFailedException lfEx)
{
lfEx.SaveFiles(Configuration.Instance.LibationFiles);
// nuget Serilog.Exceptions would automatically log custom properties
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
// https://github.com/RehanSaeed/Serilog.Exceptions
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
Log.Logger.Error(lfEx, "Error scanning library. Login failed. {@DebugInfo}", new
{
lfEx.RequestUrl,
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
lfEx.ResponseInputFields,
lfEx.ResponseBodyFilePaths
});
throw;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error scanning library");
throw;
}
finally
{
stop();
var putBreakPointHere = logOutput;
}
}
#region FULL LIBRARY scan and import
public static async Task<(int totalCount, int newCount)> ImportAccountAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, params Account[] accounts)
{
logRestart();
if (accounts is null || accounts.Length == 0)
return (0, 0);
try
{
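// only one scan may run at a time; ScanBegin's handler (see static ctor) flips Scanning to true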
lock (_lock)
{
if (Scanning)
return (0, 0);
ScanBegin?.Invoke(null, accounts.Length);
}
logTime($"pre {nameof(scanAccountsAsync)} all");
var importItems = await scanAccountsAsync(apiExtendedfunc, accounts, LibraryOptions.ResponseGroupOptions.ALL_OPTIONS);
logTime($"post {nameof(scanAccountsAsync)} all");
var totalCount = importItems.Count;
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
if (totalCount == 0)
return default;
Log.Logger.Information("Begin long-running import");
logTime($"pre {nameof(importIntoDbAsync)}");
var newCount = await importIntoDbAsync(importItems);
logTime($"post {nameof(importIntoDbAsync)}");
Log.Logger.Information($"Import complete. New count {newCount}");
return (totalCount, newCount);
}
catch (AudibleApi.Authentication.LoginFailedException lfEx)
{
lfEx.SaveFiles(Configuration.Instance.LibationFiles);
// nuget Serilog.Exceptions would automatically log custom properties
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
// https://github.com/RehanSaeed/Serilog.Exceptions
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
Log.Logger.Error(lfEx, "Error importing library. Login failed. {@DebugInfo}", new
{
lfEx.RequestUrl,
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
lfEx.ResponseInputFields,
lfEx.ResponseBodyFilePaths
});
throw;
}
catch (Exception ex)
{
Log.Logger.Error(ex, "Error importing library");
throw;
}
finally
{
stop();
var putBreakPointHere = logOutput;
ScanEnd?.Invoke(null, null);
}
}
private static async Task<List<ImportItem>> scanAccountsAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, Account[] accounts, LibraryOptions.ResponseGroupOptions libraryResponseGroups)
{
var tasks = new List<Task<List<ImportItem>>>();
foreach (var account in accounts)
{
// get APIs in serial b/c of logins. do NOT move inside of parallel (Task.WhenAll)
var apiExtended = await apiExtendedfunc(account);
// add scanAccountAsync as a TASK: do not await
tasks.Add(scanAccountAsync(apiExtended, account, libraryResponseGroups));
}
// import library in parallel
var arrayOfLists = await Task.WhenAll(tasks);
var importItems = arrayOfLists.SelectMany(a => a).ToList();
return importItems;
}
private static async Task<List<ImportItem>> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions.ResponseGroupOptions libraryResponseGroups)
{
ArgumentValidator.EnsureNotNull(account, nameof(account));
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
{
Account = account?.MaskedLogEntry ?? "[null]"
});
logTime($"pre scanAccountAsync {account.AccountName}");
var dtoItems = await apiExtended.GetLibraryValidatedAsync(libraryResponseGroups, Configuration.Instance.ImportEpisodes);
logTime($"post scanAccountAsync {account.AccountName} qty: {dtoItems.Count}");
return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = account.Locale?.Name }).ToList();
}
private static async Task<int> importIntoDbAsync(List<ImportItem> importItems)
{
logTime("importIntoDbAsync -- pre db");
using var context = DbContexts.GetContext();
var libraryBookImporter = new LibraryBookImporter(context);
var newCount = await Task.Run(() => libraryBookImporter.Import(importItems));
logTime("importIntoDbAsync -- post Import()");
int qtyChanges = saveChanges(context);
logTime("importIntoDbAsync -- post SaveChanges");
// this is any changes at all to the database, not just new books
if (qtyChanges > 0)
await Task.Run(() => finalizeLibrarySizeChange());
logTime("importIntoDbAsync -- post finalizeLibrarySizeChange");
return newCount;
}
private static int saveChanges(LibationContext context)
{
try
{
return context.SaveChanges();
}
catch (Microsoft.EntityFrameworkCore.DbUpdateException ex)
{
// DbUpdateException exceptions can wreck serilog. Condense it until we can find a better solution. I suspect the culprit is the "WithExceptionDetails" serilog extension
static string format(Exception ex) => $"\r\nMessage: {ex.Message}\r\nStack Trace:\r\n{ex.StackTrace}";
var msg = "Microsoft.EntityFrameworkCore.DbUpdateException";
if (ex.InnerException is null)
throw new Exception($"{msg}{format(ex)}");
throw new Exception(
$"{msg}{format(ex)}",
new Exception($"Inner Exception{format(ex.InnerException)}"));
}
}
#endregion
#region remove books
public static Task<List<LibraryBook>> RemoveBooksAsync(List<string> idsToRemove) => Task.Run(() => removeBooks(idsToRemove));
private static List<LibraryBook> removeBooks(List<string> idsToRemove)
{
using var context = DbContexts.GetContext();
var libBooks = context.GetLibrary_Flat_NoTracking();
var removeLibraryBooks = libBooks.Where(lb => idsToRemove.Contains(lb.Book.AudibleProductId)).ToList();
context.LibraryBooks.RemoveRange(removeLibraryBooks);
context.Books.RemoveRange(removeLibraryBooks.Select(lb => lb.Book));
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
finalizeLibrarySizeChange();
return removeLibraryBooks;
}
#endregion
// call this whenever books are added or removed from library
private static void finalizeLibrarySizeChange()
{
SearchEngineCommands.FullReIndex();
LibrarySizeChanged?.Invoke(null, null);
}
/// <summary>Occurs when books are added or removed from library</summary>
public static event EventHandler LibrarySizeChanged;
/// <summary>
/// Occurs when <see cref="UserDefinedItem.Tags"/>, <see cref="UserDefinedItem.BookStatus"/>, or <see cref="UserDefinedItem.PdfStatus"/>
/// changed values are successfully persisted.
/// </summary>
public static event EventHandler<string> BookUserDefinedItemCommitted;
#region Update book details
public static int UpdateUserDefinedItem(Book book)
{
try
{
using var context = DbContexts.GetContext();
// Attach() NoTracking entities before SaveChanges()
context.Attach(book.UserDefinedItem).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
var qtyChanges = context.SaveChanges();
if (qtyChanges > 0)
{
SearchEngineCommands.UpdateLiberatedStatus(book);
SearchEngineCommands.UpdateBookTags(book);
BookUserDefinedItemCommitted?.Invoke(null, book.AudibleProductId);
}
return qtyChanges;
}
catch (Exception ex)
{
Log.Logger.Error(ex, $"Error updating {nameof(book.UserDefinedItem)}");
throw;
}
}
#endregion
// must be here instead of in db layer due to AaxcExists
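// audio file exists => report the user's BookStatus; only the encrypted download (aaxc) is cached => PartialDownload; otherwise NotLiberated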
public static LiberatedStatus Liberated_Status(Book book)
=> book.Audio_Exists ? book.UserDefinedItem.BookStatus
: AudibleFileStorage.AaxcExists(book.AudibleProductId) ? LiberatedStatus.PartialDownload
: LiberatedStatus.NotLiberated;
// exists here for feature predictability. It makes sense for this to be where Liberated_Status is
public static LiberatedStatus? Pdf_Status(Book book) => book.UserDefinedItem.PdfStatus;
// below are queries, not commands. maybe I should make a LibraryQueries. except there's already one of those...
public record LibraryStats(int booksFullyBackedUp, int booksDownloadedOnly, int booksNoProgress, int booksError, int pdfsDownloaded, int pdfsNotDownloaded) { }
public static LibraryStats GetCounts()
{
var libraryBooks = DbContexts.GetLibrary_Flat_NoTracking();
var results = libraryBooks
.AsParallel()
.Select(lb => Liberated_Status(lb.Book))
.ToList();
var booksFullyBackedUp = results.Count(r => r == LiberatedStatus.Liberated);
var booksDownloadedOnly = results.Count(r => r == LiberatedStatus.PartialDownload);
var booksNoProgress = results.Count(r => r == LiberatedStatus.NotLiberated);
var booksError = results.Count(r => r == LiberatedStatus.Error);
Log.Logger.Information("Book counts. {@DebugInfo}", new { total = results.Count, booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError });
var boolResults = libraryBooks
.AsParallel()
.Where(lb => lb.Book.HasPdf)
.Select(lb => Pdf_Status(lb.Book))
.ToList();
var pdfsDownloaded = boolResults.Count(r => r == LiberatedStatus.Liberated);
var pdfsNotDownloaded = boolResults.Count(r => r == LiberatedStatus.NotLiberated);
Log.Logger.Information("PDF counts. {@DebugInfo}", new { total = boolResults.Count, pdfsDownloaded, pdfsNotDownloaded });
return new(booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, pdfsDownloaded, pdfsNotDownloaded);
}
}
}

View file

@ -0,0 +1,287 @@
using System;
using System.Collections.Generic;
using System.Linq;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using DataLayer;
using NPOI.XSSF.UserModel;
using Serilog;
namespace ApplicationServices
{
public class ExportDto
{
public static string GetName(string fieldName)
{
var property = typeof(ExportDto).GetProperty(fieldName);
var attribute = property.GetCustomAttributes(typeof(NameAttribute), true)[0];
var description = (NameAttribute)attribute;
var text = description.Names;
return text[0];
}
[Name("Account")]
public string Account { get; set; }
[Name("Date Added to library")]
public DateTime DateAdded { get; set; }
[Name("Audible Product Id")]
public string AudibleProductId { get; set; }
[Name("Locale")]
public string Locale { get; set; }
[Name("Title")]
public string Title { get; set; }
[Name("Authors")]
public string AuthorNames { get; set; }
[Name("Narrators")]
public string NarratorNames { get; set; }
[Name("Length In Minutes")]
public int LengthInMinutes { get; set; }
[Name("Description")]
public string Description { get; set; }
[Name("Publisher")]
public string Publisher { get; set; }
[Name("Has PDF")]
public bool HasPdf { get; set; }
[Name("Series Names")]
public string SeriesNames { get; set; }
[Name("Series Order")]
public string SeriesOrder { get; set; }
[Name("Community Rating: Overall")]
public float? CommunityRatingOverall { get; set; }
[Name("Community Rating: Performance")]
public float? CommunityRatingPerformance { get; set; }
[Name("Community Rating: Story")]
public float? CommunityRatingStory { get; set; }
[Name("Cover Id")]
public string PictureId { get; set; }
[Name("Is Abridged?")]
public bool IsAbridged { get; set; }
[Name("Date Published")]
public DateTime? DatePublished { get; set; }
[Name("Categories")]
public string CategoriesNames { get; set; }
[Name("My Rating: Overall")]
public float? MyRatingOverall { get; set; }
[Name("My Rating: Performance")]
public float? MyRatingPerformance { get; set; }
[Name("My Rating: Story")]
public float? MyRatingStory { get; set; }
[Name("My Libation Tags")]
public string MyLibationTags { get; set; }
[Name("Book Liberated Status")]
public string BookStatus { get; set; }
[Name("PDF Liberated Status")]
public string PdfStatus { get; set; }
[Name("Content Type")]
public string ContentType { get; set; }
}
public static class LibToDtos
{
public static List<ExportDto> ToDtos(this IEnumerable<LibraryBook> library)
=> library.Select(a => new ExportDto
{
Account = a.Account,
DateAdded = a.DateAdded,
AudibleProductId = a.Book.AudibleProductId,
Locale = a.Book.Locale,
Title = a.Book.Title,
AuthorNames = a.Book.AuthorNames,
NarratorNames = a.Book.NarratorNames,
LengthInMinutes = a.Book.LengthInMinutes,
Description = a.Book.Description,
Publisher = a.Book.Publisher,
HasPdf = a.Book.HasPdf,
SeriesNames = a.Book.SeriesNames,
SeriesOrder = a.Book.SeriesLink?.Any() == true ? a.Book.SeriesLink.Select(sl => $"{sl.Order} : {sl.Series.Name}").Aggregate((x, y) => $"{x}, {y}") : "",
CommunityRatingOverall = a.Book.Rating?.OverallRating,
CommunityRatingPerformance = a.Book.Rating?.PerformanceRating,
CommunityRatingStory = a.Book.Rating?.StoryRating,
PictureId = a.Book.PictureId,
IsAbridged = a.Book.IsAbridged,
DatePublished = a.Book.DatePublished,
CategoriesNames = a.Book.CategoriesNames.Any() ? a.Book.CategoriesNames.Aggregate((x, y) => $"{x}, {y}") : "",
MyRatingOverall = a.Book.UserDefinedItem.Rating.OverallRating,
MyRatingPerformance = a.Book.UserDefinedItem.Rating.PerformanceRating,
MyRatingStory = a.Book.UserDefinedItem.Rating.StoryRating,
MyLibationTags = a.Book.UserDefinedItem.Tags,
BookStatus = a.Book.UserDefinedItem.BookStatus.ToString(),
PdfStatus = a.Book.UserDefinedItem.PdfStatus.ToString(),
ContentType = a.Book.ContentType.ToString()
}).ToList();
}
public static class LibraryExporter
{
public static void ToCsv(string saveFilePath)
{
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
if (!dtos.Any())
return;
using var writer = new System.IO.StreamWriter(saveFilePath);
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);
csv.WriteHeader(typeof(ExportDto));
csv.NextRecord();
csv.WriteRecords(dtos);
}
public static void ToJson(string saveFilePath)
{
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
var json = Newtonsoft.Json.JsonConvert.SerializeObject(dtos, Newtonsoft.Json.Formatting.Indented);
System.IO.File.WriteAllText(saveFilePath, json);
}
public static void ToXlsx(string saveFilePath)
{
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
var workbook = new XSSFWorkbook();
var sheet = workbook.CreateSheet("Library");
var detailSubtotalFont = workbook.CreateFont();
detailSubtotalFont.IsBold = true;
var detailSubtotalCellStyle = workbook.CreateCellStyle();
detailSubtotalCellStyle.SetFont(detailSubtotalFont);
// headers
var rowIndex = 0;
var row = sheet.CreateRow(rowIndex);
var columns = new[] {
nameof (ExportDto.Account),
nameof (ExportDto.DateAdded),
nameof (ExportDto.AudibleProductId),
nameof (ExportDto.Locale),
nameof (ExportDto.Title),
nameof (ExportDto.AuthorNames),
nameof (ExportDto.NarratorNames),
nameof (ExportDto.LengthInMinutes),
nameof (ExportDto.Description),
nameof (ExportDto.Publisher),
nameof (ExportDto.HasPdf),
nameof (ExportDto.SeriesNames),
nameof (ExportDto.SeriesOrder),
nameof (ExportDto.CommunityRatingOverall),
nameof (ExportDto.CommunityRatingPerformance),
nameof (ExportDto.CommunityRatingStory),
nameof (ExportDto.PictureId),
nameof (ExportDto.IsAbridged),
nameof (ExportDto.DatePublished),
nameof (ExportDto.CategoriesNames),
nameof (ExportDto.MyRatingOverall),
nameof (ExportDto.MyRatingPerformance),
nameof (ExportDto.MyRatingStory),
nameof (ExportDto.MyLibationTags),
nameof (ExportDto.BookStatus),
nameof (ExportDto.PdfStatus),
nameof (ExportDto.ContentType)
};
var col = 0;
foreach (var c in columns)
{
var cell = row.CreateCell(col++);
var name = ExportDto.GetName(c);
cell.SetCellValue(name);
cell.CellStyle = detailSubtotalCellStyle;
}
var dateFormat = workbook.CreateDataFormat();
var dateStyle = workbook.CreateCellStyle();
dateStyle.DataFormat = dateFormat.GetFormat("MM/dd/yyyy HH:mm:ss");
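// without an explicit date format, DateTime cells display as raw serial numbers in Excel; this style is applied to the date cells below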
rowIndex++;
// Add data rows
foreach (var dto in dtos)
{
col = 0;
row = sheet.CreateRow(rowIndex);
row.CreateCell(col++).SetCellValue(dto.Account);
var dateAddedCell = row.CreateCell(col++);
dateAddedCell.CellStyle = dateStyle;
dateAddedCell.SetCellValue(dto.DateAdded);
row.CreateCell(col++).SetCellValue(dto.AudibleProductId);
row.CreateCell(col++).SetCellValue(dto.Locale);
row.CreateCell(col++).SetCellValue(dto.Title);
row.CreateCell(col++).SetCellValue(dto.AuthorNames);
row.CreateCell(col++).SetCellValue(dto.NarratorNames);
row.CreateCell(col++).SetCellValue(dto.LengthInMinutes);
row.CreateCell(col++).SetCellValue(dto.Description);
row.CreateCell(col++).SetCellValue(dto.Publisher);
row.CreateCell(col++).SetCellValue(dto.HasPdf);
row.CreateCell(col++).SetCellValue(dto.SeriesNames);
row.CreateCell(col++).SetCellValue(dto.SeriesOrder);
col = createCell(row, col, dto.CommunityRatingOverall);
col = createCell(row, col, dto.CommunityRatingPerformance);
col = createCell(row, col, dto.CommunityRatingStory);
row.CreateCell(col++).SetCellValue(dto.PictureId);
row.CreateCell(col++).SetCellValue(dto.IsAbridged);
var datePubCell = row.CreateCell(col++);
datePubCell.CellStyle = dateStyle;
if (dto.DatePublished.HasValue)
datePubCell.SetCellValue(dto.DatePublished.Value);
else
datePubCell.SetCellValue("");
row.CreateCell(col++).SetCellValue(dto.CategoriesNames);
col = createCell(row, col, dto.MyRatingOverall);
col = createCell(row, col, dto.MyRatingPerformance);
col = createCell(row, col, dto.MyRatingStory);
row.CreateCell(col++).SetCellValue(dto.MyLibationTags);
row.CreateCell(col++).SetCellValue(dto.BookStatus);
row.CreateCell(col++).SetCellValue(dto.PdfStatus);
row.CreateCell(col++).SetCellValue(dto.ContentType);
rowIndex++;
}
using var fileData = new System.IO.FileStream(saveFilePath, System.IO.FileMode.Create);
workbook.Write(fileData);
}
private static int createCell(NPOI.SS.UserModel.IRow row, int col, float? nullableFloat)
{
if (nullableFloat.HasValue)
row.CreateCell(col++).SetCellValue(nullableFloat.Value);
else
row.CreateCell(col++).SetCellValue("");
return col;
}
}
}

View file

@ -0,0 +1,57 @@
using System;
using System.IO;
using DataLayer;
using LibationSearchEngine;
namespace ApplicationServices
{
public static class SearchEngineCommands
{
public static void FullReIndex(SearchEngine engine = null)
{
engine ??= new SearchEngine();
var library = DbContexts.GetLibrary_Flat_NoTracking();
engine.CreateNewIndex(library);
}
public static SearchResultSet Search(string searchString) => performSearchEngineFunc_safe(e =>
e.Search(searchString)
);
public static void UpdateBookTags(Book book) => performSearchEngineAction_safe(e =>
e.UpdateTags(book.AudibleProductId, book.UserDefinedItem.Tags)
);
public static void UpdateLiberatedStatus(Book book) => performSearchEngineAction_safe(e =>
e.UpdateLiberatedStatus(book)
);
private static void performSearchEngineAction_safe(Action<SearchEngine> action)
{
var engine = new SearchEngine();
try
{
action(engine);
}
catch (FileNotFoundException)
{
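// search index missing or never built: rebuild it from the library, then retry the action once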
FullReIndex(engine);
action(engine);
}
}
private static T performSearchEngineFunc_safe<T>(Func<SearchEngine, T> func)
{
var engine = new SearchEngine();
try
{
return func(engine);
}
catch (FileNotFoundException)
{
FullReIndex(engine);
return func(engine);
}
}
}
}

View file

@ -0,0 +1,103 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi;
using AudibleApi.Authorization;
using Dinah.Core;
using Newtonsoft.Json;
namespace AudibleUtilities
{
public class Account : IUpdatable
{
public event EventHandler Updated;
private void update(object sender = null, EventArgs e = null)
=> Updated?.Invoke(this, new EventArgs());
// canonical. immutable. email or phone number
public string AccountId { get; }
// user-friendly, non-canonical name. mutable
private string _accountName;
public string AccountName
{
get => _accountName;
set
{
if (string.IsNullOrWhiteSpace(value))
return;
var v = value.Trim();
if (v == _accountName)
return;
_accountName = v;
update();
}
}
// whether to include this account when scanning libraries.
// technically this is an app setting; not an attribute of account. but since it's managed with accounts, it makes sense to put this exception-to-the-rule here
private bool _libraryScan = true;
public bool LibraryScan
{
get => _libraryScan;
set
{
if (value == _libraryScan)
return;
_libraryScan = value;
update();
}
}
private string _decryptKey = "";
/// <summary>aka: activation bytes</summary>
public string DecryptKey
{
get => _decryptKey;
set
{
var v = (value ?? "").Trim();
if (v == _decryptKey)
return;
_decryptKey = v;
update();
}
}
private Identity _identity;
public Identity IdentityTokens
{
get => _identity;
set
{
if (_identity is null && value is null)
return;
if (_identity is not null)
_identity.Updated -= update;
if (value is not null)
value.Updated += update;
_identity = value;
update();
}
}
[JsonIgnore]
public Locale Locale => IdentityTokens?.Locale;
public Account(string accountId)
{
AccountId = ArgumentValidator.EnsureNotNullOrWhiteSpace(accountId, nameof(accountId)).Trim();
}
public override string ToString() => $"{AccountId} - {Locale?.Name ?? "[empty]"}";
public string MaskedLogEntry => @$"AccountId={mask(AccountId)}|AccountName={mask(AccountName)}|Locale={Locale?.Name ?? "[empty]"}";
private static string mask(string str)
=> str is null ? "[null]"
: str == string.Empty ? "[empty]"
: str.ToMask();
}
}

View file

@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi;
using AudibleApi.Authorization;
using Dinah.Core;
using Newtonsoft.Json;
namespace AudibleUtilities
{
// 'AccountsSettings' is intentionally NOT IEnumerable<> so that properties can be added/extended
// from newtonsoft (https://www.newtonsoft.com/json/help/html/SerializationGuide.htm):
// .NET : IList, IEnumerable, IList<T>, Array
// JSON : Array (properties on the collection will not be serialized)
public class AccountsSettings : IUpdatable
{
public event EventHandler Updated;
private void update(object sender = null, EventArgs e = null)
{
foreach (var account in Accounts)
validate(account);
update_no_validate();
}
private void update_no_validate() => Updated?.Invoke(this, new EventArgs());
public AccountsSettings() { }
// for some reason this will make the json instantiator use _accounts_json.set()
[JsonConstructor]
protected AccountsSettings(List<Account> accountsSettings) { }
#region Accounts
private List<Account> _accounts_backing = new List<Account>();
[JsonProperty(PropertyName = nameof(Accounts))]
private List<Account> _accounts_json
{
get => _accounts_backing;
// 'set' is only used by json deser
set
{
if (value is null)
return;
foreach (var account in value)
_add(account);
update_no_validate();
}
}
[JsonIgnore]
public IReadOnlyList<Account> Accounts => _accounts_json.AsReadOnly();
#endregion
#region de/serialize
public static AccountsSettings FromJson(string json)
=> JsonConvert.DeserializeObject<AccountsSettings>(json, Identity.GetJsonSerializerSettings());
public string ToJson(Formatting formatting = Formatting.Indented)
=> JsonConvert.SerializeObject(this, formatting, Identity.GetJsonSerializerSettings());
#endregion
// more common naming convention alias for internal collection
public IReadOnlyList<Account> GetAll() => Accounts;
public Account Upsert(string accountId, string locale)
{
var acct = GetAccount(accountId, locale);
if (acct is not null)
return acct;
var l = Localization.Get(locale);
var id = new Identity(l);
var account = new Account(accountId) { IdentityTokens = id };
Add(account);
return account;
}
public void Add(Account account)
{
_add(account);
update_no_validate();
}
private void _add(Account account)
{
validate(account);
_accounts_backing.Add(account);
account.Updated += update;
}
public Account GetAccount(string accountId, string locale)
{
if (locale is null)
return null;
return Accounts.SingleOrDefault(a => a.AccountId == accountId && a.IdentityTokens.Locale.Name == locale);
}
public bool Delete(string accountId, string locale)
{
var acct = GetAccount(accountId, locale);
if (acct is null)
return false;
return Delete(acct);
}
public bool Delete(Account account)
{
if (!_accounts_backing.Contains(account))
return false;
account.Updated -= update;
var result = _accounts_backing.Remove(account);
update_no_validate();
return result;
}
private void validate(Account account)
{
ArgumentValidator.EnsureNotNull(account, nameof(account));
var accountId = account.AccountId;
var locale = account?.IdentityTokens?.Locale?.Name;
var acct = GetAccount(accountId, locale);
// new: ok
if (acct is null)
return;
// same account instance: ok
if (acct == account)
return;
// same account id + locale, different instance: bad
throw new InvalidOperationException("Cannot add an account with the same account Id and Locale");
}
}
}

View file

@ -0,0 +1,24 @@
using System;
using AudibleApi.Authorization;
using Dinah.Core.IO;
using Newtonsoft.Json;
namespace AudibleUtilities
{
public class AccountsSettingsPersister : JsonFilePersister<AccountsSettings>
{
/// <summary>Alias for Target </summary>
public AccountsSettings AccountsSettings => Target;
/// <summary>uses path. create file if doesn't yet exist</summary>
public AccountsSettingsPersister(AccountsSettings target, string path, string jsonPath = null)
: base(target, path, jsonPath) { }
/// <summary>load from existing file</summary>
public AccountsSettingsPersister(string path, string jsonPath = null)
: base(path, jsonPath) { }
protected override JsonSerializerSettings GetSerializerSettings()
=> Identity.GetJsonSerializerSettings();
}
}

View file

@ -0,0 +1,316 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AudibleApi;
using AudibleApi.Common;
using Dinah.Core;
using Polly;
using Polly.Retry;
namespace AudibleUtilities
{
/// <summary>USE THIS from within Libation. It wraps the call with correct JSONPath</summary>
public class ApiExtended
{
public Api Api { get; private set; }
private ApiExtended(Api api) => Api = api;
/// <summary>Get api from existing tokens else login with 'eager' choice. External browser url is provided. Response can be external browser login or continuing with native api callbacks.</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginChoiceEager loginChoiceEager)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginChoiceEager),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginChoiceEager,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens else login with native api callbacks.</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginCallback loginCallback)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginCallback),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginCallback,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens else login with external browser</summary>
public static async Task<ApiExtended> CreateAsync(Account account, ILoginExternal loginExternal)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
LoginType = nameof(ILoginExternal),
Account = account?.MaskedLogEntry ?? "[null]",
LocaleName = account?.Locale?.Name
});
var api = await EzApiCreator.GetApiAsync(
loginExternal,
account.Locale,
AudibleApiStorage.AccountsSettingsFile,
account.GetIdentityTokensJsonPath());
return new ApiExtended(api);
}
/// <summary>Get api from existing tokens. Assumes you have valid login tokens. Else exception</summary>
public static async Task<ApiExtended> CreateAsync(Account account)
{
ArgumentValidator.EnsureNotNull(account, nameof(account));
ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
AccountMaskedLogEntry = account.MaskedLogEntry
});
return await CreateAsync(account.AccountId, account.Locale.Name);
}
/// <summary>Get api from existing tokens. Assumes you have valid login tokens. Else exception</summary>
public static async Task<ApiExtended> CreateAsync(string username, string localeName)
{
Serilog.Log.Logger.Information("{@DebugInfo}", new
{
Username = username.ToMask(),
LocaleName = localeName,
});
var api = await EzApiCreator.GetApiAsync(
Localization.Get(localeName),
AudibleApiStorage.AccountsSettingsFile,
AudibleApiStorage.GetIdentityTokensJsonPath(username, localeName));
return new ApiExtended(api);
}
private static AsyncRetryPolicy policy { get; }
= Policy.Handle<Exception>()
// 2 retries == 3 total
.RetryAsync(2);
public Task<List<Item>> GetLibraryValidatedAsync(LibraryOptions.ResponseGroupOptions responseGroups = LibraryOptions.ResponseGroupOptions.ALL_OPTIONS, bool importEpisodes = true)
{
// bug on audible's side. the 1st time after a long absence, a query to get library will return without titles or authors. a subsequent identical query will be successful. this is true whether or not tokens are refreshed
// worse, this 1st dummy call doesn't seem to help:
// var page = await api.GetLibraryAsync(new AudibleApi.LibraryOptions { NumberOfResultPerPage = 1, PageNumber = 1, PurchasedAfter = DateTime.Now.AddYears(-20), ResponseGroups = AudibleApi.LibraryOptions.ResponseGroupOptions.ALL_OPTIONS });
// i don't want to incur the cost of making a full dummy call every time because it fails sometimes
return policy.ExecuteAsync(() => getItemsAsync(responseGroups, importEpisodes));
}
private async Task<List<Item>> getItemsAsync(LibraryOptions.ResponseGroupOptions responseGroups, bool importEpisodes)
{
var items = new List<Item>();
#if DEBUG
//// this will not work for multi accounts
//var library_json = "library.json";
//library_json = System.IO.Path.GetFullPath(library_json);
//if (System.IO.File.Exists(library_json))
//{
// items = AudibleApi.Common.Converter.FromJson<List<Item>>(System.IO.File.ReadAllText(library_json));
//}
#endif
Serilog.Log.Logger.Debug("Begin initial library scan");
if (!items.Any())
items = await Api.GetAllLibraryItemsAsync(responseGroups);
Serilog.Log.Logger.Debug("Initial library scan complete. Begin episode scan");
await manageEpisodesAsync(items, importEpisodes);
Serilog.Log.Logger.Debug("Episode scan complete");
#if DEBUG
//System.IO.File.WriteAllText(library_json, AudibleApi.Common.Converter.ToJson(items));
#endif
var validators = new List<IValidator>();
validators.AddRange(getValidators());
foreach (var v in validators)
{
var exceptions = v.Validate(items);
if (exceptions is not null && exceptions.Any())
throw new AggregateException(exceptions);
}
return items;
}
#region episodes and podcasts
private async Task manageEpisodesAsync(List<Item> items, bool importEpisodes)
{
// add podcasts and episodes to list. If fail, don't let it de-rail the rest of the import
try
{
// get parents
var parents = items.Where(i => i.IsEpisodes).ToList();
#if DEBUG
//var parentsDebug = parents.Select(i => i.ToJson()).Aggregate((a, b) => $"{a}\r\n\r\n{b}");
//System.IO.File.WriteAllText("parents.json", parentsDebug);
#endif
if (!parents.Any())
return;
Serilog.Log.Logger.Information($"{parents.Count} series of shows/podcasts found");
// remove episode parents. even if the following stuff fails, these will still be removed from the collection
items.RemoveAll(i => i.IsEpisodes);
if (importEpisodes)
{
// add children
var children = await getEpisodesAsync(parents);
Serilog.Log.Logger.Information($"{children.Count} episodes of shows/podcasts found");
items.AddRange(children);
}
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding podcasts and episodes");
}
}
private async Task<List<Item>> getEpisodesAsync(List<Item> parents)
{
var results = new List<Item>();
foreach (var parent in parents)
{
var children = await getEpisodeChildrenAsync(parent);
// actual individual episode, not the parent of a series.
// for now I'm keeping it inside this method since it fits the work flow, incl. importEpisodes logic
if (!children.Any())
{
results.Add(parent);
continue;
}
foreach (var child in children)
{
// use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
child.PurchaseDate = parent.PurchaseDate;
// parent is essentially a series
child.Series = new Series[]
{
new Series
{
Asin = parent.Asin,
// This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
Sequence = parent.Relationships.FirstOrDefault(r => r.Asin == child.Asin).Sort.ToString(),
Title = parent.TitleWithSubtitle
}
};
// overload (read: abuse) IsEpisodes flag
child.Relationships = new Relationship[]
{
new Relationship
{
RelationshipToProduct = RelationshipToProduct.Child,
RelationshipType = RelationshipType.Episode
}
};
}
results.AddRange(children);
}
return results;
}
private async Task<List<Item>> getEpisodeChildrenAsync(Item parent)
{
var childrenIds = parent.Relationships
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
.Select(r => r.Asin)
.ToList();
// fetch children in batches
const int batchSize = 20;
var results = new List<Item>();
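// i is 1-based, so Skip((i - 1) * batchSize) pages through childrenIds; the loop exits when a batch comes back empty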
for (var i = 1; ; i++)
{
var idBatch = childrenIds.Skip((i - 1) * batchSize).Take(batchSize).ToList();
if (!idBatch.Any())
break;
List<Item> childrenBatch;
try
{
childrenBatch = await Api.GetCatalogProductsAsync(idBatch, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
#if DEBUG
//var childrenBatchDebug = childrenBatch.Select(i => i.ToJson()).Aggregate((a, b) => $"{a}\r\n\r\n{b}");
//System.IO.File.WriteAllText($"children of {parent.Asin}.json", childrenBatchDebug);
#endif
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new
{
ParentId = parent.Asin,
ParentTitle = parent.Title,
BatchNumber = i,
ChildIdBatch = idBatch
});
throw;
}
Serilog.Log.Logger.Debug($"Batch {i}: {childrenBatch.Count} results");
// the service returned no results. probably indicates an error. stop running batches
if (!childrenBatch.Any())
break;
results.AddRange(childrenBatch);
}
Serilog.Log.Logger.Debug("Parent episodes/podcasts series. Children found. {@DebugInfo}", new
{
ParentId = parent.Asin,
ParentTitle = parent.Title,
ChildCount = childrenIds.Count
});
if (childrenIds.Count != results.Count)
{
var ex = new ApplicationException($"Mis-match: Children defined by parent={childrenIds.Count}. Children returned by batches={results.Count}");
Serilog.Log.Logger.Error(ex, "Quantity of series episodes defined by parent does not match quantity returned by batch fetching.");
throw ex;
}
return results;
}
#endregion
private static List<IValidator> getValidators()
{
var type = typeof(IValidator);
var types = AppDomain.CurrentDomain.GetAssemblies()
.SelectMany(s => s.GetTypes())
.Where(p => type.IsAssignableFrom(p) && !p.IsInterface);
return types.Select(t => Activator.CreateInstance(t) as IValidator).ToList();
}
}
}

View file

@ -0,0 +1,47 @@
using System;
using System.IO;
using LibationFileManager;
using Newtonsoft.Json;
namespace AudibleUtilities
{
public static class AudibleApiStorage
{
public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles, "AccountsSettings.json");
public static void EnsureAccountsSettingsFileExists()
{
// BEWARE: constructing the persister saves immediately and would overwrite an existing file, hence the existence check
if (!File.Exists(AccountsSettingsFile))
_ = new AccountsSettingsPersister(new AccountsSettings(), AccountsSettingsFile);
}
/// <summary>If you use this, be a good citizen and DISPOSE of it</summary>
public static AccountsSettingsPersister GetAccountsSettingsPersister() => new AccountsSettingsPersister(AccountsSettingsFile);
public static string GetIdentityTokensJsonPath(this Account account)
=> GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name);
public static string GetIdentityTokensJsonPath(string username, string localeName)
{
var usernameSanitized = trimSurroundingQuotes(JsonConvert.ToString(username));
var localeNameSanitized = trimSurroundingQuotes(JsonConvert.ToString(localeName));
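// e.g. for a hypothetical account "user@example.com" in locale "us" this produces:
// $.Accounts[?(@.AccountId == 'user@example.com' && @.IdentityTokens.LocaleName == 'us')].IdentityTokens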
return $"$.Accounts[?(@.AccountId == '{usernameSanitized}' && @.IdentityTokens.LocaleName == '{localeNameSanitized}')].IdentityTokens";
}
private static string trimSurroundingQuotes(string str)
{
// Substring is better than Trim('"') here.
// E.g. original string is a single double-quote char: "
// its JSON-encoded form is: "\""
// str.Trim('"') => \ (wrong: also strips the escaped quote's closing char)
// str.Substring(1, str.Length - 2) => \" (correct)
// also works with surrounding single quotes
return str.Substring(1, str.Length - 2);
}
}
}

View file

@ -0,0 +1,88 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
namespace AudibleUtilities
{
public interface IValidator
{
IEnumerable<Exception> Validate(IEnumerable<Item> items);
}
public class LibraryValidator : IValidator
{
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
{
var exceptions = new List<Exception>();
if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
exceptions.Add(new ArgumentException($"Collection contains item(s) with null or blank {nameof(Item.ProductId)}", nameof(items)));
if (items.Any(i => i.DateAdded < new DateTime(1980, 1, 1)))
exceptions.Add(new ArgumentException($"Collection contains item(s) with invalid {nameof(Item.DateAdded)}", nameof(items)));
return exceptions;
}
}
public class BookValidator : IValidator
{
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
{
var exceptions = new List<Exception>();
// a book having no authors is rare but allowed
if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
exceptions.Add(new ArgumentException($"Collection contains item(s) with blank {nameof(Item.ProductId)}", nameof(items)));
// this can happen with podcast episodes
foreach (var i in items.Where(i => string.IsNullOrWhiteSpace(i.Title)))
i.Title = "[blank title]";
return exceptions;
}
}
public class CategoryValidator : IValidator
{
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
{
var exceptions = new List<Exception>();
var distinct = items.GetCategoriesDistinct();
if (distinct.Any(s => s.CategoryId is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryId)}", nameof(items)));
if (distinct.Any(s => s.CategoryName is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Categories)} with null {nameof(Ladder.CategoryName)}", nameof(items)));
return exceptions;
}
}
public class ContributorValidator : IValidator
{
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
{
var exceptions = new List<Exception>();
if (items.GetAuthorsDistinct().Any(a => string.IsNullOrWhiteSpace(a.Name)))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Authors)} with null {nameof(Person.Name)}", nameof(items)));
if (items.GetNarratorsDistinct().Any(a => string.IsNullOrWhiteSpace(a.Name)))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Narrators)} with null {nameof(Person.Name)}", nameof(items)));
return exceptions;
}
}
public class SeriesValidator : IValidator
{
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
{
var exceptions = new List<Exception>();
var distinct = items.GetSeriesDistinct();
if (distinct.Any(s => s.SeriesId is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items)));
if (distinct.Any(s => s.SeriesName is null))
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
return exceptions;
}
}
}

View file

@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AudibleApi" Version="2.7.3.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\LibationFileManager\LibationFileManager.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1 @@
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo(nameof(AudibleUtilities) + ".Tests")]

View file

@ -0,0 +1,71 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class BookConfig : IEntityTypeConfiguration<Book>
{
public void Configure(EntityTypeBuilder<Book> entity)
{
entity.HasKey(b => b.BookId);
entity.HasIndex(b => b.AudibleProductId);
entity.OwnsOne(b => b.Rating);
//
// CRUCIAL: ignore unmapped collections, even get-only
//
entity.Ignore(nameof(Book.Authors));
entity.Ignore(nameof(Book.Narrators));
//// these don't seem to matter
//entity.Ignore(nameof(Book.AuthorNames));
//entity.Ignore(nameof(Book.NarratorNames));
//entity.Ignore(nameof(Book.HasPdfs));
// OwnsMany: "Can only ever appear on navigation properties of other entity types.
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner."
entity
.OwnsMany(b => b.Supplements, b_s =>
{
b_s.WithOwner(s => s.Book)
.HasForeignKey(s => s.BookId);
b_s.HasKey(s => s.SupplementId);
});
// even though it's owned, we need to map its backing field
entity
.Metadata
.FindNavigation(nameof(Book.Supplements))
.SetPropertyAccessMode(PropertyAccessMode.Field);
// owns it 1:1, store in separate table
entity
.OwnsOne(b => b.UserDefinedItem, b_udi =>
{
b_udi.WithOwner(udi => udi.Book)
.HasForeignKey(udi => udi.BookId);
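// BookId doubles as the owned table's key and the FK back to Book, so EF must not auto-generate it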
b_udi.Property(udi => udi.BookId).ValueGeneratedNever();
b_udi.ToTable(nameof(Book.UserDefinedItem));
// owns it 1:1, store in same table
b_udi.OwnsOne(udi => udi.Rating);
});
entity
.Metadata
.FindNavigation(nameof(Book.ContributorsLink))
// PropertyAccessMode.Field : Contributions is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.Metadata
.FindNavigation(nameof(Book.SeriesLink))
// PropertyAccessMode.Field : Series is a get-only property, not a field, so use its backing field
.SetPropertyAccessMode(PropertyAccessMode.Field);
entity
.HasOne(b => b.Category)
.WithMany()
.HasForeignKey(b => b.CategoryId);
}
}
}

View file

@ -0,0 +1,25 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class BookContributorConfig : IEntityTypeConfiguration<BookContributor>
{
public void Configure(EntityTypeBuilder<BookContributor> entity)
{
entity.HasKey(bc => new { bc.BookId, bc.ContributorId, bc.Role });
entity.HasIndex(bc => bc.BookId);
entity.HasIndex(bc => bc.ContributorId);
entity
.HasOne(bc => bc.Book)
.WithMany(b => b.ContributorsLink)
.HasForeignKey(bc => bc.BookId);
entity
.HasOne(bc => bc.Contributor)
.WithMany(c => c.BooksLink)
.HasForeignKey(bc => bc.ContributorId);
}
}
}

View file

@ -0,0 +1,14 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class CategoryConfig : IEntityTypeConfiguration<Category>
{
public void Configure(EntityTypeBuilder<Category> entity)
{
entity.HasKey(c => c.CategoryId);
entity.HasIndex(c => c.AudibleCategoryId);
}
}
}

View file

@ -0,0 +1,22 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class ContributorConfig : IEntityTypeConfiguration<Contributor>
{
public void Configure(EntityTypeBuilder<Contributor> entity)
{
entity.HasKey(c => c.ContributorId);
entity.HasIndex(c => c.Name);
//entity.OwnsOne(b => b.AuthorProperty);
// ... in separate table
entity
.Metadata
.FindNavigation(nameof(Contributor.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field);
}
}
}

View file

@ -0,0 +1,32 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class LibraryBookConfig : IEntityTypeConfiguration<LibraryBook>
{
public void Configure(EntityTypeBuilder<LibraryBook> entity)
{
// to allow same book (incl region) with diff acct.s:
//
// this file:
// - composite key:
// entity.HasKey(b => new { b.BookId, b.Account });
// entity.HasIndex(b => b.BookId);
// entity.HasIndex(b => b.Account);
// - change the below relationship since Book+LibraryBook would no longer be 1:1
//
// other files:
// - change Book class since Book+LibraryBook would no longer be 1:1
// - update LibraryBook import code
// - would likely challenge assumptions throughout Libation which have been true up until now
entity.HasKey(lb => lb.BookId);
entity
.HasOne(lb => lb.Book)
.WithOne()
.HasForeignKey<LibraryBook>(lb => lb.BookId);
}
}
}

View file

@ -0,0 +1,25 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class SeriesBookConfig : IEntityTypeConfiguration<SeriesBook>
{
public void Configure(EntityTypeBuilder<SeriesBook> entity)
{
entity.HasKey(sb => new { sb.SeriesId, sb.BookId });
entity.HasIndex(sb => sb.SeriesId);
entity.HasIndex(sb => sb.BookId);
entity
.HasOne(sb => sb.Series)
.WithMany(s => s.BooksLink)
.HasForeignKey(sb => sb.SeriesId);
entity
.HasOne(sb => sb.Book)
.WithMany(b => b.SeriesLink)
.HasForeignKey(sb => sb.BookId);
}
}
}

View file

@ -0,0 +1,19 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
namespace DataLayer.Configurations
{
internal class SeriesConfig : IEntityTypeConfiguration<Series>
{
public void Configure(EntityTypeBuilder<Series> entity)
{
entity.HasKey(s => s.SeriesId);
entity.HasIndex(s => s.AudibleSeriesId);
entity
.Metadata
.FindNavigation(nameof(Series.BooksLink))
.SetPropertyAccessMode(PropertyAccessMode.Field);
}
}
}

View file

@ -0,0 +1,37 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
<ApplicationIcon />
<OutputType>Library</OutputType>
<StartupObject />
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Dinah.EntityFrameworkCore" Version="4.0.0.3" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="6.0.4" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\LibationFileManager\LibationFileManager.csproj" />
</ItemGroup>
<ItemGroup>
<None Update="appsettings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project>

View file

@ -0,0 +1,270 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class AudibleProductId
{
public string Id { get; }
public AudibleProductId(string id)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
Id = id;
}
}
// enum will be easier than bool to extend later
public enum ContentType { Unknown = 0, Product = 1, Episode = 2 }
public class Book
{
// implementation detail. set by db only. only used by data layer
internal int BookId { get; private set; }
// immutable
public string AudibleProductId { get; private set; }
public string Title { get; private set; }
public string Description { get; private set; }
public int LengthInMinutes { get; private set; }
public ContentType ContentType { get; private set; }
public string Locale { get; private set; }
// mutable
public string PictureId { get; set; }
// book details
public bool IsAbridged { get; private set; }
public DateTime? DatePublished { get; private set; }
// non-null. use "empty pattern"
internal int CategoryId { get; private set; }
public Category Category { get; private set; }
public string[] CategoriesNames
=> Category is null ? new string[0]
: Category.ParentCategory is null ? new[] { Category.Name }
: new[] { Category.ParentCategory.Name, Category.Name };
public string[] CategoriesIds
=> Category is null ? null
: Category.ParentCategory is null ? new[] { Category.AudibleCategoryId }
: new[] { Category.ParentCategory.AudibleCategoryId, Category.AudibleCategoryId };
public string TitleSortable => Formatters.GetSortName(Title);
public string SeriesSortable => Formatters.GetSortName(SeriesNames);
// is owned, not optional 1:1
public UserDefinedItem UserDefinedItem { get; private set; }
// UserDefinedItem convenience properties
/// <summary>True if IsLiberated or Error. False if NotLiberated</summary>
public bool Audio_Exists => UserDefinedItem.BookStatus != LiberatedStatus.NotLiberated;
/// <summary>True if exists and IsLiberated. Else false</summary>
public bool PDF_Exists => UserDefinedItem.PdfStatus == LiberatedStatus.Liberated;
// is owned, not optional 1:1
/// <summary>The product's aggregate community rating</summary>
public Rating Rating { get; private set; } = new Rating(0, 0, 0);
// ef-ctor
private Book() { }
// non-ef ctor
/// <param name="audibleProductId">special id class b/c it's too easy to get string order mixed up</param>
public Book(
AudibleProductId audibleProductId,
string title,
string description,
int lengthInMinutes,
ContentType contentType,
IEnumerable<Contributor> authors,
IEnumerable<Contributor> narrators,
Category category,
string localeName)
{
// validate
ArgumentValidator.EnsureNotNull(audibleProductId, nameof(audibleProductId));
var productId = audibleProductId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(productId, nameof(productId));
// assign as soon as possible. stuff below relies on this
AudibleProductId = productId;
Locale = localeName;
ArgumentValidator.EnsureNotNullOrWhiteSpace(title, nameof(title));
// non-ef-ctor init.s
UserDefinedItem = new UserDefinedItem(this);
_contributorsLink = new HashSet<BookContributor>();
_seriesLink = new HashSet<SeriesBook>();
_supplements = new HashSet<Supplement>();
Category = category;
// simple assigns
Title = title.Trim() ?? "";
Description = description?.Trim() ?? "";
LengthInMinutes = lengthInMinutes;
ContentType = contentType;
// assigns with biz logic
ReplaceAuthors(authors);
ReplaceNarrators(narrators);
}
#region contributors, authors, narrators
// use uninitialised backing fields - this means we can detect if the collection was loaded
private HashSet<BookContributor> _contributorsLink;
// i'd like this to be internal but migration throws this exception when i try:
// Value cannot be null.
// Parameter name: property
public IEnumerable<BookContributor> ContributorsLink
=> _contributorsLink?
.OrderBy(bc => bc.Order)
.ToList();
public IEnumerable<Contributor> Authors => getContributions(Role.Author).Select(bc => bc.Contributor).ToList();
public string AuthorNames => string.Join(", ", Authors.Select(a => a.Name));
public IEnumerable<Contributor> Narrators => getContributions(Role.Narrator).Select(bc => bc.Contributor).ToList();
public string NarratorNames => string.Join(", ", Narrators.Select(n => n.Name));
public string Publisher => getContributions(Role.Publisher).SingleOrDefault()?.Contributor.Name;
public void ReplaceAuthors(IEnumerable<Contributor> authors, DbContext context = null)
=> replaceContributors(authors, Role.Author, context);
public void ReplaceNarrators(IEnumerable<Contributor> narrators, DbContext context = null)
=> replaceContributors(narrators, Role.Narrator, context);
public void ReplacePublisher(Contributor publisher, DbContext context = null)
=> replaceContributors(new List<Contributor> { publisher }, Role.Publisher, context);
private void replaceContributors(IEnumerable<Contributor> newContributors, Role role, DbContext context = null)
{
ArgumentValidator.EnsureEnumerableNotNullOrEmpty(newContributors, nameof(newContributors));
// the edge cases of doing local-loaded vs remote-only got weird. just load it
if (_contributorsLink is null)
getEntry(context).Collection(s => s.ContributorsLink).Load();
var roleContributions = getContributions(role);
var isIdentical = roleContributions.Select(c => c.Contributor).SequenceEqual(newContributors);
if (isIdentical)
return;
_contributorsLink.RemoveWhere(bc => bc.Role == role);
addNewContributors(newContributors, role);
}
private void addNewContributors(IEnumerable<Contributor> newContributors, Role role)
{
byte order = 0;
var newContributionsEnum = newContributors.Select(c => new BookContributor(this, c, role, order++));
var newContributions = new HashSet<BookContributor>(newContributionsEnum);
_contributorsLink.UnionWith(newContributions);
}
private List<BookContributor> getContributions(Role role)
=> ContributorsLink
.Where(a => a.Role == role)
.OrderBy(a => a.Order)
.ToList();
#endregion
private Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Book> getEntry(DbContext context)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
var entry = context.Entry(this);
if (!entry.IsKeySet)
throw new InvalidOperationException("Could not load a valid Book from database");
return entry;
}
#region series
private HashSet<SeriesBook> _seriesLink;
public IEnumerable<SeriesBook> SeriesLink => _seriesLink?.ToList();
public string SeriesNames
{
get
{
if (_seriesLink is null)
return "";
// first: alphabetical by name
var withNames = _seriesLink
.Where(s => !string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.Name)
.OrderBy(a => a)
.ToList();
// then un-named are alpha by series id
var nullNames = _seriesLink
.Where(s => string.IsNullOrWhiteSpace(s.Series.Name))
.Select(s => s.Series.AudibleSeriesId)
.OrderBy(a => a)
.ToList();
var all = withNames.Union(nullNames).ToList();
return string.Join(", ", all);
}
}
public void UpsertSeries(Series series, string order, DbContext context = null)
{
ArgumentValidator.EnsureNotNull(series, nameof(series));
// our add() is conditional upon what's already included in the collection.
// therefore if not loaded, a trip is required. might as well just load it
if (_seriesLink is null)
getEntry(context).Collection(s => s.SeriesLink).Load();
var singleSeriesBook = _seriesLink.SingleOrDefault(sb => sb.Series == series);
if (singleSeriesBook is null)
_seriesLink.Add(new SeriesBook(series, this, order));
else
singleSeriesBook.UpdateOrder(order);
}
#endregion
#region supplements
private HashSet<Supplement> _supplements;
public IEnumerable<Supplement> Supplements => _supplements?.ToList();
public bool HasPdf => Supplements.Any();
public void AddSupplementDownloadUrl(string url)
{
// supplements are owned by Book, so no need to Load():
// Are automatically loaded, and can only be tracked by a DbContext alongside their owner.
ArgumentValidator.EnsureNotNullOrWhiteSpace(url, nameof(url));
if (_supplements.Any(s => s.Url.EqualsInsensitive(url)))
return;
_supplements.Add(new Supplement(this, url));
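// if PDF status was never set, default it to NotLiberated so the new supplement registers as not yet downloaded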
UserDefinedItem.PdfStatus ??= LiberatedStatus.NotLiberated;
}
#endregion
public void UpdateProductRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating);
public void UpdateBookDetails(bool isAbridged, DateTime? datePublished)
{
// don't overwrite with default values
IsAbridged |= isAbridged;
DatePublished = datePublished ?? DatePublished;
}
public void UpdateCategory(Category category, DbContext context = null)
{
// since category is never null, nullity means it hasn't been loaded
if (Category is null)
getEntry(context).Reference(s => s.Category).Load();
Category = category;
}
public override string ToString() => $"[{AudibleProductId}] {Title}";
}
}

View file

@ -0,0 +1,31 @@
using Dinah.Core;
namespace DataLayer
{
public enum Role { Author = 1, Narrator = 2, Publisher = 3 }
public class BookContributor
{
internal int BookId { get; private set; }
internal int ContributorId { get; private set; }
public Role Role { get; private set; }
public byte Order { get; private set; }
public Book Book { get; private set; }
public Contributor Contributor { get; private set; }
private BookContributor() { }
internal BookContributor(Book book, Contributor contributor, Role role, byte order)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
ArgumentValidator.EnsureNotNull(contributor, nameof(contributor));
Book = book;
Contributor = contributor;
Role = role;
Order = order;
}
public override string ToString() => $"{Book} {Contributor} {Role} {Order}";
}
}

View file

@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class AudibleCategoryId
{
public string Id { get; }
public AudibleCategoryId(string id)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
Id = id;
}
}
public class Category
{
// Empty is a special case. use private ctor w/o validation
public static Category GetEmpty() => new() { CategoryId = -1, AudibleCategoryId = "", Name = "" };
internal int CategoryId { get; private set; }
public string AudibleCategoryId { get; private set; }
public string Name { get; private set; }
public Category ParentCategory { get; private set; }
private Category() { }
/// <summary>special id class b/c it's too easy to get string order mixed up</summary>
public Category(AudibleCategoryId audibleCategoryId, string name, Category parentCategory = null)
{
ArgumentValidator.EnsureNotNull(audibleCategoryId, nameof(audibleCategoryId));
var id = audibleCategoryId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
AudibleCategoryId = id;
Name = name;
UpdateParentCategory(parentCategory);
}
public void UpdateParentCategory(Category parentCategory)
{
// don't overwrite with null but not an error
if (parentCategory is not null)
ParentCategory = parentCategory;
}
public override string ToString() => $"[{AudibleCategoryId}] {Name}";
}
}

View file

@ -0,0 +1,47 @@
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
namespace DataLayer
{
public class Contributor
{
// Empty is a special case. use private ctor w/o validation
public static Contributor GetEmpty() => new() { ContributorId = -1, Name = "" };
// contributors search links are just name with url-encoding. space can be + or %20
// author search link: /search?searchAuthor=Robert+Bevan
// narrator search link: /search?searchNarrator=Robert+Bevan
// can also search multiples. concat with comma before url encode
// id.s
// ----
// https://www.audible.com/author/Neil-Gaiman/B000AQ01G2 == https://www.audible.com/author/B000AQ01G2
// goes to summary page
// at bottom "See all titles by Neil Gaiman" goes to https://www.audible.com/search?searchAuthor=Neil+Gaiman
// some authors have no id. simply goes to https://www.audible.com/search?searchAuthor=Rufus+Fears
// all narrators have no id: https://www.audible.com/search?searchNarrator=Neil+Gaiman
internal int ContributorId { get; private set; }
public string Name { get; private set; }
private HashSet<BookContributor> _booksLink;
public IEnumerable<BookContributor> BooksLink => _booksLink?.ToList();
public string AudibleContributorId { get; private set; }
private Contributor() { }
public Contributor(string name, string audibleContributorId = null)
{
Name = ArgumentValidator.EnsureNotNullOrWhiteSpace(name, nameof(name));
_booksLink = new HashSet<BookContributor>();
// don't overwrite with null or whitespace but not an error
if (!string.IsNullOrWhiteSpace(audibleContributorId))
AudibleContributorId = audibleContributorId;
}
public override string ToString() => Name;
}
}

View file

@ -0,0 +1,27 @@
using System;
using Dinah.Core;
namespace DataLayer
{
public class LibraryBook
{
internal int BookId { get; private set; }
public Book Book { get; private set; }
public DateTime DateAdded { get; private set; }
public string Account { get; private set; }
private LibraryBook() { }
public LibraryBook(Book book, DateTime dateAdded, string account)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
ArgumentValidator.EnsureNotNull(account, nameof(account));
Book = book;
DateAdded = dateAdded;
Account = account;
}
public override string ToString() => $"{DateAdded:d} {Book}";
}
}

View file

@ -0,0 +1,78 @@
using System;
using System.Collections.Generic;
using Dinah.Core;
namespace DataLayer
{
/// <summary>Parameterless ctor and setters should be used by EF only. Everything else should treat it as immutable</summary>
public class Rating : ValueObject_Static<Rating>
{
public float OverallRating { get; private set; }
public float PerformanceRating { get; private set; }
public float StoryRating { get; private set; }
private Rating() { }
internal Rating(float overallRating, float performanceRating, float storyRating)
{
OverallRating = overallRating;
PerformanceRating = performanceRating;
StoryRating = storyRating;
}
// EF magically tracks this owned object. by replacing it with a new() immutable object, stuff gets weird. update instead
internal void Update(float overallRating, float performanceRating, float storyRating)
{
// don't overwrite with all 0
if (overallRating + performanceRating + storyRating == 0)
return;
OverallRating = overallRating;
PerformanceRating = performanceRating;
StoryRating = storyRating;
}
protected override IEnumerable<object> GetEqualityComponents()
{
yield return OverallRating;
yield return PerformanceRating;
yield return StoryRating;
}
public float FirstScore
=> OverallRating > 0 ? OverallRating
: PerformanceRating > 0 ? PerformanceRating
: StoryRating;
/// <summary>character: ★</summary>
const char STAR = '\u2605';
/// <summary>character: ½</summary>
const char HALF = '\u00BD';
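// e.g. getStars(3.5f) => "★★★½" ; getStars(4f) => "★★★★" (only an exact .5 fraction renders the half-star)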
string getStars(float score)
{
var fullStars = (int)Math.Floor(score);
var starString = "".PadLeft(fullStars, STAR);
if (score - fullStars == 0.5f)
starString += HALF;
return starString;
}
public string ToStarString()
{
var items = new List<string>();
if (OverallRating > 0)
items.Add($"Overall: {getStars(OverallRating)}");
if (PerformanceRating > 0)
items.Add($"Perform: {getStars(PerformanceRating)}");
if (StoryRating > 0)
items.Add($"Story: {getStars(StoryRating)}");
return string.Join("\r\n", items);
}
public override string ToString() => $"Overall={OverallRating} Perf={PerformanceRating} Story={StoryRating}";
}
}

View file

@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class AudibleSeriesId
{
public string Id { get; }
public AudibleSeriesId(string id)
{
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
Id = id;
}
}
public class Series
{
internal int SeriesId { get; private set; }
public string AudibleSeriesId { get; private set; }
/// <summary>optional</summary>
public string Name { get; private set; }
private HashSet<SeriesBook> _booksLink;
public IEnumerable<SeriesBook> BooksLink
=> _booksLink?
.OrderBy(sb => sb.Index)
.ToList();
private Series() { }
/// <summary>special id class b/c it's too easy to get string order mixed up</summary>
public Series(AudibleSeriesId audibleSeriesId, string name = null)
{
ArgumentValidator.EnsureNotNull(audibleSeriesId, nameof(audibleSeriesId));
var id = audibleSeriesId.Id;
ArgumentValidator.EnsureNotNullOrWhiteSpace(id, nameof(id));
AudibleSeriesId = id;
_booksLink = new HashSet<SeriesBook>();
UpdateName(name);
}
public void UpdateName(string name)
{
// don't overwrite with null or whitespace but not an error
if (!string.IsNullOrWhiteSpace(name))
Name = name;
}
public override string ToString() => Name;
}
}

View file

@ -0,0 +1,35 @@
using Dinah.Core;
namespace DataLayer
{
public class SeriesBook
{
internal int SeriesId { get; private set; }
internal int BookId { get; private set; }
public string Order { get; private set; }
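// Index is derived from Order via Dinah.Core's StringLib.ExtractFirstNumber, which presumably
// parses the first numeric token, e.g. Order "2.5" or "Book 2.5" => Index 2.5 (assumption; not verified here)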
public float Index => StringLib.ExtractFirstNumber(Order);
public Series Series { get; private set; }
public Book Book { get; private set; }
private SeriesBook() { }
internal SeriesBook(Series series, Book book, string order)
{
ArgumentValidator.EnsureNotNull(series, nameof(series));
ArgumentValidator.EnsureNotNull(book, nameof(book));
Series = series;
Book = book;
Order = order;
}
public void UpdateOrder(string order)
{
if (!string.IsNullOrWhiteSpace(order))
Order = order;
}
public override string ToString() => $"Series={Series} Book={Book}";
}
}

View file

@ -0,0 +1,26 @@
using Dinah.Core;
namespace DataLayer
{
/// <summary>PDF/ZIP files only. Although book download info could be the same format, they're substantially different and subject to change</summary>
public class Supplement
{
internal int SupplementId { get; private set; }
internal int BookId { get; private set; }
public Book Book { get; private set; }
public string Url { get; private set; }
private Supplement() { }
public Supplement(Book book, string url)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
ArgumentValidator.EnsureNotNullOrWhiteSpace(url, nameof(url));
Book = book;
Url = url;
}
public override string ToString() => $"{Book} {Url.Substring(Url.Length - 4)}";
}
}

View file

@ -0,0 +1,167 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using Dinah.Core;
namespace DataLayer
{
/// <summary>
/// Do not track in-process state. In-process state is determined by the presence of temp file.
/// </summary>
public enum LiberatedStatus
{
NotLiberated = 0,
Liberated = 1,
/// <summary>Error occurred during liberation. Don't retry</summary>
Error = 2,
/// <summary>Application-state only. Not a valid persistence state.</summary>
PartialDownload = 0x1000
}
public class UserDefinedItem
{
internal int BookId { get; private set; }
public Book Book { get; private set; }
private UserDefinedItem() { }
internal UserDefinedItem(Book book)
{
ArgumentValidator.EnsureNotNull(book, nameof(book));
Book = book;
// import previously saved tags
ArgumentValidator.EnsureNotNullOrWhiteSpace(book.AudibleProductId, nameof(book.AudibleProductId));
Tags = LibationFileManager.TagsPersistence.GetTags(book.AudibleProductId);
}
#region Tags
private string _tags = "";
public string Tags
{
get => _tags;
set
{
var newTags = sanitize(value);
if (_tags != newTags)
{
_tags = newTags;
OnItemChanged(nameof(Tags));
}
}
}
public IEnumerable<string> TagsEnumerated => Tags == "" ? new string[0] : Tags.Split(null as char[], StringSplitOptions.RemoveEmptyEntries);
#region sanitize tags: space delimited. Inline/denormalized. Lower case. Alphanumeric and underscore
// only legal chars are letters numbers underscores and separating whitespace
//
// technically, the only char.s which aren't easily supported are \ [ ]
// however, whitelisting is far safer than blacklisting (eg: new lines, non-printable characters)
// it's easy to expand whitelist as needed
// for lucene, ToLower() isn't needed because search is case-insensitive. for here, it prevents duplicates
//
// there are also other allowed but misleading characters. eg: the ^ operator defines a 'boost' score
// full list of characters which must be escaped:
// + - && || ! ( ) { } [ ] ^ " ~ * ? : \
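// e.g. sanitize("Sci-Fi  FAVORITES sci-fi") => "favorites sci_fi" (lower-cased, hyphen => underscore, de-duped, sorted)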
static Regex regex { get; } = new Regex(@"[^\w\d\s_]", RegexOptions.Compiled);
private static string sanitize(string input)
{
if (string.IsNullOrWhiteSpace(input))
return "";
var str = input
.Trim()
.ToLowerInvariant()
// assume a hyphen is supposed to be an underscore
.Replace("-", "_");
var unique = regex
// turn illegal characters into a space
.Replace(str, " ")
// split on any whitespace (spaces, new lines) and remove excess; matches TagsEnumerated's whitespace split
.Split(null as char[], StringSplitOptions.RemoveEmptyEntries)
// de-dup
.Distinct()
// this will prevent order from being relevant
.OrderBy(a => a);
// currently, the string is the canonical set. if we later make the collection into the canonical set:
// var tags = new HashSet<string>(list); // de-dup, order doesn't matter but can seem random due to hashing algo
// var isEqual = tagsNew.SetEquals(tagsOld);
return string.Join(" ", unique);
}
#endregion
#endregion
#region Rating
// owned: not an optional one-to-one
/// <summary>The user's individual book rating</summary>
public Rating Rating { get; private set; } = new Rating(0, 0, 0);
public void UpdateRating(float overallRating, float performanceRating, float storyRating)
=> Rating.Update(overallRating, performanceRating, storyRating);
#endregion
#region LiberatedStatuses
/// <summary>
/// Occurs when <see cref="Tags"/>, <see cref="BookStatus"/>, or <see cref="PdfStatus"/> values change.
/// This signals the change of the in-memory value; it does not ensure that the new value has been persisted.
/// </summary>
public static event EventHandler<string> ItemChanged;
private void OnItemChanged(string e)
{
// HACK
// must not fire during initial import.
//
// these checks are necessary because current architecture attaches to this instead of attaching to an event *after* fully committed to db. the attached delegate/action sometimes calls commit:
//
// desired:
// - importing new book
// - update pdf status
// - initial book commit
//
// actual without these checks [BAD]:
// - importing new book
// - update pdf status
// - invoke event
// - commit UserDefinedItem
// - initial book commit
if (BookId > 0 && Book is not null && Book.BookId > 0)
ItemChanged?.Invoke(this, e);
}
private LiberatedStatus _bookStatus;
private LiberatedStatus? _pdfStatus;
public LiberatedStatus BookStatus
{
get => _bookStatus;
set
{
if (_bookStatus != value)
{
_bookStatus = value;
OnItemChanged(nameof(BookStatus));
}
}
}
public LiberatedStatus? PdfStatus
{
get => _pdfStatus;
set
{
if (_pdfStatus != value)
{
_pdfStatus = value;
OnItemChanged(nameof(PdfStatus));
}
}
}
#endregion
public override string ToString() => $"{Book} {Rating} {Tags}";
}
}

View file

@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace DataLayer
{
internal class Formatters
{
private static string[] _sortPrefixIgnores { get; } = { "the", "a", "an" };
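/// <summary>lower-cases, strips '|' and ':', and drops a leading article. e.g. "The Martian" => "martian", "A Promised Land" => "promised land"</summary>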
public static string GetSortName(string unformattedName)
{
var sortName = unformattedName
.Replace("|", "")
.Replace(":", "")
.ToLowerInvariant()
.Trim();
if (_sortPrefixIgnores.Any(prefix => sortName.StartsWith(prefix + " ")))
sortName = sortName
.Substring(sortName.IndexOf(" ") + 1)
.TrimStart();
return sortName;
}
}
}

View file

@ -0,0 +1,72 @@
using DataLayer.Configurations;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class LibationContext : InterceptableDbContext
{
// IMPORTANT: USING DbSet<>
// ========================
// these run against the db. linq queries against these MUST be translatable to sql. primitives only. no POCOs or this error occurs:
// Unable to create a constant value of type 'DataLayer.Contributor'. Only primitive types or enumeration types are supported in this context.
// to use full object-linq, load and use local
// load full table:
// List<Contributor> contributors = ...;
// Contributors.Load();
// Contributors.Local.Where(a => contributors.Contains(a));
// load only those in object:
// // overwrite collection
// Entry(product).Collection(x => x.Narrators).Load();
// product.Narrators = narrators;
public DbSet<LibraryBook> LibraryBooks { get; private set; }
public DbSet<Book> Books { get; private set; }
public DbSet<Contributor> Contributors { get; private set; }
public DbSet<Series> Series { get; private set; }
public DbSet<Category> Categories { get; private set; }
public static LibationContext Create(string connectionString)
{
var factory = new LibationContextFactory();
var context = factory.Create(connectionString);
return context;
}
// see DesignTimeDbContextFactoryBase for info about ctors and connection strings/OnConfiguring()
internal LibationContext(DbContextOptions options) : base(options) { }
// called on each instantiation
protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
{
AddInterceptor(new TagPersistenceInterceptor());
base.OnConfiguring(optionsBuilder);
}
// typically only called once per execution; NOT once per instantiation
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
base.OnModelCreating(modelBuilder);
modelBuilder.ApplyConfiguration(new BookConfig());
modelBuilder.ApplyConfiguration(new ContributorConfig());
modelBuilder.ApplyConfiguration(new BookContributorConfig());
modelBuilder.ApplyConfiguration(new LibraryBookConfig());
modelBuilder.ApplyConfiguration(new SeriesConfig());
modelBuilder.ApplyConfiguration(new SeriesBookConfig());
modelBuilder.ApplyConfiguration(new CategoryConfig());
// seeds go here. examples in Dinah.EntityFrameworkCore.Tests\DbContextFactoryExample.cs
modelBuilder
.Entity<Category>()
.HasData(Category.GetEmpty());
modelBuilder
.Entity<Contributor>()
.HasData(Contributor.GetEmpty());
// views are now supported via "keyless entity types" (instead of "entity types" or the prev "query types"):
// https://docs.microsoft.com/en-us/ef/core/modeling/keyless-entity-types
}
}
}

Binary file not shown.

View file

@ -0,0 +1,11 @@
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
public class LibationContextFactory : DesignTimeDbContextFactoryBase<LibationContext>
{
protected override LibationContext CreateNewInstance(DbContextOptions<LibationContext> options) => new LibationContext(options);
protected override void UseDatabaseEngine(DbContextOptionsBuilder optionsBuilder, string connectionString) => optionsBuilder.UseSqlite(connectionString);
}
}

View file

@ -0,0 +1,332 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20191125182309_Fresh")]
partial class Fresh
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "3.0.0");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
});
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,294 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class Fresh : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "Categories",
columns: table => new
{
CategoryId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
AudibleCategoryId = table.Column<string>(nullable: true),
Name = table.Column<string>(nullable: true),
ParentCategoryCategoryId = table.Column<int>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Categories", x => x.CategoryId);
table.ForeignKey(
name: "FK_Categories_Categories_ParentCategoryCategoryId",
column: x => x.ParentCategoryCategoryId,
principalTable: "Categories",
principalColumn: "CategoryId",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateTable(
name: "Contributors",
columns: table => new
{
ContributorId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
Name = table.Column<string>(nullable: true),
AudibleContributorId = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Contributors", x => x.ContributorId);
});
migrationBuilder.CreateTable(
name: "Series",
columns: table => new
{
SeriesId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
AudibleSeriesId = table.Column<string>(nullable: true),
Name = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Series", x => x.SeriesId);
});
migrationBuilder.CreateTable(
name: "Books",
columns: table => new
{
BookId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
AudibleProductId = table.Column<string>(nullable: true),
Title = table.Column<string>(nullable: true),
Description = table.Column<string>(nullable: true),
LengthInMinutes = table.Column<int>(nullable: false),
PictureId = table.Column<string>(nullable: true),
IsAbridged = table.Column<bool>(nullable: false),
DatePublished = table.Column<DateTime>(nullable: true),
CategoryId = table.Column<int>(nullable: false),
Rating_OverallRating = table.Column<float>(nullable: true),
Rating_PerformanceRating = table.Column<float>(nullable: true),
Rating_StoryRating = table.Column<float>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Books", x => x.BookId);
table.ForeignKey(
name: "FK_Books_Categories_CategoryId",
column: x => x.CategoryId,
principalTable: "Categories",
principalColumn: "CategoryId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "BookContributor",
columns: table => new
{
BookId = table.Column<int>(nullable: false),
ContributorId = table.Column<int>(nullable: false),
Role = table.Column<int>(nullable: false),
Order = table.Column<byte>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_BookContributor", x => new { x.BookId, x.ContributorId, x.Role });
table.ForeignKey(
name: "FK_BookContributor_Books_BookId",
column: x => x.BookId,
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_BookContributor_Contributors_ContributorId",
column: x => x.ContributorId,
principalTable: "Contributors",
principalColumn: "ContributorId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Library",
columns: table => new
{
BookId = table.Column<int>(nullable: false),
DateAdded = table.Column<DateTime>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Library", x => x.BookId);
table.ForeignKey(
name: "FK_Library_Books_BookId",
column: x => x.BookId,
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "SeriesBook",
columns: table => new
{
SeriesId = table.Column<int>(nullable: false),
BookId = table.Column<int>(nullable: false),
Index = table.Column<float>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_SeriesBook", x => new { x.SeriesId, x.BookId });
table.ForeignKey(
name: "FK_SeriesBook_Books_BookId",
column: x => x.BookId,
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_SeriesBook_Series_SeriesId",
column: x => x.SeriesId,
principalTable: "Series",
principalColumn: "SeriesId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Supplement",
columns: table => new
{
SupplementId = table.Column<int>(nullable: false)
.Annotation("Sqlite:Autoincrement", true),
BookId = table.Column<int>(nullable: false),
Url = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Supplement", x => x.SupplementId);
table.ForeignKey(
name: "FK_Supplement_Books_BookId",
column: x => x.BookId,
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "UserDefinedItem",
columns: table => new
{
BookId = table.Column<int>(nullable: false),
Tags = table.Column<string>(nullable: true),
Rating_OverallRating = table.Column<float>(nullable: true),
Rating_PerformanceRating = table.Column<float>(nullable: true),
Rating_StoryRating = table.Column<float>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_UserDefinedItem", x => x.BookId);
table.ForeignKey(
name: "FK_UserDefinedItem_Books_BookId",
column: x => x.BookId,
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.InsertData(
table: "Categories",
columns: new[] { "CategoryId", "AudibleCategoryId", "Name", "ParentCategoryCategoryId" },
values: new object[] { -1, "", "", null });
migrationBuilder.InsertData(
table: "Contributors",
columns: new[] { "ContributorId", "AudibleContributorId", "Name" },
values: new object[] { -1, null, "" });
migrationBuilder.CreateIndex(
name: "IX_BookContributor_BookId",
table: "BookContributor",
column: "BookId");
migrationBuilder.CreateIndex(
name: "IX_BookContributor_ContributorId",
table: "BookContributor",
column: "ContributorId");
migrationBuilder.CreateIndex(
name: "IX_Books_AudibleProductId",
table: "Books",
column: "AudibleProductId");
migrationBuilder.CreateIndex(
name: "IX_Books_CategoryId",
table: "Books",
column: "CategoryId");
migrationBuilder.CreateIndex(
name: "IX_Categories_AudibleCategoryId",
table: "Categories",
column: "AudibleCategoryId");
migrationBuilder.CreateIndex(
name: "IX_Categories_ParentCategoryCategoryId",
table: "Categories",
column: "ParentCategoryCategoryId");
migrationBuilder.CreateIndex(
name: "IX_Contributors_Name",
table: "Contributors",
column: "Name");
migrationBuilder.CreateIndex(
name: "IX_Series_AudibleSeriesId",
table: "Series",
column: "AudibleSeriesId");
migrationBuilder.CreateIndex(
name: "IX_SeriesBook_BookId",
table: "SeriesBook",
column: "BookId");
migrationBuilder.CreateIndex(
name: "IX_SeriesBook_SeriesId",
table: "SeriesBook",
column: "SeriesId");
migrationBuilder.CreateIndex(
name: "IX_Supplement_BookId",
table: "Supplement",
column: "BookId");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "BookContributor");
migrationBuilder.DropTable(
name: "Library");
migrationBuilder.DropTable(
name: "SeriesBook");
migrationBuilder.DropTable(
name: "Supplement");
migrationBuilder.DropTable(
name: "UserDefinedItem");
migrationBuilder.DropTable(
name: "Contributors");
migrationBuilder.DropTable(
name: "Series");
migrationBuilder.DropTable(
name: "Books");
migrationBuilder.DropTable(
name: "Categories");
}
}
}

View file

@ -0,0 +1,338 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20200812152646_AddLocaleAndAccount")]
partial class AddLocaleAndAccount
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "3.1.7");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
});
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,31 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class AddLocaleAndAccount : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "Account",
table: "Library",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "Locale",
table: "Books",
nullable: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Account",
table: "Library");
migrationBuilder.DropColumn(
name: "Locale",
table: "Books");
}
}
}

View file

@ -0,0 +1,387 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210619030017_AddAaxcDecryptionKeys")]
partial class AddAaxcDecryptionKeys
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.5");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleIV")
.HasColumnType("TEXT");
b.Property<string>("AudibleKey")
.HasColumnType("TEXT");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,33 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class AddAaxcDecryptionKeys : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "AudibleIV",
table: "Books",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "AudibleKey",
table: "Books",
type: "TEXT",
nullable: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "AudibleIV",
table: "Books");
migrationBuilder.DropColumn(
name: "AudibleKey",
table: "Books");
}
}
}

View file

@ -0,0 +1,381 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210622205558_RemoveAaxcDecryptionKeys")]
partial class RemoveAaxcDecryptionKeys
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.5");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,33 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class RemoveAaxcDecryptionKeys : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "AudibleIV",
table: "Books");
migrationBuilder.DropColumn(
name: "AudibleKey",
table: "Books");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "AudibleIV",
table: "Books",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "AudibleKey",
table: "Books",
type: "TEXT",
nullable: true);
}
}
}

View file

@ -0,0 +1,390 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210727180408_AddLiberatedStatus")]
partial class AddLiberatedStatus
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.5");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("BookLocation")
.HasColumnType("TEXT");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,44 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class AddLiberatedStatus : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "BookLocation",
table: "UserDefinedItem",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<int>(
name: "BookStatus",
table: "UserDefinedItem",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<int>(
name: "PdfStatus",
table: "UserDefinedItem",
type: "INTEGER",
nullable: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "BookLocation",
table: "UserDefinedItem");
migrationBuilder.DropColumn(
name: "BookStatus",
table: "UserDefinedItem");
migrationBuilder.DropColumn(
name: "PdfStatus",
table: "UserDefinedItem");
}
}
}

View file

@ -0,0 +1,387 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210821012137_RemoveUdiBookLocation")]
partial class RemoveUdiBookLocation
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.5");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,23 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class RemoveUdiBookLocation : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "BookLocation",
table: "UserDefinedItem");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "BookLocation",
table: "UserDefinedItem",
type: "TEXT",
nullable: true);
}
}
}

View file

@ -0,0 +1,390 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210901205042_BookIsEpisode")]
partial class BookIsEpisode
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.9");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("Library");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,24 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class BookIsEpisode : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "ContentType",
table: "Books",
type: "INTEGER",
nullable: false,
defaultValue: 0);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "ContentType",
table: "Books");
}
}
}

View file

@ -0,0 +1,390 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210902192153_RenameLibraryBooks")]
partial class RenameLibraryBooks
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.9");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("LibraryBooks");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<float?>("Index")
.HasColumnType("REAL");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,63 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class RenameLibraryBooks : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropForeignKey(
name: "FK_Library_Books_BookId",
table: "Library");
migrationBuilder.DropPrimaryKey(
name: "PK_Library",
table: "Library");
migrationBuilder.RenameTable(
name: "Library",
newName: "LibraryBooks");
migrationBuilder.AddPrimaryKey(
name: "PK_LibraryBooks",
table: "LibraryBooks",
column: "BookId");
migrationBuilder.AddForeignKey(
name: "FK_LibraryBooks_Books_BookId",
table: "LibraryBooks",
column: "BookId",
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropForeignKey(
name: "FK_LibraryBooks_Books_BookId",
table: "LibraryBooks");
migrationBuilder.DropPrimaryKey(
name: "PK_LibraryBooks",
table: "LibraryBooks");
migrationBuilder.RenameTable(
name: "LibraryBooks",
newName: "Library");
migrationBuilder.AddPrimaryKey(
name: "PK_Library",
table: "Library",
column: "BookId");
migrationBuilder.AddForeignKey(
name: "FK_Library_Books_BookId",
table: "Library",
column: "BookId",
principalTable: "Books",
principalColumn: "BookId",
onDelete: ReferentialAction.Cascade);
}
}
}

View file

@ -0,0 +1,390 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
[Migration("20210922154900_AddSeriesOrderString")]
partial class AddSeriesOrderString
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.10");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("LibraryBooks");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Order")
.HasColumnType("TEXT");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,33 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace DataLayer.Migrations
{
public partial class AddSeriesOrderString : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Index",
table: "SeriesBook");
migrationBuilder.AddColumn<string>(
name: "Order",
table: "SeriesBook",
type: "TEXT",
nullable: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "Order",
table: "SeriesBook");
migrationBuilder.AddColumn<float>(
name: "Index",
table: "SeriesBook",
type: "REAL",
nullable: true);
}
}
}

View file

@ -0,0 +1,388 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace DataLayer.Migrations
{
[DbContext(typeof(LibationContext))]
partial class LibationContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.10");
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Property<int>("BookId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleProductId")
.HasColumnType("TEXT");
b.Property<int>("CategoryId")
.HasColumnType("INTEGER");
b.Property<int>("ContentType")
.HasColumnType("INTEGER");
b.Property<DateTime?>("DatePublished")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<bool>("IsAbridged")
.HasColumnType("INTEGER");
b.Property<int>("LengthInMinutes")
.HasColumnType("INTEGER");
b.Property<string>("Locale")
.HasColumnType("TEXT");
b.Property<string>("PictureId")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.HasIndex("AudibleProductId");
b.HasIndex("CategoryId");
b.ToTable("Books");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<int>("ContributorId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<byte>("Order")
.HasColumnType("INTEGER");
b.HasKey("BookId", "ContributorId", "Role");
b.HasIndex("BookId");
b.HasIndex("ContributorId");
b.ToTable("BookContributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.Property<int>("CategoryId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleCategoryId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int?>("ParentCategoryCategoryId")
.HasColumnType("INTEGER");
b.HasKey("CategoryId");
b.HasIndex("AudibleCategoryId");
b.HasIndex("ParentCategoryCategoryId");
b.ToTable("Categories");
b.HasData(
new
{
CategoryId = -1,
AudibleCategoryId = "",
Name = ""
});
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Property<int>("ContributorId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleContributorId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("ContributorId");
b.HasIndex("Name");
b.ToTable("Contributors");
b.HasData(
new
{
ContributorId = -1,
Name = ""
});
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Account")
.HasColumnType("TEXT");
b.Property<DateTime>("DateAdded")
.HasColumnType("TEXT");
b.HasKey("BookId");
b.ToTable("LibraryBooks");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Property<int>("SeriesId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AudibleSeriesId")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.HasKey("SeriesId");
b.HasIndex("AudibleSeriesId");
b.ToTable("Series");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("BookId")
.HasColumnType("INTEGER");
b.Property<string>("Order")
.HasColumnType("TEXT");
b.HasKey("SeriesId", "BookId");
b.HasIndex("BookId");
b.HasIndex("SeriesId");
b.ToTable("SeriesBook");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.HasOne("DataLayer.Category", "Category")
.WithMany()
.HasForeignKey("CategoryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<float>("OverallRating")
.HasColumnType("REAL");
b1.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b1.Property<float>("StoryRating")
.HasColumnType("REAL");
b1.HasKey("BookId");
b1.ToTable("Books");
b1.WithOwner()
.HasForeignKey("BookId");
});
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
{
b1.Property<int>("SupplementId")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<string>("Url")
.HasColumnType("TEXT");
b1.HasKey("SupplementId");
b1.HasIndex("BookId");
b1.ToTable("Supplement");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.Navigation("Book");
});
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
{
b1.Property<int>("BookId")
.HasColumnType("INTEGER");
b1.Property<int>("BookStatus")
.HasColumnType("INTEGER");
b1.Property<int?>("PdfStatus")
.HasColumnType("INTEGER");
b1.Property<string>("Tags")
.HasColumnType("TEXT");
b1.HasKey("BookId");
b1.ToTable("UserDefinedItem");
b1.WithOwner("Book")
.HasForeignKey("BookId");
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
{
b2.Property<int>("UserDefinedItemBookId")
.HasColumnType("INTEGER");
b2.Property<float>("OverallRating")
.HasColumnType("REAL");
b2.Property<float>("PerformanceRating")
.HasColumnType("REAL");
b2.Property<float>("StoryRating")
.HasColumnType("REAL");
b2.HasKey("UserDefinedItemBookId");
b2.ToTable("UserDefinedItem");
b2.WithOwner()
.HasForeignKey("UserDefinedItemBookId");
});
b1.Navigation("Book");
b1.Navigation("Rating");
});
b.Navigation("Category");
b.Navigation("Rating");
b.Navigation("Supplements");
b.Navigation("UserDefinedItem");
});
modelBuilder.Entity("DataLayer.BookContributor", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("ContributorsLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Contributor", "Contributor")
.WithMany("BooksLink")
.HasForeignKey("ContributorId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Contributor");
});
modelBuilder.Entity("DataLayer.Category", b =>
{
b.HasOne("DataLayer.Category", "ParentCategory")
.WithMany()
.HasForeignKey("ParentCategoryCategoryId");
b.Navigation("ParentCategory");
});
modelBuilder.Entity("DataLayer.LibraryBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithOne()
.HasForeignKey("DataLayer.LibraryBook", "BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
});
modelBuilder.Entity("DataLayer.SeriesBook", b =>
{
b.HasOne("DataLayer.Book", "Book")
.WithMany("SeriesLink")
.HasForeignKey("BookId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("DataLayer.Series", "Series")
.WithMany("BooksLink")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Book");
b.Navigation("Series");
});
modelBuilder.Entity("DataLayer.Book", b =>
{
b.Navigation("ContributorsLink");
b.Navigation("SeriesLink");
});
modelBuilder.Entity("DataLayer.Contributor", b =>
{
b.Navigation("BooksLink");
});
modelBuilder.Entity("DataLayer.Series", b =>
{
b.Navigation("BooksLink");
});
#pragma warning restore 612, 618
}
}
}

View file

@ -0,0 +1,39 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
// only library importing should use tracking. All else should be NoTracking.
// only library importing should directly query Book. All else should use LibraryBook
public static class BookQueries
{
public static Book GetBook_Flat_NoTracking(this LibationContext context, string productId)
=> context
.Books
.AsNoTrackingWithIdentityResolution()
.GetBook(productId);
public static Book GetBook(this IQueryable<Book> books, string productId)
=> books
.GetBooks()
// 'Single' is more accurate, but 'First' is faster and less error-prone
.FirstOrDefault(b => b.AudibleProductId == productId);
/// <summary>This is still IQueryable. YOU MUST CALL ToList() YOURSELF</summary>
public static IQueryable<Book> GetBooks(this IQueryable<Book> books, Expression<Func<Book, bool>> predicate)
=> books
.GetBooks()
.Where(predicate);
/// <summary>This is still IQueryable. YOU MUST CALL ToList() YOURSELF</summary>
public static IQueryable<Book> GetBooks(this IQueryable<Book> books)
=> books
// owned items are always loaded. eg: book.UserDefinedItem, book.Supplements
.Include(b => b.SeriesLink).ThenInclude(sb => sb.Series)
.Include(b => b.ContributorsLink).ThenInclude(c => c.Contributor)
.Include(b => b.Category).ThenInclude(c => c.ParentCategory);
}
}
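A minimal usage sketch of the extensions above, assuming an open LibationContext named context (the product id is a hypothetical placeholder):

// sketch only: fetch one book by product id, without tracking
var book = context.GetBook_Flat_NoTracking("B000EXAMPLE");
// or compose the IQueryable form yourself and remember to materialize it
var longBooks = context.Books
    .AsNoTrackingWithIdentityResolution()
    .GetBooks(b => b.LengthInMinutes > 600)
    .ToList();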

View file

@ -0,0 +1,44 @@
using System.Collections.Generic;
using System.Linq;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
// only library importing should use tracking. All else should be NoTracking.
// only library importing should directly query Book. All else should use LibraryBook
public static class LibraryBookQueries
{
//// tracking is a bad idea for main grid. it prevents anything else from updating entities unless getting them from the grid
//public static List<LibraryBook> GetLibrary_Flat_WithTracking(this LibationContext context)
// => context
// .Library
// .GetLibrary()
// .ToList();
public static List<LibraryBook> GetLibrary_Flat_NoTracking(this LibationContext context)
=> context
.LibraryBooks
.AsNoTrackingWithIdentityResolution()
.GetLibrary()
.ToList();
public static LibraryBook GetLibraryBook_Flat_NoTracking(this LibationContext context, string productId)
=> context
.LibraryBooks
.AsNoTrackingWithIdentityResolution()
.GetLibraryBook(productId);
public static LibraryBook GetLibraryBook(this IQueryable<LibraryBook> library, string productId)
=> library
.GetLibrary()
.SingleOrDefault(lb => lb.Book.AudibleProductId == productId);
/// <summary>This is still IQueryable. YOU MUST CALL ToList() YOURSELF</summary>
public static IQueryable<LibraryBook> GetLibrary(this IQueryable<LibraryBook> library)
=> library
// owned items are always loaded. eg: book.UserDefinedItem, book.Supplements
.Include(le => le.Book).ThenInclude(b => b.SeriesLink).ThenInclude(sb => sb.Series)
.Include(le => le.Book).ThenInclude(b => b.ContributorsLink).ThenInclude(c => c.Contributor)
.Include(le => le.Book).ThenInclude(b => b.Category).ThenInclude(c => c.ParentCategory);
}
}
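A similarly hedged sketch for the library side, again assuming an open LibationContext named context:

// sketch only: full library for display (no tracking), and a single entry by a hypothetical product id
var library = context.GetLibrary_Flat_NoTracking();
var libraryBook = context.GetLibraryBook_Flat_NoTracking("B000EXAMPLE");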

View file

@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core.Collections.Generic;
using Dinah.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
internal class TagPersistenceInterceptor : IDbInterceptor
{
public void Executed(DbContext context) { }
public void Executing(DbContext context)
{
var tagsCollection
= context
.ChangeTracker
.Entries()
.Where(e => e.State.In(EntityState.Modified, EntityState.Added))
.Select(e => e.Entity as UserDefinedItem)
.Where(udi => udi is not null)
// do NOT filter out entries with blank tags. blank is the valid way to show the absence of tags
.Select(t => (t.Book.AudibleProductId, t.Tags))
.ToList();
LibationFileManager.TagsPersistence.Save(tagsCollection);
}
}
}
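The interceptor above only defines what happens around a save; the actual registration is handled by Dinah.EntityFrameworkCore and is not part of this commit. Conceptually it amounts to something like this sketch (an illustration, not the library's real wiring):

using Microsoft.EntityFrameworkCore;
namespace DataLayer
{
    // conceptual sketch only: roughly how an IDbInterceptor-style hook wraps SaveChanges
    internal class InterceptedContextSketch : DbContext
    {
        private readonly TagPersistenceInterceptor interceptor = new();
        public override int SaveChanges()
        {
            interceptor.Executing(this);   // persist tags before the changes are written
            var result = base.SaveChanges();
            interceptor.Executed(this);    // currently a no-op
            return result;
        }
    }
}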

View file

@ -0,0 +1,6 @@
{
"ConnectionStrings": {
"// this connection string is ONLY used for DataLayer's Migrations. this appsettings.json file is NOT used at all by application; it is overwritten": "",
"LibationContext": "Data Source=LibationContext.db;Foreign Keys=False;"
}
}

View file

@ -0,0 +1,185 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
using AudibleUtilities;
using DataLayer;
using Dinah.Core.Collections.Generic;
namespace DtoImporterService
{
public class BookImporter : ItemsImporterBase
{
protected override IValidator Validator => new BookValidator();
public Dictionary<string, Book> Cache { get; private set; } = new();
private ContributorImporter contributorImporter { get; }
private SeriesImporter seriesImporter { get; }
private CategoryImporter categoryImporter { get; }
public BookImporter(LibationContext context) : base(context)
{
contributorImporter = new ContributorImporter(DbContext);
seriesImporter = new SeriesImporter(DbContext);
categoryImporter = new CategoryImporter(DbContext);
}
protected override int DoImport(IEnumerable<ImportItem> importItems)
{
// pre-req.s
contributorImporter.Import(importItems);
seriesImporter.Import(importItems);
categoryImporter.Import(importItems);
// load db existing => hash table
loadLocal_books(importItems);
// upsert
var qtyNew = upsertBooks(importItems);
return qtyNew;
}
private void loadLocal_books(IEnumerable<ImportItem> importItems)
{
// get distinct
var productIds = importItems
.Select(i => i.DtoItem.ProductId)
.Distinct()
.ToList();
Cache = DbContext.Books
.GetBooks(b => productIds.Contains(b.AudibleProductId))
.ToDictionarySafe(b => b.AudibleProductId);
}
private int upsertBooks(IEnumerable<ImportItem> importItems)
{
var qtyNew = 0;
foreach (var item in importItems)
{
if (!Cache.TryGetValue(item.DtoItem.ProductId, out var book))
{
book = createNewBook(item);
qtyNew++;
}
updateBook(item, book);
}
return qtyNew;
}
private Book createNewBook(ImportItem importItem)
{
var item = importItem.DtoItem;
var contentType = item.IsEpisodes ? DataLayer.ContentType.Episode : DataLayer.ContentType.Product;
// absence of authors is very rare, but possible
if (!item.Authors?.Any() ?? true)
item.Authors = new[] { new Person { Name = "", Asin = null } };
// nested logic is required so the order of names is retained; otherwise, contributors may appear in the order they were inserted into the db
var authors = item
.Authors
.Select(a => contributorImporter.Cache[a.Name])
.ToList();
var narrators
= item.Narrators is null || !item.Narrators.Any()
// if no narrators listed, author is the narrator
? authors
// nested logic is required so the order of names is retained; otherwise, contributors may appear in the order they were inserted into the db
: item
.Narrators
.Select(n => contributorImporter.Cache[n.Name])
.ToList();
// categories are laid out for a breadcrumb. category is 1st, subcategory is 2nd
// absence of categories is also possible
// CATEGORY HACK: only use the 1st 2 categories
// (real impl: var lastCategory = item.Categories.LastOrDefault()?.CategoryId ?? "";)
var lastCategory
= item.Categories.Length == 0 ? ""
: item.Categories.Length == 1 ? item.Categories[0].CategoryId
// 2+
: item.Categories[1].CategoryId;
var category = categoryImporter.Cache[lastCategory];
Book book;
try
{
book = DbContext.Books.Add(new Book(
new AudibleProductId(item.ProductId),
item.TitleWithSubtitle,
item.Description,
item.LengthInMinutes,
contentType,
authors,
narrators,
category,
importItem.LocaleName)
).Entity;
Cache.Add(book.AudibleProductId, book);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding book. {@DebugInfo}", new {
item.ProductId,
item.TitleWithSubtitle,
item.Description,
item.LengthInMinutes,
contentType,
QtyAuthors = authors?.Count,
QtyNarrators = narrators?.Count,
Category = category?.Name,
importItem.LocaleName
});
throw;
}
var publisherName = item.Publisher;
if (!string.IsNullOrWhiteSpace(publisherName))
{
var publisher = contributorImporter.Cache[publisherName];
book.ReplacePublisher(publisher);
}
book.UpdateBookDetails(item.IsAbridged, item.DatePublished);
if (item.PdfUrl is not null)
book.AddSupplementDownloadUrl(item.PdfUrl.ToString());
return book;
}
private void updateBook(ImportItem importItem, Book book)
{
var item = importItem.DtoItem;
// set/update book-specific info which may have changed
if (item.PictureId is not null)
book.PictureId = item.PictureId;
book.UpdateProductRating(item.Product_OverallStars, item.Product_PerformanceStars, item.Product_StoryStars);
// important to update user-specific info. this will have changed if user has rated/reviewed the book since last library import
book.UserDefinedItem.UpdateRating(item.MyUserRating_Overall, item.MyUserRating_Performance, item.MyUserRating_Story);
// update series even for existing books. these are occasionally updated
// these will upsert over library-scraped series, but will not leave orphans
if (item.Series is not null)
{
foreach (var seriesEntry in item.Series)
{
var series = seriesImporter.Cache[seriesEntry.SeriesId];
book.UpsertSeries(series, seriesEntry.Sequence);
}
}
}
}
}

View file

@ -0,0 +1,104 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
using AudibleUtilities;
using DataLayer;
using Dinah.Core.Collections.Generic;
namespace DtoImporterService
{
public class CategoryImporter : ItemsImporterBase
{
protected override IValidator Validator => new CategoryValidator();
public Dictionary<string, Category> Cache { get; private set; } = new();
public CategoryImporter(LibationContext context) : base(context) { }
protected override int DoImport(IEnumerable<ImportItem> importItems)
{
// get distinct
var categoryIds = importItems
.Select(i => i.DtoItem)
.GetCategoriesDistinct()
.Select(c => c.CategoryId)
.Distinct()
.ToList();
// load db existing => .Local
loadLocal_categories(categoryIds);
// upsert
var categoryPairs = importItems
.Select(i => i.DtoItem)
.GetCategoryPairsDistinct()
.ToList();
var qtyNew = upsertCategories(categoryPairs);
return qtyNew;
}
private void loadLocal_categories(List<string> categoryIds)
{
// must include default/empty/missing
categoryIds.Add(Category.GetEmpty().AudibleCategoryId);
// load existing => local
Cache = DbContext.Categories
.Where(c => categoryIds.Contains(c.AudibleCategoryId))
.ToDictionarySafe(c => c.AudibleCategoryId);
}
// only use after loading contributors => local
private int upsertCategories(List<Ladder[]> categoryPairs)
{
var qtyNew = 0;
foreach (var pair in categoryPairs)
{
for (var i = 0; i < pair.Length; i++)
{
// CATEGORY HACK: not yet supported: depth beyond 0 and 1
if (i > 1)
break;
var id = pair[i].CategoryId;
var name = pair[i].CategoryName;
Category parentCategory = null;
if (i == 1)
Cache.TryGetValue(pair[0].CategoryId, out parentCategory);
if (!Cache.TryGetValue(id, out var category))
{
category = addCategory(id, name);
qtyNew++;
}
category.UpdateParentCategory(parentCategory);
}
}
return qtyNew;
}
private Category addCategory(string id, string name)
{
try
{
var category = new Category(new AudibleCategoryId(id), name);
var entityEntry = DbContext.Categories.Add(category);
var entity = entityEntry.Entity;
Cache.Add(entity.AudibleCategoryId, entity);
return entity;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding category. {@DebugInfo}", new { id, name });
throw;
}
}
}
}

View file

@ -0,0 +1,113 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
using AudibleUtilities;
using DataLayer;
using Dinah.Core.Collections.Generic;
namespace DtoImporterService
{
public class ContributorImporter : ItemsImporterBase
{
protected override IValidator Validator => new ContributorValidator();
public Dictionary<string, Contributor> Cache { get; private set; } = new();
public ContributorImporter(LibationContext context) : base(context) { }
protected override int DoImport(IEnumerable<ImportItem> importItems)
{
// get distinct
var authors = importItems
.Select(i => i.DtoItem)
.GetAuthorsDistinct()
.ToList();
var narrators = importItems
.Select(i => i.DtoItem)
.GetNarratorsDistinct()
.ToList();
var publishers = importItems
.Select(i => i.DtoItem)
.GetPublishersDistinct()
.ToList();
// load db existing => .Local
var allNames = publishers
.Union(authors.Select(n => n.Name))
.Union(narrators.Select(n => n.Name))
.Where(name => !string.IsNullOrWhiteSpace(name))
.ToList();
loadLocal_contributors(allNames);
// upsert
var qtyNew = 0;
qtyNew += upsertPeople(authors);
qtyNew += upsertPeople(narrators);
qtyNew += upsertPublishers(publishers);
return qtyNew;
}
private void loadLocal_contributors(List<string> contributorNames)
{
// must include default/empty/missing
contributorNames.Add(Contributor.GetEmpty().Name);
// load existing => local
Cache = DbContext.Contributors
.Where(c => contributorNames.Contains(c.Name))
.ToDictionarySafe(c => c.Name);
}
private int upsertPeople(List<Person> people)
{
var hash = people
// new people only
.Where(p => !Cache.ContainsKey(p.Name))
// remove duplicates by Name. first in wins
.ToDictionarySafe(p => p.Name);
foreach (var kvp in hash)
{
var person = kvp.Value;
addContributor(person.Name, person.Asin);
}
return hash.Count;
}
// only use after loading contributors => local
private int upsertPublishers(List<string> publishers)
{
var hash = publishers
// new publishers only
.Where(p => !Cache.ContainsKey(p))
// remove duplicates
.ToHashSet();
foreach (var pub in hash)
addContributor(pub);
return hash.Count;
}
private Contributor addContributor(string name, string id = null)
{
try
{
var newContrib = new Contributor(name);
var entityEntry = DbContext.Contributors.Add(newContrib);
var entity = entityEntry.Entity;
Cache.Add(entity.Name, entity);
return entity;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding contributor. {@DebugInfo}", new { name, id });
throw;
}
}
}
}

View file

@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\AudibleUtilities\AudibleUtilities.csproj" />
<ProjectReference Include="..\DataLayer\DataLayer.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,12 @@
using System;
using AudibleApi.Common;
namespace DtoImporterService
{
public class ImportItem
{
public Item DtoItem { get; set; }
public string AccountId { get; set; }
public string LocaleName { get; set; }
}
}

View file

@ -0,0 +1,61 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleUtilities;
using DataLayer;
using Dinah.Core;
namespace DtoImporterService
{
public abstract class ImporterBase<T>
{
protected LibationContext DbContext { get; }
protected ImporterBase(LibationContext context)
{
ArgumentValidator.EnsureNotNull(context, nameof(context));
DbContext = context;
}
/// <summary>LONG RUNNING. call with await Task.Run</summary>
public int Import(T param) => Run(DoImport, param);
public TResult Run<TResult>(Func<T, TResult> func, T param)
{
try
{
var exceptions = Validate(param);
if (exceptions is not null && exceptions.Any())
throw new AggregateException("Importer validation failed", exceptions);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Import error: validation");
throw;
}
try
{
var result = func(param);
return result;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Import error: post-validation importing");
throw;
}
}
protected abstract int DoImport(T elements);
public abstract IEnumerable<Exception> Validate(T param);
}
public abstract class ItemsImporterBase : ImporterBase<IEnumerable<ImportItem>>
{
protected ItemsImporterBase(LibationContext context) : base(context) { }
protected abstract IValidator Validator { get; }
public sealed override IEnumerable<Exception> Validate(IEnumerable<ImportItem> importItems)
=> Validator.Validate(importItems.Select(i => i.DtoItem));
}
}
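// A minimal sketch of driving one of these importers. The class below is hypothetical and
// not part of this commit; it assumes a LibationContext and importItems already exist.
namespace DtoImporterService.Sketches
{
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using DataLayer;

    public static class ImporterUsageSketch
    {
        public static async Task<int> ImportContributorsAsync(LibationContext context, IEnumerable<ImportItem> importItems)
        {
            var importer = new ContributorImporter(context);
            // Import() is long running; per the summary above, call it with await Task.Run
            return await Task.Run(() => importer.Import(importItems));
        }
    }
}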

View file

@ -0,0 +1,73 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleUtilities;
using DataLayer;
using Dinah.Core.Collections.Generic;
namespace DtoImporterService
{
public class LibraryBookImporter : ItemsImporterBase
{
protected override IValidator Validator => new LibraryValidator();
private BookImporter bookImporter { get; }
public LibraryBookImporter(LibationContext context) : base(context)
{
bookImporter = new BookImporter(DbContext);
}
protected override int DoImport(IEnumerable<ImportItem> importItems)
{
bookImporter.Import(importItems);
var qtyNew = upsertLibraryBooks(importItems);
return qtyNew;
}
private int upsertLibraryBooks(IEnumerable<ImportItem> importItems)
{
// technically, we should be able to have duplicate books from separate accounts.
// this would violate the current pk and would be difficult to deal with elsewhere:
// - what to show in the grid
// - which to consider liberated
//
// sqlite cannot alter a pk. the workaround is an extensive headache
// - update: now possible in .net5/efcore5
//
// currently, inserting LibraryBook will throw an error if the same book is in multiple accounts for the same region.
//
// CURRENT SOLUTION: don't re-insert
var currentLibraryProductIds = DbContext.LibraryBooks.Select(l => l.Book.AudibleProductId).ToList();
var newItems = importItems
.Where(dto => !currentLibraryProductIds.Contains(dto.DtoItem.ProductId))
.ToList();
// if 2 accounts try to import the same book in the same transaction: error since we're only tracking and pulling by asin.
// just use the first
var hash = newItems.ToDictionarySafe(dto => dto.DtoItem.ProductId);
foreach (var kvp in hash)
{
var newItem = kvp.Value;
var libraryBook = new LibraryBook(
bookImporter.Cache[newItem.DtoItem.ProductId],
newItem.DtoItem.DateAdded,
newItem.AccountId);
try
{
DbContext.LibraryBooks.Add(libraryBook);
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding library book. {@DebugInfo}", new { libraryBook.Book, libraryBook.Account });
}
}
var qtyNew = hash.Count;
return qtyNew;
}
}
}

View file

@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace DtoImporterService
{
public record timeLogEntry(string msg, long totalElapsed, long delta);
public static class PerfLogger
{
private static Stopwatch sw { get; } = new();
private static List<timeLogEntry> __log { get; } = new() { new("begin", 0, 0) };
public static void logTime(string s)
{
var totalElapsed = sw.ElapsedMilliseconds;
var prev = __log.Last().totalElapsed;
var delta = totalElapsed - prev;
__log.Add(new(s, totalElapsed, delta));
}
public static void logRestart()
{
__log.Clear();
__log.Add(new("begin", 0, 0));
sw.Restart();
}
public static void stop() => sw.Stop();
public static string logOutput =>
$"{nameof(timeLogEntry.msg)}\t{nameof(timeLogEntry.totalElapsed)}\t{nameof(timeLogEntry.delta)}\r\n"
+ __log.Select(t => $"{t.msg}\t{t.totalElapsed}\t{t.delta}").Aggregate((a, b) => $"{a}\r\n{b}");
}
}
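// Rough usage sketch of PerfLogger. The wrapper below is hypothetical; it only shows the
// restart/log/stop/output sequence around two arbitrary phases of work.
namespace DtoImporterService.Sketches
{
    public static class PerfLoggerUsageSketch
    {
        public static string TimeTwoPhases()
        {
            PerfLogger.logRestart();        // clear entries and restart the stopwatch
            // ... phase 1 work ...
            PerfLogger.logTime("phase 1");
            // ... phase 2 work ...
            PerfLogger.logTime("phase 2");
            PerfLogger.stop();

            // tab-separated columns: msg, totalElapsed, delta
            return PerfLogger.logOutput;
        }
    }
}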

View file

@ -0,0 +1,82 @@
using System;
using System.Collections.Generic;
using System.Linq;
using AudibleApi.Common;
using AudibleUtilities;
using DataLayer;
using Dinah.Core.Collections.Generic;
namespace DtoImporterService
{
public class SeriesImporter : ItemsImporterBase
{
protected override IValidator Validator => new SeriesValidator();
public Dictionary<string, DataLayer.Series> Cache { get; private set; } = new();
public SeriesImporter(LibationContext context) : base(context) { }
protected override int DoImport(IEnumerable<ImportItem> importItems)
{
// get distinct
var series = importItems
.Select(i => i.DtoItem)
.GetSeriesDistinct()
.ToList();
// load db existing => .Local
loadLocal_series(series);
// upsert
var qtyNew = upsertSeries(series);
return qtyNew;
}
private void loadLocal_series(List<AudibleApi.Common.Series> series)
{
var seriesIds = series.Select(s => s.SeriesId).Distinct().ToList();
if (seriesIds.Any())
Cache = DbContext.Series
.Where(s => seriesIds.Contains(s.AudibleSeriesId))
.ToDictionarySafe(s => s.AudibleSeriesId);
}
private int upsertSeries(List<AudibleApi.Common.Series> requestedSeries)
{
var qtyNew = 0;
foreach (var s in requestedSeries)
{
// AudibleApi.Common.Series.SeriesId == DataLayer.AudibleSeriesId
if (!Cache.TryGetValue(s.SeriesId, out var series))
{
series = addSeries(s.SeriesId);
qtyNew++;
}
series.UpdateName(s.SeriesName);
}
return qtyNew;
}
private DataLayer.Series addSeries(string seriesId)
{
try
{
var series = new DataLayer.Series(new AudibleSeriesId(seriesId));
var entityEntry = DbContext.Series.Add(series);
var entity = entityEntry.Entity;
Cache.Add(entity.AudibleSeriesId, entity);
return entity;
}
catch (Exception ex)
{
Serilog.Log.Logger.Error(ex, "Error adding series. {@DebugInfo}", new { seriesId });
throw;
}
}
}
}

View file

@ -0,0 +1,37 @@
* Local (eg DbContext.Books.Local): indexes/hashes PK and nothing else. Local.Find(PK) is fast. All other searches (eg FirstOrDefault) have awful performance. It deceptively *feels* like we get this partially for free since added/modified entries live here.
* live db: for all importers, fields used for lookup are indexed
Using BookImporter as an example: since AudibleProductId is indexed, hitting the live db is much faster than using Local. Fastest is putting all in a local hash table
Note: GetBook/GetBooks eager-load Series, Category, et al
for 1,200 iterations
* load to LocalView
DbContext.Books.Local.FirstOrDefault(p => p.AudibleProductId == item.DtoItem.ProductId)
27,125 ms
* read from live db
DbContext.Books.GetBook(item.DtoItem.ProductId)
12,224 ms
* load to hash table: Dictionary<string, Book>
dictionary[item.DtoItem.ProductId];
1 ms (yes: ONE)
With the hash table, memory usage was, surprisingly, not significantly affected
-----------------------------------
why were we using Local to begin with?
articles suggest loading to Local with
context.Books.Load();
this loads the table but not its associated fields
we want Books and associated fields
context.Books.GetBooks(b => remainingProductIds.Contains(b.AudibleProductId)).ToList();
this emulates Load() while also loading the associated fields
from: Microsoft.EntityFrameworkCore.EntityFrameworkQueryableExtensions
// Summary:
// Enumerates the query. When using Entity Framework, this causes the results of
// the query to be loaded into the associated context. This is equivalent to calling
// ToList and then throwing away the list (without the overhead of actually creating
// the list).
public static void Load<TSource>([NotNullAttribute] this IQueryable<TSource> source);
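Sketch of the dictionary approach above, reusing the identifiers from these notes (illustrative only, not a drop-in snippet):

var books = DbContext.Books
    .GetBooks(b => remainingProductIds.Contains(b.AudibleProductId))
    .ToList()
    .ToDictionarySafe(b => b.AudibleProductId);

foreach (var item in importItems)
    if (books.TryGetValue(item.DtoItem.ProductId, out var book))
    {
        // book already has its associated fields loaded; lookup is O(1) per item
    }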

View file

@ -0,0 +1,47 @@
using System;
namespace FileLiberator
{
public abstract class AudioDecodable : Processable
{
public event EventHandler<Action<byte[]>> RequestCoverArt;
public event EventHandler<string> TitleDiscovered;
public event EventHandler<string> AuthorsDiscovered;
public event EventHandler<string> NarratorsDiscovered;
public event EventHandler<byte[]> CoverImageDiscovered;
public abstract void Cancel();
protected void OnTitleDiscovered(string title) => OnTitleDiscovered(null, title);
protected void OnTitleDiscovered(object _, string title)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(TitleDiscovered), Title = title });
TitleDiscovered?.Invoke(this, title);
}
protected void OnAuthorsDiscovered(string authors) => OnAuthorsDiscovered(null, authors);
protected void OnAuthorsDiscovered(object _, string authors)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(AuthorsDiscovered), Authors = authors });
AuthorsDiscovered?.Invoke(this, authors);
}
protected void OnNarratorsDiscovered(string narrators) => OnNarratorsDiscovered(null, narrators);
protected void OnNarratorsDiscovered(object _, string narrators)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(NarratorsDiscovered), Narrators = narrators });
NarratorsDiscovered?.Invoke(this, narrators);
}
protected void OnRequestCoverArt(Action<byte[]> setCoverArtDel)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(RequestCoverArt) });
RequestCoverArt?.Invoke(this, setCoverArtDel);
}
protected void OnCoverImageDiscovered(byte[] coverImage)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(CoverImageDiscovered), CoverImageBytes = coverImage?.Length });
CoverImageDiscovered?.Invoke(this, coverImage);
}
}
}

View file

@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using DataLayer;
using Dinah.Core;
using FileManager;
using LibationFileManager;
namespace FileLiberator
{
public static class AudioFileStorageExt
{
private class MultipartRenamer
{
private LibraryBook libraryBook { get; }
internal MultipartRenamer(LibraryBook libraryBook) => this.libraryBook = libraryBook;
internal string MultipartFilename(AaxDecrypter.MultiConvertFileProperties props)
=> Templates.ChapterFile.GetFilename(libraryBook.ToDto(), props);
}
public static Func<AaxDecrypter.MultiConvertFileProperties, string> CreateMultipartRenamerFunc(this AudioFileStorage _, LibraryBook libraryBook)
=> new MultipartRenamer(libraryBook).MultipartFilename;
/// <summary>
/// DownloadDecryptBook:
/// File path for where to move files into.
/// Path: directory nested inside of Books directory
/// File name: n/a
/// </summary>
public static string GetDestinationDirectory(this AudioFileStorage _, LibraryBook libraryBook)
=> Templates.Folder.GetFilename(libraryBook.ToDto());
/// <summary>
/// DownloadDecryptBook:
/// Path: in progress directory.
/// File name: final file name.
/// </summary>
public static string GetInProgressFilename(this AudioFileStorage _, LibraryBook libraryBook, string extension)
=> Templates.File.GetFilename(libraryBook.ToDto(), AudibleFileStorage.DecryptInProgressDirectory, extension, returnFirstExisting: true);
/// <summary>
/// PDF: audio file does not exist
/// </summary>
public static string GetBooksDirectoryFilename(this AudioFileStorage _, LibraryBook libraryBook, string extension)
=> Templates.File.GetFilename(libraryBook.ToDto(), AudibleFileStorage.BooksDirectory, extension);
/// <summary>
/// PDF: audio file already exists
/// </summary>
public static string GetCustomDirFilename(this AudioFileStorage _, LibraryBook libraryBook, string dirFullPath, string extension)
=> Templates.File.GetFilename(libraryBook.ToDto(), dirFullPath, extension);
}
}

View file

@ -0,0 +1,93 @@
using System;
using System.IO;
using System.Threading.Tasks;
using AAXClean;
using AAXClean.Codecs;
using DataLayer;
using Dinah.Core;
using Dinah.Core.ErrorHandling;
using Dinah.Core.Net.Http;
using FileManager;
using LibationFileManager;
namespace FileLiberator
{
public class ConvertToMp3 : AudioDecodable
{
private Mp4File m4bBook;
private long fileSize;
private static string Mp3FileName(string m4bPath) => Path.ChangeExtension(m4bPath ?? "", ".mp3");
public override void Cancel() => m4bBook?.Cancel();
public override bool Validate(LibraryBook libraryBook)
{
var path = AudibleFileStorage.Audio.GetPath(libraryBook.Book.AudibleProductId);
return path?.ToLower()?.EndsWith(".m4b") == true && !File.Exists(Mp3FileName(path));
}
public override async Task<StatusHandler> ProcessAsync(LibraryBook libraryBook)
{
OnBegin(libraryBook);
OnStreamingBegin($"Begin converting {libraryBook} to mp3");
try
{
var m4bPath = AudibleFileStorage.Audio.GetPath(libraryBook.Book.AudibleProductId);
m4bBook = new Mp4File(m4bPath, FileAccess.Read);
m4bBook.ConversionProgressUpdate += M4bBook_ConversionProgressUpdate;
fileSize = m4bBook.InputStream.Length;
OnTitleDiscovered(m4bBook.AppleTags.Title);
OnAuthorsDiscovered(m4bBook.AppleTags.FirstAuthor);
OnNarratorsDiscovered(m4bBook.AppleTags.Narrator);
OnCoverImageDiscovered(m4bBook.AppleTags.Cover);
using var mp3File = File.OpenWrite(Path.GetTempFileName());
var result = await Task.Run(() => m4bBook.ConvertToMp3(mp3File));
m4bBook.InputStream.Close();
mp3File.Close();
var proposedMp3Path = Mp3FileName(m4bPath);
var realMp3Path = FileUtility.SaferMoveToValidPath(mp3File.Name, proposedMp3Path);
OnFileCreated(libraryBook, realMp3Path);
var statusHandler = new StatusHandler();
if (result == ConversionResult.Failed)
statusHandler.AddError("Conversion failed");
return statusHandler;
}
finally
{
OnStreamingCompleted($"Completed converting to mp3: {libraryBook.Book.Title}");
OnCompleted(libraryBook);
}
}
private void M4bBook_ConversionProgressUpdate(object sender, ConversionProgressEventArgs e)
{
var duration = m4bBook.Duration;
var remainingSecsToProcess = (duration - e.ProcessPosition).TotalSeconds;
var estTimeRemaining = remainingSecsToProcess / e.ProcessSpeed;
if (double.IsNormal(estTimeRemaining))
OnStreamingTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
double progressPercent = 100 * e.ProcessPosition.TotalSeconds / duration.TotalSeconds;
OnStreamingProgressChanged(
new DownloadProgress
{
ProgressPercentage = progressPercent,
BytesReceived = (long)(fileSize * progressPercent / 100),
TotalBytesToReceive = fileSize
});
}
}
}

View file

@ -0,0 +1,281 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using AaxDecrypter;
using AudibleApi;
using DataLayer;
using Dinah.Core;
using Dinah.Core.ErrorHandling;
using FileManager;
using LibationFileManager;
namespace FileLiberator
{
public class DownloadDecryptBook : AudioDecodable
{
private AudiobookDownloadBase abDownloader;
public override bool Validate(LibraryBook libraryBook) => !libraryBook.Book.Audio_Exists;
public override void Cancel() => abDownloader?.Cancel();
public override async Task<StatusHandler> ProcessAsync(LibraryBook libraryBook)
{
var entries = new List<FilePathCache.CacheEntry>();
// these handlers can be this minimal only b/c CacheEntry is a record (value equality).
// in case of parallel decrypts, only capture the ones for this book id.
// if user somehow starts multiple decrypts of the same book in parallel: on their own head be it
void FilePathCache_Inserted(object sender, FilePathCache.CacheEntry e)
{
if (e.Id.EqualsInsensitive(libraryBook.Book.AudibleProductId))
entries.Add(e);
}
void FilePathCache_Removed(object sender, FilePathCache.CacheEntry e)
{
if (e.Id.EqualsInsensitive(libraryBook.Book.AudibleProductId))
entries.Remove(e);
}
OnBegin(libraryBook);
try
{
if (libraryBook.Book.Audio_Exists)
return new StatusHandler { "Cannot find decrypt. Final audio file already exists" };
bool success = false;
try
{
FilePathCache.Inserted += FilePathCache_Inserted;
FilePathCache.Removed += FilePathCache_Removed;
success = await downloadAudiobookAsync(libraryBook);
}
finally
{
FilePathCache.Inserted -= FilePathCache_Inserted;
FilePathCache.Removed -= FilePathCache_Removed;
}
// decrypt failed
if (!success)
{
foreach (var tmpFile in entries.Where(f => f.FileType != FileType.AAXC))
FileUtility.SaferDelete(tmpFile.Path);
return new StatusHandler { "Decrypt failed" };
}
// moves new files from temp dir to final dest
var movedAudioFile = moveFilesToBooksDir(libraryBook, entries);
// decrypt failed
if (!movedAudioFile)
return new StatusHandler { "Cannot find final audio file after decryption" };
libraryBook.Book.UserDefinedItem.BookStatus = LiberatedStatus.Liberated;
return new StatusHandler();
}
finally
{
OnCompleted(libraryBook);
}
}
private async Task<bool> downloadAudiobookAsync(LibraryBook libraryBook)
{
OnStreamingBegin($"Begin decrypting {libraryBook}");
try
{
var config = Configuration.Instance;
downloadValidation(libraryBook);
var api = await libraryBook.GetApiAsync();
var contentLic = await api.GetDownloadLicenseAsync(libraryBook.Book.AudibleProductId);
var audiobookDlLic = BuildDownloadOptions(config, contentLic);
var outFileName = AudibleFileStorage.Audio.GetInProgressFilename(libraryBook, audiobookDlLic.OutputFormat.ToString().ToLower());
var cacheDir = AudibleFileStorage.DownloadsInProgressDirectory;
if (contentLic.DrmType != AudibleApi.Common.DrmType.Adrm)
abDownloader = new UnencryptedAudiobookDownloader(outFileName, cacheDir, audiobookDlLic);
else
{
AaxcDownloadConvertBase converter
= config.SplitFilesByChapter ? new AaxcDownloadMultiConverter(
outFileName, cacheDir, audiobookDlLic,
AudibleFileStorage.Audio.CreateMultipartRenamerFunc(libraryBook))
: new AaxcDownloadSingleConverter(outFileName, cacheDir, audiobookDlLic);
if (config.AllowLibationFixup)
converter.RetrievedMetadata += (_, tags) => tags.Generes = string.Join(", ", libraryBook.Book.CategoriesNames);
abDownloader = converter;
}
abDownloader.DecryptProgressUpdate += OnStreamingProgressChanged;
abDownloader.DecryptTimeRemaining += OnStreamingTimeRemaining;
abDownloader.RetrievedTitle += OnTitleDiscovered;
abDownloader.RetrievedAuthors += OnAuthorsDiscovered;
abDownloader.RetrievedNarrators += OnNarratorsDiscovered;
abDownloader.RetrievedCoverArt += AaxcDownloader_RetrievedCoverArt;
abDownloader.FileCreated += (_, path) => OnFileCreated(libraryBook, path);
// REAL WORK DONE HERE
var success = await Task.Run(abDownloader.Run);
return success;
}
finally
{
OnStreamingCompleted($"Completed downloading and decrypting {libraryBook.Book.Title}");
}
}
private static DownloadOptions BuildDownloadOptions(Configuration config, AudibleApi.Common.ContentLicense contentLic)
{
//I assume if ContentFormat == "MPEG" that the delivered file is an unencrypted mp3.
//I also assume that if DrmType != Adrm, the file will be an mp3.
//These assumptions may be wrong, and only time and bug reports will tell.
bool encrypted = contentLic.DrmType == AudibleApi.Common.DrmType.Adrm;
var outputFormat = !encrypted || (config.AllowLibationFixup && config.DecryptToLossy) ?
OutputFormat.Mp3 : OutputFormat.M4b;
var audiobookDlLic = new DownloadOptions
(
contentLic?.ContentMetadata?.ContentUrl?.OfflineUrl,
Resources.USER_AGENT
)
{
AudibleKey = contentLic?.Voucher?.Key,
AudibleIV = contentLic?.Voucher?.Iv,
OutputFormat = outputFormat,
TrimOutputToChapterLength = config.AllowLibationFixup && config.StripAudibleBrandAudio,
RetainEncryptedFile = config.RetainAaxFile && encrypted,
StripUnabridged = config.AllowLibationFixup && config.StripUnabridged,
Downsample = config.AllowLibationFixup && config.LameDownsampleMono,
MatchSourceBitrate = config.AllowLibationFixup && config.LameMatchSourceBR && config.LameTargetBitrate,
CreateCueSheet = config.CreateCueSheet
};
if (config.AllowLibationFixup || outputFormat == OutputFormat.Mp3)
{
long startMs = audiobookDlLic.TrimOutputToChapterLength ?
contentLic.ContentMetadata.ChapterInfo.BrandIntroDurationMs : 0;
audiobookDlLic.ChapterInfo = new AAXClean.ChapterInfo(TimeSpan.FromMilliseconds(startMs));
for (int i = 0; i < contentLic.ContentMetadata.ChapterInfo.Chapters.Length; i++)
{
var chapter = contentLic.ContentMetadata.ChapterInfo.Chapters[i];
long chapLenMs = chapter.LengthMs;
if (i == 0)
chapLenMs -= startMs;
if (config.StripAudibleBrandAudio && i == contentLic.ContentMetadata.ChapterInfo.Chapters.Length - 1)
chapLenMs -= contentLic.ContentMetadata.ChapterInfo.BrandOutroDurationMs;
audiobookDlLic.ChapterInfo.AddChapter(chapter.Title, TimeSpan.FromMilliseconds(chapLenMs));
}
}
NAudio.Lame.LameConfig lameConfig = new();
lameConfig.Mode = NAudio.Lame.MPEGMode.Mono;
if (config.LameTargetBitrate)
{
if (config.LameConstantBitrate)
lameConfig.BitRate = config.LameBitrate;
else
{
lameConfig.ABRRateKbps = config.LameBitrate;
lameConfig.VBR = NAudio.Lame.VBRMode.ABR;
lameConfig.WriteVBRTag = true;
}
}
else
{
lameConfig.VBR = NAudio.Lame.VBRMode.Default;
lameConfig.VBRQuality = config.LameVBRQuality;
lameConfig.WriteVBRTag = true;
}
audiobookDlLic.LameConfig = lameConfig;
return audiobookDlLic;
}
private static void downloadValidation(LibraryBook libraryBook)
{
string errorString(string field)
=> $"{errorTitle()}\r\nCannot download book. {field} is not known. Try re-importing the account which owns this book.";
string errorTitle()
{
var title
= (libraryBook.Book.Title.Length > 53)
? $"{libraryBook.Book.Title.Truncate(50)}..."
: libraryBook.Book.Title;
var errorBookTitle = $"{title} [{libraryBook.Book.AudibleProductId}]";
return errorBookTitle;
};
if (string.IsNullOrWhiteSpace(libraryBook.Account))
throw new Exception(errorString("Account"));
if (string.IsNullOrWhiteSpace(libraryBook.Book.Locale))
throw new Exception(errorString("Locale"));
}
private void AaxcDownloader_RetrievedCoverArt(object _, byte[] e)
{
if (e is not null)
OnCoverImageDiscovered(e);
else if (Configuration.Instance.AllowLibationFixup)
OnRequestCoverArt(abDownloader.SetCoverArt);
}
/// <summary>Move new files to 'Books' directory</summary>
/// <returns>True if audiobook file(s) were successfully created and can be located on disk. Else false.</returns>
private static bool moveFilesToBooksDir(LibraryBook libraryBook, List<FilePathCache.CacheEntry> entries)
{
// create final directory. move each file into it
var destinationDir = AudibleFileStorage.Audio.GetDestinationDirectory(libraryBook);
Directory.CreateDirectory(destinationDir);
FilePathCache.CacheEntry getFirstAudio() => entries.FirstOrDefault(f => f.FileType == FileType.Audio);
if (getFirstAudio() == default)
return false;
for (var i = 0; i < entries.Count; i++)
{
var entry = entries[i];
var realDest = FileUtility.SaferMoveToValidPath(entry.Path, Path.Combine(destinationDir, Path.GetFileName(entry.Path)));
FilePathCache.Insert(libraryBook.Book.AudibleProductId, realDest);
// propagate corrected path. Must update cache with corrected path. Also want updated path for cue file (after this for-loop)
entries[i] = entry with { Path = realDest };
}
var cue = entries.FirstOrDefault(f => f.FileType == FileType.Cue);
if (cue != default)
Cue.UpdateFileName(cue.Path, getFirstAudio().Path);
AudibleFileStorage.Audio.Refresh();
return true;
}
}
}
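// Worked sketch of the chapter-length math in BuildDownloadOptions, with made-up numbers:
// a 2,000 ms brand intro, a 3,000 ms brand outro, and three 10-minute chapters (all assumptions).
namespace FileLiberator.Sketches
{
    public static class ChapterTrimSketch
    {
        public static long[] TrimmedChapterLengthsMs()
        {
            long introMs = 2_000, outroMs = 3_000;
            var lengthsMs = new long[] { 600_000, 600_000, 600_000 };

            lengthsMs[0] -= introMs;    // first chapter starts after the intro: 598,000 ms
            lengthsMs[^1] -= outroMs;   // last chapter drops the outro: 597,000 ms

            // each length is then passed to ChapterInfo.AddChapter(title, TimeSpan.FromMilliseconds(len))
            return lengthsMs;
        }
    }
}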

View file

@ -0,0 +1,31 @@
using System;
using System.Net.Http;
using System.Threading.Tasks;
using Dinah.Core.Net.Http;
namespace FileLiberator
{
// currently only used to download the .zip files for upgrade
public class DownloadFile : Streamable
{
public async Task<string> PerformDownloadFileAsync(string downloadUrl, string proposedDownloadFilePath)
{
var client = new HttpClient();
var progress = new Progress<DownloadProgress>(OnStreamingProgressChanged);
OnStreamingBegin(proposedDownloadFilePath);
try
{
var actualDownloadedFilePath = await client.DownloadFileAsync(downloadUrl, proposedDownloadFilePath, progress);
OnFileCreated("Upgrade", actualDownloadedFilePath);
return actualDownloadedFilePath;
}
finally
{
OnStreamingCompleted(proposedDownloadFilePath);
}
}
}
}

View file

@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using DataLayer;
using Dinah.Core.ErrorHandling;
using Dinah.Core.Net.Http;
using FileManager;
using LibationFileManager;
namespace FileLiberator
{
public class DownloadPdf : Processable
{
public override bool Validate(LibraryBook libraryBook)
=> !string.IsNullOrWhiteSpace(getdownloadUrl(libraryBook))
&& !libraryBook.Book.PDF_Exists;
public override async Task<StatusHandler> ProcessAsync(LibraryBook libraryBook)
{
OnBegin(libraryBook);
try
{
var proposedDownloadFilePath = getProposedDownloadFilePath(libraryBook);
var actualDownloadedFilePath = await downloadPdfAsync(libraryBook, proposedDownloadFilePath);
var result = verifyDownload(actualDownloadedFilePath);
libraryBook.Book.UserDefinedItem.PdfStatus = result.IsSuccess ? LiberatedStatus.Liberated : LiberatedStatus.NotLiberated;
return result;
}
finally
{
OnCompleted(libraryBook);
}
}
private static string getProposedDownloadFilePath(LibraryBook libraryBook)
{
var extension = Path.GetExtension(getdownloadUrl(libraryBook));
// if audio file exists, get its dir. else return base Books dir
var existingPath = Path.GetDirectoryName(AudibleFileStorage.Audio.GetPath(libraryBook.Book.AudibleProductId));
if (existingPath is not null)
return AudibleFileStorage.Audio.GetCustomDirFilename(libraryBook, existingPath, extension);
return AudibleFileStorage.Audio.GetBooksDirectoryFilename(libraryBook, extension);
}
private static string getdownloadUrl(LibraryBook libraryBook)
=> libraryBook?.Book?.Supplements?.FirstOrDefault()?.Url;
private async Task<string> downloadPdfAsync(LibraryBook libraryBook, string proposedDownloadFilePath)
{
OnStreamingBegin(proposedDownloadFilePath);
try
{
var api = await libraryBook.GetApiAsync();
var downloadUrl = await api.GetPdfDownloadLinkAsync(libraryBook.Book.AudibleProductId);
var progress = new Progress<DownloadProgress>(OnStreamingProgressChanged);
var client = new HttpClient();
var actualDownloadedFilePath = await client.DownloadFileAsync(downloadUrl, proposedDownloadFilePath, progress);
OnFileCreated(libraryBook, actualDownloadedFilePath);
OnStatusUpdate(actualDownloadedFilePath);
return actualDownloadedFilePath;
}
finally
{
OnStreamingCompleted(proposedDownloadFilePath);
}
}
private static StatusHandler verifyDownload(string actualDownloadedFilePath)
=> !File.Exists(actualDownloadedFilePath)
? new StatusHandler { "Downloaded PDF cannot be found" }
: new StatusHandler();
}
}

View file

@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\AaxDecrypter\AaxDecrypter.csproj" />
<ProjectReference Include="..\DataLayer\DataLayer.csproj" />
<ProjectReference Include="..\AudibleUtilities\AudibleUtilities.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,76 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using DataLayer;
using Dinah.Core;
using Dinah.Core.ErrorHandling;
namespace FileLiberator
{
public abstract class Processable : Streamable
{
public event EventHandler<LibraryBook> Begin;
/// <summary>General string message to display. DON'T rely on this for success, failure, or control logic</summary>
public event EventHandler<string> StatusUpdate;
public event EventHandler<LibraryBook> Completed;
/// <returns>True == Valid</returns>
public abstract bool Validate(LibraryBook libraryBook);
/// <returns>True == success</returns>
public abstract Task<StatusHandler> ProcessAsync(LibraryBook libraryBook);
// when used in foreach: stateful. deferred execution
public IEnumerable<LibraryBook> GetValidLibraryBooks(IEnumerable<LibraryBook> library)
=> library.Where(libraryBook =>
Validate(libraryBook)
&& (libraryBook.Book.ContentType != ContentType.Episode || LibationFileManager.Configuration.Instance.DownloadEpisodes)
);
public async Task<StatusHandler> ProcessSingleAsync(LibraryBook libraryBook, bool validate)
{
if (validate && !Validate(libraryBook))
return new StatusHandler { "Validation failed" };
Serilog.Log.Logger.Information("Begin " + nameof(ProcessSingleAsync) + " {@DebugInfo}", new
{
libraryBook.Book.Title,
libraryBook.Book.AudibleProductId,
libraryBook.Book.Locale,
Account = libraryBook.Account?.ToMask() ?? "[empty]"
});
var status
= (await ProcessAsync(libraryBook))
?? new StatusHandler { "Processable should never return a null status" };
return status;
}
public async Task<StatusHandler> TryProcessAsync(LibraryBook libraryBook)
=> Validate(libraryBook)
? await ProcessAsync(libraryBook)
: new StatusHandler();
protected void OnBegin(LibraryBook libraryBook)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(Begin), Book = libraryBook.LogFriendly() });
Begin?.Invoke(this, libraryBook);
}
protected void OnStatusUpdate(string statusUpdate)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(StatusUpdate), Status = statusUpdate });
StatusUpdate?.Invoke(this, statusUpdate);
}
protected void OnCompleted(LibraryBook libraryBook)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(Completed), Book = libraryBook.LogFriendly() });
Completed?.Invoke(this, libraryBook);
}
}
}
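// Minimal Processable subclass sketch. The class and its trivial checks are hypothetical;
// it only shows where OnBegin/OnStatusUpdate/OnCompleted sit around ProcessAsync.
namespace FileLiberator.Sketches
{
    using System.Threading.Tasks;
    using DataLayer;
    using Dinah.Core.ErrorHandling;

    public class NoOpProcessable : Processable
    {
        public override bool Validate(LibraryBook libraryBook)
            => libraryBook?.Book is not null;

        public override Task<StatusHandler> ProcessAsync(LibraryBook libraryBook)
        {
            OnBegin(libraryBook);
            try
            {
                OnStatusUpdate("nothing to do");
                return Task.FromResult(new StatusHandler());
            }
            finally
            {
                OnCompleted(libraryBook);
            }
        }
    }
}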

View file

@ -0,0 +1,47 @@
using System;
using Dinah.Core.Net.Http;
namespace FileLiberator
{
public abstract class Streamable
{
public event EventHandler<string> StreamingBegin;
public event EventHandler<DownloadProgress> StreamingProgressChanged;
public event EventHandler<TimeSpan> StreamingTimeRemaining;
public event EventHandler<string> StreamingCompleted;
/// <summary>Fired when a file is successfully saved to disk</summary>
public event EventHandler<(string id, string path)> FileCreated;
protected void OnStreamingBegin(string filePath)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(StreamingBegin), Message = filePath });
StreamingBegin?.Invoke(this, filePath);
}
protected void OnStreamingProgressChanged(DownloadProgress progress) => OnStreamingProgressChanged(null, progress);
protected void OnStreamingProgressChanged(object _, DownloadProgress progress)
{
StreamingProgressChanged?.Invoke(this, progress);
}
protected void OnStreamingTimeRemaining(TimeSpan timeRemaining) => OnStreamingTimeRemaining(null, timeRemaining);
protected void OnStreamingTimeRemaining(object _, TimeSpan timeRemaining)
{
StreamingTimeRemaining?.Invoke(this, timeRemaining);
}
protected void OnStreamingCompleted(string filePath)
{
Serilog.Log.Logger.Debug("Event fired {@DebugInfo}", new { Name = nameof(StreamingCompleted), Message = filePath });
StreamingCompleted?.Invoke(this, filePath);
}
protected void OnFileCreated(DataLayer.LibraryBook libraryBook, string path) => OnFileCreated(libraryBook.Book.AudibleProductId, path);
protected void OnFileCreated(string id, string path)
{
Serilog.Log.Logger.Information("File created {@DebugInfo}", new { Name = nameof(FileCreated), id, path });
LibationFileManager.FilePathCache.Insert(id, path);
FileCreated?.Invoke(this, (id, path));
}
}
}

View file

@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using DataLayer;
using Dinah.Core;
using LibationFileManager;
namespace FileLiberator
{
public static class UtilityExtensions
{
public static (string id, string title, string locale, string account) LogFriendly(this LibraryBook libraryBook)
=> (
id: libraryBook.Book.AudibleProductId,
title: libraryBook.Book.Title,
locale: libraryBook.Book.Locale,
account: libraryBook.Account.ToMask()
);
public static async Task<AudibleApi.Api> GetApiAsync(this LibraryBook libraryBook)
{
var apiExtended = await AudibleUtilities.ApiExtended.CreateAsync(libraryBook.Account, libraryBook.Book.Locale);
return apiExtended.Api;
}
public static LibraryBookDto ToDto(this LibraryBook libraryBook) => new()
{
Account = libraryBook.Account,
AudibleProductId = libraryBook.Book.AudibleProductId,
Title = libraryBook.Book.Title ?? "",
Locale = libraryBook.Book.Locale,
Authors = libraryBook.Book.Authors.Select(c => c.Name).ToList(),
Narrators = libraryBook.Book.Narrators.Select(c => c.Name).ToList(),
SeriesName = libraryBook.Book.SeriesLink.FirstOrDefault()?.Series.Name,
SeriesNumber = libraryBook.Book.SeriesLink.FirstOrDefault()?.Order
};
}
}

View file

@ -0,0 +1 @@
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo(nameof(FileLiberator) + ".Tests")]

View file

@ -0,0 +1,157 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace FileManager
{
/// <summary>
/// Tracks actual locations of files.
/// </summary>
public class BackgroundFileSystem
{
public string RootDirectory { get; private set; }
public string SearchPattern { get; private set; }
public SearchOption SearchOption { get; private set; }
private FileSystemWatcher fileSystemWatcher { get; set; }
private BlockingCollection<FileSystemEventArgs> directoryChangesEvents { get; set; }
private Task backgroundScanner { get; set; }
private object fsCacheLocker { get; } = new();
private List<string> fsCache { get; } = new();
public BackgroundFileSystem(string rootDirectory, string searchPattern, SearchOption searchOptions)
{
RootDirectory = rootDirectory;
SearchPattern = searchPattern;
SearchOption = searchOptions;
Init();
}
public string FindFile(System.Text.RegularExpressions.Regex regex)
{
lock (fsCacheLocker)
return fsCache.FirstOrDefault(s => regex.IsMatch(s));
}
public void RefreshFiles()
{
lock (fsCacheLocker)
{
fsCache.Clear();
fsCache.AddRange(FileUtility.SaferEnumerateFiles(RootDirectory, SearchPattern, SearchOption));
}
}
private void Init()
{
Stop();
lock (fsCacheLocker)
fsCache.AddRange(FileUtility.SaferEnumerateFiles(RootDirectory, SearchPattern, SearchOption));
directoryChangesEvents = new BlockingCollection<FileSystemEventArgs>();
fileSystemWatcher = new FileSystemWatcher(RootDirectory)
{
IncludeSubdirectories = true,
EnableRaisingEvents = true
};
fileSystemWatcher.Created += FileSystemWatcher_Changed;
fileSystemWatcher.Deleted += FileSystemWatcher_Changed;
fileSystemWatcher.Renamed += FileSystemWatcher_Changed;
fileSystemWatcher.Error += FileSystemWatcher_Error;
backgroundScanner = new Task(BackgroundScanner);
backgroundScanner.Start();
}
private void Stop()
{
//Stop raising events
fileSystemWatcher?.Dispose();
//Calling CompleteAdding() will cause background scanner to terminate.
directoryChangesEvents?.CompleteAdding();
//Wait for background scanner to terminate before reinitializing.
backgroundScanner?.Wait();
//Dispose of directoryChangesEvents after backgroundScanner exits.
directoryChangesEvents?.Dispose();
lock (fsCacheLocker)
fsCache.Clear();
}
private void FileSystemWatcher_Error(object sender, ErrorEventArgs e)
{
Stop();
Init();
}
private void FileSystemWatcher_Changed(object sender, FileSystemEventArgs e)
{
directoryChangesEvents.Add(e);
}
#region Background Thread
private void BackgroundScanner()
{
while (directoryChangesEvents.TryTake(out FileSystemEventArgs change, -1))
{
lock (fsCacheLocker)
UpdateLocalCache(change);
}
}
private void UpdateLocalCache(FileSystemEventArgs change)
{
if (change.ChangeType == WatcherChangeTypes.Deleted)
{
RemovePath(change.FullPath);
}
else if (change.ChangeType == WatcherChangeTypes.Created)
{
AddPath(change.FullPath);
}
else if (change.ChangeType == WatcherChangeTypes.Renamed && change is RenamedEventArgs renameChange)
{
RemovePath(renameChange.OldFullPath);
AddPath(renameChange.FullPath);
}
}
private void RemovePath(string path)
{
var pathsToRemove = fsCache.Where(p => p.StartsWith(path)).ToArray();
foreach (var p in pathsToRemove)
fsCache.Remove(p);
}
private void AddPath(string path)
{
if (File.GetAttributes(path).HasFlag(FileAttributes.Directory))
AddUniqueFiles(FileUtility.SaferEnumerateFiles(path, SearchPattern, SearchOption));
else
AddUniqueFile(path);
}
private void AddUniqueFiles(IEnumerable<string> newFiles)
{
foreach (var file in newFiles)
AddUniqueFile(file);
}
private void AddUniqueFile(string newFile)
{
if (!fsCache.Contains(newFile))
fsCache.Add(newFile);
}
#endregion
~BackgroundFileSystem() => Stop();
}
}
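// Rough usage sketch; the root path argument and regex are placeholders.
namespace FileManager.Sketches
{
    using System.IO;
    using System.Text.RegularExpressions;

    public static class BackgroundFileSystemSketch
    {
        public static string FindFirstM4b(string booksRoot)
        {
            var fs = new BackgroundFileSystem(booksRoot, "*.*", SearchOption.AllDirectories);

            // force a full rescan if outside changes are suspected, then search the in-memory cache
            fs.RefreshFiles();
            return fs.FindFile(new Regex(@"\.m4b$", RegexOptions.IgnoreCase));
        }
    }
}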

View file

@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Dinah.Core" Version="4.0.6.1" />
<PackageReference Include="Polly" Version="7.2.3" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Dinah.Core;
namespace FileManager
{
/// <summary>Get valid filename. Advanced features incl. parameterized template</summary>
public class FileNamingTemplate
{
/// <summary>Proposed full file path. May contain optional html-styled template tags. Eg: &lt;name&gt;</summary>
public string Template { get; }
/// <param name="template">Proposed file name with optional html-styled template tags.</param>
public FileNamingTemplate(string template) => Template = ArgumentValidator.EnsureNotNullOrWhiteSpace(template, nameof(template));
/// <summary>Optional step 1: Replace html-styled template tags with parameters. Eg {"name", "Bill Gates"} => /&lt;name&gt;/ => /Bill Gates/</summary>
public Dictionary<string, object> ParameterReplacements { get; } = new Dictionary<string, object>();
/// <summary>Convenience method</summary>
public void AddParameterReplacement(string key, object value)
// using .Add() instead of "[key] = value" will make unintended overwriting throw exception
=> ParameterReplacements.Add(key, value);
/// <summary>If set, truncate each parameter replacement to this many characters. Default 50</summary>
public int? ParameterMaxSize { get; set; } = 50;
/// <summary>Optional step 2: Replace all illegal characters with this. Default=<see cref="string.Empty"/></summary>
public string IllegalCharacterReplacements { get; set; }
/// <summary>Generate a valid path for this file or directory</summary>
public string GetFilePath(bool returnFirstExisting = false)
{
var filename = Template;
foreach (var r in ParameterReplacements)
filename = filename.Replace($"<{formatKey(r.Key)}>", formatValue(r.Value));
return FileUtility.GetValidFilename(filename, IllegalCharacterReplacements, returnFirstExisting);
}
private static string formatKey(string key)
=> key
.Replace("<", "")
.Replace(">", "");
private string formatValue(object value)
{
if (value is null)
return "";
// Other illegal characters will be taken care of later. Must take care of slashes now so params can't introduce new folders.
// Esp important for file templates.
var val = value
.ToString()
.Replace($"{System.IO.Path.DirectorySeparatorChar}", IllegalCharacterReplacements)
.Replace($"{System.IO.Path.AltDirectorySeparatorChar}", IllegalCharacterReplacements);
return
ParameterMaxSize.HasValue && ParameterMaxSize.Value > 0
? val.Truncate(ParameterMaxSize.Value)
: val;
}
}
}
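// Usage sketch; the template, tag names, and values are placeholders.
namespace FileManager.Sketches
{
    public static class FileNamingTemplateSketch
    {
        public static string BuildPath()
        {
            var template = new FileNamingTemplate(@"C:\Books\<author>\<title>.m4b")
            {
                IllegalCharacterReplacements = "_"
            };
            template.AddParameterReplacement("author", "Jane Doe");
            template.AddParameterReplacement("title", "Example: A Story");  // ':' becomes '_'

            // e.g. C:\Books\Jane Doe\Example_ A Story.m4b; " (1)" etc. is appended if the file already exists
            return template.GetFilePath();
        }
    }
}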

View file

@ -0,0 +1,272 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Dinah.Core;
using Polly;
using Polly.Retry;
namespace FileManager
{
public static class FileUtility
{
/// <summary>
/// "txt" => ".txt"
/// <br />".txt" => ".txt"
/// <br />null or whitespace => ""
/// </summary>
public static string GetStandardizedExtension(string extension)
=> string.IsNullOrWhiteSpace(extension)
? (extension ?? "")?.Trim()
: '.' + extension.Trim().Trim('.');
/// <summary>
/// Return position with correct number of leading zeros.
/// <br />- 2 of 9 => "2"
/// <br />- 2 of 90 => "02"
/// <br />- 2 of 900 => "002"
/// </summary>
/// <param name="position">position in sequence. The 'x' in 'x of y'</param>
/// <param name="total">total qty in sequence. The 'y' in 'x of y'</param>
public static string GetSequenceFormatted(int position, int total)
{
ArgumentValidator.EnsureGreaterThan(position, nameof(position), 0);
ArgumentValidator.EnsureGreaterThan(total, nameof(total), 0);
if (position > total)
throw new ArgumentException($"{position} may not be greater than {total}");
return position.ToString().PadLeft(total.ToString().Length, '0');
}
private const int MAX_FILENAME_LENGTH = 255;
private const int MAX_DIRECTORY_LENGTH = 247;
/// <summary>
/// Ensure valid file name path:
/// <br/>- remove invalid chars
/// <br/>- ensure uniqueness
/// <br/>- enforce max file length
/// </summary>
public static string GetValidFilename(string path, string illegalCharacterReplacements = "", bool returnFirstExisting = false)
{
ArgumentValidator.EnsureNotNull(path, nameof(path));
// remove invalid chars
path = GetSafePath(path, illegalCharacterReplacements);
// ensure uniqueness and check lengths
var dir = Path.GetDirectoryName(path);
dir = dir.Truncate(MAX_DIRECTORY_LENGTH);
var filename = Path.GetFileNameWithoutExtension(path);
var fileStem = Path.Combine(dir, filename);
var extension = Path.GetExtension(path);
var fullfilename = fileStem.Truncate(MAX_FILENAME_LENGTH - extension.Length) + extension;
fullfilename = removeInvalidWhitespace(fullfilename);
var i = 0;
while (File.Exists(fullfilename) && !returnFirstExisting)
{
var increm = $" ({++i})";
fullfilename = fileStem.Truncate(MAX_FILENAME_LENGTH - increm.Length - extension.Length) + increm + extension;
}
return fullfilename;
}
// GetInvalidFileNameChars contains everything in GetInvalidPathChars plus ':', '*', '?', '\\', '/'
/// <summary>Use with file name, not full path. Valid path charaters which are invalid file name characters will be replaced: ':', '\\', '/'</summary>
public static string GetSafeFileName(string str, string illegalCharacterReplacements = "")
=> string.Join(illegalCharacterReplacements ?? "", str.Split(Path.GetInvalidFileNameChars()));
/// <summary>Use with full path, not file name. Valid path charaters which are invalid file name characters will be retained: '\\', '/'</summary>
public static string GetSafePath(string path, string illegalCharacterReplacements = "")
{
ArgumentValidator.EnsureNotNull(path, nameof(path));
path = replaceInvalidChars(path, illegalCharacterReplacements);
path = standardizeSlashes(path);
path = replaceColons(path, illegalCharacterReplacements);
path = removeDoubleSlashes(path);
return path;
}
private static char[] invalidChars { get; } = Path.GetInvalidPathChars().Union(new[] {
'*', '?',
// these are weird. If you run Path.GetInvalidPathChars() in Visual Studio's "C# Interactive", then these characters are included.
// In live code, Path.GetInvalidPathChars() does not include them
'"', '<', '>'
}).ToArray();
private static string replaceInvalidChars(string path, string illegalCharacterReplacements)
=> string.Join(illegalCharacterReplacements ?? "", path.Split(invalidChars));
private static string standardizeSlashes(string path)
=> path.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar);
private static string replaceColons(string path, string illegalCharacterReplacements)
{
// replace all colons except within the first 2 chars
var builder = new System.Text.StringBuilder();
for (var i = 0; i < path.Length; i++)
{
var c = path[i];
if (i >= 2 && c == ':')
builder.Append(illegalCharacterReplacements);
else
builder.Append(c);
}
return builder.ToString();
}
private static string removeDoubleSlashes(string path)
{
if (path.Length < 2)
return path;
// exception: don't try to condense the initial dbl bk slashes in a path. eg: \\192.168.0.1
var remainder = path[1..];
var dblSeparator = $"{Path.DirectorySeparatorChar}{Path.DirectorySeparatorChar}";
while (remainder.Contains(dblSeparator))
remainder = remainder.Replace(dblSeparator, $"{Path.DirectorySeparatorChar}");
return path[0] + remainder;
}
private static string removeInvalidWhitespace_pattern { get; } = $@"[\s\.]*\{Path.DirectorySeparatorChar}\s*";
private static Regex removeInvalidWhitespace_regex { get; } = new(removeInvalidWhitespace_pattern, RegexOptions.Compiled | RegexOptions.IgnorePatternWhitespace);
/// <summary>no part of the path may begin or end in whitespace</summary>
private static string removeInvalidWhitespace(string fullfilename)
{
// no whitespace at beginning or end
// replace whitespace around path slashes
// regex (with space added for clarity)
// \s* \\ \s* => \
// no ending dots. beginning dots are valid
// regex is easier by ending with separator
fullfilename += Path.DirectorySeparatorChar;
fullfilename = removeInvalidWhitespace_regex.Replace(fullfilename, Path.DirectorySeparatorChar.ToString());
// take separator back off
fullfilename = RemoveLastCharacter(fullfilename);
fullfilename = removeDoubleSlashes(fullfilename);
return fullfilename;
}
public static string RemoveLastCharacter(this string str) => string.IsNullOrEmpty(str) ? str : str[..^1];
/// <summary>
/// Move file.
/// <br/>- Ensure valid file name path: remove invalid chars, ensure uniqueness, enforce max file length
/// <br/>- Perform <see cref="SaferMove"/>
/// <br/>- Return valid path
/// </summary>
public static string SaferMoveToValidPath(string source, string destination)
{
destination = GetValidFilename(destination);
SaferMove(source, destination);
return destination;
}
private static int maxRetryAttempts { get; } = 3;
private static TimeSpan pauseBetweenFailures { get; } = TimeSpan.FromMilliseconds(100);
private static RetryPolicy retryPolicy { get; } =
Policy
.Handle<Exception>()
.WaitAndRetry(maxRetryAttempts, i => pauseBetweenFailures);
/// <summary>Delete file. No error when source does not exist. Retry up to 3 times before throwing exception.</summary>
public static void SaferDelete(string source)
=> retryPolicy.Execute(() =>
{
try
{
if (!File.Exists(source))
{
Serilog.Log.Logger.Debug("No file to delete: {@DebugText}", new { source });
return;
}
Serilog.Log.Logger.Debug("Attempt to delete file: {@DebugText}", new { source });
File.Delete(source);
Serilog.Log.Logger.Information("File successfully deleted: {@DebugText}", new { source });
}
catch (Exception e)
{
Serilog.Log.Logger.Error(e, "Failed to delete file: {@DebugText}", new { source });
throw;
}
});
/// <summary>Move file. No error when source does not exist. Retry up to 3 times before throwing exception.</summary>
public static void SaferMove(string source, string destination)
=> retryPolicy.Execute(() =>
{
try
{
if (!File.Exists(source))
{
Serilog.Log.Logger.Debug("No file to move: {@DebugText}", new { source });
return;
}
SaferDelete(destination);
var dir = Path.GetDirectoryName(destination);
Serilog.Log.Logger.Debug("Attempt to create directory: {@DebugText}", new { dir });
Directory.CreateDirectory(dir);
Serilog.Log.Logger.Debug("Attempt to move file: {@DebugText}", new { source, destination });
File.Move(source, destination);
Serilog.Log.Logger.Information("File successfully moved: {@DebugText}", new { source, destination });
}
catch (Exception e)
{
Serilog.Log.Logger.Error(e, "Failed to move file: {@DebugText}", new { source, destination });
throw;
}
});
/// <summary>
/// A safer way to get all the files in a directory and sub directory without crashing on UnauthorizedException or PathTooLongException
/// </summary>
/// <param name="rootPath">Starting directory</param>
/// <param name="patternMatch">Filename pattern match</param>
/// <param name="searchOption">Search subdirectories or only top level directory for files</param>
/// <returns>List of files</returns>
public static IEnumerable<string> SaferEnumerateFiles(string path, string searchPattern = "*", SearchOption searchOption = SearchOption.TopDirectoryOnly)
{
var foundFiles = Enumerable.Empty<string>();
if (searchOption == SearchOption.AllDirectories)
{
try
{
IEnumerable<string> subDirs = Directory.EnumerateDirectories(path);
// Add files in subdirectories recursively to the list
foreach (string dir in subDirs)
foundFiles = foundFiles.Concat(SaferEnumerateFiles(dir, searchPattern, searchOption));
}
catch (UnauthorizedAccessException) { }
catch (PathTooLongException) { }
}
try
{
// Add files from the current directory
foundFiles = foundFiles.Concat(Directory.EnumerateFiles(path, searchPattern));
}
catch (UnauthorizedAccessException) { }
return foundFiles;
}
}
}
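// Small usage sketch of the helpers above; all paths are placeholders.
namespace FileManager.Sketches
{
    public static class FileUtilitySketch
    {
        public static string MoveIntoLibrary()
        {
            var ext = FileUtility.GetStandardizedExtension("m4b");    // ".m4b"
            var seq = FileUtility.GetSequenceFormatted(2, 150);       // "002"

            // the ':' is stripped by GetValidFilename inside SaferMoveToValidPath; the move retries up to 3 times
            return FileUtility.SaferMoveToValidPath(
                @"C:\InProgress\book.m4b",
                $@"C:\Books\Author\Title: Part {seq}{ext}");
        }
    }
}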

View file

@ -0,0 +1,242 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace FileManager
{
public class PersistentDictionary
{
public string Filepath { get; }
public bool IsReadOnly { get; }
// optimize for strings. expectation is most settings will be strings and a rare exception will be something else
private Dictionary<string, string> stringCache { get; } = new Dictionary<string, string>();
private Dictionary<string, object> objectCache { get; } = new Dictionary<string, object>();
public PersistentDictionary(string filepath, bool isReadOnly = false)
{
Filepath = filepath;
IsReadOnly = isReadOnly;
if (File.Exists(Filepath))
return;
// will create any missing directories, incl subdirectories. if all already exist: no action
Directory.CreateDirectory(Path.GetDirectoryName(filepath));
if (IsReadOnly)
return;
createNewFile();
}
public string GetString(string propertyName)
{
if (!stringCache.ContainsKey(propertyName))
{
var jObject = readFile();
if (!jObject.ContainsKey(propertyName))
return null;
stringCache[propertyName] = jObject[propertyName].Value<string>();
}
return stringCache[propertyName];
}
public T GetNonString<T>(string propertyName)
{
var obj = GetObject(propertyName);
if (obj is null) return default;
if (obj is JValue jValue) return jValue.Value<T>();
if (obj is JObject jObject) return jObject.ToObject<T>();
return (T)obj;
}
public object GetObject(string propertyName)
{
if (!objectCache.ContainsKey(propertyName))
{
var jObject = readFile();
if (!jObject.ContainsKey(propertyName))
return null;
objectCache[propertyName] = jObject[propertyName].Value<object>();
}
return objectCache[propertyName];
}
public string GetStringFromJsonPath(string jsonPath, string propertyName) => GetStringFromJsonPath($"{jsonPath}.{propertyName}");
public string GetStringFromJsonPath(string jsonPath)
{
if (!stringCache.ContainsKey(jsonPath))
{
try
{
var jObject = readFile();
var token = jObject.SelectToken(jsonPath);
if (token is null)
return null;
stringCache[jsonPath] = (string)token;
}
catch
{
return null;
}
}
return stringCache[jsonPath];
}
public bool Exists(string propertyName) => readFile().ContainsKey(propertyName);
private object locker { get; } = new object();
public void SetString(string propertyName, string newValue)
{
// only do this check in string cache, NOT object cache
if (stringCache.ContainsKey(propertyName) && stringCache[propertyName] == newValue)
return;
// set cache
stringCache[propertyName] = newValue;
writeFile(propertyName, newValue);
}
public void SetNonString(string propertyName, object newValue)
{
// set cache
objectCache[propertyName] = newValue;
var parsedNewValue = JToken.Parse(JsonConvert.SerializeObject(newValue));
writeFile(propertyName, parsedNewValue);
}
private void writeFile(string propertyName, JToken newValue)
{
if (IsReadOnly)
return;
// write new setting to file
lock (locker)
{
var jObject = readFile();
var startContents = JsonConvert.SerializeObject(jObject, Formatting.Indented);
jObject[propertyName] = newValue;
var endContents = JsonConvert.SerializeObject(jObject, Formatting.Indented);
if (startContents == endContents)
return;
File.WriteAllText(Filepath, endContents);
}
try
{
var str = formatValueForLog(newValue?.ToString());
Serilog.Log.Logger.Information("Config changed. {@DebugInfo}", new { propertyName, newValue = str });
}
catch { }
}
/// <summary>WILL ONLY set if already present. WILL NOT create new</summary>
/// <returns>Value was changed</returns>
public bool SetWithJsonPath(string jsonPath, string propertyName, string newValue, bool suppressLogging = false)
{
if (IsReadOnly)
return false;
var path = $"{jsonPath}.{propertyName}";
{
// only do this check in string cache, NOT object cache
if (stringCache.ContainsKey(path) && stringCache[path] == newValue)
return false;
// set cache
stringCache[path] = newValue;
}
try
{
lock (locker)
{
var jObject = readFile();
var token = jObject.SelectToken(jsonPath);
if (token is null || token[propertyName] is null)
return false;
var oldValue = token.Value<string>(propertyName);
if (oldValue == newValue)
return false;
token[propertyName] = newValue;
File.WriteAllText(Filepath, JsonConvert.SerializeObject(jObject, Formatting.Indented));
}
}
catch (Exception)
{
return false;
}
if (!suppressLogging)
{
try
{
var str = formatValueForLog(newValue?.ToString());
Serilog.Log.Logger.Information("Config changed. {@DebugInfo}", new { jsonPath, propertyName, newValue = str });
}
catch { }
}
return true;
}
private static string formatValueForLog(string value)
=> value is null ? "[null]"
: string.IsNullOrEmpty(value) ? "[empty]"
: string.IsNullOrWhiteSpace(value) ? $"[whitespace. Length={value.Length}]"
: value.Length > 100 ? $"[Length={value.Length}] {value[0..50]}...{value[^50..^0]}"
: value;
private JObject readFile()
{
if (!File.Exists(Filepath))
{
var msg = "Unrecoverable error. Settings file cannot be found";
var ex = new FileNotFoundException(msg, Filepath);
Serilog.Log.Logger.Error(ex, msg);
throw ex;
}
var settingsJsonContents = File.ReadAllText(Filepath);
if (string.IsNullOrWhiteSpace(settingsJsonContents))
{
createNewFile();
settingsJsonContents = File.ReadAllText(Filepath);
}
var jObject = JsonConvert.DeserializeObject<JObject>(settingsJsonContents);
if (jObject is null)
{
var msg = "Unrecoverable error. Unable to read settings from Settings file";
var ex = new NullReferenceException(msg);
Serilog.Log.Logger.Error(ex, msg);
throw ex;
}
return jObject;
}
private void createNewFile()
{
File.WriteAllText(Filepath, "{}");
System.Threading.Thread.Sleep(100);
}
}
}
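// Usage sketch; the settings path and property names are placeholders.
namespace FileManager.Sketches
{
    public static class PersistentDictionarySketch
    {
        public static bool RoundTrip(string settingsJsonPath)
        {
            var settings = new PersistentDictionary(settingsJsonPath);

            settings.SetString("Theme", "dark");
            var theme = settings.GetString("Theme");    // "dark", from cache

            settings.SetNonString("DownloadEpisodes", true);
            return theme == "dark" && settings.GetNonString<bool>("DownloadEpisodes");
        }
    }
}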

View file

@ -0,0 +1 @@
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo(nameof(FileManager) + ".Tests")]

Some files were not shown because too many files have changed in this diff.