commit 9a1838beccd63f1039ab11dc55ad20d22ee827e9 Author: Robert Morrison Date: Fri May 12 22:58:58 2023 +0100 EVIL COMMIT! That one evil commit that means you've actually started development like a real developer. But before that you just wrote things diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9965de2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,477 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET +project.lock.json +project.fragment.lock.json +artifacts/ + +# Tye +.tye/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db 
+*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files +*.ncb +*.aps + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml + +## +## Visual studio for Mac +## + + 
+# globs +Makefile.in +*.userprefs +*.usertasks +config.make +config.status +aclocal.m4 +install-sh +autom4te.cache/ +*.tar.gz +tarballs/ +test-results/ + +# Mac bundle stuff +*.dmg +*.app + +# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk diff --git a/Config.cs b/Config.cs new file mode 100644 index 0000000..41b0e3d --- /dev/null +++ b/Config.cs @@ -0,0 +1,173 @@ +using Serilog; +using Tommy; + +namespace DownloadManager; + +class configuration +{ + private record struct options(bool dryRun, + bool confirm, + string downloadDirectory, + string logDirectory); + + /* Default locations for essential things + arrays can be added to later by the user with the last valid entry being used.*/ + private static readonly string _home = + Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); + private static readonly string _defaultConfigLocation = + _home + "/.config/sherlock5512/downloadmanager"; + private string[] _configLocations = + { $"{_defaultConfigLocation}/config.toml" }; + private string[] _ruleDirLocation = + { $"{_defaultConfigLocation}/rules/" }; + + private static readonly options _defaultOptions = + new 
options( + dryRun: false, + confirm: false, + downloadDirectory: _home + "/Downloads", + logDirectory: _home + "/.local/share/sherlock5512"); + + + /* Options are exposed via properties */ + private options _options; + public bool dryRun { get => _options.dryRun; } + public bool confirm { get => _options.confirm; } + public string downloadDirectory { get => _options.downloadDirectory; } + public string logDirectory { get => _options.logDirectory; } + public string[] ruleDirectories /* Only returns directories that exist */ + { + get => _ruleDirLocation + .Where(x => Directory.Exists(x)) + .ToArray(); + } + + + public configuration() + { + _options = new(); + Log.Information("new Configuration object created"); + _options = loadConfig() ?? _options; /* Attempt to load config */ + createDirs(); + verifyConfig(); + } + + + /* When arguments are passed we use them and don't load a config*/ + public configuration(bool? dryRun, bool? confirm, string? downloadDirectory, string? logDirectory) + { + _options = new( /* labels are used here to allow constructor signature changes */ + dryRun: dryRun ?? _defaultOptions.dryRun, + confirm: confirm ?? _defaultOptions.confirm, + downloadDirectory: downloadDirectory ?? _defaultOptions.downloadDirectory, + logDirectory: logDirectory ?? _defaultOptions.logDirectory + ); + createDirs(); + verifyConfig(); + } + + /* If the user specifies a config file on the command line */ + public configuration(string configLocation) + { + _configLocations.Append(configLocation); + var opt = loadConfig(); + + if (opt is null) + { + Log.Fatal("Could not load user provided config {loc}", configLocation); + Environment.Exit(78); + } + _options = (options)opt; + createDirs(); + verifyConfig(); + } + + + private options? 
loadConfig() + { + + string[] ValidLocations = _configLocations.Where(x => Path.Exists(x)).ToArray(); + + foreach (var location in ValidLocations) + { + Log.Information("Attempting to load config from {location}", location); + try + { + Log.Debug("In try statement"); + using (StreamReader reader = File.OpenText(location)) + { + TomlTable table = TOML.Parse(reader); + + Log.Debug("Parsed TOML to table: {@table}", table); + // get our config values + bool dryRun = table.HasKey("dryRun") ? table["dryRun"] : _defaultOptions.dryRun; + bool confirm = table.HasKey("confirm") ? table["confirm"] : _defaultOptions.confirm; + string downloadDirectory = table.HasKey("downloadDirectory") ? table["downloadDirectory"] : _defaultOptions.downloadDirectory; + string logDirectory = table.HasKey("logDirectory") ? table["logDirectory"] : _defaultOptions.logDirectory; + + // construct the options object + options opts = new( + dryRun: dryRun, + confirm: confirm, + downloadDirectory: downloadDirectory, + logDirectory: logDirectory + ); + + Log.Debug("Constructed options object: {options}", opts); + return opts; + } + } + catch (TomlParseException e) + { + foreach (var syntaxEx in e.SyntaxErrors) + { + Log.Error("Toml error at l:{line} c:{col}: {message}", syntaxEx.Line, syntaxEx.Column, syntaxEx.Message); + } + } + catch (Exception e) + { + Log.Fatal(e, "Unhandled error decoding toml {location}", location); + Environment.Exit(78); + } + Log.Debug("After try catch block"); + } + Log.Error("Attempted to load all valid locations but no config could be loaded successfully"); + return null; + } + + + private bool verifyConfig() + { + + if (!Directory.Exists(_options.logDirectory)) + { + Log.Fatal("Log directory {dir} does not exist", logDirectory); + Environment.Exit(78); + } + if (!Directory.Exists(_options.downloadDirectory)) + { + Log.Fatal("Downloads directory {dir} does not exist", downloadDirectory); + Environment.Exit(78); + } + + return true; + } + + /* Attempt to create some 
directories if missing */ + private bool createDirs() + { + try + { + Directory.CreateDirectory(_defaultConfigLocation); + Directory.CreateDirectory(_options.logDirectory); + } + catch (UnauthorizedAccessException e) + { + Log.Warning(e, "Failed to create directory, This may cause other errors"); + return false; + } + return true; + } + + +} diff --git a/DownloadManager.csproj b/DownloadManager.csproj new file mode 100644 index 0000000..e84a623 --- /dev/null +++ b/DownloadManager.csproj @@ -0,0 +1,23 @@ + + + + Exe + net7.0 + enable + enable + true + + + + + + + + + + + + + + + diff --git a/ImageProcessing.cs b/ImageProcessing.cs new file mode 100644 index 0000000..0ed519c --- /dev/null +++ b/ImageProcessing.cs @@ -0,0 +1,116 @@ +using Serilog; +using SkiaSharp; + +/// +/// Helper class to get data from image files +/// +public class ImageProcessing +{ + /// Memoization dictionary for the GCD function + private static Dictionary<(int, int), int> _gcdMemo = new(); + + /// + /// Uses the recursive euclidean method to calcuate the GCD of two integers + /// + /// the first integer + /// the second integer + /// + /// Thrown when either of the inputs is negative + /// + /// + /// The GCD of the inputs + /// + private int GCD(int a, int b) + { + + Log.Debug("calculating GCD for a:{a} b:{b}", a, b); + if (a < 0) + { + throw new ArgumentOutOfRangeException("a", "This function can only accept positive integers"); + } + if (b < 0) + { + throw new ArgumentOutOfRangeException("b", "This function can only accept positive integers"); + } + + + /* We can only run this calculation when a is larger than b */ + if (a < b) + { + return GCD(b, a); + } + + /* Check if we have already calculated these inputs */ + if (_gcdMemo.TryGetValue((a, b), out int gcd)) + { + Log.Debug("Memo hit, returning {gcd}", gcd); + return gcd; + } + + /* Otherwise we calculate the GCD and return it */ + /* Note that the recursive nature of this function means we may hit a match more often */ + Log.Debug("No 
memo hit, running calculation"); + gcd = b == 0 ? a : GCD(b, a % b); + + _gcdMemo.Add((a, b), gcd); + + return gcd; + } + + /// + /// Where possible calculate the aspect ratio of the image at " + /// + /// + /// The path to an image file + /// + /// + /// Either the string representation of the images aspect ratio + /// or + /// + /// + /// This function may return null if + /// + /// does not point to an image + /// The image cannot be handled by SKIA + /// The image has no size + /// You don't have permission to read the image + /// + /// + public string? GetAspectRatioString(string path) + { + /* This function uses some clever abstraction to allow callers to only worry about passing a path */ + return (GetAspectRatioString(LoadImage(path))); + } + + private SKBitmap? LoadImage(string path) + { + try + { + using (var stream = File.OpenRead(path)) + { + var bitmap = SKBitmap.Decode(stream); + return bitmap; + } + } + catch (Exception e) + { + Log.Error(e, "Error decoding image {path}", path); + return null; + } + } + + private string? GetAspectRatioString(SKBitmap? 
bitmap) + { + if (bitmap == null) + { + return null; + } + + int gcd = GCD(bitmap.Width, bitmap.Height); + int aspectW = bitmap.Width / gcd; + int aspectH = bitmap.Height / gcd; + + return $"{aspectW}:{aspectH}"; + } + +} diff --git a/Program.cs b/Program.cs new file mode 100644 index 0000000..f1dbf9a --- /dev/null +++ b/Program.cs @@ -0,0 +1,38 @@ +using Serilog; +using System.CommandLine; + +namespace DownloadManager; + +class Program +{ + static void Main(string[] args) + { + Log.Logger = new LoggerConfiguration() + .MinimumLevel.Debug() + .WriteTo.Console() + .CreateLogger(); + Log.Information("Starting DownloadManager"); + + configuration config = new(); + Log.Logger = new LoggerConfiguration() + .MinimumLevel.Debug() + .WriteTo.Console() + .WriteTo.File($"{config.logDirectory}/log.txt", rollingInterval: RollingInterval.Day) + .CreateLogger(); + Log.Information("Configuration created, Logger re-created with file logging."); + + RootCommand root = new RootCommand("Download Manager"); + + root.SetHandler(() => { run(config); }); + + root.Invoke(args); + } + + + /* Handler method for root command */ + static void run(configuration config) + { + RuleManager ruleManager = new(ref config); + ruleManager.ApplyRules(); + } +} diff --git a/Readme.md b/Readme.md new file mode 100644 index 0000000..b45fcc9 --- /dev/null +++ b/Readme.md @@ -0,0 +1,59 @@ +# DownloadManager + +WARNING: this is WIP code and currently does NOT work properly + +A simple yet complex program to do something dumb. + +Basically write rules, run the program and watch your files fall in to +line. No more cluttered download folders. + +## Features + +You can currently + +- Write rules +- Run rules + +### Coming someday + +Future features include. 
+ +- A built in rule editor/validator +- Command line options (the framework is there, it just needs the implementation) +- More/Better output +- Interactive mode (e.g. "Do you want to do this?") +- Pre-apply and Apply stages separated + - This allows for the showing of "SOME FILES -> Destination, OTHER + FILES -> Other destination" + - Also makes confirmation on batches easier. + - Could also allow for making the Apply stage async, allowing the UI to + update as files are moved (progress bar much?) + +## Rules + +Rules are written in JSON and _must_ contain all 4 values to be loaded +properly; any malformed rules will not be loaded, with the errors logged. + +**NOTE**: For now _**ALL**_ paths must be absolute. This is due to C# being +generally designed around Windows and as such not having the features to +handle unix style paths e.g `~/Downloads`. In the future I plan to +replace `~` at the start of a path with the running user's home directory. + +Type is an integer that represents how to match the pattern + +1. Exact match (**Implemented**) +2. Regex matching (**Being written**) +3. Shell style globbing (**This may never happen as it seems hard to do and +kinda redundant with regex**) +4. Danbooru - this is a special match type that doesn't use the pattern +field; instead it finds files that are likely to be from danbooru +(prefixed with `__`) and then sorts them based on their aspect ratio. 
+ +```json +{ + "Name" : "Example Rule", + "Type" : 1, + "Pattern" : "File.File", + "Destination" : "/Directory" +} +``` diff --git a/Rule.cs b/Rule.cs new file mode 100644 index 0000000..725a47b --- /dev/null +++ b/Rule.cs @@ -0,0 +1,253 @@ +using Serilog; +using Spectre.Console; +using System.Text.Json; +using System.Text.RegularExpressions; +namespace DownloadManager; + +record struct rule +{ + public required string Name { get; init; } + public required PatternType Type { get; init; } + public required string Pattern { get; init; } + public required string Destination { get; init; } +} + +/* PatternType is included here for ease of use */ +enum PatternType +{ + ExactMatch = 1, + Regex = 2, + Glob = 3, + Danbooru = 4, +} + +/* Management class to make handing rules easier */ +class RuleManager +{ + + private List _rules; + private configuration _config; + private static ImageProcessing IP = new(); + + public RuleManager(ref configuration config) + { + _rules = new(); /* Start with no rules and load them later */ + _config = config; + loadRules(); + } + + // attempt to load as many rules as possible + private void loadRules() + { + string[] ruleDirectories = _config.ruleDirectories; + string[] ruleFiles = { }; + Log.Information("Attempting to load rules"); + + if (ruleDirectories.Length == 0) + { + Log.Warning("No rule directories loaded"); + return; + } + + try + { + Log.Information("Attempting to find rules in {dir}", ruleDirectories.Last()); + ruleFiles = Directory.GetFiles(ruleDirectories.Last()); /* Use last since it will be the users choice */ + + } + catch (IOException e) + { + Log.Error(e, "{path} may be a file and not a directory", ruleDirectories.Last()); + } + catch (UnauthorizedAccessException e) + { + Log.Error(e, "You do not have permission to open {path}", ruleDirectories.Last()); + } + catch (Exception e) + { + Log.Fatal(e, "Unexpected exception occured, Please open a GitHub issue"); + Environment.Exit(70); + } + + + Log.Information("found {count} 
rules in {path}", ruleFiles.Length, ruleDirectories.Last()); + Log.Debug("{@rules}", ruleFiles); + + if (ruleFiles.Length == 0) + { + Log.Warning("No rules loaded"); + return; + } + + Log.Information("Loading rules from ruleFiles"); + + foreach (var file in ruleFiles) + { + try + { + Log.Information("Attempting to load rule {name}", Path.GetFileName(file)); + using (var ruleStream = File.OpenText(file)) + { + string json = ruleStream.ReadToEnd(); + Log.Debug("{json}", json); + rule r = JsonSerializer.Deserialize(json); + Log.Information("Deserialised rule {@rule}", r); + _rules.Add(r); + } + } + catch (JsonException e) + { + Log.Error(e, "Could not deserialise JSON rule"); + } + catch (UnauthorizedAccessException e) + { + Log.Error(e, "I/O error while reading rule"); + } + catch (Exception e) + { + Log.Fatal(e, "Unexpected exception occurred, Please open a GitHub issue"); + Environment.Exit(70); + } + } + _rules.Sort(delegate (rule x, rule y) + { + return x.Name.CompareTo(y.Name); + }); + } + + public bool ApplyRules() + { + if (_rules.Count() == 0) + { + Log.Warning("ApplyRules was called but there are no rules configured"); + return false; + } + + Log.Information("Attempting to run {count} rules, Dryrun:{dryrun}", _rules.Count(), _config.dryRun); + + foreach (var rule in _rules) + { + bool result = false; + switch (rule.Type) + { + case PatternType.ExactMatch: + result = ApplyExact(rule); + break; + case PatternType.Regex: + result = ApplyRegex(rule); + break; + case PatternType.Glob: + result = ApplyGlob(rule); + break; + case PatternType.Danbooru: + result = ApplyDanbooru(rule); + break; + } + if (result) + { + Log.Information("Successfully applied rule {name}", rule.Name); + + } + } + + return false; + } + + private bool ApplyExact(rule rule) + { + // this ruletype should only match one file + string? 
file = Directory.GetFiles(_config.downloadDirectory) + .Where(x => Path.GetFileName(x) == rule.Pattern) + .FirstOrDefault(defaultValue: null); + if (file is null) + { + Log.Information("Could not apply rule {name} as nothing matched {pattern}", rule.Name, rule.Pattern); + return false; + } + safeMove(file, rule.Destination); + return true; + } + + private bool ApplyRegex(rule rule) + { + Regex rx = new Regex(rule.Pattern, RegexOptions.Compiled); + + string[] files = Directory.GetFiles(_config.downloadDirectory) + .Where(path => rx.IsMatch(Path.GetFileName(path))) // Match against file name to make regex more logical + .ToArray(); + + if (files.Length == 0) + { + Log.Information("Could not apply rule {name} as nothing matched {pattern}", rule.Name, rule.Pattern); + return false; + } + + foreach (var file in files) + { + safeMove(file, rule.Destination); + } + + return false; + } + + private bool ApplyGlob(rule rule) { return false; } + + private bool ApplyDanbooru(rule rule) + { + string[] files = Directory.GetFiles(_config.downloadDirectory) + .Where(path => Path.GetFileName(path).Substring(0, 2) == "__") + .ToArray(); + + if (files.Length == 0) + { + Log.Information("Could not apply rule {name}, no files applicable", rule.Name); + } + + foreach (var file in files) + { + string? 
aspect = IP.GetAspectRatioString(file); + + if (aspect is null) + { + continue; // probably not an image since cannot calculate aspect ratio + } + + string dest = Path.Combine(rule.Destination, aspect); + safeMove(file, dest); + } + + + return false; + } + + // TODO: add appropriate exception avoidance/handling here + private void safeMove(string file, string targetDir) + { + Log.Information("Moving {file} to {targetdir}", file, targetDir); + string target = Path.Combine(targetDir, Path.GetFileName(file)); + + + if (File.Exists(target)) + { + Log.Warning("target {target} already exists", target); + target = Path.Combine(targetDir, + Path.GetFileNameWithoutExtension(file), + DateTime.Today.ToString("yyyy-MM-dd"), + Path.GetExtension(file)); + Log.Warning("Saving as: {target}", target); + } + + if (_config.dryRun) { return; } + + if (_config.confirm) + { + if (!AnsiConsole.Confirm("Move file?")) + { + return; + } + } + + Directory.CreateDirectory(targetDir); + File.Move(file, target); + } +} diff --git a/TODO b/TODO new file mode 100644 index 0000000..20fc97a --- /dev/null +++ b/TODO @@ -0,0 +1,4 @@ +- Verify config works properly. + +- Ensure danbooru rule works properly +- consider if using TOML for rules would be a good idea